Mirror of https://github.com/astral-sh/setup-uv.git, synced 2025-01-31 14:31:25 +08:00
Support a list of cache-dependency-glob (#41)
Closes: #40

Because of build conflicts (I forgot to run `npm i` after pulling), I deleted the `dist` folder to start fresh. It turned out the folder still contained the source maps from the very first commit; I had removed the argument that generates them a long time ago.
This commit is contained in: parent 606b0d67da, commit d9a2b6b6fa
.github/workflows/test-cache.yml (40 lines changed, vendored)
```diff
@@ -48,6 +48,46 @@ jobs:
       - run: uv sync
         working-directory: __tests__/fixtures/uv-project
 
+  test-setup-cache-dependency-glob:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Setup with cache
+        uses: ./
+        with:
+          enable-cache: true
+          cache-dependency-glob: |
+            __tests__/fixtures/uv-project/uv.lock
+            **/pyproject.toml
+          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
+      - run: uv sync
+        working-directory: __tests__/fixtures/uv-project
+  test-restore-cache-dependency-glob:
+    runs-on: ubuntu-latest
+    needs: test-setup-cache-dependency-glob
+    steps:
+      - uses: actions/checkout@v4
+      - name: Change pyproject.toml
+        run: |
+          echo '[tool.uv]' >> __tests__/fixtures/uv-project/pyproject.toml
+          echo 'dev-dependencies = []' >> __tests__/fixtures/uv-project/pyproject.toml
+      - name: Restore with cache
+        id: restore
+        uses: ./
+        with:
+          enable-cache: true
+          cache-dependency-glob: |
+            __tests__/fixtures/uv-project/uv.lock
+            **/pyproject.toml
+          cache-suffix: ${{ github.run_id }}-${{ github.run_attempt }}
+      - name: Cache was not hit
+        run: |
+          if [ "$CACHE_HIT" == "true" ]; then
+            exit 1
+          fi
+        env:
+          CACHE_HIT: ${{ steps.restore.outputs.cache-hit }}
+
   test-setup-cache-local:
     runs-on: oracle-aarch64
     steps:
```
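The restore job works because the cache key embeds a hash of every file matched by `cache-dependency-glob`: appending to `pyproject.toml` changes that hash, so the second job must see a cache miss. A minimal sketch of that mechanism using `hashFiles` from `@actions/glob` (the same helper the action calls internally); it assumes it is run from the repository root, which it passes explicitly as the workspace:

```ts
import * as fs from "node:fs";
import * as glob from "@actions/glob";

// The same newline-separated pattern list the workflow above passes in.
const patterns = [
  "__tests__/fixtures/uv-project/uv.lock",
  "**/pyproject.toml",
].join("\n");

async function main(): Promise<void> {
  // Resolve patterns against the current directory (the repo root).
  const before = await glob.hashFiles(patterns, process.cwd());

  // Mutate one matched file, exactly like the "Change pyproject.toml" step.
  fs.appendFileSync(
    "__tests__/fixtures/uv-project/pyproject.toml",
    "\n[tool.uv]\ndev-dependencies = []\n",
  );

  const after = await glob.hashFiles(patterns, process.cwd());
  console.log(before !== after); // true: a new hash means a new cache key
}

main();
```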
README.md (10 lines changed)
````diff
@@ -125,6 +125,16 @@ changes. The glob matches files relative to the repository root.
     cache-dependency-glob: "**requirements*.txt"
 ```
 
+```yaml
+- name: Define a list of cache dependency globs
+  uses: astral-sh/setup-uv@v1
+  with:
+    enable-cache: true
+    cache-dependency-glob: |
+      '**requirements*.txt'
+      '**pyproject.toml'
+```
+
 ### API rate limit
 
 To avoid hitting the `API rate limit exceeded` error, supply a GitHub token via the `github-token`
````
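A multi-line YAML scalar reaches the action as a single string with embedded newlines, and `hashFiles` from `@actions/glob` accepts such a list directly, one pattern per line. A hedged sketch of the plumbing; the input name matches the action's `action.yml`, but reading it with `core.getInput` here is an assumption about the inputs module, which this diff does not show:

```ts
import * as core from "@actions/core";
import * as glob from "@actions/glob";

// "**requirements*.txt\n**pyproject.toml" for the README example above.
const cacheDependencyGlob = core.getInput("cache-dependency-glob");

async function hashDependencies(): Promise<string> {
  // The final `true` enables verbose logging of each matched file,
  // mirroring the new call in the compiled diffs below.
  return glob.hashFiles(cacheDependencyGlob, undefined, undefined, true);
}
```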
dist/save-cache/index.js (10 lines changed, generated, vendored)
```diff
@@ -82825,16 +82825,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.STATE_CACHE_MATCHED_KEY = exports.STATE_CACHE_KEY = void 0;
 exports.restoreCache = restoreCache;
 const cache = __importStar(__nccwpck_require__(7799));
 const glob = __importStar(__nccwpck_require__(8090));
 const core = __importStar(__nccwpck_require__(2186));
-const path_1 = __importDefault(__nccwpck_require__(1017));
 const inputs_1 = __nccwpck_require__(9378);
 const platforms_1 = __nccwpck_require__(6005);
 exports.STATE_CACHE_KEY = "cache-key";
@@ -82862,10 +82858,10 @@ function computeKeys(version) {
     return __awaiter(this, void 0, void 0, function* () {
         let cacheDependencyPathHash = "-";
         if (inputs_1.cacheDependencyGlob !== "") {
-            const fullCacheDependencyGlob = `${process.env["GITHUB_WORKSPACE"]}${path_1.default.sep}${inputs_1.cacheDependencyGlob}`;
-            cacheDependencyPathHash += yield glob.hashFiles(fullCacheDependencyGlob);
+            core.info(`Searching files using cache dependency glob: ${inputs_1.cacheDependencyGlob.split("\n").join(",")}`);
+            cacheDependencyPathHash += yield glob.hashFiles(inputs_1.cacheDependencyGlob, undefined, undefined, true);
            if (cacheDependencyPathHash === "-") {
-                throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob}], make sure you have checked out the target repository`);
+                throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
            }
        }
        else {
```
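The first hunk shrinks because removing `import path from "path"` from the source also removes the only use of TypeScript's `__importDefault` helper, so the bundler drops both. A small sketch of what that helper does, for context (the helper body is copied from the deleted bundle lines; the demo usage is illustrative):

```ts
// TypeScript emits this wrapper for `import path from "path"` when
// compiling to CommonJS; it normalizes CJS modules to expose a `default`.
const importDefault = (mod: any) =>
  mod && mod.__esModule ? mod : { default: mod };

const path_1 = importDefault(require("path"));
console.log(path_1.default.sep); // "/" on POSIX, "\\" on Windows
```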
dist/save-cache/index.js.map (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/save-cache/sourcemap-register.js (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/setup/37.index.js (453 lines changed, generated, vendored)
@@ -1,453 +0,0 @@
(Entire file deleted: 453 lines of stale webpack output, a vendored copy of node-fetch's multipart/form-data support comprising the `MultipartParser` state machine and the `toFormData` helper in module 4037. Per the commit message, this chunk was emitted by the very first build and is no longer produced once `dist` is regenerated.)
dist/setup/37.index.js.map (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/setup/index.js (10 lines changed, generated, vendored)
```diff
@@ -83722,16 +83722,12 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
         step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
 };
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.STATE_CACHE_MATCHED_KEY = exports.STATE_CACHE_KEY = void 0;
 exports.restoreCache = restoreCache;
 const cache = __importStar(__nccwpck_require__(7799));
 const glob = __importStar(__nccwpck_require__(8090));
 const core = __importStar(__nccwpck_require__(2186));
-const path_1 = __importDefault(__nccwpck_require__(1017));
 const inputs_1 = __nccwpck_require__(9378);
 const platforms_1 = __nccwpck_require__(6005);
 exports.STATE_CACHE_KEY = "cache-key";
@@ -83759,10 +83755,10 @@ function computeKeys(version) {
     return __awaiter(this, void 0, void 0, function* () {
         let cacheDependencyPathHash = "-";
         if (inputs_1.cacheDependencyGlob !== "") {
-            const fullCacheDependencyGlob = `${process.env["GITHUB_WORKSPACE"]}${path_1.default.sep}${inputs_1.cacheDependencyGlob}`;
-            cacheDependencyPathHash += yield glob.hashFiles(fullCacheDependencyGlob);
+            core.info(`Searching files using cache dependency glob: ${inputs_1.cacheDependencyGlob.split("\n").join(",")}`);
+            cacheDependencyPathHash += yield glob.hashFiles(inputs_1.cacheDependencyGlob, undefined, undefined, true);
            if (cacheDependencyPathHash === "-") {
-                throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob}], make sure you have checked out the target repository`);
+                throw new Error(`No file in ${process.cwd()} matched to [${inputs_1.cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`);
            }
        }
        else {
```
dist/setup/index.js.map (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/setup/licenses.txt (1056 lines changed, generated, vendored): file diff suppressed because it is too large
dist/setup/sourcemap-register.js (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-checksums/index.js (35031 lines changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-checksums/index.js.map (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-checksums/sourcemap-register.js (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-default-version/index.js (34791 lines changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-default-version/index.js.map (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
dist/update-default-version/sourcemap-register.js (1 line changed, generated, vendored): file diff suppressed because one or more lines are too long
src/cache/restore-cache.ts (14 lines changed, vendored)
```diff
@@ -1,7 +1,6 @@
 import * as cache from "@actions/cache";
 import * as glob from "@actions/glob";
 import * as core from "@actions/core";
-import path from "path";
 import {
   cacheDependencyGlob,
   cacheLocalPath,
@@ -37,11 +36,18 @@ export async function restoreCache(version: string): Promise<void> {
 async function computeKeys(version: string): Promise<string> {
   let cacheDependencyPathHash = "-";
   if (cacheDependencyGlob !== "") {
-    const fullCacheDependencyGlob = `${process.env["GITHUB_WORKSPACE"]}${path.sep}${cacheDependencyGlob}`;
-    cacheDependencyPathHash += await glob.hashFiles(fullCacheDependencyGlob);
+    core.info(
+      `Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
+    );
+    cacheDependencyPathHash += await glob.hashFiles(
+      cacheDependencyGlob,
+      undefined,
+      undefined,
+      true,
+    );
     if (cacheDependencyPathHash === "-") {
       throw new Error(
-        `No file in ${process.cwd()} matched to [${cacheDependencyGlob}], make sure you have checked out the target repository`,
+        `No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
       );
     }
   } else {
```
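Taken together, the two hunks replace the hand-built `${GITHUB_WORKSPACE}/<glob>` path with a direct pass-through of the newline-separated pattern list. A condensed, self-contained sketch of the resulting hash step (the function name and return handling are illustrative; the body mirrors the diff):

```ts
import * as core from "@actions/core";
import * as glob from "@actions/glob";

async function computeDependencyHash(
  cacheDependencyGlob: string,
): Promise<string> {
  let cacheDependencyPathHash = "-";
  if (cacheDependencyGlob !== "") {
    core.info(
      `Searching files using cache dependency glob: ${cacheDependencyGlob.split("\n").join(",")}`,
    );
    // Multi-line patterns go to hashFiles as-is; `true` = verbose logging.
    cacheDependencyPathHash += await glob.hashFiles(
      cacheDependencyGlob,
      undefined,
      undefined,
      true,
    );
    if (cacheDependencyPathHash === "-") {
      // hashFiles returns "" when nothing matched, leaving the sentinel "-".
      throw new Error(
        `No file in ${process.cwd()} matched to [${cacheDependencyGlob.split("\n").join(",")}], make sure you have checked out the target repository`,
      );
    }
  }
  return cacheDependencyPathHash;
}
```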