mirror of
https://code.forgejo.org/actions/cache.git
synced 2025-04-22 16:53:57 +08:00
v0.5.0
This commit is contained in:
parent e4a331f6ce
commit 9c23f97836
4 changed files with 295 additions and 34 deletions
dist/restore/index.js (vendored): 104 changed lines

@@ -43294,12 +43294,43 @@ class CacheService {
     }
     restoreCache(paths, primaryKey, restoreKeys) {
         return __awaiter(this, void 0, void 0, function* () {
-            return "";
+            restoreKeys = restoreKeys || [];
+            const keys = [primaryKey, ...restoreKeys];
+            core.debug("Resolved Keys:");
+            core.debug(JSON.stringify(keys));
+            const compressionMethod = yield utils.getCompressionMethod();
+            // path are needed to compute version
+            const cacheEntry = yield this.getS3CacheKey(keys);
+            if (!cacheEntry) {
+                // Cache not found
+                return undefined;
+            }
+            const archivePath = path.join(yield utils.createTempDirectory(), cacheEntry);
+            core.debug(`Archive Path: ${archivePath}`);
+            try {
+                // Download the cache from the cache entry
+                yield this.downloadFromS3(cacheEntry, archivePath);
+                if (core.isDebug()) {
+                    yield tar_1.listTar(archivePath, compressionMethod);
+                }
+                core.info(`Cache Size: ~${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
+                yield tar_1.extractTar(archivePath, compressionMethod);
+                core.info("Cache restored successfully");
+            }
+            finally {
+                // Try to delete the archive to save space
+                try {
+                    yield utils.unlinkFile(archivePath);
+                }
+                catch (error) {
+                    core.debug(`Failed to delete archive: ${error}`);
+                }
+            }
+            return cacheEntry;
         });
     }
     saveCache(paths, key) {
         return __awaiter(this, void 0, void 0, function* () {
-            const cacheId = this.getCacheId(key);
             const compressionMethod = yield utils.getCompressionMethod();
             const cachePaths = yield utils.resolvePaths(paths);
             core.debug("Cache Paths:");
@@ -43313,8 +43344,8 @@ class CacheService {
                 yield tar_1.listTar(archivePath, compressionMethod);
             }
             core.info(`Archive Size: ${filesize_1.default(fs_1.default.statSync(archivePath).size)}`);
-            core.debug(`Saving Cache (ID: ${cacheId})`);
-            yield this.uploadToS3(cacheId, archivePath);
+            core.debug(`Saving Cache (ID: ${key})`);
+            yield this.uploadToS3(key, archivePath);
         }
         finally {
             // Try to delete the archive to save space
@@ -43330,20 +43361,73 @@ class CacheService {
     }
     uploadToS3(key, archivePath) {
         return __awaiter(this, void 0, void 0, function* () {
-            const client = new aws_sdk_1.S3();
             const data = fs_1.default.readFileSync(archivePath).toString("base64");
-            return client
+            return this._client
                 .putObject({
                 Bucket: this._bucket,
-                Key: key,
+                Key: path.join(this.getCacheFolder(), key),
                 Body: data
             })
                 .promise();
         });
     }
-    getCacheId(primaryKey) {
-        var _a;
-        return `${(_a = process.env["GITHUB_REPOSITORY"]) === null || _a === void 0 ? void 0 : _a.replace("/", "-").toLowerCase()}-${primaryKey}`;
+    downloadFromS3(key, savePath) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const response = yield this._client
+                    .getObject({
+                    Bucket: this._bucket,
+                    Key: path.join(this.getCacheFolder(), key)
+                })
+                    .promise();
+                fs_1.default.writeFileSync(savePath, response.Body);
+            }
+            catch (err) {
+                core.warning("Could not download cache from S3");
+                core.warning(err.message);
+            }
+        });
+    }
+    getS3CacheKey(keys) {
+        return __awaiter(this, void 0, void 0, function* () {
+            // return first matching key
+            for (let i = 0; i < keys.length; i++) {
+                if (i === 0) {
+                    // look for exact match
+                    try {
+                        yield this._client
+                            .headObject({
+                            Bucket: this._bucket,
+                            Key: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        return keys[i];
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_a) { }
+                }
+                else {
+                    // look for match with newest added date that matches a prefix
+                    try {
+                        const response = yield this._client
+                            .listObjectsV2({
+                            Bucket: this._bucket,
+                            Prefix: path.join(this.getCacheFolder(), keys[i])
+                        })
+                            .promise();
+                        core.debug(JSON.stringify(response));
+                        // eslint-disable-next-line no-empty
+                    }
+                    catch (_b) { }
+                }
+            }
+            return undefined;
+        });
+    }
+    getCacheFolder() {
+        return process.env["GITHUB_REPOSITORY"]
+            .replace("/", "-")
+            .toLowerCase();
     }
 }
 exports.CacheService = CacheService;
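Every object in the hunks above is namespaced through getCacheFolder(), which lowercases GITHUB_REPOSITORY and replaces its slash with a dash; getS3CacheKey then probes the primary key with an exact headObject lookup and each restore key with a listObjectsV2 prefix search. A minimal TypeScript sketch of the resulting key layout, using hypothetical repository and cache-key values:

// Sketch only: illustrates the S3 object-key layout the diff introduces.
// The repository and cache keys below are hypothetical.
import * as path from "path";

const repository = "Owner/Repo"; // assumed GITHUB_REPOSITORY value
const folder = repository.replace("/", "-").toLowerCase(); // "owner-repo"

// Primary key: exact headObject lookup in getS3CacheKey
console.log(path.join(folder, "npm-linux-abc123")); // owner-repo/npm-linux-abc123

// Restore key: listObjectsV2 prefix search in getS3CacheKey
console.log(path.join(folder, "npm-linux-")); // owner-repo/npm-linux-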
dist/save/index.js (vendored): 104 changed lines

The hunks in this bundle are identical to the dist/restore/index.js hunks above: both entry points embed the same compiled CacheService, so @@ -43294,12 +43294,43 @@, @@ -43313,8 +43344,8 @@ and @@ -43330,20 +43361,73 @@ apply here verbatim.
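The two bundles drive the class in opposite directions: restore resolves a key and extracts the tar, save archives the paths and uploads under the primary key. A hedged usage sketch, not taken from this commit; the CacheService constructor is outside the diff, so the zero-argument construction and the module path are assumptions:

// Hypothetical wiring of the two entry points (module path and
// constructor arguments are assumptions; the diff shows neither).
import { CacheService } from "./cacheService"; // hypothetical path

async function run(): Promise<void> {
    const cache = new CacheService(); // assumed: reads bucket/credentials from env

    // restore/index.js path: exact primaryKey match, then prefix restoreKeys
    const hit = await cache.restoreCache(
        ["node_modules"],   // paths
        "npm-linux-abc123", // primaryKey
        ["npm-linux-"]      // restoreKeys
    );
    console.log(hit ? `restored from key: ${hit}` : "cache miss");

    // save/index.js path: tar the paths, then putObject under <folder>/<key>
    await cache.saveCache(["node_modules"], "npm-linux-abc123");
}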
package.json

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
(fourth changed file: the CacheService TypeScript source; its path is not preserved in this capture)

@@ -1,7 +1,12 @@
 import * as utils from "@actions/cache/lib/internal/cacheUtils";
-import { createTar, listTar } from "@actions/cache/lib/internal/tar";
+import {
+    createTar,
+    extractTar,
+    listTar
+} from "@actions/cache/lib/internal/tar";
 import * as core from "@actions/core";
 import { AWSError, S3 } from "aws-sdk";
+import { GetObjectOutput, ListObjectsV2Output } from "aws-sdk/clients/s3";
 import { PromiseResult } from "aws-sdk/lib/request";
 import filesize from "filesize";
 import fs from "fs";
@@ -32,12 +37,54 @@ export class CacheService {
         primaryKey: string,
         restoreKeys: string[]
     ): Promise<string | undefined> {
-        return "";
+        restoreKeys = restoreKeys || [];
+        const keys = [primaryKey, ...restoreKeys];
+
+        core.debug("Resolved Keys:");
+        core.debug(JSON.stringify(keys));
+
+        const compressionMethod = await utils.getCompressionMethod();
+
+        // path are needed to compute version
+        const cacheEntry = await this.getS3CacheKey(keys);
+        if (!cacheEntry) {
+            // Cache not found
+            return undefined;
+        }
+
+        const archivePath = path.join(
+            await utils.createTempDirectory(),
+            cacheEntry
+        );
+        core.debug(`Archive Path: ${archivePath}`);
+
+        try {
+            // Download the cache from the cache entry
+            await this.downloadFromS3(cacheEntry, archivePath);
+
+            if (core.isDebug()) {
+                await listTar(archivePath, compressionMethod);
+            }
+
+            core.info(
+                `Cache Size: ~${filesize(fs.statSync(archivePath).size)}`
+            );
+
+            await extractTar(archivePath, compressionMethod);
+            core.info("Cache restored successfully");
+        } finally {
+            // Try to delete the archive to save space
+            try {
+                await utils.unlinkFile(archivePath);
+            } catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
+
+        return cacheEntry;
     }
 
     async saveCache(paths: string[], key: string): Promise<string> {
-        const cacheId: string = this.getCacheId(key);
-
         const compressionMethod = await utils.getCompressionMethod();
 
         const cachePaths = await utils.resolvePaths(paths);
@@ -62,8 +109,8 @@ export class CacheService {
             `Archive Size: ${filesize(fs.statSync(archivePath).size)}`
         );
 
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        await this.uploadToS3(cacheId, archivePath);
+        core.debug(`Saving Cache (ID: ${key})`);
+        await this.uploadToS3(key, archivePath);
     } finally {
         // Try to delete the archive to save space
         try {
@@ -80,21 +127,67 @@ export class CacheService {
         key: string,
         archivePath: string
     ): Promise<PromiseResult<S3.PutObjectOutput, AWSError>> {
-        const client = new S3();
         const data = fs.readFileSync(archivePath).toString("base64");
 
-        return client
+        return this._client
             .putObject({
                 Bucket: this._bucket,
-                Key: key,
+                Key: path.join(this.getCacheFolder(), key),
                 Body: data
             })
             .promise();
     }
 
-    private getCacheId(primaryKey: string): string {
-        return `${process.env["GITHUB_REPOSITORY"]
-            ?.replace("/", "-")
-            .toLowerCase()}-${primaryKey}`;
+    private async downloadFromS3(key: string, savePath: string): Promise<void> {
+        try {
+            const response: GetObjectOutput = await this._client
+                .getObject({
+                    Bucket: this._bucket,
+                    Key: path.join(this.getCacheFolder(), key)
+                })
+                .promise();
+            fs.writeFileSync(savePath, response.Body);
+        } catch (err) {
+            core.warning("Could not download cache from S3");
+            core.warning(err.message);
+        }
+    }
+
+    private async getS3CacheKey(keys: string[]): Promise<string | undefined> {
+        // return first matching key
+        for (let i = 0; i < keys.length; i++) {
+            if (i === 0) {
+                // look for exact match
+                try {
+                    await this._client
+                        .headObject({
+                            Bucket: this._bucket,
+                            Key: path.join(this.getCacheFolder(), keys[i])
+                        })
+                        .promise();
+                    return keys[i];
+                    // eslint-disable-next-line no-empty
+                } catch {}
+            } else {
+                // look for match with newest added date that matches a prefix
+                try {
+                    const response: ListObjectsV2Output = await this._client
+                        .listObjectsV2({
+                            Bucket: this._bucket,
+                            Prefix: path.join(this.getCacheFolder(), keys[i])
+                        })
+                        .promise();
+                    core.debug(JSON.stringify(response));
+                    // eslint-disable-next-line no-empty
+                } catch {}
+            }
+        }
+        return undefined;
+    }
+
+    private getCacheFolder(): string {
+        return (process.env["GITHUB_REPOSITORY"] as string)
+            .replace("/", "-")
+            .toLowerCase();
     }
 }
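In getS3CacheKey, the restore-key branch is commented "look for match with newest added date that matches a prefix" and, as committed, logs the listObjectsV2 response. A hedged sketch of selecting the newest object from such a response; this is an assumption about the intended selection, not shipped behavior:

// Assumption, not shipped code: pick the most recently modified object
// from a ListObjectsV2Output, per the "newest added date" comment.
import { ListObjectsV2Output } from "aws-sdk/clients/s3";

function newestKey(response: ListObjectsV2Output): string | undefined {
    const objects = response.Contents ?? [];
    if (objects.length === 0) {
        return undefined;
    }
    return objects.reduce((newest, candidate) =>
        (candidate.LastModified ?? new Date(0)) >
        (newest.LastModified ?? new Date(0))
            ? candidate
            : newest
    ).Key;
}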