Compare commits

7 Commits

| Author | SHA1 | Date |
|---|---|---|
| | b894d59a8d | |
| | e78327dd9e | |
| | ccdddcc049 | |
| | b5ec9edd91 | |
| | 3f2513fdf4 | |
| | 19c46583c5 | |
| | b8e72aae83 | |
CHANGELOG.md

@@ -1,5 +1,14 @@
 # Changelog
 
+## 2.1.0
+
+- Only hash `Cargo.{lock,toml}` files in the configured workspace directories.
+
+## 2.0.2
+
+- Avoid calling `cargo metadata` on pre-cleanup.
+- Added `prefix-key`, `cache-directories` and `cache-targets` options.
+
 ## 2.0.1
 
 - Primarily just updating dependencies to fix GitHub deprecation notices.
README.md (26 changes)

@@ -14,32 +14,44 @@ sensible defaults.
 - uses: Swatinem/rust-cache@v2
   with:
-    # An explicit cache key that is used instead of the automatic `job`-based
-    # cache key and is thus stable across jobs.
-    # Default: empty
+    # The prefix cache key, this can be changed to start a new cache manually.
+    # default: "v0-rust"
+    prefix-key: ""
+
+    # A cache key that is used instead of the automatic `job`-based key,
+    # and is stable over multiple jobs.
+    # default: empty
     shared-key: ""
 
     # An additional cache key that is added alongside the automatic `job`-based
     # cache key and can be used to further differentiate jobs.
-    # Default: empty
+    # default: empty
     key: ""
 
     # A whitespace separated list of env-var *prefixes* who's value contributes
     # to the environment cache key.
     # The env-vars are matched by *prefix*, so the default `RUST` var will
     # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc.
-    # Default: "CARGO CC CFLAGS CXX CMAKE RUST"
+    # default: "CARGO CC CFLAGS CXX CMAKE RUST"
     env-vars: ""
 
     # The cargo workspaces and target directory configuration.
     # These entries are separated by newlines and have the form
     # `$workspace -> $target`. The `$target` part is treated as a directory
     # relative to the `$workspace` and defaults to "target" if not explicitly given.
-    # Default: ". -> target"
+    # default: ". -> target"
     workspaces: ""
+
+    # Additional non workspace directories to be cached, separated by newlines.
+    cache-directories: ""
+
+    # Determines whether workspace `target` directories are cached.
+    # If `false`, only the cargo registry will be cached.
+    # default: "true"
+    cache-targets: ""
 
     # Determines if the cache should be saved even when the workflow has failed.
-    # Default: "false"
+    # default: "false"
     cache-on-failure: ""
 ```
 
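As an aside on the env-var matching described above: the prefix semantics can be pictured with a few lines of TypeScript. This is a hypothetical helper for illustration, not the action's implementation (which lives in src/config.ts and differs in detail):

```ts
import * as crypto from "crypto";

// Hypothetical sketch: pick out env vars whose *names* start with one of the
// configured prefixes, then hash them in a stable order so they can
// contribute to the cache key.
function hashMatchedEnvVars(prefixes: string[], env: NodeJS.ProcessEnv): string {
  const hasher = crypto.createHash("sha1");
  const matched = Object.keys(env)
    .filter((name) => prefixes.some((prefix) => name.startsWith(prefix)))
    .sort();
  for (const name of matched) {
    hasher.update(`${name}=${env[name]}`);
  }
  return hasher.digest("hex");
}

// The default "RUST" prefix matches RUSTC, RUSTUP_*, RUSTFLAGS, RUSTDOC_*, etc.
const defaultPrefixes = "CARGO CC CFLAGS CXX CMAKE RUST".split(" ");
console.log(hashMatchedEnvVars(defaultPrefixes, process.env));
```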
action.yml (23 changes)

@@ -2,24 +2,35 @@ name: "Rust Cache"
 description: "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults."
 author: "Arpad Borsos <swatinem@swatinem.de>"
 inputs:
+  prefix-key:
+    description: "The prefix cache key, this can be changed to start a new cache manually."
+    required: false
+    default: "v0-rust"
   shared-key:
-    description: "An additional cache key that is stable over multiple jobs"
+    description: "A cache key that is used instead of the automatic `job`-based key, and is stable over multiple jobs."
     required: false
   key:
-    description: "An additional key for the cache"
+    description: "An additional cache key that is added alongside the automatic `job`-based cache key and can be used to further differentiate jobs."
     required: false
   env-vars:
-    description: "Additional environment variables to include in the cache key, separated by spaces"
+    description: "Additional environment variables to include in the cache key, separated by spaces."
     required: false
   workspaces:
-    description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines"
+    description: "Paths to multiple Cargo workspaces and their target directories, separated by newlines."
     required: false
+  cache-directories:
+    description: "Additional non workspace directories to be cached, separated by newlines."
+    required: false
+  cache-targets:
+    description: "Determines whether workspace targets are cached. If `false`, only the cargo registry will be cached."
+    required: false
+    default: "true"
   cache-on-failure:
-    description: "Cache even if the build fails. Defaults to false"
+    description: "Cache even if the build fails. Defaults to false."
    required: false
 outputs:
   cache-hit:
-    description: "A boolean value that indicates an exact match was found"
+    description: "A boolean value that indicates an exact match was found."
 runs:
   using: "node16"
   main: "dist/restore/index.js"
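Reading these inputs together with the "Construct key prefix" comments in the hunks below, the overall key layout can be sketched as follows. This is only an illustration: the `else` branch and the placement of the env hash are assumptions inferred from the comment "or the `key` input combined with the `job` key", and `envHash`/`lockHash` stand in for hashes the action computes elsewhere:

```ts
// Hypothetical sketch of the cache key layout; names are illustrative.
interface KeyInputs {
  prefixKey: string; // `prefix-key` input, "" when unset
  sharedKey: string; // `shared-key` input
  key: string;       // `key` input
  job: string;       // the workflow `job` id
}

function buildCacheKey(i: KeyInputs, envHash: string, lockHash: string): string {
  // `prefix-key` falls back to "v0-rust", mirroring `getInput("prefix-key") || "v0-rust"`.
  let key = i.prefixKey || "v0-rust";
  if (i.sharedKey) {
    // `shared-key` replaces the automatic `job`-based part of the key.
    key += `-${i.sharedKey}`;
  } else {
    // Assumption: `key` is added alongside the `job`-based key.
    if (i.key) {
      key += `-${i.key}`;
    }
    key += `-${i.job}`;
  }
  // Assumption: the env hash sits between the prefix and the lock hash;
  // the trailing lock hash matches `key += `-${lockHash}`` in the hunks below.
  return `${key}-${envHash}-${lockHash}`;
}
```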
dist/restore/index.js (vendored, 79 changes)

@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
 
 
+
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            lib_core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64545,7 +64548,7 @@ class CacheConfig {
         // Construct key prefix:
         // This uses either the `shared-key` input,
         // or the `key` input combined with the `job` key.
-        let key = `v0-rust`;
+        let key = lib_core.getInput("prefix-key") || "v0-rust";
         const sharedKey = lib_core.getInput("shared-key");
         if (sharedKey) {
             key += `-${sharedKey}`;
@@ -64600,11 +64603,23 @@ class CacheConfig {
         // might create/overwrite lockfiles.
         let lockHash = lib_core.getState(STATE_LOCKFILE_HASH);
         let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]");
+        // Constructs the workspace config and paths to restore:
+        // The workspaces are given using a `$workspace -> $target` syntax.
+        const workspaces = [];
+        const workspacesInput = lib_core.getInput("workspaces") || ".";
+        for (const workspace of workspacesInput.trim().split("\n")) {
+            let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
+            root = external_path_default().resolve(root);
+            target = external_path_default().join(root, target);
+            workspaces.push(new Workspace(root, target));
+        }
+        self.workspaces = workspaces;
         if (!lockHash) {
-            const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
-                followSymbolicLinks: false,
-            });
-            keyFiles = await globber.glob();
+            keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
+            for (const workspace of workspaces) {
+                const root = workspace.root;
+                keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
+            }
             keyFiles.sort((a, b) => a.localeCompare(b));
             hasher = external_crypto_default().createHash("sha1");
             for (const file of keyFiles) {
@@ -64619,18 +64634,15 @@ class CacheConfig {
         self.keyFiles = keyFiles;
         key += `-${lockHash}`;
         self.cacheKey = key;
-        // Constructs the workspace config and paths to restore:
-        // The workspaces are given using a `$workspace -> $target` syntax.
-        const workspaces = [];
-        const workspacesInput = lib_core.getInput("workspaces") || ".";
-        for (const workspace of workspacesInput.trim().split("\n")) {
-            let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
-            root = external_path_default().resolve(root);
-            target = external_path_default().join(root, target);
-            workspaces.push(new Workspace(root, target));
+        self.cachePaths = [config_CARGO_HOME];
+        const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true";
+        if (cacheTargets === "true") {
+            self.cachePaths.push(...workspaces.map((ws) => ws.target));
+        }
+        const cacheDirectories = lib_core.getInput("cache-directories");
+        for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
+            self.cachePaths.push(dir);
         }
-        self.workspaces = workspaces;
-        self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)];
         return self;
     }
     printInfo() {
@@ -64670,6 +64682,12 @@ async function getRustVersion() {
         .filter((s) => s.length === 2);
     return Object.fromEntries(splits);
 }
+async function globFiles(pattern) {
+    const globber = await glob.create(pattern, {
+        followSymbolicLinks: false,
+    });
+    return await globber.glob();
+}
 
 ;// CONCATENATED MODULE: ./src/cleanup.ts
 
@@ -64832,30 +64850,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
  * When the `checkTimestamp` flag is set, this will also remove anything older
  * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
 */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
        let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
             await rm(dir.path, dirent);
         }
     }
 }
@@ -64925,8 +64947,7 @@ async function run() {
         // pre-clean the target directory on cache mismatch
         for (const workspace of config.workspaces) {
             try {
-                const packages = await workspace.getPackages();
-                await cleanTargetDir(workspace.target, packages, true);
+                await cleanTargetDir(workspace.target, [], true);
             }
             catch { }
         }
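The lockfile-hash step restructured above reduces to: glob the key files per workspace, sort them for a stable order, and feed them through a sha1 hasher. A standalone sketch, assuming whole-file reads (the diff does not show how the action actually reads each file, so it may stream them instead):

```ts
import * as crypto from "crypto";
import * as fs from "fs";

// Standalone sketch of the lockfile-hash step: sort the globbed key files
// for a stable order, then hash their contents into a single hex digest.
// Assumption: files are read whole here; the action may stream them instead.
function hashKeyFiles(keyFiles: string[]): string {
  const hasher = crypto.createHash("sha1");
  for (const file of [...keyFiles].sort((a, b) => a.localeCompare(b))) {
    hasher.update(fs.readFileSync(file));
  }
  return hasher.digest("hex");
}
```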
dist/save/index.js (vendored, 76 changes)

@@ -64476,6 +64476,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 ;// CONCATENATED MODULE: ./src/workspace.ts
 
 
+
 const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
 class Workspace {
     constructor(root, target) {
@@ -64485,9 +64486,11 @@ class Workspace {
     async getPackages() {
         let packages = [];
         try {
+            core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
                 cwd: this.root,
             }));
+            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages) {
                 if (pkg.manifest_path.startsWith(this.root)) {
                     continue;
@@ -64545,7 +64548,7 @@ class CacheConfig {
         // Construct key prefix:
         // This uses either the `shared-key` input,
         // or the `key` input combined with the `job` key.
-        let key = `v0-rust`;
+        let key = core.getInput("prefix-key") || "v0-rust";
         const sharedKey = core.getInput("shared-key");
         if (sharedKey) {
             key += `-${sharedKey}`;
@@ -64600,11 +64603,23 @@ class CacheConfig {
         // might create/overwrite lockfiles.
         let lockHash = core.getState(STATE_LOCKFILE_HASH);
         let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
+        // Constructs the workspace config and paths to restore:
+        // The workspaces are given using a `$workspace -> $target` syntax.
+        const workspaces = [];
+        const workspacesInput = core.getInput("workspaces") || ".";
+        for (const workspace of workspacesInput.trim().split("\n")) {
+            let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
+            root = external_path_default().resolve(root);
+            target = external_path_default().join(root, target);
+            workspaces.push(new Workspace(root, target));
+        }
+        self.workspaces = workspaces;
         if (!lockHash) {
-            const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
-                followSymbolicLinks: false,
-            });
-            keyFiles = await globber.glob();
+            keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
+            for (const workspace of workspaces) {
+                const root = workspace.root;
+                keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
+            }
             keyFiles.sort((a, b) => a.localeCompare(b));
             hasher = external_crypto_default().createHash("sha1");
             for (const file of keyFiles) {
@@ -64619,18 +64634,15 @@ class CacheConfig {
         self.keyFiles = keyFiles;
         key += `-${lockHash}`;
         self.cacheKey = key;
-        // Constructs the workspace config and paths to restore:
-        // The workspaces are given using a `$workspace -> $target` syntax.
-        const workspaces = [];
-        const workspacesInput = core.getInput("workspaces") || ".";
-        for (const workspace of workspacesInput.trim().split("\n")) {
-            let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
-            root = external_path_default().resolve(root);
-            target = external_path_default().join(root, target);
-            workspaces.push(new Workspace(root, target));
+        self.cachePaths = [CARGO_HOME];
+        const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
+        if (cacheTargets === "true") {
+            self.cachePaths.push(...workspaces.map((ws) => ws.target));
+        }
+        const cacheDirectories = core.getInput("cache-directories");
+        for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
+            self.cachePaths.push(dir);
         }
-        self.workspaces = workspaces;
-        self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
         return self;
     }
     printInfo() {
@@ -64670,6 +64682,12 @@ async function getRustVersion() {
         .filter((s) => s.length === 2);
     return Object.fromEntries(splits);
 }
+async function globFiles(pattern) {
+    const globber = await glob.create(pattern, {
+        followSymbolicLinks: false,
+    });
+    return await globber.glob();
+}
 
 ;// CONCATENATED MODULE: ./src/cleanup.ts
 
@@ -64832,30 +64850,34 @@ async function cleanGit(packages) {
 }
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
  * When the `checkTimestamp` flag is set, this will also remove anything older
  * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
 */
 async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+            if (isOutdated) {
+                await rm(dir.path, dirent);
+            }
+            return;
+        }
        let name = dirent.name;
         // strip the trailing hash
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
-        let isOutdated = false;
-        if (checkTimestamp) {
-            const fileName = external_path_default().join(dir.path, dirent.name);
-            const { mtime } = await external_fs_default().promises.stat(fileName);
-            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-        }
-        if (!keepPrefix.has(name) || isOutdated) {
+        if (!keepPrefix.has(name)) {
             await rm(dir.path, dirent);
         }
     }
 }
package-lock.json (generated, 21 changes)

@@ -1,15 +1,14 @@
 {
   "name": "rust-cache",
-  "version": "2.0.1",
+  "version": "2.1.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
-      "name": "rust-cache",
-      "version": "2.0.1",
+      "version": "2.1.0",
       "license": "LGPL-3.0",
       "dependencies": {
-        "@actions/cache": "^3.0.5",
+        "@actions/cache": "^3.0.6",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/glob": "^0.3.0",
@@ -24,15 +23,16 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
-      "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
+      "version": "3.0.6",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
+      "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
         "@actions/http-client": "^2.0.1",
         "@actions/io": "^1.0.1",
+        "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
         "@azure/storage-blob": "^12.8.0",
         "semver": "^6.1.0",
@@ -667,15 +667,16 @@
     },
   "dependencies": {
     "@actions/cache": {
-      "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.5.tgz",
-      "integrity": "sha512-0WpPmwnRPkn5k5ASmjoX8bY8NrZEPTwN+64nGYJmR/bHjEVgC8svdf5K956wi67tNJBGJky2+UfvNbUOtHmMHg==",
+      "version": "3.0.6",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.6.tgz",
+      "integrity": "sha512-Tttit+nqmxgb2M5Ufj5p8Lwd+fx329HOTLzxMrY4aaaZqBzqetgWlEfszMyiXfX4cJML+bzLJbyD9rNYt8TJ8g==",
       "requires": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
         "@actions/http-client": "^2.0.1",
         "@actions/io": "^1.0.1",
+        "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
         "@azure/storage-blob": "^12.8.0",
         "semver": "^6.1.0",
package.json

@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "2.0.1",
+  "version": "2.1.0",
   "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",
@@ -22,7 +22,7 @@
   },
   "homepage": "https://github.com/Swatinem/rust-cache#readme",
   "dependencies": {
-    "@actions/cache": "^3.0.5",
+    "@actions/cache": "^3.0.6",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/glob": "^0.3.0",
src/cleanup.ts

@@ -180,17 +180,29 @@ export async function cleanGit(packages: Packages) {
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
 
 /**
- * Removes all files or directories in `dirName`, except the ones matching
- * any string in the `keepPrefix` set.
- *
- * The matching strips and trailing `-$hash` suffix.
+ * Removes all files or directories in `dirName` matching some criteria.
  *
  * When the `checkTimestamp` flag is set, this will also remove anything older
  * than one week.
+ *
+ * Otherwise, it will remove everything that does not match any string in the
+ * `keepPrefix` set.
+ * The matching strips and trailing `-$hash` suffix.
  */
 async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp = false) {
   const dir = await fs.promises.opendir(dirName);
   for await (const dirent of dir) {
+    if (checkTimestamp) {
+      const fileName = path.join(dir.path, dirent.name);
+      const { mtime } = await fs.promises.stat(fileName);
+      const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+
+      if (isOutdated) {
+        await rm(dir.path, dirent);
+      }
+      return;
+    }
+
     let name = dirent.name;
 
     // strip the trailing hash
@@ -199,14 +211,7 @@ async function rmExcept(dirName: string, keepPrefix: Set<string>, checkTimestamp
       name = name.slice(0, idx);
     }
 
-    let isOutdated = false;
-    if (checkTimestamp) {
-      const fileName = path.join(dir.path, dirent.name);
-      const { mtime } = await fs.promises.stat(fileName);
-      isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
-    }
-
-    if (!keepPrefix.has(name) || isOutdated) {
+    if (!keepPrefix.has(name)) {
       await rm(dir.path, dirent);
     }
   }
 }
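To make the restructured `rmExcept` concrete: in timestamp mode everything older than a week is removed; otherwise a dirent is kept only if its name, with the trailing `-$hash` stripped, is in the keep set. A standalone sketch of just that prefix matching (an illustrative helper, not the action's code):

```ts
// Standalone sketch of rmExcept's keep-prefix matching: a dirent like
// "serde-9f86d081" is kept when "serde" is in the keep set.
function shouldKeep(direntName: string, keepPrefix: Set<string>): boolean {
  let name = direntName;
  // strip the trailing `-$hash` suffix, as cleanup.ts does
  const idx = name.lastIndexOf("-");
  if (idx !== -1) {
    name = name.slice(0, idx);
  }
  return keepPrefix.has(name);
}

const keep = new Set(["serde", "tokio"]);
console.log(shouldKeep("serde-9f86d081", keep)); // true
console.log(shouldKeep("rand-9f86d081", keep));  // false: "rand" is not in the keep set
```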
src/config.ts

@@ -50,7 +50,7 @@ export class CacheConfig {
     // This uses either the `shared-key` input,
     // or the `key` input combined with the `job` key.
 
-    let key = `v0-rust`;
+    let key = core.getInput("prefix-key") || "v0-rust";
 
     const sharedKey = core.getInput("shared-key");
     if (sharedKey) {
@@ -118,11 +118,29 @@ export class CacheConfig {
     let lockHash = core.getState(STATE_LOCKFILE_HASH);
     let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
 
+    // Constructs the workspace config and paths to restore:
+    // The workspaces are given using a `$workspace -> $target` syntax.
+
+    const workspaces: Array<Workspace> = [];
+    const workspacesInput = core.getInput("workspaces") || ".";
+    for (const workspace of workspacesInput.trim().split("\n")) {
+      let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
+      root = path.resolve(root);
+      target = path.join(root, target);
+      workspaces.push(new Workspace(root, target));
+    }
+    self.workspaces = workspaces;
+
     if (!lockHash) {
-      const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
-        followSymbolicLinks: false,
-      });
-      keyFiles = await globber.glob();
+      keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
+      for (const workspace of workspaces) {
+        const root = workspace.root;
+        keyFiles.push(
+          ...(await globFiles(
+            `${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
+          )),
+        );
+      }
       keyFiles.sort((a, b) => a.localeCompare(b));
 
       hasher = crypto.createHash("sha1");
@@ -138,23 +156,20 @@ export class CacheConfig {
     }
 
     self.keyFiles = keyFiles;
 
     key += `-${lockHash}`;
     self.cacheKey = key;
 
-    // Constructs the workspace config and paths to restore:
-    // The workspaces are given using a `$workspace -> $target` syntax.
-
-    const workspaces: Array<Workspace> = [];
-    const workspacesInput = core.getInput("workspaces") || ".";
-    for (const workspace of workspacesInput.trim().split("\n")) {
-      let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
-      root = path.resolve(root);
-      target = path.join(root, target);
-      workspaces.push(new Workspace(root, target));
+    self.cachePaths = [CARGO_HOME];
+    const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
+    if (cacheTargets === "true") {
+      self.cachePaths.push(...workspaces.map((ws) => ws.target));
     }
-    self.workspaces = workspaces;
 
-    self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
+    const cacheDirectories = core.getInput("cache-directories");
+    for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
+      self.cachePaths.push(dir);
+    }
+
     return self;
   }
@@ -203,3 +218,10 @@ async function getRustVersion(): Promise<RustVersion> {
     .filter((s) => s.length === 2);
   return Object.fromEntries(splits);
 }
+
+async function globFiles(pattern: string): Promise<string[]> {
+  const globber = await glob.create(pattern, {
+    followSymbolicLinks: false,
+  });
+  return await globber.glob();
+}
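For reference, the `$workspace -> $target` parsing added above behaves like the following standalone sketch (same split/trim/default logic, with a plain object in place of the `Workspace` class):

```ts
import path from "path";

// Standalone sketch of the `workspaces` input parsing added in config.ts.
function parseWorkspaces(input: string): Array<{ root: string; target: string }> {
  const workspaces: Array<{ root: string; target: string }> = [];
  for (const workspace of (input || ".").trim().split("\n")) {
    // "crate -> build" yields root "crate" with target dir "build";
    // without an explicit `->`, the target defaults to "target".
    let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
    root = path.resolve(root);
    target = path.join(root, target);
    workspaces.push({ root, target });
  }
  return workspaces;
}

console.log(parseWorkspaces(". -> target"));
console.log(parseWorkspaces("backend\nfrontend -> dist"));
```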
src/restore.ts

@@ -43,9 +43,7 @@ async function run() {
     // pre-clean the target directory on cache mismatch
     for (const workspace of config.workspaces) {
       try {
-        const packages = await workspace.getPackages();
-
-        await cleanTargetDir(workspace.target, packages, true);
+        await cleanTargetDir(workspace.target, [], true);
       } catch {}
     }
   }
src/workspace.ts

@@ -1,3 +1,4 @@
+import * as core from "@actions/core";
 import path from "path";
 
 import { getCmdOutput } from "./utils";
@@ -10,11 +11,13 @@ export class Workspace {
   public async getPackages(): Promise<Packages> {
     let packages: Packages = [];
     try {
+      core.debug(`collecting metadata for "${this.root}"`);
       const meta: Meta = JSON.parse(
         await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], {
           cwd: this.root,
         }),
       );
+      core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
       for (const pkg of meta.packages) {
         if (pkg.manifest_path.startsWith(this.root)) {
           continue;
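The `cargo metadata` invocation that `getPackages` wraps can be reproduced on its own; a minimal sketch using Node's child_process in place of the action's `getCmdOutput` helper:

```ts
import { execFileSync } from "child_process";

// Minimal sketch: run `cargo metadata` the same way getPackages does, and
// count the packages as the new debug line reports.
const output = execFileSync("cargo", ["metadata", "--all-features", "--format-version", "1"], {
  encoding: "utf8",
  maxBuffer: 64 * 1024 * 1024, // metadata output can be large
});
const meta = JSON.parse(output) as { packages: Array<{ name: string; manifest_path: string }> };
console.log(`workspace has ${meta.packages.length} packages`);
```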