3 Commits

Author SHA1 Message Date
Arpad Borsos
c19371144d 2.9.1 2026-03-12 18:15:22 +01:00
Arpad Borsos
781e8d91ab try reverting pipeline change 2026-03-12 18:11:51 +01:00
Arpad Borsos
3d1fa4654a add changelog 2026-03-12 18:09:29 +01:00
6 changed files with 237 additions and 56 deletions

View File

@@ -1,5 +1,12 @@
 # Changelog

+## 2.9.0
+
+- Update to `node24`
+- Support running from within a `nix` shell
+- Consider all installed toolchains for cache key
+- Use case-insensitive comparison to determine exact cache hit
+
 ## 2.8.2

 - Don't overwrite env for cargo-metadata call

View File

@@ -204269,8 +204269,6 @@ function glob_hashFiles(patterns_1) {
 // EXTERNAL MODULE: external "fs/promises"
 var promises_ = __nccwpck_require__(91943);
 var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_);
-;// CONCATENATED MODULE: external "stream/promises"
-const external_stream_promises_namespaceObject = require("stream/promises");
 ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/error.js
 /*!
  * Copyright (c) Squirrel Chat et al., All rights reserved.
@@ -249750,7 +249748,6 @@ class Workspace {
const HOME = external_os_default().homedir(); const HOME = external_os_default().homedir();
const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG"; const STATE_CONFIG = "RUST_CACHE_CONFIG";
@@ -249812,7 +249809,7 @@ class CacheConfig {
             key += `-${inputKey}`;
         }
         const job = process.env.GITHUB_JOB;
-        if ((job) && getInput("add-job-id-key").toLowerCase() == "true") {
+        if (job && getInput("add-job-id-key").toLowerCase() == "true") {
             key += `-${job}`;
         }
     }
@@ -249951,7 +249948,9 @@ class CacheConfig {
         }
         keyFiles = sort_and_uniq(keyFiles);
         for (const file of keyFiles) {
-            await (0,external_stream_promises_namespaceObject.pipeline)((0,external_fs_.createReadStream)(file), hasher);
+            for await (const chunk of (0,external_fs_.createReadStream)(file)) {
+                hasher.update(chunk);
+            }
         }
         keyFiles.push(...parsedKeyFiles);
         self.keyFiles = sort_and_uniq(keyFiles);

9
dist/save/index.js vendored
View File

@@ -204269,8 +204269,6 @@ function glob_hashFiles(patterns_1) {
 // EXTERNAL MODULE: external "fs/promises"
 var promises_ = __nccwpck_require__(91943);
 var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_);
-;// CONCATENATED MODULE: external "stream/promises"
-const external_stream_promises_namespaceObject = require("stream/promises");
 ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/error.js
 /*!
  * Copyright (c) Squirrel Chat et al., All rights reserved.
@@ -249750,7 +249748,6 @@ class Workspace {
const HOME = external_os_default().homedir(); const HOME = external_os_default().homedir();
const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG"; const STATE_CONFIG = "RUST_CACHE_CONFIG";
@@ -249812,7 +249809,7 @@ class CacheConfig {
             key += `-${inputKey}`;
         }
         const job = process.env.GITHUB_JOB;
-        if ((job) && getInput("add-job-id-key").toLowerCase() == "true") {
+        if (job && getInput("add-job-id-key").toLowerCase() == "true") {
             key += `-${job}`;
         }
     }
@@ -249951,7 +249948,9 @@ class CacheConfig {
         }
         keyFiles = sort_and_uniq(keyFiles);
         for (const file of keyFiles) {
-            await (0,external_stream_promises_namespaceObject.pipeline)((0,external_fs_.createReadStream)(file), hasher);
+            for await (const chunk of (0,external_fs_.createReadStream)(file)) {
+                hasher.update(chunk);
+            }
         }
         keyFiles.push(...parsedKeyFiles);
         self.keyFiles = sort_and_uniq(keyFiles);

259
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
 {
   "private": true,
   "name": "rust-cache",
-  "version": "2.9.0",
+  "version": "2.9.1",
   "description": "A GitHub Action that implements smart caching for rust/cargo projects with sensible defaults.",
   "keywords": [
     "actions",

View File

@@ -3,7 +3,6 @@ import * as glob from "@actions/glob";
 import crypto from "crypto";
 import fs from "fs/promises";
 import { createReadStream } from "fs";
-import { pipeline } from "stream/promises";
 import os from "os";
 import path from "path";
 import * as toml from "smol-toml";
@@ -84,7 +83,7 @@ export class CacheConfig {
        key += `-${inputKey}`;
      }
      const job = process.env.GITHUB_JOB;
-      if ((job) && core.getInput("add-job-id-key").toLowerCase() == "true") {
+      if (job && core.getInput("add-job-id-key").toLowerCase() == "true") {
        key += `-${job}`;
      }
    }
@@ -256,7 +255,9 @@ export class CacheConfig {
    }
    keyFiles = sort_and_uniq(keyFiles);
    for (const file of keyFiles) {
-      await pipeline(createReadStream(file), hasher);
+      for await (const chunk of createReadStream(file)) {
+        hasher.update(chunk);
+      }
    }
    keyFiles.push(...parsedKeyFiles);