From 63918b250420a39dc70626ec9e86a899a3df31a4 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Fri, 17 Apr 2026 21:56:45 +0530 Subject: [PATCH 01/15] feat: background auto-update with SHA256 verification - Add src/lib/auto-update.ts + src/lib/release.ts: 24h-throttled detached background check, POSIX rename-in-place swap on next launch, SHA256 hash verification before staging (tri-state distinguishes legacy releases from fetch failures), version sidecar for the 'auto-updated to vX.Y.Z' stderr note - Opt out via AUTO_UPDATE=false in ~/.worktreerc or WORKTREE_NO_UPDATE=1 - Argv guard prevents detached child from re-running hooks - release.yml: --minify, cross-arch strip (llvm-strip on Linux), ad-hoc codesign on macOS (fixes SIGKILL on Apple Silicon from unsigned binaries), native-target smoke tests, SHA256SUMS publishing - Bump to 1.3.0 --- .github/workflows/ci.yml | 2 +- .github/workflows/release.yml | 71 +++++- .gitignore | 1 + CHANGELOG.md | 17 ++ README.md | 22 +- package.json | 2 +- src/commands/internal-update-check.ts | 14 ++ src/commands/update.ts | 189 ++++++++-------- src/index.ts | 31 ++- src/lib/auto-update.ts | 297 ++++++++++++++++++++++++++ src/lib/config.test.ts | 22 ++ src/lib/config.ts | 58 +++-- src/lib/release.test.ts | 82 +++++++ src/lib/release.ts | 219 +++++++++++++++++++ 14 files changed, 897 insertions(+), 130 deletions(-) create mode 100644 src/commands/internal-update-check.ts create mode 100644 src/lib/auto-update.ts create mode 100644 src/lib/release.test.ts create mode 100644 src/lib/release.ts diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1cec1f2..08b99a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: name: Lint, Format & Type Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: oven-sh/setup-bun@v2 - run: bun install --frozen-lockfile - run: bun run lint diff --git a/.github/workflows/release.yml 
b/.github/workflows/release.yml index 3161c82..2d159b6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -28,28 +28,87 @@ jobs: bun_target: bun-linux-arm64 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: oven-sh/setup-bun@v2 - run: bun install --frozen-lockfile - - run: bun build src/index.ts --compile --target=${{ matrix.bun_target }} --outfile dist/worktree-${{ matrix.target }} - - uses: actions/upload-artifact@v4 + + - name: Compile binary + run: | + bun build src/index.ts \ + --compile \ + --minify \ + --sourcemap=none \ + --target=${{ matrix.bun_target }} \ + --outfile dist/worktree-${{ matrix.target }} + + - name: Install llvm-strip (Linux) + if: startsWith(matrix.target, 'linux-') + run: sudo apt-get update && sudo apt-get install -y llvm + + - name: Strip symbols (Linux) + if: startsWith(matrix.target, 'linux-') + run: llvm-strip --strip-unneeded dist/worktree-${{ matrix.target }} + + - name: Strip symbols (macOS) + if: startsWith(matrix.target, 'darwin-') + run: strip dist/worktree-${{ matrix.target }} + + - name: Ad-hoc codesign (macOS) + if: startsWith(matrix.target, 'darwin-') + run: | + xattr -cr dist/worktree-${{ matrix.target }} + codesign --force --sign - dist/worktree-${{ matrix.target }} + codesign -dv dist/worktree-${{ matrix.target }} + + - name: Verify binary architecture and size + run: | + file dist/worktree-${{ matrix.target }} + ls -lh dist/worktree-${{ matrix.target }} + + - name: Smoke-test --version (native targets only) + if: matrix.target != 'linux-arm64' + run: | + chmod +x dist/worktree-${{ matrix.target }} + ./dist/worktree-${{ matrix.target }} --version + + - name: Compute SHA256 + run: | + cd dist + shasum -a 256 worktree-${{ matrix.target }} > worktree-${{ matrix.target }}.sha256 + + - uses: actions/upload-artifact@v7 with: name: worktree-${{ matrix.target }} - path: dist/worktree-${{ matrix.target }} + path: | + dist/worktree-${{ matrix.target }} + dist/worktree-${{ 
matrix.target }}.sha256 release: name: Create Release needs: build runs-on: ubuntu-latest steps: - - uses: actions/download-artifact@v4 + - uses: actions/download-artifact@v8 with: path: artifacts merge-multiple: true + - name: Aggregate SHA256SUMS + run: | + cd artifacts + : > SHA256SUMS + for f in worktree-*.sha256; do + cat "$f" >> SHA256SUMS + tail -c1 "$f" | od -An -c | grep -q '\\n' || echo >> SHA256SUMS + done + rm worktree-*.sha256 + cat SHA256SUMS + - run: chmod +x artifacts/worktree-* - uses: softprops/action-gh-release@v2 with: generate_release_notes: true - files: artifacts/worktree-* + files: | + artifacts/worktree-* + artifacts/SHA256SUMS diff --git a/.gitignore b/.gitignore index fda1c98..ffafb61 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ node_modules/ dist/ .worktrees docs/ +tasks/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 79d2758..8f86a61 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,23 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [1.3.0] - 2026-04-17 + +### Added + +- **Background auto-update**: on launch, `worktree` checks GitHub for a newer release at most once every 24 hours in a detached background process. When a newer version is found, the binary is downloaded, verified against SHA256, and staged. The next invocation atomically swaps in the new binary and prints a one-line `worktree auto-updated to vX.Y.Z` note on stderr. Opt out via `AUTO_UPDATE=false` in `~/.worktreerc` or `WORKTREE_NO_UPDATE=1` in the environment. +- **Release integrity**: every GitHub Release now publishes a `SHA256SUMS` file. The `worktree update` command and the background auto-updater both verify the downloaded binary against this hash before installing. Releases without `SHA256SUMS` (legacy) still work but without verification. 
+ +### Changed + +- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) to address GitHub's deprecation of Node.js 20 actions +- Release binaries now built with `--minify --sourcemap=none` and stripped of debug symbols (`llvm-strip` on Linux, `strip` on macOS). Binary sizes are slightly smaller; functional behavior unchanged. +- Release workflow smoke-tests each built binary via `--version` before publishing to catch regressions. + +### Fixed + +- macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** in the release workflow after stripping. Prior releases shipped unsigned binaries, which Apple Silicon (arm64) macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. + ## [1.2.0] - 2026-04-17 ### Added diff --git a/README.md b/README.md index bb6eddd..f22ecdf 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,27 @@ Then use `gw create feature-auth`, `gw list`, etc. ## Update -Re-run the install command to get the latest version. +### Automatic + +Once installed, `worktree` checks GitHub for a newer release at most once every 24 hours, in the background. When a newer version is found, it is downloaded, verified against a SHA256 hash, and staged. The **next** time you invoke `worktree`, the binary is swapped atomically and the command runs against the new version — you'll see a one-line note on stderr. + +To disable, create `~/.worktreerc` with: + +``` +AUTO_UPDATE=false +``` + +Or set `WORKTREE_NO_UPDATE=1` in your environment (useful in CI). + +Auto-update is a no-op when running via `bun run dev` or in any non-standalone invocation. + +### Manual + +```bash +worktree update +``` + +Forces an immediate check + download + replace, bypassing the 24-hour throttle. Requires write permission to the binary location (use `sudo` if installed under `/usr/local/bin`). 
## Platforms diff --git a/package.json b/package.json index 14f86c7..0f7e392 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "worktree-cli", - "version": "1.2.0", + "version": "1.3.0", "description": "Git worktree manager with automatic env file copying, dependency installation, and editor integration.", "type": "module", "module": "src/index.ts", diff --git a/src/commands/internal-update-check.ts b/src/commands/internal-update-check.ts new file mode 100644 index 0000000..1576c7e --- /dev/null +++ b/src/commands/internal-update-check.ts @@ -0,0 +1,14 @@ +import { command } from "@drizzle-team/brocli"; +import { + INTERNAL_CHECK_SUBCOMMAND, + runBackgroundUpdateCheck, +} from "../lib/auto-update"; + +export const internalUpdateCheckCommand = command({ + name: INTERNAL_CHECK_SUBCOMMAND, + desc: "", + hidden: true, + handler: async () => { + await runBackgroundUpdateCheck(); + }, +}); diff --git a/src/commands/update.ts b/src/commands/update.ts index 21c353f..0a30c77 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -4,39 +4,32 @@ import pkg from "../../package.json"; import { tryCatch } from "../lib/try-catch"; import { printSuccess, printError, printInfo, COLORS } from "../lib/logger"; import { EXIT_CODES } from "../lib/constants"; - -const REPO = "bhagyamudgal/worktree-cli"; - -function getAssetName(): string { - const platform = process.platform; - const arch = process.arch; - - if (platform !== "darwin" && platform !== "linux") { - printError(`Unsupported platform: ${platform}`); - process.exit(EXIT_CODES.ERROR); - } - if (arch !== "arm64" && arch !== "x64") { - printError(`Unsupported architecture: ${arch}`); - process.exit(EXIT_CODES.ERROR); - } - - return `worktree-${platform}-${arch}`; +import { + compareVersions, + downloadAsset, + fetchLatestRelease, + fetchSha256Sums, + getAssetName, + verifyBinaryHash, +} from "../lib/release"; + +function isStandaloneBinary(): boolean { + return ( + Bun.main.startsWith("/$bunfs/") 
|| import.meta.url.includes("$bunfs/") + ); } -type ReleaseAsset = { - name: string; - browser_download_url: string; -}; +function isEaccesError(error: unknown): boolean { + if (!(error instanceof Error)) return false; + if (!("code" in error)) return false; + return (error as NodeJS.ErrnoException).code === "EACCES"; +} export const updateCommand = command({ name: "update", desc: "Update worktree CLI to the latest version", handler: async () => { - const isStandalone = - Bun.main.startsWith("/$bunfs/") || - import.meta.url.includes("$bunfs/"); - - if (!isStandalone) { + if (!isStandaloneBinary()) { printError( "Update is only available for standalone compiled binaries." ); @@ -44,128 +37,122 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } - const currentVersion = pkg.version; - const binaryPath = process.execPath; - - printInfo(`Current version: v${currentVersion}`); - - const { data: response, error: fetchError } = await tryCatch( - fetch(`https://api.github.com/repos/${REPO}/releases/latest`) - ); - if (fetchError || !response) { + const assetName = getAssetName(); + if (!assetName) { printError( - "Failed to check for updates. Check your internet connection." + `Unsupported platform/arch: ${process.platform}/${process.arch}` ); process.exit(EXIT_CODES.ERROR); } - if (!response.ok) { + const currentVersion = pkg.version; + const binaryPath = process.execPath; + + printInfo(`Current version: v${currentVersion}`); + + const { data: release, error: releaseError } = + await tryCatch(fetchLatestRelease()); + if (releaseError || !release) { printError( - `GitHub API error: ${response.status} ${response.statusText}` + releaseError?.message ?? + "Failed to check for updates. Check your internet connection." 
); process.exit(EXIT_CODES.ERROR); } - const { data: release, error: jsonError } = await tryCatch( - response.json() - ); - if ( - jsonError || - !release || - typeof release !== "object" || - typeof release.tag_name !== "string" || - !Array.isArray(release.assets) - ) { - printError("Failed to parse release data."); - process.exit(EXIT_CODES.ERROR); - } - - const latestVersion = release.tag_name.replace(/^v/, ""); - printInfo(`Latest version: v${latestVersion}`); + printInfo(`Latest version: v${release.version}`); console.error(""); - if (currentVersion === latestVersion) { + const cmp = compareVersions(currentVersion, release.version); + if (cmp === 0) { printSuccess("Already up to date!"); return; } - - const [curMajor, curMinor, curPatch] = currentVersion - .split(".") - .map(Number); - const [latMajor, latMinor, latPatch] = latestVersion - .split(".") - .map(Number); - - const isNewer = - curMajor > latMajor || - (curMajor === latMajor && curMinor > latMinor) || - (curMajor === latMajor && - curMinor === latMinor && - curPatch > latPatch); - - if (isNewer) { + if (cmp > 0) { printSuccess( "Current version is newer than the latest release. No update needed." 
); return; } - const assetName = getAssetName(); - const asset = (release.assets as ReleaseAsset[]).find( - (entry) => entry.name === assetName - ); + const asset = release.assets.find(function (entry) { + return entry.name === assetName; + }); if (!asset) { - printError( - `Release ${release.tag_name} is missing asset ${assetName}.` - ); + printError(`Release ${release.tag} is missing asset ${assetName}.`); process.exit(EXIT_CODES.ERROR); } printInfo(`Downloading ${assetName}...`); - const { data: downloadResponse, error: dlError } = await tryCatch( - fetch(asset.browser_download_url) + const tmpPath = `${binaryPath}.update-tmp`; + const { error: dlError } = await tryCatch( + downloadAsset(asset, tmpPath) ); - if (dlError || !downloadResponse || !downloadResponse.ok) { - printError(`Failed to download ${assetName}.`); + if (dlError) { + await fs.unlink(tmpPath).catch(function () {}); + printError(dlError.message); process.exit(EXIT_CODES.ERROR); } - const { data: buffer, error: bufError } = await tryCatch( - downloadResponse.arrayBuffer() - ); - if (bufError || !buffer) { - printError("Failed to read download."); + const { error: chmodError } = await tryCatch(fs.chmod(tmpPath, 0o755)); + if (chmodError) { + await fs.unlink(tmpPath).catch(function () {}); + printError( + `Failed to mark binary executable: ${chmodError.message}` + ); process.exit(EXIT_CODES.ERROR); } - const tmpPath = `${binaryPath}.update-tmp`; - const { error: writeError } = await tryCatch( - fs.writeFile(tmpPath, Buffer.from(buffer), { mode: 0o755 }) - ); - if (writeError) { - await fs.unlink(tmpPath).catch(() => {}); - if ("code" in writeError && writeError.code === "EACCES") { - printError("Permission denied. 
Try: sudo worktree update"); - } else { - printError(`Failed to write update: ${writeError.message}`); - } + const sums = await fetchSha256Sums(release.assets); + if (sums.kind === "error") { + await fs.unlink(tmpPath).catch(function () {}); + printError( + `SHA256SUMS is published but could not be fetched: ${sums.reason}. Refusing to install.` + ); process.exit(EXIT_CODES.ERROR); } + if (sums.kind === "ok") { + const expected = sums.sums[assetName]; + if (!expected) { + await fs.unlink(tmpPath).catch(function () {}); + printError( + `SHA256SUMS is missing an entry for ${assetName}; refusing to install.` + ); + process.exit(EXIT_CODES.ERROR); + } + const ok = await verifyBinaryHash(tmpPath, expected); + if (!ok) { + await fs.unlink(tmpPath).catch(function () {}); + printError( + `Hash mismatch for ${assetName}; refusing to install.` + ); + process.exit(EXIT_CODES.ERROR); + } + printInfo("Verified SHA256 checksum."); + } else { + printInfo( + "No SHA256SUMS published for this release; proceeding without hash verification." + ); + } const { error: renameError } = await tryCatch( fs.rename(tmpPath, binaryPath) ); if (renameError) { - await fs.unlink(tmpPath).catch(() => {}); - printError(`Failed to replace binary: ${renameError.message}`); + await fs.unlink(tmpPath).catch(function () {}); + if (isEaccesError(renameError)) { + printError("Permission denied. 
Try: sudo worktree update"); + } else { + printError(`Failed to replace binary: ${renameError.message}`); + } process.exit(EXIT_CODES.ERROR); } const { BOLD, GREEN, DIM, RESET } = COLORS; console.error(""); console.error( - `${GREEN}${BOLD}Updated!${RESET} v${currentVersion} → v${latestVersion}` + `${GREEN}${BOLD}Updated!${RESET} v${currentVersion} → v${release.version}` ); console.error(` ${DIM}Binary: ${binaryPath}${RESET}`); }, diff --git a/src/index.ts b/src/index.ts index 12c1ddc..3576809 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,13 +1,34 @@ import { run } from "@drizzle-team/brocli"; import { createCommand } from "./commands/create"; +import { internalUpdateCheckCommand } from "./commands/internal-update-check"; import { listCommand } from "./commands/list"; import { openCommand } from "./commands/open"; import { removeCommand } from "./commands/remove"; import { updateCommand } from "./commands/update"; +import { + applyPendingUpdate, + INTERNAL_CHECK_SUBCOMMAND, + scheduleBackgroundUpdateCheck, +} from "./lib/auto-update"; import pkg from "../package.json"; -run([createCommand, listCommand, openCommand, removeCommand, updateCommand], { - name: "worktree", - description: pkg.description ?? "Git worktree manager", - version: pkg.version, -}); +if (!process.argv.slice(2).includes(INTERNAL_CHECK_SUBCOMMAND)) { + applyPendingUpdate(); + void scheduleBackgroundUpdateCheck(); +} + +run( + [ + createCommand, + listCommand, + openCommand, + removeCommand, + updateCommand, + internalUpdateCheckCommand, + ], + { + name: "worktree", + description: pkg.description ?? 
"Git worktree manager", + version: pkg.version, + } +); diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts new file mode 100644 index 0000000..2a74383 --- /dev/null +++ b/src/lib/auto-update.ts @@ -0,0 +1,297 @@ +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { loadGlobalConfig } from "./config"; +import { + compareVersions, + downloadAsset, + fetchLatestRelease, + fetchSha256Sums, + getAssetName, + verifyBinaryHash, +} from "./release"; +import { tryCatch, tryCatchSync } from "./try-catch"; +import { COLORS } from "./constants"; +import pkg from "../../package.json"; + +const STAGING_FILENAME = ".worktree.next"; +const VERSION_SIDECAR_FILENAME = ".worktree.next.version"; +const TWENTY_FOUR_HOURS_MS = 24 * 60 * 60 * 1000; +const PROBE_TIMEOUT_MS = 2_000; +const INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; + +function isStandalone(): boolean { + return ( + Bun.main.startsWith("/$bunfs/") || import.meta.url.includes("$bunfs/") + ); +} + +function getBinaryDir(): string { + return path.dirname(process.execPath); +} + +function getStagingPath(): string { + return path.join(getBinaryDir(), STAGING_FILENAME); +} + +function getVersionSidecarPath(): string { + return path.join(getBinaryDir(), VERSION_SIDECAR_FILENAME); +} + +function getCacheDir(): string { + return path.join(os.homedir(), ".cache", "worktree-cli"); +} + +function getLastCheckPath(): string { + return path.join(getCacheDir(), "last-check"); +} + +function getLastErrorPath(): string { + return path.join(getCacheDir(), "last-error"); +} + +function ensureCacheDir(): void { + fs.mkdirSync(getCacheDir(), { recursive: true }); +} + +function appendLastError(kind: "apply" | "check", message: string): void { + try { + ensureCacheDir(); + const line = `${new Date().toISOString()} ${kind}: ${message}\n`; + fs.appendFileSync(getLastErrorPath(), line); + } catch { + // never let the error log itself block anything + } +} + +function 
applyPendingUpdate(): void { + // Sync on purpose: runs before brocli.run; avoids top-level await. + try { + if (!isStandalone()) return; + const stagedPath = getStagingPath(); + if (!fs.existsSync(stagedPath)) return; + + let newVersion = ""; + try { + newVersion = fs + .readFileSync(getVersionSidecarPath(), "utf8") + .trim(); + } catch { + // missing sidecar is not fatal + } + + fs.renameSync(stagedPath, process.execPath); + try { + fs.unlinkSync(getVersionSidecarPath()); + } catch { + // best-effort cleanup + } + + const { GREEN, BOLD, RESET } = COLORS; + const label = newVersion ? ` to ${BOLD}v${newVersion}${RESET}` : ""; + console.error(`worktree ${GREEN}${BOLD}auto-updated${RESET}${label}`); + } catch (error) { + appendLastError( + "apply", + error instanceof Error ? error.message : String(error) + ); + } +} + +async function readLastCheckMs(): Promise { + const { data: text, error } = await tryCatch( + Bun.file(getLastCheckPath()).text() + ); + if (error || !text) return null; + const parsed = Number(text.trim()); + if (!Number.isFinite(parsed)) return null; + return parsed; +} + +async function isAutoUpdateDisabled(): Promise { + if (process.env.WORKTREE_NO_UPDATE === "1") return true; + const { data: config, error } = await tryCatch(loadGlobalConfig()); + if (error || !config) return false; + return config.AUTO_UPDATE === false; +} + +async function scheduleBackgroundUpdateCheck(): Promise { + try { + if (!isStandalone()) return; + if (await isAutoUpdateDisabled()) return; + + const lastCheck = await readLastCheckMs(); + const now = Date.now(); + const shouldSkip = + lastCheck !== null && + now - lastCheck >= 0 && + now - lastCheck < TWENTY_FOUR_HOURS_MS; + if (shouldSkip) return; + + // Best-effort throttle: two simultaneous launches may both check. + // Accepted trade-off — worst case is 2 API calls within the anon 60/hr limit. 
+ ensureCacheDir(); + fs.writeFileSync(getLastCheckPath(), String(now)); + + Bun.spawn({ + cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], + stdio: ["ignore", "ignore", "ignore"], + stderr: "ignore", + stdout: "ignore", + stdin: "ignore", + }).unref(); + } catch { + // never block the user's command + } +} + +async function runBackgroundUpdateCheck(): Promise { + const assetName = getAssetName(); + if (!assetName) { + appendLastError("check", `unsupported platform/arch`); + return; + } + + const { data: release, error: releaseError } = + await tryCatch(fetchLatestRelease()); + if (releaseError || !release) { + appendLastError( + "check", + `fetchLatestRelease: ${releaseError?.message ?? "unknown"}` + ); + return; + } + + if (compareVersions(pkg.version, release.version) >= 0) return; + + const asset = release.assets.find(function (entry) { + return entry.name === assetName; + }); + if (!asset) { + appendLastError( + "check", + `release ${release.tag} missing asset ${assetName}` + ); + return; + } + + const binaryDir = getBinaryDir(); + const tmpPath = path.join( + binaryDir, + `${STAGING_FILENAME}.${process.pid}.tmp` + ); + + const { error: dlError } = await tryCatch(downloadAsset(asset, tmpPath)); + if (dlError) { + safeUnlink(tmpPath); + appendLastError("check", `download: ${dlError.message}`); + return; + } + + // Verify integrity BEFORE making the binary executable or running it. + // Running an unverified binary (even just `--version`) is code execution. 
+ const sums = await fetchSha256Sums(release.assets); + if (sums.kind === "error") { + safeUnlink(tmpPath); + appendLastError( + "check", + `SHA256SUMS fetch failed — refusing to stage: ${sums.reason}` + ); + return; + } + if (sums.kind === "ok") { + const expected = sums.sums[assetName]; + if (!expected) { + safeUnlink(tmpPath); + appendLastError( + "check", + `SHA256SUMS missing entry for ${assetName}` + ); + return; + } + const ok = await verifyBinaryHash(tmpPath, expected); + if (!ok) { + safeUnlink(tmpPath); + appendLastError("check", `hash mismatch for ${assetName}`); + return; + } + } + // sums.kind === "not-published" → legacy release without SHA256SUMS; trust TLS. + + const { error: chmodError } = tryCatchSync(function () { + fs.chmodSync(tmpPath, 0o755); + }); + if (chmodError) { + safeUnlink(tmpPath); + appendLastError("check", `chmod: ${chmodError.message}`); + return; + } + + const probe = probeBinaryVersion(tmpPath); + if (!probe.ok) { + safeUnlink(tmpPath); + appendLastError("check", `probe: ${probe.reason}`); + return; + } + + const { error: renameError } = tryCatchSync(function () { + fs.renameSync(tmpPath, getStagingPath()); + }); + if (renameError) { + safeUnlink(tmpPath); + appendLastError("check", `stage: ${renameError.message}`); + return; + } + + tryCatchSync(function () { + fs.writeFileSync(getVersionSidecarPath(), probe.version); + }); +} + +type ProbeResult = + | { ok: true; version: string } + | { ok: false; reason: string }; + +function probeBinaryVersion(filePath: string): ProbeResult { + let result; + try { + result = Bun.spawnSync({ + cmd: [filePath, "--version"], + stdout: "pipe", + stderr: "pipe", + timeout: PROBE_TIMEOUT_MS, + }); + } catch (error) { + return { + ok: false, + reason: error instanceof Error ? 
error.message : String(error), + }; + } + + if (result.exitCode !== 0) { + return { ok: false, reason: `exit ${result.exitCode}` }; + } + + const stdout = result.stdout.toString("utf8"); + const stderr = result.stderr.toString("utf8"); + const match = /v?(\d+\.\d+\.\d+(?:[-+][\w.]+)?)/.exec( + stdout + "\n" + stderr + ); + if (!match) return { ok: false, reason: "no version in --version output" }; + return { ok: true, version: match[1] }; +} + +function safeUnlink(filePath: string): void { + try { + fs.unlinkSync(filePath); + } catch { + // best-effort + } +} + +export { + applyPendingUpdate, + scheduleBackgroundUpdateCheck, + runBackgroundUpdateCheck, + INTERNAL_CHECK_SUBCOMMAND, +}; diff --git a/src/lib/config.test.ts b/src/lib/config.test.ts index ebf361d..d9f9bfd 100644 --- a/src/lib/config.test.ts +++ b/src/lib/config.test.ts @@ -92,4 +92,26 @@ describe("validateConfig", () => { const config = validateConfig({ DEFAULT_BASE: "origin/dev" }); expect(config.WORKTREE_DIR).toBe(DEFAULT_WORKTREE_DIR); }); + + it("AUTO_UPDATE defaults to true", () => { + const config = validateConfig({}); + expect(config.AUTO_UPDATE).toBe(true); + }); + + it("accepts AUTO_UPDATE=false", () => { + const config = validateConfig({ AUTO_UPDATE: "false" }); + expect(config.AUTO_UPDATE).toBe(false); + }); + + it("accepts AUTO_UPDATE=0, yes, 1 variants", () => { + expect(validateConfig({ AUTO_UPDATE: "0" }).AUTO_UPDATE).toBe(false); + expect(validateConfig({ AUTO_UPDATE: "no" }).AUTO_UPDATE).toBe(false); + expect(validateConfig({ AUTO_UPDATE: "true" }).AUTO_UPDATE).toBe(true); + expect(validateConfig({ AUTO_UPDATE: "1" }).AUTO_UPDATE).toBe(true); + expect(validateConfig({ AUTO_UPDATE: "yes" }).AUTO_UPDATE).toBe(true); + }); + + it("rejects unparseable AUTO_UPDATE", () => { + expect(() => validateConfig({ AUTO_UPDATE: "junk" })).toThrow(); + }); }); diff --git a/src/lib/config.ts b/src/lib/config.ts index f70bafc..335d8c0 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -1,11 +1,35 
@@ import { z } from "zod"; +import os from "node:os"; +import path from "node:path"; import { DEFAULT_WORKTREE_DIR } from "./constants"; import { tryCatch } from "./try-catch"; -import path from "node:path"; + +const booleanLike = z + .union([z.boolean(), z.string()]) + .transform(function (value) { + if (typeof value === "boolean") return value; + const normalized = value.trim().toLowerCase(); + if ( + normalized === "true" || + normalized === "1" || + normalized === "yes" + ) { + return true; + } + if ( + normalized === "false" || + normalized === "0" || + normalized === "no" + ) { + return false; + } + throw new Error(`Expected boolean-like value, got "${value}"`); + }); const configSchema = z.object({ DEFAULT_BASE: z.string().optional(), WORKTREE_DIR: z.string().default(DEFAULT_WORKTREE_DIR), + AUTO_UPDATE: booleanLike.default(true), }); type Config = z.infer; @@ -37,24 +61,28 @@ function validateConfig(raw: Record): Config { return configSchema.parse(raw); } -async function loadConfig(root: string): Promise { - const configPath = path.join(root, ".worktreerc"); - const file = Bun.file(configPath); +async function readConfigFile(filePath: string): Promise { + const file = Bun.file(filePath); const isExists = await file.exists(); - - if (!isExists) { - return validateConfig({}); - } - + if (!isExists) return validateConfig({}); const { data: content, error } = await tryCatch(file.text()); + if (error) return validateConfig({}); + const { data: parsed, error: parseError } = await tryCatch( + Promise.resolve().then(function () { + return validateConfig(parseConfigContent(content)); + }) + ); + if (parseError || !parsed) return validateConfig({}); + return parsed; +} - if (error) { - return validateConfig({}); - } +async function loadConfig(root: string): Promise { + return readConfigFile(path.join(root, ".worktreerc")); +} - const raw = parseConfigContent(content); - return validateConfig(raw); +async function loadGlobalConfig(): Promise { + return 
readConfigFile(path.join(os.homedir(), ".worktreerc")); } -export { loadConfig, parseConfigContent, validateConfig }; +export { loadConfig, loadGlobalConfig, parseConfigContent, validateConfig }; export type { Config }; diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts new file mode 100644 index 0000000..7c30a6c --- /dev/null +++ b/src/lib/release.test.ts @@ -0,0 +1,82 @@ +import { describe, expect, it } from "bun:test"; +import { compareVersions, parseSha256Sums } from "./release"; + +describe("compareVersions", () => { + it("returns 0 for equal versions", () => { + expect(compareVersions("1.2.3", "1.2.3")).toBe(0); + expect(compareVersions("v1.2.3", "1.2.3")).toBe(0); + }); + + it("returns negative when a < b", () => { + expect(compareVersions("1.2.3", "1.2.4")).toBeLessThan(0); + expect(compareVersions("1.2.3", "1.3.0")).toBeLessThan(0); + expect(compareVersions("1.2.3", "2.0.0")).toBeLessThan(0); + }); + + it("returns positive when a > b", () => { + expect(compareVersions("1.2.4", "1.2.3")).toBeGreaterThan(0); + expect(compareVersions("2.0.0", "1.9.9")).toBeGreaterThan(0); + }); + + it("handles missing components as 0", () => { + expect(compareVersions("1", "1.0.0")).toBe(0); + expect(compareVersions("1.0", "1.0.1")).toBeLessThan(0); + }); + + it("treats prerelease tags on the patch as equal to the base patch", () => { + expect(compareVersions("1.2.3-beta", "1.2.3")).toBe(0); + expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBe(0); + expect(compareVersions("1.2.3-rc.1", "1.2.4")).toBeLessThan(0); + }); + + it("never returns NaN for garbage input", () => { + expect(Number.isFinite(compareVersions("junk", "1.2.3"))).toBe(true); + expect(Number.isFinite(compareVersions("1.2.3", "also-junk"))).toBe( + true + ); + }); +}); + +describe("parseSha256Sums", () => { + it("parses standard shasum -a 256 output", () => { + const text = [ + "a".repeat(64) + " worktree-darwin-arm64", + "b".repeat(64) + " worktree-linux-x64", + ].join("\n"); + const 
result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + expect(result["worktree-linux-x64"]).toBe("b".repeat(64)); + }); + + it("parses BSD-style asterisk prefix", () => { + const text = "c".repeat(64) + " *worktree-darwin-x64"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-x64"]).toBe("c".repeat(64)); + }); + + it("skips comments and blank lines", () => { + const text = [ + "# header comment", + "", + "d".repeat(64) + " worktree-linux-arm64", + ].join("\n"); + const result = parseSha256Sums(text); + expect(Object.keys(result)).toEqual(["worktree-linux-arm64"]); + }); + + it("lowercases the hash", () => { + const hash = "ABCDEF" + "0".repeat(58); + const text = hash + " worktree-linux-x64"; + const result = parseSha256Sums(text); + expect(result["worktree-linux-x64"]).toBe(hash.toLowerCase()); + }); + + it("ignores malformed lines", () => { + const text = [ + "not-a-hash worktree-darwin-arm64", + "e".repeat(64) + " worktree-linux-x64", + ].join("\n"); + const result = parseSha256Sums(text); + expect(Object.keys(result)).toEqual(["worktree-linux-x64"]); + }); +}); diff --git a/src/lib/release.ts b/src/lib/release.ts new file mode 100644 index 0000000..1efc869 --- /dev/null +++ b/src/lib/release.ts @@ -0,0 +1,219 @@ +import { tryCatch } from "./try-catch"; + +const REPO = "bhagyamudgal/worktree-cli"; +const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/latest`; + +const DEFAULT_META_TIMEOUT_MS = 30_000; +const DEFAULT_ASSET_TIMEOUT_MS = 600_000; + +type ReleaseAsset = { + name: string; + browser_download_url: string; +}; + +type ReleaseInfo = { + tag: string; + version: string; + assets: ReleaseAsset[]; +}; + +function getAssetName(): string | null { + const platform = process.platform; + const arch = process.arch; + if (platform !== "darwin" && platform !== "linux") return null; + if (arch !== "arm64" && arch !== "x64") return null; + return `worktree-${platform}-${arch}`; +} 
+ +function parseNumericSegment(raw: string | undefined): number { + if (raw === undefined) return 0; + const leadingInt = /^(\d+)/.exec(raw); + if (!leadingInt) return 0; + const n = Number(leadingInt[1]); + return Number.isFinite(n) ? n : 0; +} + +function compareVersions(a: string, b: string): number { + const parse = function (v: string): [number, number, number] { + const [maj, min, patch] = v.replace(/^v/, "").split("."); + return [ + parseNumericSegment(maj), + parseNumericSegment(min), + parseNumericSegment(patch), + ]; + }; + const [aMaj, aMin, aPatch] = parse(a); + const [bMaj, bMin, bPatch] = parse(b); + if (aMaj !== bMaj) return aMaj - bMaj; + if (aMin !== bMin) return aMin - bMin; + return aPatch - bPatch; +} + +function isReleaseInfo(value: unknown): value is { + tag_name: string; + assets: ReleaseAsset[]; +} { + if (!value || typeof value !== "object") return false; + const rec = value as Record; + if (typeof rec.tag_name !== "string") return false; + if (!Array.isArray(rec.assets)) return false; + return rec.assets.every(function (entry: unknown) { + if (!entry || typeof entry !== "object") return false; + const asset = entry as Record; + return ( + typeof asset.name === "string" && + typeof asset.browser_download_url === "string" + ); + }); +} + +async function fetchWithTimeout( + url: string, + timeoutMs: number +): Promise { + const controller = new AbortController(); + const timer = setTimeout(function () { + controller.abort(); + }, timeoutMs); + try { + return await fetch(url, { signal: controller.signal }); + } finally { + clearTimeout(timer); + } +} + +async function fetchLatestRelease( + timeoutMs: number = DEFAULT_META_TIMEOUT_MS +): Promise { + const { data: response, error } = await tryCatch( + fetchWithTimeout(API_RELEASES_LATEST, timeoutMs) + ); + if (error || !response) { + throw new Error( + `Failed to reach GitHub releases API: ${error?.message ?? 
"unknown"}` + ); + } + if (!response.ok) { + throw new Error( + `GitHub API error: ${response.status} ${response.statusText}` + ); + } + const { data: json, error: jsonError } = await tryCatch(response.json()); + if (jsonError) + throw new Error(`Invalid release JSON: ${jsonError.message}`); + if (!isReleaseInfo(json)) + throw new Error("Release payload missing tag_name or assets"); + return { + tag: json.tag_name, + version: json.tag_name.replace(/^v/, ""), + assets: json.assets, + }; +} + +async function downloadAsset( + asset: ReleaseAsset, + destPath: string, + timeoutMs: number = DEFAULT_ASSET_TIMEOUT_MS +): Promise { + const { data: response, error } = await tryCatch( + fetchWithTimeout(asset.browser_download_url, timeoutMs) + ); + if (error || !response) { + throw new Error( + `Failed to download ${asset.name}: ${error?.message ?? "unknown"}` + ); + } + if (!response.ok) { + throw new Error( + `Download ${asset.name} failed: ${response.status} ${response.statusText}` + ); + } + const { data: buffer, error: bufError } = await tryCatch( + response.arrayBuffer() + ); + if (bufError || !buffer) { + throw new Error(`Failed to read ${asset.name} body`); + } + await Bun.write(destPath, buffer); +} + +async function verifyBinaryHash( + filePath: string, + expectedSha256: string +): Promise { + const hasher = new Bun.CryptoHasher("sha256"); + const file = Bun.file(filePath); + const { data: buffer, error } = await tryCatch(file.arrayBuffer()); + if (error || !buffer) return false; + hasher.update(buffer); + const actual = hasher.digest("hex"); + return constantTimeEquals( + actual.toLowerCase(), + expectedSha256.toLowerCase() + ); +} + +function constantTimeEquals(a: string, b: string): boolean { + if (a.length !== b.length) return false; + let diff = 0; + for (let i = 0; i < a.length; i++) { + diff |= a.charCodeAt(i) ^ b.charCodeAt(i); + } + return diff === 0; +} + +type Sha256SumsResult = + | { kind: "not-published" } + | { kind: "ok"; sums: Record } + | { kind: 
"error"; reason: string }; + +async function fetchSha256Sums( + assets: ReleaseAsset[], + timeoutMs: number = DEFAULT_META_TIMEOUT_MS +): Promise { + const sumsAsset = assets.find(function (entry) { + return entry.name === "SHA256SUMS"; + }); + if (!sumsAsset) return { kind: "not-published" }; + const { data: response, error } = await tryCatch( + fetchWithTimeout(sumsAsset.browser_download_url, timeoutMs) + ); + if (error || !response) { + return { kind: "error", reason: error?.message ?? "network error" }; + } + if (!response.ok) { + return { + kind: "error", + reason: `${response.status} ${response.statusText}`, + }; + } + const { data: text, error: textError } = await tryCatch(response.text()); + if (textError || !text) { + return { kind: "error", reason: "empty SHA256SUMS body" }; + } + return { kind: "ok", sums: parseSha256Sums(text) }; +} + +function parseSha256Sums(text: string): Record { + const result: Record = {}; + for (const line of text.split("\n")) { + const trimmed = line.trim(); + if (trimmed === "" || trimmed.startsWith("#")) continue; + const match = /^([0-9a-fA-F]{64})\s+\*?(.+)$/.exec(trimmed); + if (!match) continue; + const [, hash, filename] = match; + result[filename.trim()] = hash.toLowerCase(); + } + return result; +} + +export { + getAssetName, + compareVersions, + fetchLatestRelease, + downloadAsset, + verifyBinaryHash, + fetchSha256Sums, + parseSha256Sums, +}; +export type { ReleaseAsset, ReleaseInfo, Sha256SumsResult }; From 121de31ad1f4fed32ef7bbe8270dab92234ad902 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Fri, 17 Apr 2026 22:51:12 +0530 Subject: [PATCH 02/15] =?UTF-8?q?fix:=20address=20PR=20#5=20review=20?= =?UTF-8?q?=E2=80=94=20hardening=20follow-ups?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Move last-check write to child; failed checks don't burn 24h window - Simplify probe to runs-only; use release.version for sidecar - SemVer 2.0 prerelease ordering in compareVersions - 
Rotate last-error log at 64 KB (keep last 20 lines) - Smoke-test linux-arm64 on native runner (ubuntu-24.04-arm) - Extract shared isStandalone() to release.ts - Stream verifyBinaryHash instead of loading full binary into memory - Consistent COLORS import site (logger.ts) - Log sidecar write failures to last-error - Extract safeUnlink helper in update.ts --- .github/workflows/release.yml | 5 +- src/commands/update.ts | 26 +++++----- src/lib/auto-update.ts | 95 +++++++++++++++++++++-------------- src/lib/release.test.ts | 14 ++++-- src/lib/release.ts | 74 ++++++++++++++++++++------- 5 files changed, 139 insertions(+), 75 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2d159b6..35642cb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -24,7 +24,7 @@ jobs: os: ubuntu-latest bun_target: bun-linux-x64 - target: linux-arm64 - os: ubuntu-latest + os: ubuntu-24.04-arm bun_target: bun-linux-arm64 steps: @@ -65,8 +65,7 @@ jobs: file dist/worktree-${{ matrix.target }} ls -lh dist/worktree-${{ matrix.target }} - - name: Smoke-test --version (native targets only) - if: matrix.target != 'linux-arm64' + - name: Smoke-test --version run: | chmod +x dist/worktree-${{ matrix.target }} ./dist/worktree-${{ matrix.target }} --version diff --git a/src/commands/update.ts b/src/commands/update.ts index 0a30c77..58c485f 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -10,26 +10,26 @@ import { fetchLatestRelease, fetchSha256Sums, getAssetName, + isStandalone, verifyBinaryHash, } from "../lib/release"; -function isStandaloneBinary(): boolean { - return ( - Bun.main.startsWith("/$bunfs/") || import.meta.url.includes("$bunfs/") - ); -} - function isEaccesError(error: unknown): boolean { if (!(error instanceof Error)) return false; if (!("code" in error)) return false; return (error as NodeJS.ErrnoException).code === "EACCES"; } +// Best-effort cleanup — the primary error is surfaced by the caller. 
+async function safeUnlink(filePath: string): Promise { + await fs.unlink(filePath).catch(function () {}); +} + export const updateCommand = command({ name: "update", desc: "Update worktree CLI to the latest version", handler: async () => { - if (!isStandaloneBinary()) { + if (!isStandalone()) { printError( "Update is only available for standalone compiled binaries." ); @@ -90,14 +90,14 @@ export const updateCommand = command({ downloadAsset(asset, tmpPath) ); if (dlError) { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); printError(dlError.message); process.exit(EXIT_CODES.ERROR); } const { error: chmodError } = await tryCatch(fs.chmod(tmpPath, 0o755)); if (chmodError) { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); printError( `Failed to mark binary executable: ${chmodError.message}` ); @@ -106,7 +106,7 @@ export const updateCommand = command({ const sums = await fetchSha256Sums(release.assets); if (sums.kind === "error") { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); printError( `SHA256SUMS is published but could not be fetched: ${sums.reason}. 
Refusing to install.` ); @@ -115,7 +115,7 @@ export const updateCommand = command({ if (sums.kind === "ok") { const expected = sums.sums[assetName]; if (!expected) { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); printError( `SHA256SUMS is missing an entry for ${assetName}; refusing to install.` ); @@ -123,7 +123,7 @@ export const updateCommand = command({ } const ok = await verifyBinaryHash(tmpPath, expected); if (!ok) { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); printError( `Hash mismatch for ${assetName}; refusing to install.` ); @@ -140,7 +140,7 @@ export const updateCommand = command({ fs.rename(tmpPath, binaryPath) ); if (renameError) { - await fs.unlink(tmpPath).catch(function () {}); + await safeUnlink(tmpPath); if (isEaccesError(renameError)) { printError("Permission denied. Try: sudo worktree update"); } else { diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index 2a74383..b6b3d43 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -8,24 +8,21 @@ import { fetchLatestRelease, fetchSha256Sums, getAssetName, + isStandalone, verifyBinaryHash, } from "./release"; import { tryCatch, tryCatchSync } from "./try-catch"; -import { COLORS } from "./constants"; +import { COLORS } from "./logger"; import pkg from "../../package.json"; const STAGING_FILENAME = ".worktree.next"; const VERSION_SIDECAR_FILENAME = ".worktree.next.version"; const TWENTY_FOUR_HOURS_MS = 24 * 60 * 60 * 1000; const PROBE_TIMEOUT_MS = 2_000; +const MAX_ERROR_LOG_BYTES = 64 * 1024; +const ERROR_LOG_KEEP_LINES = 20; const INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; -function isStandalone(): boolean { - return ( - Bun.main.startsWith("/$bunfs/") || import.meta.url.includes("$bunfs/") - ); -} - function getBinaryDir(): string { return path.dirname(process.execPath); } @@ -58,12 +55,29 @@ function appendLastError(kind: "apply" | "check", message: string): void { try { ensureCacheDir(); const 
line = `${new Date().toISOString()} ${kind}: ${message}\n`; - fs.appendFileSync(getLastErrorPath(), line); + const logPath = getLastErrorPath(); + rotateErrorLogIfOversized(logPath); + fs.appendFileSync(logPath, line); } catch { // never let the error log itself block anything } } +function rotateErrorLogIfOversized(logPath: string): void { + try { + const stat = fs.statSync(logPath); + if (stat.size <= MAX_ERROR_LOG_BYTES) return; + const existing = fs.readFileSync(logPath, "utf8"); + const lines = existing.split("\n").filter(function (line) { + return line !== ""; + }); + const kept = lines.slice(-ERROR_LOG_KEEP_LINES).join("\n") + "\n"; + fs.writeFileSync(logPath, kept); + } catch { + // best-effort rotation — if stat/read/write fails, fall through + } +} + function applyPendingUpdate(): void { // Sync on purpose: runs before brocli.run; avoids top-level await. try { @@ -128,11 +142,10 @@ async function scheduleBackgroundUpdateCheck(): Promise { now - lastCheck < TWENTY_FOUR_HOURS_MS; if (shouldSkip) return; - // Best-effort throttle: two simultaneous launches may both check. - // Accepted trade-off — worst case is 2 API calls within the anon 60/hr limit. - ensureCacheDir(); - fs.writeFileSync(getLastCheckPath(), String(now)); - + // Parent does NOT write last-check; the child writes it after a + // successful check completes. Simultaneous launches may both spawn + // (accepted trade-off — worst case 2 API calls within the anon 60/hr + // limit) but a failed check never burns the 24h window. 
Bun.spawn({ cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], stdio: ["ignore", "ignore", "ignore"], @@ -145,6 +158,16 @@ async function scheduleBackgroundUpdateCheck(): Promise { } } +function recordCheckCompleted(): void { + const { error } = tryCatchSync(function () { + ensureCacheDir(); + fs.writeFileSync(getLastCheckPath(), String(Date.now())); + }); + if (error) { + appendLastError("check", `last-check write: ${error.message}`); + } +} + async function runBackgroundUpdateCheck(): Promise { const assetName = getAssetName(); if (!assetName) { @@ -162,7 +185,10 @@ async function runBackgroundUpdateCheck(): Promise { return; } - if (compareVersions(pkg.version, release.version) >= 0) return; + if (compareVersions(pkg.version, release.version) >= 0) { + recordCheckCompleted(); + return; + } const asset = release.assets.find(function (entry) { return entry.name === assetName; @@ -227,7 +253,7 @@ async function runBackgroundUpdateCheck(): Promise { return; } - const probe = probeBinaryVersion(tmpPath); + const probe = probeBinaryRuns(tmpPath); if (!probe.ok) { safeUnlink(tmpPath); appendLastError("check", `probe: ${probe.reason}`); @@ -243,42 +269,37 @@ async function runBackgroundUpdateCheck(): Promise { return; } - tryCatchSync(function () { - fs.writeFileSync(getVersionSidecarPath(), probe.version); + const { error: sidecarError } = tryCatchSync(function () { + fs.writeFileSync(getVersionSidecarPath(), release.version); }); + if (sidecarError) { + appendLastError("check", `sidecar: ${sidecarError.message}`); + } + + recordCheckCompleted(); } -type ProbeResult = - | { ok: true; version: string } - | { ok: false; reason: string }; +type ProbeResult = { ok: true } | { ok: false; reason: string }; -function probeBinaryVersion(filePath: string): ProbeResult { - let result; - try { - result = Bun.spawnSync({ +function probeBinaryRuns(filePath: string): ProbeResult { + const { data: result, error } = tryCatchSync(function () { + return Bun.spawnSync({ cmd: [filePath, 
"--version"], - stdout: "pipe", - stderr: "pipe", + stdout: "ignore", + stderr: "ignore", timeout: PROBE_TIMEOUT_MS, }); - } catch (error) { + }); + if (error || !result) { return { ok: false, - reason: error instanceof Error ? error.message : String(error), + reason: error?.message ?? "spawn failed", }; } - if (result.exitCode !== 0) { return { ok: false, reason: `exit ${result.exitCode}` }; } - - const stdout = result.stdout.toString("utf8"); - const stderr = result.stderr.toString("utf8"); - const match = /v?(\d+\.\d+\.\d+(?:[-+][\w.]+)?)/.exec( - stdout + "\n" + stderr - ); - if (!match) return { ok: false, reason: "no version in --version output" }; - return { ok: true, version: match[1] }; + return { ok: true }; } function safeUnlink(filePath: string): void { diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts index 7c30a6c..9082b39 100644 --- a/src/lib/release.test.ts +++ b/src/lib/release.test.ts @@ -23,12 +23,20 @@ describe("compareVersions", () => { expect(compareVersions("1.0", "1.0.1")).toBeLessThan(0); }); - it("treats prerelease tags on the patch as equal to the base patch", () => { - expect(compareVersions("1.2.3-beta", "1.2.3")).toBe(0); - expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBe(0); + it("treats prerelease tags as less than the base version (SemVer 2.0)", () => { + expect(compareVersions("1.2.3-beta", "1.2.3")).toBeLessThan(0); + expect(compareVersions("1.2.3", "1.2.3-beta")).toBeGreaterThan(0); expect(compareVersions("1.2.3-rc.1", "1.2.4")).toBeLessThan(0); }); + it("orders prerelease tags lexicographically", () => { + expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBeLessThan(0); + expect(compareVersions("1.2.3-rc.1", "1.2.3-beta.1")).toBeGreaterThan( + 0 + ); + expect(compareVersions("1.2.3-alpha", "1.2.3-alpha")).toBe(0); + }); + it("never returns NaN for garbage input", () => { expect(Number.isFinite(compareVersions("junk", "1.2.3"))).toBe(true); expect(Number.isFinite(compareVersions("1.2.3", 
"also-junk"))).toBe( diff --git a/src/lib/release.ts b/src/lib/release.ts index 1efc869..53500bc 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -17,6 +17,12 @@ type ReleaseInfo = { assets: ReleaseAsset[]; }; +function isStandalone(): boolean { + return ( + Bun.main.startsWith("/$bunfs/") || import.meta.url.includes("$bunfs/") + ); +} + function getAssetName(): string | null { const platform = process.platform; const arch = process.arch; @@ -33,20 +39,44 @@ function parseNumericSegment(raw: string | undefined): number { return Number.isFinite(n) ? n : 0; } -function compareVersions(a: string, b: string): number { - const parse = function (v: string): [number, number, number] { - const [maj, min, patch] = v.replace(/^v/, "").split("."); - return [ - parseNumericSegment(maj), - parseNumericSegment(min), - parseNumericSegment(patch), - ]; +type ParsedVersion = { + major: number; + minor: number; + patch: number; + prerelease: string | null; +}; + +function parseVersion(v: string): ParsedVersion { + const stripped = v.replace(/^v/, ""); + const dashIndex = stripped.indexOf("-"); + const core = dashIndex === -1 ? stripped : stripped.slice(0, dashIndex); + const prerelease = + dashIndex === -1 ? 
null : stripped.slice(dashIndex + 1) || null; + const [maj, min, patch] = core.split("."); + return { + major: parseNumericSegment(maj), + minor: parseNumericSegment(min), + patch: parseNumericSegment(patch), + prerelease, }; - const [aMaj, aMin, aPatch] = parse(a); - const [bMaj, bMin, bPatch] = parse(b); - if (aMaj !== bMaj) return aMaj - bMaj; - if (aMin !== bMin) return aMin - bMin; - return aPatch - bPatch; +} + +function comparePrerelease(a: string | null, b: string | null): number { + if (a === b) return 0; + if (a === null) return 1; + if (b === null) return -1; + if (a < b) return -1; + if (a > b) return 1; + return 0; +} + +function compareVersions(a: string, b: string): number { + const pa = parseVersion(a); + const pb = parseVersion(b); + if (pa.major !== pb.major) return pa.major - pb.major; + if (pa.minor !== pb.minor) return pa.minor - pb.minor; + if (pa.patch !== pb.patch) return pa.patch - pb.patch; + return comparePrerelease(pa.prerelease, pb.prerelease); } function isReleaseInfo(value: unknown): value is { @@ -143,9 +173,14 @@ async function verifyBinaryHash( ): Promise { const hasher = new Bun.CryptoHasher("sha256"); const file = Bun.file(filePath); - const { data: buffer, error } = await tryCatch(file.arrayBuffer()); - if (error || !buffer) return false; - hasher.update(buffer); + const { error } = await tryCatch( + (async function () { + for await (const chunk of file.stream()) { + hasher.update(chunk); + } + })() + ); + if (error) return false; const actual = hasher.digest("hex"); return constantTimeEquals( actual.toLowerCase(), @@ -208,12 +243,13 @@ function parseSha256Sums(text: string): Record { } export { - getAssetName, compareVersions, - fetchLatestRelease, downloadAsset, - verifyBinaryHash, + fetchLatestRelease, fetchSha256Sums, + getAssetName, + isStandalone, parseSha256Sums, + verifyBinaryHash, }; export type { ReleaseAsset, ReleaseInfo, Sha256SumsResult }; From 66e746a7b0dfb3496b43dd6ed93dc95884a9bf10 Mon Sep 17 00:00:00 2001 From: 
Bhagya Mudgal Date: Sat, 18 Apr 2026 15:33:19 +0530 Subject: [PATCH 03/15] fix: ai comments --- README.md | 5 +++-- src/index.ts | 2 +- src/lib/auto-update.ts | 4 ++++ src/lib/config.ts | 14 ++++++++++++-- src/lib/release.test.ts | 14 +++++++++++--- src/lib/release.ts | 5 ++++- 6 files changed, 35 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index f22ecdf..5972bd1 100644 --- a/README.md +++ b/README.md @@ -62,12 +62,13 @@ On `remove`, it: ## Config -The `.worktreerc` file supports: +The `.worktreerc` file (in the repo root for project settings, in `~/` for user settings) supports: | Key | Description | Example | |-----|-------------|---------| | `DEFAULT_BASE` | Default base branch for new worktrees | `origin/dev` | | `WORKTREE_DIR` | Directory name for worktrees (default: `.worktrees`) | `.worktrees` | +| `AUTO_UPDATE` | Enable background auto-update checks in `~/.worktreerc` only (default: `true`) | `false` | ## Alias @@ -87,7 +88,7 @@ Once installed, `worktree` checks GitHub for a newer release at most once every To disable, create `~/.worktreerc` with: -``` +```ini AUTO_UPDATE=false ``` diff --git a/src/index.ts b/src/index.ts index 3576809..72d2c59 100644 --- a/src/index.ts +++ b/src/index.ts @@ -12,7 +12,7 @@ import { } from "./lib/auto-update"; import pkg from "../package.json"; -if (!process.argv.slice(2).includes(INTERNAL_CHECK_SUBCOMMAND)) { +if (process.argv[2] !== INTERNAL_CHECK_SUBCOMMAND) { applyPendingUpdate(); void scheduleBackgroundUpdateCheck(); } diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index b6b3d43..5fd0ada 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -288,6 +288,10 @@ function probeBinaryRuns(filePath: string): ProbeResult { stdout: "ignore", stderr: "ignore", timeout: PROBE_TIMEOUT_MS, + // Disable auto-update in the probe child — otherwise its top-level + // scheduleBackgroundUpdateCheck could spawn a grandchild if the + // 24h throttle has expired. 
+ env: { ...process.env, WORKTREE_NO_UPDATE: "1" }, }); }); if (error || !result) { diff --git a/src/lib/config.ts b/src/lib/config.ts index 335d8c0..4077ced 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -66,13 +66,23 @@ async function readConfigFile(filePath: string): Promise { const isExists = await file.exists(); if (!isExists) return validateConfig({}); const { data: content, error } = await tryCatch(file.text()); - if (error) return validateConfig({}); + if (error) { + console.error( + `warning: could not read ${filePath}: ${error.message}. Using defaults.` + ); + return validateConfig({}); + } const { data: parsed, error: parseError } = await tryCatch( Promise.resolve().then(function () { return validateConfig(parseConfigContent(content)); }) ); - if (parseError || !parsed) return validateConfig({}); + if (parseError || !parsed) { + console.error( + `warning: ${filePath} is invalid: ${parseError?.message ?? "unknown"}. Using defaults.` + ); + return validateConfig({}); + } return parsed; } diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts index 9082b39..d9da698 100644 --- a/src/lib/release.test.ts +++ b/src/lib/release.test.ts @@ -29,7 +29,7 @@ describe("compareVersions", () => { expect(compareVersions("1.2.3-rc.1", "1.2.4")).toBeLessThan(0); }); - it("orders prerelease tags lexicographically", () => { + it("orders prerelease tags lexicographically (NOT strict SemVer)", () => { expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBeLessThan(0); expect(compareVersions("1.2.3-rc.1", "1.2.3-beta.1")).toBeGreaterThan( 0 @@ -37,6 +37,14 @@ describe("compareVersions", () => { expect(compareVersions("1.2.3-alpha", "1.2.3-alpha")).toBe(0); }); + it("compares numeric prerelease segments as strings (rc.10 < rc.2)", () => { + // Intentional non-strict-SemVer: "rc.10" sorts before "rc.2" by + // lexicographic string compare. Strict SemVer 2.0 §11 would order + // numeric segments numerically (rc.2 < rc.10). 
If prereleases ever + // start shipping from this repo, revisit. + expect(compareVersions("1.2.3-rc.10", "1.2.3-rc.2")).toBeLessThan(0); + }); + it("never returns NaN for garbage input", () => { expect(Number.isFinite(compareVersions("junk", "1.2.3"))).toBe(true); expect(Number.isFinite(compareVersions("1.2.3", "also-junk"))).toBe( @@ -56,8 +64,8 @@ describe("parseSha256Sums", () => { expect(result["worktree-linux-x64"]).toBe("b".repeat(64)); }); - it("parses BSD-style asterisk prefix", () => { - const text = "c".repeat(64) + " *worktree-darwin-x64"; + it("parses BSD-style asterisk prefix (shasum -b)", () => { + const text = "c".repeat(64) + " *worktree-darwin-x64"; const result = parseSha256Sums(text); expect(result["worktree-darwin-x64"]).toBe("c".repeat(64)); }); diff --git a/src/lib/release.ts b/src/lib/release.ts index 53500bc..93b563e 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -223,7 +223,10 @@ async function fetchSha256Sums( }; } const { data: text, error: textError } = await tryCatch(response.text()); - if (textError || !text) { + if (textError) { + return { kind: "error", reason: textError.message }; + } + if (!text) { return { kind: "error", reason: "empty SHA256SUMS body" }; } return { kind: "ok", sums: parseSha256Sums(text) }; From d8aceae411ac1a07d2fe5474b248d1b9ece7a913 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sat, 18 Apr 2026 20:07:52 +0530 Subject: [PATCH 04/15] =?UTF-8?q?fix:=20harden=20PR=20#5=20auto-update=20?= =?UTF-8?q?=E2=80=94=20re-verify=20on=20apply,=20fail-closed=20config,=20d?= =?UTF-8?q?iscriminated=20hash=20result?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - auto-update.ts: re-verify staged binary hash before swap (meta sidecar now stores version+sha256); WORKTREE_NO_UPDATE guards applyPendingUpdate; user-visible stderr on apply failure; recordCheckCompleted on structural mismatches; probe stderr capture; log spawn failures; orphan-staging cleanup - release.ts: 
verifyBinaryHash now returns discriminated HashResult (mismatch vs io-error); new verifyBinaryHashSync; withTimeout callback form covers body reads - config.ts: readConfigFile throws on parse error; loadGlobalConfig propagates → isAutoUpdateDisabled fails closed; warn once per path with ~-prefixed display - update.ts: reorder SHA256 verification BEFORE chmod; handle HashResult discriminated union - fs-utils.ts (new): shared safeUnlink / safeUnlinkSync --- src/commands/update.ts | 41 ++++--- src/lib/auto-update.ts | 273 ++++++++++++++++++++++++++++++++--------- src/lib/config.ts | 38 ++++-- src/lib/fs-utils.ts | 16 +++ src/lib/release.ts | 163 ++++++++++++++---------- 5 files changed, 384 insertions(+), 147 deletions(-) create mode 100644 src/lib/fs-utils.ts diff --git a/src/commands/update.ts b/src/commands/update.ts index 58c485f..b14cf4d 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -4,6 +4,7 @@ import pkg from "../../package.json"; import { tryCatch } from "../lib/try-catch"; import { printSuccess, printError, printInfo, COLORS } from "../lib/logger"; import { EXIT_CODES } from "../lib/constants"; +import { safeUnlink } from "../lib/fs-utils"; import { compareVersions, downloadAsset, @@ -17,14 +18,10 @@ import { function isEaccesError(error: unknown): boolean { if (!(error instanceof Error)) return false; if (!("code" in error)) return false; + // Node errno exceptions carry `code` but TypeScript's Error lacks it. return (error as NodeJS.ErrnoException).code === "EACCES"; } -// Best-effort cleanup — the primary error is surfaced by the caller. 
-async function safeUnlink(filePath: string): Promise { - await fs.unlink(filePath).catch(function () {}); -} - export const updateCommand = command({ name: "update", desc: "Update worktree CLI to the latest version", @@ -95,15 +92,6 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } - const { error: chmodError } = await tryCatch(fs.chmod(tmpPath, 0o755)); - if (chmodError) { - await safeUnlink(tmpPath); - printError( - `Failed to mark binary executable: ${chmodError.message}` - ); - process.exit(EXIT_CODES.ERROR); - } - const sums = await fetchSha256Sums(release.assets); if (sums.kind === "error") { await safeUnlink(tmpPath); @@ -121,12 +109,18 @@ export const updateCommand = command({ ); process.exit(EXIT_CODES.ERROR); } - const ok = await verifyBinaryHash(tmpPath, expected); - if (!ok) { + const result = await verifyBinaryHash(tmpPath, expected); + if (!result.ok) { await safeUnlink(tmpPath); - printError( - `Hash mismatch for ${assetName}; refusing to install.` - ); + if (result.kind === "io-error") { + printError( + `Could not read downloaded binary for hash check: ${result.cause.message}.` + ); + } else { + printError( + `Hash mismatch for ${assetName}; refusing to install.` + ); + } process.exit(EXIT_CODES.ERROR); } printInfo("Verified SHA256 checksum."); @@ -136,6 +130,15 @@ export const updateCommand = command({ ); } + const { error: chmodError } = await tryCatch(fs.chmod(tmpPath, 0o755)); + if (chmodError) { + await safeUnlink(tmpPath); + printError( + `Failed to mark binary executable: ${chmodError.message}` + ); + process.exit(EXIT_CODES.ERROR); + } + const { error: renameError } = await tryCatch( fs.rename(tmpPath, binaryPath) ); diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index 5fd0ada..cb63f2f 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -2,6 +2,7 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; import { loadGlobalConfig } from "./config"; +import { 
safeUnlinkSync } from "./fs-utils"; import { compareVersions, downloadAsset, @@ -10,18 +11,22 @@ import { getAssetName, isStandalone, verifyBinaryHash, + verifyBinaryHashSync, } from "./release"; import { tryCatch, tryCatchSync } from "./try-catch"; import { COLORS } from "./logger"; import pkg from "../../package.json"; const STAGING_FILENAME = ".worktree.next"; -const VERSION_SIDECAR_FILENAME = ".worktree.next.version"; +const META_SIDECAR_FILENAME = ".worktree.next.meta"; const TWENTY_FOUR_HOURS_MS = 24 * 60 * 60 * 1000; const PROBE_TIMEOUT_MS = 2_000; const MAX_ERROR_LOG_BYTES = 64 * 1024; const ERROR_LOG_KEEP_LINES = 20; +const PROBE_STDERR_TRUNCATE_BYTES = 500; const INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; +const SIDECAR_VERSION_PATTERN = /^\d+\.\d+\.\d+(?:-[\w.-]+)?$/; +const SIDECAR_HASH_PATTERN = /^[0-9a-f]{64}$/; function getBinaryDir(): string { return path.dirname(process.execPath); @@ -31,8 +36,8 @@ function getStagingPath(): string { return path.join(getBinaryDir(), STAGING_FILENAME); } -function getVersionSidecarPath(): string { - return path.join(getBinaryDir(), VERSION_SIDECAR_FILENAME); +function getMetaSidecarPath(): string { + return path.join(getBinaryDir(), META_SIDECAR_FILENAME); } function getCacheDir(): string { @@ -78,54 +83,149 @@ function rotateErrorLogIfOversized(logPath: string): void { } } +type SidecarMeta = { version: string; sha256: string }; + +function formatSidecar(meta: SidecarMeta): string { + return `version=${meta.version}\nsha256=${meta.sha256}\n`; +} + +const SIDECAR_KNOWN_KEYS = new Set(["version", "sha256"]); + +function parseSidecar(text: string): SidecarMeta | null { + const kv: Record = {}; + for (const line of text.split("\n")) { + const trimmed = line.trim(); + if (trimmed === "") continue; + const eq = trimmed.indexOf("="); + if (eq === -1) return null; + const key = trimmed.slice(0, eq).trim(); + if (!SIDECAR_KNOWN_KEYS.has(key)) return null; + if (Object.prototype.hasOwnProperty.call(kv, key)) 
return null; + kv[key] = trimmed.slice(eq + 1).trim(); + } + const version = kv.version ?? ""; + const sha256 = (kv.sha256 ?? "").toLowerCase(); + if (!SIDECAR_VERSION_PATTERN.test(version)) return null; + if (!SIDECAR_HASH_PATTERN.test(sha256)) return null; + return { version, sha256 }; +} + +function cleanupStagedArtifacts(): void { + safeUnlinkSync(getStagingPath()); + safeUnlinkSync(getMetaSidecarPath()); +} + function applyPendingUpdate(): void { // Sync on purpose: runs before brocli.run; avoids top-level await. + if (process.env.WORKTREE_NO_UPDATE === "1") return; try { if (!isStandalone()) return; const stagedPath = getStagingPath(); if (!fs.existsSync(stagedPath)) return; - let newVersion = ""; - try { - newVersion = fs - .readFileSync(getVersionSidecarPath(), "utf8") - .trim(); - } catch { - // missing sidecar is not fatal + const metaPath = getMetaSidecarPath(); + if (!fs.existsSync(metaPath)) { + // Staging was partial; clean up the orphaned binary. + safeUnlinkSync(stagedPath); + appendLastError( + "apply", + "staged binary without sidecar — discarded" + ); + warnApplyFailed("staged update was incomplete (missing metadata)"); + return; } - fs.renameSync(stagedPath, process.execPath); - try { - fs.unlinkSync(getVersionSidecarPath()); - } catch { - // best-effort cleanup + const { data: metaText, error: metaReadError } = tryCatchSync( + function () { + return fs.readFileSync(metaPath, "utf8"); + } + ); + if (metaReadError) { + cleanupStagedArtifacts(); + appendLastError("apply", `sidecar read: ${metaReadError.message}`); + warnApplyFailed( + `could not read staged metadata (${metaReadError.message})` + ); + return; + } + const meta = parseSidecar(metaText); + if (!meta) { + cleanupStagedArtifacts(); + appendLastError("apply", "sidecar malformed — discarded stage"); + warnApplyFailed("staged metadata was malformed"); + return; } + const verify = verifyBinaryHashSync(stagedPath, meta.sha256); + if (!verify.ok) { + cleanupStagedArtifacts(); + if 
(verify.kind === "io-error") { + appendLastError( + "apply", + `hash io-error: ${verify.cause.message}` + ); + warnApplyFailed( + `could not read staged binary (${verify.cause.message})` + ); + } else { + appendLastError("apply", "staged binary hash mismatch"); + warnApplyFailed( + "staged binary failed integrity check — discarded" + ); + } + return; + } + + fs.renameSync(stagedPath, process.execPath); + safeUnlinkSync(metaPath); + const { GREEN, BOLD, RESET } = COLORS; - const label = newVersion ? ` to ${BOLD}v${newVersion}${RESET}` : ""; - console.error(`worktree ${GREEN}${BOLD}auto-updated${RESET}${label}`); - } catch (error) { - appendLastError( - "apply", - error instanceof Error ? error.message : String(error) + console.error( + `worktree ${GREEN}${BOLD}auto-updated${RESET} to ${BOLD}v${meta.version}${RESET}` ); + } catch (error) { + const message = error instanceof Error ? error.message : String(error); + appendLastError("apply", message); + warnApplyFailed(message); } } -async function readLastCheckMs(): Promise { - const { data: text, error } = await tryCatch( - Bun.file(getLastCheckPath()).text() +function warnApplyFailed(reason: string): void { + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: could not apply staged update (${reason}); continuing with current version${RESET}` ); - if (error || !text) return null; +} + +async function readLastCheckMs(): Promise { + const file = Bun.file(getLastCheckPath()); + const { data: exists } = await tryCatch(file.exists()); + if (!exists) return null; + const { data: text, error } = await tryCatch(file.text()); + if (error) { + appendLastError("check", `last-check read: ${error.message}`); + return null; + } + if (!text) return null; const parsed = Number(text.trim()); - if (!Number.isFinite(parsed)) return null; + if (!Number.isFinite(parsed)) { + appendLastError( + "check", + `last-check corrupt: ${JSON.stringify(text.slice(0, 40))}` + ); + return null; + } return parsed; } async function 
isAutoUpdateDisabled(): Promise { if (process.env.WORKTREE_NO_UPDATE === "1") return true; const { data: config, error } = await tryCatch(loadGlobalConfig()); - if (error || !config) return false; + if (error) { + appendLastError("check", `config load: ${error.message}`); + return true; + } + if (!config) return true; return config.AUTO_UPDATE === false; } @@ -153,8 +253,11 @@ async function scheduleBackgroundUpdateCheck(): Promise { stdout: "ignore", stdin: "ignore", }).unref(); - } catch { - // never block the user's command + } catch (error) { + appendLastError( + "check", + `spawn: ${error instanceof Error ? error.message : String(error)}` + ); } } @@ -171,7 +274,10 @@ function recordCheckCompleted(): void { async function runBackgroundUpdateCheck(): Promise { const assetName = getAssetName(); if (!assetName) { + // Structural mismatch that won't be fixed by retrying sooner — burn + // the 24h window to avoid thrashing GitHub's API on every launch. appendLastError("check", `unsupported platform/arch`); + recordCheckCompleted(); return; } @@ -198,6 +304,7 @@ async function runBackgroundUpdateCheck(): Promise { "check", `release ${release.tag} missing asset ${assetName}` ); + recordCheckCompleted(); return; } @@ -209,7 +316,7 @@ async function runBackgroundUpdateCheck(): Promise { const { error: dlError } = await tryCatch(downloadAsset(asset, tmpPath)); if (dlError) { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); appendLastError("check", `download: ${dlError.message}`); return; } @@ -218,29 +325,39 @@ async function runBackgroundUpdateCheck(): Promise { // Running an unverified binary (even just `--version`) is code execution. 
const sums = await fetchSha256Sums(release.assets); if (sums.kind === "error") { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); appendLastError( "check", `SHA256SUMS fetch failed — refusing to stage: ${sums.reason}` ); return; } + + let verifiedHash: string | null = null; if (sums.kind === "ok") { const expected = sums.sums[assetName]; if (!expected) { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); appendLastError( "check", `SHA256SUMS missing entry for ${assetName}` ); return; } - const ok = await verifyBinaryHash(tmpPath, expected); - if (!ok) { - safeUnlink(tmpPath); - appendLastError("check", `hash mismatch for ${assetName}`); + const verify = await verifyBinaryHash(tmpPath, expected); + if (!verify.ok) { + safeUnlinkSync(tmpPath); + if (verify.kind === "io-error") { + appendLastError( + "check", + `hash io-error for ${assetName}: ${verify.cause.message}` + ); + } else { + appendLastError("check", `hash mismatch for ${assetName}`); + } return; } + verifiedHash = expected.toLowerCase(); } // sums.kind === "not-published" → legacy release without SHA256SUMS; trust TLS. @@ -248,34 +365,77 @@ async function runBackgroundUpdateCheck(): Promise { fs.chmodSync(tmpPath, 0o755); }); if (chmodError) { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); appendLastError("check", `chmod: ${chmodError.message}`); return; } const probe = probeBinaryRuns(tmpPath); if (!probe.ok) { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); appendLastError("check", `probe: ${probe.reason}`); return; } + // Legacy releases without SHA256SUMS can't stage via the re-verify path + // because applyPendingUpdate needs a hash to check. Compute it ourselves + // from the probed binary so apply can still run. 
+ if (verifiedHash === null) { + const { data: computed, error: hashError } = tryCatchSync(function () { + const buffer = fs.readFileSync(tmpPath); + const hasher = new Bun.CryptoHasher("sha256"); + hasher.update(buffer); + return hasher.digest("hex").toLowerCase(); + }); + if (hashError || !computed) { + safeUnlinkSync(tmpPath); + appendLastError( + "check", + `post-probe hash: ${hashError?.message ?? "unknown"}` + ); + return; + } + verifiedHash = computed; + } + + // Commit sidecar BEFORE the binary rename so applyPendingUpdate never + // sees a staged binary without its metadata. + const metaTmpPath = path.join( + binaryDir, + `${META_SIDECAR_FILENAME}.${process.pid}.tmp` + ); + const sidecarContent = formatSidecar({ + version: release.version, + sha256: verifiedHash, + }); + const { error: metaWriteError } = tryCatchSync(function () { + fs.writeFileSync(metaTmpPath, sidecarContent); + }); + if (metaWriteError) { + safeUnlinkSync(tmpPath); + appendLastError("check", `sidecar write: ${metaWriteError.message}`); + return; + } + const { error: metaRenameError } = tryCatchSync(function () { + fs.renameSync(metaTmpPath, getMetaSidecarPath()); + }); + if (metaRenameError) { + safeUnlinkSync(tmpPath); + safeUnlinkSync(metaTmpPath); + appendLastError("check", `sidecar commit: ${metaRenameError.message}`); + return; + } + const { error: renameError } = tryCatchSync(function () { fs.renameSync(tmpPath, getStagingPath()); }); if (renameError) { - safeUnlink(tmpPath); + safeUnlinkSync(tmpPath); + safeUnlinkSync(getMetaSidecarPath()); appendLastError("check", `stage: ${renameError.message}`); return; } - const { error: sidecarError } = tryCatchSync(function () { - fs.writeFileSync(getVersionSidecarPath(), release.version); - }); - if (sidecarError) { - appendLastError("check", `sidecar: ${sidecarError.message}`); - } - recordCheckCompleted(); } @@ -286,11 +446,11 @@ function probeBinaryRuns(filePath: string): ProbeResult { return Bun.spawnSync({ cmd: [filePath, 
"--version"], stdout: "ignore", - stderr: "ignore", + stderr: "pipe", timeout: PROBE_TIMEOUT_MS, // Disable auto-update in the probe child — otherwise its top-level - // scheduleBackgroundUpdateCheck could spawn a grandchild if the - // 24h throttle has expired. + // scheduleBackgroundUpdateCheck could spawn a grandchild, and + // applyPendingUpdate could consume a stale staged binary. env: { ...process.env, WORKTREE_NO_UPDATE: "1" }, }); }); @@ -301,17 +461,20 @@ function probeBinaryRuns(filePath: string): ProbeResult { }; } if (result.exitCode !== 0) { - return { ok: false, reason: `exit ${result.exitCode}` }; + const stderr = decodeProbeStderr(result.stderr); + const base = `exit ${result.exitCode}`; + return { ok: false, reason: stderr ? `${base}: ${stderr}` : base }; } return { ok: true }; } -function safeUnlink(filePath: string): void { - try { - fs.unlinkSync(filePath); - } catch { - // best-effort +function decodeProbeStderr(stderr: unknown): string { + if (!(stderr instanceof Uint8Array) && !(stderr instanceof Buffer)) { + return ""; } + const bytes = stderr instanceof Buffer ? 
new Uint8Array(stderr) : stderr; + const truncated = bytes.slice(0, PROBE_STDERR_TRUNCATE_BYTES); + return new TextDecoder().decode(truncated).trim(); } export { diff --git a/src/lib/config.ts b/src/lib/config.ts index 4077ced..2bfbdbe 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -61,14 +61,33 @@ function validateConfig(raw: Record): Config { return configSchema.parse(raw); } +const warnedPaths = new Set(); + +function displayPath(filePath: string): string { + const home = os.homedir(); + if (filePath === home) return "~"; + if (filePath.startsWith(home + path.sep)) { + return "~" + filePath.slice(home.length); + } + return filePath; +} + +function warnOnce(filePath: string, message: string): void { + if (warnedPaths.has(filePath)) return; + warnedPaths.add(filePath); + console.error(message); +} + async function readConfigFile(filePath: string): Promise { const file = Bun.file(filePath); const isExists = await file.exists(); if (!isExists) return validateConfig({}); + const display = displayPath(filePath); const { data: content, error } = await tryCatch(file.text()); if (error) { - console.error( - `warning: could not read ${filePath}: ${error.message}. Using defaults.` + warnOnce( + filePath, + `warning: could not read ${display}: ${error.message}. Using defaults.` ); return validateConfig({}); } @@ -77,17 +96,22 @@ async function readConfigFile(filePath: string): Promise { return validateConfig(parseConfigContent(content)); }) ); - if (parseError || !parsed) { - console.error( - `warning: ${filePath} is invalid: ${parseError?.message ?? "unknown"}. 
Using defaults.` + if (parseError) { + warnOnce( + filePath, + `warning: ${display} is invalid: ${parseError.message}.` ); - return validateConfig({}); + throw parseError; } return parsed; } async function loadConfig(root: string): Promise { - return readConfigFile(path.join(root, ".worktreerc")); + const { data, error } = await tryCatch( + readConfigFile(path.join(root, ".worktreerc")) + ); + if (error || !data) return validateConfig({}); + return data; } async function loadGlobalConfig(): Promise { diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts new file mode 100644 index 0000000..4b3b2f9 --- /dev/null +++ b/src/lib/fs-utils.ts @@ -0,0 +1,16 @@ +import fs from "node:fs"; +import fsPromises from "node:fs/promises"; + +async function safeUnlink(filePath: string): Promise { + await fsPromises.unlink(filePath).catch(function () {}); +} + +function safeUnlinkSync(filePath: string): void { + try { + fs.unlinkSync(filePath); + } catch { + // best-effort + } +} + +export { safeUnlink, safeUnlinkSync }; diff --git a/src/lib/release.ts b/src/lib/release.ts index 93b563e..57edeae 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -1,4 +1,5 @@ -import { tryCatch } from "./try-catch"; +import fs from "node:fs"; +import { tryCatch, tryCatchSync } from "./try-catch"; const REPO = "bhagyamudgal/worktree-cli"; const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/latest`; @@ -97,16 +98,18 @@ function isReleaseInfo(value: unknown): value is { }); } -async function fetchWithTimeout( +async function withTimeout( url: string, - timeoutMs: number -): Promise { + timeoutMs: number, + handler: (response: Response) => Promise +): Promise { const controller = new AbortController(); const timer = setTimeout(function () { controller.abort(); }, timeoutMs); try { - return await fetch(url, { signal: controller.signal }); + const response = await fetch(url, { signal: controller.signal }); + return await handler(response); } finally { clearTimeout(timer); } 
@@ -115,29 +118,30 @@ async function fetchWithTimeout( async function fetchLatestRelease( timeoutMs: number = DEFAULT_META_TIMEOUT_MS ): Promise { - const { data: response, error } = await tryCatch( - fetchWithTimeout(API_RELEASES_LATEST, timeoutMs) + const { data: result, error } = await tryCatch( + withTimeout(API_RELEASES_LATEST, timeoutMs, async function (response) { + if (!response.ok) { + throw new Error( + `GitHub API error: ${response.status} ${response.statusText}` + ); + } + const json = await response.json(); + if (!isReleaseInfo(json)) { + throw new Error("Release payload missing tag_name or assets"); + } + return { + tag: json.tag_name, + version: json.tag_name.replace(/^v/, ""), + assets: json.assets, + }; + }) ); - if (error || !response) { + if (error || !result) { throw new Error( `Failed to reach GitHub releases API: ${error?.message ?? "unknown"}` ); } - if (!response.ok) { - throw new Error( - `GitHub API error: ${response.status} ${response.statusText}` - ); - } - const { data: json, error: jsonError } = await tryCatch(response.json()); - if (jsonError) - throw new Error(`Invalid release JSON: ${jsonError.message}`); - if (!isReleaseInfo(json)) - throw new Error("Release payload missing tag_name or assets"); - return { - tag: json.tag_name, - version: json.tag_name.replace(/^v/, ""), - assets: json.assets, - }; + return result; } async function downloadAsset( @@ -145,32 +149,35 @@ async function downloadAsset( destPath: string, timeoutMs: number = DEFAULT_ASSET_TIMEOUT_MS ): Promise { - const { data: response, error } = await tryCatch( - fetchWithTimeout(asset.browser_download_url, timeoutMs) - ); - if (error || !response) { - throw new Error( - `Failed to download ${asset.name}: ${error?.message ?? 
"unknown"}` - ); - } - if (!response.ok) { - throw new Error( - `Download ${asset.name} failed: ${response.status} ${response.statusText}` - ); - } - const { data: buffer, error: bufError } = await tryCatch( - response.arrayBuffer() + const { error } = await tryCatch( + withTimeout( + asset.browser_download_url, + timeoutMs, + async function (response) { + if (!response.ok) { + throw new Error( + `Download ${asset.name} failed: ${response.status} ${response.statusText}` + ); + } + const buffer = await response.arrayBuffer(); + await Bun.write(destPath, buffer); + } + ) ); - if (bufError || !buffer) { - throw new Error(`Failed to read ${asset.name} body`); + if (error) { + throw new Error(`Failed to download ${asset.name}: ${error.message}`); } - await Bun.write(destPath, buffer); } +type HashResult = + | { ok: true } + | { ok: false; kind: "mismatch" } + | { ok: false; kind: "io-error"; cause: Error }; + async function verifyBinaryHash( filePath: string, expectedSha256: string -): Promise { +): Promise { const hasher = new Bun.CryptoHasher("sha256"); const file = Bun.file(filePath); const { error } = await tryCatch( @@ -180,12 +187,29 @@ async function verifyBinaryHash( } })() ); - if (error) return false; - const actual = hasher.digest("hex"); - return constantTimeEquals( - actual.toLowerCase(), - expectedSha256.toLowerCase() - ); + if (error) return { ok: false, kind: "io-error", cause: error }; + const actual = hasher.digest("hex").toLowerCase(); + if (constantTimeEquals(actual, expectedSha256.toLowerCase())) { + return { ok: true }; + } + return { ok: false, kind: "mismatch" }; +} + +function verifyBinaryHashSync( + filePath: string, + expectedSha256: string +): HashResult { + const { data: actual, error } = tryCatchSync(function () { + const buffer = fs.readFileSync(filePath); + const hasher = new Bun.CryptoHasher("sha256"); + hasher.update(buffer); + return hasher.digest("hex").toLowerCase(); + }); + if (error) return { ok: false, kind: "io-error", cause: 
error }; + if (constantTimeEquals(actual, expectedSha256.toLowerCase())) { + return { ok: true }; + } + return { ok: false, kind: "mismatch" }; } function constantTimeEquals(a: string, b: string): boolean { @@ -210,26 +234,32 @@ async function fetchSha256Sums( return entry.name === "SHA256SUMS"; }); if (!sumsAsset) return { kind: "not-published" }; - const { data: response, error } = await tryCatch( - fetchWithTimeout(sumsAsset.browser_download_url, timeoutMs) + const { data: result, error } = await tryCatch( + withTimeout( + sumsAsset.browser_download_url, + timeoutMs, + async function (response): Promise { + if (!response.ok) { + return { + kind: "error", + reason: `${response.status} ${response.statusText}`, + }; + } + const text = await response.text(); + if (!text) { + return { + kind: "error", + reason: "empty SHA256SUMS body", + }; + } + return { kind: "ok", sums: parseSha256Sums(text) }; + } + ) ); - if (error || !response) { + if (error || !result) { return { kind: "error", reason: error?.message ?? 
"network error" }; } - if (!response.ok) { - return { - kind: "error", - reason: `${response.status} ${response.statusText}`, - }; - } - const { data: text, error: textError } = await tryCatch(response.text()); - if (textError) { - return { kind: "error", reason: textError.message }; - } - if (!text) { - return { kind: "error", reason: "empty SHA256SUMS body" }; - } - return { kind: "ok", sums: parseSha256Sums(text) }; + return result; } function parseSha256Sums(text: string): Record { @@ -254,5 +284,6 @@ export { isStandalone, parseSha256Sums, verifyBinaryHash, + verifyBinaryHashSync, }; -export type { ReleaseAsset, ReleaseInfo, Sha256SumsResult }; +export type { HashResult, ReleaseAsset, ReleaseInfo, Sha256SumsResult }; From af3db7af589b88f0d7637ffcfedccd11cd97758a Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sat, 18 Apr 2026 20:15:26 +0530 Subject: [PATCH 05/15] fix: ai comment --- src/lib/auto-update.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index cb63f2f..c57f1ee 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -178,6 +178,10 @@ function applyPendingUpdate(): void { fs.renameSync(stagedPath, process.execPath); safeUnlinkSync(metaPath); + // Bump the throttle so the sibling scheduleBackgroundUpdateCheck() in + // src/index.ts doesn't spawn a redundant child-check immediately after + // a just-applied update — the new binary is already current. + recordCheckCompleted(); const { GREEN, BOLD, RESET } = COLORS; console.error( From cb33f2e5471d85e2d460769a988c7864ce97f1e6 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sat, 18 Apr 2026 20:49:02 +0530 Subject: [PATCH 06/15] fix: harden PR #5 auto-update from self-review findings - safeUnlink*: silent on ENOENT only; emit dim stderr warning on EACCES/EPERM/etc. 
- applyPendingUpdate: wrap rename in tryCatchSync; on EACCES/EPERM/EROFS clean up staged artifacts and suggest 'sudo worktree update' (breaks infinite apply-fail loop) - runBackgroundUpdateCheck: don't burn 24h throttle on asset-missing (transient) - verifyBinaryHashSync: switch to 64 KB chunked openSync+readSync; no full-file Buffer - downloadAsset: stream Response directly to disk via Bun.write(destPath, response) - release.ts: extract computeSha256Async/computeSha256Sync; dedupe 3 hashing sites - fetchLatestRelease/downloadAsset: preserve errno via Error.cause chain - isEaccesError: walk .cause chain so download EACCES suggests sudo - parseSha256Sums: throw on duplicate filename entries - probeBinaryRuns: surface 'timed out after 2000ms' instead of 'exit null' - scheduleBackgroundUpdateCheck: rethrow non-errno errors so TypeErrors surface - scheduleBackgroundUpdateCheck: spawn with detached:true (setsid) so background download survives terminal close - release.yml: drop dead tail/od newline guard; shasum always emits trailing newline --- .github/workflows/release.yml | 208 +++++++++++++++++----------------- CHANGELOG.md | 10 ++ src/commands/update.ts | 16 ++- src/lib/auto-update.ts | 51 +++++++-- src/lib/fs-utils.ts | 27 ++++- src/lib/release.ts | 67 ++++++++--- 6 files changed, 238 insertions(+), 141 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 35642cb..9c5628e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,113 +1,109 @@ name: Release on: - push: - tags: ["v*"] + push: + tags: ["v*"] permissions: - contents: write + contents: write jobs: - build: - name: Build ${{ matrix.target }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - target: darwin-arm64 - os: macos-latest - bun_target: bun-darwin-arm64 - - target: darwin-x64 - os: macos-15-intel - bun_target: bun-darwin-x64 - - target: linux-x64 - os: ubuntu-latest - bun_target: bun-linux-x64 - - target: 
linux-arm64 - os: ubuntu-24.04-arm - bun_target: bun-linux-arm64 - - steps: - - uses: actions/checkout@v6 - - uses: oven-sh/setup-bun@v2 - - run: bun install --frozen-lockfile - - - name: Compile binary - run: | - bun build src/index.ts \ - --compile \ - --minify \ - --sourcemap=none \ - --target=${{ matrix.bun_target }} \ - --outfile dist/worktree-${{ matrix.target }} - - - name: Install llvm-strip (Linux) - if: startsWith(matrix.target, 'linux-') - run: sudo apt-get update && sudo apt-get install -y llvm - - - name: Strip symbols (Linux) - if: startsWith(matrix.target, 'linux-') - run: llvm-strip --strip-unneeded dist/worktree-${{ matrix.target }} - - - name: Strip symbols (macOS) - if: startsWith(matrix.target, 'darwin-') - run: strip dist/worktree-${{ matrix.target }} - - - name: Ad-hoc codesign (macOS) - if: startsWith(matrix.target, 'darwin-') - run: | - xattr -cr dist/worktree-${{ matrix.target }} - codesign --force --sign - dist/worktree-${{ matrix.target }} - codesign -dv dist/worktree-${{ matrix.target }} - - - name: Verify binary architecture and size - run: | - file dist/worktree-${{ matrix.target }} - ls -lh dist/worktree-${{ matrix.target }} - - - name: Smoke-test --version - run: | - chmod +x dist/worktree-${{ matrix.target }} - ./dist/worktree-${{ matrix.target }} --version - - - name: Compute SHA256 - run: | - cd dist - shasum -a 256 worktree-${{ matrix.target }} > worktree-${{ matrix.target }}.sha256 - - - uses: actions/upload-artifact@v7 - with: - name: worktree-${{ matrix.target }} - path: | - dist/worktree-${{ matrix.target }} - dist/worktree-${{ matrix.target }}.sha256 - - release: - name: Create Release - needs: build - runs-on: ubuntu-latest - steps: - - uses: actions/download-artifact@v8 - with: - path: artifacts - merge-multiple: true - - - name: Aggregate SHA256SUMS - run: | - cd artifacts - : > SHA256SUMS - for f in worktree-*.sha256; do - cat "$f" >> SHA256SUMS - tail -c1 "$f" | od -An -c | grep -q '\\n' || echo >> SHA256SUMS - done - 
rm worktree-*.sha256 - cat SHA256SUMS - - - run: chmod +x artifacts/worktree-* - - - uses: softprops/action-gh-release@v2 - with: - generate_release_notes: true - files: | - artifacts/worktree-* - artifacts/SHA256SUMS + build: + name: Build ${{ matrix.target }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - target: darwin-arm64 + os: macos-latest + bun_target: bun-darwin-arm64 + - target: darwin-x64 + os: macos-15-intel + bun_target: bun-darwin-x64 + - target: linux-x64 + os: ubuntu-latest + bun_target: bun-linux-x64 + - target: linux-arm64 + os: ubuntu-24.04-arm + bun_target: bun-linux-arm64 + + steps: + - uses: actions/checkout@v6 + - uses: oven-sh/setup-bun@v2 + - run: bun install --frozen-lockfile + + - name: Compile binary + run: | + bun build src/index.ts \ + --compile \ + --minify \ + --sourcemap=none \ + --target=${{ matrix.bun_target }} \ + --outfile dist/worktree-${{ matrix.target }} + + - name: Install llvm-strip (Linux) + if: startsWith(matrix.target, 'linux-') + run: sudo apt-get update && sudo apt-get install -y llvm + + - name: Strip symbols (Linux) + if: startsWith(matrix.target, 'linux-') + run: llvm-strip --strip-unneeded dist/worktree-${{ matrix.target }} + + - name: Strip symbols (macOS) + if: startsWith(matrix.target, 'darwin-') + run: strip dist/worktree-${{ matrix.target }} + + - name: Ad-hoc codesign (macOS) + if: startsWith(matrix.target, 'darwin-') + run: | + xattr -cr dist/worktree-${{ matrix.target }} + codesign --force --sign - dist/worktree-${{ matrix.target }} + codesign -dv dist/worktree-${{ matrix.target }} + + - name: Verify binary architecture and size + run: | + file dist/worktree-${{ matrix.target }} + ls -lh dist/worktree-${{ matrix.target }} + + - name: Smoke-test --version + run: | + chmod +x dist/worktree-${{ matrix.target }} + ./dist/worktree-${{ matrix.target }} --version + + - name: Compute SHA256 + run: | + cd dist + shasum -a 256 worktree-${{ matrix.target }} > worktree-${{ matrix.target }}.sha256 + + 
- uses: actions/upload-artifact@v7 + with: + name: worktree-${{ matrix.target }} + path: | + dist/worktree-${{ matrix.target }} + dist/worktree-${{ matrix.target }}.sha256 + + release: + name: Create Release + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v8 + with: + path: artifacts + merge-multiple: true + + - name: Aggregate SHA256SUMS + run: | + cd artifacts + cat worktree-*.sha256 > SHA256SUMS + rm worktree-*.sha256 + cat SHA256SUMS + + - run: chmod +x artifacts/worktree-* + + - uses: softprops/action-gh-release@v2 + with: + generate_release_notes: true + files: | + artifacts/worktree-* + artifacts/SHA256SUMS diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f86a61..b5a41d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** in the release workflow after stripping. Prior releases shipped unsigned binaries, which Apple Silicon (arm64) macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. +- Startup hash verification of a staged binary now reads in 64 KB chunks instead of buffering the full binary in memory, keeping peak RSS flat on every launch. +- Asset-download phase now streams directly from `fetch` to disk via `Bun.write(destPath, response)` instead of round-tripping through an `ArrayBuffer`. +- A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle — the next launch retries so users on the lagging arch get updated promptly once the asset is uploaded. +- `worktree auto-update` failures caused by a read-only binary directory (`EACCES`/`EPERM`/`EROFS` on the atomic swap) now clean up the staged artifacts and print a one-line `run "sudo worktree update"` hint on stderr instead of looping on every launch. 
+- `worktree update` now recognises `EACCES` on the download step and surfaces the same `sudo worktree update` hint it already showed on the rename step. +- Unlink errors in cleanup paths now distinguish `ENOENT` (silent, expected) from real failures (`EACCES`, `EPERM`, etc.) — real failures emit a dim stderr warning instead of being silently swallowed. +- `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). +- Probe-timeout failures now surface as `timed out after 2000ms` instead of the opaque `exit null`. +- Network errors from `fetchLatestRelease`/`downloadAsset` now preserve the underlying errno chain via `Error.cause`, so callers can classify failures accurately. +- The background update child is now spawned with `detached: true` (POSIX `setsid()`), so a slow download isn't cut short when the user's shell/terminal exits — the child finishes the check in its own session. ## [1.2.0] - 2026-04-17 diff --git a/src/commands/update.ts b/src/commands/update.ts index b14cf4d..654fa0c 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -17,9 +17,13 @@ import { function isEaccesError(error: unknown): boolean { if (!(error instanceof Error)) return false; - if (!("code" in error)) return false; - // Node errno exceptions carry `code` but TypeScript's Error lacks it. - return (error as NodeJS.ErrnoException).code === "EACCES"; + if ("code" in error && (error as NodeJS.ErrnoException).code === "EACCES") { + return true; + } + // release.ts wraps errno errors as `new Error(msg, { cause })` — walk the + // chain so EACCES surfaces even when the top-level Error lacks .code. + if ("cause" in error && error.cause) return isEaccesError(error.cause); + return false; } export const updateCommand = command({ @@ -88,7 +92,11 @@ export const updateCommand = command({ ); if (dlError) { await safeUnlink(tmpPath); - printError(dlError.message); + if (isEaccesError(dlError)) { + printError("Permission denied. 
Try: sudo worktree update"); + } else { + printError(dlError.message); + } process.exit(EXIT_CODES.ERROR); } diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index c57f1ee..efb9edc 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -5,6 +5,7 @@ import { loadGlobalConfig } from "./config"; import { safeUnlinkSync } from "./fs-utils"; import { compareVersions, + computeSha256Sync, downloadAsset, fetchLatestRelease, fetchSha256Sums, @@ -176,7 +177,25 @@ function applyPendingUpdate(): void { return; } - fs.renameSync(stagedPath, process.execPath); + const { error: renameError } = tryCatchSync(function () { + fs.renameSync(stagedPath, process.execPath); + }); + if (renameError) { + const code = (renameError as NodeJS.ErrnoException).code; + if (code === "EACCES" || code === "EPERM" || code === "EROFS") { + // Cleanup so we don't re-enter this failure on every launch. + cleanupStagedArtifacts(); + appendLastError( + "apply", + `rename ${code}: ${renameError.message}` + ); + warnApplyFailed( + `binary directory not writable (${code}) — run "sudo worktree update" to install the pending update manually` + ); + return; + } + throw renameError; + } safeUnlinkSync(metaPath); // Bump the throttle so the sibling scheduleBackgroundUpdateCheck() in // src/index.ts doesn't spawn a redundant child-check immediately after @@ -256,12 +275,18 @@ async function scheduleBackgroundUpdateCheck(): Promise { stderr: "ignore", stdout: "ignore", stdin: "ignore", + // POSIX: setsid() — survives terminal close (SIGHUP) so a slow + // download isn't cut short when the user's shell exits. + detached: true, }).unref(); } catch (error) { - appendLastError( - "check", - `spawn: ${error instanceof Error ? error.message : String(error)}` - ); + // Only swallow errno-style errors (spawn failures, fs errors). Let + // programmer bugs (TypeError, ReferenceError) propagate so they + // surface via Bun's default unhandled-rejection handler. 
+ if (!(error instanceof Error) || !("code" in error)) { + throw error; + } + appendLastError("check", `spawn: ${error.message}`); } } @@ -304,11 +329,12 @@ async function runBackgroundUpdateCheck(): Promise { return entry.name === assetName; }); if (!asset) { + // Don't record completion — asset-missing is transient (maintainer may + // upload the missing arch moments later). Let the next launch retry. appendLastError( "check", `release ${release.tag} missing asset ${assetName}` ); - recordCheckCompleted(); return; } @@ -386,10 +412,7 @@ async function runBackgroundUpdateCheck(): Promise { // from the probed binary so apply can still run. if (verifiedHash === null) { const { data: computed, error: hashError } = tryCatchSync(function () { - const buffer = fs.readFileSync(tmpPath); - const hasher = new Bun.CryptoHasher("sha256"); - hasher.update(buffer); - return hasher.digest("hex").toLowerCase(); + return computeSha256Sync(tmpPath); }); if (hashError || !computed) { safeUnlinkSync(tmpPath); @@ -464,6 +487,14 @@ function probeBinaryRuns(filePath: string): ProbeResult { reason: error?.message ?? "spawn failed", }; } + if (result.exitCode === null) { + // Bun.spawnSync returns exitCode=null when the child was killed by the + // timeout — surface that explicitly instead of the opaque "exit null". 
+ return { + ok: false, + reason: `timed out after ${PROBE_TIMEOUT_MS}ms`, + }; + } if (result.exitCode !== 0) { const stderr = decodeProbeStderr(result.stderr); const base = `exit ${result.exitCode}`; diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts index 4b3b2f9..e646cf3 100644 --- a/src/lib/fs-utils.ts +++ b/src/lib/fs-utils.ts @@ -1,15 +1,36 @@ import fs from "node:fs"; import fsPromises from "node:fs/promises"; +import { COLORS } from "./logger"; + +function isEnoent(error: unknown): boolean { + return ( + error instanceof Error && + "code" in error && + (error as NodeJS.ErrnoException).code === "ENOENT" + ); +} + +function warnUnlinkFailure(filePath: string, error: unknown): void { + const message = error instanceof Error ? error.message : String(error); + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: could not remove ${filePath} (${message})${RESET}` + ); +} async function safeUnlink(filePath: string): Promise { - await fsPromises.unlink(filePath).catch(function () {}); + await fsPromises.unlink(filePath).catch(function (error) { + if (isEnoent(error)) return; + warnUnlinkFailure(filePath, error); + }); } function safeUnlinkSync(filePath: string): void { try { fs.unlinkSync(filePath); - } catch { - // best-effort + } catch (error) { + if (isEnoent(error)) return; + warnUnlinkFailure(filePath, error); } } diff --git a/src/lib/release.ts b/src/lib/release.ts index 57edeae..6724ae4 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -138,7 +138,8 @@ async function fetchLatestRelease( ); if (error || !result) { throw new Error( - `Failed to reach GitHub releases API: ${error?.message ?? "unknown"}` + `Failed to reach GitHub releases API: ${error?.message ?? "unknown"}`, + { cause: error ?? 
undefined } ); } return result; @@ -159,13 +160,14 @@ async function downloadAsset( `Download ${asset.name} failed: ${response.status} ${response.statusText}` ); } - const buffer = await response.arrayBuffer(); - await Bun.write(destPath, buffer); + await Bun.write(destPath, response); } ) ); if (error) { - throw new Error(`Failed to download ${asset.name}: ${error.message}`); + throw new Error(`Failed to download ${asset.name}: ${error.message}`, { + cause: error, + }); } } @@ -174,21 +176,47 @@ type HashResult = | { ok: false; kind: "mismatch" } | { ok: false; kind: "io-error"; cause: Error }; +const HASH_CHUNK_BYTES = 64 * 1024; + +async function computeSha256Async(filePath: string): Promise { + const hasher = new Bun.CryptoHasher("sha256"); + const file = Bun.file(filePath); + for await (const chunk of file.stream()) { + hasher.update(chunk); + } + return hasher.digest("hex").toLowerCase(); +} + +function computeSha256Sync(filePath: string): string { + const hasher = new Bun.CryptoHasher("sha256"); + const fd = fs.openSync(filePath, "r"); + try { + const buffer = Buffer.alloc(HASH_CHUNK_BYTES); + while (true) { + const bytesRead = fs.readSync( + fd, + buffer, + 0, + HASH_CHUNK_BYTES, + null + ); + if (bytesRead === 0) break; + hasher.update(buffer.subarray(0, bytesRead)); + } + } finally { + fs.closeSync(fd); + } + return hasher.digest("hex").toLowerCase(); +} + async function verifyBinaryHash( filePath: string, expectedSha256: string ): Promise { - const hasher = new Bun.CryptoHasher("sha256"); - const file = Bun.file(filePath); - const { error } = await tryCatch( - (async function () { - for await (const chunk of file.stream()) { - hasher.update(chunk); - } - })() + const { data: actual, error } = await tryCatch( + computeSha256Async(filePath) ); if (error) return { ok: false, kind: "io-error", cause: error }; - const actual = hasher.digest("hex").toLowerCase(); if (constantTimeEquals(actual, expectedSha256.toLowerCase())) { return { ok: true }; } @@ -200,10 
+228,7 @@ function verifyBinaryHashSync( expectedSha256: string ): HashResult { const { data: actual, error } = tryCatchSync(function () { - const buffer = fs.readFileSync(filePath); - const hasher = new Bun.CryptoHasher("sha256"); - hasher.update(buffer); - return hasher.digest("hex").toLowerCase(); + return computeSha256Sync(filePath); }); if (error) return { ok: false, kind: "io-error", cause: error }; if (constantTimeEquals(actual, expectedSha256.toLowerCase())) { @@ -270,13 +295,19 @@ function parseSha256Sums(text: string): Record { const match = /^([0-9a-fA-F]{64})\s+\*?(.+)$/.exec(trimmed); if (!match) continue; const [, hash, filename] = match; - result[filename.trim()] = hash.toLowerCase(); + const name = filename.trim(); + if (Object.prototype.hasOwnProperty.call(result, name)) { + throw new Error(`Duplicate SHA256SUMS entry for ${name}`); + } + result[name] = hash.toLowerCase(); } return result; } export { compareVersions, + computeSha256Async, + computeSha256Sync, downloadAsset, fetchLatestRelease, fetchSha256Sums, From 7a4c6cfef6c2746feea9fc922fc3ba4981acd240 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sat, 18 Apr 2026 21:29:31 +0530 Subject: [PATCH 07/15] =?UTF-8?q?fix:=20harden=20PR=20#5=20auto-update=20f?= =?UTF-8?q?rom=20self-review=20findings=20(15=20hardening=20fixes=20?= =?UTF-8?q?=E2=80=94=20see=20CHANGELOG)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/release.yml | 15 +++++ CHANGELOG.md | 20 ++++++- README.md | 2 + src/commands/update.ts | 86 +++++++++++++-------------- src/lib/auto-update.ts | 109 ++++++++++++++++++---------------- src/lib/config.ts | 42 +++++++------ src/lib/fs-utils.ts | 33 +++++++++- src/lib/release.ts | 80 ++++++++++++++++++++++--- 8 files changed, 260 insertions(+), 127 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9c5628e..61ab04a 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -101,9 +101,24 @@ jobs: - run: chmod +x artifacts/worktree-* + # Create the release as a draft and upload all assets to it. + # Publishing happens in the next step so `releases/latest` never + # returns a release where binaries are attached but SHA256SUMS + # is still being uploaded — that window would let clients fall + # through to the TLS-only "not-published" path and install + # unverified. - uses: softprops/action-gh-release@v2 with: + draft: true generate_release_notes: true files: | artifacts/worktree-* artifacts/SHA256SUMS + + - name: Publish release (flip draft to public) + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REF_NAME: ${{ github.ref_name }} + REPO: ${{ github.repository }} + run: | + gh release edit "$REF_NAME" --repo "$REPO" --draft=false diff --git a/CHANGELOG.md b/CHANGELOG.md index b5a41d3..4e8d91e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,9 +14,21 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed -- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) to address GitHub's deprecation of Node.js 20 actions +- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) to address GitHub's deprecation of Node.js 20 actions. - Release binaries now built with `--minify --sourcemap=none` and stripped of debug symbols (`llvm-strip` on Linux, `strip` on macOS). Binary sizes are slightly smaller; functional behavior unchanged. - Release workflow smoke-tests each built binary via `--version` before publishing to catch regressions. +- `AUTO_UPDATE` set in a project `.worktreerc` now warns once that it is ignored — the flag only takes effect in `~/.worktreerc`, matching the README. 
+- `readConfigFile` is now fail-open on parse errors (warn + return defaults) for both project and global configs, instead of asymmetrically swallowing for project / propagating for global. +- SHA256SUMS verification is centralized in a new `verifyAssetAgainstSums` helper; the foreground `update` command and the background check share the same discriminated-union result contract. + +### Security + +- SHA256SUMS parser now allocates its result map with `Object.create(null)` to block `__proto__`/`constructor`/`prototype` pollution from a tampered sums file. +- SHA256 hash comparison now uses `node:crypto.timingSafeEqual` (C-level constant-time) instead of a userland XOR loop that V8/JSC may short-circuit. +- Release asset downloads now reject responses whose declared `Content-Length` exceeds 200 MB, capping the blast radius of a malicious CDN before SHA verification fires. +- The staging tmp path is now `safeUnlinkSync`-ed before each download, so an attacker-planted symlink in a shared install directory cannot redirect the write to an arbitrary target. Same treatment applies to the sidecar tmp path. +- GitHub release workflow now creates releases as **draft**, uploads all files (binaries + `SHA256SUMS`), and flips to public in a subsequent step — eliminating the window where `releases/latest` returned a non-draft release with binaries attached but `SHA256SUMS` still uploading (which would let clients fall through to the TLS-only "not-published" install path). +- `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). ### Fixed @@ -25,9 +37,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Asset-download phase now streams directly from `fetch` to disk via `Bun.write(destPath, response)` instead of round-tripping through an `ArrayBuffer`. 
- A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle — the next launch retries so users on the lagging arch get updated promptly once the asset is uploaded. - `worktree auto-update` failures caused by a read-only binary directory (`EACCES`/`EPERM`/`EROFS` on the atomic swap) now clean up the staged artifacts and print a one-line `run "sudo worktree update"` hint on stderr instead of looping on every launch. -- `worktree update` now recognises `EACCES` on the download step and surfaces the same `sudo worktree update` hint it already showed on the rename step. +- Non-permission rename failures (`ETXTBSY`, `EXDEV`, `ENOSPC`, `EIO`, `EBUSY`) now clean up the staged artifacts too, so a persistent rename failure can't pin a warning on every single launch. +- Orphan `.worktree.next.meta` sidecar left by an interrupted background stage is now cleaned up on the next launch instead of lingering indefinitely. +- Background check's 24h throttle no longer thrashes if the cache file's `.exists()` probe errors out — the error is logged and the check proceeds as if no check has run, matching the symmetric `.text()` read-error behavior. +- `worktree update` now recognises `EACCES`/`EPERM`/`EROFS` on the download and rename steps (previously only `EACCES`) and surfaces the deepest `Error.cause` message so users see the real errno (`ENOTFOUND`, `ECONNRESET`, etc.) instead of the generic wrapper. - Unlink errors in cleanup paths now distinguish `ENOENT` (silent, expected) from real failures (`EACCES`, `EPERM`, etc.) — real failures emit a dim stderr warning instead of being silently swallowed. -- `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). - Probe-timeout failures now surface as `timed out after 2000ms` instead of the opaque `exit null`. 
- Network errors from `fetchLatestRelease`/`downloadAsset` now preserve the underlying errno chain via `Error.cause`, so callers can classify failures accurately. - The background update child is now spawned with `detached: true` (POSIX `setsid()`), so a slow download isn't cut short when the user's shell/terminal exits — the child finishes the check in its own session. diff --git a/README.md b/README.md index 5972bd1..c858dd3 100644 --- a/README.md +++ b/README.md @@ -96,6 +96,8 @@ Or set `WORKTREE_NO_UPDATE=1` in your environment (useful in CI). Auto-update is a no-op when running via `bun run dev` or in any non-standalone invocation. +Background check failures (network errors, hash mismatches, filesystem issues) are logged to `~/.cache/worktree-cli/last-error` — check this file if auto-updates seem stuck. + ### Manual ```bash diff --git a/src/commands/update.ts b/src/commands/update.ts index 654fa0c..cd0cfcf 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -4,28 +4,20 @@ import pkg from "../../package.json"; import { tryCatch } from "../lib/try-catch"; import { printSuccess, printError, printInfo, COLORS } from "../lib/logger"; import { EXIT_CODES } from "../lib/constants"; -import { safeUnlink } from "../lib/fs-utils"; +import { + classifyWriteError, + deepestMessage, + safeUnlink, +} from "../lib/fs-utils"; import { compareVersions, downloadAsset, fetchLatestRelease, - fetchSha256Sums, getAssetName, isStandalone, - verifyBinaryHash, + verifyAssetAgainstSums, } from "../lib/release"; -function isEaccesError(error: unknown): boolean { - if (!(error instanceof Error)) return false; - if ("code" in error && (error as NodeJS.ErrnoException).code === "EACCES") { - return true; - } - // release.ts wraps errno errors as `new Error(msg, { cause })` — walk the - // chain so EACCES surfaces even when the top-level Error lacks .code. 
- if ("cause" in error && error.cause) return isEaccesError(error.cause); - return false; -} - export const updateCommand = command({ name: "update", desc: "Update worktree CLI to the latest version", @@ -55,8 +47,9 @@ export const updateCommand = command({ await tryCatch(fetchLatestRelease()); if (releaseError || !release) { printError( - releaseError?.message ?? - "Failed to check for updates. Check your internet connection." + releaseError + ? `Failed to check for updates: ${deepestMessage(releaseError)}` + : "Failed to check for updates. Check your internet connection." ); process.exit(EXIT_CODES.ERROR); } @@ -87,50 +80,49 @@ export const updateCommand = command({ printInfo(`Downloading ${assetName}...`); const tmpPath = `${binaryPath}.update-tmp`; + // Clear any pre-existing entry so the write can't follow a planted + // symlink in a shared install dir. + await safeUnlink(tmpPath); const { error: dlError } = await tryCatch( downloadAsset(asset, tmpPath) ); if (dlError) { await safeUnlink(tmpPath); - if (isEaccesError(dlError)) { + if (classifyWriteError(dlError) !== null) { printError("Permission denied. Try: sudo worktree update"); } else { - printError(dlError.message); + printError(deepestMessage(dlError)); } process.exit(EXIT_CODES.ERROR); } - const sums = await fetchSha256Sums(release.assets); - if (sums.kind === "error") { + const verify = await verifyAssetAgainstSums( + tmpPath, + assetName, + release.assets + ); + if (!verify.ok) { await safeUnlink(tmpPath); - printError( - `SHA256SUMS is published but could not be fetched: ${sums.reason}. Refusing to install.` - ); - process.exit(EXIT_CODES.ERROR); - } - if (sums.kind === "ok") { - const expected = sums.sums[assetName]; - if (!expected) { - await safeUnlink(tmpPath); + if (verify.kind === "sums-error") { + printError( + `SHA256SUMS is published but could not be fetched: ${verify.reason}. 
Refusing to install.` + ); + } else if (verify.kind === "missing-entry") { printError( `SHA256SUMS is missing an entry for ${assetName}; refusing to install.` ); - process.exit(EXIT_CODES.ERROR); - } - const result = await verifyBinaryHash(tmpPath, expected); - if (!result.ok) { - await safeUnlink(tmpPath); - if (result.kind === "io-error") { - printError( - `Could not read downloaded binary for hash check: ${result.cause.message}.` - ); - } else { - printError( - `Hash mismatch for ${assetName}; refusing to install.` - ); - } - process.exit(EXIT_CODES.ERROR); + } else if (verify.kind === "hash-io-error") { + printError( + `Could not read downloaded binary for hash check: ${verify.cause.message}.` + ); + } else { + printError( + `Hash mismatch for ${assetName}; refusing to install.` + ); } + process.exit(EXIT_CODES.ERROR); + } + if (verify.hash !== null) { printInfo("Verified SHA256 checksum."); } else { printInfo( @@ -152,10 +144,12 @@ export const updateCommand = command({ ); if (renameError) { await safeUnlink(tmpPath); - if (isEaccesError(renameError)) { + if (classifyWriteError(renameError) !== null) { printError("Permission denied. 
Try: sudo worktree update"); } else { - printError(`Failed to replace binary: ${renameError.message}`); + printError( + `Failed to replace binary: ${deepestMessage(renameError)}` + ); } process.exit(EXIT_CODES.ERROR); } diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index efb9edc..020db99 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -2,16 +2,15 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; import { loadGlobalConfig } from "./config"; -import { safeUnlinkSync } from "./fs-utils"; +import { classifyWriteError, safeUnlinkSync } from "./fs-utils"; import { compareVersions, computeSha256Sync, downloadAsset, fetchLatestRelease, - fetchSha256Sums, getAssetName, isStandalone, - verifyBinaryHash, + verifyAssetAgainstSums, verifyBinaryHashSync, } from "./release"; import { tryCatch, tryCatchSync } from "./try-catch"; @@ -122,7 +121,12 @@ function applyPendingUpdate(): void { try { if (!isStandalone()) return; const stagedPath = getStagingPath(); - if (!fs.existsSync(stagedPath)) return; + if (!fs.existsSync(stagedPath)) { + // Orphan sidecar from an interrupted stage — clean it up so it + // doesn't linger forever waiting for a binary that never comes. + safeUnlinkSync(getMetaSidecarPath()); + return; + } const metaPath = getMetaSidecarPath(); if (!fs.existsSync(metaPath)) { @@ -181,20 +185,27 @@ function applyPendingUpdate(): void { fs.renameSync(stagedPath, process.execPath); }); if (renameError) { - const code = (renameError as NodeJS.ErrnoException).code; - if (code === "EACCES" || code === "EPERM" || code === "EROFS") { - // Cleanup so we don't re-enter this failure on every launch. 
- cleanupStagedArtifacts(); - appendLastError( - "apply", - `rename ${code}: ${renameError.message}` + // Always cleanup so a persistent rename failure (ETXTBSY, EXDEV, + // ENOSPC, EIO, EBUSY, EACCES/EPERM/EROFS) doesn't loop on every + // launch — if rename fails once it's almost certainly going to + // fail again until the user intervenes. + cleanupStagedArtifacts(); + const writeCode = classifyWriteError(renameError); + const rawCode = (renameError as NodeJS.ErrnoException).code; + appendLastError( + "apply", + `rename ${rawCode ?? "unknown"}: ${renameError.message}` + ); + if (writeCode !== null) { + warnApplyFailed( + `binary directory not writable (${writeCode}) — run "sudo worktree update" to install the pending update manually` ); + } else { warnApplyFailed( - `binary directory not writable (${code}) — run "sudo worktree update" to install the pending update manually` + `rename failed (${rawCode ?? renameError.message}) — staged update discarded` ); - return; } - throw renameError; + return; } safeUnlinkSync(metaPath); // Bump the throttle so the sibling scheduleBackgroundUpdateCheck() in @@ -222,7 +233,11 @@ function warnApplyFailed(reason: string): void { async function readLastCheckMs(): Promise { const file = Bun.file(getLastCheckPath()); - const { data: exists } = await tryCatch(file.exists()); + const { data: exists, error: existsError } = await tryCatch(file.exists()); + if (existsError) { + appendLastError("check", `last-check exists: ${existsError.message}`); + return null; + } if (!exists) return null; const { data: text, error } = await tryCatch(file.text()); if (error) { @@ -243,12 +258,7 @@ async function readLastCheckMs(): Promise { async function isAutoUpdateDisabled(): Promise { if (process.env.WORKTREE_NO_UPDATE === "1") return true; - const { data: config, error } = await tryCatch(loadGlobalConfig()); - if (error) { - appendLastError("check", `config load: ${error.message}`); - return true; - } - if (!config) return true; + const config = 
await loadGlobalConfig(); return config.AUTO_UPDATE === false; } @@ -344,6 +354,9 @@ async function runBackgroundUpdateCheck(): Promise { `${STAGING_FILENAME}.${process.pid}.tmp` ); + // Clear any pre-existing entry (symlink / leftover tmp) so the subsequent + // write can't follow a planted symlink to an attacker-chosen target. + safeUnlinkSync(tmpPath); const { error: dlError } = await tryCatch(downloadAsset(asset, tmpPath)); if (dlError) { safeUnlinkSync(tmpPath); @@ -353,43 +366,35 @@ async function runBackgroundUpdateCheck(): Promise { // Verify integrity BEFORE making the binary executable or running it. // Running an unverified binary (even just `--version`) is code execution. - const sums = await fetchSha256Sums(release.assets); - if (sums.kind === "error") { + const verify = await verifyAssetAgainstSums( + tmpPath, + assetName, + release.assets + ); + if (!verify.ok) { safeUnlinkSync(tmpPath); - appendLastError( - "check", - `SHA256SUMS fetch failed — refusing to stage: ${sums.reason}` - ); - return; - } - - let verifiedHash: string | null = null; - if (sums.kind === "ok") { - const expected = sums.sums[assetName]; - if (!expected) { - safeUnlinkSync(tmpPath); + if (verify.kind === "sums-error") { + appendLastError( + "check", + `SHA256SUMS fetch failed — refusing to stage: ${verify.reason}` + ); + } else if (verify.kind === "missing-entry") { appendLastError( "check", `SHA256SUMS missing entry for ${assetName}` ); - return; - } - const verify = await verifyBinaryHash(tmpPath, expected); - if (!verify.ok) { - safeUnlinkSync(tmpPath); - if (verify.kind === "io-error") { - appendLastError( - "check", - `hash io-error for ${assetName}: ${verify.cause.message}` - ); - } else { - appendLastError("check", `hash mismatch for ${assetName}`); - } - return; + } else if (verify.kind === "hash-io-error") { + appendLastError( + "check", + `hash io-error for ${assetName}: ${verify.cause.message}` + ); + } else { + appendLastError("check", `hash mismatch for 
${assetName}`); } - verifiedHash = expected.toLowerCase(); + return; } - // sums.kind === "not-published" → legacy release without SHA256SUMS; trust TLS. + // verify.hash === null → legacy release without SHA256SUMS; trust TLS. + let verifiedHash: string | null = verify.hash; const { error: chmodError } = tryCatchSync(function () { fs.chmodSync(tmpPath, 0o755); @@ -435,6 +440,8 @@ async function runBackgroundUpdateCheck(): Promise { version: release.version, sha256: verifiedHash, }); + // Same symlink-safety treatment as the binary tmpPath. + safeUnlinkSync(metaTmpPath); const { error: metaWriteError } = tryCatchSync(function () { fs.writeFileSync(metaTmpPath, sidecarContent); }); diff --git a/src/lib/config.ts b/src/lib/config.ts index 2bfbdbe..f25f135 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -2,7 +2,7 @@ import { z } from "zod"; import os from "node:os"; import path from "node:path"; import { DEFAULT_WORKTREE_DIR } from "./constants"; -import { tryCatch } from "./try-catch"; +import { tryCatch, tryCatchSync } from "./try-catch"; const booleanLike = z .union([z.boolean(), z.string()]) @@ -78,44 +78,50 @@ function warnOnce(filePath: string, message: string): void { console.error(message); } -async function readConfigFile(filePath: string): Promise { +type ConfigScope = "project" | "global"; + +async function readConfigFile( + filePath: string, + scope: ConfigScope +): Promise { const file = Bun.file(filePath); const isExists = await file.exists(); if (!isExists) return validateConfig({}); const display = displayPath(filePath); - const { data: content, error } = await tryCatch(file.text()); - if (error) { + const { data: content, error: readError } = await tryCatch(file.text()); + if (readError) { warnOnce( filePath, - `warning: could not read ${display}: ${error.message}. Using defaults.` + `warning: could not read ${display}: ${readError.message}. 
Using defaults.` ); return validateConfig({}); } - const { data: parsed, error: parseError } = await tryCatch( - Promise.resolve().then(function () { - return validateConfig(parseConfigContent(content)); - }) - ); + const raw = parseConfigContent(content); + if (scope === "project" && "AUTO_UPDATE" in raw) { + warnOnce( + `${filePath}:AUTO_UPDATE`, + `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead.` + ); + } + const { data: parsed, error: parseError } = tryCatchSync(function () { + return validateConfig(raw); + }); if (parseError) { warnOnce( filePath, - `warning: ${display} is invalid: ${parseError.message}.` + `warning: ${display} is invalid: ${parseError.message}. Using defaults.` ); - throw parseError; + return validateConfig({}); } return parsed; } async function loadConfig(root: string): Promise { - const { data, error } = await tryCatch( - readConfigFile(path.join(root, ".worktreerc")) - ); - if (error || !data) return validateConfig({}); - return data; + return readConfigFile(path.join(root, ".worktreerc"), "project"); } async function loadGlobalConfig(): Promise { - return readConfigFile(path.join(os.homedir(), ".worktreerc")); + return readConfigFile(path.join(os.homedir(), ".worktreerc"), "global"); } export { loadConfig, loadGlobalConfig, parseConfigContent, validateConfig }; diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts index e646cf3..ddf7d3b 100644 --- a/src/lib/fs-utils.ts +++ b/src/lib/fs-utils.ts @@ -34,4 +34,35 @@ function safeUnlinkSync(filePath: string): void { } } -export { safeUnlink, safeUnlinkSync }; +type WriteErrorCode = "EACCES" | "EPERM" | "EROFS"; + +// release.ts wraps errno errors as `new Error(msg, { cause })` — walk the +// cause chain so the "binary directory not writable" branch fires even +// when the top-level Error lacks a .code field. 
+function classifyWriteError(error: unknown): WriteErrorCode | null { + let cur: unknown = error; + while (cur instanceof Error) { + if ("code" in cur) { + const code = (cur as NodeJS.ErrnoException).code; + if (code === "EACCES" || code === "EPERM" || code === "EROFS") { + return code; + } + } + cur = cur.cause; + } + return null; +} + +// Walk `Error.cause` to the deepest leaf so the original errno message +// (ENOTFOUND, ECONNRESET, ETIMEDOUT) surfaces to the user instead of the +// generic wrapper ("Failed to reach GitHub releases API"). +function deepestMessage(error: unknown): string { + let cur: unknown = error; + while (cur instanceof Error && cur.cause !== undefined) { + cur = cur.cause; + } + return cur instanceof Error ? cur.message : String(cur); +} + +export { classifyWriteError, deepestMessage, safeUnlink, safeUnlinkSync }; +export type { WriteErrorCode }; diff --git a/src/lib/release.ts b/src/lib/release.ts index 6724ae4..c42acba 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -1,4 +1,5 @@ import fs from "node:fs"; +import { timingSafeEqual } from "node:crypto"; import { tryCatch, tryCatchSync } from "./try-catch"; const REPO = "bhagyamudgal/worktree-cli"; @@ -6,6 +7,10 @@ const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/lates const DEFAULT_META_TIMEOUT_MS = 30_000; const DEFAULT_ASSET_TIMEOUT_MS = 600_000; +// Hard cap on downloaded release assets. Current binaries are ~50 MB, so +// 200 MB leaves 4× headroom while still rejecting a malicious CDN response +// that would fill the binary-dir filesystem before SHA verification fires. 
+const MAX_ASSET_BYTES = 200 * 1024 * 1024; type ReleaseAsset = { name: string; @@ -160,6 +165,18 @@ async function downloadAsset( `Download ${asset.name} failed: ${response.status} ${response.statusText}` ); } + const contentLength = response.headers.get("content-length"); + if (contentLength !== null) { + const declared = Number(contentLength); + if ( + Number.isFinite(declared) && + declared > MAX_ASSET_BYTES + ) { + throw new Error( + `Download ${asset.name} refused: declared size ${declared} bytes exceeds cap ${MAX_ASSET_BYTES} bytes` + ); + } + } await Bun.write(destPath, response); } ) @@ -239,11 +256,9 @@ function verifyBinaryHashSync( function constantTimeEquals(a: string, b: string): boolean { if (a.length !== b.length) return false; - let diff = 0; - for (let i = 0; i < a.length; i++) { - diff |= a.charCodeAt(i) ^ b.charCodeAt(i); - } - return diff === 0; + // node:crypto.timingSafeEqual is a C-level constant-time compare — + // strictly better than a userland XOR loop that V8/JSC may short-circuit. + return timingSafeEqual(Buffer.from(a), Buffer.from(b)); } type Sha256SumsResult = @@ -287,8 +302,51 @@ async function fetchSha256Sums( return result; } +// Unified entry point for "download is on disk — verify it against SHA256SUMS +// before we chmod/rename/execute". Both the foreground `update` command and +// the background check need this exact flow; keeping it in one place means a +// future tweak (e.g., stricter not-published handling) lands everywhere. 
+type VerifyAssetResult = + | { ok: true; hash: string | null } // hash === null when SHA256SUMS isn't published + | { ok: false; kind: "sums-error"; reason: string } + | { ok: false; kind: "missing-entry" } + | { ok: false; kind: "hash-io-error"; cause: Error } + | { ok: false; kind: "hash-mismatch" }; + +async function verifyAssetAgainstSums( + tmpPath: string, + assetName: string, + assets: ReleaseAsset[] +): Promise { + const sums = await fetchSha256Sums(assets); + if (sums.kind === "error") { + return { ok: false, kind: "sums-error", reason: sums.reason }; + } + if (sums.kind === "not-published") { + return { ok: true, hash: null }; + } + const expected = sums.sums[assetName]; + if (!expected) { + return { ok: false, kind: "missing-entry" }; + } + const hashResult = await verifyBinaryHash(tmpPath, expected); + if (!hashResult.ok) { + if (hashResult.kind === "io-error") { + return { + ok: false, + kind: "hash-io-error", + cause: hashResult.cause, + }; + } + return { ok: false, kind: "hash-mismatch" }; + } + return { ok: true, hash: expected.toLowerCase() }; +} + function parseSha256Sums(text: string): Record { - const result: Record = {}; + // Object.create(null) blocks __proto__/constructor/prototype key + // pollution from an attacker-substituted SHA256SUMS file. 
const result: Record<string, string> = Object.create(null);
- GitHub release workflow now creates releases as **draft**, uploads all files (binaries + `SHA256SUMS`), and flips to public in a subsequent step — eliminating the window where `releases/latest` returned a non-draft release with binaries attached but `SHA256SUMS` still uploading (which would let clients fall through to the TLS-only "not-published" install path). - `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). +- `release.version` is now validated against the sidecar regex before being written, matching the reader's strictness — closes a writer/reader asymmetry that would be exploitable if the parser's rules ever loosen. +- Concurrent-launch race on staged-update commit: the window between "sidecar committed" and "binary committed" is now protected by a 60-second mtime grace period. `applyPendingUpdate` no longer reaps an apparently-orphan file that is fresh enough to be a concurrent producer still mid-commit, so simultaneous terminal launches no longer silently discard a correctly hash-verified staged binary. +- Asset downloads now stream the body through a manual reader that enforces `MAX_ASSET_BYTES` during buffering, not only after — a CDN omitting or falsifying `Content-Length` can no longer balloon RAM before the size check fires. Also propagates the fetch `AbortSignal` into the body read so a slowloris response can't stretch past the 600s timeout. +- GitHub API fetches now send `User-Agent` (`worktree-cli/vX.Y.Z`), `Accept: application/vnd.github+json`, and `X-GitHub-Api-Version: 2022-11-28`. Setting `GITHUB_TOKEN` in the environment raises the rate limit from 60/hr (anonymous) to 5000/hr (authenticated). +- `isAutoUpdateDisabled` now fails closed on broken `~/.worktreerc` — a typo'd config no longer silently re-enables auto-update against a user's explicit opt-out. 
### Fixed @@ -45,6 +50,16 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Probe-timeout failures now surface as `timed out after 2000ms` instead of the opaque `exit null`. - Network errors from `fetchLatestRelease`/`downloadAsset` now preserve the underlying errno chain via `Error.cause`, so callers can classify failures accurately. - The background update child is now spawned with `detached: true` (POSIX `setsid()`), so a slow download isn't cut short when the user's shell/terminal exits — the child finishes the check in its own session. +- `applyPendingUpdate`'s outer catch now rethrows non-errno errors so programmer bugs surface with a stack trace via Bun's unhandled handler instead of being silently reduced to a dim warning. Matches the discipline in `scheduleBackgroundUpdateCheck`. +- The detached background-check child's stderr is now appended to `~/.cache/worktree-cli/last-error` (previously `"ignore"`). Unhandled throws from `runBackgroundUpdateCheck` now surface a full stack trace on the next foreground launch instead of failing silently. The internal command handler also wraps the run in a top-level try/catch that appends a panic trace before exiting non-zero. +- Parent's stderr file descriptor is now closed in a `finally` block, so a synchronous `Bun.spawn` failure (`EMFILE`, `EPERM`, `EAGAIN`) no longer leaks the fd on every foreground launch. +- Empty catches in the log-write helpers (`appendLastError`, `rotateErrorLogIfOversized`) now emit a one-shot dim stderr warning if `~/.cache/worktree-cli/last-error` itself is unwritable — diagnostics-of-diagnostics can no longer disappear silently. +- `fetchSha256Sums` error results now include a `retryable` boolean distinguishing transient (5xx, network) failures from permanent (4xx) ones; callers can use the hint to choose retry semantics without re-inspecting status strings. 
+ +### Tests + +- Added coverage for `verifyAssetAgainstSums` across all six result shapes (legacy, normal, 5xx retryable, 4xx permanent, missing-entry, hash-mismatch, hash-io-error) via stubbed `fetch` and a precomputed-hash asset file — pins the tri-state safety contract against future refactors. +- Added coverage for `parseSha256Sums` duplicate-filename rejection (defense-in-depth against tampered mirrors). ## [1.2.0] - 2026-04-17 diff --git a/src/commands/internal-update-check.ts b/src/commands/internal-update-check.ts index 1576c7e..5cb5850 100644 --- a/src/commands/internal-update-check.ts +++ b/src/commands/internal-update-check.ts @@ -1,5 +1,6 @@ import { command } from "@drizzle-team/brocli"; import { + appendBackgroundCheckPanic, INTERNAL_CHECK_SUBCOMMAND, runBackgroundUpdateCheck, } from "../lib/auto-update"; @@ -9,6 +10,16 @@ export const internalUpdateCheckCommand = command({ desc: "", hidden: true, handler: async () => { - await runBackgroundUpdateCheck(); + // Silent detached child has no error surface by design + // (stdio/stderr redirected in scheduleBackgroundUpdateCheck). Wrap + // the whole run so any unhandled throw from runBackgroundUpdateCheck + // appends a full stack trace to last-error before exiting non-zero. + // Without this, a programmer bug here is invisible in production. 
+ try { + await runBackgroundUpdateCheck(); + } catch (error) { + appendBackgroundCheckPanic(error); + process.exit(1); + } }, }); diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index 020db99..9169efd 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -1,7 +1,7 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; -import { loadGlobalConfig } from "./config"; +import { shouldAutoUpdate } from "./config"; import { classifyWriteError, safeUnlinkSync } from "./fs-utils"; import { compareVersions, @@ -27,6 +27,11 @@ const PROBE_STDERR_TRUNCATE_BYTES = 500; const INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; const SIDECAR_VERSION_PATTERN = /^\d+\.\d+\.\d+(?:-[\w.-]+)?$/; const SIDECAR_HASH_PATTERN = /^[0-9a-f]{64}$/; +// Window in which a "partial" stage (sidecar without binary, or binary +// without sidecar) may be a concurrent producer mid-commit rather than a +// genuine orphan. applyPendingUpdate defers reaping anything fresher than +// this so a simultaneous-launch race can't discard a correct staged update. +const STAGING_ORPHAN_GRACE_MS = 60 * 1000; function getBinaryDir(): string { return path.dirname(process.execPath); @@ -56,6 +61,25 @@ function ensureCacheDir(): void { fs.mkdirSync(getCacheDir(), { recursive: true }); } +let hasWarnedAboutLogFailure = false; + +function warnLogFailureOnce(reason: string): void { + if (hasWarnedAboutLogFailure) return; + hasWarnedAboutLogFailure = true; + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update error log unwritable (${reason}) — diagnostics unavailable${RESET}` + ); +} + +function appendBackgroundCheckPanic(error: unknown): void { + const detail = + error instanceof Error + ? `${error.name}: ${error.message}\n${error.stack ?? 
""}` + : String(error); + appendLastError("check", `PANIC — ${detail.replace(/\n/g, "\n ")}`); +} + function appendLastError(kind: "apply" | "check", message: string): void { try { ensureCacheDir(); @@ -63,8 +87,10 @@ function appendLastError(kind: "apply" | "check", message: string): void { const logPath = getLastErrorPath(); rotateErrorLogIfOversized(logPath); fs.appendFileSync(logPath, line); - } catch { - // never let the error log itself block anything + } catch (error) { + warnLogFailureOnce( + error instanceof Error ? error.message : String(error) + ); } } @@ -78,8 +104,10 @@ function rotateErrorLogIfOversized(logPath: string): void { }); const kept = lines.slice(-ERROR_LOG_KEEP_LINES).join("\n") + "\n"; fs.writeFileSync(logPath, kept); - } catch { - // best-effort rotation — if stat/read/write fails, fall through + } catch (error) { + warnLogFailureOnce( + error instanceof Error ? error.message : String(error) + ); } } @@ -115,22 +143,40 @@ function cleanupStagedArtifacts(): void { safeUnlinkSync(getMetaSidecarPath()); } +function isWithinGracePeriod(filePath: string): boolean { + const { data: stat } = tryCatchSync(function () { + return fs.statSync(filePath); + }); + if (!stat) return false; + return Date.now() - stat.mtimeMs < STAGING_ORPHAN_GRACE_MS; +} + function applyPendingUpdate(): void { // Sync on purpose: runs before brocli.run; avoids top-level await. if (process.env.WORKTREE_NO_UPDATE === "1") return; try { if (!isStandalone()) return; const stagedPath = getStagingPath(); + const metaPath = getMetaSidecarPath(); if (!fs.existsSync(stagedPath)) { - // Orphan sidecar from an interrupted stage — clean it up so it - // doesn't linger forever waiting for a binary that never comes. - safeUnlinkSync(getMetaSidecarPath()); + // Sidecar without binary. Two possible causes: + // (a) a concurrent producer just committed the sidecar and is + // about to rename the binary into place; or + // (b) a crashed/abandoned previous stage left only the sidecar. 
+ // Differentiate by mtime — within the grace window, leave alone + // so the producer can complete; past the window, treat as orphan + // and reap. + if (isWithinGracePeriod(metaPath)) return; + safeUnlinkSync(metaPath); return; } - const metaPath = getMetaSidecarPath(); if (!fs.existsSync(metaPath)) { - // Staging was partial; clean up the orphaned binary. + // Binary without sidecar. Same producer-mid-commit vs orphan + // ambiguity — grace period prevents a concurrent launch from + // discarding a correctly-staged binary whose sidecar is about + // to land. + if (isWithinGracePeriod(stagedPath)) return; safeUnlinkSync(stagedPath); appendLastError( "apply", @@ -218,9 +264,16 @@ function applyPendingUpdate(): void { `worktree ${GREEN}${BOLD}auto-updated${RESET} to ${BOLD}v${meta.version}${RESET}` ); } catch (error) { - const message = error instanceof Error ? error.message : String(error); - appendLastError("apply", message); - warnApplyFailed(message); + // Only swallow errno-style I/O errors. Programmer bugs (TypeError, + // ReferenceError, unexpected throws from parseSidecar, etc.) must + // propagate so Bun's unhandled handler surfaces a stack trace — + // otherwise a regression here degrades to a cryptic DIM warning + // forever. Matches the discipline in scheduleBackgroundUpdateCheck. + if (!(error instanceof Error) || !("code" in error)) { + throw error; + } + appendLastError("apply", error.message); + warnApplyFailed(error.message); } } @@ -258,8 +311,9 @@ async function readLastCheckMs(): Promise { async function isAutoUpdateDisabled(): Promise { if (process.env.WORKTREE_NO_UPDATE === "1") return true; - const config = await loadGlobalConfig(); - return config.AUTO_UPDATE === false; + // shouldAutoUpdate fails CLOSED on broken ~/.worktreerc so a typo'd + // config doesn't silently override a user's intended opt-out. 
+ return !(await shouldAutoUpdate()); } async function scheduleBackgroundUpdateCheck(): Promise { @@ -279,16 +333,37 @@ async function scheduleBackgroundUpdateCheck(): Promise { // successful check completes. Simultaneous launches may both spawn // (accepted trade-off — worst case 2 API calls within the anon 60/hr // limit) but a failed check never burns the 24h window. - Bun.spawn({ - cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], - stdio: ["ignore", "ignore", "ignore"], - stderr: "ignore", - stdout: "ignore", - stdin: "ignore", - // POSIX: setsid() — survives terminal close (SIGHUP) so a slow - // download isn't cut short when the user's shell exits. - detached: true, - }).unref(); + // + // Child stderr is appended to last-error (not "ignore") so an + // unhandled throw inside runBackgroundUpdateCheck surfaces with a + // stack trace on the next foreground launch — otherwise programmer + // bugs in the background path are invisible in production. + const { data: stderrFd } = tryCatchSync(function () { + ensureCacheDir(); + return fs.openSync(getLastErrorPath(), "a"); + }); + try { + Bun.spawn({ + cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], + stdin: "ignore", + stdout: "ignore", + stderr: stderrFd ?? "ignore", + // POSIX: setsid() — survives terminal close (SIGHUP) so a + // slow download isn't cut short when the user's shell exits. + detached: true, + }).unref(); + } finally { + // Close the parent's copy regardless of spawn outcome. Bun.spawn + // throws synchronously on spawn failures (EMFILE, EPERM, EAGAIN) + // before the catch below runs, so without the finally a spawn + // failure would leak the fd on every foreground launch. + if (stderrFd !== null) { + const inheritedFd = stderrFd; + tryCatchSync(function () { + fs.closeSync(inheritedFd); + }); + } + } } catch (error) { // Only swallow errno-style errors (spawn failures, fs errors). 
Let // programmer bugs (TypeError, ReferenceError) propagate so they @@ -393,7 +468,10 @@ async function runBackgroundUpdateCheck(): Promise { } return; } - // verify.hash === null → legacy release without SHA256SUMS; trust TLS. + // verify.hash === null → legacy release without SHA256SUMS. Upstream + // integrity rests on TLS alone; no end-to-end hash exists to compare + // against. See the sidecar-commit comment below for the narrower + // guarantee we can still provide for legacy releases. let verifiedHash: string | null = verify.hash; const { error: chmodError } = tryCatchSync(function () { @@ -413,8 +491,12 @@ async function runBackgroundUpdateCheck(): Promise { } // Legacy releases without SHA256SUMS can't stage via the re-verify path - // because applyPendingUpdate needs a hash to check. Compute it ourselves - // from the probed binary so apply can still run. + // because applyPendingUpdate needs a hash to check. Self-compute from + // the downloaded bytes so apply can still run — note this hash is NOT + // an end-to-end integrity check (same bytes generate the hash being + // compared against). Its only guarantee is detecting stage→apply + // on-disk corruption; upstream tampering for legacy releases is + // blocked only by TLS. if (verifiedHash === null) { const { data: computed, error: hashError } = tryCatchSync(function () { return computeSha256Sync(tmpPath); @@ -430,8 +512,18 @@ async function runBackgroundUpdateCheck(): Promise { verifiedHash = computed; } - // Commit sidecar BEFORE the binary rename so applyPendingUpdate never - // sees a staged binary without its metadata. + // Validate server-controlled release.version BEFORE writing — keeps the + // writer in lockstep with the reader's SIDECAR_VERSION_PATTERN check so a + // future parser relaxation can't turn a crafted tag into a hash-spoof. 
+ if (!SIDECAR_VERSION_PATTERN.test(release.version)) { + safeUnlinkSync(tmpPath); + appendLastError( + "check", + `invalid release version for sidecar: ${JSON.stringify(release.version.slice(0, 40))}` + ); + return; + } + const metaTmpPath = path.join( binaryDir, `${META_SIDECAR_FILENAME}.${process.pid}.tmp` @@ -520,6 +612,7 @@ function decodeProbeStderr(stderr: unknown): string { } export { + appendBackgroundCheckPanic, applyPendingUpdate, scheduleBackgroundUpdateCheck, runBackgroundUpdateCheck, diff --git a/src/lib/config.ts b/src/lib/config.ts index f25f135..c1f298e 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -124,5 +124,31 @@ async function loadGlobalConfig(): Promise { return readConfigFile(path.join(os.homedir(), ".worktreerc"), "global"); } -export { loadConfig, loadGlobalConfig, parseConfigContent, validateConfig }; +// Auto-update reads config on every background check and must fail CLOSED +// on parse/read errors. The generic readConfigFile falls back to defaults +// (AUTO_UPDATE: true), which would silently override a user's explicit +// opt-out whenever their config has a typo. This helper returns the +// narrow signal "should auto-update fire?" with fail-closed semantics. 
async function shouldAutoUpdate(): Promise<boolean> {
in test setup. + const ASSET_SHA = + "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"; + const ASSET_NAME = "worktree-darwin-arm64"; + + let tmpFile: string; + let originalFetch: typeof globalThis.fetch; + + beforeEach(() => { + tmpFile = path.join( + os.tmpdir(), + `verify-test-${process.pid}-${Date.now()}` + ); + fs.writeFileSync(tmpFile, ASSET_BYTES); + originalFetch = globalThis.fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + try { + fs.unlinkSync(tmpFile); + } catch { + // best-effort cleanup + } + }); + + function makeAsset(name: string): ReleaseAsset { + return { + name, + browser_download_url: `https://example.invalid/${name}`, + }; + } + + function resolveFetchUrl(input: RequestInfo | URL): string { + if (typeof input === "string") return input; + if (input instanceof URL) return input.toString(); + return input.url; + } + + function stubFetch(handler: (url: string) => Response): void { + globalThis.fetch = async function ( + input: RequestInfo | URL + ): Promise { + return handler(resolveFetchUrl(input)); + } as typeof globalThis.fetch; + } + + it("returns ok with null hash when SHA256SUMS is not published (legacy)", async () => { + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + ]); + expect(result).toEqual({ ok: true, hash: null }); + }); + + it("returns ok with lowercase hex when SHA256SUMS contains the entry", async () => { + const sumsBody = `${ASSET_SHA} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: true, hash: ASSET_SHA }); + }); + + it("returns sums-error with retryable flag when SHA256SUMS fetch fails 5xx", async () => { + stubFetch(function () { + return new Response("bad gateway", { + status: 502, + statusText: "Bad Gateway", + }); + }); + const 
result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(true); + expect(result.reason).toContain("502"); + }); + + it("returns sums-error marked non-retryable on 4xx (permanent)", async () => { + stubFetch(function () { + return new Response("not found", { + status: 404, + statusText: "Not Found", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(false); + }); + + it("returns missing-entry when SHA256SUMS exists but has no row for asset", async () => { + const sumsBody = `${ASSET_SHA} some-other-asset\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: false, kind: "missing-entry" }); + }); + + it("returns hash-mismatch when entry exists but content differs", async () => { + const wrongHash = "0".repeat(64); + const sumsBody = `${wrongHash} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: false, kind: "hash-mismatch" }); + }); + + it("returns hash-io-error when the binary file is unreadable", async () => { + const sumsBody = `${ASSET_SHA} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await 
verifyAssetAgainstSums( + "/nonexistent/path/file.bin", + ASSET_NAME, + [makeAsset(ASSET_NAME), makeAsset("SHA256SUMS")] + ); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("hash-io-error"); + }); }); diff --git a/src/lib/release.ts b/src/lib/release.ts index c42acba..e81edd3 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -1,10 +1,28 @@ import fs from "node:fs"; import { timingSafeEqual } from "node:crypto"; import { tryCatch, tryCatchSync } from "./try-catch"; +import pkg from "../../package.json"; const REPO = "bhagyamudgal/worktree-cli"; const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/latest`; +// GitHub REST API requires a User-Agent. Default Bun UA works today but is +// fragile under future GitHub policy. Identifying as worktree-cli also +// makes this client traceable in logs if something ever misbehaves. +// When GITHUB_TOKEN is set, rate limit bumps from 60/hr (anon) to 5000/hr. +function buildGitHubHeaders(): Record { + const headers: Record = { + "User-Agent": `worktree-cli/${pkg.version}`, + Accept: "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + }; + const token = process.env.GITHUB_TOKEN; + if (token && token.length > 0) { + headers.Authorization = `Bearer ${token}`; + } + return headers; +} + const DEFAULT_META_TIMEOUT_MS = 30_000; const DEFAULT_ASSET_TIMEOUT_MS = 600_000; // Hard cap on downloaded release assets. 
Current binaries are ~50 MB, so @@ -113,7 +131,10 @@ async function withTimeout( controller.abort(); }, timeoutMs); try { - const response = await fetch(url, { signal: controller.signal }); + const response = await fetch(url, { + signal: controller.signal, + headers: buildGitHubHeaders(), + }); return await handler(response); } finally { clearTimeout(timer); @@ -177,7 +198,46 @@ async function downloadAsset( ); } } - await Bun.write(destPath, response); + if (!response.body) { + throw new Error( + `Download ${asset.name} refused: empty response body` + ); + } + // Stream the body into a growing chunk list, tracking bytes + // received so we can bail out BEFORE buffering past the cap + // when Content-Length is absent or forged. A plain + // `response.arrayBuffer()` would buffer the whole body + // before any cap check; a plain `Bun.write(destPath, response)` + // doesn't propagate the fetch AbortSignal reliably into the + // body read. Manual reader gives us both bounds. + const reader = response.body.getReader(); + const chunks: Uint8Array[] = []; + let bytesReceived = 0; + try { + while (true) { + const { done, value } = await reader.read(); + if (done) break; + if (!value) continue; + bytesReceived += value.byteLength; + if (bytesReceived > MAX_ASSET_BYTES) { + throw new Error( + `Download ${asset.name} exceeded cap: ${bytesReceived} bytes > ${MAX_ASSET_BYTES} bytes` + ); + } + chunks.push(value); + } + } finally { + tryCatchSync(function () { + reader.releaseLock(); + }); + } + const buffer = new Uint8Array(bytesReceived); + let offset = 0; + for (const chunk of chunks) { + buffer.set(chunk, offset); + offset += chunk.byteLength; + } + await Bun.write(destPath, buffer); } ) ); @@ -264,7 +324,14 @@ function constantTimeEquals(a: string, b: string): boolean { type Sha256SumsResult = | { kind: "not-published" } | { kind: "ok"; sums: Record } - | { kind: "error"; reason: string }; + | { kind: "error"; reason: string; retryable: boolean }; + +// 5xx / network errors 
are retryable (transient CDN or server issues). +// 4xx responses are permanent (missing asset, auth problem). Callers can +// use this to decide whether to skip burning the 24h throttle. +function isRetryableHttpStatus(status: number): boolean { + return status >= 500 && status < 600; +} async function fetchSha256Sums( assets: ReleaseAsset[], @@ -283,6 +350,7 @@ async function fetchSha256Sums( return { kind: "error", reason: `${response.status} ${response.statusText}`, + retryable: isRetryableHttpStatus(response.status), }; } const text = await response.text(); @@ -290,6 +358,8 @@ async function fetchSha256Sums( return { kind: "error", reason: "empty SHA256SUMS body", + // Likely mid-publish or transient; next launch retries. + retryable: true, }; } return { kind: "ok", sums: parseSha256Sums(text) }; @@ -297,7 +367,12 @@ async function fetchSha256Sums( ) ); if (error || !result) { - return { kind: "error", reason: error?.message ?? "network error" }; + return { + kind: "error", + reason: error?.message ?? "network error", + // Network errors (DNS, abort, fetch throw) are transient. + retryable: true, + }; } return result; } @@ -308,7 +383,7 @@ async function fetchSha256Sums( // future tweak (e.g., stricter not-published handling) lands everywhere. 
type VerifyAssetResult = | { ok: true; hash: string | null } // hash === null when SHA256SUMS isn't published - | { ok: false; kind: "sums-error"; reason: string } + | { ok: false; kind: "sums-error"; reason: string; retryable: boolean } | { ok: false; kind: "missing-entry" } | { ok: false; kind: "hash-io-error"; cause: Error } | { ok: false; kind: "hash-mismatch" }; @@ -320,7 +395,12 @@ async function verifyAssetAgainstSums( ): Promise { const sums = await fetchSha256Sums(assets); if (sums.kind === "error") { - return { ok: false, kind: "sums-error", reason: sums.reason }; + return { + ok: false, + kind: "sums-error", + reason: sums.reason, + retryable: sums.retryable, + }; } if (sums.kind === "not-published") { return { ok: true, hash: null }; From 46caeb136ffa741720dbc0dd20e0904a564100d0 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sun, 19 Apr 2026 00:02:05 +0530 Subject: [PATCH 09/15] =?UTF-8?q?fix:=20harden=20PR=20#5=20from=20self-rev?= =?UTF-8?q?iew=20=E2=80=94=20silent=20downgrade,=20host=20pinning,=20fail-?= =?UTF-8?q?loud=20config=20(F1-F23)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/release.yml | 16 +++ CHANGELOG.md | 2 +- README.md | 14 +-- src/commands/update.ts | 18 +++- src/index.ts | 47 ++++++++- src/lib/auto-update.ts | 112 ++++++++++++++++++-- src/lib/config.ts | 50 ++++++--- src/lib/fs-utils.ts | 19 +++- src/lib/release.test.ts | 185 +++++++++++++++++++++++++++++++++- src/lib/release.ts | 83 ++++++++++++++- 10 files changed, 503 insertions(+), 43 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 61ab04a..1da5a1b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -66,6 +66,8 @@ jobs: ls -lh dist/worktree-${{ matrix.target }} - name: Smoke-test --version + env: + WORKTREE_NO_UPDATE: "1" run: | chmod +x dist/worktree-${{ matrix.target }} ./dist/worktree-${{ matrix.target }} --version @@ -95,8 +97,22 
@@ jobs: - name: Aggregate SHA256SUMS run: | cd artifacts + # Fail-fast: if no .sha256 sidecars were downloaded (e.g., + # an upload-artifact rename regression), the cat below + # would silently produce an empty SHA256SUMS that bricks + # every client's auto-update install. + count=$(ls -1 worktree-*.sha256 2>/dev/null | wc -l | tr -d ' ') + expected=4 + if [ "$count" != "$expected" ]; then + echo "expected $expected .sha256 files, got $count" >&2 + exit 1 + fi cat worktree-*.sha256 > SHA256SUMS rm worktree-*.sha256 + if [ ! -s SHA256SUMS ]; then + echo "SHA256SUMS empty after aggregation — abort" >&2 + exit 1 + fi cat SHA256SUMS - run: chmod +x artifacts/worktree-* diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f23334..22d58b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,7 +26,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - SHA256SUMS parser now allocates its result map with `Object.create(null)` to block `__proto__`/`constructor`/`prototype` pollution from a tampered sums file. - SHA256 hash comparison now uses `node:crypto.timingSafeEqual` (C-level constant-time) instead of a userland XOR loop that V8/JSC may short-circuit. - Release asset downloads now reject responses whose declared `Content-Length` exceeds 200 MB, capping the blast radius of a malicious CDN before SHA verification fires. -- The staging tmp path is now `safeUnlinkSync`-ed before each download, so an attacker-planted symlink in a shared install directory cannot redirect the write to an arbitrary target. Same treatment applies to the sidecar tmp path. +- The staging tmp path is now `safeUnlinkSync`-ed before each download — best-effort defense against a planted symlink in a shared install directory. (Note: this is *not* race-free: an attacker who can write to the binary directory could re-plant the symlink between the unlink and the subsequent write. Closing that race fully would require an `O_EXCL | O_NOFOLLOW` open. 
Same treatment applies to the sidecar tmp path.) - GitHub release workflow now creates releases as **draft**, uploads all files (binaries + `SHA256SUMS`), and flips to public in a subsequent step — eliminating the window where `releases/latest` returned a non-draft release with binaries attached but `SHA256SUMS` still uploading (which would let clients fall through to the TLS-only "not-published" install path). - `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). - `release.version` is now validated against the sidecar regex before being written, matching the reader's strictness — closes a writer/reader asymmetry that would be exploitable if the parser's rules ever loosen. diff --git a/README.md b/README.md index c858dd3..899f278 100644 --- a/README.md +++ b/README.md @@ -62,13 +62,15 @@ On `remove`, it: ## Config -The `.worktreerc` file (in the repo root for project settings, in `~/` for user settings) supports: +`.worktreerc` keys are read from one of two locations depending on the key: -| Key | Description | Example | -|-----|-------------|---------| -| `DEFAULT_BASE` | Default base branch for new worktrees | `origin/dev` | -| `WORKTREE_DIR` | Directory name for worktrees (default: `.worktrees`) | `.worktrees` | -| `AUTO_UPDATE` | Enable background auto-update checks in `~/.worktreerc` only (default: `true`) | `false` | +| Key | Description | Where | Example | +|-----|-------------|-------|---------| +| `DEFAULT_BASE` | Default base branch for new worktrees | Project (`/.worktreerc`) | `origin/dev` | +| `WORKTREE_DIR` | Directory name for worktrees (default: `.worktrees`) | Project (`/.worktreerc`) | `.worktrees` | +| `AUTO_UPDATE` | Enable background auto-update checks (default: `true`) | User (`~/.worktreerc`) only | `false` | + +`DEFAULT_BASE` and `WORKTREE_DIR` placed in `~/.worktreerc` are ignored — `worktree` reads them from the project file at the repo root only. 
`AUTO_UPDATE` placed in a project `.worktreerc` is ignored with a warning — it must live in `~/.worktreerc` so it applies across all repos under your control. ## Alias diff --git a/src/commands/update.ts b/src/commands/update.ts index cd0cfcf..1f02d58 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -17,6 +17,10 @@ import { isStandalone, verifyAssetAgainstSums, } from "../lib/release"; +import { + cleanupStagedArtifacts, + recordCheckCompleted, +} from "../lib/auto-update"; export const updateCommand = command({ name: "update", @@ -89,7 +93,9 @@ export const updateCommand = command({ if (dlError) { await safeUnlink(tmpPath); if (classifyWriteError(dlError) !== null) { - printError("Permission denied. Try: sudo worktree update"); + printError( + `Permission denied (${deepestMessage(dlError)}). Try: sudo worktree update` + ); } else { printError(deepestMessage(dlError)); } @@ -145,7 +151,9 @@ export const updateCommand = command({ if (renameError) { await safeUnlink(tmpPath); if (classifyWriteError(renameError) !== null) { - printError("Permission denied. Try: sudo worktree update"); + printError( + `Permission denied (${deepestMessage(renameError)}). Try: sudo worktree update` + ); } else { printError( `Failed to replace binary: ${deepestMessage(renameError)}` @@ -154,6 +162,12 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } + // Foreground update succeeded — invalidate any pending background + // stage so the next launch doesn't silently downgrade us by applying + // an older staged version. Bump the throttle for the same reason. 
+ cleanupStagedArtifacts(); + recordCheckCompleted(); + const { BOLD, GREEN, DIM, RESET } = COLORS; console.error(""); console.error( diff --git a/src/index.ts b/src/index.ts index 72d2c59..4985f58 100644 --- a/src/index.ts +++ b/src/index.ts @@ -6,15 +6,56 @@ import { openCommand } from "./commands/open"; import { removeCommand } from "./commands/remove"; import { updateCommand } from "./commands/update"; import { + appendBackgroundCheckPanic, applyPendingUpdate, INTERNAL_CHECK_SUBCOMMAND, scheduleBackgroundUpdateCheck, } from "./lib/auto-update"; +import { COLORS } from "./lib/logger"; import pkg from "../package.json"; -if (process.argv[2] !== INTERNAL_CHECK_SUBCOMMAND) { - applyPendingUpdate(); - void scheduleBackgroundUpdateCheck(); +const META_FLAGS = new Set(["--version", "-v", "--help", "-h"]); +const FOREGROUND_UPDATE_SUBCOMMAND = "update"; + +function isMetaInvocation(): boolean { + // Match only the FIRST positional arg so `worktree create my-feature -h` + // (where `-h` is e.g. a base-branch value) doesn't accidentally skip + // auto-update; only the literal `worktree --version` / `worktree -h` + // forms qualify as "pure metadata". + const first = process.argv[2]; + return first !== undefined && META_FLAGS.has(first); +} + +function shouldSkipAutoUpdate(): boolean { + const first = process.argv[2]; + if (first === INTERNAL_CHECK_SUBCOMMAND) return true; + // `worktree update` is the foreground updater itself — racing the + // background spawn against it lets the background child stage the same + // version foreground just installed and then "auto-update" the user + // back to it on the next launch (apply path no-op, but logs noise). + if (first === FOREGROUND_UPDATE_SUBCOMMAND) return true; + return isMetaInvocation(); +} + +if (!shouldSkipAutoUpdate()) { + try { + applyPendingUpdate(); + } catch (error) { + // applyPendingUpdate re-throws non-errno errors so a programmer bug + // doesn't silently degrade. 
But we MUST NOT crash the entry point — + // the user's actual command (including `worktree update` for manual + // recovery) needs to run. Log the panic, hint at the escape hatch, + // and continue. + appendBackgroundCheckPanic(error); + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update apply failed unexpectedly — set WORKTREE_NO_UPDATE=1 to disable; see ~/.cache/worktree-cli/last-error${RESET}` + ); + } + // .catch funnels future programmer-bug throws into the panic logger + // instead of becoming an unhandled rejection that prints a stack trace + // mixed with the user's command output. + void scheduleBackgroundUpdateCheck().catch(appendBackgroundCheckPanic); } run( diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index 9169efd..bc8bbf0 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -62,6 +62,14 @@ function ensureCacheDir(): void { } let hasWarnedAboutLogFailure = false; +// Process-scoped latch: once the throttle file (last-check) becomes +// unwritable, every subsequent recordCheckCompleted within this process +// short-circuits. Without the latch, scheduleBackgroundUpdateCheck would +// re-spawn the detached child on every launch (parent reads → no record → +// spawn → child writes → write fails → retry next launch), hammering +// GitHub's API. See F11 in PR review. 
+let hasCacheWriteFailed = false; +let hasWarnedAboutCacheWriteFailureOnce = false; function warnLogFailureOnce(reason: string): void { if (hasWarnedAboutLogFailure) return; @@ -72,6 +80,15 @@ function warnLogFailureOnce(reason: string): void { ); } +function warnCacheWriteFailureOnce(reason: string): void { + if (hasWarnedAboutCacheWriteFailureOnce) return; + hasWarnedAboutCacheWriteFailureOnce = true; + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update throttle cache unwritable (${reason}) — disabling auto-update for this process${RESET}` + ); +} + function appendBackgroundCheckPanic(error: unknown): void { const detail = error instanceof Error @@ -207,6 +224,31 @@ function applyPendingUpdate(): void { return; } + // Block silent downgrade: a stage from a previous background check + // can be older than the running binary if the user ran `worktree + // update` (foreground) in between. Without this gate, the apply + // path would silently revert to the older staged version. + const stageCmp = compareVersions(pkg.version, meta.version); + if (stageCmp > 0) { + // Strictly older stage — discard AND burn the throttle so we + // don't immediately re-stage the same older release. + cleanupStagedArtifacts(); + appendLastError( + "apply", + `discarded stale stage v${meta.version} (running v${pkg.version})` + ); + recordCheckCompleted(); + return; + } + if (stageCmp === 0) { + // Same version (cleanup race from a prior apply, or foreground + // update wrote a stage matching what's already installed). Just + // discard — don't bump the throttle, let the next launch find + // genuinely-newer releases on its normal cadence. 
+ cleanupStagedArtifacts(); + return; + } + const verify = verifyBinaryHashSync(stagedPath, meta.sha256); if (!verify.ok) { cleanupStagedArtifacts(); @@ -312,13 +354,21 @@ async function readLastCheckMs(): Promise { async function isAutoUpdateDisabled(): Promise { if (process.env.WORKTREE_NO_UPDATE === "1") return true; // shouldAutoUpdate fails CLOSED on broken ~/.worktreerc so a typo'd - // config doesn't silently override a user's intended opt-out. - return !(await shouldAutoUpdate()); + // config doesn't silently override a user's intended opt-out. Pass + // appendLastError so the user can discover *why* it's disabled — without + // this hook, a typo'd AUTO_UPDATE silently turns off updates forever. + return !(await shouldAutoUpdate(function (msg) { + appendLastError("check", msg); + })); } async function scheduleBackgroundUpdateCheck(): Promise { try { if (!isStandalone()) return; + // If recordCheckCompleted has already failed in this process, the + // child would also fail — skip the spawn to avoid burning the API + // and leaving stale stage artifacts. + if (hasCacheWriteFailed) return; if (await isAutoUpdateDisabled()) return; const lastCheck = await readLastCheckMs(); @@ -376,12 +426,17 @@ async function scheduleBackgroundUpdateCheck(): Promise { } function recordCheckCompleted(): void { + if (hasCacheWriteFailed) return; const { error } = tryCatchSync(function () { ensureCacheDir(); fs.writeFileSync(getLastCheckPath(), String(Date.now())); }); if (error) { + // Latch process-wide so the next call doesn't re-attempt (and so + // scheduleBackgroundUpdateCheck can short-circuit before spawning). 
+ hasCacheWriteFailed = true; appendLastError("check", `last-check write: ${error.message}`); + warnCacheWriteFailureOnce(error.message); } } @@ -453,18 +508,30 @@ async function runBackgroundUpdateCheck(): Promise { "check", `SHA256SUMS fetch failed — refusing to stage: ${verify.reason}` ); + // Burn the throttle for permanent failures (404, parse error, + // host-pin rejection); transient failures (5xx, 429, 403) keep + // retrying on the next launch. + if (!verify.retryable) { + recordCheckCompleted(); + } } else if (verify.kind === "missing-entry") { appendLastError( "check", `SHA256SUMS missing entry for ${assetName}` ); + // Permanent — same release won't grow a missing entry. + recordCheckCompleted(); } else if (verify.kind === "hash-io-error") { + // Local IO can be transient (disk full mid-write); don't burn + // the throttle. appendLastError( "check", `hash io-error for ${assetName}: ${verify.cause.message}` ); } else { appendLastError("check", `hash mismatch for ${assetName}`); + // Permanent — same release won't change its hash. + recordCheckCompleted(); } return; } @@ -539,7 +606,13 @@ async function runBackgroundUpdateCheck(): Promise { }); if (metaWriteError) { safeUnlinkSync(tmpPath); + safeUnlinkSync(metaTmpPath); appendLastError("check", `sidecar write: ${metaWriteError.message}`); + // Structural permission/readonly errors don't fix themselves on retry + // — burn the throttle so we don't re-download on every launch. 
+ if (classifyWriteError(metaWriteError) !== null) { + recordCheckCompleted(); + } return; } const { error: metaRenameError } = tryCatchSync(function () { @@ -549,6 +622,9 @@ async function runBackgroundUpdateCheck(): Promise { safeUnlinkSync(tmpPath); safeUnlinkSync(metaTmpPath); appendLastError("check", `sidecar commit: ${metaRenameError.message}`); + if (classifyWriteError(metaRenameError) !== null) { + recordCheckCompleted(); + } return; } @@ -559,6 +635,9 @@ async function runBackgroundUpdateCheck(): Promise { safeUnlinkSync(tmpPath); safeUnlinkSync(getMetaSidecarPath()); appendLastError("check", `stage: ${renameError.message}`); + if (classifyWriteError(renameError) !== null) { + recordCheckCompleted(); + } return; } @@ -567,11 +646,15 @@ async function runBackgroundUpdateCheck(): Promise { type ProbeResult = { ok: true } | { ok: false; reason: string }; +const PROBE_VERSION_PATTERN = /\d+\.\d+\.\d+/; + function probeBinaryRuns(filePath: string): ProbeResult { const { data: result, error } = tryCatchSync(function () { return Bun.spawnSync({ cmd: [filePath, "--version"], - stdout: "ignore", + // Capture stdout so we can verify the binary actually printed a + // version string — exit-0-with-garbage would otherwise pass. + stdout: "pipe", stderr: "pipe", timeout: PROBE_TIMEOUT_MS, // Disable auto-update in the probe child — otherwise its top-level @@ -595,18 +678,29 @@ function probeBinaryRuns(filePath: string): ProbeResult { }; } if (result.exitCode !== 0) { - const stderr = decodeProbeStderr(result.stderr); + const stderr = decodeProbeStream(result.stderr); const base = `exit ${result.exitCode}`; return { ok: false, reason: stderr ? 
`${base}: ${stderr}` : base }; } + const stdout = decodeProbeStream(result.stdout); + if (!PROBE_VERSION_PATTERN.test(stdout)) { + const truncated = stdout.slice(0, 80); + return { + ok: false, + reason: `version output did not match expected format: ${JSON.stringify(truncated)}`, + }; + } return { ok: true }; } -function decodeProbeStderr(stderr: unknown): string { - if (!(stderr instanceof Uint8Array) && !(stderr instanceof Buffer)) { - return ""; +function decodeProbeStream(stream: unknown): string { + if (!(stream instanceof Uint8Array) && !(stream instanceof Buffer)) { + // Future Bun API change could surface stream as ReadableStream/string + // — return a debuggable marker instead of "" so the regression is + // visible in last-error rather than silently degrading diagnostics. + return `<non-bytes probe stream: ${typeof stream}>`; } - const bytes = stderr instanceof Buffer ? new Uint8Array(stderr) : stderr; + const bytes = stream instanceof Buffer ? new Uint8Array(stream) : stream; const truncated = bytes.slice(0, PROBE_STDERR_TRUNCATE_BYTES); return new TextDecoder().decode(truncated).trim(); } @@ -614,6 +708,8 @@ export { appendBackgroundCheckPanic, applyPendingUpdate, + cleanupStagedArtifacts, + recordCheckCompleted, scheduleBackgroundUpdateCheck, runBackgroundUpdateCheck, INTERNAL_CHECK_SUBCOMMAND, diff --git a/src/lib/config.ts b/src/lib/config.ts index c1f298e..9efe807 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -102,6 +102,11 @@ async function readConfigFile( `${filePath}:AUTO_UPDATE`, `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead.` ); + // Strip the ignored key BEFORE validation; otherwise an invalid value + // (e.g. AUTO_UPDATE=junk) throws inside booleanLike and the catch + // below falls back to all defaults, silently discarding any valid + // DEFAULT_BASE / WORKTREE_DIR from the same file.
+ delete raw.AUTO_UPDATE; } const { data: parsed, error: parseError } = tryCatchSync(function () { return validateConfig(raw); @@ -120,35 +125,50 @@ async function loadConfig(root: string): Promise { return readConfigFile(path.join(root, ".worktreerc"), "project"); } -async function loadGlobalConfig(): Promise { - return readConfigFile(path.join(os.homedir(), ".worktreerc"), "global"); -} - // Auto-update reads config on every background check and must fail CLOSED // on parse/read errors. The generic readConfigFile falls back to defaults // (AUTO_UPDATE: true), which would silently override a user's explicit // opt-out whenever their config has a typo. This helper returns the // narrow signal "should auto-update fire?" with fail-closed semantics. -async function shouldAutoUpdate(): Promise { +// +// `onError` lets the caller (auto-update.ts) record diagnostics to the +// last-error log so a user with a typo doesn't have auto-update silently +// disabled forever. Optional to avoid forcing an import on call sites that +// don't need diagnostics. +async function shouldAutoUpdate( + onError?: (message: string) => void +): Promise { const filePath = path.join(os.homedir(), ".worktreerc"); const file = Bun.file(filePath); - const isExists = await file.exists(); + // file.exists() can throw on EACCES (e.g., parent dir unreadable). Guard + // it instead of letting it bubble up and crash the scheduler. 
+ const { data: isExists, error: existsError } = await tryCatch( + file.exists() + ); + if (existsError) { + onError?.(`shouldAutoUpdate exists: ${existsError.message}`); + return false; + } if (!isExists) return true; const { data: content, error: readError } = await tryCatch(file.text()); - if (readError) return false; + if (readError) { + onError?.( + `~/.worktreerc read failed; auto-update disabled until fixed: ${readError.message}` + ); + return false; + } const raw = parseConfigContent(content); const { data: parsed, error: parseError } = tryCatchSync(function () { return validateConfig(raw); }); - if (parseError) return false; + if (parseError) { + onError?.( + `~/.worktreerc invalid; auto-update disabled until fixed: ${parseError.message}` + ); + return false; + } return parsed.AUTO_UPDATE; } -export { - loadConfig, - loadGlobalConfig, - parseConfigContent, - shouldAutoUpdate, - validateConfig, -}; +export { loadConfig, parseConfigContent, shouldAutoUpdate, validateConfig }; export type { Config }; diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts index ddf7d3b..f363028 100644 --- a/src/lib/fs-utils.ts +++ b/src/lib/fs-utils.ts @@ -34,18 +34,29 @@ function safeUnlinkSync(filePath: string): void { } } -type WriteErrorCode = "EACCES" | "EPERM" | "EROFS"; +type WriteErrorCode = "EACCES" | "EPERM" | "EROFS" | "EBUSY" | "ETXTBSY"; // release.ts wraps errno errors as `new Error(msg, { cause })` — walk the // cause chain so the "binary directory not writable" branch fires even -// when the top-level Error lacks a .code field. +// when the top-level Error lacks a .code field. EBUSY (Windows: file +// locked) and ETXTBSY (Linux: text file busy) are included because they +// behave like permanent failures from the auto-updater's POV — retry on +// every launch would just re-download the same blob and re-fail. 
+const WRITE_ERROR_CODES = new Set([ + "EACCES", + "EPERM", + "EROFS", + "EBUSY", + "ETXTBSY", +]); + function classifyWriteError(error: unknown): WriteErrorCode | null { let cur: unknown = error; while (cur instanceof Error) { if ("code" in cur) { const code = (cur as NodeJS.ErrnoException).code; - if (code === "EACCES" || code === "EPERM" || code === "EROFS") { - return code; + if (code !== undefined && WRITE_ERROR_CODES.has(code)) { + return code as WriteErrorCode; } } cur = cur.cause; diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts index b6ab32c..046e954 100644 --- a/src/lib/release.test.ts +++ b/src/lib/release.test.ts @@ -4,6 +4,7 @@ import os from "node:os"; import path from "node:path"; import { compareVersions, + fetchLatestRelease, parseSha256Sums, verifyAssetAgainstSums, type ReleaseAsset, @@ -111,6 +112,128 @@ describe("parseSha256Sums", () => { ].join("\n"); expect(() => parseSha256Sums(dupe)).toThrow(/Duplicate/); }); + + it("handles CRLF line endings", () => { + const text = + "a".repeat(64) + + " worktree-darwin-arm64\r\n" + + "b".repeat(64) + + " worktree-linux-x64\r\n"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + expect(result["worktree-linux-x64"]).toBe("b".repeat(64)); + }); + + it("handles missing trailing newline", () => { + const text = "a".repeat(64) + " worktree-darwin-arm64"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + }); + + it("rejects BSD-tagged-format `SHA256 (file) = hex` (not the format we publish)", () => { + const text = `SHA256 (worktree-darwin-arm64) = ${"a".repeat(64)}`; + const result = parseSha256Sums(text); + // Parser regex doesn't match this format — entry is silently skipped. + // Asserting empty result pins the parser's behavior so a future + // regression that loosens the regex is caught here instead of + // accepting unverified hashes downstream. 
+ expect(Object.keys(result)).toEqual([]); + }); +}); + +describe("fetchLatestRelease — JSON-shape boundary", () => { + let originalFetch: typeof globalThis.fetch; + + beforeEach(() => { + originalFetch = globalThis.fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + function stubFetch(handler: () => Response): void { + globalThis.fetch = async function ( + _input: RequestInfo | URL + ): Promise { + return handler(); + } as typeof globalThis.fetch; + } + + it("returns parsed release on a valid payload", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ + tag_name: "v1.2.3", + assets: [ + { + name: "worktree-darwin-arm64", + browser_download_url: + "https://objects.githubusercontent.com/worktree-darwin-arm64", + }, + ], + }), + { status: 200 } + ); + }); + const result = await fetchLatestRelease(); + expect(result.tag).toBe("v1.2.3"); + expect(result.version).toBe("1.2.3"); + expect(result.assets).toHaveLength(1); + }); + + it("throws on missing tag_name", async () => { + stubFetch(function () { + return new Response(JSON.stringify({ assets: [] }), { + status: 200, + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on missing assets array", async () => { + stubFetch(function () { + return new Response(JSON.stringify({ tag_name: "v1.0.0" }), { + status: 200, + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on assets being a non-array", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ tag_name: "v1.0.0", assets: "x" }), + { status: 200 } + ); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on malformed tag_name (path-traversal-shaped)", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ + tag_name: "v1.2.3/../evil", + assets: [], + }), + { status: 200 } + ); + }); + await 
expect(fetchLatestRelease()).rejects.toThrow( + /Release tag malformed/ + ); + }); + + it("throws on non-2xx HTTP", async () => { + stubFetch(function () { + return new Response("server error", { + status: 500, + statusText: "Internal Server Error", + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/500/); + }); }); describe("verifyAssetAgainstSums", () => { @@ -145,9 +268,11 @@ describe("verifyAssetAgainstSums", () => { }); function makeAsset(name: string): ReleaseAsset { + // Use an allowlisted host so the host-pin guard inside withTimeout + // doesn't reject the request before stubFetch can intercept it. return { name, - browser_download_url: `https://example.invalid/${name}`, + browser_download_url: `https://objects.githubusercontent.com/${name}`, }; } @@ -221,6 +346,64 @@ describe("verifyAssetAgainstSums", () => { expect(result.retryable).toBe(false); }); + it("returns sums-error marked retryable on 403 (rate limit)", async () => { + stubFetch(function () { + return new Response("forbidden", { + status: 403, + statusText: "Forbidden", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + // 403/429 are GitHub rate-limit signals — transient, NOT permanent. 
+ expect(result.retryable).toBe(true); + }); + + it("returns sums-error marked retryable on 429 (rate limit)", async () => { + stubFetch(function () { + return new Response("too many requests", { + status: 429, + statusText: "Too Many Requests", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(true); + }); + + it("returns non-retryable sums-error when SHA256SUMS contains duplicate entry (tampering)", async () => { + const dupeBody = [ + "a".repeat(64) + " " + ASSET_NAME, + "b".repeat(64) + " " + ASSET_NAME, + ].join("\n"); + stubFetch(function () { + return new Response(dupeBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + // Duplicate entries are tampering, not transient — must NOT retry. 
+ expect(result.retryable).toBe(false); + expect(result.reason).toMatch(/parse|Duplicate/); + }); + it("returns missing-entry when SHA256SUMS exists but has no row for asset", async () => { const sumsBody = `${ASSET_SHA} some-other-asset\n`; stubFetch(function () { diff --git a/src/lib/release.ts b/src/lib/release.ts index e81edd3..bbb4c38 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -6,6 +6,32 @@ import pkg from "../../package.json"; const REPO = "bhagyamudgal/worktree-cli"; const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/latest`; +// Defense in depth: even though browser_download_url comes from a trusted +// GitHub API JSON, host-pin the fetch so a future CDN substitution or +// release-asset compromise can't redirect downloads to an attacker-chosen +// origin. api.github.com serves the release JSON; objects.githubusercontent +// .com / github-releases.githubusercontent.com / release-assets.githubuser +// content.com host the binary assets; codeload.github.com handles a +// fallback path; github.com handles redirects. +const ALLOWED_RELEASE_HOSTS = new Set([ + "api.github.com", + "github.com", + "codeload.github.com", + "objects.githubusercontent.com", + "release-assets.githubusercontent.com", + "github-releases.githubusercontent.com", +]); + +function isAllowedReleaseHost(urlString: string): boolean { + const { data: parsed } = tryCatchSync(function () { + return new URL(urlString); + }); + if (!parsed) return false; + return ALLOWED_RELEASE_HOSTS.has(parsed.host); +} + +const RELEASE_TAG_PATTERN = /^v?\d+\.\d+\.\d+(?:-[\w.-]+)?$/; + // GitHub REST API requires a User-Agent. Default Bun UA works today but is // fragile under future GitHub policy. Identifying as worktree-cli also // makes this client traceable in logs if something ever misbehaves. 
@@ -126,6 +152,11 @@ async function withTimeout( timeoutMs: number, handler: (response: Response) => Promise ): Promise { + if (!isAllowedReleaseHost(url)) { + throw new Error( + `Refused to fetch URL with disallowed host: ${JSON.stringify(url.slice(0, 120))}` + ); + } const controller = new AbortController(); const timer = setTimeout(function () { controller.abort(); @@ -135,6 +166,16 @@ async function withTimeout( signal: controller.signal, headers: buildGitHubHeaders(), }); + // Re-validate the post-redirect URL: GitHub release downloads always + // bounce through 302 → objects.githubusercontent.com, so the initial + // host-pin alone is bypassable by anyone who can MITM the first hop + // and inject `Location: https://attacker.example/`. Trust only what + // the redirect chain settled on. + if (response.url && !isAllowedReleaseHost(response.url)) { + throw new Error( + `Refused redirect to disallowed host: ${JSON.stringify(response.url.slice(0, 120))}` + ); + } return await handler(response); } finally { clearTimeout(timer); @@ -155,6 +196,15 @@ async function fetchLatestRelease( if (!isReleaseInfo(json)) { throw new Error("Release payload missing tag_name or assets"); } + // Validate tag shape at the API boundary so a malicious or + // misconfigured tag (e.g., "v1.2.3/../evil") can't propagate + // through to printError messages, log lines, or any future + // path-joining caller. + if (!RELEASE_TAG_PATTERN.test(json.tag_name)) { + throw new Error( + `Release tag malformed: ${JSON.stringify(json.tag_name.slice(0, 40))}` + ); + } return { tag: json.tag_name, version: json.tag_name.replace(/^v/, ""), @@ -231,6 +281,15 @@ async function downloadAsset( reader.releaseLock(); }); } + if (bytesReceived === 0) { + // 200 OK with empty body (CDN edge case, misconfigured + // proxy). Without this guard, Bun.write writes an empty + // file and SHA verification later reports a misleading + // "hash mismatch" instead of the upstream cause. 
+ throw new Error( + `Download ${asset.name} refused: empty response body` + ); + } const buffer = new Uint8Array(bytesReceived); let offset = 0; for (const chunk of chunks) { @@ -327,9 +386,11 @@ type Sha256SumsResult = | { kind: "error"; reason: string; retryable: boolean }; // 5xx / network errors are retryable (transient CDN or server issues). -// 4xx responses are permanent (missing asset, auth problem). Callers can -// use this to decide whether to skip burning the 24h throttle. +// Most 4xx responses are permanent (missing asset, auth problem), but +// 403/429 specifically signal rate-limiting (especially for anonymous +// callers — 60/hr GitHub limit) and ARE transient. function isRetryableHttpStatus(status: number): boolean { + if (status === 403 || status === 429) return true; return status >= 500 && status < 600; } @@ -362,7 +423,23 @@ async function fetchSha256Sums( retryable: true, }; } - return { kind: "ok", sums: parseSha256Sums(text) }; + // parseSha256Sums throws on duplicate-entry tampering — that + // is a permanent integrity failure for THIS release, not a + // transient network issue. Catch it inline so it doesn't + // collapse into the generic retryable network branch below. 
+ const { data: parsed, error: parseError } = tryCatchSync( + function () { + return parseSha256Sums(text); + } + ); + if (parseError) { + return { + kind: "error", + reason: `SHA256SUMS parse: ${parseError.message}`, + retryable: false, + }; + } + return { kind: "ok", sums: parsed }; } ) ); From c5ceded801c413cdfd0660275d03be802e74721a Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sun, 19 Apr 2026 21:54:12 +0530 Subject: [PATCH 10/15] refactor: trim verbose auto-update comments --- CHANGELOG.md | 1 + src/commands/internal-update-check.ts | 6 +- src/commands/update.ts | 7 +- src/index.ts | 20 +--- src/lib/auto-update.ts | 140 +++++--------------------- src/lib/config.ts | 20 +--- src/lib/fs-utils.ts | 11 +- src/lib/release.test.ts | 18 +--- src/lib/release.ts | 63 ++---------- 9 files changed, 57 insertions(+), 229 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 22d58b6..f3af6d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,6 +14,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed +- Trimmed verbose auto-update implementation comments; the hardening rationale lives in git history. - CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) to address GitHub's deprecation of Node.js 20 actions. - Release binaries now built with `--minify --sourcemap=none` and stripped of debug symbols (`llvm-strip` on Linux, `strip` on macOS). Binary sizes are slightly smaller; functional behavior unchanged. - Release workflow smoke-tests each built binary via `--version` before publishing to catch regressions. 
diff --git a/src/commands/internal-update-check.ts b/src/commands/internal-update-check.ts index 5cb5850..a8e01e6 100644 --- a/src/commands/internal-update-check.ts +++ b/src/commands/internal-update-check.ts @@ -10,11 +10,7 @@ export const internalUpdateCheckCommand = command({ desc: "", hidden: true, handler: async () => { - // Silent detached child has no error surface by design - // (stdio/stderr redirected in scheduleBackgroundUpdateCheck). Wrap - // the whole run so any unhandled throw from runBackgroundUpdateCheck - // appends a full stack trace to last-error before exiting non-zero. - // Without this, a programmer bug here is invisible in production. + // Detached child's stderr is redirected; catch so panics still hit last-error. try { await runBackgroundUpdateCheck(); } catch (error) { diff --git a/src/commands/update.ts b/src/commands/update.ts index 1f02d58..5b0fc55 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -84,8 +84,7 @@ export const updateCommand = command({ printInfo(`Downloading ${assetName}...`); const tmpPath = `${binaryPath}.update-tmp`; - // Clear any pre-existing entry so the write can't follow a planted - // symlink in a shared install dir. + // Pre-unlink to prevent symlink-follow in shared install dirs. await safeUnlink(tmpPath); const { error: dlError } = await tryCatch( downloadAsset(asset, tmpPath) @@ -162,9 +161,7 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } - // Foreground update succeeded — invalidate any pending background - // stage so the next launch doesn't silently downgrade us by applying - // an older staged version. Bump the throttle for the same reason. + // Invalidate pending stage + bump throttle to prevent silent downgrade on next launch. 
cleanupStagedArtifacts(); recordCheckCompleted(); diff --git a/src/index.ts b/src/index.ts index 4985f58..1c4d8d4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -18,10 +18,7 @@ const META_FLAGS = new Set(["--version", "-v", "--help", "-h"]); const FOREGROUND_UPDATE_SUBCOMMAND = "update"; function isMetaInvocation(): boolean { - // Match only the FIRST positional arg so `worktree create my-feature -h` - // (where `-h` is e.g. a base-branch value) doesn't accidentally skip - // auto-update; only the literal `worktree --version` / `worktree -h` - // forms qualify as "pure metadata". + // Match only the first positional arg so flag-as-value (e.g. `create my-feature -h`) still auto-updates. const first = process.argv[2]; return first !== undefined && META_FLAGS.has(first); } @@ -29,10 +26,7 @@ function isMetaInvocation(): boolean { function shouldSkipAutoUpdate(): boolean { const first = process.argv[2]; if (first === INTERNAL_CHECK_SUBCOMMAND) return true; - // `worktree update` is the foreground updater itself — racing the - // background spawn against it lets the background child stage the same - // version foreground just installed and then "auto-update" the user - // back to it on the next launch (apply path no-op, but logs noise). + // Skip for the foreground updater to avoid racing its own binary install. if (first === FOREGROUND_UPDATE_SUBCOMMAND) return true; return isMetaInvocation(); } @@ -41,20 +35,14 @@ if (!shouldSkipAutoUpdate()) { try { applyPendingUpdate(); } catch (error) { - // applyPendingUpdate re-throws non-errno errors so a programmer bug - // doesn't silently degrade. But we MUST NOT crash the entry point — - // the user's actual command (including `worktree update` for manual - // recovery) needs to run. Log the panic, hint at the escape hatch, - // and continue. + // Never crash the entry point — the user's command (including `worktree update`) must still run. 
appendBackgroundCheckPanic(error); const { DIM, RESET } = COLORS; console.error( `${DIM}worktree: auto-update apply failed unexpectedly — set WORKTREE_NO_UPDATE=1 to disable; see ~/.cache/worktree-cli/last-error${RESET}` ); } - // .catch funnels future programmer-bug throws into the panic logger - // instead of becoming an unhandled rejection that prints a stack trace - // mixed with the user's command output. + // Funnel async throws into the panic logger, not an unhandled rejection. void scheduleBackgroundUpdateCheck().catch(appendBackgroundCheckPanic); } diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index bc8bbf0..d4b5dd0 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -27,10 +27,7 @@ const PROBE_STDERR_TRUNCATE_BYTES = 500; const INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; const SIDECAR_VERSION_PATTERN = /^\d+\.\d+\.\d+(?:-[\w.-]+)?$/; const SIDECAR_HASH_PATTERN = /^[0-9a-f]{64}$/; -// Window in which a "partial" stage (sidecar without binary, or binary -// without sidecar) may be a concurrent producer mid-commit rather than a -// genuine orphan. applyPendingUpdate defers reaping anything fresher than -// this so a simultaneous-launch race can't discard a correct staged update. +// Defer reaping partial stages: a concurrent producer mid-commit looks identical to an orphan. const STAGING_ORPHAN_GRACE_MS = 60 * 1000; function getBinaryDir(): string { @@ -62,12 +59,6 @@ function ensureCacheDir(): void { } let hasWarnedAboutLogFailure = false; -// Process-scoped latch: once the throttle file (last-check) becomes -// unwritable, every subsequent recordCheckCompleted within this process -// short-circuits. Without the latch, scheduleBackgroundUpdateCheck would -// re-spawn the detached child on every launch (parent reads → no record → -// spawn → child writes → write fails → retry next launch), hammering -// GitHub's API. See F11 in PR review. 
let hasCacheWriteFailed = false; let hasWarnedAboutCacheWriteFailureOnce = false; @@ -169,30 +160,19 @@ function isWithinGracePeriod(filePath: string): boolean { } function applyPendingUpdate(): void { - // Sync on purpose: runs before brocli.run; avoids top-level await. if (process.env.WORKTREE_NO_UPDATE === "1") return; try { if (!isStandalone()) return; const stagedPath = getStagingPath(); const metaPath = getMetaSidecarPath(); if (!fs.existsSync(stagedPath)) { - // Sidecar without binary. Two possible causes: - // (a) a concurrent producer just committed the sidecar and is - // about to rename the binary into place; or - // (b) a crashed/abandoned previous stage left only the sidecar. - // Differentiate by mtime — within the grace window, leave alone - // so the producer can complete; past the window, treat as orphan - // and reap. + // Within grace window, assume concurrent producer; past it, reap orphan. if (isWithinGracePeriod(metaPath)) return; safeUnlinkSync(metaPath); return; } if (!fs.existsSync(metaPath)) { - // Binary without sidecar. Same producer-mid-commit vs orphan - // ambiguity — grace period prevents a concurrent launch from - // discarding a correctly-staged binary whose sidecar is about - // to land. if (isWithinGracePeriod(stagedPath)) return; safeUnlinkSync(stagedPath); appendLastError( @@ -224,14 +204,9 @@ function applyPendingUpdate(): void { return; } - // Block silent downgrade: a stage from a previous background check - // can be older than the running binary if the user ran `worktree - // update` (foreground) in between. Without this gate, the apply - // path would silently revert to the older staged version. + // Gate against silent downgrade from a stale stage (e.g. foreground update raced a background check). const stageCmp = compareVersions(pkg.version, meta.version); if (stageCmp > 0) { - // Strictly older stage — discard AND burn the throttle so we - // don't immediately re-stage the same older release. 
cleanupStagedArtifacts(); appendLastError( "apply", @@ -241,10 +216,6 @@ function applyPendingUpdate(): void { return; } if (stageCmp === 0) { - // Same version (cleanup race from a prior apply, or foreground - // update wrote a stage matching what's already installed). Just - // discard — don't bump the throttle, let the next launch find - // genuinely-newer releases on its normal cadence. cleanupStagedArtifacts(); return; } @@ -273,10 +244,7 @@ function applyPendingUpdate(): void { fs.renameSync(stagedPath, process.execPath); }); if (renameError) { - // Always cleanup so a persistent rename failure (ETXTBSY, EXDEV, - // ENOSPC, EIO, EBUSY, EACCES/EPERM/EROFS) doesn't loop on every - // launch — if rename fails once it's almost certainly going to - // fail again until the user intervenes. + // Persistent rename failures won't self-heal; cleanup to avoid looping on every launch. cleanupStagedArtifacts(); const writeCode = classifyWriteError(renameError); const rawCode = (renameError as NodeJS.ErrnoException).code; @@ -296,9 +264,7 @@ function applyPendingUpdate(): void { return; } safeUnlinkSync(metaPath); - // Bump the throttle so the sibling scheduleBackgroundUpdateCheck() in - // src/index.ts doesn't spawn a redundant child-check immediately after - // a just-applied update — the new binary is already current. + // Bump throttle so the sibling scheduleBackgroundUpdateCheck doesn't redundantly re-check. recordCheckCompleted(); const { GREEN, BOLD, RESET } = COLORS; @@ -306,11 +272,7 @@ function applyPendingUpdate(): void { `worktree ${GREEN}${BOLD}auto-updated${RESET} to ${BOLD}v${meta.version}${RESET}` ); } catch (error) { - // Only swallow errno-style I/O errors. Programmer bugs (TypeError, - // ReferenceError, unexpected throws from parseSidecar, etc.) must - // propagate so Bun's unhandled handler surfaces a stack trace — - // otherwise a regression here degrades to a cryptic DIM warning - // forever. Matches the discipline in scheduleBackgroundUpdateCheck. 
+ // Swallow errno-style I/O only; let programmer bugs propagate with a stack trace. if (!(error instanceof Error) || !("code" in error)) { throw error; } @@ -353,10 +315,7 @@ async function readLastCheckMs(): Promise { async function isAutoUpdateDisabled(): Promise { if (process.env.WORKTREE_NO_UPDATE === "1") return true; - // shouldAutoUpdate fails CLOSED on broken ~/.worktreerc so a typo'd - // config doesn't silently override a user's intended opt-out. Pass - // appendLastError so the user can discover *why* it's disabled — without - // this hook, a typo'd AUTO_UPDATE silently turns off updates forever. + // Fail CLOSED on broken config so a typo can't silently disable auto-update. return !(await shouldAutoUpdate(function (msg) { appendLastError("check", msg); })); @@ -365,9 +324,7 @@ async function isAutoUpdateDisabled(): Promise { async function scheduleBackgroundUpdateCheck(): Promise { try { if (!isStandalone()) return; - // If recordCheckCompleted has already failed in this process, the - // child would also fail — skip the spawn to avoid burning the API - // and leaving stale stage artifacts. + // Skip spawn if cache is unwritable; the child would also fail and burn API quota. if (hasCacheWriteFailed) return; if (await isAutoUpdateDisabled()) return; @@ -379,15 +336,8 @@ async function scheduleBackgroundUpdateCheck(): Promise { now - lastCheck < TWENTY_FOUR_HOURS_MS; if (shouldSkip) return; - // Parent does NOT write last-check; the child writes it after a - // successful check completes. Simultaneous launches may both spawn - // (accepted trade-off — worst case 2 API calls within the anon 60/hr - // limit) but a failed check never burns the 24h window. - // - // Child stderr is appended to last-error (not "ignore") so an - // unhandled throw inside runBackgroundUpdateCheck surfaces with a - // stack trace on the next foreground launch — otherwise programmer - // bugs in the background path are invisible in production. 
+ // Only the child writes last-check on success, so a failed check never burns the 24h window. + // Child stderr is funneled to last-error so background panics are visible on the next launch. const { data: stderrFd } = tryCatchSync(function () { ensureCacheDir(); return fs.openSync(getLastErrorPath(), "a"); @@ -398,15 +348,11 @@ async function scheduleBackgroundUpdateCheck(): Promise { stdin: "ignore", stdout: "ignore", stderr: stderrFd ?? "ignore", - // POSIX: setsid() — survives terminal close (SIGHUP) so a - // slow download isn't cut short when the user's shell exits. + // POSIX setsid(): survives terminal close so a slow download isn't SIGHUPed. detached: true, }).unref(); } finally { - // Close the parent's copy regardless of spawn outcome. Bun.spawn - // throws synchronously on spawn failures (EMFILE, EPERM, EAGAIN) - // before the catch below runs, so without the finally a spawn - // failure would leak the fd on every foreground launch. + // Close parent's fd copy even if Bun.spawn throws synchronously (else fd leak per launch). if (stderrFd !== null) { const inheritedFd = stderrFd; tryCatchSync(function () { @@ -415,9 +361,7 @@ async function scheduleBackgroundUpdateCheck(): Promise { } } } catch (error) { - // Only swallow errno-style errors (spawn failures, fs errors). Let - // programmer bugs (TypeError, ReferenceError) propagate so they - // surface via Bun's default unhandled-rejection handler. + // Swallow errno-style only; let programmer bugs propagate. if (!(error instanceof Error) || !("code" in error)) { throw error; } @@ -432,8 +376,7 @@ function recordCheckCompleted(): void { fs.writeFileSync(getLastCheckPath(), String(Date.now())); }); if (error) { - // Latch process-wide so the next call doesn't re-attempt (and so - // scheduleBackgroundUpdateCheck can short-circuit before spawning). + // Latch: future calls and scheduleBackgroundUpdateCheck short-circuit. 
hasCacheWriteFailed = true; appendLastError("check", `last-check write: ${error.message}`); warnCacheWriteFailureOnce(error.message); @@ -443,8 +386,7 @@ function recordCheckCompleted(): void { async function runBackgroundUpdateCheck(): Promise { const assetName = getAssetName(); if (!assetName) { - // Structural mismatch that won't be fixed by retrying sooner — burn - // the 24h window to avoid thrashing GitHub's API on every launch. + // Structural — burn throttle so we don't thrash the API. appendLastError("check", `unsupported platform/arch`); recordCheckCompleted(); return; @@ -469,8 +411,7 @@ async function runBackgroundUpdateCheck(): Promise { return entry.name === assetName; }); if (!asset) { - // Don't record completion — asset-missing is transient (maintainer may - // upload the missing arch moments later). Let the next launch retry. + // Transient: maintainer may upload the missing arch later; don't burn throttle. appendLastError( "check", `release ${release.tag} missing asset ${assetName}` @@ -484,8 +425,7 @@ async function runBackgroundUpdateCheck(): Promise { `${STAGING_FILENAME}.${process.pid}.tmp` ); - // Clear any pre-existing entry (symlink / leftover tmp) so the subsequent - // write can't follow a planted symlink to an attacker-chosen target. + // Pre-unlink to prevent the write from following a planted symlink. safeUnlinkSync(tmpPath); const { error: dlError } = await tryCatch(downloadAsset(asset, tmpPath)); if (dlError) { @@ -494,8 +434,7 @@ async function runBackgroundUpdateCheck(): Promise { return; } - // Verify integrity BEFORE making the binary executable or running it. - // Running an unverified binary (even just `--version`) is code execution. + // Verify BEFORE chmod/probe: running an unverified binary is code execution. 
const verify = await verifyAssetAgainstSums( tmpPath, assetName, @@ -508,9 +447,7 @@ async function runBackgroundUpdateCheck(): Promise { "check", `SHA256SUMS fetch failed — refusing to stage: ${verify.reason}` ); - // Burn the throttle for permanent failures (404, parse error, - // host-pin rejection); transient failures (5xx, 429, 403) keep - // retrying on the next launch. + // Burn throttle for permanent failures; transient ones keep retrying. if (!verify.retryable) { recordCheckCompleted(); } @@ -519,26 +456,19 @@ async function runBackgroundUpdateCheck(): Promise { "check", `SHA256SUMS missing entry for ${assetName}` ); - // Permanent — same release won't grow a missing entry. recordCheckCompleted(); } else if (verify.kind === "hash-io-error") { - // Local IO can be transient (disk full mid-write); don't burn - // the throttle. + // Local IO may be transient (disk full mid-write); don't burn throttle. appendLastError( "check", `hash io-error for ${assetName}: ${verify.cause.message}` ); } else { appendLastError("check", `hash mismatch for ${assetName}`); - // Permanent — same release won't change its hash. recordCheckCompleted(); } return; } - // verify.hash === null → legacy release without SHA256SUMS. Upstream - // integrity rests on TLS alone; no end-to-end hash exists to compare - // against. See the sidecar-commit comment below for the narrower - // guarantee we can still provide for legacy releases. let verifiedHash: string | null = verify.hash; const { error: chmodError } = tryCatchSync(function () { @@ -557,13 +487,7 @@ async function runBackgroundUpdateCheck(): Promise { return; } - // Legacy releases without SHA256SUMS can't stage via the re-verify path - // because applyPendingUpdate needs a hash to check. Self-compute from - // the downloaded bytes so apply can still run — note this hash is NOT - // an end-to-end integrity check (same bytes generate the hash being - // compared against). 
Its only guarantee is detecting stage→apply - // on-disk corruption; upstream tampering for legacy releases is - // blocked only by TLS. + // Legacy release lacks SHA256SUMS; self-hash only detects local stage→apply corruption, not upstream tampering. if (verifiedHash === null) { const { data: computed, error: hashError } = tryCatchSync(function () { return computeSha256Sync(tmpPath); @@ -579,9 +503,7 @@ async function runBackgroundUpdateCheck(): Promise { verifiedHash = computed; } - // Validate server-controlled release.version BEFORE writing — keeps the - // writer in lockstep with the reader's SIDECAR_VERSION_PATTERN check so a - // future parser relaxation can't turn a crafted tag into a hash-spoof. + // Lock writer to reader's pattern so a future parser relaxation can't turn a crafted tag into a hash-spoof. if (!SIDECAR_VERSION_PATTERN.test(release.version)) { safeUnlinkSync(tmpPath); appendLastError( @@ -599,7 +521,6 @@ async function runBackgroundUpdateCheck(): Promise { version: release.version, sha256: verifiedHash, }); - // Same symlink-safety treatment as the binary tmpPath. safeUnlinkSync(metaTmpPath); const { error: metaWriteError } = tryCatchSync(function () { fs.writeFileSync(metaTmpPath, sidecarContent); @@ -608,8 +529,7 @@ async function runBackgroundUpdateCheck(): Promise { safeUnlinkSync(tmpPath); safeUnlinkSync(metaTmpPath); appendLastError("check", `sidecar write: ${metaWriteError.message}`); - // Structural permission/readonly errors don't fix themselves on retry - // — burn the throttle so we don't re-download on every launch. + // Structural permission/readonly errors won't self-heal; burn throttle. 
if (classifyWriteError(metaWriteError) !== null) { recordCheckCompleted(); } @@ -652,14 +572,11 @@ function probeBinaryRuns(filePath: string): ProbeResult { const { data: result, error } = tryCatchSync(function () { return Bun.spawnSync({ cmd: [filePath, "--version"], - // Capture stdout so we can verify the binary actually printed a - // version string — exit-0-with-garbage would otherwise pass. + // Capture stdout to reject exit-0-with-garbage as a valid probe. stdout: "pipe", stderr: "pipe", timeout: PROBE_TIMEOUT_MS, - // Disable auto-update in the probe child — otherwise its top-level - // scheduleBackgroundUpdateCheck could spawn a grandchild, and - // applyPendingUpdate could consume a stale staged binary. + // Disable auto-update in the probe to prevent grandchild spawn / stale-stage consumption. env: { ...process.env, WORKTREE_NO_UPDATE: "1" }, }); }); @@ -670,8 +587,7 @@ function probeBinaryRuns(filePath: string): ProbeResult { }; } if (result.exitCode === null) { - // Bun.spawnSync returns exitCode=null when the child was killed by the - // timeout — surface that explicitly instead of the opaque "exit null". + // Bun.spawnSync returns null exitCode on timeout kill. return { ok: false, reason: `timed out after ${PROBE_TIMEOUT_MS}ms`, @@ -695,9 +611,7 @@ function probeBinaryRuns(filePath: string): ProbeResult { function decodeProbeStream(stream: unknown): string { if (!(stream instanceof Uint8Array) && !(stream instanceof Buffer)) { - // Future Bun API change could surface stream as ReadableStream/string - // — return a debuggable marker instead of "" so the regression is - // visible in last-error rather than silently degrading diagnostics. + // Emit a debuggable marker (not "") so a Bun API shape change is visible in last-error. return ``; } const bytes = stream instanceof Buffer ? 
new Uint8Array(stream) : stream; diff --git a/src/lib/config.ts b/src/lib/config.ts index 9efe807..f21ca9a 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -102,10 +102,7 @@ async function readConfigFile( `${filePath}:AUTO_UPDATE`, `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead.` ); - // Strip the ignored key BEFORE validation; otherwise an invalid value - // (e.g. AUTO_UPDATE=junk) throws inside booleanLike and the catch - // below falls back to all defaults, silently discarding any valid - // DEFAULT_BASE / WORKTREE_DIR from the same file. + // Strip pre-validate so `AUTO_UPDATE=junk` doesn't discard valid sibling keys. delete raw.AUTO_UPDATE; } const { data: parsed, error: parseError } = tryCatchSync(function () { @@ -125,23 +122,14 @@ async function loadConfig(root: string): Promise { return readConfigFile(path.join(root, ".worktreerc"), "project"); } -// Auto-update reads config on every background check and must fail CLOSED -// on parse/read errors. The generic readConfigFile falls back to defaults -// (AUTO_UPDATE: true), which would silently override a user's explicit -// opt-out whenever their config has a typo. This helper returns the -// narrow signal "should auto-update fire?" with fail-closed semantics. -// -// `onError` lets the caller (auto-update.ts) record diagnostics to the -// last-error log so a user with a typo doesn't have auto-update silently -// disabled forever. Optional to avoid forcing an import on call sites that -// don't need diagnostics. +// Fail CLOSED on parse/read errors so a typo can't silently override opt-out. +// `onError` threads diagnostics so users discover *why* auto-update is disabled. async function shouldAutoUpdate( onError?: (message: string) => void ): Promise { const filePath = path.join(os.homedir(), ".worktreerc"); const file = Bun.file(filePath); - // file.exists() can throw on EACCES (e.g., parent dir unreadable). 
Guard - // it instead of letting it bubble up and crash the scheduler. + // `file.exists()` can throw EACCES; guard to avoid crashing the scheduler. const { data: isExists, error: existsError } = await tryCatch( file.exists() ); diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts index f363028..8c4e4c1 100644 --- a/src/lib/fs-utils.ts +++ b/src/lib/fs-utils.ts @@ -36,12 +36,7 @@ function safeUnlinkSync(filePath: string): void { type WriteErrorCode = "EACCES" | "EPERM" | "EROFS" | "EBUSY" | "ETXTBSY"; -// release.ts wraps errno errors as `new Error(msg, { cause })` — walk the -// cause chain so the "binary directory not writable" branch fires even -// when the top-level Error lacks a .code field. EBUSY (Windows: file -// locked) and ETXTBSY (Linux: text file busy) are included because they -// behave like permanent failures from the auto-updater's POV — retry on -// every launch would just re-download the same blob and re-fail. +// Walks cause chain for errno; EBUSY/ETXTBSY treated as permanent (file locked/busy). const WRITE_ERROR_CODES = new Set([ "EACCES", "EPERM", @@ -64,9 +59,7 @@ function classifyWriteError(error: unknown): WriteErrorCode | null { return null; } -// Walk `Error.cause` to the deepest leaf so the original errno message -// (ENOTFOUND, ECONNRESET, ETIMEDOUT) surfaces to the user instead of the -// generic wrapper ("Failed to reach GitHub releases API"). +// Unwrap `cause` to surface the original errno message instead of a generic wrapper. 
function deepestMessage(error: unknown): string { let cur: unknown = error; while (cur instanceof Error && cur.cause !== undefined) { diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts index 046e954..96a99f9 100644 --- a/src/lib/release.test.ts +++ b/src/lib/release.test.ts @@ -47,10 +47,7 @@ describe("compareVersions", () => { }); it("compares numeric prerelease segments as strings (rc.10 < rc.2)", () => { - // Intentional non-strict-SemVer: "rc.10" sorts before "rc.2" by - // lexicographic string compare. Strict SemVer 2.0 §11 would order - // numeric segments numerically (rc.2 < rc.10). If prereleases ever - // start shipping from this repo, revisit. + // Non-strict SemVer: lexicographic prerelease ordering (rc.10 < rc.2). expect(compareVersions("1.2.3-rc.10", "1.2.3-rc.2")).toBeLessThan(0); }); @@ -133,10 +130,7 @@ describe("parseSha256Sums", () => { it("rejects BSD-tagged-format `SHA256 (file) = hex` (not the format we publish)", () => { const text = `SHA256 (worktree-darwin-arm64) = ${"a".repeat(64)}`; const result = parseSha256Sums(text); - // Parser regex doesn't match this format — entry is silently skipped. - // Asserting empty result pins the parser's behavior so a future - // regression that loosens the regex is caught here instead of - // accepting unverified hashes downstream. + // Pins the parser to reject unknown formats — guards against accepting unverified hashes. expect(Object.keys(result)).toEqual([]); }); }); @@ -240,8 +234,7 @@ describe("verifyAssetAgainstSums", () => { const ASSET_BYTES = new Uint8Array([ 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, ]); - // Precomputed SHA256 of "hello world" (ASSET_BYTES) — avoids runtime - // dependence on Bun.CryptoHasher in test setup. + // Precomputed SHA256 of ASSET_BYTES. 
const ASSET_SHA = "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"; const ASSET_NAME = "worktree-darwin-arm64"; @@ -263,13 +256,12 @@ describe("verifyAssetAgainstSums", () => { try { fs.unlinkSync(tmpFile); } catch { - // best-effort cleanup + // ignore } }); function makeAsset(name: string): ReleaseAsset { - // Use an allowlisted host so the host-pin guard inside withTimeout - // doesn't reject the request before stubFetch can intercept it. + // Allowlisted host so the withTimeout host-pin doesn't reject pre-stub. return { name, browser_download_url: `https://objects.githubusercontent.com/${name}`, diff --git a/src/lib/release.ts b/src/lib/release.ts index bbb4c38..4e28c48 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -6,13 +6,7 @@ import pkg from "../../package.json"; const REPO = "bhagyamudgal/worktree-cli"; const API_RELEASES_LATEST = `https://api.github.com/repos/${REPO}/releases/latest`; -// Defense in depth: even though browser_download_url comes from a trusted -// GitHub API JSON, host-pin the fetch so a future CDN substitution or -// release-asset compromise can't redirect downloads to an attacker-chosen -// origin. api.github.com serves the release JSON; objects.githubusercontent -// .com / github-releases.githubusercontent.com / release-assets.githubuser -// content.com host the binary assets; codeload.github.com handles a -// fallback path; github.com handles redirects. +// Host-pin fetches to GitHub origins; defense-in-depth against CDN/release-asset compromise. const ALLOWED_RELEASE_HOSTS = new Set([ "api.github.com", "github.com", @@ -32,10 +26,7 @@ function isAllowedReleaseHost(urlString: string): boolean { const RELEASE_TAG_PATTERN = /^v?\d+\.\d+\.\d+(?:-[\w.-]+)?$/; -// GitHub REST API requires a User-Agent. Default Bun UA works today but is -// fragile under future GitHub policy. Identifying as worktree-cli also -// makes this client traceable in logs if something ever misbehaves. 
-// When GITHUB_TOKEN is set, rate limit bumps from 60/hr (anon) to 5000/hr. +// Identify as worktree-cli; GITHUB_TOKEN bumps rate limit from 60/hr to 5000/hr. function buildGitHubHeaders(): Record { const headers: Record = { "User-Agent": `worktree-cli/${pkg.version}`, @@ -51,9 +42,7 @@ function buildGitHubHeaders(): Record { const DEFAULT_META_TIMEOUT_MS = 30_000; const DEFAULT_ASSET_TIMEOUT_MS = 600_000; -// Hard cap on downloaded release assets. Current binaries are ~50 MB, so -// 200 MB leaves 4× headroom while still rejecting a malicious CDN response -// that would fill the binary-dir filesystem before SHA verification fires. +// 4× headroom over current ~50 MB binary; rejects oversized CDN responses pre-verification. const MAX_ASSET_BYTES = 200 * 1024 * 1024; type ReleaseAsset = { @@ -166,11 +155,7 @@ async function withTimeout( signal: controller.signal, headers: buildGitHubHeaders(), }); - // Re-validate the post-redirect URL: GitHub release downloads always - // bounce through 302 → objects.githubusercontent.com, so the initial - // host-pin alone is bypassable by anyone who can MITM the first hop - // and inject `Location: https://attacker.example/`. Trust only what - // the redirect chain settled on. + // Re-validate post-redirect host; initial pin is bypassable via an injected `Location:` on hop 1. if (response.url && !isAllowedReleaseHost(response.url)) { throw new Error( `Refused redirect to disallowed host: ${JSON.stringify(response.url.slice(0, 120))}` @@ -196,10 +181,7 @@ async function fetchLatestRelease( if (!isReleaseInfo(json)) { throw new Error("Release payload missing tag_name or assets"); } - // Validate tag shape at the API boundary so a malicious or - // misconfigured tag (e.g., "v1.2.3/../evil") can't propagate - // through to printError messages, log lines, or any future - // path-joining caller. + // Reject malformed tags at the boundary so they can't propagate into paths/logs. 
if (!RELEASE_TAG_PATTERN.test(json.tag_name)) { throw new Error( `Release tag malformed: ${JSON.stringify(json.tag_name.slice(0, 40))}` @@ -253,13 +235,7 @@ async function downloadAsset( `Download ${asset.name} refused: empty response body` ); } - // Stream the body into a growing chunk list, tracking bytes - // received so we can bail out BEFORE buffering past the cap - // when Content-Length is absent or forged. A plain - // `response.arrayBuffer()` would buffer the whole body - // before any cap check; a plain `Bun.write(destPath, response)` - // doesn't propagate the fetch AbortSignal reliably into the - // body read. Manual reader gives us both bounds. + // Manual reader: bail out before buffering past the cap when Content-Length is absent/forged. const reader = response.body.getReader(); const chunks: Uint8Array[] = []; let bytesReceived = 0; @@ -282,10 +258,7 @@ async function downloadAsset( }); } if (bytesReceived === 0) { - // 200 OK with empty body (CDN edge case, misconfigured - // proxy). Without this guard, Bun.write writes an empty - // file and SHA verification later reports a misleading - // "hash mismatch" instead of the upstream cause. + // Explicit empty-body error; else SHA verify later reports a misleading mismatch. throw new Error( `Download ${asset.name} refused: empty response body` ); @@ -375,8 +348,7 @@ function verifyBinaryHashSync( function constantTimeEquals(a: string, b: string): boolean { if (a.length !== b.length) return false; - // node:crypto.timingSafeEqual is a C-level constant-time compare — - // strictly better than a userland XOR loop that V8/JSC may short-circuit. + // Constant-time compare prevents timing side-channel on hash compare. return timingSafeEqual(Buffer.from(a), Buffer.from(b)); } @@ -385,10 +357,7 @@ type Sha256SumsResult = | { kind: "ok"; sums: Record } | { kind: "error"; reason: string; retryable: boolean }; -// 5xx / network errors are retryable (transient CDN or server issues). 
-// Most 4xx responses are permanent (missing asset, auth problem), but -// 403/429 specifically signal rate-limiting (especially for anonymous -// callers — 60/hr GitHub limit) and ARE transient. +// 5xx and 403/429 (rate-limit) retryable; other 4xx treated as permanent. function isRetryableHttpStatus(status: number): boolean { if (status === 403 || status === 429) return true; return status >= 500 && status < 600; @@ -419,14 +388,10 @@ async function fetchSha256Sums( return { kind: "error", reason: "empty SHA256SUMS body", - // Likely mid-publish or transient; next launch retries. retryable: true, }; } - // parseSha256Sums throws on duplicate-entry tampering — that - // is a permanent integrity failure for THIS release, not a - // transient network issue. Catch it inline so it doesn't - // collapse into the generic retryable network branch below. + // Duplicate entries are tampering, not transient — permanent failure. const { data: parsed, error: parseError } = tryCatchSync( function () { return parseSha256Sums(text); @@ -447,17 +412,12 @@ async function fetchSha256Sums( return { kind: "error", reason: error?.message ?? "network error", - // Network errors (DNS, abort, fetch throw) are transient. retryable: true, }; } return result; } -// Unified entry point for "download is on disk — verify it against SHA256SUMS -// before we chmod/rename/execute". Both the foreground `update` command and -// the background check need this exact flow; keeping it in one place means a -// future tweak (e.g., stricter not-published handling) lands everywhere. type VerifyAssetResult = | { ok: true; hash: string | null } // hash === null when SHA256SUMS isn't published | { ok: false; kind: "sums-error"; reason: string; retryable: boolean } @@ -501,8 +461,7 @@ async function verifyAssetAgainstSums( } function parseSha256Sums(text: string): Record { - // Object.create(null) blocks __proto__/constructor/prototype key - // pollution from an attacker-substituted SHA256SUMS file. 
+ // Null-prototype object blocks __proto__/constructor pollution from a tampered file. const result: Record = Object.create(null); for (const line of text.split("\n")) { const trimmed = line.trim(); From bf3a0a8c69e623af96aa53fc2645dcd785763876 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sun, 19 Apr 2026 22:38:28 +0530 Subject: [PATCH 11/15] =?UTF-8?q?fix:=20address=20CodeRabbit=20PR=20#5=20r?= =?UTF-8?q?eview=20=E2=80=94=206=20auto-update=20hardenings?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- CHANGELOG.md | 8 +++- src/lib/auto-update.ts | 45 ++++++++++++++------ src/lib/config.ts | 60 +++++++++++++++++++++----- src/lib/fs-utils.ts | 8 +++- src/lib/release.ts | 96 ++++++++++++++++++++++++++++++------------ 5 files changed, 165 insertions(+), 52 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f3af6d0..d662462 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,12 +35,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Asset downloads now stream the body through a manual reader that enforces `MAX_ASSET_BYTES` during buffering, not only after — a CDN omitting or falsifying `Content-Length` can no longer balloon RAM before the size check fires. Also propagates the fetch `AbortSignal` into the body read so a slowloris response can't stretch past the 600s timeout. - GitHub API fetches now send `User-Agent` (`worktree-cli/vX.Y.Z`), `Accept: application/vnd.github+json`, and `X-GitHub-Api-Version: 2022-11-28`. Setting `GITHUB_TOKEN` in the environment raises the rate limit from 60/hr (anonymous) to 5000/hr (authenticated). - `isAutoUpdateDisabled` now fails closed on broken `~/.worktreerc` — a typo'd config no longer silently re-enables auto-update against a user's explicit opt-out. +- Release-channel fetches now follow redirects manually, validating each hop's host against the allowlist *before* connecting. 
A malicious `Location:` injection on the first hop can no longer reach an arbitrary host, and the `GITHUB_TOKEN` is stripped on any cross-origin hop and not re-attached even if a later hop bounces back to the origin. +- Applying a staged update now also respects `AUTO_UPDATE=false` in `~/.worktreerc`. Previously, a user who opted out via config after a binary was already staged would still get the staged binary installed on the next launch; now the apply path fails closed on the same config read as the background scheduler. ### Fixed - macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** in the release workflow after stripping. Prior releases shipped unsigned binaries, which Apple Silicon (arm64) macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. - Startup hash verification of a staged binary now reads in 64 KB chunks instead of buffering the full binary in memory, keeping peak RSS flat on every launch. -- Asset-download phase now streams directly from `fetch` to disk via `Bun.write(destPath, response)` instead of round-tripping through an `ArrayBuffer`. +- Asset-download phase now streams chunks directly to disk via `fs.createWriteStream`, enforcing the byte cap as bytes arrive. Replaces a buffered-then-copied path that held the whole response (~50 MB for current binaries, up to the 200 MB cap) in memory twice before writing. +- `downloadAsset` now cleans up its own partial write on error so callers can treat the destination path as all-or-nothing. +- Background updater now throttles persistent structural failures on download and `chmod` (e.g., `EACCES`/`EROFS`), matching the existing sidecar write/rename branches. A root-owned install directory no longer burns the GitHub API quota on every launch. 
+- Background updater no longer spawns a blind detached child when the cache-log file descriptor can't be opened — the same unwritable-cache condition that short-circuits the throttle now also short-circuits the spawn. +- First-ever auto-update launch no longer prints a spurious "auto-update error log unwritable" warning caused by `statSync` returning `ENOENT` on a not-yet-created log. - A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle — the next launch retries so users on the lagging arch get updated promptly once the asset is uploaded. - `worktree auto-update` failures caused by a read-only binary directory (`EACCES`/`EPERM`/`EROFS` on the atomic swap) now clean up the staged artifacts and print a one-line `run "sudo worktree update"` hint on stderr instead of looping on every launch. - Non-permission rename failures (`ETXTBSY`, `EXDEV`, `ENOSPC`, `EIO`, `EBUSY`) now clean up the staged artifacts too, so a persistent rename failure can't pin a warning on every single launch. diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index d4b5dd0..fbac52c 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -1,8 +1,8 @@ import fs from "node:fs"; import os from "node:os"; import path from "node:path"; -import { shouldAutoUpdate } from "./config"; -import { classifyWriteError, safeUnlinkSync } from "./fs-utils"; +import { shouldAutoUpdate, shouldAutoUpdateSync } from "./config"; +import { classifyWriteError, isEnoent, safeUnlinkSync } from "./fs-utils"; import { compareVersions, computeSha256Sync, @@ -113,6 +113,7 @@ function rotateErrorLogIfOversized(logPath: string): void { const kept = lines.slice(-ERROR_LOG_KEEP_LINES).join("\n") + "\n"; fs.writeFileSync(logPath, kept); } catch (error) { + if (isEnoent(error)) return; warnLogFailureOnce( error instanceof Error ?
error.message : String(error) ); @@ -161,6 +162,11 @@ function isWithinGracePeriod(filePath: string): boolean { function applyPendingUpdate(): void { if (process.env.WORKTREE_NO_UPDATE === "1") return; + // Gate on config too: a staged binary must not apply if the user set AUTO_UPDATE=false after it was staged. + const configAllows = shouldAutoUpdateSync(function (msg) { + appendLastError("apply", msg); + }); + if (!configAllows) return; try { if (!isStandalone()) return; const stagedPath = getStagingPath(); @@ -338,27 +344,34 @@ async function scheduleBackgroundUpdateCheck(): Promise { // Only the child writes last-check on success, so a failed check never burns the 24h window. // Child stderr is funneled to last-error so background panics are visible on the next launch. - const { data: stderrFd } = tryCatchSync(function () { - ensureCacheDir(); - return fs.openSync(getLastErrorPath(), "a"); - }); + const { data: stderrFd, error: stderrOpenError } = tryCatchSync( + function () { + ensureCacheDir(); + return fs.openSync(getLastErrorPath(), "a"); + } + ); + if (stderrOpenError) { + // If we can't capture the child's stderr, don't spawn blind — the + // throttle cache lives in the same dir, so it's likely unwritable too. + hasCacheWriteFailed = true; + warnCacheWriteFailureOnce(stderrOpenError.message); + return; + } try { Bun.spawn({ cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], stdin: "ignore", stdout: "ignore", - stderr: stderrFd ?? "ignore", + stderr: stderrFd, // POSIX setsid(): survives terminal close so a slow download isn't SIGHUPed. detached: true, }).unref(); } finally { // Close parent's fd copy even if Bun.spawn throws synchronously (else fd leak per launch). 
- if (stderrFd !== null) { - const inheritedFd = stderrFd; - tryCatchSync(function () { - fs.closeSync(inheritedFd); - }); - } + const inheritedFd = stderrFd; + tryCatchSync(function () { + fs.closeSync(inheritedFd); + }); } } catch (error) { // Swallow errno-style only; let programmer bugs propagate. @@ -431,6 +444,9 @@ async function runBackgroundUpdateCheck(): Promise { if (dlError) { safeUnlinkSync(tmpPath); appendLastError("check", `download: ${dlError.message}`); + if (classifyWriteError(dlError) !== null) { + recordCheckCompleted(); + } return; } @@ -477,6 +493,9 @@ async function runBackgroundUpdateCheck(): Promise { if (chmodError) { safeUnlinkSync(tmpPath); appendLastError("check", `chmod: ${chmodError.message}`); + if (classifyWriteError(chmodError) !== null) { + recordCheckCompleted(); + } return; } diff --git a/src/lib/config.ts b/src/lib/config.ts index f21ca9a..4216410 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -1,4 +1,5 @@ import { z } from "zod"; +import fs from "node:fs"; import os from "node:os"; import path from "node:path"; import { DEFAULT_WORKTREE_DIR } from "./constants"; @@ -122,11 +123,29 @@ async function loadConfig(root: string): Promise { return readConfigFile(path.join(root, ".worktreerc"), "project"); } +type AutoUpdateOnError = (message: string) => void; + +function decideAutoUpdateFromContent( + content: string | null, + onError?: AutoUpdateOnError +): boolean { + if (content === null) return true; + const raw = parseConfigContent(content); + const { data: parsed, error: parseError } = tryCatchSync(function () { + return validateConfig(raw); + }); + if (parseError) { + onError?.( + `~/.worktreerc invalid; auto-update disabled until fixed: ${parseError.message}` + ); + return false; + } + return parsed.AUTO_UPDATE; +} + // Fail CLOSED on parse/read errors so a typo can't silently override opt-out. // `onError` threads diagnostics so users discover *why* auto-update is disabled. 
-async function shouldAutoUpdate( - onError?: (message: string) => void -): Promise { +async function shouldAutoUpdate(onError?: AutoUpdateOnError): Promise { const filePath = path.join(os.homedir(), ".worktreerc"); const file = Bun.file(filePath); // `file.exists()` can throw EACCES; guard to avoid crashing the scheduler. @@ -137,7 +156,7 @@ async function shouldAutoUpdate( onError?.(`shouldAutoUpdate exists: ${existsError.message}`); return false; } - if (!isExists) return true; + if (!isExists) return decideAutoUpdateFromContent(null, onError); const { data: content, error: readError } = await tryCatch(file.text()); if (readError) { onError?.( @@ -145,18 +164,37 @@ async function shouldAutoUpdate( ); return false; } - const raw = parseConfigContent(content); - const { data: parsed, error: parseError } = tryCatchSync(function () { - return validateConfig(raw); + return decideAutoUpdateFromContent(content, onError); +} + +// Sync twin used at startup by applyPendingUpdate (before brocli.run / top-level await). 
+function shouldAutoUpdateSync(onError?: AutoUpdateOnError): boolean { + const filePath = path.join(os.homedir(), ".worktreerc"); + const { data: isExists, error: existsError } = tryCatchSync(function () { + return fs.existsSync(filePath); }); - if (parseError) { + if (existsError) { + onError?.(`shouldAutoUpdate exists: ${existsError.message}`); + return false; + } + if (!isExists) return decideAutoUpdateFromContent(null, onError); + const { data: content, error: readError } = tryCatchSync(function () { + return fs.readFileSync(filePath, "utf8"); + }); + if (readError) { onError?.( - `~/.worktreerc invalid; auto-update disabled until fixed: ${parseError.message}` + `~/.worktreerc read failed; auto-update disabled until fixed: ${readError.message}` ); return false; } - return parsed.AUTO_UPDATE; + return decideAutoUpdateFromContent(content, onError); } -export { loadConfig, parseConfigContent, shouldAutoUpdate, validateConfig }; +export { + loadConfig, + parseConfigContent, + shouldAutoUpdate, + shouldAutoUpdateSync, + validateConfig, +}; export type { Config }; diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts index 8c4e4c1..755b781 100644 --- a/src/lib/fs-utils.ts +++ b/src/lib/fs-utils.ts @@ -68,5 +68,11 @@ function deepestMessage(error: unknown): string { return cur instanceof Error ? 
cur.message : String(cur); } -export { classifyWriteError, deepestMessage, safeUnlink, safeUnlinkSync }; +export { + classifyWriteError, + deepestMessage, + isEnoent, + safeUnlink, + safeUnlinkSync, +}; export type { WriteErrorCode }; diff --git a/src/lib/release.ts b/src/lib/release.ts index 4e28c48..a712d58 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -1,4 +1,5 @@ import fs from "node:fs"; +import { once } from "node:events"; import { timingSafeEqual } from "node:crypto"; import { tryCatch, tryCatchSync } from "./try-catch"; import pkg from "../../package.json"; @@ -44,6 +45,7 @@ const DEFAULT_META_TIMEOUT_MS = 30_000; const DEFAULT_ASSET_TIMEOUT_MS = 600_000; // 4× headroom over current ~50 MB binary; rejects oversized CDN responses pre-verification. const MAX_ASSET_BYTES = 200 * 1024 * 1024; +const MAX_REDIRECT_HOPS = 5; type ReleaseAsset = { name: string; @@ -151,17 +153,48 @@ async function withTimeout( controller.abort(); }, timeoutMs); try { - const response = await fetch(url, { - signal: controller.signal, - headers: buildGitHubHeaders(), - }); - // Re-validate post-redirect host; initial pin is bypassable via an injected `Location:` on hop 1. - if (response.url && !isAllowedReleaseHost(response.url)) { - throw new Error( - `Refused redirect to disallowed host: ${JSON.stringify(response.url.slice(0, 120))}` - ); + // Follow redirects manually so each hop's host is validated BEFORE we connect to it — + // default `redirect: "follow"` connects to intermediate hosts and only exposes the final URL. + const originHost = new URL(url).host; + let currentUrl = url; + // Once Authorization has been stripped on any cross-origin hop, never re-add — + // prevents a redirect chain that bounces back to the origin host from re-attaching the token. 
+ let authStripped = false; + for (let hop = 0; hop <= MAX_REDIRECT_HOPS; hop++) { + const headers = buildGitHubHeaders(); + if (authStripped || new URL(currentUrl).host !== originHost) { + delete headers.Authorization; + authStripped = true; + } + const response = await fetch(currentUrl, { + signal: controller.signal, + headers, + redirect: "manual", + }); + if (response.status >= 300 && response.status < 400) { + const location = response.headers.get("location"); + // Drain the redirect body so keep-alive sockets don't pin across hops. + await tryCatch(response.body?.cancel() ?? Promise.resolve()); + if (!location) { + throw new Error( + `Redirect ${response.status} without Location header from ${new URL(currentUrl).host}` + ); + } + const next = new URL(location, currentUrl).toString(); + if (!isAllowedReleaseHost(next)) { + // Log host only, not the full URL — signed CDN URLs can carry tokens in the query string. + throw new Error( + `Refused redirect to disallowed host: ${new URL(next).host}` + ); + } + currentUrl = next; + continue; + } + return await handler(response); } - return await handler(response); + throw new Error( + `Exceeded ${MAX_REDIRECT_HOPS} redirects from ${new URL(url).host}` + ); } finally { clearTimeout(timer); } @@ -235,10 +268,12 @@ async function downloadAsset( `Download ${asset.name} refused: empty response body` ); } - // Manual reader: bail out before buffering past the cap when Content-Length is absent/forged. + // Stream chunks directly to disk, enforcing the cap as bytes arrive. + // Avoids the ~2× memory peak of buffering all chunks then copying into one final Uint8Array. 
const reader = response.body.getReader(); - const chunks: Uint8Array[] = []; + const writer = fs.createWriteStream(destPath, { flags: "w" }); let bytesReceived = 0; + let writerClosed = false; try { while (true) { const { done, value } = await reader.read(); @@ -250,30 +285,39 @@ async function downloadAsset( `Download ${asset.name} exceeded cap: ${bytesReceived} bytes > ${MAX_ASSET_BYTES} bytes` ); } - chunks.push(value); + if (!writer.write(value)) { + await once(writer, "drain"); + } + } + if (bytesReceived === 0) { + // Explicit empty-body error; else SHA verify later reports a misleading mismatch. + throw new Error( + `Download ${asset.name} refused: empty response body` + ); } + await new Promise(function (resolve, reject) { + writer.once("error", reject); + writer.end(function () { + resolve(); + }); + }); + writerClosed = true; } finally { tryCatchSync(function () { reader.releaseLock(); }); + if (!writerClosed) { + writer.destroy(); + } } - if (bytesReceived === 0) { - // Explicit empty-body error; else SHA verify later reports a misleading mismatch. - throw new Error( - `Download ${asset.name} refused: empty response body` - ); - } - const buffer = new Uint8Array(bytesReceived); - let offset = 0; - for (const chunk of chunks) { - buffer.set(chunk, offset); - offset += chunk.byteLength; - } - await Bun.write(destPath, buffer); } ) ); if (error) { + // Clean up our own partial write so callers don't have to do it defensively. 
+ tryCatchSync(function () { + fs.unlinkSync(destPath); + }); throw new Error(`Failed to download ${asset.name}: ${error.message}`, { cause: error, }); From a9d7ca4679798b6ecf4aa96975f4f3e6f0eb3bc9 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Sun, 19 Apr 2026 23:05:13 +0530 Subject: [PATCH 12/15] docs(changelog): add [Unreleased] and rewrite 1.3.0 user-facing --- CHANGELOG.md | 75 ++++++++++++++++++++++------------------------------ 1 file changed, 31 insertions(+), 44 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d662462..b621175 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,59 +14,46 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed -- Trimmed verbose auto-update implementation comments; the hardening rationale lives in git history. -- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) to address GitHub's deprecation of Node.js 20 actions. -- Release binaries now built with `--minify --sourcemap=none` and stripped of debug symbols (`llvm-strip` on Linux, `strip` on macOS). Binary sizes are slightly smaller; functional behavior unchanged. -- Release workflow smoke-tests each built binary via `--version` before publishing to catch regressions. -- `AUTO_UPDATE` set in a project `.worktreerc` now warns once that it is ignored — the flag only takes effect in `~/.worktreerc`, matching the README. -- `readConfigFile` is now fail-open on parse errors (warn + return defaults) for both project and global configs, instead of asymmetrically swallowing for project / propagating for global. -- SHA256SUMS verification is centralized in a new `verifyAssetAgainstSums` helper; the foreground `update` command and the background check share the same discriminated-union result contract. +- Release binaries are slightly smaller (minified, debug symbols stripped). No behavioural change. 
+- Release workflow smoke-tests each built binary (`--version`) before publishing so a broken build can't reach users. +- `AUTO_UPDATE` in a project `.worktreerc` now warns once that it is ignored — only `~/.worktreerc` is honoured (matches the README). +- Global and project config files now behave symmetrically on parse errors: both warn and fall back to defaults. +- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) following GitHub's deprecation of Node.js 20 actions. ### Security -- SHA256SUMS parser now allocates its result map with `Object.create(null)` to block `__proto__`/`constructor`/`prototype` pollution from a tampered sums file. -- SHA256 hash comparison now uses `node:crypto.timingSafeEqual` (C-level constant-time) instead of a userland XOR loop that V8/JSC may short-circuit. -- Release asset downloads now reject responses whose declared `Content-Length` exceeds 200 MB, capping the blast radius of a malicious CDN before SHA verification fires. -- The staging tmp path is now `safeUnlinkSync`-ed before each download — best-effort defense against a planted symlink in a shared install directory. (Note: this is *not* race-free: an attacker who can write to the binary directory could re-plant the symlink between the unlink and the subsequent write. Closing that race fully would require an `O_EXCL | O_NOFOLLOW` open. Same treatment applies to the sidecar tmp path.) -- GitHub release workflow now creates releases as **draft**, uploads all files (binaries + `SHA256SUMS`), and flips to public in a subsequent step — eliminating the window where `releases/latest` returned a non-draft release with binaries attached but `SHA256SUMS` still uploading (which would let clients fall through to the TLS-only "not-published" install path). -- `SHA256SUMS` parser now rejects duplicate filename entries (defense-in-depth against tampered mirrors). 
-- `release.version` is now validated against the sidecar regex before being written, matching the reader's strictness — closes a writer/reader asymmetry that would be exploitable if the parser's rules ever loosen. -- Concurrent-launch race on staged-update commit: the window between "sidecar committed" and "binary committed" is now protected by a 60-second mtime grace period. `applyPendingUpdate` no longer reaps an apparently-orphan file that is fresh enough to be a concurrent producer still mid-commit, so simultaneous terminal launches no longer silently discard a correctly hash-verified staged binary. -- Asset downloads now stream the body through a manual reader that enforces `MAX_ASSET_BYTES` during buffering, not only after — a CDN omitting or falsifying `Content-Length` can no longer balloon RAM before the size check fires. Also propagates the fetch `AbortSignal` into the body read so a slowloris response can't stretch past the 600s timeout. -- GitHub API fetches now send `User-Agent` (`worktree-cli/vX.Y.Z`), `Accept: application/vnd.github+json`, and `X-GitHub-Api-Version: 2022-11-28`. Setting `GITHUB_TOKEN` in the environment raises the rate limit from 60/hr (anonymous) to 5000/hr (authenticated). -- `isAutoUpdateDisabled` now fails closed on broken `~/.worktreerc` — a typo'd config no longer silently re-enables auto-update against a user's explicit opt-out. -- Release-channel fetches now follow redirects manually, validating each hop's host against the allowlist *before* connecting. A malicious `Location:` injection on the first hop can no longer reach an arbitrary host, and the `GITHUB_TOKEN` is stripped on any cross-origin hop and not re-attached even if a later hop bounces back to the origin. -- Applying a staged update now also respects `AUTO_UPDATE=false` in `~/.worktreerc`. 
Previously, a user who opted out via config after a binary was already staged would still get the staged binary installed on the next launch; now the apply path fails closed on the same config read as the background scheduler. +- Release downloads are verified against `SHA256SUMS` using a constant-time hash comparison before being made executable. +- Release-channel fetches are restricted to an allowlist of GitHub-owned hosts, validated on every redirect hop **before** the runtime connects. A malicious `Location:` injection on the first hop can no longer reach an arbitrary host. `GITHUB_TOKEN` is stripped on any cross-origin hop and not re-attached if the chain bounces back to the origin. +- Release assets with a declared `Content-Length` over 200 MB are rejected outright, and byte-counts are enforced as the body streams in — a CDN omitting or forging `Content-Length` cannot exhaust memory before the size check fires. The download timeout is honoured throughout the body read so a slowloris response can't stretch past it. +- The staging tmp path is pre-unlinked before each download as a best-effort defense against a planted symlink in a shared install directory. (Not race-free — a writable install directory still allows re-planting between the unlink and the subsequent write. Closing that race fully would require `O_EXCL | O_NOFOLLOW`. Same treatment applies to the sidecar tmp path.) +- GitHub releases are now published as **draft**, uploaded with all files (binaries + `SHA256SUMS`), and flipped to public in a subsequent step — eliminating the window where `releases/latest` exposed a public release with binaries but no sums, forcing clients onto the TLS-only install path. +- `SHA256SUMS` parser rejects duplicate filename entries and is immune to prototype-pollution from a tampered sums file. 
+- Release tag and staged version strings are validated against the same strict regex at the writer and the reader, so a crafted tag can't propagate into paths, logs, or sidecar metadata. +- Concurrent launches no longer discard a correctly-staged update mid-commit: a 60-second mtime grace window distinguishes a concurrent producer from a real orphan. +- Auto-update now fails **closed** on a malformed `~/.worktreerc` — a typo can no longer silently re-enable auto-update against an explicit opt-out. +- Applying a staged update now also respects `AUTO_UPDATE=false` in `~/.worktreerc`. Previously, a user who opted out in config after a binary was already staged would still get the staged binary installed on the next launch. +- GitHub API fetches now send a proper `User-Agent` (`worktree-cli/vX.Y.Z`), `Accept: application/vnd.github+json`, and `X-GitHub-Api-Version` header. Setting `GITHUB_TOKEN` in the environment raises the rate limit from 60/hr (anonymous) to 5000/hr (authenticated). ### Fixed -- macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** in the release workflow after stripping. Prior releases shipped unsigned binaries, which Apple Silicon (arm64) macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. -- Startup hash verification of a staged binary now reads in 64 KB chunks instead of buffering the full binary in memory, keeping peak RSS flat on every launch. -- Asset-download phase now streams chunks directly to disk via `fs.createWriteStream`, enforcing the byte cap as bytes arrive. Replaces a buffered-then-copied path that held the whole response (~50 MB for current binaries, up to the 200 MB cap) in memory twice before writing. -- `downloadAsset` now cleans up its own partial write on error so callers can treat the destination path as all-or-nothing. 
-- Background updater now throttles persistent structural failures on download and `chmod` (e.g., `EACCES`/`EROFS`), matching the existing sidecar write/rename branches. A root-owned install directory no longer burns the GitHub API quota on every launch. -- Background updater no longer spawns a blind detached child when the cache-log file descriptor can't be opened — the same unwritable-cache condition that short-circuits the throttle now also short-circuits the spawn. -- First-ever auto-update launch no longer prints a spurious "auto-update error log unwritable" warning caused by `statSync` returning `ENOENT` on a not-yet-created log. -- A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle — the next launch retries so users on the lagging arch get updated promptly once the asset is uploaded. -- `worktree auto-update` failures caused by a read-only binary directory (`EACCES`/`EPERM`/`EROFS` on the atomic swap) now clean up the staged artifacts and print a one-line `run "sudo worktree update"` hint on stderr instead of looping on every launch. -- Non-permission rename failures (`ETXTBSY`, `EXDEV`, `ENOSPC`, `EIO`, `EBUSY`) now cleanup the staged artifacts too, so a persistent rename failure can't pin a warning on every single launch. -- Orphan `.worktree.next.meta` sidecar left by an interrupted background stage is now cleaned up on the next launch instead of lingering indefinitely. -- Background check's 24h throttle no longer thrashes if the cache file's `.exists()` probe errors out — the error is logged and the check proceeds as if no check has run, matching the symmetric `.text()` read-error behavior. -- `worktree update` now recognises `EACCES`/`EPERM`/`EROFS` on the download and rename steps (previously only `EACCES`) and surfaces the deepest `Error.cause` message so users see the real errno (`ENOTFOUND`, `ECONNRESET`, etc.) instead of the generic wrapper. 
-- Unlink errors in cleanup paths now distinguish `ENOENT` (silent, expected) from real failures (`EACCES`, `EPERM`, etc.) — real failures emit a dim stderr warning instead of being silently swallowed. -- Probe-timeout failures now surface as `timed out after 2000ms` instead of the opaque `exit null`. -- Network errors from `fetchLatestRelease`/`downloadAsset` now preserve the underlying errno chain via `Error.cause`, so callers can classify failures accurately. -- The background update child is now spawned with `detached: true` (POSIX `setsid()`), so a slow download isn't cut short when the user's shell/terminal exits — the child finishes the check in its own session. -- `applyPendingUpdate`'s outer catch now rethrows non-errno errors so programmer bugs surface with a stack trace via Bun's unhandled handler instead of being silently reduced to a dim warning. Matches the discipline in `scheduleBackgroundUpdateCheck`. -- The detached background-check child's stderr is now appended to `~/.cache/worktree-cli/last-error` (previously `"ignore"`). Unhandled throws from `runBackgroundUpdateCheck` now surface a full stack trace on the next foreground launch instead of failing silently. The internal command handler also wraps the run in a top-level try/catch that appends a panic trace before exiting non-zero. -- Parent's stderr file descriptor is now closed in a `finally` block, so a synchronous `Bun.spawn` failure (`EMFILE`, `EPERM`, `EAGAIN`) no longer leaks the fd on every foreground launch. -- Empty catches in the log-write helpers (`appendLastError`, `rotateErrorLogIfOversized`) now emit a one-shot dim stderr warning if `~/.cache/worktree-cli/last-error` itself is unwritable — diagnostics-of-diagnostics can no longer disappear silently. -- `fetchSha256Sums` error results now include a `retryable` boolean distinguishing transient (5xx, network) failures from permanent (4xx) ones; callers can use the hint to choose retry semantics without re-inspecting status strings. 
+- macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** after stripping. Prior releases shipped unsigned binaries, which Apple Silicon macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. +- Auto-update no longer buffers the full binary in memory during download or verification — peak memory stays flat regardless of binary size. +- A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle; the next launch retries so users on the lagging arch get updated once the asset is uploaded. +- `worktree update` and the background auto-updater now recognise the full set of write-permission errors on download, `chmod`, and rename (not just `EACCES`), clean up any partial staged files, and print a one-line `run "sudo worktree update"` hint instead of looping on every launch. +- Persistent structural failures (read-only install directory, busy binary, disk full, filesystem boundary) now throttle the background check so a stuck install directory no longer burns the GitHub API quota on every launch. +- Orphan staging artifacts left by an interrupted background update are cleaned up on the next launch instead of lingering indefinitely. +- First-ever auto-update launch no longer prints a spurious "error log unwritable" warning on a not-yet-created log file. +- Background updater no longer spawns a blind detached child when the cache log can't be opened — the same condition that short-circuits the throttle now also short-circuits the spawn, and the parent's copy of the log fd is released even if the spawn itself throws synchronously. +- Probe-timeout failures on a freshly-downloaded binary now surface as `timed out after 2000ms` instead of the opaque `exit null`. +- Network errors during update checks now preserve the underlying errno (`ENOTFOUND`, `ECONNRESET`, `ETIMEDOUT`, etc.) instead of being hidden behind a generic wrapper. 
+- The background update child is detached (POSIX `setsid`) so a slow download isn't killed when the user's shell or terminal exits. +- Unhandled throws from the background update path now write a full stack trace to `~/.cache/worktree-cli/last-error` and surface on the next foreground launch instead of failing silently. +- Unlink errors during cleanup distinguish "already gone" from real failures — only real failures emit a warning. ### Tests -- Added coverage for `verifyAssetAgainstSums` across all six result shapes (legacy, normal, 5xx retryable, 4xx permanent, missing-entry, hash-mismatch, hash-io-error) via stubbed `fetch` and a precomputed-hash asset file — pins the tri-state safety contract against future refactors. -- Added coverage for `parseSha256Sums` duplicate-filename rejection (defense-in-depth against tampered mirrors). +- Added coverage for the SHA256 verification flow across all result shapes (legacy, normal, transient, permanent, missing-entry, hash-mismatch, hash-io-error) using stubbed `fetch` and a precomputed-hash asset file — pins the safety contract against future refactors. +- Added coverage for the `SHA256SUMS` parser's duplicate-entry rejection. 
## [1.2.0] - 2026-04-17 From d99ca77671c91b42caf1ff64daef8e47f2c6d710 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Mon, 20 Apr 2026 19:32:50 +0530 Subject: [PATCH 13/15] fix: address 15 self-review findings on PR #5 --- .github/workflows/release.yml | 6 +-- CHANGELOG.md | 21 +++++++++ src/commands/update.ts | 22 ++++++++- src/lib/auto-update.ts | 67 +++++++++++++++++++++++---- src/lib/config.ts | 30 +++++++----- src/lib/release.test.ts | 45 ++++++++++++++---- src/lib/release.ts | 86 +++++++++++++++++++++++++++++------ 7 files changed, 226 insertions(+), 51 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1da5a1b..51fab05 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -102,9 +102,9 @@ jobs: # would silently produce an empty SHA256SUMS that bricks # every client's auto-update install. count=$(ls -1 worktree-*.sha256 2>/dev/null | wc -l | tr -d ' ') - expected=4 - if [ "$count" != "$expected" ]; then - echo "expected $expected .sha256 files, got $count" >&2 + binaries=$(ls -1 worktree-* 2>/dev/null | grep -v '\.sha256$' | wc -l | tr -d ' ') + if [ "$count" -lt 1 ] || [ "$count" != "$binaries" ]; then + echo "sha256 count ($count) does not match binary count ($binaries) — abort" >&2 exit 1 fi cat worktree-*.sha256 > SHA256SUMS diff --git a/CHANGELOG.md b/CHANGELOG.md index b621175..7467b87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,27 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [Unreleased] + +### Changed + +- Auto-update now bumps its 24-hour throttle when a release is genuinely unusable on the current platform (probe failure), when GitHub is unreachable, or when a download fails for any reason. 
Previously, every CLI invocation re-downloaded ~50 MB and re-hit the GitHub API, which could trip the 60-requests-per-hour anonymous rate limit on a heavy day.
+- Foreground `worktree update` now smoke-tests the downloaded binary (`--version`) before atomically replacing the installed binary. A SHA256-valid release that won't run on the current machine (libc/codesign/macOS-version mismatch) is now refused with a clear error instead of leaving the user with a broken `worktree`.
+- A `SHA256SUMS` file containing **duplicate entries** — the canonical signature of supply-chain tampering — now triggers a loud red `SECURITY ALERT` in `worktree update` and a `TAMPER:` prefix in the background error log, instead of being reported as a generic "could not be fetched" outage.
+- Project-scope `AUTO_UPDATE=...` is still ignored (matches existing behaviour) but the warning now also reports whether the value is a *valid* boolean-like, so a user moving the line to `~/.worktreerc` later already knows whether it would have taken effect.
+- Version comparison now follows SemVer 2.0 §11 for prerelease ordering: `1.2.3-rc.10` is now correctly **greater than** `1.2.3-rc.2`. Previously the comparator used lexicographic string ordering, which would have prevented users on `rc.2` from ever auto-updating to `rc.10`.
+
+### Security
+
+- Auto-update tmp paths now use `crypto.randomBytes(8)` instead of `process.pid`, removing a predictable-filename primitive that a co-tenant on a group-writable install dir could pre-plant a symlink at. Pre-unlink remains as the primary defense.
+- Stage detection no longer silently fails on `EACCES` of the binary directory: `existsSync` was masking permission errors as "no stage". Now logs the diagnostic and bails without destructive cleanup, so a transient permission glitch can't destroy a peer process's mid-commit stage either.
+ +### Fixed + +- `release.ts` download path no longer silently swallows three classes of error (writer post-finish flush errors, reader `releaseLock` failures, partial-write cleanup failures). Errors now route through an `onError` callback that the auto-updater logs to `~/.cache/worktree-cli/last-error`. +- Removed an off-by-one in the redirect loop (`<=` instead of `<`) — the loop was allowing 6 hops while the error message claimed a limit of 5. +- Aggregated SHA256SUMS in the release workflow now compares against the actual binary count instead of a hardcoded `expected=4` — adding or removing a build target no longer requires editing two places. + ## [1.3.0] - 2026-04-17 ### Added diff --git a/src/commands/update.ts b/src/commands/update.ts index 5b0fc55..a4c57f1 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -19,6 +19,7 @@ import { } from "../lib/release"; import { cleanupStagedArtifacts, + probeBinaryRuns, recordCheckCompleted, } from "../lib/auto-update"; @@ -108,7 +109,12 @@ export const updateCommand = command({ ); if (!verify.ok) { await safeUnlink(tmpPath); - if (verify.kind === "sums-error") { + if (verify.kind === "sums-tamper") { + const { RED, BOLD, RESET } = COLORS; + console.error( + `${RED}${BOLD}SECURITY ALERT${RESET}${RED}: SHA256SUMS for ${release.tag} is malformed (${verify.reason}). This is the canonical signature of supply-chain tampering. Refusing to install.${RESET}` + ); + } else if (verify.kind === "sums-error") { printError( `SHA256SUMS is published but could not be fetched: ${verify.reason}. 
Refusing to install.` ); @@ -139,7 +145,19 @@ export const updateCommand = command({ if (chmodError) { await safeUnlink(tmpPath); printError( - `Failed to mark binary executable: ${chmodError.message}` + `Failed to mark binary executable: ${deepestMessage(chmodError)}` + ); + process.exit(EXIT_CODES.ERROR); + } + + // Probe BEFORE rename: a SHA-valid binary that won't run on this machine + // (libc/codesign/macOS-version mismatch) would otherwise replace the user's + // working binary with one that segfaults on next launch. + const probe = probeBinaryRuns(tmpPath); + if (!probe.ok) { + await safeUnlink(tmpPath); + printError( + `The new release v${release.version} is not runnable on this machine (${probe.reason}). Please file an issue at https://github.com/bhagyamudgal/worktree-cli/issues.` ); process.exit(EXIT_CODES.ERROR); } diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index fbac52c..98c8f4b 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -1,3 +1,4 @@ +import { randomBytes } from "node:crypto"; import fs from "node:fs"; import os from "node:os"; import path from "node:path"; @@ -152,10 +153,32 @@ function cleanupStagedArtifacts(): void { safeUnlinkSync(getMetaSidecarPath()); } +// true=exists, false=ENOENT, null=non-ENOENT error logged; caller must bail without +// destructive cleanup (don't unlink anything we can't stat reliably). 
+function checkExists( + filePath: string, + kind: "apply" | "check" +): boolean | null { + const { error } = tryCatchSync(function () { + return fs.statSync(filePath); + }); + if (!error) return true; + if (isEnoent(error)) return false; + appendLastError(kind, `stat ${filePath}: ${error.message}`); + return null; +} + function isWithinGracePeriod(filePath: string): boolean { - const { data: stat } = tryCatchSync(function () { + const { data: stat, error } = tryCatchSync(function () { return fs.statSync(filePath); }); + if (error) { + if (isEnoent(error)) return false; + // Non-ENOENT (EACCES/EIO) — surface the diagnostic and be conservative: + // assume in-grace so we never destroy a peer's stage on incomplete info. + appendLastError("apply", `grace-stat: ${error.message}`); + return true; + } if (!stat) return false; return Date.now() - stat.mtimeMs < STAGING_ORPHAN_GRACE_MS; } @@ -171,14 +194,18 @@ function applyPendingUpdate(): void { if (!isStandalone()) return; const stagedPath = getStagingPath(); const metaPath = getMetaSidecarPath(); - if (!fs.existsSync(stagedPath)) { + const stagedExists = checkExists(stagedPath, "apply"); + if (stagedExists === null) return; + if (!stagedExists) { // Within grace window, assume concurrent producer; past it, reap orphan. if (isWithinGracePeriod(metaPath)) return; safeUnlinkSync(metaPath); return; } - if (!fs.existsSync(metaPath)) { + const metaExists = checkExists(metaPath, "apply"); + if (metaExists === null) return; + if (!metaExists) { if (isWithinGracePeriod(stagedPath)) return; safeUnlinkSync(stagedPath); appendLastError( @@ -412,6 +439,9 @@ async function runBackgroundUpdateCheck(): Promise { "check", `fetchLatestRelease: ${releaseError?.message ?? "unknown"}` ); + // Bump throttle on transient network failures so we don't burn the GitHub + // API quota retrying on every CLI invocation while api.github.com is down. 
+ recordCheckCompleted(); return; } @@ -435,18 +465,22 @@ async function runBackgroundUpdateCheck(): Promise { const binaryDir = getBinaryDir(); const tmpPath = path.join( binaryDir, - `${STAGING_FILENAME}.${process.pid}.tmp` + `${STAGING_FILENAME}.${randomBytes(8).toString("hex")}.tmp` ); // Pre-unlink to prevent the write from following a planted symlink. safeUnlinkSync(tmpPath); - const { error: dlError } = await tryCatch(downloadAsset(asset, tmpPath)); + const { error: dlError } = await tryCatch( + downloadAsset(asset, tmpPath, undefined, function (op, downloadErr) { + appendLastError("check", `${op}: ${downloadErr.message}`); + }) + ); if (dlError) { safeUnlinkSync(tmpPath); appendLastError("check", `download: ${dlError.message}`); - if (classifyWriteError(dlError) !== null) { - recordCheckCompleted(); - } + // Bump throttle on every download failure (write OR network) so a persistent + // outage doesn't trigger a re-download cycle on every CLI invocation. + recordCheckCompleted(); return; } @@ -458,7 +492,15 @@ async function runBackgroundUpdateCheck(): Promise { ); if (!verify.ok) { safeUnlinkSync(tmpPath); - if (verify.kind === "sums-error") { + if (verify.kind === "sums-tamper") { + // Loud TAMPER: prefix in the log so a curious user grepping last-error + // can distinguish supply-chain integrity hits from generic outages. 
+ appendLastError( + "check", + `TAMPER: SHA256SUMS for ${assetName} is malformed (${verify.reason}) — refusing to stage` + ); + recordCheckCompleted(); + } else if (verify.kind === "sums-error") { appendLastError( "check", `SHA256SUMS fetch failed — refusing to stage: ${verify.reason}` @@ -503,6 +545,10 @@ async function runBackgroundUpdateCheck(): Promise { if (!probe.ok) { safeUnlinkSync(tmpPath); appendLastError("check", `probe: ${probe.reason}`); + // Treat probe failure as structural for THIS release — bumping throttle + // prevents a re-download/re-probe loop burning ~50MB on every CLI invocation + // until upstream republishes a fixed binary. + recordCheckCompleted(); return; } @@ -534,7 +580,7 @@ async function runBackgroundUpdateCheck(): Promise { const metaTmpPath = path.join( binaryDir, - `${META_SIDECAR_FILENAME}.${process.pid}.tmp` + `${META_SIDECAR_FILENAME}.${randomBytes(8).toString("hex")}.tmp` ); const sidecarContent = formatSidecar({ version: release.version, @@ -642,6 +688,7 @@ export { appendBackgroundCheckPanic, applyPendingUpdate, cleanupStagedArtifacts, + probeBinaryRuns, recordCheckCompleted, scheduleBackgroundUpdateCheck, runBackgroundUpdateCheck, diff --git a/src/lib/config.ts b/src/lib/config.ts index 4216410..720ed03 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -81,6 +81,12 @@ function warnOnce(filePath: string, message: string): void { type ConfigScope = "project" | "global"; +const EXISTS_ERROR_PREFIX = "shouldAutoUpdate exists"; +const READ_ERROR_PREFIX = + "~/.worktreerc read failed; auto-update disabled until fixed"; +const PARSE_ERROR_PREFIX = + "~/.worktreerc invalid; auto-update disabled until fixed"; + async function readConfigFile( filePath: string, scope: ConfigScope @@ -99,9 +105,15 @@ async function readConfigFile( } const raw = parseConfigContent(content); if (scope === "project" && "AUTO_UPDATE" in raw) { + // Validate the value too so a user moving the line to ~/.worktreerc later + // already knows whether 
they wrote a valid boolean-like or garbage. + const probe = booleanLike.safeParse(raw.AUTO_UPDATE); + const validityNote = probe.success + ? `; the value "${raw.AUTO_UPDATE}" parses as a boolean (would take effect once moved)` + : `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probe.error.issues[0]?.message ?? "unknown"})`; warnOnce( `${filePath}:AUTO_UPDATE`, - `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead.` + `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead${validityNote}.` ); // Strip pre-validate so `AUTO_UPDATE=junk` doesn't discard valid sibling keys. delete raw.AUTO_UPDATE; @@ -135,9 +147,7 @@ function decideAutoUpdateFromContent( return validateConfig(raw); }); if (parseError) { - onError?.( - `~/.worktreerc invalid; auto-update disabled until fixed: ${parseError.message}` - ); + onError?.(`${PARSE_ERROR_PREFIX}: ${parseError.message}`); return false; } return parsed.AUTO_UPDATE; @@ -153,15 +163,13 @@ async function shouldAutoUpdate(onError?: AutoUpdateOnError): Promise { file.exists() ); if (existsError) { - onError?.(`shouldAutoUpdate exists: ${existsError.message}`); + onError?.(`${EXISTS_ERROR_PREFIX}: ${existsError.message}`); return false; } if (!isExists) return decideAutoUpdateFromContent(null, onError); const { data: content, error: readError } = await tryCatch(file.text()); if (readError) { - onError?.( - `~/.worktreerc read failed; auto-update disabled until fixed: ${readError.message}` - ); + onError?.(`${READ_ERROR_PREFIX}: ${readError.message}`); return false; } return decideAutoUpdateFromContent(content, onError); @@ -174,7 +182,7 @@ function shouldAutoUpdateSync(onError?: AutoUpdateOnError): boolean { return fs.existsSync(filePath); }); if (existsError) { - onError?.(`shouldAutoUpdate exists: ${existsError.message}`); + onError?.(`${EXISTS_ERROR_PREFIX}: ${existsError.message}`); return false; } if (!isExists) return 
decideAutoUpdateFromContent(null, onError); @@ -182,9 +190,7 @@ function shouldAutoUpdateSync(onError?: AutoUpdateOnError): boolean { return fs.readFileSync(filePath, "utf8"); }); if (readError) { - onError?.( - `~/.worktreerc read failed; auto-update disabled until fixed: ${readError.message}` - ); + onError?.(`${READ_ERROR_PREFIX}: ${readError.message}`); return false; } return decideAutoUpdateFromContent(content, onError); diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts index 96a99f9..5d02a2a 100644 --- a/src/lib/release.test.ts +++ b/src/lib/release.test.ts @@ -38,17 +38,41 @@ describe("compareVersions", () => { expect(compareVersions("1.2.3-rc.1", "1.2.4")).toBeLessThan(0); }); - it("orders prerelease tags lexicographically (NOT strict SemVer)", () => { + it("orders prerelease tags per SemVer 2.0 §11", () => { + // Numeric within-identifier comparison. expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBeLessThan(0); + // Lex on string identifiers. expect(compareVersions("1.2.3-rc.1", "1.2.3-beta.1")).toBeGreaterThan( 0 ); + // Equal prereleases. expect(compareVersions("1.2.3-alpha", "1.2.3-alpha")).toBe(0); }); - it("compares numeric prerelease segments as strings (rc.10 < rc.2)", () => { - // Non-strict SemVer: lexicographic prerelease ordering (rc.10 < rc.2). - expect(compareVersions("1.2.3-rc.10", "1.2.3-rc.2")).toBeLessThan(0); + it("compares numeric prerelease identifiers numerically (SemVer 2.0)", () => { + // SemVer 2.0 §11.4.1: numeric identifiers compare numerically — rc.10 > rc.2. 
+ expect(compareVersions("1.2.3-rc.10", "1.2.3-rc.2")).toBeGreaterThan(0); + expect(compareVersions("1.2.3-rc.2", "1.2.3-rc.10")).toBeLessThan(0); + expect(compareVersions("1.2.3-alpha.9", "1.2.3-alpha.11")).toBeLessThan( + 0 + ); + }); + + it("treats numeric identifiers as lower precedence than string identifiers", () => { + // SemVer 2.0 §11.4.3: numeric identifiers always have lower precedence than + // alphanumeric identifiers within the same prerelease position. + expect( + compareVersions("1.0.0-alpha.1", "1.0.0-alpha.beta") + ).toBeLessThan(0); + }); + + it("longer prerelease wins when all preceding identifiers equal", () => { + // SemVer 2.0 §11.4.4: a larger set of fields has higher precedence than + // a smaller set, when all preceding identifiers are equal. + expect(compareVersions("1.0.0-alpha", "1.0.0-alpha.1")).toBeLessThan(0); + expect( + compareVersions("1.0.0-alpha.beta", "1.0.0-alpha.beta.1") + ).toBeLessThan(0); }); it("never returns NaN for garbage input", () => { @@ -375,7 +399,7 @@ describe("verifyAssetAgainstSums", () => { expect(result.retryable).toBe(true); }); - it("returns non-retryable sums-error when SHA256SUMS contains duplicate entry (tampering)", async () => { + it("returns sums-tamper kind when SHA256SUMS contains duplicate entry (tampering)", async () => { const dupeBody = [ "a".repeat(64) + " " + ASSET_NAME, "b".repeat(64) + " " + ASSET_NAME, @@ -389,11 +413,12 @@ describe("verifyAssetAgainstSums", () => { ]); expect(result.ok).toBe(false); if (result.ok) return; - expect(result.kind).toBe("sums-error"); - if (result.kind !== "sums-error") return; - // Duplicate entries are tampering, not transient — must NOT retry. - expect(result.retryable).toBe(false); - expect(result.reason).toMatch(/parse|Duplicate/); + // Distinct kind from "sums-error" so foreground/background paths can + // escalate (loud red error / TAMPER: log prefix) instead of treating + // tampering the same as a transient outage. 
+ expect(result.kind).toBe("sums-tamper"); + if (result.kind !== "sums-tamper") return; + expect(result.reason).toMatch(/Duplicate/); }); it("returns missing-entry when SHA256SUMS exists but has no row for asset", async () => { diff --git a/src/lib/release.ts b/src/lib/release.ts index a712d58..627eb41 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -1,6 +1,7 @@ import fs from "node:fs"; import { once } from "node:events"; import { timingSafeEqual } from "node:crypto"; +import { isEnoent } from "./fs-utils"; import { tryCatch, tryCatchSync } from "./try-catch"; import pkg from "../../package.json"; @@ -102,12 +103,41 @@ function parseVersion(v: string): ParsedVersion { }; } +// SemVer 2.0 §11: dot-separated identifiers compared pairwise. +// Numeric identifiers compare numerically; string identifiers compare lexically; +// numeric identifiers always have lower precedence than strings; longer field +// wins iff all preceding identifiers are equal. +function comparePrereleaseIdentifier(a: string, b: string): number { + const aNumeric = /^\d+$/.test(a); + const bNumeric = /^\d+$/.test(b); + if (aNumeric && bNumeric) { + const an = Number(a); + const bn = Number(b); + if (an < bn) return -1; + if (an > bn) return 1; + return 0; + } + if (aNumeric) return -1; + if (bNumeric) return 1; + if (a < b) return -1; + if (a > b) return 1; + return 0; +} + function comparePrerelease(a: string | null, b: string | null): number { if (a === b) return 0; + // A version with a prerelease has lower precedence than one without. 
if (a === null) return 1; if (b === null) return -1; - if (a < b) return -1; - if (a > b) return 1; + const aParts = a.split("."); + const bParts = b.split("."); + const len = Math.min(aParts.length, bParts.length); + for (let i = 0; i < len; i++) { + const cmp = comparePrereleaseIdentifier(aParts[i], bParts[i]); + if (cmp !== 0) return cmp; + } + if (aParts.length < bParts.length) return -1; + if (aParts.length > bParts.length) return 1; return 0; } @@ -160,7 +190,7 @@ async function withTimeout( // Once Authorization has been stripped on any cross-origin hop, never re-add — // prevents a redirect chain that bounces back to the origin host from re-attaching the token. let authStripped = false; - for (let hop = 0; hop <= MAX_REDIRECT_HOPS; hop++) { + for (let hop = 0; hop < MAX_REDIRECT_HOPS; hop++) { const headers = buildGitHubHeaders(); if (authStripped || new URL(currentUrl).host !== originHost) { delete headers.Authorization; @@ -174,6 +204,9 @@ async function withTimeout( if (response.status >= 300 && response.status < 400) { const location = response.headers.get("location"); // Drain the redirect body so keep-alive sockets don't pin across hops. + // Body-cancel failures here are non-fatal; controller.abort() in the + // outer timer will close the socket anyway. Caller-supplied onError + // (when available — only downloadAsset threads one) gets the diagnostic. await tryCatch(response.body?.cancel() ?? 
Promise.resolve()); if (!location) { throw new Error( @@ -236,10 +269,16 @@ async function fetchLatestRelease( return result; } +type DownloadOnError = ( + op: "body-cancel" | "release-lock" | "cleanup-unlink", + error: Error +) => void; + async function downloadAsset( asset: ReleaseAsset, destPath: string, - timeoutMs: number = DEFAULT_ASSET_TIMEOUT_MS + timeoutMs: number = DEFAULT_ASSET_TIMEOUT_MS, + onError?: DownloadOnError ): Promise { const { error } = await tryCatch( withTimeout( @@ -296,16 +335,21 @@ async function downloadAsset( ); } await new Promise(function (resolve, reject) { - writer.once("error", reject); - writer.end(function () { - resolve(); + writer.end(function ( + err: NodeJS.ErrnoException | null | undefined + ) { + if (err) reject(err); + else resolve(); }); }); writerClosed = true; } finally { - tryCatchSync(function () { + const { error: releaseError } = tryCatchSync(function () { reader.releaseLock(); }); + if (releaseError) { + onError?.("release-lock", releaseError); + } if (!writerClosed) { writer.destroy(); } @@ -315,9 +359,12 @@ async function downloadAsset( ); if (error) { // Clean up our own partial write so callers don't have to do it defensively. - tryCatchSync(function () { + const { error: cleanupError } = tryCatchSync(function () { fs.unlinkSync(destPath); }); + if (cleanupError && !isEnoent(cleanupError)) { + onError?.("cleanup-unlink", cleanupError); + } throw new Error(`Failed to download ${asset.name}: ${error.message}`, { cause: error, }); @@ -399,7 +446,11 @@ function constantTimeEquals(a: string, b: string): boolean { type Sha256SumsResult = | { kind: "not-published" } | { kind: "ok"; sums: Record } - | { kind: "error"; reason: string; retryable: boolean }; + | { kind: "error"; reason: string; retryable: boolean } + // "tamper" = SHA256SUMS body parsed but contains evidence of tampering + // (today: duplicate entries). 
Distinct from generic "error" so callers can + // escalate (foreground: loud red error; background: TAMPER: log prefix). + | { kind: "tamper"; reason: string }; // 5xx and 403/429 (rate-limit) retryable; other 4xx treated as permanent. function isRetryableHttpStatus(status: number): boolean { @@ -442,10 +493,13 @@ async function fetchSha256Sums( } ); if (parseError) { + // parseSha256Sums throws ONLY on duplicate entries today — + // which is the canonical signature of supply-chain tampering, + // not a transient outage. Discriminate so foreground/background + // can escalate appropriately. return { - kind: "error", - reason: `SHA256SUMS parse: ${parseError.message}`, - retryable: false, + kind: "tamper", + reason: parseError.message, }; } return { kind: "ok", sums: parsed }; @@ -465,6 +519,7 @@ async function fetchSha256Sums( type VerifyAssetResult = | { ok: true; hash: string | null } // hash === null when SHA256SUMS isn't published | { ok: false; kind: "sums-error"; reason: string; retryable: boolean } + | { ok: false; kind: "sums-tamper"; reason: string } | { ok: false; kind: "missing-entry" } | { ok: false; kind: "hash-io-error"; cause: Error } | { ok: false; kind: "hash-mismatch" }; @@ -475,6 +530,9 @@ async function verifyAssetAgainstSums( assets: ReleaseAsset[] ): Promise { const sums = await fetchSha256Sums(assets); + if (sums.kind === "tamper") { + return { ok: false, kind: "sums-tamper", reason: sums.reason }; + } if (sums.kind === "error") { return { ok: false, @@ -501,7 +559,7 @@ async function verifyAssetAgainstSums( } return { ok: false, kind: "hash-mismatch" }; } - return { ok: true, hash: expected.toLowerCase() }; + return { ok: true, hash: expected }; } function parseSha256Sums(text: string): Record { From ac03eb50577a659119889bb15852d7db7d53c75d Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Mon, 20 Apr 2026 21:15:57 +0530 Subject: [PATCH 14/15] fix: add try catch in readConfigFile function --- src/lib/config.ts | 15 ++++++++++----- 1 file 
changed, 10 insertions(+), 5 deletions(-) diff --git a/src/lib/config.ts b/src/lib/config.ts index 720ed03..fb64740 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -1,7 +1,7 @@ -import { z } from "zod"; import fs from "node:fs"; import os from "node:os"; import path from "node:path"; +import { z } from "zod"; import { DEFAULT_WORKTREE_DIR } from "./constants"; import { tryCatch, tryCatchSync } from "./try-catch"; @@ -107,10 +107,15 @@ async function readConfigFile( if (scope === "project" && "AUTO_UPDATE" in raw) { // Validate the value too so a user moving the line to ~/.worktreerc later // already knows whether they wrote a valid boolean-like or garbage. - const probe = booleanLike.safeParse(raw.AUTO_UPDATE); - const validityNote = probe.success - ? `; the value "${raw.AUTO_UPDATE}" parses as a boolean (would take effect once moved)` - : `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probe.error.issues[0]?.message ?? "unknown"})`; + const { data: probe, error: probeError } = tryCatchSync(function () { + return booleanLike.safeParse(raw.AUTO_UPDATE); + }); + const validityNote = + probeError !== null + ? `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probeError.message})` + : probe.success + ? `; the value "${raw.AUTO_UPDATE}" parses as a boolean (would take effect once moved)` + : `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probe.error.issues[0]?.message ?? 
"unknown"})`; warnOnce( `${filePath}:AUTO_UPDATE`, `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead${validityNote}.` From b6009a0e2974e6dedbc769768ac431cb9aca9fa9 Mon Sep 17 00:00:00 2001 From: Bhagya Mudgal Date: Mon, 20 Apr 2026 21:59:06 +0530 Subject: [PATCH 15/15] fix: comments --- CLAUDE.md | 21 +++++++++++++++++++++ src/commands/update.ts | 4 +--- src/lib/auto-update.ts | 15 ++------------- src/lib/config.ts | 17 +++++++++++++---- src/lib/release.ts | 16 ++-------------- 5 files changed, 39 insertions(+), 34 deletions(-) diff --git a/CLAUDE.md b/CLAUDE.md index 35ad70b..552dc93 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -35,6 +35,27 @@ bun run format # Prettier - Every `@clack/prompts` call must check `p.isCancel()` and exit gracefully. - `shell.ts` reads stdout and stderr concurrently with `Promise.all` to avoid pipe deadlocks. +## Comment discipline + +**Default: write zero comments.** Well-named identifiers + control flow explain WHAT. This project follows the global "no comments unless genuinely complex" rule **strictly** — stricter than the global default. + +**Fix rationale belongs in commit messages, not code.** "We added X to prevent Y outage" is commit-message content. It does NOT go in the code — comments drift from the implementation as the code evolves, commit messages and PR descriptions don't. + +**The only code comments that earn their keep are footgun warnings** — ones that save a future dev from a specific non-obvious trap AND aren't findable from git blame. 
Examples that pass the bar: + +- `// Bun.spawnSync returns null exitCode on timeout kill.` (runtime quirk) +- `// Constant-time compare prevents timing side-channel on hash compare.` (security invariant the call site alone doesn't communicate) +- `// POSIX setsid(): survives terminal close so a slow download isn't SIGHUPed.` (cross-platform behavior note) + +**Anti-patterns — NEVER write any of these** (concrete examples from real commits that violated this rule): + +1. **Paraphrasing the next line** — `// Bump throttle on transient network failures so we don't burn the GitHub API quota` above `recordCheckCompleted();`. The function name already says this. +2. **JSDoc-style docblocks for internal helpers** — `// true=exists, false=ENOENT, null=non-ENOENT error logged; caller must bail` above `function checkExists(...): boolean | null`. The return type plus null-checks at call sites already communicate the contract. +3. **Multi-line fix rationale** — any 2+ line comment explaining WHY a PR-level decision was made. That belongs in the commit message. If it's not findable from `git blame`, improve the commit message instead of polluting the code. +4. **Stacked WHY paragraphs** — back-to-back `// line 1 / // line 2 / // line 3` blocks. Treat 2 lines as a warning sign; 3+ lines is always wrong. + +**Self-test before writing any comment**: remove it and re-read the function. Would a future reader (including future-me) be meaningfully more confused without it? If the answer is "no" or "barely" — delete the comment. 
+ ## Dependencies - `@drizzle-team/brocli` — CLI arg parsing (typed commands + options) diff --git a/src/commands/update.ts b/src/commands/update.ts index a4c57f1..a36901b 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -150,9 +150,7 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } - // Probe BEFORE rename: a SHA-valid binary that won't run on this machine - // (libc/codesign/macOS-version mismatch) would otherwise replace the user's - // working binary with one that segfaults on next launch. + // Probe before rename — SHA match ≠ runnable; segfaults on libc/codesign mismatch. const probe = probeBinaryRuns(tmpPath); if (!probe.ok) { await safeUnlink(tmpPath); diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts index 98c8f4b..b319270 100644 --- a/src/lib/auto-update.ts +++ b/src/lib/auto-update.ts @@ -153,8 +153,6 @@ function cleanupStagedArtifacts(): void { safeUnlinkSync(getMetaSidecarPath()); } -// true=exists, false=ENOENT, null=non-ENOENT error logged; caller must bail without -// destructive cleanup (don't unlink anything we can't stat reliably). function checkExists( filePath: string, kind: "apply" | "check" @@ -174,8 +172,7 @@ function isWithinGracePeriod(filePath: string): boolean { }); if (error) { if (isEnoent(error)) return false; - // Non-ENOENT (EACCES/EIO) — surface the diagnostic and be conservative: - // assume in-grace so we never destroy a peer's stage on incomplete info. + // Non-ENOENT: be conservative (return true) — never destroy a peer's stage on incomplete stat info. appendLastError("apply", `grace-stat: ${error.message}`); return true; } @@ -439,8 +436,6 @@ async function runBackgroundUpdateCheck(): Promise { "check", `fetchLatestRelease: ${releaseError?.message ?? "unknown"}` ); - // Bump throttle on transient network failures so we don't burn the GitHub - // API quota retrying on every CLI invocation while api.github.com is down. 
recordCheckCompleted(); return; } @@ -478,8 +473,6 @@ async function runBackgroundUpdateCheck(): Promise { if (dlError) { safeUnlinkSync(tmpPath); appendLastError("check", `download: ${dlError.message}`); - // Bump throttle on every download failure (write OR network) so a persistent - // outage doesn't trigger a re-download cycle on every CLI invocation. recordCheckCompleted(); return; } @@ -493,8 +486,6 @@ async function runBackgroundUpdateCheck(): Promise { if (!verify.ok) { safeUnlinkSync(tmpPath); if (verify.kind === "sums-tamper") { - // Loud TAMPER: prefix in the log so a curious user grepping last-error - // can distinguish supply-chain integrity hits from generic outages. appendLastError( "check", `TAMPER: SHA256SUMS for ${assetName} is malformed (${verify.reason}) — refusing to stage` @@ -545,9 +536,7 @@ async function runBackgroundUpdateCheck(): Promise { if (!probe.ok) { safeUnlinkSync(tmpPath); appendLastError("check", `probe: ${probe.reason}`); - // Treat probe failure as structural for THIS release — bumping throttle - // prevents a re-download/re-probe loop burning ~50MB on every CLI invocation - // until upstream republishes a fixed binary. + // Probe fail is structural for this release — burn throttle or we redownload 50 MB every launch. recordCheckCompleted(); return; } diff --git a/src/lib/config.ts b/src/lib/config.ts index fb64740..a3657a4 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -92,9 +92,19 @@ async function readConfigFile( scope: ConfigScope ): Promise { const file = Bun.file(filePath); - const isExists = await file.exists(); - if (!isExists) return validateConfig({}); const display = displayPath(filePath); + // file.exists() can throw on stat errors — guard like shouldAutoUpdate below. + const { data: isExists, error: existsError } = await tryCatch( + file.exists() + ); + if (existsError) { + warnOnce( + filePath, + `warning: could not stat ${display}: ${existsError.message}. 
Using defaults.` ); return validateConfig({}); } + if (!isExists) return validateConfig({}); const { data: content, error: readError } = await tryCatch(file.text()); if (readError) { warnOnce( @@ -105,8 +115,7 @@ } const raw = parseConfigContent(content); if (scope === "project" && "AUTO_UPDATE" in raw) { - // Validate the value too so a user moving the line to ~/.worktreerc later - // already knows whether they wrote a valid boolean-like or garbage. + // Also validate — user moving this line to ~/.worktreerc later needs to know if it's syntactically valid. const { data: probe, error: probeError } = tryCatchSync(function () { return booleanLike.safeParse(raw.AUTO_UPDATE); }); diff --git a/src/lib/release.ts b/src/lib/release.ts index 627eb41..2ce6b60 100644 --- a/src/lib/release.ts +++ b/src/lib/release.ts @@ -103,10 +103,7 @@ function parseVersion(v: string): ParsedVersion { }; } -// SemVer 2.0 §11: dot-separated identifiers compared pairwise. -// Numeric identifiers compare numerically; string identifiers compare lexically; -// numeric identifiers always have lower precedence than strings; longer field -// wins iff all preceding identifiers are equal. +// SemVer 2.0 §11: pairwise compare; numeric < string; longer wins iff all preceding identifiers are equal. if (response.status >= 300 && response.status < 400) { const location = response.headers.get("location"); // Drain the redirect body so keep-alive sockets don't pin across hops. - // Body-cancel failures here are non-fatal; controller.abort() in the - // outer timer will close the socket anyway. Caller-supplied onError - // (when available — only downloadAsset threads one) gets the diagnostic. await tryCatch(response.body?.cancel() ??
Promise.resolve()); if (!location) { throw new Error( @@ -447,9 +441,7 @@ type Sha256SumsResult = | { kind: "not-published" } | { kind: "ok"; sums: Record } | { kind: "error"; reason: string; retryable: boolean } - // "tamper" = SHA256SUMS body parsed but contains evidence of tampering - // (today: duplicate entries). Distinct from generic "error" so callers can - // escalate (foreground: loud red error; background: TAMPER: log prefix). + // "tamper" = parsed-but-malformed sums (today: duplicates) — distinct from transient "error". | { kind: "tamper"; reason: string }; // 5xx and 403/429 (rate-limit) retryable; other 4xx treated as permanent. @@ -493,10 +485,6 @@ async function fetchSha256Sums( } ); if (parseError) { - // parseSha256Sums throws ONLY on duplicate entries today — - // which is the canonical signature of supply-chain tampering, - // not a transient outage. Discriminate so foreground/background - // can escalate appropriately. return { kind: "tamper", reason: parseError.message,