diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1cec1f2..08b99a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: name: Lint, Format & Type Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - uses: oven-sh/setup-bun@v2 - run: bun install --frozen-lockfile - run: bun run lint diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3161c82..51fab05 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,55 +1,140 @@ name: Release on: - push: - tags: ["v*"] + push: + tags: ["v*"] permissions: - contents: write + contents: write jobs: - build: - name: Build ${{ matrix.target }} - runs-on: ${{ matrix.os }} - strategy: - matrix: - include: - - target: darwin-arm64 - os: macos-latest - bun_target: bun-darwin-arm64 - - target: darwin-x64 - os: macos-15-intel - bun_target: bun-darwin-x64 - - target: linux-x64 - os: ubuntu-latest - bun_target: bun-linux-x64 - - target: linux-arm64 - os: ubuntu-latest - bun_target: bun-linux-arm64 - - steps: - - uses: actions/checkout@v4 - - uses: oven-sh/setup-bun@v2 - - run: bun install --frozen-lockfile - - run: bun build src/index.ts --compile --target=${{ matrix.bun_target }} --outfile dist/worktree-${{ matrix.target }} - - uses: actions/upload-artifact@v4 - with: - name: worktree-${{ matrix.target }} - path: dist/worktree-${{ matrix.target }} - - release: - name: Create Release - needs: build - runs-on: ubuntu-latest - steps: - - uses: actions/download-artifact@v4 - with: - path: artifacts - merge-multiple: true - - - run: chmod +x artifacts/worktree-* - - - uses: softprops/action-gh-release@v2 - with: - generate_release_notes: true - files: artifacts/worktree-* + build: + name: Build ${{ matrix.target }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - target: darwin-arm64 + os: macos-latest + bun_target: bun-darwin-arm64 + - target: darwin-x64 + os: 
macos-15-intel + bun_target: bun-darwin-x64 + - target: linux-x64 + os: ubuntu-latest + bun_target: bun-linux-x64 + - target: linux-arm64 + os: ubuntu-24.04-arm + bun_target: bun-linux-arm64 + + steps: + - uses: actions/checkout@v6 + - uses: oven-sh/setup-bun@v2 + - run: bun install --frozen-lockfile + + - name: Compile binary + run: | + bun build src/index.ts \ + --compile \ + --minify \ + --sourcemap=none \ + --target=${{ matrix.bun_target }} \ + --outfile dist/worktree-${{ matrix.target }} + + - name: Install llvm-strip (Linux) + if: startsWith(matrix.target, 'linux-') + run: sudo apt-get update && sudo apt-get install -y llvm + + - name: Strip symbols (Linux) + if: startsWith(matrix.target, 'linux-') + run: llvm-strip --strip-unneeded dist/worktree-${{ matrix.target }} + + - name: Strip symbols (macOS) + if: startsWith(matrix.target, 'darwin-') + run: strip dist/worktree-${{ matrix.target }} + + - name: Ad-hoc codesign (macOS) + if: startsWith(matrix.target, 'darwin-') + run: | + xattr -cr dist/worktree-${{ matrix.target }} + codesign --force --sign - dist/worktree-${{ matrix.target }} + codesign -dv dist/worktree-${{ matrix.target }} + + - name: Verify binary architecture and size + run: | + file dist/worktree-${{ matrix.target }} + ls -lh dist/worktree-${{ matrix.target }} + + - name: Smoke-test --version + env: + WORKTREE_NO_UPDATE: "1" + run: | + chmod +x dist/worktree-${{ matrix.target }} + ./dist/worktree-${{ matrix.target }} --version + + - name: Compute SHA256 + run: | + cd dist + shasum -a 256 worktree-${{ matrix.target }} > worktree-${{ matrix.target }}.sha256 + + - uses: actions/upload-artifact@v7 + with: + name: worktree-${{ matrix.target }} + path: | + dist/worktree-${{ matrix.target }} + dist/worktree-${{ matrix.target }}.sha256 + + release: + name: Create Release + needs: build + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v8 + with: + path: artifacts + merge-multiple: true + + - name: Aggregate SHA256SUMS + run: | + cd 
artifacts + # Fail-fast: if no .sha256 sidecars were downloaded (e.g., + # an upload-artifact rename regression), the cat below + # would silently produce an empty SHA256SUMS that bricks + # every client's auto-update install. + count=$(ls -1 worktree-*.sha256 2>/dev/null | wc -l | tr -d ' ') + binaries=$(ls -1 worktree-* 2>/dev/null | grep -v '\.sha256$' | wc -l | tr -d ' ') + if [ "$count" -lt 1 ] || [ "$count" != "$binaries" ]; then + echo "sha256 count ($count) does not match binary count ($binaries) — abort" >&2 + exit 1 + fi + cat worktree-*.sha256 > SHA256SUMS + rm worktree-*.sha256 + if [ ! -s SHA256SUMS ]; then + echo "SHA256SUMS empty after aggregation — abort" >&2 + exit 1 + fi + cat SHA256SUMS + + - run: chmod +x artifacts/worktree-* + + # Create the release as a draft and upload all assets to it. + # Publishing happens in the next step so `releases/latest` never + # returns a release where binaries are attached but SHA256SUMS + # is still being uploaded — that window would let clients fall + # through to the TLS-only "not-published" path and install + # unverified. + - uses: softprops/action-gh-release@v2 + with: + draft: true + generate_release_notes: true + files: | + artifacts/worktree-* + artifacts/SHA256SUMS + + - name: Publish release (flip draft to public) + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REF_NAME: ${{ github.ref_name }} + REPO: ${{ github.repository }} + run: | + gh release edit "$REF_NAME" --repo "$REPO" --draft=false diff --git a/.gitignore b/.gitignore index fda1c98..ffafb61 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ node_modules/ dist/ .worktrees docs/ +tasks/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 79d2758..7467b87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,77 @@ All notable changes to this project will be documented in this file. 
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [Unreleased] + +### Changed + +- Auto-update now bumps its 24-hour throttle when a release is genuinely unusable on the current platform (probe failure), when GitHub is unreachable, or when a download fails for any reason. Previously, every CLI invocation re-downloaded ~50 MB and re-hit the GitHub API, which could trip the 60-requests-per-hour anonymous rate limit on a heavy day. +- Foreground `worktree update` now smoke-tests the downloaded binary (`--version`) before atomically replacing the installed binary. A SHA256-valid release that won't run on the current machine (libc/codesign/macOS-version mismatch) is now refused with a clear error instead of leaving the user with a broken `worktree`. +- A `SHA256SUMS` file containing **duplicate entries** — the canonical signature of supply-chain tampering — now triggers a loud red `SECURITY ALERT` in `worktree update` and a `TAMPER:` prefix in the background error log, instead of being reported as a generic "could not be fetched" outage. +- Project-scope `AUTO_UPDATE=...` is still ignored (matches existing behaviour) but the warning now also reports whether the value is a *valid* boolean-like, so a user moving the line to `~/.worktreerc` later already knows whether it would have taken effect. +- Version comparison now follows SemVer 2.0 §11 for prerelease ordering: `1.2.3-rc.10` is now correctly **greater than** `1.2.3-rc.2`. Previously the comparator used lexicographic string ordering, which would have stranded users on `rc.2` from ever auto-updating to `rc.10`. + +### Security + +- Auto-update tmp paths now use `crypto.randomBytes(8)` instead of `process.pid`, removing a predictable-filename primitive that a co-tenant on a group-writable install dir could pre-plant a symlink at. Pre-unlink remains as the primary defense. 
+- Stage detection no longer silently fails on `EACCES` of the binary directory: `existsSync` was masking permission errors as "no stage". Now logs the diagnostic and bails without destructive cleanup, so a transient permission glitch can't destroy a peer process's mid-commit stage either. + +### Fixed + +- `release.ts` download path no longer silently swallows three classes of error (writer post-finish flush errors, reader `releaseLock` failures, partial-write cleanup failures). Errors now route through an `onError` callback that the auto-updater logs to `~/.cache/worktree-cli/last-error`. +- Removed an off-by-one in the redirect loop (`<=` instead of `<`) — the loop was allowing 6 hops while the error message claimed a limit of 5. +- Aggregated SHA256SUMS in the release workflow now compares against the actual binary count instead of a hardcoded `expected=4` — adding or removing a build target no longer requires editing two places. + +## [1.3.0] - 2026-04-17 + +### Added + +- **Background auto-update**: on launch, `worktree` checks GitHub for a newer release at most once every 24 hours in a detached background process. When a newer version is found, the binary is downloaded, verified against SHA256, and staged. The next invocation atomically swaps in the new binary and prints a one-line `worktree auto-updated to vX.Y.Z` note on stderr. Opt out via `AUTO_UPDATE=false` in `~/.worktreerc` or `WORKTREE_NO_UPDATE=1` in the environment. +- **Release integrity**: every GitHub Release now publishes a `SHA256SUMS` file. The `worktree update` command and the background auto-updater both verify the downloaded binary against this hash before installing. Releases without `SHA256SUMS` (legacy) still work but without verification. + +### Changed + +- Release binaries are slightly smaller (minified, debug symbols stripped). No behavioural change. +- Release workflow smoke-tests each built binary (`--version`) before publishing so a broken build can't reach users. 
+- `AUTO_UPDATE` in a project `.worktreerc` now warns once that it is ignored — only `~/.worktreerc` is honoured (matches the README). +- Global and project config files now behave symmetrically on parse errors: both warn and fall back to defaults. +- CI: bumped `actions/checkout`, `actions/upload-artifact`, and `actions/download-artifact` to latest majors (Node.js 24 runtime) following GitHub's deprecation of Node.js 20 actions. + +### Security + +- Release downloads are verified against `SHA256SUMS` using a constant-time hash comparison before being made executable. +- Release-channel fetches are restricted to an allowlist of GitHub-owned hosts, validated on every redirect hop **before** the runtime connects. A malicious `Location:` injection on the first hop can no longer reach an arbitrary host. `GITHUB_TOKEN` is stripped on any cross-origin hop and not re-attached if the chain bounces back to the origin. +- Release assets with a declared `Content-Length` over 200 MB are rejected outright, and byte-counts are enforced as the body streams in — a CDN omitting or forging `Content-Length` cannot exhaust memory before the size check fires. The download timeout is honoured throughout the body read so a slowloris response can't stretch past it. +- The staging tmp path is pre-unlinked before each download as a best-effort defense against a planted symlink in a shared install directory. (Not race-free — a writable install directory still allows re-planting between the unlink and the subsequent write. Closing that race fully would require `O_EXCL | O_NOFOLLOW`. Same treatment applies to the sidecar tmp path.) +- GitHub releases are now published as **draft**, uploaded with all files (binaries + `SHA256SUMS`), and flipped to public in a subsequent step — eliminating the window where `releases/latest` exposed a public release with binaries but no sums, forcing clients onto the TLS-only install path. 
+- `SHA256SUMS` parser rejects duplicate filename entries and is immune to prototype-pollution from a tampered sums file. +- Release tag and staged version strings are validated against the same strict regex at the writer and the reader, so a crafted tag can't propagate into paths, logs, or sidecar metadata. +- Concurrent launches no longer discard a correctly-staged update mid-commit: a 60-second mtime grace window distinguishes a concurrent producer from a real orphan. +- Auto-update now fails **closed** on a malformed `~/.worktreerc` — a typo can no longer silently re-enable auto-update against an explicit opt-out. +- Applying a staged update now also respects `AUTO_UPDATE=false` in `~/.worktreerc`. Previously, a user who opted out in config after a binary was already staged would still get the staged binary installed on the next launch. +- GitHub API fetches now send a proper `User-Agent` (`worktree-cli/vX.Y.Z`), `Accept: application/vnd.github+json`, and `X-GitHub-Api-Version` header. Setting `GITHUB_TOKEN` in the environment raises the rate limit from 60/hr (anonymous) to 5000/hr (authenticated). + +### Fixed + +- macOS releases (darwin-arm64, darwin-x64) are now **ad-hoc codesigned** after stripping. Prior releases shipped unsigned binaries, which Apple Silicon macOS SIGKILLs on execution. Users who hit `killed: 9` errors after downloading the raw binary should re-install from v1.3.0 onward. +- Auto-update no longer buffers the full binary in memory during download or verification — peak memory stays flat regardless of binary size. +- A missing per-arch asset in the latest release no longer burns the 24h auto-update throttle; the next launch retries so users on the lagging arch get updated once the asset is uploaded. 
+- `worktree update` and the background auto-updater now recognise the full set of write-permission errors on download, `chmod`, and rename (not just `EACCES`), clean up any partial staged files, and print a one-line `run "sudo worktree update"` hint instead of looping on every launch. +- Persistent structural failures (read-only install directory, busy binary, disk full, filesystem boundary) now throttle the background check so a stuck install directory no longer burns the GitHub API quota on every launch. +- Orphan staging artifacts left by an interrupted background update are cleaned up on the next launch instead of lingering indefinitely. +- First-ever auto-update launch no longer prints a spurious "error log unwritable" warning on a not-yet-created log file. +- Background updater no longer spawns a blind detached child when the cache log can't be opened — the same condition that short-circuits the throttle now also short-circuits the spawn, and the parent's copy of the log fd is released even if the spawn itself throws synchronously. +- Probe-timeout failures on a freshly-downloaded binary now surface as `timed out after 2000ms` instead of the opaque `exit null`. +- Network errors during update checks now preserve the underlying errno (`ENOTFOUND`, `ECONNRESET`, `ETIMEDOUT`, etc.) instead of being hidden behind a generic wrapper. +- The background update child is detached (POSIX `setsid`) so a slow download isn't killed when the user's shell or terminal exits. +- Unhandled throws from the background update path now write a full stack trace to `~/.cache/worktree-cli/last-error` and surface on the next foreground launch instead of failing silently. +- Unlink errors during cleanup distinguish "already gone" from real failures — only real failures emit a warning. 
+ +### Tests + +- Added coverage for the SHA256 verification flow across all result shapes (legacy, normal, transient, permanent, missing-entry, hash-mismatch, hash-io-error) using stubbed `fetch` and a precomputed-hash asset file — pins the safety contract against future refactors. +- Added coverage for the `SHA256SUMS` parser's duplicate-entry rejection. + ## [1.2.0] - 2026-04-17 ### Added diff --git a/CLAUDE.md b/CLAUDE.md index 35ad70b..552dc93 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -35,6 +35,27 @@ bun run format # Prettier - Every `@clack/prompts` call must check `p.isCancel()` and exit gracefully. - `shell.ts` reads stdout and stderr concurrently with `Promise.all` to avoid pipe deadlocks. +## Comment discipline + +**Default: write zero comments.** Well-named identifiers + control flow explain WHAT. This project follows the global "no comments unless genuinely complex" rule **strictly** — stricter than the global default. + +**Fix rationale belongs in commit messages, not code.** "We added X to prevent Y outage" is commit-message content. It does NOT go in the code — comments drift from the implementation as the code evolves, commit messages and PR descriptions don't. + +**The only code comments that earn their keep are footgun warnings** — ones that save a future dev from a specific non-obvious trap AND aren't findable from git blame. Examples that pass the bar: + +- `// Bun.spawnSync returns null exitCode on timeout kill.` (runtime quirk) +- `// Constant-time compare prevents timing side-channel on hash compare.` (security invariant the call site alone doesn't communicate) +- `// POSIX setsid(): survives terminal close so a slow download isn't SIGHUPed.` (cross-platform behavior note) + +**Anti-patterns — NEVER write any of these** (concrete examples from real commits that violated this rule): + +1. **Paraphrasing the next line** — `// Bump throttle on transient network failures so we don't burn the GitHub API quota` above `recordCheckCompleted();`. 
The function name already says this. +2. **JSDoc-style docblocks for internal helpers** — `// true=exists, false=ENOENT, null=non-ENOENT error logged; caller must bail` above `function checkExists(...): boolean | null`. The return type plus null-checks at call sites already communicate the contract. +3. **Multi-line fix rationale** — any 2+ line comment explaining WHY a PR-level decision was made. That belongs in the commit message. If it's not findable from `git blame`, improve the commit message instead of polluting the code. +4. **Stacked WHY paragraphs** — back-to-back `// line 1 / // line 2 / // line 3` blocks. Treat 2 lines as a warning sign; 3+ lines is always wrong. + +**Self-test before writing any comment**: remove it and re-read the function. Would a future reader (including future-me) be meaningfully more confused without it? If the answer is "no" or "barely" — delete the comment. + ## Dependencies - `@drizzle-team/brocli` — CLI arg parsing (typed commands + options) diff --git a/README.md b/README.md index bb6eddd..899f278 100644 --- a/README.md +++ b/README.md @@ -62,12 +62,15 @@ On `remove`, it: ## Config -The `.worktreerc` file supports: +`.worktreerc` keys are read from one of two locations depending on the key: -| Key | Description | Example | -|-----|-------------|---------| -| `DEFAULT_BASE` | Default base branch for new worktrees | `origin/dev` | -| `WORKTREE_DIR` | Directory name for worktrees (default: `.worktrees`) | `.worktrees` | +| Key | Description | Where | Example | +|-----|-------------|-------|---------| +| `DEFAULT_BASE` | Default base branch for new worktrees | Project (`/.worktreerc`) | `origin/dev` | +| `WORKTREE_DIR` | Directory name for worktrees (default: `.worktrees`) | Project (`/.worktreerc`) | `.worktrees` | +| `AUTO_UPDATE` | Enable background auto-update checks (default: `true`) | User (`~/.worktreerc`) only | `false` | + +`DEFAULT_BASE` and `WORKTREE_DIR` placed in `~/.worktreerc` are ignored — `worktree` reads them 
from the project file at the repo root only. `AUTO_UPDATE` placed in a project `.worktreerc` is ignored with a warning — it must live in `~/.worktreerc` so it applies across all repos under your control. ## Alias @@ -81,7 +84,29 @@ Then use `gw create feature-auth`, `gw list`, etc. ## Update -Re-run the install command to get the latest version. +### Automatic + +Once installed, `worktree` checks GitHub for a newer release at most once every 24 hours, in the background. When a newer version is found, it is downloaded, verified against a SHA256 hash, and staged. The **next** time you invoke `worktree`, the binary is swapped atomically and the command runs against the new version — you'll see a one-line note on stderr. + +To disable, create `~/.worktreerc` with: + +```ini +AUTO_UPDATE=false +``` + +Or set `WORKTREE_NO_UPDATE=1` in your environment (useful in CI). + +Auto-update is a no-op when running via `bun run dev` or in any non-standalone invocation. + +Background check failures (network errors, hash mismatches, filesystem issues) are logged to `~/.cache/worktree-cli/last-error` — check this file if auto-updates seem stuck. + +### Manual + +```bash +worktree update +``` + +Forces an immediate check + download + replace, bypassing the 24-hour throttle. Requires write permission to the binary location (use `sudo` if installed under `/usr/local/bin`). 
## Platforms diff --git a/package.json b/package.json index 14f86c7..0f7e392 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "worktree-cli", - "version": "1.2.0", + "version": "1.3.0", "description": "Git worktree manager with automatic env file copying, dependency installation, and editor integration.", "type": "module", "module": "src/index.ts", diff --git a/src/commands/internal-update-check.ts b/src/commands/internal-update-check.ts new file mode 100644 index 0000000..a8e01e6 --- /dev/null +++ b/src/commands/internal-update-check.ts @@ -0,0 +1,21 @@ +import { command } from "@drizzle-team/brocli"; +import { + appendBackgroundCheckPanic, + INTERNAL_CHECK_SUBCOMMAND, + runBackgroundUpdateCheck, +} from "../lib/auto-update"; + +export const internalUpdateCheckCommand = command({ + name: INTERNAL_CHECK_SUBCOMMAND, + desc: "", + hidden: true, + handler: async () => { + // Detached child's stderr is redirected; catch so panics still hit last-error. + try { + await runBackgroundUpdateCheck(); + } catch (error) { + appendBackgroundCheckPanic(error); + process.exit(1); + } + }, +}); diff --git a/src/commands/update.ts b/src/commands/update.ts index 21c353f..a36901b 100644 --- a/src/commands/update.ts +++ b/src/commands/update.ts @@ -4,39 +4,30 @@ import pkg from "../../package.json"; import { tryCatch } from "../lib/try-catch"; import { printSuccess, printError, printInfo, COLORS } from "../lib/logger"; import { EXIT_CODES } from "../lib/constants"; - -const REPO = "bhagyamudgal/worktree-cli"; - -function getAssetName(): string { - const platform = process.platform; - const arch = process.arch; - - if (platform !== "darwin" && platform !== "linux") { - printError(`Unsupported platform: ${platform}`); - process.exit(EXIT_CODES.ERROR); - } - if (arch !== "arm64" && arch !== "x64") { - printError(`Unsupported architecture: ${arch}`); - process.exit(EXIT_CODES.ERROR); - } - - return `worktree-${platform}-${arch}`; -} - -type ReleaseAsset = { - name: 
string; - browser_download_url: string; -}; +import { + classifyWriteError, + deepestMessage, + safeUnlink, +} from "../lib/fs-utils"; +import { + compareVersions, + downloadAsset, + fetchLatestRelease, + getAssetName, + isStandalone, + verifyAssetAgainstSums, +} from "../lib/release"; +import { + cleanupStagedArtifacts, + probeBinaryRuns, + recordCheckCompleted, +} from "../lib/auto-update"; export const updateCommand = command({ name: "update", desc: "Update worktree CLI to the latest version", handler: async () => { - const isStandalone = - Bun.main.startsWith("/$bunfs/") || - import.meta.url.includes("$bunfs/"); - - if (!isStandalone) { + if (!isStandalone()) { printError( "Update is only available for standalone compiled binaries." ); @@ -44,112 +35,128 @@ export const updateCommand = command({ process.exit(EXIT_CODES.ERROR); } - const currentVersion = pkg.version; - const binaryPath = process.execPath; - - printInfo(`Current version: v${currentVersion}`); - - const { data: response, error: fetchError } = await tryCatch( - fetch(`https://api.github.com/repos/${REPO}/releases/latest`) - ); - if (fetchError || !response) { + const assetName = getAssetName(); + if (!assetName) { printError( - "Failed to check for updates. Check your internet connection." + `Unsupported platform/arch: ${process.platform}/${process.arch}` ); process.exit(EXIT_CODES.ERROR); } - if (!response.ok) { + const currentVersion = pkg.version; + const binaryPath = process.execPath; + + printInfo(`Current version: v${currentVersion}`); + + const { data: release, error: releaseError } = + await tryCatch(fetchLatestRelease()); + if (releaseError || !release) { printError( - `GitHub API error: ${response.status} ${response.statusText}` + releaseError + ? `Failed to check for updates: ${deepestMessage(releaseError)}` + : "Failed to check for updates. Check your internet connection." 
); process.exit(EXIT_CODES.ERROR); } - const { data: release, error: jsonError } = await tryCatch( - response.json() - ); - if ( - jsonError || - !release || - typeof release !== "object" || - typeof release.tag_name !== "string" || - !Array.isArray(release.assets) - ) { - printError("Failed to parse release data."); - process.exit(EXIT_CODES.ERROR); - } - - const latestVersion = release.tag_name.replace(/^v/, ""); - printInfo(`Latest version: v${latestVersion}`); + printInfo(`Latest version: v${release.version}`); console.error(""); - if (currentVersion === latestVersion) { + const cmp = compareVersions(currentVersion, release.version); + if (cmp === 0) { printSuccess("Already up to date!"); return; } - - const [curMajor, curMinor, curPatch] = currentVersion - .split(".") - .map(Number); - const [latMajor, latMinor, latPatch] = latestVersion - .split(".") - .map(Number); - - const isNewer = - curMajor > latMajor || - (curMajor === latMajor && curMinor > latMinor) || - (curMajor === latMajor && - curMinor === latMinor && - curPatch > latPatch); - - if (isNewer) { + if (cmp > 0) { printSuccess( "Current version is newer than the latest release. No update needed." ); return; } - const assetName = getAssetName(); - const asset = (release.assets as ReleaseAsset[]).find( - (entry) => entry.name === assetName - ); + const asset = release.assets.find(function (entry) { + return entry.name === assetName; + }); if (!asset) { - printError( - `Release ${release.tag_name} is missing asset ${assetName}.` - ); + printError(`Release ${release.tag} is missing asset ${assetName}.`); process.exit(EXIT_CODES.ERROR); } printInfo(`Downloading ${assetName}...`); - const { data: downloadResponse, error: dlError } = await tryCatch( - fetch(asset.browser_download_url) + const tmpPath = `${binaryPath}.update-tmp`; + // Pre-unlink to prevent symlink-follow in shared install dirs. 
+ await safeUnlink(tmpPath); + const { error: dlError } = await tryCatch( + downloadAsset(asset, tmpPath) ); - if (dlError || !downloadResponse || !downloadResponse.ok) { - printError(`Failed to download ${assetName}.`); + if (dlError) { + await safeUnlink(tmpPath); + if (classifyWriteError(dlError) !== null) { + printError( + `Permission denied (${deepestMessage(dlError)}). Try: sudo worktree update` + ); + } else { + printError(deepestMessage(dlError)); + } process.exit(EXIT_CODES.ERROR); } - const { data: buffer, error: bufError } = await tryCatch( - downloadResponse.arrayBuffer() + const verify = await verifyAssetAgainstSums( + tmpPath, + assetName, + release.assets ); - if (bufError || !buffer) { - printError("Failed to read download."); + if (!verify.ok) { + await safeUnlink(tmpPath); + if (verify.kind === "sums-tamper") { + const { RED, BOLD, RESET } = COLORS; + console.error( + `${RED}${BOLD}SECURITY ALERT${RESET}${RED}: SHA256SUMS for ${release.tag} is malformed (${verify.reason}). This is the canonical signature of supply-chain tampering. Refusing to install.${RESET}` + ); + } else if (verify.kind === "sums-error") { + printError( + `SHA256SUMS is published but could not be fetched: ${verify.reason}. Refusing to install.` + ); + } else if (verify.kind === "missing-entry") { + printError( + `SHA256SUMS is missing an entry for ${assetName}; refusing to install.` + ); + } else if (verify.kind === "hash-io-error") { + printError( + `Could not read downloaded binary for hash check: ${verify.cause.message}.` + ); + } else { + printError( + `Hash mismatch for ${assetName}; refusing to install.` + ); + } process.exit(EXIT_CODES.ERROR); } + if (verify.hash !== null) { + printInfo("Verified SHA256 checksum."); + } else { + printInfo( + "No SHA256SUMS published for this release; proceeding without hash verification." 
+ ); + } - const tmpPath = `${binaryPath}.update-tmp`; - const { error: writeError } = await tryCatch( - fs.writeFile(tmpPath, Buffer.from(buffer), { mode: 0o755 }) - ); - if (writeError) { - await fs.unlink(tmpPath).catch(() => {}); - if ("code" in writeError && writeError.code === "EACCES") { - printError("Permission denied. Try: sudo worktree update"); - } else { - printError(`Failed to write update: ${writeError.message}`); - } + const { error: chmodError } = await tryCatch(fs.chmod(tmpPath, 0o755)); + if (chmodError) { + await safeUnlink(tmpPath); + printError( + `Failed to mark binary executable: ${deepestMessage(chmodError)}` + ); + process.exit(EXIT_CODES.ERROR); + } + + // Probe before rename — SHA match ≠ runnable; segfaults on libc/codesign mismatch. + const probe = probeBinaryRuns(tmpPath); + if (!probe.ok) { + await safeUnlink(tmpPath); + printError( + `The new release v${release.version} is not runnable on this machine (${probe.reason}). Please file an issue at https://github.com/bhagyamudgal/worktree-cli/issues.` + ); process.exit(EXIT_CODES.ERROR); } @@ -157,15 +164,27 @@ export const updateCommand = command({ fs.rename(tmpPath, binaryPath) ); if (renameError) { - await fs.unlink(tmpPath).catch(() => {}); - printError(`Failed to replace binary: ${renameError.message}`); + await safeUnlink(tmpPath); + if (classifyWriteError(renameError) !== null) { + printError( + `Permission denied (${deepestMessage(renameError)}). Try: sudo worktree update` + ); + } else { + printError( + `Failed to replace binary: ${deepestMessage(renameError)}` + ); + } process.exit(EXIT_CODES.ERROR); } + // Invalidate pending stage + bump throttle to prevent silent downgrade on next launch. 
+ cleanupStagedArtifacts(); + recordCheckCompleted(); + const { BOLD, GREEN, DIM, RESET } = COLORS; console.error(""); console.error( - `${GREEN}${BOLD}Updated!${RESET} v${currentVersion} → v${latestVersion}` + `${GREEN}${BOLD}Updated!${RESET} v${currentVersion} → v${release.version}` ); console.error(` ${DIM}Binary: ${binaryPath}${RESET}`); }, diff --git a/src/index.ts b/src/index.ts index 12c1ddc..1c4d8d4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,13 +1,63 @@ import { run } from "@drizzle-team/brocli"; import { createCommand } from "./commands/create"; +import { internalUpdateCheckCommand } from "./commands/internal-update-check"; import { listCommand } from "./commands/list"; import { openCommand } from "./commands/open"; import { removeCommand } from "./commands/remove"; import { updateCommand } from "./commands/update"; +import { + appendBackgroundCheckPanic, + applyPendingUpdate, + INTERNAL_CHECK_SUBCOMMAND, + scheduleBackgroundUpdateCheck, +} from "./lib/auto-update"; +import { COLORS } from "./lib/logger"; import pkg from "../package.json"; -run([createCommand, listCommand, openCommand, removeCommand, updateCommand], { - name: "worktree", - description: pkg.description ?? "Git worktree manager", - version: pkg.version, -}); +const META_FLAGS = new Set(["--version", "-v", "--help", "-h"]); +const FOREGROUND_UPDATE_SUBCOMMAND = "update"; + +function isMetaInvocation(): boolean { + // Match only the first positional arg so flag-as-value (e.g. `create my-feature -h`) still auto-updates. + const first = process.argv[2]; + return first !== undefined && META_FLAGS.has(first); +} + +function shouldSkipAutoUpdate(): boolean { + const first = process.argv[2]; + if (first === INTERNAL_CHECK_SUBCOMMAND) return true; + // Skip for the foreground updater to avoid racing its own binary install. 
+ if (first === FOREGROUND_UPDATE_SUBCOMMAND) return true; + return isMetaInvocation(); +} + +if (!shouldSkipAutoUpdate()) { + try { + applyPendingUpdate(); + } catch (error) { + // Never crash the entry point — the user's command (including `worktree update`) must still run. + appendBackgroundCheckPanic(error); + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update apply failed unexpectedly — set WORKTREE_NO_UPDATE=1 to disable; see ~/.cache/worktree-cli/last-error${RESET}` + ); + } + // Funnel async throws into the panic logger, not an unhandled rejection. + void scheduleBackgroundUpdateCheck().catch(appendBackgroundCheckPanic); +} + +run( + [ + createCommand, + listCommand, + openCommand, + removeCommand, + updateCommand, + internalUpdateCheckCommand, + ], + { + name: "worktree", + description: pkg.description ?? "Git worktree manager", + version: pkg.version, + } +); diff --git a/src/lib/auto-update.ts b/src/lib/auto-update.ts new file mode 100644 index 0000000..b319270 --- /dev/null +++ b/src/lib/auto-update.ts @@ -0,0 +1,685 @@ +import { randomBytes } from "node:crypto"; +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { shouldAutoUpdate, shouldAutoUpdateSync } from "./config"; +import { classifyWriteError, isEnoent, safeUnlinkSync } from "./fs-utils"; +import { + compareVersions, + computeSha256Sync, + downloadAsset, + fetchLatestRelease, + getAssetName, + isStandalone, + verifyAssetAgainstSums, + verifyBinaryHashSync, +} from "./release"; +import { tryCatch, tryCatchSync } from "./try-catch"; +import { COLORS } from "./logger"; +import pkg from "../../package.json"; + +const STAGING_FILENAME = ".worktree.next"; +const META_SIDECAR_FILENAME = ".worktree.next.meta"; +const TWENTY_FOUR_HOURS_MS = 24 * 60 * 60 * 1000; +const PROBE_TIMEOUT_MS = 2_000; +const MAX_ERROR_LOG_BYTES = 64 * 1024; +const ERROR_LOG_KEEP_LINES = 20; +const PROBE_STDERR_TRUNCATE_BYTES = 500; +const 
INTERNAL_CHECK_SUBCOMMAND = "__internal_update_check"; +const SIDECAR_VERSION_PATTERN = /^\d+\.\d+\.\d+(?:-[\w.-]+)?$/; +const SIDECAR_HASH_PATTERN = /^[0-9a-f]{64}$/; +// Defer reaping partial stages: a concurrent producer mid-commit looks identical to an orphan. +const STAGING_ORPHAN_GRACE_MS = 60 * 1000; + +function getBinaryDir(): string { + return path.dirname(process.execPath); +} + +function getStagingPath(): string { + return path.join(getBinaryDir(), STAGING_FILENAME); +} + +function getMetaSidecarPath(): string { + return path.join(getBinaryDir(), META_SIDECAR_FILENAME); +} + +function getCacheDir(): string { + return path.join(os.homedir(), ".cache", "worktree-cli"); +} + +function getLastCheckPath(): string { + return path.join(getCacheDir(), "last-check"); +} + +function getLastErrorPath(): string { + return path.join(getCacheDir(), "last-error"); +} + +function ensureCacheDir(): void { + fs.mkdirSync(getCacheDir(), { recursive: true }); +} + +let hasWarnedAboutLogFailure = false; +let hasCacheWriteFailed = false; +let hasWarnedAboutCacheWriteFailureOnce = false; + +function warnLogFailureOnce(reason: string): void { + if (hasWarnedAboutLogFailure) return; + hasWarnedAboutLogFailure = true; + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update error log unwritable (${reason}) — diagnostics unavailable${RESET}` + ); +} + +function warnCacheWriteFailureOnce(reason: string): void { + if (hasWarnedAboutCacheWriteFailureOnce) return; + hasWarnedAboutCacheWriteFailureOnce = true; + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: auto-update throttle cache unwritable (${reason}) — disabling auto-update for this process${RESET}` + ); +} + +function appendBackgroundCheckPanic(error: unknown): void { + const detail = + error instanceof Error + ? `${error.name}: ${error.message}\n${error.stack ?? 
""}` + : String(error); + appendLastError("check", `PANIC — ${detail.replace(/\n/g, "\n ")}`); +} + +function appendLastError(kind: "apply" | "check", message: string): void { + try { + ensureCacheDir(); + const line = `${new Date().toISOString()} ${kind}: ${message}\n`; + const logPath = getLastErrorPath(); + rotateErrorLogIfOversized(logPath); + fs.appendFileSync(logPath, line); + } catch (error) { + warnLogFailureOnce( + error instanceof Error ? error.message : String(error) + ); + } +} + +function rotateErrorLogIfOversized(logPath: string): void { + try { + const stat = fs.statSync(logPath); + if (stat.size <= MAX_ERROR_LOG_BYTES) return; + const existing = fs.readFileSync(logPath, "utf8"); + const lines = existing.split("\n").filter(function (line) { + return line !== ""; + }); + const kept = lines.slice(-ERROR_LOG_KEEP_LINES).join("\n") + "\n"; + fs.writeFileSync(logPath, kept); + } catch (error) { + if (isEnoent(error)) return; + warnLogFailureOnce( + error instanceof Error ? error.message : String(error) + ); + } +} + +type SidecarMeta = { version: string; sha256: string }; + +function formatSidecar(meta: SidecarMeta): string { + return `version=${meta.version}\nsha256=${meta.sha256}\n`; +} + +const SIDECAR_KNOWN_KEYS = new Set(["version", "sha256"]); + +function parseSidecar(text: string): SidecarMeta | null { + const kv: Record = {}; + for (const line of text.split("\n")) { + const trimmed = line.trim(); + if (trimmed === "") continue; + const eq = trimmed.indexOf("="); + if (eq === -1) return null; + const key = trimmed.slice(0, eq).trim(); + if (!SIDECAR_KNOWN_KEYS.has(key)) return null; + if (Object.prototype.hasOwnProperty.call(kv, key)) return null; + kv[key] = trimmed.slice(eq + 1).trim(); + } + const version = kv.version ?? ""; + const sha256 = (kv.sha256 ?? 
"").toLowerCase(); + if (!SIDECAR_VERSION_PATTERN.test(version)) return null; + if (!SIDECAR_HASH_PATTERN.test(sha256)) return null; + return { version, sha256 }; +} + +function cleanupStagedArtifacts(): void { + safeUnlinkSync(getStagingPath()); + safeUnlinkSync(getMetaSidecarPath()); +} + +function checkExists( + filePath: string, + kind: "apply" | "check" +): boolean | null { + const { error } = tryCatchSync(function () { + return fs.statSync(filePath); + }); + if (!error) return true; + if (isEnoent(error)) return false; + appendLastError(kind, `stat ${filePath}: ${error.message}`); + return null; +} + +function isWithinGracePeriod(filePath: string): boolean { + const { data: stat, error } = tryCatchSync(function () { + return fs.statSync(filePath); + }); + if (error) { + if (isEnoent(error)) return false; + // Non-ENOENT: be conservative (return true) — never destroy a peer's stage on incomplete stat info. + appendLastError("apply", `grace-stat: ${error.message}`); + return true; + } + if (!stat) return false; + return Date.now() - stat.mtimeMs < STAGING_ORPHAN_GRACE_MS; +} + +function applyPendingUpdate(): void { + if (process.env.WORKTREE_NO_UPDATE === "1") return; + // Gate on config too: a staged binary must not apply if the user set AUTO_UPDATE=false after it was staged. + const configAllows = shouldAutoUpdateSync(function (msg) { + appendLastError("apply", msg); + }); + if (!configAllows) return; + try { + if (!isStandalone()) return; + const stagedPath = getStagingPath(); + const metaPath = getMetaSidecarPath(); + const stagedExists = checkExists(stagedPath, "apply"); + if (stagedExists === null) return; + if (!stagedExists) { + // Within grace window, assume concurrent producer; past it, reap orphan. 
+ if (isWithinGracePeriod(metaPath)) return; + safeUnlinkSync(metaPath); + return; + } + + const metaExists = checkExists(metaPath, "apply"); + if (metaExists === null) return; + if (!metaExists) { + if (isWithinGracePeriod(stagedPath)) return; + safeUnlinkSync(stagedPath); + appendLastError( + "apply", + "staged binary without sidecar — discarded" + ); + warnApplyFailed("staged update was incomplete (missing metadata)"); + return; + } + + const { data: metaText, error: metaReadError } = tryCatchSync( + function () { + return fs.readFileSync(metaPath, "utf8"); + } + ); + if (metaReadError) { + cleanupStagedArtifacts(); + appendLastError("apply", `sidecar read: ${metaReadError.message}`); + warnApplyFailed( + `could not read staged metadata (${metaReadError.message})` + ); + return; + } + const meta = parseSidecar(metaText); + if (!meta) { + cleanupStagedArtifacts(); + appendLastError("apply", "sidecar malformed — discarded stage"); + warnApplyFailed("staged metadata was malformed"); + return; + } + + // Gate against silent downgrade from a stale stage (e.g. foreground update raced a background check). 
+ const stageCmp = compareVersions(pkg.version, meta.version); + if (stageCmp > 0) { + cleanupStagedArtifacts(); + appendLastError( + "apply", + `discarded stale stage v${meta.version} (running v${pkg.version})` + ); + recordCheckCompleted(); + return; + } + if (stageCmp === 0) { + cleanupStagedArtifacts(); + return; + } + + const verify = verifyBinaryHashSync(stagedPath, meta.sha256); + if (!verify.ok) { + cleanupStagedArtifacts(); + if (verify.kind === "io-error") { + appendLastError( + "apply", + `hash io-error: ${verify.cause.message}` + ); + warnApplyFailed( + `could not read staged binary (${verify.cause.message})` + ); + } else { + appendLastError("apply", "staged binary hash mismatch"); + warnApplyFailed( + "staged binary failed integrity check — discarded" + ); + } + return; + } + + const { error: renameError } = tryCatchSync(function () { + fs.renameSync(stagedPath, process.execPath); + }); + if (renameError) { + // Persistent rename failures won't self-heal; cleanup to avoid looping on every launch. + cleanupStagedArtifacts(); + const writeCode = classifyWriteError(renameError); + const rawCode = (renameError as NodeJS.ErrnoException).code; + appendLastError( + "apply", + `rename ${rawCode ?? "unknown"}: ${renameError.message}` + ); + if (writeCode !== null) { + warnApplyFailed( + `binary directory not writable (${writeCode}) — run "sudo worktree update" to install the pending update manually` + ); + } else { + warnApplyFailed( + `rename failed (${rawCode ?? renameError.message}) — staged update discarded` + ); + } + return; + } + safeUnlinkSync(metaPath); + // Bump throttle so the sibling scheduleBackgroundUpdateCheck doesn't redundantly re-check. + recordCheckCompleted(); + + const { GREEN, BOLD, RESET } = COLORS; + console.error( + `worktree ${GREEN}${BOLD}auto-updated${RESET} to ${BOLD}v${meta.version}${RESET}` + ); + } catch (error) { + // Swallow errno-style I/O only; let programmer bugs propagate with a stack trace. 
+ if (!(error instanceof Error) || !("code" in error)) { + throw error; + } + appendLastError("apply", error.message); + warnApplyFailed(error.message); + } +} + +function warnApplyFailed(reason: string): void { + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: could not apply staged update (${reason}); continuing with current version${RESET}` + ); +} + +async function readLastCheckMs(): Promise { + const file = Bun.file(getLastCheckPath()); + const { data: exists, error: existsError } = await tryCatch(file.exists()); + if (existsError) { + appendLastError("check", `last-check exists: ${existsError.message}`); + return null; + } + if (!exists) return null; + const { data: text, error } = await tryCatch(file.text()); + if (error) { + appendLastError("check", `last-check read: ${error.message}`); + return null; + } + if (!text) return null; + const parsed = Number(text.trim()); + if (!Number.isFinite(parsed)) { + appendLastError( + "check", + `last-check corrupt: ${JSON.stringify(text.slice(0, 40))}` + ); + return null; + } + return parsed; +} + +async function isAutoUpdateDisabled(): Promise { + if (process.env.WORKTREE_NO_UPDATE === "1") return true; + // Fail CLOSED on broken config so a typo can't silently disable auto-update. + return !(await shouldAutoUpdate(function (msg) { + appendLastError("check", msg); + })); +} + +async function scheduleBackgroundUpdateCheck(): Promise { + try { + if (!isStandalone()) return; + // Skip spawn if cache is unwritable; the child would also fail and burn API quota. + if (hasCacheWriteFailed) return; + if (await isAutoUpdateDisabled()) return; + + const lastCheck = await readLastCheckMs(); + const now = Date.now(); + const shouldSkip = + lastCheck !== null && + now - lastCheck >= 0 && + now - lastCheck < TWENTY_FOUR_HOURS_MS; + if (shouldSkip) return; + + // Only the child writes last-check on success, so a failed check never burns the 24h window. 
+ // Child stderr is funneled to last-error so background panics are visible on the next launch. + const { data: stderrFd, error: stderrOpenError } = tryCatchSync( + function () { + ensureCacheDir(); + return fs.openSync(getLastErrorPath(), "a"); + } + ); + if (stderrOpenError) { + // If we can't capture the child's stderr, don't spawn blind — the + // throttle cache lives in the same dir, so it's likely unwritable too. + hasCacheWriteFailed = true; + warnCacheWriteFailureOnce(stderrOpenError.message); + return; + } + try { + Bun.spawn({ + cmd: [process.execPath, INTERNAL_CHECK_SUBCOMMAND], + stdin: "ignore", + stdout: "ignore", + stderr: stderrFd, + // POSIX setsid(): survives terminal close so a slow download isn't SIGHUPed. + detached: true, + }).unref(); + } finally { + // Close parent's fd copy even if Bun.spawn throws synchronously (else fd leak per launch). + const inheritedFd = stderrFd; + tryCatchSync(function () { + fs.closeSync(inheritedFd); + }); + } + } catch (error) { + // Swallow errno-style only; let programmer bugs propagate. + if (!(error instanceof Error) || !("code" in error)) { + throw error; + } + appendLastError("check", `spawn: ${error.message}`); + } +} + +function recordCheckCompleted(): void { + if (hasCacheWriteFailed) return; + const { error } = tryCatchSync(function () { + ensureCacheDir(); + fs.writeFileSync(getLastCheckPath(), String(Date.now())); + }); + if (error) { + // Latch: future calls and scheduleBackgroundUpdateCheck short-circuit. + hasCacheWriteFailed = true; + appendLastError("check", `last-check write: ${error.message}`); + warnCacheWriteFailureOnce(error.message); + } +} + +async function runBackgroundUpdateCheck(): Promise { + const assetName = getAssetName(); + if (!assetName) { + // Structural — burn throttle so we don't thrash the API. 
+ appendLastError("check", `unsupported platform/arch`); + recordCheckCompleted(); + return; + } + + const { data: release, error: releaseError } = + await tryCatch(fetchLatestRelease()); + if (releaseError || !release) { + appendLastError( + "check", + `fetchLatestRelease: ${releaseError?.message ?? "unknown"}` + ); + recordCheckCompleted(); + return; + } + + if (compareVersions(pkg.version, release.version) >= 0) { + recordCheckCompleted(); + return; + } + + const asset = release.assets.find(function (entry) { + return entry.name === assetName; + }); + if (!asset) { + // Transient: maintainer may upload the missing arch later; don't burn throttle. + appendLastError( + "check", + `release ${release.tag} missing asset ${assetName}` + ); + return; + } + + const binaryDir = getBinaryDir(); + const tmpPath = path.join( + binaryDir, + `${STAGING_FILENAME}.${randomBytes(8).toString("hex")}.tmp` + ); + + // Pre-unlink to prevent the write from following a planted symlink. + safeUnlinkSync(tmpPath); + const { error: dlError } = await tryCatch( + downloadAsset(asset, tmpPath, undefined, function (op, downloadErr) { + appendLastError("check", `${op}: ${downloadErr.message}`); + }) + ); + if (dlError) { + safeUnlinkSync(tmpPath); + appendLastError("check", `download: ${dlError.message}`); + recordCheckCompleted(); + return; + } + + // Verify BEFORE chmod/probe: running an unverified binary is code execution. + const verify = await verifyAssetAgainstSums( + tmpPath, + assetName, + release.assets + ); + if (!verify.ok) { + safeUnlinkSync(tmpPath); + if (verify.kind === "sums-tamper") { + appendLastError( + "check", + `TAMPER: SHA256SUMS for ${assetName} is malformed (${verify.reason}) — refusing to stage` + ); + recordCheckCompleted(); + } else if (verify.kind === "sums-error") { + appendLastError( + "check", + `SHA256SUMS fetch failed — refusing to stage: ${verify.reason}` + ); + // Burn throttle for permanent failures; transient ones keep retrying. 
+ if (!verify.retryable) { + recordCheckCompleted(); + } + } else if (verify.kind === "missing-entry") { + appendLastError( + "check", + `SHA256SUMS missing entry for ${assetName}` + ); + recordCheckCompleted(); + } else if (verify.kind === "hash-io-error") { + // Local IO may be transient (disk full mid-write); don't burn throttle. + appendLastError( + "check", + `hash io-error for ${assetName}: ${verify.cause.message}` + ); + } else { + appendLastError("check", `hash mismatch for ${assetName}`); + recordCheckCompleted(); + } + return; + } + let verifiedHash: string | null = verify.hash; + + const { error: chmodError } = tryCatchSync(function () { + fs.chmodSync(tmpPath, 0o755); + }); + if (chmodError) { + safeUnlinkSync(tmpPath); + appendLastError("check", `chmod: ${chmodError.message}`); + if (classifyWriteError(chmodError) !== null) { + recordCheckCompleted(); + } + return; + } + + const probe = probeBinaryRuns(tmpPath); + if (!probe.ok) { + safeUnlinkSync(tmpPath); + appendLastError("check", `probe: ${probe.reason}`); + // Probe fail is structural for this release — burn throttle or we redownload 50 MB every launch. + recordCheckCompleted(); + return; + } + + // Legacy release lacks SHA256SUMS; self-hash only detects local stage→apply corruption, not upstream tampering. + if (verifiedHash === null) { + const { data: computed, error: hashError } = tryCatchSync(function () { + return computeSha256Sync(tmpPath); + }); + if (hashError || !computed) { + safeUnlinkSync(tmpPath); + appendLastError( + "check", + `post-probe hash: ${hashError?.message ?? "unknown"}` + ); + return; + } + verifiedHash = computed; + } + + // Lock writer to reader's pattern so a future parser relaxation can't turn a crafted tag into a hash-spoof. 
+ if (!SIDECAR_VERSION_PATTERN.test(release.version)) { + safeUnlinkSync(tmpPath); + appendLastError( + "check", + `invalid release version for sidecar: ${JSON.stringify(release.version.slice(0, 40))}` + ); + return; + } + + const metaTmpPath = path.join( + binaryDir, + `${META_SIDECAR_FILENAME}.${randomBytes(8).toString("hex")}.tmp` + ); + const sidecarContent = formatSidecar({ + version: release.version, + sha256: verifiedHash, + }); + safeUnlinkSync(metaTmpPath); + const { error: metaWriteError } = tryCatchSync(function () { + fs.writeFileSync(metaTmpPath, sidecarContent); + }); + if (metaWriteError) { + safeUnlinkSync(tmpPath); + safeUnlinkSync(metaTmpPath); + appendLastError("check", `sidecar write: ${metaWriteError.message}`); + // Structural permission/readonly errors won't self-heal; burn throttle. + if (classifyWriteError(metaWriteError) !== null) { + recordCheckCompleted(); + } + return; + } + const { error: metaRenameError } = tryCatchSync(function () { + fs.renameSync(metaTmpPath, getMetaSidecarPath()); + }); + if (metaRenameError) { + safeUnlinkSync(tmpPath); + safeUnlinkSync(metaTmpPath); + appendLastError("check", `sidecar commit: ${metaRenameError.message}`); + if (classifyWriteError(metaRenameError) !== null) { + recordCheckCompleted(); + } + return; + } + + const { error: renameError } = tryCatchSync(function () { + fs.renameSync(tmpPath, getStagingPath()); + }); + if (renameError) { + safeUnlinkSync(tmpPath); + safeUnlinkSync(getMetaSidecarPath()); + appendLastError("check", `stage: ${renameError.message}`); + if (classifyWriteError(renameError) !== null) { + recordCheckCompleted(); + } + return; + } + + recordCheckCompleted(); +} + +type ProbeResult = { ok: true } | { ok: false; reason: string }; + +const PROBE_VERSION_PATTERN = /\d+\.\d+\.\d+/; + +function probeBinaryRuns(filePath: string): ProbeResult { + const { data: result, error } = tryCatchSync(function () { + return Bun.spawnSync({ + cmd: [filePath, "--version"], + // Capture stdout to 
reject exit-0-with-garbage as a valid probe. + stdout: "pipe", + stderr: "pipe", + timeout: PROBE_TIMEOUT_MS, + // Disable auto-update in the probe to prevent grandchild spawn / stale-stage consumption. + env: { ...process.env, WORKTREE_NO_UPDATE: "1" }, + }); + }); + if (error || !result) { + return { + ok: false, + reason: error?.message ?? "spawn failed", + }; + } + if (result.exitCode === null) { + // Bun.spawnSync returns null exitCode on timeout kill. + return { + ok: false, + reason: `timed out after ${PROBE_TIMEOUT_MS}ms`, + }; + } + if (result.exitCode !== 0) { + const stderr = decodeProbeStream(result.stderr); + const base = `exit ${result.exitCode}`; + return { ok: false, reason: stderr ? `${base}: ${stderr}` : base }; + } + const stdout = decodeProbeStream(result.stdout); + if (!PROBE_VERSION_PATTERN.test(stdout)) { + const truncated = stdout.slice(0, 80); + return { + ok: false, + reason: `version output did not match expected format: ${JSON.stringify(truncated)}`, + }; + } + return { ok: true }; +} + +function decodeProbeStream(stream: unknown): string { + if (!(stream instanceof Uint8Array) && !(stream instanceof Buffer)) { + // Emit a debuggable marker (not "") so a Bun API shape change is visible in last-error. + return ``; + } + const bytes = stream instanceof Buffer ? 
new Uint8Array(stream) : stream; + const truncated = bytes.slice(0, PROBE_STDERR_TRUNCATE_BYTES); + return new TextDecoder().decode(truncated).trim(); +} + +export { + appendBackgroundCheckPanic, + applyPendingUpdate, + cleanupStagedArtifacts, + probeBinaryRuns, + recordCheckCompleted, + scheduleBackgroundUpdateCheck, + runBackgroundUpdateCheck, + INTERNAL_CHECK_SUBCOMMAND, +}; diff --git a/src/lib/config.test.ts b/src/lib/config.test.ts index ebf361d..d9f9bfd 100644 --- a/src/lib/config.test.ts +++ b/src/lib/config.test.ts @@ -92,4 +92,26 @@ describe("validateConfig", () => { const config = validateConfig({ DEFAULT_BASE: "origin/dev" }); expect(config.WORKTREE_DIR).toBe(DEFAULT_WORKTREE_DIR); }); + + it("AUTO_UPDATE defaults to true", () => { + const config = validateConfig({}); + expect(config.AUTO_UPDATE).toBe(true); + }); + + it("accepts AUTO_UPDATE=false", () => { + const config = validateConfig({ AUTO_UPDATE: "false" }); + expect(config.AUTO_UPDATE).toBe(false); + }); + + it("accepts AUTO_UPDATE=0, yes, 1 variants", () => { + expect(validateConfig({ AUTO_UPDATE: "0" }).AUTO_UPDATE).toBe(false); + expect(validateConfig({ AUTO_UPDATE: "no" }).AUTO_UPDATE).toBe(false); + expect(validateConfig({ AUTO_UPDATE: "true" }).AUTO_UPDATE).toBe(true); + expect(validateConfig({ AUTO_UPDATE: "1" }).AUTO_UPDATE).toBe(true); + expect(validateConfig({ AUTO_UPDATE: "yes" }).AUTO_UPDATE).toBe(true); + }); + + it("rejects unparseable AUTO_UPDATE", () => { + expect(() => validateConfig({ AUTO_UPDATE: "junk" })).toThrow(); + }); }); diff --git a/src/lib/config.ts b/src/lib/config.ts index f70bafc..a3657a4 100644 --- a/src/lib/config.ts +++ b/src/lib/config.ts @@ -1,11 +1,36 @@ +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; import { z } from "zod"; import { DEFAULT_WORKTREE_DIR } from "./constants"; -import { tryCatch } from "./try-catch"; -import path from "node:path"; +import { tryCatch, tryCatchSync } from "./try-catch"; + +const booleanLike 
= z + .union([z.boolean(), z.string()]) + .transform(function (value) { + if (typeof value === "boolean") return value; + const normalized = value.trim().toLowerCase(); + if ( + normalized === "true" || + normalized === "1" || + normalized === "yes" + ) { + return true; + } + if ( + normalized === "false" || + normalized === "0" || + normalized === "no" + ) { + return false; + } + throw new Error(`Expected boolean-like value, got "${value}"`); + }); const configSchema = z.object({ DEFAULT_BASE: z.string().optional(), WORKTREE_DIR: z.string().default(DEFAULT_WORKTREE_DIR), + AUTO_UPDATE: booleanLike.default(true), }); type Config = z.infer; @@ -37,24 +62,159 @@ function validateConfig(raw: Record): Config { return configSchema.parse(raw); } -async function loadConfig(root: string): Promise { - const configPath = path.join(root, ".worktreerc"); - const file = Bun.file(configPath); - const isExists = await file.exists(); +const warnedPaths = new Set(); - if (!isExists) { - return validateConfig({}); +function displayPath(filePath: string): string { + const home = os.homedir(); + if (filePath === home) return "~"; + if (filePath.startsWith(home + path.sep)) { + return "~" + filePath.slice(home.length); } + return filePath; +} + +function warnOnce(filePath: string, message: string): void { + if (warnedPaths.has(filePath)) return; + warnedPaths.add(filePath); + console.error(message); +} - const { data: content, error } = await tryCatch(file.text()); +type ConfigScope = "project" | "global"; - if (error) { +const EXISTS_ERROR_PREFIX = "shouldAutoUpdate exists"; +const READ_ERROR_PREFIX = + "~/.worktreerc read failed; auto-update disabled until fixed"; +const PARSE_ERROR_PREFIX = + "~/.worktreerc invalid; auto-update disabled until fixed"; + +async function readConfigFile( + filePath: string, + scope: ConfigScope +): Promise { + const file = Bun.file(filePath); + const display = displayPath(filePath); + // file.exists() can throw on stat errors — guard like 
shouldAutoUpdate below. + const { data: isExists, error: existsError } = await tryCatch( + file.exists() + ); + if (existsError) { + warnOnce( + filePath, + `warning: could not stat ${display}: ${existsError.message}. Using defaults.` + ); + return validateConfig({}); + } + if (!isExists) return validateConfig({}); + const { data: content, error: readError } = await tryCatch(file.text()); + if (readError) { + warnOnce( + filePath, + `warning: could not read ${display}: ${readError.message}. Using defaults.` + ); return validateConfig({}); } + const raw = parseConfigContent(content); + if (scope === "project" && "AUTO_UPDATE" in raw) { + // Also validate — user moving this line to ~/.worktreerc later needs to know if it's syntactically valid. + const { data: probe, error: probeError } = tryCatchSync(function () { + return booleanLike.safeParse(raw.AUTO_UPDATE); + }); + const validityNote = + probeError !== null + ? `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probeError.message})` + : probe.success + ? `; the value "${raw.AUTO_UPDATE}" parses as a boolean (would take effect once moved)` + : `; the value "${raw.AUTO_UPDATE}" is also invalid as a boolean (${probe.error.issues[0]?.message ?? "unknown"})`; + warnOnce( + `${filePath}:AUTO_UPDATE`, + `warning: AUTO_UPDATE in project ${display} is ignored — set it in ~/.worktreerc instead${validityNote}.` + ); + // Strip pre-validate so `AUTO_UPDATE=junk` doesn't discard valid sibling keys. + delete raw.AUTO_UPDATE; + } + const { data: parsed, error: parseError } = tryCatchSync(function () { + return validateConfig(raw); + }); + if (parseError) { + warnOnce( + filePath, + `warning: ${display} is invalid: ${parseError.message}. 
Using defaults.` + ); + return validateConfig({}); + } + return parsed; +} +async function loadConfig(root: string): Promise { + return readConfigFile(path.join(root, ".worktreerc"), "project"); +} + +type AutoUpdateOnError = (message: string) => void; + +function decideAutoUpdateFromContent( + content: string | null, + onError?: AutoUpdateOnError +): boolean { + if (content === null) return true; const raw = parseConfigContent(content); - return validateConfig(raw); + const { data: parsed, error: parseError } = tryCatchSync(function () { + return validateConfig(raw); + }); + if (parseError) { + onError?.(`${PARSE_ERROR_PREFIX}: ${parseError.message}`); + return false; + } + return parsed.AUTO_UPDATE; +} + +// Fail CLOSED on parse/read errors so a typo can't silently override opt-out. +// `onError` threads diagnostics so users discover *why* auto-update is disabled. +async function shouldAutoUpdate(onError?: AutoUpdateOnError): Promise { + const filePath = path.join(os.homedir(), ".worktreerc"); + const file = Bun.file(filePath); + // `file.exists()` can throw EACCES; guard to avoid crashing the scheduler. + const { data: isExists, error: existsError } = await tryCatch( + file.exists() + ); + if (existsError) { + onError?.(`${EXISTS_ERROR_PREFIX}: ${existsError.message}`); + return false; + } + if (!isExists) return decideAutoUpdateFromContent(null, onError); + const { data: content, error: readError } = await tryCatch(file.text()); + if (readError) { + onError?.(`${READ_ERROR_PREFIX}: ${readError.message}`); + return false; + } + return decideAutoUpdateFromContent(content, onError); +} + +// Sync twin used at startup by applyPendingUpdate (before brocli.run / top-level await). 
+function shouldAutoUpdateSync(onError?: AutoUpdateOnError): boolean { + const filePath = path.join(os.homedir(), ".worktreerc"); + const { data: isExists, error: existsError } = tryCatchSync(function () { + return fs.existsSync(filePath); + }); + if (existsError) { + onError?.(`${EXISTS_ERROR_PREFIX}: ${existsError.message}`); + return false; + } + if (!isExists) return decideAutoUpdateFromContent(null, onError); + const { data: content, error: readError } = tryCatchSync(function () { + return fs.readFileSync(filePath, "utf8"); + }); + if (readError) { + onError?.(`${READ_ERROR_PREFIX}: ${readError.message}`); + return false; + } + return decideAutoUpdateFromContent(content, onError); } -export { loadConfig, parseConfigContent, validateConfig }; +export { + loadConfig, + parseConfigContent, + shouldAutoUpdate, + shouldAutoUpdateSync, + validateConfig, +}; export type { Config }; diff --git a/src/lib/fs-utils.ts b/src/lib/fs-utils.ts new file mode 100644 index 0000000..755b781 --- /dev/null +++ b/src/lib/fs-utils.ts @@ -0,0 +1,78 @@ +import fs from "node:fs"; +import fsPromises from "node:fs/promises"; +import { COLORS } from "./logger"; + +function isEnoent(error: unknown): boolean { + return ( + error instanceof Error && + "code" in error && + (error as NodeJS.ErrnoException).code === "ENOENT" + ); +} + +function warnUnlinkFailure(filePath: string, error: unknown): void { + const message = error instanceof Error ? 
error.message : String(error); + const { DIM, RESET } = COLORS; + console.error( + `${DIM}worktree: could not remove ${filePath} (${message})${RESET}` + ); +} + +async function safeUnlink(filePath: string): Promise { + await fsPromises.unlink(filePath).catch(function (error) { + if (isEnoent(error)) return; + warnUnlinkFailure(filePath, error); + }); +} + +function safeUnlinkSync(filePath: string): void { + try { + fs.unlinkSync(filePath); + } catch (error) { + if (isEnoent(error)) return; + warnUnlinkFailure(filePath, error); + } +} + +type WriteErrorCode = "EACCES" | "EPERM" | "EROFS" | "EBUSY" | "ETXTBSY"; + +// Walks cause chain for errno; EBUSY/ETXTBSY treated as permanent (file locked/busy). +const WRITE_ERROR_CODES = new Set([ + "EACCES", + "EPERM", + "EROFS", + "EBUSY", + "ETXTBSY", +]); + +function classifyWriteError(error: unknown): WriteErrorCode | null { + let cur: unknown = error; + while (cur instanceof Error) { + if ("code" in cur) { + const code = (cur as NodeJS.ErrnoException).code; + if (code !== undefined && WRITE_ERROR_CODES.has(code)) { + return code as WriteErrorCode; + } + } + cur = cur.cause; + } + return null; +} + +// Unwrap `cause` to surface the original errno message instead of a generic wrapper. +function deepestMessage(error: unknown): string { + let cur: unknown = error; + while (cur instanceof Error && cur.cause !== undefined) { + cur = cur.cause; + } + return cur instanceof Error ? 
cur.message : String(cur); +} + +export { + classifyWriteError, + deepestMessage, + isEnoent, + safeUnlink, + safeUnlinkSync, +}; +export type { WriteErrorCode }; diff --git a/src/lib/release.test.ts b/src/lib/release.test.ts new file mode 100644 index 0000000..5d02a2a --- /dev/null +++ b/src/lib/release.test.ts @@ -0,0 +1,463 @@ +import { afterEach, beforeEach, describe, expect, it } from "bun:test"; +import fs from "node:fs"; +import os from "node:os"; +import path from "node:path"; +import { + compareVersions, + fetchLatestRelease, + parseSha256Sums, + verifyAssetAgainstSums, + type ReleaseAsset, +} from "./release"; + +describe("compareVersions", () => { + it("returns 0 for equal versions", () => { + expect(compareVersions("1.2.3", "1.2.3")).toBe(0); + expect(compareVersions("v1.2.3", "1.2.3")).toBe(0); + }); + + it("returns negative when a < b", () => { + expect(compareVersions("1.2.3", "1.2.4")).toBeLessThan(0); + expect(compareVersions("1.2.3", "1.3.0")).toBeLessThan(0); + expect(compareVersions("1.2.3", "2.0.0")).toBeLessThan(0); + }); + + it("returns positive when a > b", () => { + expect(compareVersions("1.2.4", "1.2.3")).toBeGreaterThan(0); + expect(compareVersions("2.0.0", "1.9.9")).toBeGreaterThan(0); + }); + + it("handles missing components as 0", () => { + expect(compareVersions("1", "1.0.0")).toBe(0); + expect(compareVersions("1.0", "1.0.1")).toBeLessThan(0); + }); + + it("treats prerelease tags as less than the base version (SemVer 2.0)", () => { + expect(compareVersions("1.2.3-beta", "1.2.3")).toBeLessThan(0); + expect(compareVersions("1.2.3", "1.2.3-beta")).toBeGreaterThan(0); + expect(compareVersions("1.2.3-rc.1", "1.2.4")).toBeLessThan(0); + }); + + it("orders prerelease tags per SemVer 2.0 §11", () => { + // Numeric within-identifier comparison. + expect(compareVersions("1.2.3-beta.1", "1.2.3-beta.2")).toBeLessThan(0); + // Lex on string identifiers. 
+ expect(compareVersions("1.2.3-rc.1", "1.2.3-beta.1")).toBeGreaterThan( + 0 + ); + // Equal prereleases. + expect(compareVersions("1.2.3-alpha", "1.2.3-alpha")).toBe(0); + }); + + it("compares numeric prerelease identifiers numerically (SemVer 2.0)", () => { + // SemVer 2.0 §11.4.1: numeric identifiers compare numerically — rc.10 > rc.2. + expect(compareVersions("1.2.3-rc.10", "1.2.3-rc.2")).toBeGreaterThan(0); + expect(compareVersions("1.2.3-rc.2", "1.2.3-rc.10")).toBeLessThan(0); + expect(compareVersions("1.2.3-alpha.9", "1.2.3-alpha.11")).toBeLessThan( + 0 + ); + }); + + it("treats numeric identifiers as lower precedence than string identifiers", () => { + // SemVer 2.0 §11.4.3: numeric identifiers always have lower precedence than + // alphanumeric identifiers within the same prerelease position. + expect( + compareVersions("1.0.0-alpha.1", "1.0.0-alpha.beta") + ).toBeLessThan(0); + }); + + it("longer prerelease wins when all preceding identifiers equal", () => { + // SemVer 2.0 §11.4.4: a larger set of fields has higher precedence than + // a smaller set, when all preceding identifiers are equal. 
+ expect(compareVersions("1.0.0-alpha", "1.0.0-alpha.1")).toBeLessThan(0); + expect( + compareVersions("1.0.0-alpha.beta", "1.0.0-alpha.beta.1") + ).toBeLessThan(0); + }); + + it("never returns NaN for garbage input", () => { + expect(Number.isFinite(compareVersions("junk", "1.2.3"))).toBe(true); + expect(Number.isFinite(compareVersions("1.2.3", "also-junk"))).toBe( + true + ); + }); +}); + +describe("parseSha256Sums", () => { + it("parses standard shasum -a 256 output", () => { + const text = [ + "a".repeat(64) + " worktree-darwin-arm64", + "b".repeat(64) + " worktree-linux-x64", + ].join("\n"); + const result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + expect(result["worktree-linux-x64"]).toBe("b".repeat(64)); + }); + + it("parses BSD-style asterisk prefix (shasum -b)", () => { + const text = "c".repeat(64) + " *worktree-darwin-x64"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-x64"]).toBe("c".repeat(64)); + }); + + it("skips comments and blank lines", () => { + const text = [ + "# header comment", + "", + "d".repeat(64) + " worktree-linux-arm64", + ].join("\n"); + const result = parseSha256Sums(text); + expect(Object.keys(result)).toEqual(["worktree-linux-arm64"]); + }); + + it("lowercases the hash", () => { + const hash = "ABCDEF" + "0".repeat(58); + const text = hash + " worktree-linux-x64"; + const result = parseSha256Sums(text); + expect(result["worktree-linux-x64"]).toBe(hash.toLowerCase()); + }); + + it("ignores malformed lines", () => { + const text = [ + "not-a-hash worktree-darwin-arm64", + "e".repeat(64) + " worktree-linux-x64", + ].join("\n"); + const result = parseSha256Sums(text); + expect(Object.keys(result)).toEqual(["worktree-linux-x64"]); + }); + + it("rejects duplicate filename entries with different hashes", () => { + const dupe = [ + "a".repeat(64) + " worktree-darwin-arm64", + "b".repeat(64) + " worktree-darwin-arm64", + ].join("\n"); + expect(() => 
parseSha256Sums(dupe)).toThrow(/Duplicate/); + }); + + it("handles CRLF line endings", () => { + const text = + "a".repeat(64) + + " worktree-darwin-arm64\r\n" + + "b".repeat(64) + + " worktree-linux-x64\r\n"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + expect(result["worktree-linux-x64"]).toBe("b".repeat(64)); + }); + + it("handles missing trailing newline", () => { + const text = "a".repeat(64) + " worktree-darwin-arm64"; + const result = parseSha256Sums(text); + expect(result["worktree-darwin-arm64"]).toBe("a".repeat(64)); + }); + + it("rejects BSD-tagged-format `SHA256 (file) = hex` (not the format we publish)", () => { + const text = `SHA256 (worktree-darwin-arm64) = ${"a".repeat(64)}`; + const result = parseSha256Sums(text); + // Pins the parser to reject unknown formats — guards against accepting unverified hashes. + expect(Object.keys(result)).toEqual([]); + }); +}); + +describe("fetchLatestRelease — JSON-shape boundary", () => { + let originalFetch: typeof globalThis.fetch; + + beforeEach(() => { + originalFetch = globalThis.fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + }); + + function stubFetch(handler: () => Response): void { + globalThis.fetch = async function ( + _input: RequestInfo | URL + ): Promise { + return handler(); + } as typeof globalThis.fetch; + } + + it("returns parsed release on a valid payload", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ + tag_name: "v1.2.3", + assets: [ + { + name: "worktree-darwin-arm64", + browser_download_url: + "https://objects.githubusercontent.com/worktree-darwin-arm64", + }, + ], + }), + { status: 200 } + ); + }); + const result = await fetchLatestRelease(); + expect(result.tag).toBe("v1.2.3"); + expect(result.version).toBe("1.2.3"); + expect(result.assets).toHaveLength(1); + }); + + it("throws on missing tag_name", async () => { + stubFetch(function () { + return new 
Response(JSON.stringify({ assets: [] }), { + status: 200, + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on missing assets array", async () => { + stubFetch(function () { + return new Response(JSON.stringify({ tag_name: "v1.0.0" }), { + status: 200, + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on assets being a non-array", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ tag_name: "v1.0.0", assets: "x" }), + { status: 200 } + ); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/tag_name|assets/); + }); + + it("throws on malformed tag_name (path-traversal-shaped)", async () => { + stubFetch(function () { + return new Response( + JSON.stringify({ + tag_name: "v1.2.3/../evil", + assets: [], + }), + { status: 200 } + ); + }); + await expect(fetchLatestRelease()).rejects.toThrow( + /Release tag malformed/ + ); + }); + + it("throws on non-2xx HTTP", async () => { + stubFetch(function () { + return new Response("server error", { + status: 500, + statusText: "Internal Server Error", + }); + }); + await expect(fetchLatestRelease()).rejects.toThrow(/500/); + }); +}); + +describe("verifyAssetAgainstSums", () => { + const ASSET_BYTES = new Uint8Array([ + 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x20, 0x77, 0x6f, 0x72, 0x6c, 0x64, + ]); + // Precomputed SHA256 of ASSET_BYTES. 
+ const ASSET_SHA = + "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9"; + const ASSET_NAME = "worktree-darwin-arm64"; + + let tmpFile: string; + let originalFetch: typeof globalThis.fetch; + + beforeEach(() => { + tmpFile = path.join( + os.tmpdir(), + `verify-test-${process.pid}-${Date.now()}` + ); + fs.writeFileSync(tmpFile, ASSET_BYTES); + originalFetch = globalThis.fetch; + }); + + afterEach(() => { + globalThis.fetch = originalFetch; + try { + fs.unlinkSync(tmpFile); + } catch { + // ignore + } + }); + + function makeAsset(name: string): ReleaseAsset { + // Allowlisted host so the withTimeout host-pin doesn't reject pre-stub. + return { + name, + browser_download_url: `https://objects.githubusercontent.com/${name}`, + }; + } + + function resolveFetchUrl(input: RequestInfo | URL): string { + if (typeof input === "string") return input; + if (input instanceof URL) return input.toString(); + return input.url; + } + + function stubFetch(handler: (url: string) => Response): void { + globalThis.fetch = async function ( + input: RequestInfo | URL + ): Promise { + return handler(resolveFetchUrl(input)); + } as typeof globalThis.fetch; + } + + it("returns ok with null hash when SHA256SUMS is not published (legacy)", async () => { + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + ]); + expect(result).toEqual({ ok: true, hash: null }); + }); + + it("returns ok with lowercase hex when SHA256SUMS contains the entry", async () => { + const sumsBody = `${ASSET_SHA} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: true, hash: ASSET_SHA }); + }); + + it("returns sums-error with retryable flag when SHA256SUMS fetch fails 5xx", async () => { + stubFetch(function () { + return new Response("bad gateway", { + 
status: 502, + statusText: "Bad Gateway", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(true); + expect(result.reason).toContain("502"); + }); + + it("returns sums-error marked non-retryable on 4xx (permanent)", async () => { + stubFetch(function () { + return new Response("not found", { + status: 404, + statusText: "Not Found", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(false); + }); + + it("returns sums-error marked retryable on 403 (rate limit)", async () => { + stubFetch(function () { + return new Response("forbidden", { + status: 403, + statusText: "Forbidden", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + // 403/429 are GitHub rate-limit signals — transient, NOT permanent. 
+ expect(result.retryable).toBe(true); + }); + + it("returns sums-error marked retryable on 429 (rate limit)", async () => { + stubFetch(function () { + return new Response("too many requests", { + status: 429, + statusText: "Too Many Requests", + }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("sums-error"); + if (result.kind !== "sums-error") return; + expect(result.retryable).toBe(true); + }); + + it("returns sums-tamper kind when SHA256SUMS contains duplicate entry (tampering)", async () => { + const dupeBody = [ + "a".repeat(64) + " " + ASSET_NAME, + "b".repeat(64) + " " + ASSET_NAME, + ].join("\n"); + stubFetch(function () { + return new Response(dupeBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result.ok).toBe(false); + if (result.ok) return; + // Distinct kind from "sums-error" so foreground/background paths can + // escalate (loud red error / TAMPER: log prefix) instead of treating + // tampering the same as a transient outage. 
+ expect(result.kind).toBe("sums-tamper"); + if (result.kind !== "sums-tamper") return; + expect(result.reason).toMatch(/Duplicate/); + }); + + it("returns missing-entry when SHA256SUMS exists but has no row for asset", async () => { + const sumsBody = `${ASSET_SHA} some-other-asset\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: false, kind: "missing-entry" }); + }); + + it("returns hash-mismatch when entry exists but content differs", async () => { + const wrongHash = "0".repeat(64); + const sumsBody = `${wrongHash} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums(tmpFile, ASSET_NAME, [ + makeAsset(ASSET_NAME), + makeAsset("SHA256SUMS"), + ]); + expect(result).toEqual({ ok: false, kind: "hash-mismatch" }); + }); + + it("returns hash-io-error when the binary file is unreadable", async () => { + const sumsBody = `${ASSET_SHA} ${ASSET_NAME}\n`; + stubFetch(function () { + return new Response(sumsBody, { status: 200 }); + }); + const result = await verifyAssetAgainstSums( + "/nonexistent/path/file.bin", + ASSET_NAME, + [makeAsset(ASSET_NAME), makeAsset("SHA256SUMS")] + ); + expect(result.ok).toBe(false); + if (result.ok) return; + expect(result.kind).toBe("hash-io-error"); + }); +}); diff --git a/src/lib/release.ts b/src/lib/release.ts new file mode 100644 index 0000000..2ce6b60 --- /dev/null +++ b/src/lib/release.ts @@ -0,0 +1,590 @@ +import fs from "node:fs"; +import { once } from "node:events"; +import { timingSafeEqual } from "node:crypto"; +import { isEnoent } from "./fs-utils"; +import { tryCatch, tryCatchSync } from "./try-catch"; +import pkg from "../../package.json"; + +const REPO = "bhagyamudgal/worktree-cli"; +const API_RELEASES_LATEST = 
`https://api.github.com/repos/${REPO}/releases/latest`; + +// Host-pin fetches to GitHub origins; defense-in-depth against CDN/release-asset compromise. +const ALLOWED_RELEASE_HOSTS = new Set([ + "api.github.com", + "github.com", + "codeload.github.com", + "objects.githubusercontent.com", + "release-assets.githubusercontent.com", + "github-releases.githubusercontent.com", +]); + +function isAllowedReleaseHost(urlString: string): boolean { + const { data: parsed } = tryCatchSync(function () { + return new URL(urlString); + }); + if (!parsed) return false; + return ALLOWED_RELEASE_HOSTS.has(parsed.host); +} + +const RELEASE_TAG_PATTERN = /^v?\d+\.\d+\.\d+(?:-[\w.-]+)?$/; + +// Identify as worktree-cli; GITHUB_TOKEN bumps rate limit from 60/hr to 5000/hr. +function buildGitHubHeaders(): Record { + const headers: Record = { + "User-Agent": `worktree-cli/${pkg.version}`, + Accept: "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + }; + const token = process.env.GITHUB_TOKEN; + if (token && token.length > 0) { + headers.Authorization = `Bearer ${token}`; + } + return headers; +} + +const DEFAULT_META_TIMEOUT_MS = 30_000; +const DEFAULT_ASSET_TIMEOUT_MS = 600_000; +// 4× headroom over current ~50 MB binary; rejects oversized CDN responses pre-verification. 
+const MAX_ASSET_BYTES = 200 * 1024 * 1024; +const MAX_REDIRECT_HOPS = 5; + +type ReleaseAsset = { + name: string; + browser_download_url: string; +}; + +type ReleaseInfo = { + tag: string; + version: string; + assets: ReleaseAsset[]; +}; + +function isStandalone(): boolean { + return ( + Bun.main.startsWith("/$bunfs/") || import.meta.url.includes("$bunfs/") + ); +} + +function getAssetName(): string | null { + const platform = process.platform; + const arch = process.arch; + if (platform !== "darwin" && platform !== "linux") return null; + if (arch !== "arm64" && arch !== "x64") return null; + return `worktree-${platform}-${arch}`; +} + +function parseNumericSegment(raw: string | undefined): number { + if (raw === undefined) return 0; + const leadingInt = /^(\d+)/.exec(raw); + if (!leadingInt) return 0; + const n = Number(leadingInt[1]); + return Number.isFinite(n) ? n : 0; +} + +type ParsedVersion = { + major: number; + minor: number; + patch: number; + prerelease: string | null; +}; + +function parseVersion(v: string): ParsedVersion { + const stripped = v.replace(/^v/, ""); + const dashIndex = stripped.indexOf("-"); + const core = dashIndex === -1 ? stripped : stripped.slice(0, dashIndex); + const prerelease = + dashIndex === -1 ? null : stripped.slice(dashIndex + 1) || null; + const [maj, min, patch] = core.split("."); + return { + major: parseNumericSegment(maj), + minor: parseNumericSegment(min), + patch: parseNumericSegment(patch), + prerelease, + }; +} + +// SemVer 2.0 §11: pairwise compare; numeric bn) return 1; + return 0; + } + if (aNumeric) return -1; + if (bNumeric) return 1; + if (a < b) return -1; + if (a > b) return 1; + return 0; +} + +function comparePrerelease(a: string | null, b: string | null): number { + if (a === b) return 0; + // A version with a prerelease has lower precedence than one without. 
+ if (a === null) return 1; + if (b === null) return -1; + const aParts = a.split("."); + const bParts = b.split("."); + const len = Math.min(aParts.length, bParts.length); + for (let i = 0; i < len; i++) { + const cmp = comparePrereleaseIdentifier(aParts[i], bParts[i]); + if (cmp !== 0) return cmp; + } + if (aParts.length < bParts.length) return -1; + if (aParts.length > bParts.length) return 1; + return 0; +} + +function compareVersions(a: string, b: string): number { + const pa = parseVersion(a); + const pb = parseVersion(b); + if (pa.major !== pb.major) return pa.major - pb.major; + if (pa.minor !== pb.minor) return pa.minor - pb.minor; + if (pa.patch !== pb.patch) return pa.patch - pb.patch; + return comparePrerelease(pa.prerelease, pb.prerelease); +} + +function isReleaseInfo(value: unknown): value is { + tag_name: string; + assets: ReleaseAsset[]; +} { + if (!value || typeof value !== "object") return false; + const rec = value as Record; + if (typeof rec.tag_name !== "string") return false; + if (!Array.isArray(rec.assets)) return false; + return rec.assets.every(function (entry: unknown) { + if (!entry || typeof entry !== "object") return false; + const asset = entry as Record; + return ( + typeof asset.name === "string" && + typeof asset.browser_download_url === "string" + ); + }); +} + +async function withTimeout( + url: string, + timeoutMs: number, + handler: (response: Response) => Promise +): Promise { + if (!isAllowedReleaseHost(url)) { + throw new Error( + `Refused to fetch URL with disallowed host: ${JSON.stringify(url.slice(0, 120))}` + ); + } + const controller = new AbortController(); + const timer = setTimeout(function () { + controller.abort(); + }, timeoutMs); + try { + // Follow redirects manually so each hop's host is validated BEFORE we connect to it — + // default `redirect: "follow"` connects to intermediate hosts and only exposes the final URL. 
+ const originHost = new URL(url).host; + let currentUrl = url; + // Once Authorization has been stripped on any cross-origin hop, never re-add — + // prevents a redirect chain that bounces back to the origin host from re-attaching the token. + let authStripped = false; + for (let hop = 0; hop < MAX_REDIRECT_HOPS; hop++) { + const headers = buildGitHubHeaders(); + if (authStripped || new URL(currentUrl).host !== originHost) { + delete headers.Authorization; + authStripped = true; + } + const response = await fetch(currentUrl, { + signal: controller.signal, + headers, + redirect: "manual", + }); + if (response.status >= 300 && response.status < 400) { + const location = response.headers.get("location"); + // Drain the redirect body so keep-alive sockets don't pin across hops. + await tryCatch(response.body?.cancel() ?? Promise.resolve()); + if (!location) { + throw new Error( + `Redirect ${response.status} without Location header from ${new URL(currentUrl).host}` + ); + } + const next = new URL(location, currentUrl).toString(); + if (!isAllowedReleaseHost(next)) { + // Log host only, not the full URL — signed CDN URLs can carry tokens in the query string. 
+ throw new Error( + `Refused redirect to disallowed host: ${new URL(next).host}` + ); + } + currentUrl = next; + continue; + } + return await handler(response); + } + throw new Error( + `Exceeded ${MAX_REDIRECT_HOPS} redirects from ${new URL(url).host}` + ); + } finally { + clearTimeout(timer); + } +} + +async function fetchLatestRelease( + timeoutMs: number = DEFAULT_META_TIMEOUT_MS +): Promise { + const { data: result, error } = await tryCatch( + withTimeout(API_RELEASES_LATEST, timeoutMs, async function (response) { + if (!response.ok) { + throw new Error( + `GitHub API error: ${response.status} ${response.statusText}` + ); + } + const json = await response.json(); + if (!isReleaseInfo(json)) { + throw new Error("Release payload missing tag_name or assets"); + } + // Reject malformed tags at the boundary so they can't propagate into paths/logs. + if (!RELEASE_TAG_PATTERN.test(json.tag_name)) { + throw new Error( + `Release tag malformed: ${JSON.stringify(json.tag_name.slice(0, 40))}` + ); + } + return { + tag: json.tag_name, + version: json.tag_name.replace(/^v/, ""), + assets: json.assets, + }; + }) + ); + if (error || !result) { + throw new Error( + `Failed to reach GitHub releases API: ${error?.message ?? "unknown"}`, + { cause: error ?? 
undefined }
        );
    }
    return result;
}

type DownloadOnError = (
    op: "body-cancel" | "release-lock" | "cleanup-unlink",
    error: Error
) => void;

// Stream a release asset to destPath with a size cap; partial writes are
// unlinked on failure. NOTE(review): bare Promise annotations were stripped in
// extraction; restored (Promise<void> here and on the writer.end wrapper).
async function downloadAsset(
    asset: ReleaseAsset,
    destPath: string,
    timeoutMs: number = DEFAULT_ASSET_TIMEOUT_MS,
    onError?: DownloadOnError
): Promise<void> {
    const { error } = await tryCatch(
        withTimeout(
            asset.browser_download_url,
            timeoutMs,
            async function (response) {
                if (!response.ok) {
                    throw new Error(
                        `Download ${asset.name} failed: ${response.status} ${response.statusText}`
                    );
                }
                const contentLength = response.headers.get("content-length");
                if (contentLength !== null) {
                    const declared = Number(contentLength);
                    if (
                        Number.isFinite(declared) &&
                        declared > MAX_ASSET_BYTES
                    ) {
                        throw new Error(
                            `Download ${asset.name} refused: declared size ${declared} bytes exceeds cap ${MAX_ASSET_BYTES} bytes`
                        );
                    }
                }
                if (!response.body) {
                    throw new Error(
                        `Download ${asset.name} refused: empty response body`
                    );
                }
                // Stream chunks directly to disk, enforcing the cap as bytes arrive.
                // Avoids the ~2× memory peak of buffering all chunks then copying into one final Uint8Array.
                const reader = response.body.getReader();
                const writer = fs.createWriteStream(destPath, { flags: "w" });
                let bytesReceived = 0;
                let writerClosed = false;
                try {
                    while (true) {
                        const { done, value } = await reader.read();
                        if (done) break;
                        if (!value) continue;
                        bytesReceived += value.byteLength;
                        if (bytesReceived > MAX_ASSET_BYTES) {
                            throw new Error(
                                `Download ${asset.name} exceeded cap: ${bytesReceived} bytes > ${MAX_ASSET_BYTES} bytes`
                            );
                        }
                        // Respect backpressure: wait for drain when the kernel buffer is full.
                        if (!writer.write(value)) {
                            await once(writer, "drain");
                        }
                    }
                    if (bytesReceived === 0) {
                        // Explicit empty-body error; else SHA verify later reports a misleading mismatch.
                        throw new Error(
                            `Download ${asset.name} refused: empty response body`
                        );
                    }
                    await new Promise<void>(function (resolve, reject) {
                        writer.end(function (
                            err: NodeJS.ErrnoException | null | undefined
                        ) {
                            if (err) reject(err);
                            else resolve();
                        });
                    });
                    writerClosed = true;
                } finally {
                    const { error: releaseError } = tryCatchSync(function () {
                        reader.releaseLock();
                    });
                    if (releaseError) {
                        onError?.("release-lock", releaseError);
                    }
                    if (!writerClosed) {
                        writer.destroy();
                    }
                }
            }
        )
    );
    if (error) {
        // Clean up our own partial write so callers don't have to do it defensively.
        const { error: cleanupError } = tryCatchSync(function () {
            fs.unlinkSync(destPath);
        });
        if (cleanupError && !isEnoent(cleanupError)) {
            onError?.("cleanup-unlink", cleanupError);
        }
        throw new Error(`Failed to download ${asset.name}: ${error.message}`, {
            cause: error,
        });
    }
}

type HashResult =
    | { ok: true }
    | { ok: false; kind: "mismatch" }
    | { ok: false; kind: "io-error"; cause: Error };

const HASH_CHUNK_BYTES = 64 * 1024;

// Incremental SHA-256 over a file stream — never loads the whole binary into memory.
async function computeSha256Async(filePath: string): Promise<string> {
    const hasher = new Bun.CryptoHasher("sha256");
    const file = Bun.file(filePath);
    for await (const chunk of file.stream()) {
        hasher.update(chunk);
    }
    return hasher.digest("hex").toLowerCase();
}

// Synchronous variant, chunked reads through a fixed buffer.
function computeSha256Sync(filePath: string): string {
    const hasher = new Bun.CryptoHasher("sha256");
    const fd = fs.openSync(filePath, "r");
    try {
        const buffer = Buffer.alloc(HASH_CHUNK_BYTES);
        while (true) {
            const bytesRead = fs.readSync(
                fd,
                buffer,
                0,
                HASH_CHUNK_BYTES,
                null
            );
            if (bytesRead === 0) break;
            hasher.update(buffer.subarray(0, bytesRead));
        }
    } finally {
        fs.closeSync(fd);
    }
    return hasher.digest("hex").toLowerCase();
}

// Compare the file's SHA-256 against an expected hex digest; I/O failures are
// reported distinctly from mismatches so callers can retry vs. reject.
async function verifyBinaryHash(
    filePath: string,
    expectedSha256: string
): Promise<HashResult> {
    const { data: actual, error } = await tryCatch(
        computeSha256Async(filePath)
    );
    if (error) return { ok: false, kind: "io-error", cause: error };
    if (constantTimeEquals(actual, expectedSha256.toLowerCase())) {
        return { ok: true };
    }
    return { ok: false, kind: "mismatch" };
}

function verifyBinaryHashSync(
    filePath: string,
    expectedSha256: string
): HashResult {
    const { data: actual, error } = tryCatchSync(function () {
        return computeSha256Sync(filePath);
    });
    if (error) return { ok: false, kind: "io-error", cause: error };
    if (constantTimeEquals(actual, expectedSha256.toLowerCase())) {
        return { ok: true };
    }
    return { ok: false, kind: "mismatch" };
}

function constantTimeEquals(a: string, b: string): boolean {
    if (a.length !== b.length) return false;
    // Constant-time compare prevents timing side-channel on hash compare.
    return timingSafeEqual(Buffer.from(a), Buffer.from(b));
}

type Sha256SumsResult =
    | { kind: "not-published" }
    | { kind: "ok"; sums: Record<string, string> }
    | { kind: "error"; reason: string; retryable: boolean }
    // "tamper" = parsed-but-malformed sums (today: duplicates) — distinct from transient "error".
    | { kind: "tamper"; reason: string };

// 5xx and 403/429 (rate-limit) retryable; other 4xx treated as permanent.
+function isRetryableHttpStatus(status: number): boolean { + if (status === 403 || status === 429) return true; + return status >= 500 && status < 600; +} + +async function fetchSha256Sums( + assets: ReleaseAsset[], + timeoutMs: number = DEFAULT_META_TIMEOUT_MS +): Promise { + const sumsAsset = assets.find(function (entry) { + return entry.name === "SHA256SUMS"; + }); + if (!sumsAsset) return { kind: "not-published" }; + const { data: result, error } = await tryCatch( + withTimeout( + sumsAsset.browser_download_url, + timeoutMs, + async function (response): Promise { + if (!response.ok) { + return { + kind: "error", + reason: `${response.status} ${response.statusText}`, + retryable: isRetryableHttpStatus(response.status), + }; + } + const text = await response.text(); + if (!text) { + return { + kind: "error", + reason: "empty SHA256SUMS body", + retryable: true, + }; + } + // Duplicate entries are tampering, not transient — permanent failure. + const { data: parsed, error: parseError } = tryCatchSync( + function () { + return parseSha256Sums(text); + } + ); + if (parseError) { + return { + kind: "tamper", + reason: parseError.message, + }; + } + return { kind: "ok", sums: parsed }; + } + ) + ); + if (error || !result) { + return { + kind: "error", + reason: error?.message ?? 
"network error", + retryable: true, + }; + } + return result; +} + +type VerifyAssetResult = + | { ok: true; hash: string | null } // hash === null when SHA256SUMS isn't published + | { ok: false; kind: "sums-error"; reason: string; retryable: boolean } + | { ok: false; kind: "sums-tamper"; reason: string } + | { ok: false; kind: "missing-entry" } + | { ok: false; kind: "hash-io-error"; cause: Error } + | { ok: false; kind: "hash-mismatch" }; + +async function verifyAssetAgainstSums( + tmpPath: string, + assetName: string, + assets: ReleaseAsset[] +): Promise { + const sums = await fetchSha256Sums(assets); + if (sums.kind === "tamper") { + return { ok: false, kind: "sums-tamper", reason: sums.reason }; + } + if (sums.kind === "error") { + return { + ok: false, + kind: "sums-error", + reason: sums.reason, + retryable: sums.retryable, + }; + } + if (sums.kind === "not-published") { + return { ok: true, hash: null }; + } + const expected = sums.sums[assetName]; + if (!expected) { + return { ok: false, kind: "missing-entry" }; + } + const hashResult = await verifyBinaryHash(tmpPath, expected); + if (!hashResult.ok) { + if (hashResult.kind === "io-error") { + return { + ok: false, + kind: "hash-io-error", + cause: hashResult.cause, + }; + } + return { ok: false, kind: "hash-mismatch" }; + } + return { ok: true, hash: expected }; +} + +function parseSha256Sums(text: string): Record { + // Null-prototype object blocks __proto__/constructor pollution from a tampered file. 
+ const result: Record = Object.create(null); + for (const line of text.split("\n")) { + const trimmed = line.trim(); + if (trimmed === "" || trimmed.startsWith("#")) continue; + const match = /^([0-9a-fA-F]{64})\s+\*?(.+)$/.exec(trimmed); + if (!match) continue; + const [, hash, filename] = match; + const name = filename.trim(); + if (Object.prototype.hasOwnProperty.call(result, name)) { + throw new Error(`Duplicate SHA256SUMS entry for ${name}`); + } + result[name] = hash.toLowerCase(); + } + return result; +} + +export { + compareVersions, + computeSha256Sync, + downloadAsset, + fetchLatestRelease, + fetchSha256Sums, + getAssetName, + isStandalone, + parseSha256Sums, + verifyAssetAgainstSums, + verifyBinaryHash, + verifyBinaryHashSync, +}; +export type { + HashResult, + ReleaseAsset, + ReleaseInfo, + Sha256SumsResult, + VerifyAssetResult, +};