diff --git a/.env.example b/.env.example
index febae2d29..78a8fbebb 100644
--- a/.env.example
+++ b/.env.example
@@ -1,38 +1,44 @@
-# =====
-# Dev Configuration
-# The devShell reads this file to set defaults, so changing values here
-# affects local development.
-# =====
+# -----------------------------------------------------------------------------
+# Server Configuration (dev defaults, required in all environments)
+# -----------------------------------------------------------------------------
 
-# Server port - used by both the server and E2E tests
+# Port the registry server listens on
+# - Dev/Test: 9000 (from this file)
+# - Prod: Set in deployment config
 SERVER_PORT=9000
 
 # SQLite database path (relative to working directory)
+# - Dev: Uses local ./db directory
+# - Test: Overridden to use temp state directory
+# - Prod: Set to production database path
 DATABASE_URL="sqlite:db/registry.sqlite3"
 
-# =====
-# Dev Secrets
-# these must be set in .env when running scripts like legacy-importer
-# =====
+# -----------------------------------------------------------------------------
+# Secrets (required for production, use dummy values for local dev)
+# -----------------------------------------------------------------------------
+# IMPORTANT: Never commit real secrets. The values below are dummies for testing.
 
-# GitHub personal access token for API requests when running scripts
-GITHUB_TOKEN="ghp_your_personal_access_token"
-
-# =====
-# Prod Secrets
-# these must be set in .env to run the production server and some scripts
-# =====
-
-# DigitalOcean Spaces credentials for S3-compatible storage
-SPACES_KEY="digitalocean_spaces_key"
-SPACES_SECRET="digitalocean_spaces_secret"
-
-# Pacchettibotti bot account credentials
-# Used for automated registry operations (commits, releases, etc.)
+# GitHub personal access token for pacchettibotti bot
+# Used for: commits to registry repos, issue management
 PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token"
 
 # Pacchettibotti SSH keys (base64-encoded)
+# Used for: signing authenticated operations (unpublish, transfer)
 # Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org"
 # Encode with: cat key | base64 | tr -d '\n'
 PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn"
 PACCHETTIBOTTI_ED25519="YWJjeHl6"
+
+# DigitalOcean Spaces credentials for S3-compatible storage
+# Used for: uploading/downloading package tarballs
+SPACES_KEY="digitalocean_spaces_key"
+SPACES_SECRET="digitalocean_spaces_secret"
+
+
+# -----------------------------------------------------------------------------
+# Script-only Secrets (not used by server, used by scripts like legacy-importer)
+# -----------------------------------------------------------------------------
+
+# Personal GitHub token for API requests when running scripts
+# This is YOUR token, not pacchettibotti's
+GITHUB_TOKEN="ghp_your_personal_access_token"
diff --git a/.gitignore b/.gitignore
index 92fc94aae..497ffb046 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,6 +15,8 @@ result*
 *.sqlite3
 *.sqlite3-wal
 *.sqlite3-shm
+
+TODO.md
 .spec-results
 
 # Keep it secret, keep it safe.
diff --git a/AGENTS.md b/AGENTS.md
new file mode 100644
index 000000000..5ce5268dc
--- /dev/null
+++ b/AGENTS.md
@@ -0,0 +1,166 @@
+# AGENTS.md
+
+This repository implements the PureScript package registry. See @SPEC.md for the registry specification and @CONTRIBUTING.md for detailed contributor documentation.
+
+## Development Environment
+
+This project uses Nix with direnv. You should already be in the Nix shell automatically when entering the directory. If not, run:
+
+```sh
+nix develop
+```
+
+Watch out for these Nix quirks:
+- If Nix tries to fetch from git during a build, it is likely that spago.yaml files were changed but the lock file was not updated; if so, update the lockfile with `spago build`.
+- If a Nix build appears to be stale, it is likely that files were modified but are untracked by Git; if so, add the modified files with `git add` and retry.
+
+### Build
+
+The registry is implemented in PureScript. Use spago to build it.
+
+```sh
+spago build # Build all PureScript code
+```
+
+The registry infrastructure is defined in Nix. Build it with Nix:
+
+```sh
+nix build .#server
+```
+
+### Test
+
+The registry contains a mixture of unit tests, e2e tests, and nix flake checks. When you complete a change, you should generally run the unit tests. When working on the server, you should generally also run the e2e tests. If you are on a Linux system, you can run `nix flake check -L` prior to committing code to ensure everything still works.
+
+#### Unit Tests
+
+Unit tests can be run with `spago`. They are fast and cheap.
+
+```sh
+spago test              # Run all unit tests
+spago test -p <package> # Run tests for a specific package
+```
+
+#### End-to-End Tests
+
+The end-to-end (integration) tests are in `app-e2e`. They can be run via Nix on Linux:
+
+```sh
+nix build .#checks.x86_64-linux.integration
+```
+
+Alternatively, on macOS (or for more iterative test development) you can run them using two terminals: one to start the test environment and one to execute the tests.
+
+```sh
+# Terminal 1: Start test environment (wiremock mocks + registry server on port 9000)
+nix run .#test-env
+
+# Terminal 2: Run E2E tests once server is ready
+spago-test-e2e
+```
+
+Options: `nix run .#test-env -- --tui` for an interactive TUI, or `nix run .#test-env -- --detached` for background mode using a single terminal.
+
+State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. This is useful, for example, for reading the logs of the most recent run:
+
+```sh
+# after a test run, see the logs (log name is today's date)
+cat /tmp/registry-test-env/scratch/logs/*.log
+```
+
+#### Smoke Test (Linux only)
+
+The smoke test verifies that the server comes up properly and that the deployment works. Only run this test if you are making changes which could break the deployment of the server.
+
+```sh
+nix build .#checks.x86_64-linux.smoke -L
+```
+
+#### Continuous Integration via Nix Checks
+
+There is a full suite of checks implemented with Nix that verify that packages build, formatting is correct, registry types are Dhall-conformant, and more. This is the primary check run in CI.
+
+```sh
+nix flake check -L
+```
+
+## Formatting
+
+```sh
+# Format (or check) PureScript
+purs-tidy format-in-place app app-e2e foreign lib scripts
+purs-tidy check app app-e2e foreign lib scripts
+
+# Format Nix files
+nixfmt *.nix nix/**/*.nix
+```
+
+## Project Structure
+
+- `app/` — Registry server implementation.
+- `app-e2e/` — E2E tests for the server API.
+- `lib/` — **Public library** for consumers (Spago, Pursuit, etc.). Only types and functions useful to external tools belong here. Avoid implementation-specific code.
+- `foreign/` — FFI bindings to JavaScript libraries.
+- `scripts/` — Runnable modules for registry tasks (LegacyImporter, PackageTransferrer, PackageSetUpdater, etc.). Run via `nix run .#legacy-importer`, etc.
+- `test-utils/` — Shared test utilities.
+- `db/` — SQLite schemas and migrations (use `dbmate up` to initialize).
+- `types/` — Dhall type specifications.
+- `nix/` — Nix build and deployment configuration.
+
+## Scripts & Daily Workflows
+
+The `scripts/` directory contains modules run as daily jobs by the purescript/registry repository:
+
+- `LegacyImporter` — imports package versions from the legacy Bower registry
+- `PackageTransferrer` — handles package transfers
+- `PackageSetUpdater` — automatic daily package set updates
+
+Run scripts via Nix: `nix run .#<script>` (e.g., `nix run .#legacy-importer`). All scripts support `--help` for usage information.
+
+## Scratch Directory & Caching
+
+The `scratch/` directory (gitignored) is used by scripts for:
+- `.cache/` — Cached API responses, downloaded packages, etc.
+- `logs/` — Log files
+- `registry/`, `registry-index/` — Local clones for testing, also modified and optionally committed to by scripts
+
+Caching is critical for the legacy importer due to the expense of downloading packages. The `Registry.App.Effect.Cache` module handles caching.
+
+## PureScript Conventions
+
+### Custom Prelude
+
+Always use `Registry.App.Prelude` in `app/` and `app-e2e/` directories:
+
+```purescript
+import Registry.App.Prelude
+```
+
+### Effects via Run
+
+Use the `run` library for extensible effects. Do NOT perform HTTP calls, console logs, or other effects directly in `Aff`. Check for existing effects in `app/src/App/Effect/` or consider adding one.
+
+### Import Style
+
+Import types unqualified, values qualified. Use shortened module names:
+
+```purescript
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Data.String as String
+import Node.FS.Aff as FS.Aff
+import Parsing (Parser)
+import Parsing as Parsing
+import Parsing.Combinators as Parsing.Combinators
+import Registry.Operation (AuthenticatedData)
+import Registry.SSH as SSH
+```
+
+## Deployment
+
+Continuous deployment via GitHub Actions on master. Manual deploy:
+
+```sh
+colmena apply
+```
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 92f5f9dcf..ebe38a0dd 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -72,20 +72,29 @@ nix build .#checks.x86_64-linux.smoke -L
 
 ### Integration Test
 
+On Linux, you can run the integration tests with:
+
+```sh
+nix build .#checks.x86_64-linux.integration -L
+```
+
+On macOS, or for iterative development, you can instead start the test environment and run the tests separately.
+
 ```sh
 # Terminal 1: Start the test environment (wiremock mocks + registry server)
 nix run .#test-env
 
-# Terminal 2: Once the server is ready, run the E2E tests
-spago run -p registry-app-e2e
+# Terminal 2: Run E2E tests once server is ready
+spago-test-e2e
 ```
 
 The test environment:
 - Starts wiremock services mocking GitHub, S3, Pursuit, etc.
-- Starts the registry server on port 9000 with a temporary SQLite database
+- Starts the registry server with a temporary SQLite database
 - Uses fixture data from `app/fixtures/`
+- Stores state in `/tmp/registry-test-env`, cleaned up on each `nix run .#test-env`
 
-Press `Ctrl+C` in Terminal 1 to stop all services. State is cleaned up automatically.
+Press `Ctrl+C` in Terminal 1 to stop all services.
 
 All arguments after `--` are passed directly to process-compose:
 
@@ -101,7 +110,11 @@ process-compose attach # Attach TUI
 process-compose down # Stop all services
 ```
 
-You can also set `STATE_DIR` to use a persistent state directory instead of a temp dir.
+To examine state after a test run (e.g., for debugging), stop the test-env but don't restart it. The state remains in `/tmp/registry-test-env`:
+- `db/registry.sqlite3` — SQLite database
+- `scratch/registry/` — Local registry clone with metadata
+- `scratch/registry-index/` — Local manifest index clone
+- `repo-fixtures/` — Git fixture repositories
 
 ## Available Nix Commands
 
diff --git a/SPEC.md b/SPEC.md
index c0f7094c7..54c627d05 100644
--- a/SPEC.md
+++ b/SPEC.md
@@ -197,6 +197,7 @@ All packages in the registry contain a `purs.json` manifest file in their root d
 - `version`: a valid [`Version`](#version)
 - `license`: a valid [`License`](#license)
 - `location`: a valid [`Location`](#location)
+- `ref`: a `string` representing the reference (e.g., a Git commit or Git tag) at the `location` that was used to fetch this version's source code
 - `owners` (optional): a non-empty array of [`Owner`](#owner)
 - `description` (optional): a description of your library as a plain text string, not markdown, up to 300 characters
 - `includeFiles` (optional): a non-empty array of globs, where globs are used to match file paths (in addition to the `src` directory and other [always-included files](#always-included-files)) that you want included in your package tarball
@@ -207,7 +208,7 @@ Note:
 
 - Globs you provide at the `includeFiles` and `excludeFiles` keys must contain only `*`, `**`, `/`, `.`, `..`, and characters for Linux file paths. It is not possible to negate a glob (ie. the `!` character), and globs cannot represent a path out of the package source directory.
 - When packaging your project source, the registry will first "include" your `src` directory and always-included files such as your `purs.json` file. Then it will include files which match globs indicated by the `includeFiles` key ([always-ignored files](#always-ignored-files) cannot be included). Finally, it will apply the excluding globs indicated by the `excludeFiles` key to the included files ([always-included files](#always-included-files) cannot be excluded).
-- Dependencies you provide at the `dependencies` key must exist in the registry, and the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies).
+- Dependencies you provide at the `dependencies` key must exist in the registry, the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies), and you may not rely on undeclared transitive dependencies (ie. any modules you import in your code must come from packages listed directly in your dependencies).
 
 For example:
 
@@ -221,6 +222,7 @@
     "githubOwner": "purescript",
     "githubRepo": "purescript-control"
   },
+  "ref": "v4.2.0",
   "include": ["test/**/*.purs"],
   "exclude": ["test/graphs"],
   "dependencies": { "newtype": ">=3.0.0 <4.0.0", "prelude": ">=4.0.0 <5.0.0" }
 }
@@ -234,11 +236,12 @@
 All packages in the registry have an associated metadata file, which is located in the `metadata` directory of the `registry` repository under the package name. For example, the metadata for the `aff` package is located at: https://github.com/purescript/registry/blob/main/metadata/aff.json.
 Metadata files are the source of truth on all published and unpublished versions of a particular package: what their content is and where the package is located. Metadata files are produced by the registry, not by package authors, though they take some information from package manifests.
 
-Each published version of a package records three fields:
+Each published version of a package records the following fields:
 
 - `hash`: a [`Sha256`](#Sha256) of the compressed archive fetched by the registry for the given version
 - `bytes`: the size of the tarball in bytes
 - `publishedTime`: the time the package was published as an `ISO8601` string
+- `compilers`: compiler versions this package is known to work with. This field can be in one of two states: a single version indicates that the package worked with a specific compiler on upload but has not yet been tested with all compilers, whereas a non-empty array of versions indicates the package has been tested with all compilers the registry supports.
 
 Each unpublished version of a package records three fields:
 
diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml
index 1fa902f14..fb3804b90 100644
--- a/app-e2e/spago.yaml
+++ b/app-e2e/spago.yaml
@@ -5,16 +5,27 @@ package:
   dependencies:
     - aff
     - arrays
+    - codec-json
     - console
     - datetime
-    - effect
-    - either
-    - maybe
-    - prelude
+    - exceptions
+    - fetch
+    - integers
+    - json
+    - node-child-process
+    - node-execa
+    - node-fs
+    - node-path
+    - node-process
+    - ordered-collections
+    - registry-app
+    - registry-foreign
     - registry-lib
     - registry-test-utils
+    - routing-duplex
     - spec
     - spec-node
     - strings
+    - transformers
   run:
     main: Test.E2E.Main
diff --git a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs
new file mode 100644
index 000000000..e02b623b5
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs
@@ -0,0 +1,63 @@
+module Test.E2E.Endpoint.Jobs (spec) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Registry.API.V1 (JobId(..))
+import Registry.API.V1 as V1
+import Registry.Test.Assert as Assert
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2ESpec)
+import Test.E2E.Support.Env as Env
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "Status endpoint" do
+    Spec.it "can reach the status endpoint" do
+      Client.getStatus
+
+  Spec.describe "Jobs API" do
+    Spec.it "query parameters and filtering work correctly" do
+      -- Publish once and test all Jobs API features
+      { jobId } <- Client.publish Fixtures.effectPublishData
+      job <- Env.pollJobOrFail jobId
+      let info = V1.jobInfo job
+
+      -- Test: include_completed filtering
+      recentJobs <- Client.getJobsWith Client.ActiveOnly
+      allJobs <- Client.getJobsWith Client.IncludeCompleted
+      let allCount = Array.length allJobs
+      Assert.shouldSatisfy allCount (_ > 0)
+      let recentCount = Array.length recentJobs
+      Assert.shouldSatisfy recentCount (_ <= allCount)
+      let completedJob = Array.find (\j -> isJust (V1.jobInfo j).finishedAt) allJobs
+      case completedJob of
+        Just completed -> do
+          let
+            completedId = (V1.jobInfo completed).jobId
+            inRecent = Array.any (\j -> (V1.jobInfo j).jobId == completedId) recentJobs
+          when inRecent do
+            Assert.fail $ "Completed job " <> unwrap completedId <> " should be excluded from include_completed=false results"
+        Nothing -> pure unit
+
+      -- Test: query parameters (level and since)
+      baseJob <- Client.getJob jobId Nothing Nothing
+      Assert.shouldEqual (V1.jobInfo baseJob).jobId info.jobId
+      debugJob <- Client.getJob jobId (Just V1.Debug) Nothing
+      Assert.shouldEqual (V1.jobInfo debugJob).jobId info.jobId
+      let sinceTime = fromMaybe info.createdAt info.finishedAt
+      sinceJob <- Client.getJob jobId Nothing (Just sinceTime)
+      Assert.shouldEqual (V1.jobInfo sinceJob).jobId info.jobId
+
+    Spec.it "returns HTTP 404 for non-existent job ID" do
+      let fakeJobId = JobId "nonexistent-job-id-12345"
+      result <- Client.tryGetJob fakeJobId Nothing Nothing
+      case result of
+        Right _ ->
+          Assert.fail "Expected HTTP 404 for non-existent job"
+        Left err ->
+          case Client.clientErrorStatus err of
+            Just 404 -> pure unit
+            _ -> Assert.fail $ "Expected HTTP 404, got: " <> Client.printClientError err
diff --git a/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs
new file mode 100644
index 000000000..502853fbd
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs
@@ -0,0 +1,52 @@
+module Test.E2E.Endpoint.PackageSets (spec) where
+
+import Registry.App.Prelude
+
+import Control.Monad.Reader (ask)
+import Effect.Aff as Aff
+import Registry.API.V1 as V1
+import Registry.Test.Assert as Assert
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2ESpec)
+import Test.E2E.Support.Env as Env
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "Package Sets endpoint" do
+    Spec.it "accepts unauthenticated add/upgrade requests" do
+      { jobId } <- Client.packageSets Fixtures.packageSetAddRequest
+      job <- Env.pollJobOrFail jobId
+      Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust
+
+    Spec.it "rejects unauthenticated compiler change requests" do
+      result <- Client.tryPackageSets Fixtures.packageSetCompilerChangeRequest
+      case result of
+        Left err -> do
+          Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400)
+        Right _ ->
+          Assert.fail "Expected 400 error for unauthenticated compiler change"
+
+    Spec.it "rejects unauthenticated package removal requests" do
+      result <- Client.tryPackageSets Fixtures.packageSetRemoveRequest
+      case result of
+        Left err -> do
+          Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400)
+        Right _ ->
+          Assert.fail "Expected 400 error for unauthenticated package removal"
+
+    Spec.it "accepts authenticated compiler change requests" do
+      { privateKey } <- ask
+      case Fixtures.signPackageSet privateKey Fixtures.packageSetCompilerChangeRequest of
+        Left err ->
+          liftAff $ Aff.throwError $ Aff.error $ "Failed to sign request: " <> err
+        Right signedRequest -> do
+          { jobId } <- Client.packageSets signedRequest
+          job <- Env.pollJobOrFail jobId
+          Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust
+
+    Spec.it "returns existing job for duplicate requests" do
+      { jobId: firstJobId } <- Client.packageSets Fixtures.packageSetAddRequest
+      { jobId: secondJobId } <- Client.packageSets Fixtures.packageSetAddRequest
+      Assert.shouldEqual firstJobId secondJobId
diff --git a/app-e2e/src/Test/E2E/Endpoint/Publish.purs b/app-e2e/src/Test/E2E/Endpoint/Publish.purs
new file mode 100644
index 000000000..47e51c959
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Endpoint/Publish.purs
@@ -0,0 +1,76 @@
+module Test.E2E.Endpoint.Publish (spec) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Data.Array.NonEmpty as NEA
+import Data.Map as Map
+import Data.Set as Set
+import Data.String as String
+import Registry.API.V1 (Job(..))
+import Registry.API.V1 as V1
+import Registry.Manifest (Manifest(..))
+import Registry.Metadata (Metadata(..))
+import Registry.Sha256 as Sha256
+import Registry.Test.Assert as Assert
+import Registry.Version as Version
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2ESpec)
+import Test.E2E.Support.Env as Env
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.E2E.Support.WireMock as WireMock
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "Publish workflow" do
+    Spec.it "can publish effect@4.0.0 and verify all state changes" do
+      { jobId } <- Client.publish Fixtures.effectPublishData
+      job <- Env.pollJobOrFail jobId
+      Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust
+
+      uploadOccurred <- Env.hasStorageUpload Fixtures.effect
+      unless uploadOccurred do
+        storageRequests <- WireMock.getStorageRequests
+        WireMock.failWithRequests "Expected S3 PUT for effect/4.0.0.tar.gz" storageRequests
+
+      Metadata metadata <- Env.readMetadata Fixtures.effect.name
+      case Map.lookup Fixtures.effect.version metadata.published of
+        Nothing -> Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in metadata published versions"
+        Just publishedMeta -> do
+          Assert.shouldSatisfy (Sha256.print publishedMeta.hash) (not <<< String.null)
+
+      manifestEntries <- Env.readManifestIndexEntry Fixtures.effect.name
+      let hasVersion = Array.any (\(Manifest m) -> m.version == Fixtures.effect.version) manifestEntries
+      unless hasVersion do
+        Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in manifest index"
+
+      Env.waitForAllMatrixJobs Fixtures.effect
+
+      -- Collect the compilers from the matrix jobs that ran for this package
+      allJobs <- Client.getJobsWith Client.IncludeCompleted
+      let
+        matrixCompilers = Array.mapMaybe
+          ( case _ of
+              MatrixJob { packageName, packageVersion, compilerVersion } ->
+                if packageName == Fixtures.effect.name && packageVersion == Fixtures.effect.version then Just compilerVersion
+                else Nothing
+              _ -> Nothing
+          )
+          allJobs
+        -- The expected compilers are: the publish compiler + all matrix job compilers
+        expectedCompilers = Set.fromFoldable $ Array.cons Fixtures.effectPublishData.compiler matrixCompilers
+
+      Metadata metadataAfter <- Env.readMetadata Fixtures.effect.name
+      case Map.lookup Fixtures.effect.version metadataAfter.published of
+        Nothing -> Assert.fail "Version missing after matrix jobs"
+        Just publishedMetaAfter -> do
+          let actualCompilers = Set.fromFoldable $ NEA.toArray publishedMetaAfter.compilers
+          Assert.shouldEqual actualCompilers expectedCompilers
+
+  Spec.describe "Publish state machine" do
+    Spec.it "returns same jobId for duplicate publish requests" do
+      { jobId: id1 } <- Client.publish Fixtures.effectPublishData
+      _ <- Env.pollJobOrFail id1
+      { jobId: id2 } <- Client.publish Fixtures.effectPublishData
+      Assert.shouldEqual id1 id2
diff --git a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs
new file mode 100644
index 000000000..6e3d49eef
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs
@@ -0,0 +1,51 @@
+module Test.E2E.Endpoint.Transfer (spec) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Registry.API.V1 as V1
+import Registry.Location (Location(..))
+import Registry.Metadata (Metadata(..))
+import Registry.PackageName as PackageName
+import Registry.Test.Assert as Assert
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2ESpec)
+import Test.E2E.Support.Env as Env
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.E2E.Support.WireMock as WireMock
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "Transfer workflow" do
+    Spec.it "can transfer effect to a new location with full state verification" do
+      { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData
+      _ <- Env.pollJobOrFail publishJobId
+      -- Note: we don't wait for matrix jobs - transfer only modifies metadata
+
+      Metadata originalMetadata <- Env.readMetadata Fixtures.effect.name
+      case originalMetadata.location of
+        GitHub { owner } -> Assert.shouldEqual owner "purescript"
+        Git _ -> Assert.fail "Expected GitHub location, got Git"
+
+      -- clear the publish PUT so we can verify transfers leave storage unaffected
+      WireMock.clearStorageRequests
+
+      authData <- Env.signTransferOrFail Fixtures.effectTransferData
+      { jobId: transferJobId } <- Client.transfer authData
+      transferJob <- Env.pollJobOrFail transferJobId
+      Assert.shouldSatisfy (V1.jobInfo transferJob).finishedAt isJust
+
+      Metadata newMetadata <- Env.readMetadata Fixtures.effect.name
+      case newMetadata.location of
+        GitHub { owner } -> Assert.shouldEqual owner "new-owner"
+        Git _ -> Assert.fail "Expected GitHub location after transfer, got Git"
+
+      storageRequests <- WireMock.getStorageRequests
+      let
+        packagePath = PackageName.print Fixtures.effect.name
+        putOrDeleteRequests = Array.filter
+          (\r -> (r.method == "PUT" || r.method == "DELETE") && WireMock.filterByUrlContaining packagePath [ r ] /= [])
+          storageRequests
+      unless (Array.null putOrDeleteRequests) do
+        WireMock.failWithRequests "Transfer should not PUT or DELETE to storage" putOrDeleteRequests
diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs
new file mode 100644
index 000000000..c58e88ea6
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs
@@ -0,0 +1,95 @@
+module Test.E2E.Endpoint.Unpublish (spec) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Data.Map as Map
+import Data.String as String
+import Registry.API.V1 as V1
+import Registry.Metadata (Metadata(..))
+import Registry.Test.Assert as Assert
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2ESpec)
+import Test.E2E.Support.Env as Env
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.E2E.Support.WireMock as WireMock
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "Publish-Unpublish workflow" do
+    Spec.it "can publish then unpublish with full state verification" do
+      { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData
+      _ <- Env.pollJobOrFail publishJobId
+
+      existsBefore <- Env.manifestIndexEntryExists Fixtures.effect
+      unless existsBefore do
+        Assert.fail "Expected version to exist in manifest index before unpublish"
+
+      authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData
+      { jobId: unpublishJobId } <- Client.unpublish authData
+      unpublishJob <- Env.pollJobOrFail unpublishJobId
+      Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust
+
+      Metadata metadata <- Env.readMetadata Fixtures.effect.name
+
+      case Map.lookup Fixtures.effect.version metadata.unpublished of
+        Nothing ->
+          Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata"
+        Just unpublishedInfo ->
+          Assert.shouldSatisfy unpublishedInfo.reason (not <<< String.null)
+
+      when (Map.member Fixtures.effect.version metadata.published) do
+        Assert.fail "Version 4.0.0 should not be in 'published' metadata after unpublish"
+
+      deleteOccurred <- Env.hasStorageDelete Fixtures.effect
+      unless deleteOccurred do
+        storageRequests <- WireMock.getStorageRequests
+        WireMock.failWithRequests "Expected S3 DELETE for effect/4.0.0.tar.gz" storageRequests
+
+      existsAfter <- Env.manifestIndexEntryExists Fixtures.effect
+      when existsAfter do
+        Assert.fail "Expected version to be removed from manifest index after unpublish"
+
+    -- Test race condition: submit unpublish while publish is still running.
+    -- Job priority (Unpublish > Matrix) ensures unpublish runs before matrix jobs.
+    Spec.it "unpublishing before matrix jobs complete causes them to fail gracefully" do
+      -- Submit publish, don't wait for it to complete
+      { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData
+
+      -- Immediately submit unpublish - it will be queued and run after publish
+      -- but BEFORE matrix jobs due to job priority ordering
+      authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData
+      { jobId: unpublishJobId } <- Client.unpublish authData
+
+      -- Now wait for publish to complete
+      _ <- Env.pollJobOrFail publishJobId
+
+      -- Wait for unpublish to complete
+      unpublishJob <- Env.pollJobOrFail unpublishJobId
+      Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust
+
+      -- Verify unpublish succeeded
+      Metadata metadata <- Env.readMetadata Fixtures.effect.name
+      case Map.lookup Fixtures.effect.version metadata.unpublished of
+        Nothing ->
+          Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata"
+        Just _ -> pure unit
+
+      -- Wait for matrix jobs to complete
+      Env.waitForAllMatrixJobs Fixtures.effect
+
+      -- Verify matrix jobs failed (they tried to download deleted tarball)
+      jobs <- Client.getJobs
+      let
+        matrixJobs = Array.filter (Env.isMatrixJobFor Fixtures.effect) jobs
+        allFailed = Array.all (\j -> not (V1.jobInfo j).success) matrixJobs
+
+      unless (Array.null matrixJobs || allFailed) do
+        Assert.fail "Expected matrix jobs to fail after unpublish deleted the tarball"
+
+      -- Critical: verify no bad writes occurred - the version should NOT be
+      -- back in published metadata (Map.update on missing key is a no-op)
+      Metadata metadataAfterMatrix <- Env.readMetadata Fixtures.effect.name
+      when (Map.member Fixtures.effect.version metadataAfterMatrix.published) do
+        Assert.fail "Matrix job incorrectly wrote to published metadata for unpublished version"
diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs
new file mode 100644
index 000000000..c4598313a
--- /dev/null
+++ b/app-e2e/src/Test/E2E/GitHubIssue.purs
@@ -0,0 +1,149 @@
+-- | End-to-end tests for the GitHubIssue workflow.
+-- | Tests the full flow: parsing GitHub event → submitting to registry API →
+-- | polling for completion → posting comments.
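+-- |
+-- | Note: `runWorkflow` below drives `GitHubIssue.initializeGitHub` and
+-- | `GitHubIssue.runGitHubIssue` in-process rather than through CI, so the
+-- | GitHub API traffic asserted on here is served by the test environment's
+-- | wiremock stubs.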
+module Test.E2E.GitHubIssue (spec) where
+
+import Registry.App.Prelude
+
+import Control.Monad.Reader (ask)
+import Data.Array as Array
+import Data.Codec.JSON as CJ
+import Data.Codec.JSON.Record as CJ.Record
+import Data.String as String
+import Effect.Aff (Milliseconds(..))
+import JSON as JSON
+import Node.FS.Aff as FS.Aff
+import Node.Path as Path
+import Node.Process as Process
+import Registry.App.GitHubIssue as GitHubIssue
+import Registry.Foreign.Tmp as Tmp
+import Registry.Operation as Operation
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Env (E2E, E2ESpec)
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.E2E.Support.WireMock as WireMock
+import Test.Spec as Spec
+
+spec :: E2ESpec
+spec = do
+  Spec.describe "GitHubIssue end-to-end" do
+    Spec.it "handles publish via GitHub issue, posts comments, and closes issue on success" do
+      requests <- runWorkflow $ mkPublishEvent Fixtures.effectPublishData
+      assertComment "Job started" requests
+      assertComment "Job completed successfully" requests
+      assertClosed requests
+
+    Spec.it "posts failure comment and leaves issue open when job fails" do
+      requests <- runWorkflow $ mkAuthenticatedEvent "random-user" Fixtures.failingTransferData
+      assertComment "Job started" requests
+      assertComment "Job failed" requests
+      assertNoComment "Job completed successfully" requests
+      assertOpen requests
+
+    Spec.it "calls Teams API to verify trustee membership for authenticated operation" do
+      requests <- runWorkflow $ mkAuthenticatedEvent packagingTeamUser Fixtures.trusteeAuthenticatedData
+      assertComment "Job started" requests
+      assertTeamsApiCalled requests
+
+    Spec.it "posts error comment when issue body contains invalid JSON" do
+      requests <- runWorkflow Fixtures.invalidJsonIssueEvent
+      assertComment "malformed" requests
+      assertOpen requests
+
+-- Constants
+testIssueNumber :: Int
+testIssueNumber = 101
+
+packagingTeamUser :: String
+packagingTeamUser = "packaging-team-user"
+
+-- Event builders
+githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } }
+githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object
+  { sender: CJ.Record.object { login: CJ.string }
+  , issue: CJ.Record.object { number: CJ.int, body: CJ.string }
+  }
+
+mkPublishEvent :: Operation.PublishData -> String
+mkPublishEvent publishData =
+  let
+    body = "```json\n" <> JSON.print (CJ.encode Operation.publishCodec publishData) <> "\n```"
+  in
+    JSON.print $ CJ.encode githubEventCodec
+      { sender: { login: packagingTeamUser }, issue: { number: testIssueNumber, body } }
+
+mkAuthenticatedEvent :: String -> Operation.AuthenticatedData -> String
+mkAuthenticatedEvent username authData =
+  let
+    body = "```json\n" <> JSON.print (CJ.encode Operation.authenticatedCodec authData) <> "\n```"
+  in
+    JSON.print $ CJ.encode githubEventCodec
+      { sender: { login: username }, issue: { number: testIssueNumber, body } }
+
+-- Workflow runner
+runWorkflow :: String -> E2E (Array WireMock.WireMockRequest)
+runWorkflow eventJson = do
+  { stateDir } <- ask
+
+  Client.getStatus
+
+  tmpDir <- liftAff Tmp.mkTmpDir
+  let eventPath = Path.concat [ tmpDir, "github-event.json" ]
+  liftAff $ FS.Aff.writeTextFile UTF8 eventPath eventJson
+  liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath
+
+  originalCwd <- liftEffect Process.cwd
+  liftEffect $ Process.chdir stateDir
+
+  envResult <- liftAff GitHubIssue.initializeGitHub
+  for_ envResult \env -> do
+    let testEnv = env { pollConfig = { maxAttempts: 60, interval: Milliseconds 500.0 }, logVerbosity = Quiet }
+    liftAff $ void $ GitHubIssue.runGitHubIssue testEnv
+
+  liftEffect $ Process.chdir originalCwd
+
+  WireMock.getGithubRequests
+
+-- Assertions (all operate on captured requests)
+assertComment :: String -> Array WireMock.WireMockRequest -> E2E Unit
+assertComment text requests = do
+  let
+    comments = requests # Array.filter \r ->
+      r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url
+  unless (Array.any (bodyContains text) comments) do
+    WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests
+
+assertNoComment :: String -> Array WireMock.WireMockRequest -> E2E Unit
+assertNoComment text requests = do
+  let
+    comments = requests # Array.filter \r ->
+      r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url
+  when (Array.any (bodyContains text) comments) do
+    WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests
+
+assertClosed :: Array WireMock.WireMockRequest -> E2E Unit
+assertClosed requests = do
+  let
+    closes = requests # Array.filter \r ->
+      r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url
+  when (Array.null closes) do
+    WireMock.failWithRequests "Expected issue to be closed" requests
+
+assertOpen :: Array WireMock.WireMockRequest -> E2E Unit
+assertOpen requests = do
+  let
+    closes = requests # Array.filter \r ->
+      r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url
+  unless (Array.null closes) do
+    WireMock.failWithRequests "Expected issue to remain open" requests
+
+assertTeamsApiCalled :: Array WireMock.WireMockRequest -> E2E Unit
+assertTeamsApiCalled requests = do
+  let
+    teams = requests # Array.filter \r ->
+      r.method == "GET" && String.contains (String.Pattern "/orgs/purescript/teams/packaging/members") r.url
+  when (Array.null teams) do
+    WireMock.failWithRequests "Expected Teams API to be called" requests
+
+bodyContains :: String -> WireMock.WireMockRequest -> Boolean
+bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body)
diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs
deleted file mode 100644
index 7bc030d76..000000000
--- a/app-e2e/src/Test/E2E/Main.purs
+++ /dev/null
@@ -1,22 +0,0 @@
-module Test.E2E.Main (main) where
-
-import Prelude
-
-import Data.Maybe (Maybe(..))
-import Data.Time.Duration (Milliseconds(..))
-import Effect (Effect)
-import Test.E2E.Publish as Test.E2E.Publish
-import Test.Spec as Spec
-import Test.Spec.Reporter.Console (consoleReporter)
-import Test.Spec.Runner.Node (runSpecAndExitProcess')
-import Test.Spec.Runner.Node.Config as Cfg
-
-main :: Effect Unit
-main = runSpecAndExitProcess' config [ consoleReporter ] do
-  Spec.describe "E2E Tests" do
-    Spec.describe "Publish" Test.E2E.Publish.spec
-  where
-  config =
-    { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 }
-    , parseCLIOptions: false
-    }
diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs
deleted file mode 100644
index f7bd1d63e..000000000
--- a/app-e2e/src/Test/E2E/Publish.purs
+++ /dev/null
@@ -1,84 +0,0 @@
--- | End-to-end tests for the Publish API endpoint.
--- | These tests exercise the actual registry server via HTTP requests.
-module Test.E2E.Publish (spec) where
-
-import Prelude
-
-import Data.Array as Array
-import Data.Either (Either(..))
-import Data.Maybe (Maybe(..), isJust)
-import Data.String as String
-import Effect.Aff (Aff)
-import Effect.Class (liftEffect)
-import Effect.Class.Console as Console
-import Registry.API.V1 as V1
-import Registry.Location as Registry.Location
-import Registry.Test.Assert as Assert
-import Registry.Test.E2E.Client as Client
-import Registry.Test.Utils as Utils
-import Test.Spec (Spec)
-import Test.Spec as Spec
-
--- | Get client config from environment
-getConfig :: Aff Client.Config
-getConfig = liftEffect Client.configFromEnv
-
-spec :: Spec Unit
-spec = do
-  Spec.describe "Server connectivity" do
-    Spec.it "can reach the status endpoint" do
-      config <- getConfig
-      result <- Client.getStatus config
-      case result of
-        Left err -> Assert.fail $ "Failed to reach status endpoint: " <> Client.printClientError err
-        Right _ -> pure unit
-
-    Spec.it "can list jobs (initially empty)" do
-      config <- getConfig
-      result <- Client.getJobs config
-      case result of
-        Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err
-        Right _ -> pure unit -- Jobs list may not be empty if other tests ran
-
-  Spec.describe "Publish workflow" do
-    Spec.it "can publish effect@4.0.0" do
-      config <- getConfig
-      let
-        -- Location must match what's in the fixture metadata
-        effectLocation = Registry.Location.GitHub
-          { owner: "purescript"
-          , repo: "purescript-effect"
-          , subdir: Nothing
-          }
-        publishData =
-          { name: Utils.unsafePackageName "effect"
-          , location: Just effectLocation
-          , ref: "v4.0.0"
-          , compiler: Utils.unsafeVersion "0.15.9"
-          , resolutions: Nothing
-          }
-
-      -- Submit publish request
-      publishResult <- Client.publish config publishData
-      case publishResult of
-        Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err
-        Right { jobId } -> do
-          -- Poll until job completes
-          job <- Client.pollJob config jobId
-
-          -- If job failed, print logs for debugging
-          unless job.success do
-            Console.log "Job failed! Logs:"
-            let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs
-            Console.log $ String.joinWith "\n" logMessages
-
-          -- Verify job completed successfully
-          when (not job.success) do
-            let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs
-            let errorMessages = map _.message errorLogs
-            Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages
-
-          Assert.shouldSatisfy job.finishedAt isJust
-          Assert.shouldEqual job.jobType V1.PublishJob
-          Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect")
-          Assert.shouldEqual job.ref "v4.0.0"
diff --git a/app-e2e/src/Test/E2E/Support/Client.purs b/app-e2e/src/Test/E2E/Support/Client.purs
new file mode 100644
index 000000000..3c1c02e62
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Support/Client.purs
@@ -0,0 +1,211 @@
+-- | HTTP client for making requests to the registry server during E2E tests.
+-- | This module provides typed helpers for interacting with the Registry API.
+-- |
+-- | All client functions operate in the E2E monad (ReaderT TestEnv Aff) and
+-- | throw on HTTP or parse errors. Use the `try*` variants (e.g., `tryGetJob`)
+-- | when testing error responses - they return `Either ClientError a` with
+-- | typed HTTP status codes.
+module Test.E2E.Support.Client
+  ( ClientError(..)
+  , JobFilter(..)
+  , getJobs
+  , getJobsWith
+  , getJob
+  , tryGetJob
+  , getStatus
+  , publish
+  , unpublish
+  , transfer
+  , packageSets
+  , tryPackageSets
+  , pollJob
+  , printClientError
+  , clientErrorStatus
+  ) where
+
+import Registry.App.Prelude
+
+import Codec.JSON.DecodeError as CJ.DecodeError
+import Control.Monad.Reader (ask)
+import Data.Codec.JSON as CJ
+import Data.DateTime (DateTime)
+import Data.Int as Int
+import Effect.Aff (delay)
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import Effect.Exception (Error)
+import Effect.Exception as Exception
+import Fetch (Method(..))
+import Fetch as Fetch
+import JSON as JSON
+import Registry.API.V1 (Job, JobId, LogLevel, Route(..))
+import Registry.API.V1 as V1
+import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest, PublishData)
+import Registry.Operation as Operation
+import Routing.Duplex as Routing
+import Test.E2E.Support.Types (E2E)
+
+-- | Errors that can occur during client operations
+data ClientError
+  = HttpError { status :: Int, body :: String }
+  | ParseError { msg :: String, raw :: String }
+  | Timeout String
+
+printClientError :: ClientError -> String
+printClientError = case _ of
+  HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body
+  ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw
+  Timeout msg -> "Timeout: " <> msg
+
+-- | Extract the HTTP status code from a ClientError, if it's an HttpError
+clientErrorStatus :: ClientError -> Maybe Int
+clientErrorStatus = case _ of
+  HttpError { status } -> Just status
+  _ -> Nothing
+
+-- | Convert a ClientError to an Effect Error for throwing
+toError :: ClientError -> Error
+toError = Exception.error <<< printClientError
+
+-- | Throw a ClientError as an Aff error
+throw :: forall a. ClientError -> Aff a
+throw = Aff.throwError <<< toError
+
+-- | Print a Route to its URL path using the route codec
+printRoute :: Route -> String
+printRoute = Routing.print V1.routes
+
+-- | Make a GET request and decode the response, returning Either on error.
+tryGet :: forall a. CJ.Codec a -> String -> String -> Aff (Either ClientError a)
+tryGet codec baseUrl path = do
+  response <- Fetch.fetch (baseUrl <> path) { method: GET }
+  body <- response.text
+  if response.status >= 200 && response.status < 300 then
+    case parseJson codec body of
+      Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: body }
+      Right a -> pure $ Right a
+  else
+    pure $ Left $ HttpError { status: response.status, body }
+
+-- | Make a GET request and decode the response. Throws on error.
+get :: forall a. CJ.Codec a -> String -> String -> Aff a
+get codec baseUrl path = tryGet codec baseUrl path >>= either throw pure
+
+-- | Make a POST request with JSON body, returning Either on error.
+tryPost :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff (Either ClientError res)
+tryPost reqCodec resCodec baseUrl path reqBody = do
+  let jsonBody = JSON.print $ CJ.encode reqCodec reqBody
+  response <- Fetch.fetch (baseUrl <> path)
+    { method: POST
+    , headers: { "Content-Type": "application/json" }
+    , body: jsonBody
+    }
+  responseBody <- response.text
+  if response.status >= 200 && response.status < 300 then
+    case parseJson resCodec responseBody of
+      Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody }
+      Right a -> pure $ Right a
+  else
+    pure $ Left $ HttpError { status: response.status, body: responseBody }
+
+-- | Make a POST request with JSON body and decode the response. Throws on error.
+post :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res
+post reqCodec resCodec baseUrl path reqBody = tryPost reqCodec resCodec baseUrl path reqBody >>= either throw pure
+
+data JobFilter = ActiveOnly | IncludeCompleted
+
+-- | Get the list of jobs with a configurable filter
+getJobsWith :: JobFilter -> E2E (Array Job)
+getJobsWith filter = do
+  { clientConfig } <- ask
+  let
+    includeCompleted = case filter of
+      ActiveOnly -> Just false
+      IncludeCompleted -> Just true
+    route = Jobs { since: Nothing, include_completed: includeCompleted }
+  liftAff $ get (CJ.array V1.jobCodec) clientConfig.baseUrl (printRoute route)
+
+-- | Get the list of jobs (includes completed jobs)
+getJobs :: E2E (Array Job)
+getJobs = getJobsWith IncludeCompleted
+
+-- | Get a specific job by ID, with optional log filtering
+getJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E Job
+getJob jobId level since = do
+  { clientConfig } <- ask
+  let route = Job jobId { level, since }
+  liftAff $ get V1.jobCodec clientConfig.baseUrl (printRoute route)
+
+-- | Try to get a specific job by ID, returning Left on HTTP/parse errors.
+-- | Use this when testing error responses (e.g., expecting 404).
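+-- |
+-- | For example, a 404 check (a sketch; mirrors Test.E2E.Endpoint.Jobs):
+-- |
+-- | ```
+-- | result <- tryGetJob (JobId "nonexistent-job-id-12345") Nothing Nothing
+-- | case result of
+-- |   Left err | clientErrorStatus err == Just 404 -> pure unit
+-- |   _ -> Assert.fail "expected an HTTP 404"
+-- | ```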
+tryGetJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E (Either ClientError Job)
+tryGetJob jobId level since = do
+  { clientConfig } <- ask
+  let route = Job jobId { level, since }
+  liftAff $ tryGet V1.jobCodec clientConfig.baseUrl (printRoute route)
+
+-- | Check if the server is healthy
+getStatus :: E2E Unit
+getStatus = do
+  { clientConfig } <- ask
+  liftAff do
+    response <- Fetch.fetch (clientConfig.baseUrl <> printRoute Status) { method: GET }
+    if response.status == 200 then
+      pure unit
+    else do
+      body <- response.text
+      throw $ HttpError { status: response.status, body }
+
+-- | Publish a package
+publish :: PublishData -> E2E V1.JobCreatedResponse
+publish reqBody = do
+  { clientConfig } <- ask
+  liftAff $ post Operation.publishCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Publish) reqBody
+
+-- | Unpublish a package (requires authentication)
+unpublish :: AuthenticatedData -> E2E V1.JobCreatedResponse
+unpublish authData = do
+  { clientConfig } <- ask
+  liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Unpublish) authData
+
+-- | Transfer a package to a new location (requires authentication)
+transfer :: AuthenticatedData -> E2E V1.JobCreatedResponse
+transfer authData = do
+  { clientConfig } <- ask
+  liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Transfer) authData
+
+-- | Submit a package set update request
+packageSets :: PackageSetUpdateRequest -> E2E V1.JobCreatedResponse
+packageSets request = do
+  { clientConfig } <- ask
+  liftAff $ post Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request
+
+-- | Try to submit a package set update, returning Left on HTTP/parse errors.
+-- | Use this when testing error responses (e.g., expecting 400 for unauthorized restricted ops).
+tryPackageSets :: PackageSetUpdateRequest -> E2E (Either ClientError V1.JobCreatedResponse)
+tryPackageSets request = do
+  { clientConfig } <- ask
+  liftAff $ tryPost Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request
+
+-- | Poll a job until it completes or times out.
+-- |
+-- | This is the recommended way to wait for job completion in E2E tests.
+-- | Do not implement custom polling loops; use this function or the higher-level
+-- | helpers in Test.E2E.Support.Env (pollJobOrFail, pollJobExpectFailure).
+pollJob :: JobId -> E2E Job
+pollJob jobId = do
+  { clientConfig } <- ask
+  go clientConfig 1
+  where
+  go config attempt
+    | attempt > config.maxPollAttempts =
+        liftAff $ throw $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts"
+    | otherwise = do
+        liftAff $ delay config.pollInterval
+        job <- getJob jobId (Just V1.Debug) Nothing
+        case (V1.jobInfo job).finishedAt of
+          Just _ -> pure job
+          Nothing -> do
+            when (attempt `mod` 10 == 0) do
+              Console.log $ "Polling job " <> unwrap jobId <> " (attempt " <> Int.toStringAs Int.decimal attempt <> ")"
+            go config (attempt + 1)
diff --git a/app-e2e/src/Test/E2E/Support/Env.purs b/app-e2e/src/Test/E2E/Support/Env.purs
new file mode 100644
index 000000000..06c8d47b9
--- /dev/null
+++ b/app-e2e/src/Test/E2E/Support/Env.purs
@@ -0,0 +1,312 @@
+-- | Shared environment and helper functions for E2E tests.
+-- |
+-- | This module provides:
+-- | - TestEnv type and E2E monad for test helpers (re-exported from Types)
+-- | - Environment construction from env vars (mkTestEnv)
+-- | - WireMock reset helpers for test isolation
+-- | - Job polling with automatic failure handling
+-- | - Git and metadata state inspection
+-- |
+-- | All functions operate in the E2E monad (ReaderT TestEnv Aff), so they
+-- | have access to the shared test environment without explicit passing.
+module Test.E2E.Support.Env
+  ( module ReExports
+  , mkTestEnv
+  , runE2E
+  , resetTestState
+  , resetDatabase
+  , resetGitFixtures
+  , resetLogs
+  , resetGitHubRequestCache
+  , pollJobOrFail
+  , pollJobExpectFailure
+  , signUnpublishOrFail
+  , signTransferOrFail
+  , gitStatus
+  , isCleanGitStatus
+  , waitForAllMatrixJobs
+  , isMatrixJobFor
+  , readMetadata
+  , readManifestIndexEntry
+  , manifestIndexEntryExists
+  , assertReposClean
+  , hasStorageUpload
+  , hasStorageDelete
+  ) where
+
+import Registry.App.Prelude
+
+import Control.Monad.Reader (ask, runReaderT)
+import Data.Array as Array
+import Data.String as String
+import Effect.Aff (Milliseconds(..))
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import Node.ChildProcess.Types (Exit(..))
+import Node.FS.Aff as FS.Aff
+import Node.Library.Execa as Execa
+import Node.Path as Path
+import Registry.API.V1 (Job(..))
+import Registry.API.V1 as V1
+import Registry.App.CLI.Git as Git
+import Registry.App.Effect.Env as Env
+import Registry.Foreign.FSExtra as FS.Extra
+import Registry.Manifest (Manifest(..))
+import Registry.ManifestIndex as ManifestIndex
+import Registry.Metadata (Metadata)
+import Registry.Metadata as Metadata
+import Registry.Operation (AuthenticatedData, TransferData, UnpublishData)
+import Registry.PackageName as PackageName
+import Registry.Test.Assert as Assert
+import Registry.Version as Version
+import Test.E2E.Support.Client as Client
+import Test.E2E.Support.Fixtures (PackageFixture)
+import Test.E2E.Support.Fixtures as Fixtures
+import Test.E2E.Support.Types (ClientConfig, E2E, E2ESpec, TestEnv, WireMockConfig) as ReExports
+import Test.E2E.Support.Types (E2E, TestEnv)
+import Test.E2E.Support.WireMock as WireMock
+
+-- | Build the test environment from environment variables.
+-- | Called once at startup in Main, before running any tests.
+mkTestEnv :: Effect TestEnv
+mkTestEnv = do
+  port <- Env.lookupRequired Env.serverPort
+  let
+    clientConfig =
+      { baseUrl: "http://localhost:" <> show port
+      , pollInterval: Milliseconds 2000.0
+      , maxPollAttempts: 30
+      }
+
+  githubUrl <- Env.lookupRequired Env.githubApiUrl
+  storageUrl <- Env.lookupRequired Env.s3ApiUrl
+  let
+    githubWireMock = { baseUrl: githubUrl }
+    storageWireMock = { baseUrl: storageUrl }
+
+  stateDir <- Env.lookupRequired Env.stateDir
+  privateKey <- Env.lookupRequired Env.pacchettibottiED25519
+
+  pure { clientConfig, githubWireMock, storageWireMock, stateDir, privateKey }
+
+-- | Run an E2E computation with a given environment.
+-- | Primarily used by hoistSpec in Main.
+runE2E :: forall a. TestEnv -> E2E a -> Aff a
+runE2E env = flip runReaderT env
+
+-- | Reset all test state for isolation between tests.
+-- | This is the recommended way to set up test isolation in Spec.before_.
+-- | Resets: database, git fixtures, storage mock, and logs.
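+-- |
+-- | Typical wiring (a sketch): hoist the spec into `Aff` with `runE2E` and
+-- | register this in a `Spec.before_` hook so every test starts from a
+-- | known-clean environment.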
+resetTestState :: E2E Unit
+resetTestState = do
+  resetDatabase
+  resetGitFixtures
+  WireMock.clearStorageRequests
+  WireMock.resetStorageScenarios
+  WireMock.clearGithubRequests
+  resetGitHubRequestCache
+  resetLogs
+
+-- | Reset the database by clearing all job-related tables.
+-- |
+-- | This works because all job tables (publish_jobs, unpublish_jobs, transfer_jobs,
+-- | matrix_jobs, package_set_jobs, logs) have foreign keys to job_info with
+-- | ON DELETE CASCADE. See db/schema.sql for the schema definition.
+resetDatabase :: E2E Unit
+resetDatabase = do
+  { stateDir } <- ask
+  let dbPath = Path.concat [ stateDir, "db", "registry.sqlite3" ]
+  result <- liftAff $ _.getResult =<< Execa.execa "sqlite3" [ dbPath, "DELETE FROM job_info;" ] identity
+  case result.exit of
+    Normally 0 -> pure unit
+    _ -> liftAff $ Aff.throwError $ Aff.error $ "Failed to reset database: " <> result.stderr
+
+-- | Reset the git fixtures to restore original state.
+-- | This restores metadata files modified by unpublish/transfer operations.
+-- |
+-- | Strategy: Reset the origin repos to their initial-fixture tag (created during
+-- | setup), then delete the server's scratch git clones. The server will
+-- | re-clone fresh copies on the next operation, ensuring a clean cache state.
+resetGitFixtures :: E2E Unit
+resetGitFixtures = do
+  { stateDir } <- ask
+  fixturesDir <- liftEffect $ Env.lookupRequired Env.repoFixturesDir
+  let
+    registryOrigin = Path.concat [ fixturesDir, "purescript", "registry" ]
+    registryIndexOrigin = Path.concat [ fixturesDir, "purescript", "registry-index" ]
+    scratchDir = Path.concat [ stateDir, "scratch" ]
+  resetOrigin registryOrigin
+  resetOrigin registryIndexOrigin
+  deleteGitClones scratchDir
+  where
+  resetOrigin dir = do
+    void $ gitOrFail [ "reset", "--hard", "initial-fixture" ] dir
+    void $ gitOrFail [ "clean", "-fd" ] dir
+
+  deleteGitClones scratchDir = do
+    liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry" ]
+    liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry-index" ]
+
+-- | Clear server log files for test isolation.
+-- | Deletes *.log files from the scratch/logs directory but preserves the directory itself.
+resetLogs :: E2E Unit
+resetLogs = do
+  { stateDir } <- ask
+  let logsDir = Path.concat [ stateDir, "scratch", "logs" ]
+  let cmd = "rm -f '" <> logsDir <> "'/*.log 2>/dev/null || true"
+  void $ liftAff $ _.getResult =<< Execa.execa "sh" [ "-c", cmd ] identity
+
+-- | Clear cached GitHub API requests from the scratch cache directory.
+-- | This ensures each test makes fresh API calls rather than using cached responses.
+resetGitHubRequestCache :: E2E Unit
+resetGitHubRequestCache = do
+  { stateDir } <- ask
+  let cacheDir = Path.concat [ stateDir, "scratch", ".cache" ]
+  liftAff $ Aff.attempt (FS.Aff.readdir cacheDir) >>= case _ of
+    Left _ -> pure unit
+    Right files -> for_ files \file ->
+      when (String.Pattern "Request__" `String.contains` file) do
+        FS.Extra.remove (Path.concat [ cacheDir, file ])
+
+-- | Poll a job until completion, failing the test if the job fails.
+-- | Prints error logs on failure for debugging.
+pollJobOrFail :: V1.JobId -> E2E V1.Job
+pollJobOrFail jobId = do
+  job <- Client.pollJob jobId
+  unless (V1.jobInfo job).success do
+    Console.log "Job failed! Logs:"
+    let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs
+    Console.log $ String.joinWith "\n" logMessages
+    let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs
+    let errorMessages = map _.message errorLogs
+    Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages
+  pure job
+
+-- | Poll a job until completion, expecting it to fail.
+-- | Returns the job for further assertions on error messages.
+pollJobExpectFailure :: V1.JobId -> E2E V1.Job
+pollJobExpectFailure jobId = do
+  job <- Client.pollJob jobId
+  when (V1.jobInfo job).success do
+    Assert.fail "Expected job to fail, but it succeeded"
+  pure job
+
+-- | Sign an unpublish operation using the pacchettibotti private key from environment.
+signUnpublishOrFail :: UnpublishData -> E2E AuthenticatedData
+signUnpublishOrFail unpublishData = do
+  { privateKey } <- ask
+  case Fixtures.signUnpublish privateKey unpublishData of
+    Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign unpublish: " <> err
+    Right authData -> pure authData
+
+-- | Sign a transfer operation using the pacchettibotti private key from environment.
+signTransferOrFail :: TransferData -> E2E AuthenticatedData
+signTransferOrFail transferData = do
+  { privateKey } <- ask
+  case Fixtures.signTransfer privateKey transferData of
+    Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign transfer: " <> err
+    Right authData -> pure authData
+
+-- | Run git status --porcelain in a directory and return the output.
+gitStatus :: String -> E2E String
+gitStatus cwd = gitOrFail [ "status", "--porcelain" ] cwd
+
+-- | Run a git command, throwing an exception on failure.
+gitOrFail :: Array String -> FilePath -> E2E String
+gitOrFail args cwd = liftAff $ Git.gitCLI args (Just cwd) >>= case _ of
+  Left err -> Aff.throwError $ Aff.error err
+  Right out -> pure out
+
+-- | Check if git status output indicates a clean working tree (no changes).
+isCleanGitStatus :: String -> Boolean
+isCleanGitStatus status = String.null status
+
+-- | Wait for all matrix jobs for a package to complete.
+waitForAllMatrixJobs :: PackageFixture -> E2E Unit
+waitForAllMatrixJobs pkg = go 120 0
+  where
+  go :: Int -> Int -> E2E Unit
+  go 0 _ = liftAff $ Aff.throwError $ Aff.error "Timed out waiting for matrix jobs to complete"
+  go attempts lastCount = do
+    jobs <- Client.getJobs
+    let
+      matrixJobs = Array.filter (isMatrixJobFor pkg) jobs
+      totalCount = Array.length matrixJobs
+      finishedCount = Array.length $ Array.filter (\j -> isJust (V1.jobInfo j).finishedAt) matrixJobs
+      allFinished = finishedCount == totalCount
+      stillCreating = totalCount > lastCount
+    if totalCount >= 1 && allFinished && not stillCreating then
+      pure unit
+    else do
+      when (attempts `mod` 10 == 0) do
+        Console.log $ "Waiting for matrix jobs: " <> show finishedCount <> "/" <> show totalCount <> " finished"
+      liftAff $ Aff.delay (Milliseconds 1000.0)
+      go (attempts - 1) totalCount
+
+-- | Check if a job is a matrix job for the given package.
+isMatrixJobFor :: PackageFixture -> Job -> Boolean
+isMatrixJobFor pkg = case _ of
+  MatrixJob { packageName, packageVersion } ->
+    packageName == pkg.name && packageVersion == pkg.version
+  _ -> false
+
+-- | Read and parse the metadata file for a package from the server's scratch clone.
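+-- | The file lives at `scratch/registry/metadata/<name>.json` under the state
+-- | directory, so it reflects any commits the server has made to its local clone.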
+readMetadata :: PackageName -> E2E Metadata +readMetadata packageName = do + { stateDir } <- ask + let metadataPath = Path.concat [ stateDir, "scratch", "registry", "metadata", PackageName.print packageName <> ".json" ] + liftAff (readJsonFile Metadata.codec metadataPath) >>= case _ of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to read metadata for " <> PackageName.print packageName <> ": " <> err + Right metadata -> pure metadata + +-- | Read and parse the manifest index entry for a package from the server's scratch clone. +readManifestIndexEntry :: PackageName -> E2E (Array Manifest) +readManifestIndexEntry packageName = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath packageName >>= case _ of + Left err -> Aff.throwError $ Aff.error $ "Failed to read manifest index for " <> PackageName.print packageName <> ": " <> err + Right manifests -> pure $ Array.fromFoldable manifests + +-- | Check if a specific package version exists in the manifest index. +manifestIndexEntryExists :: PackageFixture -> E2E Boolean +manifestIndexEntryExists pkg = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath pkg.name >>= case _ of + Left _ -> pure false + Right manifests -> pure $ Array.any (\(Manifest m) -> m.version == pkg.version) $ Array.fromFoldable manifests + +-- | Assert that both git repos (registry and registry-index) have no uncommitted changes. +assertReposClean :: E2E Unit +assertReposClean = do + { stateDir } <- ask + let scratchRegistry = Path.concat [ stateDir, "scratch", "registry" ] + let scratchRegistryIndex = Path.concat [ stateDir, "scratch", "registry-index" ] + registryStatus <- gitStatus scratchRegistry + registryIndexStatus <- gitStatus scratchRegistryIndex + unless (isCleanGitStatus registryStatus) do + Assert.fail $ "registry repo has uncommitted changes:\n" <> registryStatus + unless (isCleanGitStatus registryIndexStatus) do + Assert.fail $ "registry-index repo has uncommitted changes:\n" <> registryIndexStatus + +-- | Check if a storage upload (PUT) occurred for a specific package. +hasStorageUpload :: PackageFixture -> E2E Boolean +hasStorageUpload pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + putRequests = WireMock.filterByMethod "PUT" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) putRequests + +-- | Check if a storage delete (DELETE) occurred for a specific package. +hasStorageDelete :: PackageFixture -> E2E Boolean +hasStorageDelete pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + deleteRequests = WireMock.filterByMethod "DELETE" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) deleteRequests diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs new file mode 100644 index 000000000..7fe0b556a --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -0,0 +1,286 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. 
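+-- |
+-- | A hedged sketch of the typical pattern in the endpoint suites (the
+-- | `Client` and `Env` helpers are from the other support modules):
+-- |
+-- | ```purescript
+-- | { jobId } <- Client.publish Fixtures.effectPublishData
+-- | _ <- Env.pollJobOrFail jobId
+-- | uploaded <- Env.hasStorageUpload Fixtures.effect
+-- | unless uploaded do Assert.fail "expected a tarball upload for effect@4.0.0"
+-- | ```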
+module Test.E2E.Support.Fixtures + ( PackageFixture + , effect + , console + , prelude + , effectPublishData + , effectPublishDataDifferentLocation + , consolePublishData + , failingTransferData + , nonexistentTransferData + , trusteeAuthenticatedData + , effectUnpublishData + , effectTransferData + , nonexistentUnpublishData + , preludeUnpublishData + , signUnpublish + , signTransfer + , packageSetAddRequest + , packageSetCompilerChangeRequest + , packageSetRemoveRequest + , signPackageSet + , invalidJsonIssueEvent + ) where + +import Registry.App.Prelude + +import Data.Codec.JSON as CJ +import Data.Map as Map +import JSON as JSON +import Registry.Location (Location(..)) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PackageSetUpdateRequest, TransferData, UnpublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.SSH as SSH +import Registry.Test.Utils as Utils +import Registry.Version (Version) + +type PackageFixture = { name :: PackageName, version :: Version } + +-- | effect@4.0.0 fixture package +effect :: PackageFixture +effect = { name: Utils.unsafePackageName "effect", version: Utils.unsafeVersion "4.0.0" } + +-- | console@6.1.0 fixture package +console :: PackageFixture +console = { name: Utils.unsafePackageName "console", version: Utils.unsafeVersion "6.1.0" } + +-- | prelude@6.0.1 fixture package +prelude :: PackageFixture +prelude = { name: Utils.unsafePackageName "prelude", version: Utils.unsafeVersion "6.0.1" } + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: Operation.PublishData +effectPublishData = + { name: effect.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: effect.version + } + +-- | Publish data for effect@99.0.0 with a DIFFERENT location. +-- | Uses a non-existent version to avoid duplicate job detection, +-- | but still targets an existing package to test location conflicts. +effectPublishDataDifferentLocation :: Operation.PublishData +effectPublishDataDifferentLocation = + effectPublishData + { location = Just $ GitHub + { owner: "someone-else" + , repo: "purescript-effect" + , subdir: Nothing + } + , version = Utils.unsafeVersion "99.0.0" + , ref = "v99.0.0" + } + +-- | Publish data for console@6.1.0, used for concurrency tests. +-- | Console depends on effect ^4.0.0 and prelude ^6.0.0. +-- | This matches the fixtures in app/fixtures/github-packages/console-6.1.0 +consolePublishData :: Operation.PublishData +consolePublishData = + { name: console.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-console" + , subdir: Nothing + } + , ref: "v6.1.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: console.version + } + +-- | Unpublish data for effect@4.0.0, used for publish-then-unpublish tests. +effectUnpublishData :: UnpublishData +effectUnpublishData = + { name: effect.name + , version: effect.version + , reason: "Testing unpublish flow" + } + +-- | Transfer data for effect, used for transfer tests. +-- | Transfers effect to a different GitHub owner. 
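+-- |
+-- | A sketch of how transfer fixtures are submitted. This assumes a
+-- | `Client.transfer` helper analogous to `Client.unpublish`; signing mirrors
+-- | `signTransferOrFail` from the Env support module:
+-- |
+-- | ```purescript
+-- | authData <- Env.signTransferOrFail Fixtures.effectTransferData
+-- | { jobId } <- Client.transfer authData
+-- | _ <- Env.pollJobOrFail jobId
+-- | ```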
+effectTransferData :: TransferData +effectTransferData = + { name: effect.name + , newLocation: GitHub + { owner: "new-owner" + , repo: "purescript-effect" + , subdir: Nothing + } + } + +-- | Unpublish data for a nonexistent package. +-- | Used to test error handling when unpublishing an unknown package. +nonexistentUnpublishData :: UnpublishData +nonexistentUnpublishData = + { name: Utils.unsafePackageName "nonexistent-package" + , version: Utils.unsafeVersion "1.0.0" + , reason: "Testing error handling for unknown package" + } + +-- | Unpublish data for prelude@6.0.1. +-- | This package was published long ago (in fixtures), so it should fail +-- | the 48-hour time limit check. +preludeUnpublishData :: UnpublishData +preludeUnpublishData = + { name: prelude.name + , version: prelude.version + , reason: "Testing 48-hour limit enforcement" + } + +-- | Sign an unpublish operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signUnpublish :: String -> UnpublishData -> Either String AuthenticatedData +signUnpublish privateKey unpublishData = do + let rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishData + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private rawPayload + pure + { payload: Unpublish unpublishData + , rawPayload + , signature + } + +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = do + let + transferPayload :: TransferData + transferPayload = + { name: prelude.name + , newLocation: GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } + } + + rawPayload :: String + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + + { payload: Transfer transferPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. +trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = do + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: prelude.name + , version: prelude.version + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + + { payload: Unpublish unpublishPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Transfer data for a nonexistent package. +-- | Used to test error handling when transferring an unknown package. +-- | Job should fail with "has not been published before" error. +nonexistentTransferData :: TransferData +nonexistentTransferData = + { name: Utils.unsafePackageName "nonexistent-package" + , newLocation: GitHub + { owner: "someone" + , repo: "purescript-nonexistent" + , subdir: Nothing + } + } + +-- | Sign a transfer operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. 
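+-- |
+-- | The test environment decodes the key once at startup, so callers read it
+-- | from `TestEnv` rather than re-decoding, as `signTransferOrFail` in the Env
+-- | support module does (sketch):
+-- |
+-- | ```purescript
+-- | { privateKey } <- ask
+-- | case signTransfer privateKey transferData of
+-- |   Left err -> liftAff $ Aff.throwError $ Aff.error err
+-- |   Right authData -> pure authData
+-- | ```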
+signTransfer :: String -> TransferData -> Either String AuthenticatedData +signTransfer privateKey transferData = do + let rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferData + private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + let signature = SSH.sign private rawPayload + pure + { payload: Transfer transferData + , rawPayload + , signature + } + +-- | type-equality@4.0.1 fixture package (exists in registry-index but not in initial package set) +typeEquality :: PackageFixture +typeEquality = { name: Utils.unsafePackageName "type-equality", version: Utils.unsafeVersion "4.0.1" } + +-- | Package set request to add type-equality@4.0.1. +-- | This is an unauthenticated request (no signature) since adding packages +-- | doesn't require trustee authentication. +packageSetAddRequest :: PackageSetUpdateRequest +packageSetAddRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton typeEquality.name (Just typeEquality.version) + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to change the compiler version. +-- | This requires authentication (pacchettibotti signature) since changing +-- | the compiler is a restricted operation. +packageSetCompilerChangeRequest :: PackageSetUpdateRequest +packageSetCompilerChangeRequest = + let + payload = PackageSetUpdate + { compiler: Just (Utils.unsafeVersion "0.15.11") + , packages: Map.empty + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to remove a package. +-- | This requires authentication (pacchettibotti signature) since removing +-- | packages is a restricted operation. +packageSetRemoveRequest :: PackageSetUpdateRequest +packageSetRemoveRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton effect.name Nothing + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Sign a package set update request using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signPackageSet :: String -> PackageSetUpdateRequest -> Either String PackageSetUpdateRequest +signPackageSet privateKey request = do + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private request.rawPayload + pure request { signature = Just signature } + +-- | GitHub issue event with invalid JSON in the body. +-- | Used to test that malformed JSON is handled gracefully with an error comment. +-- | Note: The inner JSON has a trailing comma (`"v1.0.0",}`) which is intentionally +-- | malformed to trigger a parse error. +invalidJsonIssueEvent :: String +invalidJsonIssueEvent = + """{"sender":{"login":"packaging-team-user"},"issue":{"number":101,"body":"```json\n{\"name\": \"effect\", \"ref\": \"v1.0.0\",}\n```"}}""" diff --git a/app-e2e/src/Test/E2E/Support/Types.purs b/app-e2e/src/Test/E2E/Support/Types.purs new file mode 100644 index 000000000..2e4429057 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Types.purs @@ -0,0 +1,48 @@ +-- | Core types for E2E tests. +-- | +-- | This module defines the shared environment and monad types used by all +-- | E2E test helpers. 
It's kept separate to avoid circular dependencies +-- | between Env, Client, and WireMock modules. +module Test.E2E.Support.Types + ( TestEnv + , ClientConfig + , WireMockConfig + , E2E + , E2ESpec + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ReaderT) +import Effect.Aff (Milliseconds) +import Test.Spec (SpecT) + +-- | Configuration for the E2E test client +type ClientConfig = + { baseUrl :: String + , pollInterval :: Milliseconds + , maxPollAttempts :: Int + } + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | The shared test environment available to all E2E helpers. +-- | Constructed once at startup from environment variables. +type TestEnv = + { clientConfig :: ClientConfig + , githubWireMock :: WireMockConfig + , storageWireMock :: WireMockConfig + , stateDir :: String + , privateKey :: String + } + +-- | The base monad for E2E test helpers. +-- | All Client, Env, and WireMock functions operate in this monad. +type E2E = ReaderT TestEnv Aff + +-- | The spec type for E2E tests. +-- | Test modules export `spec :: E2ESpec` instead of `spec :: Spec Unit`. +type E2ESpec = SpecT E2E Unit Identity Unit diff --git a/app-e2e/src/Test/E2E/Support/WireMock.purs b/app-e2e/src/Test/E2E/Support/WireMock.purs new file mode 100644 index 000000000..4e3789fca --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/WireMock.purs @@ -0,0 +1,173 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +-- | +-- | Also provides helpers for managing WireMock scenarios (stateful mocking). +-- | Scenarios allow responses to change based on state transitions - e.g., a +-- | package tarball returns 404 until it's been "uploaded" via PUT, after which +-- | it returns 200. +module Test.E2E.Support.WireMock + ( WireMockRequest + , WireMockError(..) 
+ , getGithubRequests + , getStorageRequests + , clearGithubRequests + , clearStorageRequests + , resetStorageScenarios + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Reader (ask) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Int as Int +import Data.String as String +import Effect.Aff as Aff +import Effect.Exception (Error) +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Test.E2E.Support.Types (E2E) + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. 
CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from a WireMock instance +getRequestsFrom :: String -> Aff (Either WireMockError (Array WireMockRequest)) +getRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from a WireMock instance +clearRequestsFrom :: String -> Aff (Either WireMockError Unit) +clearRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Reset all scenarios to initial state on a WireMock instance +resetScenariosOn :: String -> Aff (Either WireMockError Unit) +resetScenariosOn baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/scenarios/reset") { method: POST } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Helper to run a WireMock operation and throw on error +orFail :: forall a. String -> Either WireMockError a -> Aff a +orFail context = case _ of + Left err -> Aff.throwError $ Aff.error $ context <> ": " <> printWireMockError err + Right a -> pure a + +-- | Get captured requests from the GitHub WireMock. +getGithubRequests :: E2E (Array WireMockRequest) +getGithubRequests = do + { githubWireMock } <- ask + liftAff $ getRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to get GitHub WireMock requests" + +-- | Get captured requests from the storage WireMock (S3, Pursuit). +getStorageRequests :: E2E (Array WireMockRequest) +getStorageRequests = do + { storageWireMock } <- ask + liftAff $ getRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to get storage WireMock requests" + +-- | Clear the GitHub WireMock request journal. +clearGithubRequests :: E2E Unit +clearGithubRequests = do + { githubWireMock } <- ask + liftAff $ clearRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to clear GitHub WireMock requests" + +-- | Clear the storage WireMock request journal. +clearStorageRequests :: E2E Unit +clearStorageRequests = do + { storageWireMock } <- ask + liftAff $ clearRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to clear storage WireMock requests" + +-- | Reset all storage WireMock scenarios to their initial state. 
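+-- |
+-- | After a reset, stateful stubs return to their initial state (e.g. a tarball
+-- | GET 404s again until re-uploaded). A hedged sketch combining this with the
+-- | journal helpers defined below:
+-- |
+-- | ```purescript
+-- | resetStorageScenarios
+-- | clearStorageRequests
+-- | -- ...drive an operation against the server, then inspect the journal...
+-- | requests <- getStorageRequests
+-- | let puts = filterByMethod "PUT" requests
+-- | when (Array.null puts) do failWithRequests "expected an upload" requests
+-- | ```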
+resetStorageScenarios :: E2E Unit +resetStorageScenarios = do + { storageWireMock } <- ask + liftAff $ resetScenariosOn storageWireMock.baseUrl >>= orFail "Failed to reset storage WireMock scenarios" + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests = String.joinWith "\n" <<< map formatRequest + where + formatRequest req = req.method <> " " <> req.url <> case req.body of + Nothing -> "" + Just body -> "\n Body: " <> body + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ String.joinWith "\n" [ msg, "\nCaptured requests:", formatRequests requests ] diff --git a/app-e2e/src/Test/E2E/Workflow.purs b/app-e2e/src/Test/E2E/Workflow.purs new file mode 100644 index 000000000..a03be0562 --- /dev/null +++ b/app-e2e/src/Test/E2E/Workflow.purs @@ -0,0 +1,88 @@ +-- | End-to-end tests for multi-operation workflows. +-- | +-- | These tests verify complex scenarios involving multiple operations, +-- | specifically dependency state validation across publish/unpublish sequences. +module Test.E2E.Workflow (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Dependency and unpublish interactions" do + Spec.it "publishing a package fails when its dependency was unpublished" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + _ <- Env.pollJobOrFail unpublishJobId + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + Assert.fail "Expected tarball delete from S3 for effect@4.0.0" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + when manifestExists do + Assert.fail "Expected effect@4.0.0 to be removed from manifest index after unpublish" + + WireMock.clearStorageRequests + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + consoleJob <- Env.pollJobExpectFailure consoleJobId + + let + logs = (V1.jobInfo consoleJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "Could not produce valid dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected dependency resolution error, got:\n" <> String.joinWith "\n" logMessages + + consoleUploadOccurred <- Env.hasStorageUpload Fixtures.console + when consoleUploadOccurred do + Assert.fail "Expected no tarball upload 
for console@6.1.0 after failed publish" + + Spec.it "unpublishing a package fails when dependents exist in manifest index" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + _ <- Env.pollJobOrFail consoleJobId + + WireMock.clearStorageRequests + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobExpectFailure unpublishJobId + + let + logs = (V1.jobInfo unpublishJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "unsatisfied dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected unsatisfied dependencies error, got:\n" <> + String.joinWith "\n" logMessages + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + when deleteOccurred do + Assert.fail "Expected no tarball delete for effect@4.0.0 after failed unpublish" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + unless manifestExists do + Assert.fail "Expected effect@4.0.0 to still exist in manifest index after failed unpublish" + + Metadata effectMeta <- Env.readMetadata Fixtures.effect.name + unless (isJust $ Map.lookup Fixtures.effect.version effectMeta.published) do + Assert.fail "Expected effect@4.0.0 to still be in published metadata after failed unpublish" diff --git a/app-e2e/src/Test/Main.purs b/app-e2e/src/Test/Main.purs new file mode 100644 index 000000000..a5b18d43c --- /dev/null +++ b/app-e2e/src/Test/Main.purs @@ -0,0 +1,40 @@ +module Test.E2E.Main (main) where + +import Registry.App.Prelude + +import Data.Time.Duration (Milliseconds(..)) +import Test.E2E.Endpoint.Jobs as Jobs +import Test.E2E.Endpoint.PackageSets as PackageSets +import Test.E2E.Endpoint.Publish as Publish +import Test.E2E.Endpoint.Transfer as Transfer +import Test.E2E.Endpoint.Unpublish as Unpublish +import Test.E2E.GitHubIssue as GitHubIssue +import Test.E2E.Support.Env (assertReposClean, mkTestEnv, resetTestState, runE2E) +import Test.E2E.Workflow as Workflow +import Test.Spec (hoistSpec) +import Test.Spec as Spec +import Test.Spec.Reporter.Console (consoleReporter) +import Test.Spec.Runner.Node (runSpecAndExitProcess') +import Test.Spec.Runner.Node.Config as Cfg + +main :: Effect Unit +main = do + env <- mkTestEnv + runSpecAndExitProcess' config [ consoleReporter ] $ hoistE2E env do + Spec.before_ resetTestState $ Spec.after_ assertReposClean $ Spec.describe "E2E Tests" do + Spec.describe "Endpoints" do + Spec.describe "Publish" Publish.spec + Spec.describe "Jobs" Jobs.spec + Spec.describe "Unpublish" Unpublish.spec + Spec.describe "Transfer" Transfer.spec + Spec.describe "PackageSets" PackageSets.spec + + Spec.describe "Workflows" do + Spec.describe "GitHubIssue" GitHubIssue.spec + Spec.describe "Multi-operation" Workflow.spec + where + hoistE2E env = hoistSpec identity (\_ m -> runE2E env m) + config = + { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 60_000.0 } + , parseCLIOptions: false + } diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": 
\"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/github-packages/console-6.1.0/LICENSE b/app/fixtures/github-packages/console-6.1.0/LICENSE new file mode 100644 index 000000000..311379c1e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/LICENSE @@ -0,0 +1,26 @@ +Copyright 2018 PureScript + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/app/fixtures/github-packages/console-6.1.0/bower.json b/app/fixtures/github-packages/console-6.1.0/bower.json new file mode 100644 index 000000000..da93c7f6e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/bower.json @@ -0,0 +1,22 @@ +{ + "name": "purescript-console", + "homepage": "https://github.com/purescript/purescript-console", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-console.git" + }, + "ignore": [ + "**/.*", + "bower_components", + "node_modules", + "output", + "test", + "bower.json", + "package.json" + ], + "dependencies": { + "purescript-effect": "^4.0.0", + "purescript-prelude": "^6.0.0" + } +} diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js new file mode 100644 index 000000000..432a4241b --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js @@ -0,0 +1,9 @@ +export const log = s => () => console.log(s); +export const warn = s => () => console.warn(s); +export const error = s => () => console.error(s); +export const info = s => () => console.info(s); +export const debug = s => () => console.debug(s); +export const time = s => () => console.time(s); +export const timeLog = s => () => console.timeLog(s); +export const timeEnd = s => () => console.timeEnd(s); +export const clear = () => console.clear(); diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs new file mode 100644 index 000000000..364ee2b1c --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs @@ -0,0 +1,46 @@ +-- | This module provides functions for outputting strings to the console. +module Effect.Console + ( log + , logShow + , warn + , warnShow + , error + , errorShow + , info + , infoShow + , debug + , debugShow + , time + , timeLog + , timeEnd + , clear + ) where + +import Prelude + +import Effect (Effect) + +foreign import log :: String -> Effect Unit +foreign import warn :: String -> Effect Unit +foreign import error :: String -> Effect Unit +foreign import info :: String -> Effect Unit +foreign import debug :: String -> Effect Unit +foreign import time :: String -> Effect Unit +foreign import timeLog :: String -> Effect Unit +foreign import timeEnd :: String -> Effect Unit +foreign import clear :: Effect Unit + +logShow :: forall a. Show a => a -> Effect Unit +logShow = log <<< show + +warnShow :: forall a. Show a => a -> Effect Unit +warnShow = warn <<< show + +errorShow :: forall a. Show a => a -> Effect Unit +errorShow = error <<< show + +infoShow :: forall a. Show a => a -> Effect Unit +infoShow = info <<< show + +debugShow :: forall a. 
Show a => a -> Effect Unit +debugShow = debug <<< show diff --git a/app/fixtures/github-packages/effect-4.0.0/bower.json b/app/fixtures/github-packages/effect-4.0.0/bower.json index bed5c5ab8..3b520e6ae 100644 --- a/app/fixtures/github-packages/effect-4.0.0/bower.json +++ b/app/fixtures/github-packages/effect-4.0.0/bower.json @@ -16,7 +16,6 @@ "package.json" ], "dependencies": { - "purescript-prelude": "^6.0.0", - "purescript-type-equality": "^4.0.0" + "purescript-prelude": "^6.0.0" } } diff --git a/app/fixtures/github-packages/transitive-1.0.0/bower.json b/app/fixtures/github-packages/transitive-1.0.0/bower.json new file mode 100644 index 000000000..d0d4d0bd1 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/bower.json @@ -0,0 +1,12 @@ +{ + "name": "purescript-transitive", + "homepage": "https://github.com/purescript/purescript-transitive", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-transitive.git" + }, + "dependencies": { + "purescript-effect": "^4.0.0" + } +} diff --git a/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs new file mode 100644 index 000000000..71d771f62 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs @@ -0,0 +1,6 @@ +module Transitive where + +import Prelude + +uno :: Int +uno = one diff --git a/app/fixtures/package-sets/latest-compatible-sets.json b/app/fixtures/package-sets/latest-compatible-sets.json index ceba8dd7a..5cdbbb9c2 100644 --- a/app/fixtures/package-sets/latest-compatible-sets.json +++ b/app/fixtures/package-sets/latest-compatible-sets.json @@ -1,3 +1,3 @@ { - "0.15.9": "psc-0.15.9-20230105" + "0.15.10": "psc-0.15.10-20230105" } diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz new file mode 100644 index 000000000..c06e9b276 Binary files /dev/null and b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz differ diff --git a/app/fixtures/registry-index/pr/el/prelude b/app/fixtures/registry-index/pr/el/prelude index e6c7d0759..8a01e7d6c 100644 --- a/app/fixtures/registry-index/pr/el/prelude +++ b/app/fixtures/registry-index/pr/el/prelude @@ -1 +1 @@ -{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"description":"The PureScript Prelude","dependencies":{}} +{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"ref":"v6.0.1","description":"The PureScript Prelude","dependencies":{}} diff --git a/app/fixtures/registry-index/ty/pe/type-equality b/app/fixtures/registry-index/ty/pe/type-equality index 8fbce8f14..8d5fc1d6e 100644 --- a/app/fixtures/registry-index/ty/pe/type-equality +++ b/app/fixtures/registry-index/ty/pe/type-equality @@ -1 +1 @@ -{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"dependencies":{}} \ No newline at end of file +{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"ref":"v4.0.1","dependencies":{}} \ No newline at end of file diff --git a/app/fixtures/registry-storage/console-6.1.0.tar.gz b/app/fixtures/registry-storage/console-6.1.0.tar.gz new file mode 100644 index 000000000..52c94f426 Binary files /dev/null and 
b/app/fixtures/registry-storage/console-6.1.0.tar.gz differ diff --git a/app/fixtures/registry-storage/effect-4.0.0.tar.gz b/app/fixtures/registry-storage/effect-4.0.0.tar.gz new file mode 100644 index 000000000..e86537b25 Binary files /dev/null and b/app/fixtures/registry-storage/effect-4.0.0.tar.gz differ diff --git a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz index 1df21a580..87d64d2c1 100644 Binary files a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz and b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz differ diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 0cffc4ab8..8c14057ad 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -5,8 +5,12 @@ }, "published": { "6.0.1": { - "bytes": 31142, - "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", + "bytes": 31129, + "compilers": [ + "0.15.10", + "0.15.11" + ], + "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" } diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index 68f250604..e51b52614 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -5,8 +5,12 @@ }, "published": { "4.0.1": { - "bytes": 2184, - "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", + "bytes": 2179, + "compilers": [ + "0.15.10", + "0.15.11" + ], + "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" } diff --git a/app/fixtures/registry/package-sets/0.0.1.json b/app/fixtures/registry/package-sets/0.0.1.json new file mode 100644 index 000000000..cc82ad7d4 --- /dev/null +++ b/app/fixtures/registry/package-sets/0.0.1.json @@ -0,0 +1,8 @@ +{ + "version": "0.0.1", + "compiler": "0.15.10", + "published": "2024-01-01", + "packages": { + "prelude": "6.0.1" + } +} diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": "MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 19e09564c..9ee16a93b 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1,13 +1,17 @@ module Registry.App.API ( AuthenticatedEffects + , COMPILER_CACHE + , CompilerCache(..) 
, PackageSetUpdateEffects , PublishEffects + , _compilerCache , authenticated , copyPackageSourceFiles + , findAllCompilers , formatPursuitResolutions + , getPacchettiBotti , packageSetUpdate , packagingTeam - , parseInstalledModulePath , publish , removeIgnoredTarballFiles ) where @@ -16,15 +20,17 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array -import Data.Array.NonEmpty as NEA import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ +import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record -import Data.DateTime (DateTime) +import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.List.NonEmpty as NonEmptyList +import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet @@ -33,7 +39,7 @@ import Data.String.CodeUnits as String.CodeUnits import Data.String.NonEmpty as NonEmptyString import Data.String.Regex as Regex import Effect.Aff as Aff -import Effect.Ref as Ref +import Effect.Unsafe (unsafePerformEffect) import JSON as JSON import Node.ChildProcess.Types (Exit(..)) import Node.FS.Aff as FS.Aff @@ -46,15 +52,17 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Registry.App.Auth as Auth -import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive +import Registry.App.Effect.Cache (class FsEncodable, Cache) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.PackageSets (Change(..), PACKAGE_SETS) @@ -73,19 +81,20 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob -import Registry.Foreign.Octokit (IssueNumber(..), Team) -import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Octokit (Team) import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) 
+import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PublishData) import Registry.Operation as Operation -import Registry.Operation.Validation (UnpublishError(..), validateNoExcludedObligatoryFiles) +import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation import Registry.Owner as Owner import Registry.PackageName as PackageName @@ -94,25 +103,26 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 +import Registry.Solver (CompilerIndex, DependencyIndex, Intersection, SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +-- | Effect row for package set updates. Authentication is done at the API +-- | boundary, so we don't need GITHUB or GITHUB_EVENT_ENV effects here. +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + LOG + EXCEPT String + r) --- | Process a package set update. Package set updates are only processed via --- | GitHub and not the HTTP API, so they require access to the GitHub env. -packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate payload = do - { issue, username } <- Env.askGitHubEvent +-- | Process a package set update from a queued job. Authentication has already +-- | been verified at the API boundary, so we don't need to check team membership. +packageSetUpdate :: forall r. PackageSetOperation -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate operation = do + let PackageSetUpdate payload = operation - Log.debug $ Array.fold - [ "Package set update created from issue " <> show (un IssueNumber issue) <> " by user " <> username - , " with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload - ] + Log.debug $ "Package set update job starting with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload latestPackageSet <- Registry.readLatestPackageSet >>= case _ of Nothing -> do @@ -124,34 +134,8 @@ packageSetUpdate payload = do let prevCompiler = (un PackageSet latestPackageSet).compiler let prevPackages = (un PackageSet latestPackageSet).packages - Log.debug "Determining whether authentication is required (the compiler changed or packages were removed)..." - let didChangeCompiler = maybe false (not <<< eq prevCompiler) payload.compiler - let didRemovePackages = any isNothing payload.packages - - -- Changing the compiler version or removing packages are both restricted - -- to only the packaging team. We throw here if this is an authenticated - -- operation and we can't verify they are a member of the packaging team. - when (didChangeCompiler || didRemovePackages) do - Log.debug "Authentication is required. Verifying the user can take authenticated actions..." - GitHub.listTeamMembers packagingTeam >>= case _ of - Left githubError -> do - Log.error $ "Failed to retrieve the members of the packaging team from GitHub: " <> Octokit.printGitHubError githubError - Except.throw $ Array.fold - [ "This package set update changes the compiler version or removes a " - , "package from the package set. 
Only members of the " - , "@purescript/packaging team can take these actions, but we were " - , "unable to authenticate your account." - ] - Right members -> do - unless (Array.elem username members) do - Log.error $ "Username " <> username <> " is not a member of the packaging team, aborting..." - Except.throw $ Array.fold - [ "This package set update changes the compiler version or " - , "removes a package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but your " - , "username is not a member of the packaging team." - ] - Log.debug $ "Authentication verified for package set update by user " <> username + -- Note: authentication for restricted operations (compiler change, package removal) + -- is handled at the API boundary in the Router, not here. -- The compiler version cannot be downgraded. for_ payload.compiler \version -> when (version < prevCompiler) do @@ -209,18 +193,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." + Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." -type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -253,7 +237,7 @@ authenticated auth = case auth.payload of pure published pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Unpublishing is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -277,10 +261,14 @@ authenticated auth = case auth.payload of { published = Map.delete payload.version prev.published , unpublished = Map.insert payload.version unpublished prev.unpublished } + -- Delete the manifest entry first so ManifestIndex.delete can fail if other + -- packages still depend on this version. This way, we detect dependency + -- violations before performing any irreversible side effects like deleting + -- the tarball from storage. 
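+      -- (The multi-operation E2E suite exercises this ordering: a failed
+      -- unpublish of a package with dependents must leave both the tarball and
+      -- the manifest index entry intact.)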
+ Registry.deleteManifest payload.name payload.version Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated - Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" + Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -291,7 +279,7 @@ authenticated auth = case auth.payload of Just value -> pure value pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Transferring is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -311,21 +299,25 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be -- | upload. If it has been published before then the existing metadata will be -- | updated with the new version. -publish :: forall r. PackageSource -> PublishData -> Run (PublishEffects + r) Unit -publish source payload = do +-- +-- The legacyIndex argument contains the unverified manifests produced by the +-- legacy importer; these manifests can be used on legacy packages to conform +-- them to the registry rule that transitive dependencies are not allowed. +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) +publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name - Log.debug $ "Publishing " <> printPackageSource source <> " package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload + Log.debug $ "Publishing package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload Log.debug $ "Verifying metadata..." Metadata existingMetadata <- Registry.readMetadata payload.name >>= case _ of @@ -368,23 +360,54 @@ publish source payload = do -- the package directory along with its detected publish time. Log.debug "Metadata validated. Fetching package source code..." 
tmp <- Tmp.mkTmpDir - { path: packageDirectory, published: publishedTime } <- Source.fetch source tmp existingMetadata.location payload.ref - Log.debug $ "Package downloaded to " <> packageDirectory <> ", verifying it contains a src directory with valid modules..." - Internal.Path.readPursFiles (Path.concat [ packageDirectory, "src" ]) >>= case _ of + -- Legacy imports may encounter packages whose GitHub repositories no longer + -- exist but whose tarballs are stored in the registry-archive. When Source.fetch + -- fails with InaccessibleRepo during a legacy import, we fall back to fetching + -- from the registry-archive instead. + { path: downloadedPackage, published: publishedTime } <- + Source.fetchEither tmp existingMetadata.location payload.ref >>= case _ of + Right result -> + pure result + Left (Source.InaccessibleRepo address) | isJust maybeLegacyIndex -> do + Log.warn $ Array.fold + [ "GitHub repository inaccessible during legacy import: " + , address.owner + , "/" + , address.repo + ] + Log.info "Falling back to registry-archive tarball..." + version <- case LenientVersion.parse payload.ref of + Left _ -> Except.throw $ Array.fold + [ "Cannot fall back to archive: ref " + , payload.ref + , " is not a valid version" + ] + Right v -> pure $ LenientVersion.version v + Archive.fetch tmp payload.name version + Left err -> + Except.throw $ Source.printFetchError err + + Log.debug $ "Package downloaded to " <> downloadedPackage <> ", verifying it contains a src directory with valid modules..." + Internal.Path.readPursFiles (Path.concat [ downloadedPackage, "src" ]) >>= case _ of Nothing -> Except.throw $ Array.fold [ "This package has no PureScript files in its `src` directory. " , "All package sources must be in the `src` directory, with any additional " , "sources indicated by the `files` key in your manifest." ] - Just files -> do + Just files -> + -- The 'validatePursModules' function uses language-cst-parser, which only + -- supports syntax back to 0.15.0. We'll still try to validate the package + -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of + Left formattedError | payload.compiler < Purs.minLanguageCSTParser -> do + Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError + Log.debug $ "Skipping check because package is published with a pre-0.15.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> Except.throw $ Array.fold [ "This package has either malformed or disallowed PureScript module names " - , "in its `src` directory. All package sources must be in the `src` directory, " - , "with any additional sources indicated by the `files` key in your manifest." 
+ , "in its source: " , formattedError ] Right _ -> @@ -393,13 +416,18 @@ publish source payload = do -- If the package doesn't have a purs.json we can try to make one - possible scenarios: -- - in case it has a spago.yaml then we know how to read that, and have all the info to move forward -- - if it's a legacy import then we can try to infer as much info as possible to make a manifest - let packagePursJson = Path.concat [ packageDirectory, "purs.json" ] + let packagePursJson = Path.concat [ downloadedPackage, "purs.json" ] hadPursJson <- Run.liftEffect $ FS.Sync.exists packagePursJson - let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ] + let packageSpagoYaml = Path.concat [ downloadedPackage, "spago.yaml" ] hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml - Manifest manifest <- + address <- case existingMetadata.location of + Git _ -> Except.throw "Packages can only come from GitHub for now." + GitHub { subdir: Just subdir } -> Except.throw $ "Packages cannot yet use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." + GitHub { owner, repo } -> pure { owner, repo } + + Manifest receivedManifest <- if hadPursJson then Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of Left error -> do @@ -417,26 +445,22 @@ publish source payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest payload.ref config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold - [ "Converted your spago.yaml into a purs.json manifest to use for publishing:\n" - , "```json" + Log.notice $ Array.fold + [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```" + , "\n```\n" ] pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." - address <- case existingMetadata.location of - Git _ -> Except.throw "Legacy packages can only come from GitHub." - GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." - GitHub { owner, repo } -> pure { owner, repo } + Log.notice $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -451,62 +475,71 @@ publish source payload = do ] Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." 
-          let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest
-          Comment.comment $ Array.fold
-            [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:\n"
-            , "```json"
+          let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location payload.ref legacyManifest
+          Log.notice $ Array.fold
+            [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:"
+            , "\n```json\n"
             , printJson Manifest.codec manifest
-            , "```"
+            , "\n```\n"
             ]
           pure manifest

   -- We trust the manifest for any changes to the 'owners' field, but for all
   -- other fields we trust the registry metadata.
-  let metadata = existingMetadata { owners = manifest.owners }
-  unless (Operation.Validation.nameMatches (Manifest manifest) payload) do
+  let metadata = existingMetadata { owners = receivedManifest.owners }
+  unless (Operation.Validation.nameMatches (Manifest receivedManifest) payload) do
     Except.throw $ Array.fold
       [ "The manifest file specifies a package name ("
-      , PackageName.print manifest.name
+      , PackageName.print receivedManifest.name
       , ") that differs from the package name submitted to the API ("
       , PackageName.print payload.name
       , "). The manifest and API request must match."
       ]

-  unless (Operation.Validation.locationMatches (Manifest manifest) (Metadata metadata)) do
-    Except.throw $ Array.fold
-      [ "The manifest file specifies a location ("
-      , stringifyJson Location.codec manifest.location
-      , ") that differs from the location in the registry metadata ("
-      , stringifyJson Location.codec metadata.location
-      , "). If you would like to change the location of your package you should "
-      , "submit a transfer operation."
-      ]
+  unless (Operation.Validation.locationMatches (Manifest receivedManifest) (Metadata metadata)) do
+    if isJust maybeLegacyIndex then
+      -- The legacy importer is sometimes run on older packages, some of which have been transferred. Since
+      -- package metadata only records the latest location, this can cause a problem: the manifest reports
+      -- the location at the time of publishing, but the metadata reports the current location.
+      Log.warn $ Array.fold
+        [ "Legacy mode: the manifest location differs from the existing metadata. This indicates a package that was "
+        , "transferred from a previous location. Ignoring location match validation..."
+        ]
+    else
+      Except.throw $ Array.fold
+        [ "The manifest file specifies a location ("
+        , stringifyJson Location.codec receivedManifest.location
+        , ") that differs from the location in the registry metadata ("
+        , stringifyJson Location.codec metadata.location
+        , "). If you would like to change the location of your package you should "
+        , "submit a transfer operation."
+        ]

-  when (Operation.Validation.isMetadataPackage (Manifest manifest)) do
+  when (Operation.Validation.isMetadataPackage (Manifest receivedManifest)) do
     Except.throw "The `metadata` package cannot be uploaded to the registry because it is a protected package."
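The owners-only trust rule above amounts to a single record update. A sketch with hypothetical, simplified row types (the real Metadata and Manifest records carry many more fields):

```purescript
import Data.Maybe (Maybe)

-- Hypothetical stand-in: 'Owners' approximates the real owners field type.
type Owners = Maybe (Array String)

-- Only 'owners' flows from the submitted manifest into the stored metadata;
-- every other field keeps its registry value.
mergeTrusted :: forall r s. { owners :: Owners | r } -> { owners :: Owners | s } -> { owners :: Owners | r }
mergeTrusted existingMetadata receivedManifest =
  existingMetadata { owners = receivedManifest.owners }
```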
- for_ (Operation.Validation.isNotUnpublished (Manifest manifest) (Metadata metadata)) \info -> do + for_ (Operation.Validation.isNotUnpublished (Manifest receivedManifest) (Metadata metadata)) \info -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that has been unpublished: " <> Version.print manifest.version + [ "You tried to upload a version that has been unpublished: " <> Version.print receivedManifest.version , "" , "```json" , printJson Metadata.unpublishedMetadataCodec info , "```" ] - case Operation.Validation.isNotPublished (Manifest manifest) (Metadata metadata) of + case Operation.Validation.isNotPublished (Manifest receivedManifest) (Metadata metadata) of -- If the package has been published already, then we check whether the published -- version has made it to Pursuit or not. If it has, then we terminate here. If -- it hasn't then we publish to Pursuit and then terminate. Just info -> do - published <- Pursuit.getPublishedVersions manifest.name >>= case _ of + published <- Pursuit.getPublishedVersions receivedManifest.name >>= case _ of Left error -> Except.throw error Right versions -> pure versions - case Map.lookup manifest.version published of + case Map.lookup receivedManifest.version published of Just url -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that already exists: " <> Version.print manifest.version + [ "You tried to upload a version that already exists: " <> Version.print receivedManifest.version , "" , "Its metadata is:" , "```json" @@ -517,300 +550,281 @@ publish source payload = do , url ] + Nothing | payload.compiler < Purs.minPursuitPublish -> do + Log.notice $ Array.fold + [ "This version has already been published to the registry, but the docs have not been " + , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " + , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish + , ". Please try with a later compiler." + ] + pure Nothing + Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - compilationResult <- compilePackage { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } + compilerIndex <- MatrixBuilder.readCompilerIndex + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + let installedResolutions = Path.concat [ tmp, ".registry" ] + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just downloadedPackage + } case compilationResult of - Left error -> do + Left compileFailure -> do + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." 
- Right dependenciesDir -> do + Right _ -> do Log.debug "Uploading to Pursuit" -- While we have created a manifest from the package source, we -- still need to ensure a purs.json file exists for 'purs publish'. unless hadPursJson do - existingManifest <- ManifestIndex.readManifest manifest.name manifest.version + existingManifest <- ManifestIndex.readManifest receivedManifest.name receivedManifest.version case existingManifest of Nothing -> Except.throw "Version was previously published, but we could not find a purs.json file in the package source, and no existing manifest was found in the registry." Just existing -> Run.liftAff $ writeJsonFile Manifest.codec packagePursJson existing - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + Left publishErr -> Except.throw publishErr + Right _ -> do + FS.Extra.remove tmp + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. - Nothing -> - -- Now that we've verified the package we can write the manifest to the source - -- directory and then publish it. - if hadPursJson then do - -- No need to verify the generated manifest because nothing was generated, - -- and no need to write a file (it's already in the package source.) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - - else if hasSpagoYaml then do - -- We need to write the generated purs.json file, but because spago-next - -- already does unused dependency checks and supports explicit test-only - -- dependencies we can skip those checks. - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + Nothing -> do + Log.info "Verifying the package build plan..." + compilerIndex <- MatrixBuilder.readCompilerIndex + validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + + Log.notice "Verifying unused and/or missing dependencies..." + + -- First we install the resolutions and call 'purs graph' to adjust the + -- manifest as needed, but we defer compilation until after this check + -- in case the package manifest and resolutions are adjusted. + let installedResolutions = Path.concat [ tmp, ".registry" ] + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions + + let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] + let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] + let pursGraph = Purs.Graph { globs: [ srcGlobs, depGlobs ] } + + -- We need to use the minimum compiler version that supports 'purs graph'. + let pursGraphCompiler = if payload.compiler >= Purs.minPursGraph then payload.compiler else Purs.minPursGraph + + -- In this step we run 'purs graph' to get a graph of the package source + -- and installed dependencies and use that to determine if the manifest + -- contains any unused or missing dependencies. If it does and is a legacy + -- manifest then we fix it and return the result. 
If it does and is a modern
+          -- manifest (spago.yaml, purs.json, etc.) then we reject it. If it doesn't
+          -- then we simply return the manifest and resolutions we already had.
+          Tuple manifest resolutions <- Run.liftAff (Purs.callCompiler { command: pursGraph, version: Just pursGraphCompiler, cwd: Nothing }) >>= case _ of
+            Left err -> case err of
+              UnknownError str -> Except.throw str
+              MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print pursGraphCompiler
+              CompilationError errs -> do
+                Log.warn $ Array.fold
+                  [ "Failed to discover unused dependencies because purs graph failed:\n"
+                  , Purs.printCompilerErrors errs
+                  ]
+                -- The purs graph command will fail if the source code uses syntax
+                -- before the oldest usable purs graph compiler (ie. 0.14.0). In
+                -- this case we simply accept the dependencies as-is, even though
+                -- they could technically violate Registry rules around missing and
+                -- unused dependencies. This only affects old packages and we know
+                -- they compile, so we've decided it's an acceptable exception.
+                pure $ Tuple (Manifest receivedManifest) validatedResolutions
+            Right output -> case JSON.parse output of
+              Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr
+              Right json -> case CJ.decode PursGraph.pursGraphCodec json of
+                Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr
+                Right graph -> do
+                  Log.debug "Got a valid graph of source and dependencies."
+                  let
+                    pathParser path = map _.name $ case String.stripPrefix (String.Pattern installedResolutions) path of
+                      Just trimmed -> parseModulePath trimmed
+                      Nothing -> case String.stripPrefix (String.Pattern downloadedPackage) path of
+                        Just _ -> Right { name: receivedManifest.name, version: receivedManifest.version }
+                        Nothing -> Left $ "Failed to parse module path " <> path <> " because it is not in the package source or installed dependencies."
+
+                  case Operation.Validation.noTransitiveOrMissingDeps (Manifest receivedManifest) graph pathParser of
+                    -- Association failures should always throw
+                    Left (Left assocErrors) ->
+                      Except.throw $ Array.fold
+                        [ "Failed to validate unused / missing dependencies because modules could not be associated with package names:"
+                        , flip NonEmptyArray.foldMap1 assocErrors \{ error, module: ModuleName moduleName, path } ->
+                            "\n - " <> moduleName <> " (" <> path <> "): " <> error
+                        ]
+
+                    -- FIXME: For now we attempt to fix packages if a legacy index
+                    -- is provided (ie. the publish is via the importer) but we
+                    -- should at some point make this a hard error.
+                    Left (Right depError) -> case maybeLegacyIndex of
+                      Nothing ->
+                        Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError
+                      Just legacyIndex -> do
+                        Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError
+                        conformLegacyManifest (Manifest receivedManifest) payload.compiler compilerIndex legacyIndex depError
+
+                    -- If the check passes then we can simply return the manifest and
+                    -- resolutions.
+                    Right _ -> pure $ Tuple (Manifest receivedManifest) validatedResolutions
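To make the module-to-package association concrete, here is how the `pathParser` defined above is intended to behave on a few illustrative inputs (all paths, names, and versions below are hypothetical):

```purescript
-- Assuming installedResolutions = "/tmp/pkg/.registry" and the package being
-- published is "my-package", checked out at downloadedPackage = "/tmp/pkg/source":
--
--   pathParser "/tmp/pkg/.registry/prelude-6.0.1/src/Prelude.purs"
--     -- prefix stripped; parseModulePath yields the dependency name "prelude"
--
--   pathParser "/tmp/pkg/source/src/Main.purs"
--     -- falls through to the package's own name and version
--
--   pathParser "/elsewhere/Main.purs"
--     -- Left: the module is in neither the source nor the installed dependencies
```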
+          -- Now that we've verified the package we can write the manifest to the
+          -- source directory.
+          Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest
+
+          Log.info "Creating packaging directory"
+          let packageDirname = PackageName.print receivedManifest.name <> "-" <> Version.print receivedManifest.version
+          let packageSource = Path.concat [ tmp, packageDirname ]
+          FS.Extra.ensureDirectory packageSource
+          -- We copy over all files that are always included (ie. src dir, purs.json file),
+          -- and any files the user asked for via the 'files' key, and remove all files
+          -- that should never be included (even if the user asked for them).
+          copyPackageSourceFiles { includeFiles: receivedManifest.includeFiles, excludeFiles: receivedManifest.excludeFiles, source: downloadedPackage, destination: packageSource }
+          removeIgnoredTarballFiles packageSource
+
+          -- Now that we have the package source contents we can verify we can compile
+          -- the package with exactly what is going to be uploaded.
+          Log.notice $ Array.fold
+            [ "Verifying package compiles using compiler "
+            , Version.print payload.compiler
+            , " and resolutions:\n"
+            , "```json\n"
+            , printJson (Internal.Codec.packageMap Version.codec) resolutions
+            , "\n```"
+            ]

-          -- Otherwise this is a legacy package, generated from a combination of bower,
-          -- spago.dhall, and package set files, so we need to verify the generated
-          -- manifest does not contain unused dependencies before writing it.
+          -- We clear the installation directory so that no old installed resolutions
+          -- stick around.
+          Run.liftAff $ FS.Extra.remove installedResolutions
+          MatrixBuilder.installBuildPlan resolutions installedResolutions
+          compilationResult <- Run.liftAff $ Purs.callCompiler
+            { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] }
+            , version: Just payload.compiler
+            , cwd: Just tmp
+            }
+
+          case compilationResult of
+            Left compileFailure -> do
+              let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure
+              Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error
+            Right _ -> pure unit
+
+          Log.notice "Package source is verified! Packaging tarball and uploading to the storage backend..."
+          let tarballName = packageDirname <> ".tar.gz"
+          let tarballPath = Path.concat [ tmp, tarballName ]
+          Tar.create { cwd: tmp, folderName: packageDirname }
+
+          Log.info "Tarball created. Verifying its size..."
+          bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath
+          for_ (Operation.Validation.validateTarballSize bytes) case _ of
+            Operation.Validation.ExceedsMaximum maxPackageBytes ->
+              Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes."
+            Operation.Validation.WarnPackageSize maxWarnBytes ->
+              Log.notice $ "WARNING: Package tarball is " <> show bytes <> " bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes."
+
+          -- If a package has under ~30 bytes it's almost guaranteed that packaging the
+          -- tarball failed. This can happen if the system running the API has a non-
+          -- GNU tar installed, for example.
+          let minBytes = 30.0
+          when (bytes < minBytes) do
+            Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged."
+
+          hash <- Sha256.hashFile tarballPath
+          Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable."
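The size validation above layers the structured `validateTarballSize` thresholds over a hard ~30-byte floor. A self-contained sketch of just the floor check (the helper name is hypothetical; the 30-byte figure comes from the comment in the diff):

```purescript
import Prelude

import Data.Either (Either(..))

-- A sketch of the hard floor only: tarballs under ~30 bytes almost always
-- mean packaging failed, for example when a non-GNU tar produced an empty
-- archive.
checkMinimumTarballBytes :: Number -> Either String Unit
checkMinimumTarballBytes bytes
  | bytes < 30.0 = Left $ "Package tarball is only " <> show bytes <> " bytes, which indicates the source was not correctly packaged."
  | otherwise = Right unit
```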
+ Log.info $ "Tarball hash: " <> Sha256.print hash + + Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath + Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." + let newPublishedVersion = { hash, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } + let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) + Log.notice "Successfully uploaded package to the registry! 🎉 🚀" + + -- We write to the registry index if possible. If this fails, the packaging + -- team should manually insert the entry. + Log.debug "Adding the new version to the registry index" + Registry.writeManifest manifest + + Registry.mirrorLegacyRegistry payload.name newMetadata.location + Log.notice "Mirrored registry operation to the legacy registry!" + + Log.debug "Uploading package documentation to Pursuit" + if payload.compiler >= Purs.minPursuitPublish then + -- TODO: We must use the 'downloadedPackage' instead of 'packageSource' + -- because Pursuit requires a git repository, and our tarball directory + -- is not one. This should be changed once Pursuit no longer needs git. + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of + Left publishErr -> do + Log.error publishErr + Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr + Right _ -> + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Log.debug "Pruning unused dependencies from legacy package manifest..." - - Log.debug "Solving manifest to get all transitive dependencies." - resolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Installing dependencies." - tmpDepsDir <- Tmp.mkTmpDir - installBuildPlan resolutions tmpDepsDir - - Log.debug "Discovering used dependencies from source." - let srcGlobs = Path.concat [ packageDirectory, "src", "**", "*.purs" ] - let depGlobs = Path.concat [ tmpDepsDir, "*", "src", "**", "*.purs" ] - let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } - -- We need to use the minimum compiler version that supports 'purs graph' - let minGraphCompiler = unsafeFromRight (Version.parse "0.13.8") - let callCompilerVersion = if payload.compiler >= minGraphCompiler then payload.compiler else minGraphCompiler - Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of - Left err -> do - let prefix = "Failed to discover unused dependencies because purs graph failed: " - Log.error $ prefix <> case err of - UnknownError str -> str - CompilationError errs -> Purs.printCompilerErrors errs - MissingCompiler -> "missing compiler " <> Version.print payload.compiler - -- We allow legacy packages through even if we couldn't run purs graph, - -- because we can't be sure we chose the correct compiler version. - if source == LegacyPackage then - Comment.comment "Failed to prune dependencies for legacy package, continuing anyway..." - else do - Except.throw "purs graph failed; cannot verify unused dependencies." 
- Right output -> case JSON.parse output of - Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr - Right json -> case CJ.decode PursGraph.pursGraphCodec json of - Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CJ.DecodeError.print decodeErr - Right graph -> do - Log.debug "Got a valid graph of source and dependencies. Removing install dir and associating discovered modules with their packages..." - FS.Extra.remove tmpDepsDir - - let - -- We need access to a graph that _doesn't_ include the package - -- source, because we only care about dependencies of the package. - noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ } - - case PursGraph.associateModules pathParser noSrcGraph of - Left errs -> - Except.throw $ String.joinWith "\n" - [ "Failed to associate modules with packages while finding unused dependencies:" - , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } -> - " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n" - ] - Right modulePackageMap -> do - Log.debug "Associated modules with their package names. Finding all modules used in package source..." - -- The modules used in the package source code are any that have - -- a path beginning with the package source directory. We only - -- care about dependents of these modules. - let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - Log.debug "Found all modules used in package source. Finding all modules used by those modules..." - let allReachableModules = PursGraph.allDependenciesOf sourceModules graph - - -- Then we can associate each reachable module with its package - -- name to get the full set of used package names. - let allUsedPackages = Set.mapMaybe (flip Map.lookup modulePackageMap) allReachableModules - - -- Finally, we can use this to find the unused dependencies. - Log.debug "Found all packages reachable by the project source code. Determining unused dependencies..." - case Operation.Validation.getUnusedDependencies (Manifest manifest) resolutions allUsedPackages of - Nothing -> do - Log.debug "No unused dependencies! This manifest is good to go." - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { source - , manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - Just isUnused -> do - let printed = String.joinWith ", " (PackageName.print <$> NonEmptySet.toUnfoldable isUnused) - Log.debug $ "Found unused dependencies: " <> printed - Comment.comment $ "Generated legacy manifest contains unused dependencies which will be removed: " <> printed - let verified = manifest { dependencies = Map.filterKeys (not <<< flip NonEmptySet.member isUnused) manifest.dependencies } - Log.debug "Writing updated, pruned manifest." 
- Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified) - publishRegistry - { source - , manifest: Manifest verified - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - -type PublishRegistry = - { source :: PackageSource - , manifest :: Manifest - , metadata :: Metadata - , payload :: PublishData - , publishedTime :: DateTime - , tmp :: FilePath - , packageDirectory :: FilePath - } + Log.notice $ Array.fold + [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" + , Version.print payload.compiler + , "). If you want to publish documentation, please try again with a later compiler." + ] --- A private helper function for publishing to the registry. Separated out of --- the main 'publish' function because we sometimes use the publish function to --- publish to Pursuit only (in the case the package has been pushed to the --- registry, but docs have not been uploaded). -publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit -publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do - Log.debug "Verifying the package build plan..." - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - - Log.debug "Verifying that the package dependencies are all registered..." - unregisteredRef <- Run.liftEffect $ Ref.new Map.empty - forWithIndex_ verifiedResolutions \name version -> do - Registry.readMetadata name >>= case _ of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just (Metadata { published }) -> case Map.lookup version published of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just _ -> pure unit - unregistered <- Run.liftEffect $ Ref.read unregisteredRef - unless (Map.isEmpty unregistered) do - Except.throw $ Array.fold - [ "Cannot register this package because it has unregistered dependencies: " - , Array.foldMap (\(Tuple name version) -> "\n - " <> formatPackageVersion name version) (Map.toUnfoldable unregistered) - ] + -- Note: this only runs for the Legacy Importer. In daily circumstances (i.e. + -- when running the server) this will be taken care of by followup jobs invoking + -- the MatrixBuilder for each compiler version + for_ maybeLegacyIndex \_idx -> do + Log.notice "Determining all valid compiler versions for this package..." + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - Log.info "Packaging tarball for upload..." - let newDir = PackageName.print manifest.name <> "-" <> Version.print manifest.version - let packageSourceDir = Path.concat [ tmp, newDir ] - Log.debug $ "Creating packaging directory at " <> packageSourceDir - FS.Extra.ensureDirectory packageSourceDir - -- We copy over all files that are always included (ie. src dir, purs.json file), - -- and any files the user asked for via the 'files' key, and remove all files - -- that should never be included (even if the user asked for them). 
- copyPackageSourceFiles { includeFiles: manifest.includeFiles, excludeFiles: manifest.excludeFiles, source: packageDirectory, destination: packageSourceDir } - Log.debug "Removing always-ignored files from the packaging directory." - removeIgnoredTarballFiles packageSourceDir - - let tarballName = newDir <> ".tar.gz" - let tarballPath = Path.concat [ tmp, tarballName ] - Tar.create { cwd: tmp, folderName: newDir } - - Log.info "Tarball created. Verifying its size..." - bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath - for_ (Operation.Validation.validateTarballSize bytes) case _ of - Operation.Validation.ExceedsMaximum maxPackageBytes -> - Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." - Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." - - -- If a package has under ~30 bytes it's about guaranteed that packaging the - -- tarball failed. This can happen if the system running the API has a non- - -- GNU tar installed, for example. - let minBytes = 30.0 - when (bytes < minBytes) do - Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." - - hash <- Sha256.hashFile tarballPath - Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." - Log.info $ "Tarball hash: " <> Sha256.print hash - - -- Now that we have the package source contents we can verify we can compile - -- the package. We skip failures when the package is a legacy package. - Log.info "Verifying package compiles (this may take a while)..." - compilationResult <- compilePackage - { packageSourceDir: packageDirectory - , compiler: payload.compiler - , resolutions: verifiedResolutions - } + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - case compilationResult of - Left error - -- We allow legacy packages to fail compilation because we do not - -- necessarily know what compiler to use with them. - | source == LegacyPackage -> do - Log.debug error - Log.warn "Failed to compile, but continuing because this package is a legacy package." - | otherwise -> - Except.throw error - Right _ -> - pure unit - - Comment.comment "Package is verified! Uploading it to the storage backend..." - Storage.upload manifest.name manifest.version tarballPath - Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." - let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, publishedTime, bytes } metadata.published } - Registry.writeMetadata manifest.name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" - - -- After a package has been uploaded we add it to the registry index, we - -- upload its documentation to Pursuit, and we can now process it for package - -- sets when the next batch goes out. - - -- We write to the registry index if possible. If this fails, the packaging - -- team should manually insert the entry. 
- Registry.writeManifest (Manifest manifest) - - when (source == CurrentPackage) $ case compilationResult of - Left error -> do - Log.error $ "Compilation failed, cannot upload to pursuit: " <> error - Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right dependenciesDir -> do - Log.debug "Uploading to Pursuit" - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } - - Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.notice "Wrote completed metadata to the registry!" + + FS.Extra.remove tmp + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the -- | manifest. If not, we solve their manifest to produce a build plan. -verifyResolutions :: forall r. Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + EXCEPT String + r) (Map PackageName Version) -verifyResolutions manifest resolutions = do +verifyResolutions :: forall r. CompilerIndex -> Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) +verifyResolutions compilerIndex compiler manifest resolutions = do Log.debug "Check the submitted build plan matches the manifest" - manifestIndex <- Registry.readAllManifests case resolutions of - Nothing -> case Operation.Validation.validateDependenciesSolve manifest manifestIndex of - Left errors -> do - let - printedError = String.joinWith "\n" - [ "Could not produce valid dependencies for manifest." - , "```" - , errors # foldMapWithIndex \index error -> String.joinWith "\n" - [ "[Error " <> show (index + 1) <> "]" - , Solver.printSolverError error - ] - , "```" - ] - Except.throw printedError - Right solved -> pure solved + Nothing -> do + case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of + Left errors -> do + let + printedError = String.joinWith "\n" + [ "Could not produce valid dependencies for manifest." + , "```" + , errors # foldMapWithIndex \index error -> String.joinWith "\n" + [ "[Error " <> show (index + 1) <> "]" + , Solver.printSolverError error + ] + , "```" + ] + Except.throw printedError + Right solved -> pure solved Just provided -> do validateResolutions manifest provided pure provided @@ -860,85 +874,66 @@ validateResolutions manifest resolutions = do , incorrectVersionsError ] -type CompilePackage = - { packageSourceDir :: FilePath - , compiler :: Version - , resolutions :: Map PackageName Version +type FindAllCompilersResult = + { failed :: Map Version (Either SolverErrors CompilerFailure) + , succeeded :: Set Version } -compilePackage :: forall r. 
CompilePackage -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either String FilePath) -compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do - tmp <- Tmp.mkTmpDir - let dependenciesDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory dependenciesDir - - let - globs = - if Map.isEmpty resolutions then - [ "src/**/*.purs" ] - else - [ "src/**/*.purs" - , Path.concat [ dependenciesDir, "*/src/**/*.purs" ] - ] - - Log.debug "Installing build plan..." - installBuildPlan resolutions dependenciesDir - - Log.debug "Compiling..." - compilerOutput <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs } - , version: Just compiler - , cwd: Just packageSourceDir - } - - case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed for your package due to a compiler error:" - , "```" - , err - , "```" - ] - Right _ -> pure dependenciesDir - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. package-name-major.minor.patch - filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" - filepath = Path.concat [ dependenciesDir, filename ] - Storage.download name version filepath - Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of - Left error -> do - Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error - Except.throw "Failed to unpack dependency tarball, cannot continue." - Right _ -> - Log.debug $ "Unpacked " <> filename - Run.liftAff $ FS.Aff.unlink filepath - Log.debug $ "Installed " <> formatPackageVersion name version +-- | Find all compilers that can compile the package source code and installed +-- | resolutions from the given array of compilers. +findAllCompilers + :: forall r + . 
{ source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version }
+  -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult
+findAllCompilers { source, manifest, compilers } = do
+  compilerIndex <- MatrixBuilder.readCompilerIndex
+  checkedCompilers <- for compilers \target -> do
+    Log.debug $ "Trying compiler " <> Version.print target
+    case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of
+      Left solverErrors -> do
+        Log.info $ "Failed to solve with compiler " <> Version.print target
+        pure $ Left $ Tuple target (Left solverErrors)
+      Right (Tuple compiler resolutions) -> do
+        Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions
+        when (compiler /= target) do
+          Except.throw $ Array.fold
+            [ "Produced a compiler-derived build plan that selects a compiler ("
+            , Version.print compiler
+            , ") that differs from the target compiler ("
+            , Version.print target
+            , ")."
+            ]
+        Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of
+          Nothing -> do
+            Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target
+            workdir <- Tmp.mkTmpDir
+            let installed = Path.concat [ workdir, ".registry" ]
+            FS.Extra.ensureDirectory installed
+            MatrixBuilder.installBuildPlan resolutions installed
+            result <- Run.liftAff $ Purs.callCompiler
+              { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] }
+              , version: Just target
+              , cwd: Just workdir
+              }
+            FS.Extra.remove workdir
+            case result of
+              Left err -> do
+                Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err
+              Right _ -> do
+                Log.debug $ "Compilation succeeded with compiler " <> Version.print target
+            Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result }
+            pure $ bimap (Tuple target <<< Right) (const target) result
+          Just { result } ->
+            pure $ bimap (Tuple target <<< Right) (const target) result
+
+  let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers
+  pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success }

 -- | Parse the name and version from a path to a module installed in the standard
--- | form: '<dir>/<package-name>-<version>/...'
-parseInstalledModulePath :: { prefix :: FilePath, path :: FilePath } -> Either String { name :: PackageName, version :: Version }
-parseInstalledModulePath { prefix, path } = do
+-- | form: '<package-name>-<version>/...'
+parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version }
+parseModulePath path = do
   packageVersion <- lmap Parsing.parseErrorMessage $ Parsing.runParser path do
-    _ <- Parsing.String.string prefix
     _ <- Parsing.Combinators.optional (Parsing.Combinators.try (Parsing.String.string Path.sep))
     Tuple packageVersionChars _ <- Parsing.Combinators.Array.manyTill_ Parsing.String.anyChar (Parsing.String.string Path.sep)
     pure $ String.CodeUnits.fromCharArray (Array.fromFoldable packageVersionChars)
@@ -955,38 +950,40 @@ parseInstalledModulePath { prefix, path } = do
   pure { name, version }

 type PublishToPursuit =
-  { packageSourceDir :: FilePath
-  , dependenciesDir :: FilePath
+  { source :: FilePath
   , compiler :: Version
   , resolutions :: Map PackageName Version
+  , installedResolutions :: FilePath
   }

 -- | Publishes a package to Pursuit.
 -- |
 -- | ASSUMPTIONS: This function should not be run on legacy packages or on
--- | packages where the `purescript-` prefix is still present.
+-- | packages where the `purescript-` prefix is still present. Cannot be used
+-- | on packages prior to 'Purs.minPursuitPublish'.
 publishToPursuit
   :: forall r
    . PublishToPursuit
-  -> Run (PURSUIT + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) Unit
-publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = do
+  -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit)
+publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do
   Log.debug "Generating a resolutions file"
   tmp <- Tmp.mkTmpDir

+  when (compiler < Purs.minPursuitPublish) do
+    Except.throw $ "Cannot publish to Pursuit because this package was published with a pre-0.14.7 compiler (" <> Version.print compiler <> "). If you want to publish documentation, please try again with a later compiler."
+
   let
-    resolvedPaths = formatPursuitResolutions { resolutions, dependenciesDir }
+    resolvedPaths = formatPursuitResolutions { resolutions, installedResolutions }
     resolutionsFilePath = Path.concat [ tmp, "resolutions.json" ]

   Run.liftAff $ writeJsonFile pursuitResolutionsCodec resolutionsFilePath resolvedPaths

   -- The 'purs publish' command requires a clean working tree, but it isn't
-  -- guaranteed that packages have an adequate .gitignore file; compilers prior
-  -- to 0.14.7 did not ignore the purs.json file when publishing. So we stash
-  -- changes made during the publishing process (ie. inclusion of a new purs.json
-  -- file and an output directory from compilation) before calling purs publish.
+  -- guaranteed that packages have an adequate .gitignore file. So we stash
+  -- changes made during the publishing process before calling 'purs publish'.
   -- https://git-scm.com/docs/gitignore
   Log.debug "Adding output and purs.json to local git excludes..."
- Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ packageSourceDir, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) + Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ source, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) -- NOTE: The compatibility version of purs publish appends 'purescript-' to the -- package name in the manifest file: @@ -997,27 +994,12 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = compilerOutput <- Run.liftAff $ Purs.callCompiler { command: Purs.Publish { resolutions: resolutionsFilePath } , version: Just compiler - , cwd: Just packageSourceDir + , cwd: Just source } publishJson <- case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Publishing failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed for your package due to an unknown compiler error:" - , "```" - , err - , "```" - ] + Left error -> + Except.throw $ MatrixBuilder.printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1039,7 +1021,7 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = Left error -> Except.throw $ "Could not publish your package to Pursuit because an error was encountered (cc: @purescript/packaging): " <> error Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + FS.Extra.remove tmp type PursuitResolutions = Map RawPackageName { version :: Version, path :: FilePath } @@ -1050,13 +1032,13 @@ pursuitResolutionsCodec = rawPackageNameMapCodec $ CJ.named "Resolution" $ CJ.Re -- -- Note: This interfaces with Pursuit, and therefore we must add purescript- -- prefixes to all package names for compatibility with the Bower naming format. 
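Concretely, `formatPursuitResolutions` (defined just below) turns each resolution into a Bower-compatible entry by prefixing the package name. An illustrative mapping for a hypothetical resolution of prelude at 6.0.1:

```purescript
-- Illustrative entry (hypothetical values): a resolution of prelude@6.0.1
-- installed under installedResolutions becomes a Bower-style key:
--
--   RawPackageName "purescript-prelude" ->
--     { version: 6.0.1, path: installedResolutions <> "/prelude-6.0.1" }
```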
-formatPursuitResolutions :: { resolutions :: Map PackageName Version, dependenciesDir :: FilePath } -> PursuitResolutions -formatPursuitResolutions { resolutions, dependenciesDir } = +formatPursuitResolutions :: { resolutions :: Map PackageName Version, installedResolutions :: FilePath } -> PursuitResolutions +formatPursuitResolutions { resolutions, installedResolutions } = Map.fromFoldable do Tuple name version <- Map.toUnfoldable resolutions let bowerPackageName = RawPackageName ("purescript-" <> PackageName.print name) - packagePath = Path.concat [ dependenciesDir, PackageName.print name <> "-" <> Version.print version ] + packagePath = Path.concat [ installedResolutions, PackageName.print name <> "-" <> Version.print version ] [ Tuple bowerPackageName { path: packagePath, version } ] -- | Copy files from the package source directory to the destination directory @@ -1163,3 +1145,176 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } + +type AdjustManifest = + { source :: FilePath + , compiler :: Version + , manifest :: Manifest + , legacyIndex :: Maybe DependencyIndex + , currentIndex :: CompilerIndex + , resolutions :: Maybe (Map PackageName Version) + } + +-- | Conform a legacy manifest to the Registry requirements for dependencies, +-- | ie. that all direct imports are listed (no transitive dependencies) and no +-- | unused dependencies are listed. +conformLegacyManifest + :: forall r + . Manifest + -> Version + -> CompilerIndex + -> Solver.TransitivizedRegistry + -> ValidateDepsError + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) +conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do + let + manifestRequired :: SemigroupMap PackageName Intersection + manifestRequired = Solver.initializeRequired manifest.dependencies + + legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of + Left unsolvableLegacy -> do + Log.warn $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy + case Solver.solveWithCompiler (Range.exact compiler) currentIndex manifest.dependencies of + Left unsolvableCurrent -> Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableCurrent + Right (Tuple _ solved) -> do + Log.debug $ "Got current resolutions as a fallback to unsolvable legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + Right solved -> do + Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + + let + legacyTransitive :: Map PackageName Range + legacyTransitive = + Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps + $ Solver.solveSeed + $ Solver.withReachable { registry: legacyRegistry, required: manifestRequired } + + Log.debug $ "Got transitive solution:\n" <> printJson (Internal.Codec.packageMap Range.codec) legacyTransitive + + let + associateMissing :: Array PackageName -> Map PackageName Range + associateMissing packages = do + -- First we look up the package in the produced transitive ranges, as those + -- are the most likely to be correct. 
+      let associateTransitive pkg = maybe (Left pkg) (\range -> Right (Tuple pkg range)) (Map.lookup pkg legacyTransitive)
+      let associated = partitionEithers (map associateTransitive packages)
+      let foundFromTransitive = Map.fromFoldable associated.success
+
+      -- If not found, we search for the ranges described for this dependency
+      -- in the manifests of the packages in the resolutions.
+      let
+        resolutionRanges :: Map PackageName Range
+        resolutionRanges = do
+          let
+            foldFn name prev version = fromMaybe prev do
+              versions <- Map.lookup name (un SemigroupMap legacyRegistry)
+              deps <- Map.lookup version (un SemigroupMap versions)
+              let deps' = Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) (un SemigroupMap deps)
+              pure $ Map.unionWith (\l r -> fromMaybe l (Range.intersect l r)) prev deps'
+
+          foldlWithIndex foldFn Map.empty legacyResolutions
+
+        foundFromResolutions :: Map PackageName Range
+        foundFromResolutions = Map.fromFoldable do
+          associated.fail # Array.mapMaybe \pkg -> map (Tuple pkg) (Map.lookup pkg resolutionRanges)
+
+      Map.union foundFromTransitive foundFromResolutions
+
+    fixUnused names (Manifest m) = do
+      let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names
+      let fixedDependencies = Map.difference m.dependencies unused
+      case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of
+        Left unsolvable -> do
+          Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies
+          Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable
+        Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved
+
+    fixMissing names (Manifest m) = do
+      let fixedDependencies = Map.union m.dependencies (associateMissing (NonEmptySet.toUnfoldable names))
+      -- Once we've fixed the missing dependencies we need to be sure we can still
+      -- produce a viable solution with the current index.
+      case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of
+        Left unsolvable -> do
+          Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies
+          Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable
+        Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved
+
+    previousDepsMessage = Array.fold
+      [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add directly imported ones. 
" + , "Your dependency list was:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "\n```\n" + ] + + newDepsMessage (Manifest new) = Array.fold + [ "\nYour new dependency list is:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) new.dependencies + , "\n```\n" + ] + + case problem of + UnusedDependencies names -> do + Tuple deps resolutions <- fixUnused names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Log.notice $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + MissingDependencies names -> do + Tuple deps resolutions <- fixMissing names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Log.notice $ Array.fold + [ previousDepsMessage + , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + UnusedAndMissing { missing, unused } -> do + let unused' = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) unused + let trimmed = Map.difference manifest.dependencies unused' + Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) + let newManifest = Manifest (manifest { dependencies = newDeps }) + Log.notice $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" + , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest newResolutions + +type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) + +_compilerCache :: Proxy "compilerCache" +_compilerCache = Proxy + +data CompilerCache :: (Type -> Type -> Type) -> Type -> Type +data CompilerCache c a = Compilation Manifest (Map PackageName Version) Version (c { target :: Version, result :: Either CompilerFailure Unit } a) + +instance Functor2 c => Functor (CompilerCache c) where + map k (Compilation manifest resolutions compiler a) = Compilation manifest resolutions compiler (map2 k a) + +instance FsEncodable CompilerCache where + encodeFs = case _ of + Compilation (Manifest manifest) resolutions compiler next -> do + let + baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" <> Version.print compiler <> "__" + hashKey = do + let resolutions' = foldlWithIndex (\name prev version -> formatPackageVersion name version <> prev) "" resolutions + unsafePerformEffect $ Sha256.hashString resolutions' + cacheKey = baseKey <> Sha256.print hashKey + + let + codec = CJ.named "FindAllCompilersResult" $ CJ.Record.object + { target: Version.codec + , result: CJ.Common.either compilerFailureCodec CJ.null + } + + Exists.mkExists $ Cache.AsJson cacheKey codec next diff --git a/app/src/App/Auth.purs b/app/src/App/Auth.purs index c8647304f..f9303fea8 100644 --- a/app/src/App/Auth.purs +++ b/app/src/App/Auth.purs @@ -1,6 +1,7 @@ module Registry.App.Auth ( SignAuthenticated , signPayload + , verifyPackageSetPayload , verifyPayload ) where @@ -8,7 +9,7 @@ import Registry.App.Prelude import Data.Array as 
Array import Data.String as String -import Registry.Operation (AuthenticatedData) +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest) import Registry.SSH as SSH -- We take pacchettibotti as an extra owner because pacchettibotti can always @@ -35,3 +36,20 @@ signPayload :: SignAuthenticated -> Either String SSH.Signature signPayload { privateKey, rawPayload } = do private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } pure $ SSH.sign private rawPayload + +-- | Verify a package set update request using pacchettibotti's key. +-- | Returns an error if the signature is invalid or missing. +verifyPackageSetPayload :: Owner -> PackageSetUpdateRequest -> Aff (Either String Unit) +verifyPackageSetPayload pacchettiBotti request = do + case request.signature of + Nothing -> + pure $ Left "Package set update requires a signature for restricted operations." + Just signature -> do + let eitherKey = SSH.parsePublicKey (formatOwner pacchettiBotti) + pure do + key <- eitherKey + unless (SSH.verify key request.rawPayload signature) do + Left "The pacchettibotti signature is not valid for this payload." + where + formatOwner (Owner owner) = + String.joinWith " " [ owner.keytype, owner.public, fromMaybe "id" owner.id ] diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac64c8e65..baf513748 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,11 +111,12 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." ] pure true - Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + Just _files -> do + -- This is a bit noisy, so commenting it out for now. + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." @@ -213,8 +214,8 @@ gitCommit { address: { owner, repo }, committer, commit, message } cwd = Except. -- Git will error if we try to commit without any changes actually staged, -- so the below command lists file paths (--name-only) that have changed -- between the index and current HEAD (--cached), only including files that - -- have been added or modified (--diff-filter=AM). - staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AM" ] \error -> + -- have been added, modified, or deleted (--diff-filter=AMD). + staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AMD" ] \error -> "Failed to check whether any changes are staged " <> inRepoErr error -- If there are no staged files, then we have nothing to commit. 
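Circling back to the new `Auth.verifyPackageSetPayload` above: a hypothetical call site, assuming a decoded `PackageSetUpdateRequest` and pacchettibotti's `Owner` value are already in hand (`Console` here is the standard `Effect.Class.Console`; this is a sketch, not code from the diff):

```purescript
import Prelude

import Data.Either (Either(..))
import Effect.Class.Console as Console
import Registry.App.Auth as Auth

-- Hypothetical call site: 'owner' is pacchettibotti's Owner record and
-- 'request' is a decoded PackageSetUpdateRequest, both assumed in scope.
checkPackageSetRequest owner request =
  Auth.verifyPackageSetPayload owner request >>= case _ of
    Left err -> Console.log $ "Rejected package set update: " <> err
    Right _ -> Console.log "Package set update signature verified."
```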
diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs index 7e8d22c90..e5706e3f1 100644 --- a/app/src/App/CLI/Purs.purs +++ b/app/src/App/CLI/Purs.purs @@ -4,6 +4,7 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record @@ -13,6 +14,16 @@ import Node.ChildProcess.Types (Exit(..)) import Node.Library.Execa as Execa import Registry.Version as Version +-- | The minimum compiler version that supports 'purs graph' +minPursGraph :: Version +minPursGraph = unsafeFromRight (Version.parse "0.14.0") + +minPursuitPublish :: Version +minPursuitPublish = unsafeFromRight (Version.parse "0.14.7") + +minLanguageCSTParser :: Version +minLanguageCSTParser = unsafeFromRight (Version.parse "0.15.0") + -- | Call a specific version of the PureScript compiler callCompiler_ :: { version :: Maybe Version, command :: PursCommand, cwd :: Maybe FilePath } -> Aff Unit callCompiler_ = void <<< callCompiler @@ -23,6 +34,22 @@ data CompilerFailure | MissingCompiler derive instance Eq CompilerFailure +derive instance Ord CompilerFailure + +compilerFailureCodec :: CJ.Codec CompilerFailure +compilerFailureCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError CompilerFailure + decode json = except do + map CompilationError (CJ.decode (CJ.array compilerErrorCodec) json) + <|> map UnknownError (CJ.decode CJ.string json) + <|> map (const MissingCompiler) (CJ.decode CJ.null json) + + encode :: CompilerFailure -> JSON + encode = case _ of + CompilationError errors -> CJ.encode (CJ.array compilerErrorCodec) errors + UnknownError message -> CJ.encode CJ.string message + MissingCompiler -> CJ.encode CJ.null unit type CompilerError = { position :: SourcePosition diff --git a/app/src/App/Effect/Archive.purs b/app/src/App/Effect/Archive.purs new file mode 100644 index 000000000..17ca0675e --- /dev/null +++ b/app/src/App/Effect/Archive.purs @@ -0,0 +1,288 @@ +-- | An effect for fetching packages from the registry-archive. +-- | +-- | The registry-archive stores tarballs for packages whose original GitHub +-- | repositories are no longer available. This effect provides operations to +-- | fetch source code and metadata from that archive. +-- | +-- | This effect can be removed when the legacy importer is no longer in use. +module Registry.App.Effect.Archive + ( ARCHIVE + , Archive(..) + , ArchiveError(..) 
+ , FetchedSource + , _archive + , fetch + , fetchEither + , handle + , handleMock + , interpret + , printArchiveError + , registryArchiveUrl + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as Formatter.DateTime +import Data.Map as Map +import Effect.Aff as Aff +import Effect.Exception as Exception +import Fetch.Retry as Fetch +import JSON as JSON +import JSON.Object as JSON.Object +import Node.Buffer as Buffer +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Constants as Constants +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tar as Foreign.Tar +import Registry.Internal.Format as Internal.Format +import Registry.Metadata (Metadata(..)) +import Registry.PackageName (PackageName) +import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +-- | The base URL for fetching tarballs from the registry archive. +registryArchiveUrl :: String +registryArchiveUrl = "https://raw.githubusercontent.com/purescript/registry-archive/main" + +-- | The result of fetching source code from the registry archive. +type FetchedSource = + { path :: FilePath + , published :: DateTime + } + +-- | Errors that can occur when fetching from the archive. +data ArchiveError + = DownloadFailed PackageName Version String + | ExtractionFailed PackageName Version String + | PublishedTimeNotFound PackageName Version + +printArchiveError :: ArchiveError -> String +printArchiveError = case _ of + DownloadFailed name version reason -> Array.fold + [ "Failed to download " + , formatPackageVersion name version + , " from the registry archive: " + , reason + ] + ExtractionFailed name version reason -> Array.fold + [ "Failed to extract " + , formatPackageVersion name version + , " from the registry archive: " + , reason + ] + PublishedTimeNotFound name version -> Array.fold + [ "Could not find published time for " + , formatPackageVersion name version + ] + +-- | The Archive effect, which describes fetching package tarballs from the +-- | registry-archive repository. +data Archive a = Fetch FilePath PackageName Version (Either ArchiveError FetchedSource -> a) + +derive instance Functor Archive + +type ARCHIVE r = (archive :: Archive | r) + +_archive :: Proxy "archive" +_archive = Proxy + +-- | Fetch a package tarball from the registry archive, extracting it to the +-- | given destination directory. Returns the path to the extracted source and +-- | the published time. +fetch :: forall r. FilePath -> PackageName -> Version -> Run (ARCHIVE + EXCEPT String + r) FetchedSource +fetch destination name version = (Except.rethrow <<< lmap printArchiveError) =<< fetchEither destination name version + +-- | Fetch a package tarball from the registry archive, returning the typed +-- | ArchiveError on failure. +fetchEither :: forall r. 
FilePath -> PackageName -> Version -> Run (ARCHIVE + r) (Either ArchiveError FetchedSource) +fetchEither destination name version = Run.lift _archive (Fetch destination name version identity) + +-- | Run the ARCHIVE effect given a handler. +interpret :: forall r a. (Archive ~> Run r) -> Run (ARCHIVE + r) a -> Run r a +interpret handler = Run.interpret (Run.on _archive handler Run.send) + +-- | Handle the ARCHIVE effect by fetching from the real registry-archive on GitHub. +handle :: forall r a. Archive a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle = case _ of + Fetch destination name version reply -> do + result <- fetchFromArchiveImpl destination name version + pure $ reply result + +-- | Internal implementation that fetches from the registry-archive and looks up +-- | the published time from the remote registry metadata. +fetchFromArchiveImpl + :: forall r + . FilePath + -> PackageName + -> Version + -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either ArchiveError FetchedSource) +fetchFromArchiveImpl destination name version = do + let + nameStr = PackageName.print name + versionStr = Version.print version + tarballName = versionStr <> ".tar.gz" + -- Extract to a subdirectory to avoid path collisions with the packaging + -- directory (which uses the name-version format that archive tarballs + -- also use internally). + extractDir = Path.concat [ destination, "archive" ] + absoluteTarballPath = Path.concat [ extractDir, tarballName ] + archiveUrl = Array.fold + [ registryArchiveUrl + , "/" + , nameStr + , "/" + , versionStr + , ".tar.gz" + ] + + Log.debug $ "Fetching archive tarball from: " <> archiveUrl + FS.Extra.ensureDirectory extractDir + + response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} + + case response of + Cancelled -> + pure $ Left $ DownloadFailed name version "Request was cancelled" + Failed (Fetch.FetchError error) -> do + Log.error $ "HTTP error when fetching archive: " <> Exception.message error + pure $ Left $ DownloadFailed name version (Exception.message error) + Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) + Log.error $ Array.fold + [ "Bad status (" + , show status + , ") when fetching archive with body: " + , bodyString + ] + pure $ Left $ DownloadFailed name version ("HTTP status " <> show status) + Succeeded { arrayBuffer: arrayBufferAff } -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of + Left error -> do + Log.error $ Array.fold + [ "Downloaded archive but failed to write to " + , absoluteTarballPath + , ": " + , Aff.message error + ] + pure $ Left $ DownloadFailed name version "Failed to write tarball to disk" + Right _ -> do + Log.debug $ "Tarball downloaded to " <> absoluteTarballPath + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> + pure $ Left $ ExtractionFailed name version "Tarball has no top-level directory" + Just extractedPath -> do + Log.debug "Extracting archive tarball..." 
+ Tar.extract { cwd: extractDir, archive: tarballName } + fetchRemotePublishedTime name version >>= case _ of + Nothing -> pure $ Left $ PublishedTimeNotFound name version + Just publishedTime -> + pure $ Right { path: Path.concat [ extractDir, extractedPath ], published: publishedTime } + +-- | Fetch the published time for a specific version from the remote registry +-- | repo (main branch). Used as a fallback when the local registry checkout +-- | doesn't have metadata for archive-backed packages. +fetchRemotePublishedTime :: forall r. PackageName -> Version -> Run (GITHUB + LOG + r) (Maybe DateTime) +fetchRemotePublishedTime name version = do + let + printed = PackageName.print name + path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] + Log.debug $ Array.fold + [ "Fetching published time for " + , formatPackageVersion name version + , " from remote registry" + ] + GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of + Left err -> do + Log.warn $ Array.fold + [ "Failed to fetch remote metadata for " + , printed + , ": " + , Octokit.printGitHubError err + ] + pure Nothing + Right content -> do + let + parsed = do + json <- hush $ JSON.parse content + obj <- JSON.toJObject json + publishedJson <- JSON.Object.lookup "published" obj + publishedObj <- JSON.toJObject publishedJson + versionJson <- JSON.Object.lookup (Version.print version) publishedObj + versionObj <- JSON.toJObject versionJson + timeJson <- JSON.Object.lookup "publishedTime" versionObj + timeStr <- JSON.toString timeJson + hush $ Formatter.DateTime.unformat Internal.Format.iso8601DateTime timeStr + case parsed of + Nothing -> do + Log.warn $ Array.fold + [ "Could not extract publishedTime for " + , formatPackageVersion name version + , " from remote metadata" + ] + pure Nothing + Just dt -> do + Log.debug $ Array.fold + [ "Fetched published time for " + , formatPackageVersion name version + , " from remote registry" + ] + pure $ Just dt + +-- | A mock handler for testing that uses a local directory of tarballs instead +-- | of fetching from the remote registry-archive. +handleMock + :: forall r a + . { archiveDir :: FilePath, metadata :: Map PackageName Metadata } + -> Archive a + -> Run (LOG + AFF + EFFECT + r) a +handleMock env = case _ of + Fetch destination name version reply -> map (map reply) Except.runExcept do + let + tarballName = Version.print version <> ".tar.gz" + sourcePath = Path.concat [ env.archiveDir, PackageName.print name <> "-" <> Version.print version <> ".tar.gz" ] + absoluteTarballPath = Path.concat [ destination, tarballName ] + + Run.liftAff (Aff.attempt (FS.Aff.stat sourcePath)) >>= case _ of + Left _ -> + Except.throw $ DownloadFailed name version "Tarball not found in mock archive" + Right _ -> do + Run.liftAff (Aff.attempt (FS.Aff.copyFile sourcePath absoluteTarballPath)) >>= case _ of + Left error -> + Except.throw $ DownloadFailed name version (Aff.message error) + Right _ -> + Log.debug $ "Copied mock tarball to " <> absoluteTarballPath + + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> + Except.throw $ ExtractionFailed name version "Tarball has no top-level directory" + Just extractedPath -> do + Log.debug "Extracting mock archive tarball..." 
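+          -- Note: unlike the real handler, which unpacks inside an archive/
+          -- subdirectory, the mock extracts directly into `destination`.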
+ Tar.extract { cwd: destination, archive: tarballName } + case Map.lookup name env.metadata of + Nothing -> + Except.throw $ PublishedTimeNotFound name version + Just (Metadata m) -> + case Map.lookup version m.published of + Nothing -> + Except.throw $ PublishedTimeNotFound name version + Just publishedMeta -> + pure { path: Path.concat [ destination, extractedPath ], published: publishedMeta.publishedTime } diff --git a/app/src/App/Effect/Cache.purs b/app/src/App/Effect/Cache.purs index 15808ff9d..3ea63452a 100644 --- a/app/src/App/Effect/Cache.purs +++ b/app/src/App/Effect/Cache.purs @@ -169,7 +169,6 @@ handleMemoryFs env = case _ of case inFs of Nothing -> pure $ reply Nothing Just entry -> do - Log.debug $ "Fell back to on-disk entry for " <> memory putMemoryImpl env.ref unit (Key memory (Const entry)) pure $ reply $ Just $ unCache entry Just cached -> @@ -227,8 +226,7 @@ getMemoryImpl ref (Key id (Reply reply)) = do let (unCache :: CacheValue -> b) = unsafeCoerce cache <- Run.liftEffect $ Ref.read ref case Map.lookup id cache of - Nothing -> do - Log.debug $ "No cache entry found for " <> id <> " in memory." + Nothing -> pure $ reply Nothing Just cached -> do pure $ reply $ Just $ unCache cached @@ -237,7 +235,6 @@ putMemoryImpl :: forall x r a. CacheRef -> a -> MemoryEncoding Const a x -> Run putMemoryImpl ref next (Key id (Const value)) = do let (toCache :: x -> CacheValue) = unsafeCoerce Run.liftEffect $ Ref.modify_ (Map.insert id (toCache value)) ref - Log.debug $ "Wrote cache entry for " <> id <> " in memory." pure next deleteMemoryImpl :: forall x r a. CacheRef -> MemoryEncoding Ignore a x -> Run (LOG + EFFECT + r) a @@ -276,7 +273,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readFile path)) >>= case _ of Left _ -> do - Log.debug $ "No cache found for " <> id <> " at path " <> path pure $ reply Nothing Right buf -> do pure $ reply $ Just buf @@ -285,7 +281,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 path)) >>= case _ of Left _ -> do - Log.debug $ "No cache file found for " <> id <> " at path " <> path pure $ reply Nothing Right content -> case JSON.parse content of Left parseError -> do @@ -308,7 +303,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as a buffer: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path pure next AsJson id codec (Const value) -> do @@ -318,7 +312,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as JSON: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." pure next deleteFsImpl :: forall a b r. FilePath -> FsEncoding Ignore a b -> Run (LOG + AFF + r) a diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. 
This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. -module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. (Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..b37103531 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,13 +5,16 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (AdminJobDetails, FinishJob, InsertAdminJob, InsertMatrixJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite +import Registry.Operation (PackageSetOperation) import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. 
Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +24,30 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. + data Db a - = InsertLog LogLine a - | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + = InsertPublishJob InsertPublishJob (JobId -> a) + | InsertUnpublishJob InsertUnpublishJob (JobId -> a) + | InsertTransferJob InsertTransferJob (JobId -> a) + | InsertMatrixJob InsertMatrixJob (JobId -> a) + | InsertAdminJob InsertAdminJob (JobId -> a) + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectJobs SelectJobsRequest (Array Job -> a) + | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) + | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) + | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextAdminJob (Either String (Maybe AdminJobDetails) -> a) + | SelectRecentAdminJobs DateTime (Either String (Array AdminJobDetails) -> a) + | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a) + | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a) + | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a) + | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe AdminJobDetails) -> a) + | InsertLogLine LogLine a + | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) + | ResetIncompleteJobs a derive instance Functor Db @@ -39,28 +59,92 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. -selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) +-- | Select all logs for a given job, filtered by loglevel. +selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) +selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow + +-- | Select a list of the latest jobs from the database +selectJobs :: forall r. 
SelectJobsRequest -> Run (DB + EXCEPT String + r) (Array Job) +selectJobs request = Run.lift _db (SelectJobs request identity) + +-- | Insert a new publish job into the database. +insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId +insertPublishJob job = Run.lift _db (InsertPublishJob job identity) + +-- | Insert a new unpublish job into the database. +insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId +insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity) + +-- | Insert a new transfer job into the database. +insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId +insertTransferJob job = Run.lift _db (InsertTransferJob job identity) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId +insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity) + +-- | Insert a new admin job into the database. +insertAdminJob :: forall r. InsertAdminJob -> Run (DB + r) JobId +insertAdminJob job = Run.lift _db (InsertAdminJob job identity) + +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) + +-- | Select the next publish job from the database. +selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow + +-- | Select the next unpublish job from the database. +selectNextUnpublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails) +selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Select the next transfer job from the database. +selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next admin job from the database. +selectNextAdminJob :: forall r. Run (DB + EXCEPT String + r) (Maybe AdminJobDetails) +selectNextAdminJob = Run.lift _db (SelectNextAdminJob identity) >>= Except.rethrow + +-- | Returns recent admin jobs since a given timestamp (for scheduler). +selectRecentAdminJobs :: forall r. DateTime -> Run (DB + EXCEPT String + r) (Array AdminJobDetails) +selectRecentAdminJobs since = Run.lift _db (SelectRecentAdminJobs since identity) >>= Except.rethrow + +-- | Lookup a publish job from the database by name and version. +selectPublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectPublishJob packageName packageVersion = Run.lift _db (SelectPublishJob packageName packageVersion identity) >>= Except.rethrow + +-- | Lookup an unpublish job from the database by name and version. +selectUnpublishJob :: forall r. 
PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails)
+selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob packageName packageVersion identity) >>= Except.rethrow
+
+-- | Lookup a transfer job from the database by name.
+selectTransferJob :: forall r. PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails)
+selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow
+
+-- | Lookup a pending package set job from the database by payload (for duplicate detection at API boundary).
+-- | This is only used when a manual package set operation is submitted via the API.
+selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe AdminJobDetails)
+selectPackageSetJobByPayload payload = Run.lift _db (SelectPackageSetJobByPayload payload identity) >>= Except.rethrow
+
+-- | Delete all incomplete jobs from the database.
+resetIncompleteJobs :: forall r. Run (DB + r) Unit
+resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit)
 
 interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a
 interpret handler = Run.interpret (Run.on _db handler Run.send)
@@ -70,28 +154,96 @@ type SQLiteEnv = { db :: SQLite }
 
 -- | Interpret DB by interacting with the SQLite database on disk.
 handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a
 handleSQLite env = case _ of
-  InsertLog log next -> do
-    Run.liftEffect $ SQLite.insertLog env.db log
-    pure next
+  InsertPublishJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertPublishJob env.db job
+    pure $ reply result
 
-  SelectLogsByJob jobId logLevel since reply -> do
-    logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since
-    unless (Array.null logs.fail) do
-      Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail
-    pure $ reply logs.success
+  InsertUnpublishJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job
+    pure $ reply result
 
-  CreateJob newJob next -> do
-    Run.liftEffect $ SQLite.createJob env.db newJob
+  InsertTransferJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertTransferJob env.db job
+    pure $ reply result
+
+  InsertMatrixJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job
+    pure $ reply result
+
+  InsertAdminJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertAdminJob env.db job
+    pure $ reply result
+
+  FinishJob job next -> do
+    Run.liftEffect $ SQLite.finishJob env.db job
    pure next
 
-  FinishJob jobResult next -> do
-    Run.liftEffect $ SQLite.finishJob env.db jobResult
+  StartJob job next -> do
+    Run.liftEffect $ SQLite.startJob env.db job
    pure next
 
-  SelectJob jobId reply -> do
-    job <- Run.liftEffect $ SQLite.selectJob env.db jobId
+  SelectJob request reply -> do
+    { unreadableLogs, job } <- Run.liftEffect $ SQLite.selectJob env.db request
+    unless (Array.null unreadableLogs) do
+      Log.warn $ "Some logs were not readable: " <> String.joinWith "\n" unreadableLogs
    pure $ reply job
 
-  RunningJobForPackage name reply -> do
-    job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name
-    pure $ reply job
+  SelectJobs request reply -> do
+    { failed, jobs } <- Run.liftEffect $ SQLite.selectJobs env.db request
+    unless (Array.null failed) do
+      Log.warn $ "Some jobs were not readable: " <> String.joinWith "\n" failed
+    pure $ reply jobs
+
+  SelectNextPublishJob reply -> do
+    result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db
+    pure $
reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db + pure $ reply result + + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextTransferJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextAdminJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextAdminJob env.db + pure $ reply result + + SelectRecentAdminJobs since reply -> do + result <- Run.liftEffect $ SQLite.selectRecentAdminJobs env.db since + pure $ reply result + + SelectPublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectPublishJob env.db packageName packageVersion + pure $ reply result + + SelectUnpublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectUnpublishJob env.db packageName packageVersion + pure $ reply result + + SelectTransferJob packageName reply -> do + result <- Run.liftEffect $ SQLite.selectTransferJob env.db packageName + pure $ reply result + + SelectPackageSetJobByPayload payload reply -> do + result <- Run.liftEffect $ SQLite.selectPackageSetJobByPayload env.db payload + pure $ reply result + + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..873162264 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: githubApiUrlEnv , pursuitApiUrl: pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. 
+registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL @@ -272,6 +285,16 @@ pacchettibottiED25519Pub = EnvKey githubEventPath :: EnvKey FilePath githubEventPath = EnvKey { key: "GITHUB_EVENT_PATH", decode: pure } +-- Test environment variables (used by E2E tests) + +-- | Root directory for test state (database, scratch repos, etc). +stateDir :: EnvKey FilePath +stateDir = EnvKey { key: "STATE_DIR", decode: pure } + +-- | Directory containing git repository fixtures for tests. +repoFixturesDir :: EnvKey FilePath +repoFixturesDir = EnvKey { key: "REPO_FIXTURES_DIR", decode: pure } + decodeDatabaseUrl :: String -> Either String DatabaseUrl decodeDatabaseUrl input = do let prefix = "sqlite:" diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 584832255..914a3aa92 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -242,8 +242,8 @@ request octokit githubRequest@{ route: route@(GitHubRoute method _ _), codec } = -- auto-expire cache entries. We will be behind GitHub at most this amount per repo. -- -- TODO: This 'diff' check should be removed once we have conditional requests. - Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 4.0 -> do - Log.debug $ "Found cache entry but it was modified more than 4 hours ago, refetching " <> printedRoute + Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 23.0 -> do + Log.debug $ "Found cache entry but it was modified more than 23 hours ago, refetching " <> printedRoute result <- requestWithBackoff octokit githubRequest Cache.put _githubCache (Request route) (result <#> \resp -> { response: CJ.encode codec resp, modified: now, etag: Nothing }) pure result @@ -265,10 +265,8 @@ requestWithBackoff octokit githubRequest = do Log.debug $ "Making request to " <> route <> " with base URL " <> githubApiUrl result <- Run.liftAff do let - retryOptions = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt err -> case err of + retryOptions = defaultRetry + { retryOnFailure = \attempt err -> case err of UnexpectedError _ -> false DecodeError _ -> false -- https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#exceeding-the-rate-limit diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). 
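+-- |
+-- | For example, `Log.notice "Package published successfully."` is rendered by
+-- | the terminal handler below with a bright blue `[NOTICE]` prefix.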
module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of @@ -134,5 +138,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/Effect/PackageSets.purs b/app/src/App/Effect/PackageSets.purs index 5a250ba22..ccd78e1c2 100644 --- a/app/src/App/Effect/PackageSets.purs +++ b/app/src/App/Effect/PackageSets.purs @@ -428,7 +428,7 @@ validatePackageSet (PackageSet set) = do -- We can now attempt to produce a self-contained manifest index from the -- collected manifests. If this fails then the package set is not -- self-contained. - Tuple unsatisfied _ = ManifestIndex.maximalIndex (Set.fromFoldable success) + Tuple unsatisfied _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges (Set.fromFoldable success) -- Otherwise, we can check if we were able to produce an index from the -- package set alone, without errors. diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index cdd00eb1d..48fbdf4a8 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -252,7 +252,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Writing manifest for " <> formatted <> ":\n" <> printJson Manifest.codec manifest index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left error -> Except.throw $ Array.fold [ "Can't insert " <> formatted <> " into manifest index because it has unsatisfied dependencies:" @@ -275,7 +275,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Deleting manifest for " <> formatted index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left error -> Except.throw $ Array.fold [ "Can't delete " <> formatted <> " from manifest index because it would produce unsatisfied dependencies:" @@ -359,7 +359,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Just metadata -> do Log.debug $ "Successfully read metadata for " <> printedName <> " from path " <> path - Log.debug $ "Setting metadata cache to singleton entry (as cache was previosuly empty)." + Log.debug $ "Setting metadata cache to singleton entry (as cache was previously empty)." 
Cache.put _registryCache AllMetadata (Map.singleton name metadata) pure $ Just metadata @@ -388,6 +388,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Right Git.Changed -> do Log.info "Registry repo has changed, clearing metadata cache..." + Cache.delete _registryCache AllMetadata resetFromDisk WriteMetadata name metadata reply -> map (map reply) Except.runExcept do @@ -501,10 +502,9 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Log.info $ "Mirroring legacy package set " <> name <> " to the legacy package sets repo" manifests <- Except.rethrow =<< handle env (ReadAllManifests identity) - metadata <- Except.rethrow =<< handle env (ReadAllMetadata identity) Log.debug $ "Converting package set..." - converted <- case Legacy.PackageSet.convertPackageSet manifests metadata set of + converted <- case Legacy.PackageSet.convertPackageSet manifests set of Left error -> Except.throw $ "Failed to convert package set " <> name <> " to a legacy package set: " <> error Right converted -> pure converted @@ -733,17 +733,30 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << result <- Git.gitPull { address, pullMode: env.pull } path pure result - now <- nowUTC - debouncers <- Run.liftEffect $ Ref.read env.debouncer - case Map.lookup path debouncers of - -- We will be behind the upstream by at most this amount of time. - Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> - pure $ Right Git.NoChange - -- If we didn't debounce, then we should fetch the upstream. - _ -> do + -- Check if the repo directory exists before consulting the debouncer. + -- This ensures that if the scratch directory is deleted (e.g., for test + -- isolation), we always re-clone rather than returning a stale NoChange. + repoExists <- Run.liftAff $ Aff.attempt (FS.Aff.stat path) + case repoExists of + Left _ -> do + -- Repo doesn't exist, bypass debouncer entirely and clone fresh result <- fetchLatest + now <- nowUTC Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer pure result + Right _ -> do + -- Repo exists, check debouncer + now <- nowUTC + debouncers <- Run.liftEffect $ Ref.read env.debouncer + case Map.lookup path debouncers of + -- We will be behind the upstream by at most this amount of time. + Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> + pure $ Right Git.NoChange + -- If we didn't debounce, then we should fetch the upstream. + _ -> do + result <- fetchLatest + Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer + pure result -- | Commit the file(s) indicated by the commit key with a commit message. 
commit :: CommitKey -> String -> Run _ (Either String GitResult) @@ -836,8 +849,9 @@ readManifestIndexFromDisk root = do entries <- map partitionEithers $ for packages.success (ManifestIndex.readEntryFile root) case entries.fail of - [] -> case ManifestIndex.fromSet $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of + [] -> case ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of Left errors -> do + Log.debug $ "Could not read a valid manifest index from entry files: " <> Array.foldMap (Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) <<< NonEmptyArray.toArray) entries.success Except.throw $ append "Unable to read manifest index (some packages are not satisfiable): " $ Array.foldMap (append "\n - ") do Tuple name versions <- Map.toUnfoldable errors Tuple version dependency <- Map.toUnfoldable versions @@ -878,10 +892,10 @@ readAllMetadataFromDisk metadataDir = do entries <- Run.liftAff $ map partitionEithers $ for packages.success \name -> do result <- readJsonFile Metadata.codec (Path.concat [ metadataDir, PackageName.print name <> ".json" ]) - pure $ map (Tuple name) result + pure $ bimap (Tuple name) (Tuple name) result unless (Array.null entries.fail) do - Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (append "\n - ") entries.fail + Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (\(Tuple name err) -> "\n - " <> PackageName.print name <> ": " <> err) entries.fail Log.debug "Successfully read metadata entries." pure $ Map.fromFoldable entries.success diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index a9479d3f5..f9fe3444d 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,6 +6,8 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate +import Data.String as String +import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff import Effect.Exception as Exception import Effect.Now as Now @@ -20,6 +22,7 @@ import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Location as Location @@ -28,8 +31,15 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +-- | Packages can be published via the legacy importer or a user via the API. We +-- | determine some information differently in these cases, such as the time the +-- | package was published. 
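+-- |
+-- | `Old` marks a legacy-importer publish, whose published time is read from
+-- | git history; `Recent` marks a fresh API publish, stamped with the current
+-- | time.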
+data ImportType = Old | Recent + +derive instance Eq ImportType + -- | An effect for fetching package sources -data Source a = Fetch PackageSource FilePath Location String (Either String FetchedSource -> a) +data Source a = Fetch FilePath Location String (Either FetchError FetchedSource -> a) derive instance Functor Source @@ -40,27 +50,46 @@ _source = Proxy type FetchedSource = { path :: FilePath, published :: DateTime } +data FetchError + = GitHubOnly + | NoSubdir + | InaccessibleRepo Octokit.Address + | NoToplevelDir + | Fatal String + +printFetchError :: FetchError -> String +printFetchError = case _ of + GitHubOnly -> "Packages are only allowed to come from GitHub for now. See issue #15." + NoSubdir -> "Monorepos and the `subdir` key are not supported yet. See issue #16." + InaccessibleRepo { owner, repo } -> "Repository located at https://github.com/" <> owner <> "/" <> repo <> ".git is inaccessible or does not exist." + NoToplevelDir -> "Downloaded tarball has no top-level directory." + Fatal err -> "Unrecoverable error. " <> err + -- | Fetch the provided location to the provided destination path. -fetch :: forall r. PackageSource -> FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch source destination location ref = Except.rethrow =<< Run.lift _source (Fetch source destination location ref identity) +fetch :: forall r. FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource +fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< fetchEither destination location ref + +-- | Fetch the provided location, returning the typed FetchError on failure. +fetchEither :: forall r. FilePath -> Location -> String -> Run (SOURCE + r) (Either FetchError FetchedSource) +fetchEither destination location ref = Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a interpret handler = Run.interpret (Run.on _source handler Run.send) -- | Handle the SOURCE effect by downloading package source to the file system. -handle :: forall r a. Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a -handle = case _ of - Fetch source destination location ref reply -> map (map reply) Except.runExcept do +handle :: forall r a. ImportType -> Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle importType = case _ of + Fetch destination location ref reply -> map (map reply) Except.runExcept do Log.info $ "Fetching " <> printJson Location.codec location case location of Git _ -> do -- TODO: Support non-GitHub packages. Remember subdir when doing so. (See #15) - Except.throw "Packages are only allowed to come from GitHub for now. See #15" + Except.throw GitHubOnly GitHub { owner, repo, subdir } -> do -- TODO: Support subdir. In the meantime, we verify subdir is not present. (See #16) - when (isJust subdir) $ Except.throw "`subdir` is not supported for now. 
See #16" + when (isJust subdir) $ Except.throw NoSubdir case pursPublishMethod of -- This needs to be removed so that we can support non-GitHub packages (#15) @@ -73,41 +102,79 @@ handle = case _ of Log.debug $ "Using legacy Git clone to fetch package source at tag: " <> show { owner, repo, ref } let - repoDir = Path.concat [ destination, repo ] - - clonePackageAtTag = do - let url = Array.fold [ "https://github.com/", owner, "/", repo ] - let args = [ "clone", url, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] - withRetryOnTimeout (Git.gitCLI args Nothing) >>= case _ of - Cancelled -> Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> url <> " " <> ref - Failed err -> Aff.throwError $ Aff.error err - Succeeded _ -> pure unit + repoDir = Path.concat [ destination, repo <> "-" <> ref ] + + -- If a git clone is cancelled by the timeout, but had partially-cloned, then it will + -- leave behind files that prevent a retry. + retryOpts = defaultRetry + { cleanupOnCancel = FS.Extra.remove repoDir + , timeout = Milliseconds 15_000.0 + } + + cloneUrl = + Array.fold [ "https://github.com/", owner, "/", repo ] + + -- We disable Git LFS smudging because package sources should not + -- contain large binary files. This avoids downloading LFS objects + -- from misconfigured packages. + cloneArgs = + [ "-c" + , "filter.lfs.smudge=cat" + , "-c" + , "filter.lfs.process=cat" + , "clone" + , cloneUrl + , "--branch" + , ref + , "--single-branch" + , "-c" + , "advice.detachedHead=false" + , repoDir + ] + + clonePackageAtTag = + withRetry retryOpts (Git.gitCLI cloneArgs Nothing) >>= case _ of + Cancelled -> + Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> cloneUrl <> " " <> ref + Failed err -> + Aff.throwError $ Aff.error err + Succeeded _ -> + pure unit Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of + Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error -> do + Log.warn $ "Git clone command failed:\n " <> String.joinWith " " (Array.cons "git" cloneArgs) Log.error $ "Failed to clone git tag: " <> Aff.message error - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref - Right _ -> Log.debug $ "Cloned package source to " <> repoDir + + -- We'll receive this message if we try to clone a repo which doesn't + -- exist, which is interpreted as an attempt to fetch a private repo. + let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" + + if String.contains (String.Pattern missingRepoErr) (Aff.message error) then + Except.throw $ InaccessibleRepo { owner, repo } + else + Except.throw $ Fatal $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." 
let - getRefTime = case source of - LegacyPackage -> do - timestamp <- Except.rethrow =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) + getRefTime = case importType of + Old -> do + timestamp <- (Except.rethrow <<< lmap Fatal) =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) jsDate <- Run.liftEffect $ JSDate.parse timestamp dateTime <- case JSDate.toDateTime jsDate of - Nothing -> Except.throw $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate + Nothing -> Except.throw $ Fatal $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate Just parsed -> pure parsed pure dateTime - CurrentPackage -> + Recent -> Run.liftEffect Now.nowDateTime -- Cloning will result in the `repo` name as the directory name publishedTime <- Except.runExcept getRefTime >>= case _ of Left error -> do - Log.error $ "Failed to get published time: " <> error - Except.throw $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." + Log.error $ "Failed to get published time. " <> printFetchError error + Except.throw $ Fatal $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." Right value -> pure value pure { path: repoDir, published: publishedTime } @@ -122,12 +189,12 @@ handle = case _ of commit <- GitHub.getRefCommit { owner, repo } (RawVersion ref) >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at ref " <> ref <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref + Except.throw $ Fatal $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref Right result -> pure result GitHub.getCommitDate { owner, repo } commit >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at commit " <> commit <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref + Except.throw $ Fatal $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref Right a -> pure a let tarballName = ref <> ".tar.gz" @@ -139,16 +206,16 @@ handle = case _ of Run.liftAff $ Fetch.withRetryRequest archiveUrl {} case response of - Cancelled -> Except.throw $ "Could not download " <> archiveUrl + Cancelled -> Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.FetchError error) -> do Log.error $ "Failed to download " <> archiveUrl <> " because of an HTTP error: " <> Exception.message error - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do arrayBuffer <- Run.liftAff arrayBufferAff buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) Log.error $ "Failed to download " <> archiveUrl <> " because of a non-200 status code (" <> show status <> ") with body " <> bodyString - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Succeeded { 
arrayBuffer: arrayBufferAff } -> do arrayBuffer <- Run.liftAff arrayBufferAff Log.debug $ "Successfully downloaded " <> archiveUrl <> " into a buffer." @@ -156,14 +223,14 @@ handle = case _ of Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of Left error -> do Log.error $ "Downloaded " <> archiveUrl <> " but failed to write it to the file at path " <> absoluteTarballPath <> ":\n" <> Aff.message error - Except.throw $ "Could not download " <> archiveUrl <> " due to an internal error." + Except.throw $ Fatal $ "Could not download " <> archiveUrl <> " due to an internal error." Right _ -> Log.debug $ "Tarball downloaded to " <> absoluteTarballPath Log.debug "Verifying tarball..." Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of Nothing -> - Except.throw "Downloaded tarball from GitHub has no top-level directory." + Except.throw NoToplevelDir Just path -> do Log.debug "Extracting the tarball..." Tar.extract { cwd: destination, archive: tarballName } diff --git a/app/src/App/Effect/Storage.purs b/app/src/App/Effect/Storage.purs index c9a52a7bb..b6d6a0ad4 100644 --- a/app/src/App/Effect/Storage.purs +++ b/app/src/App/Effect/Storage.purs @@ -199,6 +199,7 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case Except.throw $ "Could not delete package " <> package <> " due to an error connecting to the storage backend." Succeeded _ -> do Log.debug $ "Deleted release of " <> package <> " from S3 at the path " <> packagePath + Cache.delete _storageCache (Package name version) pure unit else do Log.error $ packagePath <> " does not exist on S3 (available: " <> String.joinWith ", " published <> ")" diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 2c02604c4..b0ab0f02c 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,120 +14,249 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - API.publish CurrentPackage payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. 
- signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err + } + liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret Source.handle + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left packageSetOp@(PackageSetUpdate payload) -> do + -- Sign with pacchettibotti if submitter is a trustee + request <- signPackageSetIfTrustee packageSetOp payload + pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateRequestCodec request + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . 
Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Notice && case lastTimestamp of + Nothing -> true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 
minutes at 5 second intervals +defaultPollConfig :: PollConfig +defaultPollConfig = + { maxAttempts: 360 + , interval: Aff.Milliseconds 5000.0 + } type GitHubEventEnv = { octokit :: Octokit @@ -126,10 +264,11 @@ type GitHubEventEnv = , issue :: IssueNumber , username :: String , operation :: Either PackageSetOperation PackageOperation - , spacesConfig :: SpaceKey , publicKey :: String , privateKey :: String , resourceEnv :: Env.ResourceEnv + , pollConfig :: PollConfig + , logVerbosity :: LogVerbosity } initializeGitHub :: Aff (Maybe GitHubEventEnv) @@ -137,17 +276,12 @@ initializeGitHub = do token <- Env.lookupRequired Env.pacchettibottiToken publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret resourceEnv <- Env.lookupResourceEnv eventPath <- Env.lookupRequired Env.githubEventPath octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl readOperation eventPath >>= case _ of - -- If the issue body is not just a JSON string, then we don't consider it - -- to be an attempted operation and it is presumably just an issue on the - -- registry repository. NotJson -> pure Nothing @@ -172,10 +306,11 @@ initializeGitHub = do , issue , username , operation - , spacesConfig: { key: spacesKey, secret: spacesSecret } , publicKey , privateKey , resourceEnv + , pollConfig: defaultPollConfig + , logVerbosity: Verbose } data OperationDecoding @@ -198,9 +333,6 @@ readOperation eventPath = do pure event let - -- TODO: Right now we parse all operations from GitHub issues, but we should - -- in the future only parse out package set operations. The others should be - -- handled via a HTTP API. decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation) decodeOperation json = do object <- CJ.decode CJ.jobject json @@ -240,7 +372,7 @@ firstObject input = fromMaybe input do after <- String.lastIndexOf (String.Pattern "}") start pure (String.take (after + 1) start) --- | An event triggered by a GitHub workflow, specifically via an issue comment -- | or issue creation. -- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment newtype IssueEvent = IssueEvent @@ -299,3 +431,32 @@ signPacchettiBottiIfTrustee auth = do else do Log.info "Authenticated payload not submitted by a registry trustee, continuing with original signature." pure auth + +-- | Sign a package set update with pacchettibotti's key if the submitter is a trustee. +-- | Non-trustees get an unsigned request (signature = Nothing). +signPackageSetIfTrustee + :: forall r + . 
PackageSetOperation + -> Operation.PackageSetUpdateData + -> Run (GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) Operation.PackageSetUpdateRequest +signPackageSetIfTrustee packageSetOp payload = do + let rawPayload = JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + GitHub.listTeamMembers API.packagingTeam >>= case _ of + Left githubError -> do + Log.warn $ Array.fold + [ "Unable to fetch members of packaging team, not signing package set request: " + , Octokit.printGitHubError githubError + ] + pure { payload: packageSetOp, rawPayload, signature: Nothing } + Right members -> do + { username } <- Env.askGitHubEvent + if Array.elem username members then do + Log.info "Package set update submitted by a registry trustee, signing with pacchettibotti keys." + { privateKey } <- Env.askPacchettiBotti + signature <- case Auth.signPayload { privateKey, rawPayload } of + Left _ -> Except.throw "Error signing package set update. cc: @purescript/packaging" + Right sig -> pure sig + pure { payload: packageSetOp, rawPayload, signature: Just signature } + else do + Log.info "Package set update not submitted by a registry trustee, sending unsigned request." + pure { payload: packageSetOp, rawPayload, signature: Nothing } diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 7788b16c2..8d997342f 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -11,7 +11,6 @@ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Either as Either import Data.Exists as Exists -import Data.FunctorWithIndex (mapWithIndex) import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Ord.Max (Max(..)) @@ -38,7 +37,7 @@ import Registry.App.Legacy.LenientRange as LenientRange import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (LegacyPackageSet(..), LegacyPackageSetEntry, LegacyPackageSetUnion, RawPackageName(..), RawVersion(..), RawVersionRange(..), legacyPackageSetCodec, legacyPackageSetUnionCodec, rawPackageNameMapCodec, rawVersionCodec, rawVersionRangeCodec) -import Registry.Foreign.Octokit (Address, GitHubError) +import Registry.Foreign.Octokit (Address, GitHubError(..)) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp import Registry.License as License @@ -60,12 +59,13 @@ type LegacyManifest = , dependencies :: Map PackageName Range } -toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location { license, description, dependencies } = do +toManifest :: PackageName -> Version -> Location -> String -> LegacyManifest -> Manifest +toManifest name version location ref legacy = do + let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing - Manifest { name, version, location, license, description, dependencies, includeFiles, excludeFiles, owners } + Manifest { name, version, location, ref, license, description, dependencies, includeFiles, excludeFiles, owners } -- | Attempt to retrieve a license, description, and set of dependencies from a -- | PureScript repo that does not have a Registry-supported manifest, but does @@ -140,21 +140,13 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr Left bowerError, Left _ -> Left bowerError Right bowerDeps, Left _ 
-> Right bowerDeps Left _, Right spagoDeps -> Right spagoDeps - Right bowerDeps, Right spagoDeps -> Right do - bowerDeps # mapWithIndex \package range -> - case Map.lookup package spagoDeps of - Nothing -> range - Just spagoRange -> Range.union range spagoRange + Right bowerDeps, Right spagoDeps -> Right $ Map.unionWith Range.union bowerDeps spagoDeps unionPackageSets = case maybePackageSetDeps, unionManifests of Nothing, Left manifestError -> Left manifestError Nothing, Right manifestDeps -> Right manifestDeps Just packageSetDeps, Left _ -> Right packageSetDeps - Just packageSetDeps, Right manifestDeps -> Right do - packageSetDeps # mapWithIndex \package range -> - case Map.lookup package manifestDeps of - Nothing -> range - Just manifestRange -> Range.union range manifestRange + Just packageSetDeps, Right manifestDeps -> Right $ Map.unionWith Range.union manifestDeps packageSetDeps Run.Except.rethrowAt _legacyManifestError unionPackageSets @@ -173,6 +165,44 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr pure { license, dependencies, description } +-- | Some legacy manifests must be patched to be usable. +patchLegacyManifest :: PackageName -> Version -> LegacyManifest -> LegacyManifest +patchLegacyManifest name version legacy = do + let bolson = unsafeFromRight (PackageName.parse "bolson") + let hyrule = unsafeFromRight (PackageName.parse "hyrule") + + let unsafeVersion = unsafeFromRight <<< Version.parse + let unsafeRange a b = unsafeFromJust (Range.mk (unsafeVersion a) (unsafeVersion b)) + let fixRange pkg range = Map.update (\_ -> Just range) pkg + + -- hyrule v2.2.0 removes a module that breaks all versions of bolson + -- prior to the versions below + let earlyHyruleFixedRange = unsafeRange "1.6.4" "2.2.0" + let earlyFixHyrule = fixRange hyrule earlyHyruleFixedRange + + -- hyrule v2.4.0 removes a module that breaks all versions of bolson, deku, + -- and rito prior to the versions below + let hyruleFixedRange = unsafeRange "2.0.0" "2.4.0" + let fixHyrule = fixRange hyrule hyruleFixedRange + + -- bolson v0.3.1 changes the type of a function that breaks deku until 0.9.21 + let bolsonFixedRange = unsafeRange "0.1.0" "0.3.2" + let fixBolson = fixRange bolson bolsonFixedRange + + case PackageName.print name of + "bolson" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.4.0" -> legacy { dependencies = fixHyrule legacy.dependencies } + "deku" + | version < unsafeVersion "0.7.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.9.21" -> legacy { dependencies = fixBolson (fixHyrule legacy.dependencies) } + | version < unsafeVersion "0.9.25" -> legacy { dependencies = fixHyrule legacy.dependencies } + "rito" + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.5" -> legacy { dependencies = fixHyrule legacy.dependencies } + _ -> + legacy + _legacyManifestError :: Proxy "legacyManifestError" _legacyManifestError = Proxy @@ -224,16 +254,22 @@ fetchLegacyManifestFiles :: forall r . 
Address -> RawVersion - -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) + -> Run (GITHUB + LOG + AFF + EFFECT + EXCEPT String + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) fetchLegacyManifestFiles address ref = do eitherBower <- fetchBowerfile address ref - void $ flip ltraverse eitherBower \error -> - Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherBower case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error eitherSpago <- fetchSpagoDhallJson address ref - void $ flip ltraverse eitherSpago \error -> - Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherSpago case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error pure $ case eitherBower, eitherSpago of - Left _, Left _ -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available." } + Left errL, Left errR -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available: " <> Octokit.printGitHubError errL <> ", " <> Octokit.printGitHubError errR } Right bower, Left _ -> Right $ This bower Left _, Right spago -> Right $ That spago Right bower, Right spago -> Right $ Both bower spago @@ -448,7 +484,12 @@ fetchLegacyPackageSets = Run.Except.runExceptAt _legacyPackageSetsError do Nothing -> do Log.debug $ "Cache miss for legacy package set " <> refStr <> ", refetching..." result <- GitHub.getJsonFile Legacy.PackageSet.legacyPackageSetsRepo ref legacyPackageSetCodec "packages.json" - Cache.put _legacyCache (LegacySet ref) result + -- Only cache permanent errors (404, decode errors) and successes. + -- Transient errors (rate limits, network issues) should be retried. 
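+ -- As a sketch of the intended split (assuming isPermanentGitHubError
+ -- treats 404s and decode failures as permanent, unlike rate limits or
+ -- connection errors):
+ --   Right packageSet                    -> cached
+ --   Left (APIError { statusCode: 404 }) -> cached; the ref will never appear
+ --   Left (APIError { statusCode: 429 }) -> not cached; refetched next call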
+ case result of + Right _ -> Cache.put _legacyCache (LegacySet ref) result + Left err | Octokit.isPermanentGitHubError err -> Cache.put _legacyCache (LegacySet ref) result + Left _ -> pure unit pure result Just value -> pure value diff --git a/app/src/App/Legacy/PackageSet.purs b/app/src/App/Legacy/PackageSet.purs index eb1ce8021..62b718d7c 100644 --- a/app/src/App/Legacy/PackageSet.purs +++ b/app/src/App/Legacy/PackageSet.purs @@ -102,8 +102,8 @@ printPscTag (PscTag { compiler, date }) = , Format.DateTime.format pscDateFormat (DateTime date bottom) ] -convertPackageSet :: ManifestIndex -> Map PackageName Metadata -> PackageSet -> Either String ConvertedLegacyPackageSet -convertPackageSet index metadataMap (PackageSet { compiler, packages, published, version }) = do +convertPackageSet :: ManifestIndex -> PackageSet -> Either String ConvertedLegacyPackageSet +convertPackageSet index (PackageSet { compiler, packages, published, version }) = do converted <- case separate $ mapWithIndex convertPackage packages of { left, right } | Map.isEmpty left -> Right right { left } -> do @@ -130,17 +130,14 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versions <- note noIndexPackageError $ Map.lookup packageName $ ManifestIndex.toMap index Manifest manifest <- note noIndexVersionError $ Map.lookup packageVersion versions - Metadata metadata <- note noMetadataPackageError $ Map.lookup packageName metadataMap - { ref } <- note noMetadataVersionError $ Map.lookup packageVersion metadata.published - - repo <- case metadata.location of + repo <- case manifest.location of GitHub { owner, repo, subdir: Nothing } -> Right $ "https://github.com/" <> owner <> "/" <> repo <> ".git" Git { url, subdir: Nothing } -> Right url GitHub _ -> Left usesSubdirError Git _ -> Left usesSubdirError pure - { version: RawVersion ref + { version: RawVersion manifest.ref , dependencies: Array.fromFoldable $ Map.keys $ manifest.dependencies , repo } @@ -149,8 +146,6 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versionStr = Version.print packageVersion noIndexPackageError = "No registry index entry found for " <> nameStr noIndexVersionError = "Found registry index entry for " <> nameStr <> " but none for version " <> versionStr - noMetadataPackageError = "No metadata entry found for " <> nameStr - noMetadataVersionError = "Found metadata entry for " <> nameStr <> " but no published version for " <> versionStr usesSubdirError = "Package " <> nameStr <> " uses the 'subdir' key, which is not supported for legacy package sets." 
printDhall :: LegacyPackageSet -> String diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..5d5169c5e --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,94 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.DateTime (diff) +import Data.Time.Duration (Minutes(..), Seconds(..), fromDuration) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import Node.Process as Process +import Registry.App.Server.Env (createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor +import Registry.App.Server.Router as Router +import Registry.App.Server.Scheduler as CronJobs + +main :: Effect Unit +main = do + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ withRestartLoop "Scheduler" (CronJobs.runScheduler env) + Aff.launchAff_ $ withRestartLoop "Job executor" (JobExecutor.runJobExecutor env) + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = fromDuration (Minutes 1.0) + fiveMinutes = fromDuration (Minutes 5.0) + + loop n = do + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> do + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." + + -- | Run an Aff action in an infinite loop with exponential backoff on failure. + -- | If the action keeps crashing immediately, restart delay doubles each time. + -- | Once the action runs for more than a minute, the delay resets. + withRestartLoop :: String -> Aff (Either Aff.Error Unit) -> Aff Unit + withRestartLoop name action = loop initialRestartDelay + where + initialRestartDelay = fromDuration (Seconds 0.1) + + loop restartDelay = do + start <- nowUTC + result <- action + end <- nowUTC + + Console.error case result of + Left error -> name <> " failed: " <> Aff.message error + Right _ -> name <> " exited for no reason." + + -- This is a heuristic: if the fiber keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. 
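+ -- For example, with the defaults above: crashes at startup restart after
+ -- 0.1s, then 0.2s, 0.4s, 0.8s, and so on, doubling each time until a run
+ -- survives past the one-minute mark, at which point the delay resets to 0.1s.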
+ let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay + + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Manifest/SpagoYaml.purs b/app/src/App/Manifest/SpagoYaml.purs index 1d701e57c..66ffa1c48 100644 --- a/app/src/App/Manifest/SpagoYaml.purs +++ b/app/src/App/Manifest/SpagoYaml.purs @@ -27,9 +27,10 @@ import Registry.Range (Range) import Registry.Range as Range import Registry.Version as Version --- | Attempt to convert a spago.yaml file to a Manifest -spagoYamlToManifest :: SpagoYaml -> Either String Manifest -spagoYamlToManifest config = do +-- | Attempt to convert a spago.yaml file to a Manifest. The ref parameter is +-- | the Git reference (tag or commit) used to fetch this version's source. +spagoYamlToManifest :: String -> SpagoYaml -> Either String Manifest +spagoYamlToManifest ref config = do package@{ name, description, dependencies: spagoDependencies } <- note "No 'package' key found in config." config.package publish@{ version, license, owners } <- note "No 'publish' key found under the 'package' key in config." package.publish location <- note "No 'location' key found under the 'publish' key in config." publish.location @@ -43,6 +44,7 @@ spagoYamlToManifest config = do , description , license , location + , ref , owners , includeFiles , excludeFiles diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 311a15aa5..5e586ebae 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -1,6 +1,5 @@ module Registry.App.Prelude ( LogVerbosity(..) - , PackageSource(..) , PursPublishMethod(..) , Retry , RetryResult(..) @@ -23,7 +22,6 @@ module Registry.App.Prelude , parseYaml , partitionEithers , printJson - , printPackageSource , pursPublishMethod , readJsonFile , readYamlFile @@ -62,7 +60,7 @@ import Data.List (List) as Extra import Data.Map (Map) as Extra import Data.Map as Map import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe -import Data.Newtype (class Newtype, un) as Extra +import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra import Data.Newtype as Newtype import Data.Nullable (Nullable, toMaybe, toNullable) as Extra import Data.Set (Set) as Extra @@ -173,7 +171,9 @@ withRetryOnTimeout = withRetry defaultRetry type Retry err = { timeout :: Aff.Milliseconds + , cleanupOnCancel :: Extra.Aff Unit , retryOnCancel :: Int -> Boolean + , cleanupOnFailure :: err -> Extra.Aff Unit , retryOnFailure :: Int -> err -> Boolean } @@ -182,7 +182,9 @@ type Retry err = defaultRetry :: forall err. Retry err defaultRetry = { timeout: Aff.Milliseconds 5000.0 + , cleanupOnCancel: pure unit , retryOnCancel: \attempt -> attempt <= 3 + , cleanupOnFailure: \_ -> pure unit , retryOnFailure: \_ _ -> false } @@ -196,7 +198,7 @@ derive instance (Eq err, Eq a) => Eq (RetryResult err a) -- | Attempt an effectful computation that can fail by specifying how to retry -- | the request and whether it should time out. withRetry :: forall err a. 
Retry err -> Extra.Aff (Either.Either err a) -> Extra.Aff (RetryResult err a) -withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } action = do +withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure, cleanupOnCancel, cleanupOnFailure } action = do let runAction :: Extra.Aff (Either.Either err a) -> Int -> Extra.Aff (RetryResult err a) runAction action' ms = do @@ -217,14 +219,18 @@ withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } a Cancelled -> if retryOnCancel attempt then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnCancel retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnCancel pure Cancelled Failed err -> if retryOnFailure attempt err then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnFailure err retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnFailure err pure (Failed err) Succeeded result -> pure (Succeeded result) @@ -255,15 +261,3 @@ data PursPublishMethod = LegacyPursPublish | PursPublish -- | The current purs publish method pursPublishMethod :: PursPublishMethod pursPublishMethod = LegacyPursPublish - --- | Operations can be exercised for old, pre-registry packages, or for packages --- | which are on the 0.15 compiler series. If a true legacy package is uploaded --- | then we do not require compilation to succeed and we don't publish docs. -data PackageSource = LegacyPackage | CurrentPackage - -derive instance Eq PackageSource - -printPackageSource :: PackageSource -> String -printPackageSource = case _ of - LegacyPackage -> "legacy" - CurrentPackage -> "current" diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8158695fc..ccc0debb2 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -1,5 +1,13 @@ import Database from "better-sqlite3"; +const JOB_INFO_TABLE = 'job_info'; +const LOGS_TABLE = 'logs'; +const PUBLISH_JOBS_TABLE = 'publish_jobs'; +const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs'; +const TRANSFER_JOBS_TABLE = 'transfer_jobs'; +const MATRIX_JOBS_TABLE = 'matrix_jobs'; +const ADMIN_JOBS_TABLE = 'admin_jobs'; + export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); let db = new Database(path, { @@ -11,49 +19,239 @@ export const connectImpl = (path, logger) => { return db; }; -export const insertLogImpl = (db, logLine) => { - db.prepare( - "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)" - ).run(logLine); +export const selectJobInfoImpl = (db, jobId) => { + const stmt = db.prepare(` + SELECT * FROM ${JOB_INFO_TABLE} + WHERE jobId = ? LIMIT 1 + `); + return stmt.get(jobId); +} + +// A generic helper function for inserting a new package, matrix, or package set +// job. Not exported because this should always be done as part of a more general +// job insertion. A job is expected to always include a 'jobId' and 'createdAt' +// field, though other fields will be required depending on the job. 
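+//
+// For example, a publish-job insert (hypothetical values) must supply exactly
+// the required fields and nothing more:
+//
+//   _insertJob(db, PUBLISH_JOBS_TABLE,
+//     ['jobId', 'packageName', 'packageVersion', 'payload'],
+//     { jobId: 'abc-123', createdAt: '2024-01-01T00:00:00.000Z',
+//       packageName: 'prelude', packageVersion: '6.0.1', payload: '{}' });
+//
+// Missing or extra fields throw before anything is written to the database.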
+const _insertJob = (db, table, columns, job) => { + const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns])); + const missingFields = requiredFields.filter(field => !(field in job)); + const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field)); + + if (missingFields.length > 0) { + throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`); + } + + if (extraFields.length > 0) { + throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`); + } + + const insertInfo = db.prepare(` + INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) + `); + + const insertJob = db.prepare(` + INSERT INTO ${table} (${columns.join(', ')}) + VALUES (${columns.map(col => `@${col}`).join(', ')}) + `); + + const insert = db.transaction((job) => { + insertInfo.run({ + jobId: job.jobId, + createdAt: job.createdAt, + startedAt: null, + finishedAt: null, + success: 0 + }); + insertJob.run(job); + }); + + return insert(job); +}; + +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertTransferJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'payload'] + return _insertJob(db, TRANSFER_JOBS_TABLE, columns, job); +}; + +export const insertMatrixJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] + return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); +}; + +export const insertAdminJobImpl = (db, job) => { + const columns = ['jobId', 'adminJobType', 'payload', 'rawPayload', 'signature'] + return _insertJob(db, ADMIN_JOBS_TABLE, columns, job); +}; + +const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { + const params = []; + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + `; + + if (jobId != null) { + query += ` WHERE info.jobId = ?`; + params.push(jobId); + } else if (packageName != null) { + query += ` WHERE job.packageName = ?`; + params.push(packageName); + if (packageVersion != null) { + query += ` AND job.packageVersion = ?`; + params.push(packageVersion); + } + } else { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 1`; + const stmt = db.prepare(query); + + return stmt.get(...params); +} + +export const selectPublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectUnpublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectTransferJobImpl = (db, { jobId, packageName }) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId, packageName }); +}; + +export const selectMatrixJobImpl = (db, jobId) => { + return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); +}; + +export const selectAdminJobImpl = (db, jobId) => { + return _selectJob(db, { table: ADMIN_JOBS_TABLE, jobId }); 
+}; + +// Find a pending package set job by payload (for duplicate detection at API boundary) +// Note: This function is kept for checking duplicates when a manual package set +// operation is submitted via the API. It only looks for package_set_operation type jobs. +export const selectPackageSetJobByPayloadImpl = (db, payload) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${ADMIN_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE job.adminJobType = 'package_set_operation' + AND job.payload = ? + AND info.finishedAt IS NULL + ORDER BY info.createdAt ASC LIMIT 1 + `); + return stmt.get(payload); +}; + +const _selectJobs = (db, { table, since, includeCompleted }) => { + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.createdAt >= ? + `; + let params = [since]; + + if (includeCompleted === false) { + query += ` AND info.finishedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 100`; + const stmt = db.prepare(query); + + return stmt.all(...params); +} + +export const selectPublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PUBLISH_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectUnpublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: UNPUBLISH_JOBS_TABLE, since, includeCompleted }); }; -export const selectLogsByJobImpl = (db, jobId, logLevel) => { - const row = db - .prepare( - "SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC" - ) - .all(jobId, logLevel); - return row; +export const selectMatrixJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: MATRIX_JOBS_TABLE, since, includeCompleted }); }; -export const createJobImpl = (db, job) => { - db.prepare( - "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)" - ).run(job); +export const selectAdminJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: ADMIN_JOBS_TABLE, since, includeCompleted }); }; -export const finishJobImpl = (db, result) => { - db.prepare( - "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId" - ).run(result); +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +// TODO: We should keep track of this somehow: either save how many times a +// job has been retried and give up at some point, notifying the trustees, or +// notify right away on every retry so we can look into them. +export const resetIncompleteJobsImpl = (db) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = NULL + WHERE finishedAt IS NULL + AND startedAt IS NOT NULL`); + return stmt.run(); }; -export const selectJobImpl = (db, jobId) => { - const row = db - .prepare("SELECT * FROM jobs WHERE jobId = ? 
LIMIT 1") - .get(jobId); - return row; +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? AND timestamp >= ? + ORDER BY timestamp ASC LIMIT 100 + `; + + const stmt = db.prepare(query); + return stmt.all(jobId, logLevel, since); }; -export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); +// Returns recent admin jobs since a given timestamp (for scheduler) +export const selectRecentAdminJobsImpl = (db, since) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${ADMIN_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.createdAt >= ? + `); + return stmt.all(since); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..4d0de6c7f 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,842 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob + ( AdminJobDetails + , ConnectOptions + , FinishJob + , InsertAdminJob + , InsertMatrixJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PublishJobDetails , SQLite + , SelectJobRequest + , SelectJobsRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails , connect - , createJob - , deleteIncompleteJobs , finishJob - , insertLog - , runningJobForPackage + , insertAdminJob + , insertLogLine + , insertMatrixJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs , selectJob + , selectJobs + , selectRecentAdminJobs , selectLogsByJob + , selectNextAdminJob + , selectNextMatrixJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , selectPackageSetJobByPayload + , selectPublishJob + , selectTransferJob + , selectUnpublishJob + , startJob ) where import Registry.App.Prelude +import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) +import Data.Array (sortBy, take) import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Function (on) +import Data.Nullable (notNull, null) +import Data.Nullable as Nullable +import Data.UUID.Random as UUID +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Record as Record +import Registry.API.V1 (AdminJobType, Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 +import Registry.API.V1 as V1 +import Registry.Internal.Codec as Internal.Codec import 
Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature(..)) +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-------------------------------------------------------------------------------- +-- job_info table -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. +type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo + +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSStartJob = + { jobId :: String + , startedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +startJob :: SQLite -> StartJob -> Effect Unit 
+startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: fromSuccess success + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -type NewJob = +newJobId :: forall m. 
MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect { unreadableLogs :: Array String, job :: Either String (Maybe Job) } +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail: unreadableLogs, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + -- Failing to decode a log should not prevent us from returning a job, so we pass + -- failures through to be handled by application code + job <- runExceptT $ firstJust + [ selectPublishJobById logs + , selectMatrixJobById logs + , selectTransferJobById logs + , selectAdminJobById logs + , selectUnpublishJobById logs + ] + pure { job, unreadableLogs } + where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + + selectPublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: notNull jobId, packageName: null } + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectAdminJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectAdminJobImpl db (notNull jobId) + pure $ traverse + ( map (AdminJob <<< Record.merge { logs, jobType: Proxy :: _ "admin" }) + <<< adminJobDetailsFromJSRep + ) + maybeJobDetails + +type SelectJobsRequest = + { since :: DateTime + , includeCompleted :: Boolean + } + +selectJobs :: SQLite -> SelectJobsRequest -> Effect { failed :: Array String, jobs :: Array Job } +selectJobs db { since, includeCompleted } = do + publishJobs <- selectPublishJobs + unpublishJobs <- selectUnpublishJobs + transferJobs <- selectTransferJobs + matrixJobs <- selectMatrixJobs + adminJobs <- selectAdminJobs + let + { fail: failedJobs, success: allJobs } = partitionEithers + (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> adminJobs) + pure { failed: 
failedJobs, jobs: take 100 $ sortBy (compare `on` (V1.jobInfo >>> _.createdAt)) allJobs } + + where + selectPublishJobs = do + jobs <- Uncurried.runEffectFn3 selectPublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" }) <<< publishJobDetailsFromJSRep) jobs + + selectUnpublishJobs = do + jobs <- Uncurried.runEffectFn3 selectUnpublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" }) <<< unpublishJobDetailsFromJSRep) jobs + + selectTransferJobs = do + jobs <- Uncurried.runEffectFn3 selectTransferJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" }) <<< transferJobDetailsFromJSRep) jobs + + selectMatrixJobs = do + jobs <- Uncurried.runEffectFn3 selectMatrixJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" }) <<< matrixJobDetailsFromJSRep) jobs + + selectAdminJobs = do + jobs <- Uncurried.runEffectFn3 selectAdminJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (AdminJob <<< Record.merge { logs: [], jobType: Proxy :: _ "admin" }) <<< adminJobDetailsFromJSRep) jobs + +-------------------------------------------------------------------------------- +-- publish_jobs table + +type PublishJobDetails = { jobId :: JobId - , jobType :: JobType , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PublishData } -type JSNewJob = +type JSPublishJobDetails = { jobId :: String - , jobType :: String , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +type SelectPublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String + } + +foreign import selectPublishJobImpl :: EffectFn2 
SQLite SelectPublishParams (Nullable JSPublishJobDetails) + +foreign import selectPublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +selectPublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe PublishJobDetails)) +selectPublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now } -type JobResult = +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new publish job into the publish_jobs table. +insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId +insertPublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- unpublish_jobs table + +type UnpublishJobDetails = { jobId :: JobId - , finishedAt :: DateTime + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData } -type JSJobResult = +type JSUnpublishJobDetails = { jobId :: String - , finishedAt :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails +unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , 
+    , createdAt: created
+    , startedAt: started
+    , finishedAt: finished
+    , success: s
+    , packageName: name
+    , packageVersion: version
+    , payload: parsed
+    }
+
+type SelectUnpublishParams =
+  { jobId :: Nullable String
+  , packageName :: Nullable String
+  , packageVersion :: Nullable String
   }
-type Job =
+foreign import selectUnpublishJobImpl :: EffectFn2 SQLite SelectUnpublishParams (Nullable JSUnpublishJobDetails)
+
+foreign import selectUnpublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSUnpublishJobDetails)
+
+selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails))
+selectNextUnpublishJob db = do
+  maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db { jobId: null, packageName: null, packageVersion: null }
+  pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails
+
+selectUnpublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe UnpublishJobDetails))
+selectUnpublishJob db packageName packageVersion = do
+  maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db
+    { jobId: null
+    , packageName: notNull $ PackageName.print packageName
+    , packageVersion: notNull $ Version.print packageVersion
+    }
+  pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails
+
+type InsertUnpublishJob =
+  { payload :: UnpublishData
+  , rawPayload :: String
+  , signature :: Signature
+  }
+
+type JSInsertUnpublishJob =
+  { jobId :: String
+  , packageName :: String
+  , packageVersion :: String
+  , payload :: String
+  , createdAt :: String
+  }
+
+insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob
+insertUnpublishJobToJSRep jobId now { payload, rawPayload, signature } =
+  { jobId: un JobId jobId
+  , packageName: PackageName.print payload.name
+  , packageVersion: Version.print payload.version
+  , payload: stringifyJson Operation.authenticatedCodec
+      { payload: Operation.Unpublish payload
+      , rawPayload
+      , signature
+      }
+  , createdAt: DateTime.format Internal.Format.iso8601DateTime now
+  }
+
+foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit
+
+-- | Insert a new unpublish job.
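+-- |
+-- | A usage sketch, not part of this module: it assumes a decoded
+-- | `auth :: AuthenticatedData` wrapping an unpublish operation, mirroring how
+-- | the router enqueues these jobs.
+-- |
+-- | ```purescript
+-- | case auth.payload of
+-- |   Operation.Unpublish payload -> void $ insertUnpublishJob db
+-- |     { payload, rawPayload: auth.rawPayload, signature: auth.signature }
+-- |   _ -> pure unit
+-- | ```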
+insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId
+insertUnpublishJob db job = do
+  jobId <- newJobId
+  now <- nowUTC
+  Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job
+  pure jobId
+
+--------------------------------------------------------------------------------
+-- transfer_jobs table
+
+type TransferJobDetails =
   { jobId :: JobId
-  , jobType :: JobType
+  , createdAt :: DateTime
+  , startedAt :: Maybe DateTime
+  , finishedAt :: Maybe DateTime
+  , success :: Boolean
   , packageName :: PackageName
-  , ref :: String
+  , payload :: AuthenticatedData
+  }
+
+type JSTransferJobDetails =
+  { jobId :: String
+  , createdAt :: String
+  , startedAt :: Nullable String
+  , finishedAt :: Nullable String
+  , success :: Int
+  , packageName :: String
+  , payload :: String
+  }
+
+transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails
+transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do
+  created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt
+  started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt)
+  finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt)
+  s <- toSuccess success
+  name <- PackageName.parse packageName
+  parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload
+  pure
+    { jobId: JobId jobId
+    , createdAt: created
+    , startedAt: started
+    , finishedAt: finished
+    , success: s
+    , packageName: name
+    , payload: parsed
+    }
+
+type SelectTransferParams = { jobId :: Nullable String, packageName :: Nullable String }
+
+foreign import selectTransferJobImpl :: EffectFn2 SQLite SelectTransferParams (Nullable JSTransferJobDetails)
+
+foreign import selectTransferJobsImpl :: EffectFn3 SQLite String Boolean (Array JSTransferJobDetails)
+
+selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails))
+selectNextTransferJob db = do
+  maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db { jobId: null, packageName: null }
+  pure $ traverse transferJobDetailsFromJSRep maybeJobDetails
+
+selectTransferJob :: SQLite -> PackageName -> Effect (Either String (Maybe TransferJobDetails))
+selectTransferJob db packageName = do
+  maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db
+    { jobId: null
+    , packageName: notNull $ PackageName.print packageName
+    }
+  pure $ traverse transferJobDetailsFromJSRep maybeJobDetails
+
+type InsertTransferJob =
+  { payload :: TransferData
+  , rawPayload :: String
+  , signature :: Signature
+  }
+
+type JSInsertTransferJob =
+  { jobId :: String
+  , packageName :: String
+  , payload :: String
+  , createdAt :: String
+  }
+
+insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob
+insertTransferJobToJSRep jobId now { payload, rawPayload, signature } =
+  { jobId: un JobId jobId
+  , packageName: PackageName.print payload.name
+  , payload: stringifyJson Operation.authenticatedCodec
+      { payload: Operation.Transfer payload, rawPayload, signature }
+  , createdAt: DateTime.format Internal.Format.iso8601DateTime now
+  }
+
+foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit
+
+-- | Insert a new transfer job.
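+-- |
+-- | A usage sketch (assuming `auth :: AuthenticatedData` carries a transfer
+-- | operation, as in the router's Transfer handler):
+-- |
+-- | ```purescript
+-- | case auth.payload of
+-- |   Operation.Transfer payload -> void $ insertTransferJob db
+-- |     { payload, rawPayload: auth.rawPayload, signature: auth.signature }
+-- |   _ -> pure unit
+-- | ```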
+insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- matrix_jobs table + +type InsertMatrixJob = + { packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , createdAt :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload + } + +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId +insertMatrixJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId + +type MatrixJobDetails = + { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJob = +type JSMatrixJobDetails = { jobId :: String - , jobType :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + } + +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSMatrixJobDetails) + +foreign import selectMatrixJobsImpl :: EffectFn3 SQLite String Boolean (Array JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + 
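+-- A round-trip sketch for this table (names as defined above; in the real
+-- server the job executor is what polls via `selectNextMatrixJob`):
+--
+-- ```purescript
+-- enqueueAndPeek :: SQLite -> InsertMatrixJob -> Effect Unit
+-- enqueueAndPeek db job = do
+--   void $ insertMatrixJob db job
+--   -- the oldest pending matrix job; `Left` reports a row that failed to decode
+--   next <- selectNextMatrixJob db
+--   case next of
+--     Right (Just details) -> pure unit -- would be handed to the MatrixBuilder
+--     _ -> pure unit
+-- ```
+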
+-------------------------------------------------------------------------------- +-- admin_jobs table + +type AdminJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , adminJobType :: AdminJobType + } + +type JSAdminJobDetails = + { jobId :: String , createdAt :: String + , startedAt :: Nullable String , finishedAt :: Nullable String , success :: Int + , adminJobType :: String + , payload :: String + } + +adminJobDetailsFromJSRep :: JSAdminJobDetails -> Either String AdminJobDetails +adminJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsedAdminJobType <- lmap JSON.DecodeError.print $ parseJson API.V1.adminJobTypeCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , adminJobType: parsedAdminJobType + } + +foreign import selectAdminJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSAdminJobDetails) + +foreign import selectPackageSetJobByPayloadImpl :: EffectFn2 SQLite String (Nullable JSAdminJobDetails) + +foreign import selectAdminJobsImpl :: EffectFn3 SQLite String Boolean (Array JSAdminJobDetails) + +foreign import selectRecentAdminJobsImpl :: EffectFn2 SQLite String (Array JSAdminJobDetails) + +selectNextAdminJob :: SQLite -> Effect (Either String (Maybe AdminJobDetails)) +selectNextAdminJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectAdminJobImpl db null + pure $ traverse adminJobDetailsFromJSRep maybeJobDetails + +-- | Find a pending package set job by payload (for duplicate detection at API boundary) +-- | This is only used when a manual package set operation is submitted via the API. 
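+-- |
+-- | A sketch of the intended check-then-insert pattern (`op` is a hypothetical
+-- | already-validated PackageSetOperation; names are as defined in this module):
+-- |
+-- | ```purescript
+-- | existing <- selectPackageSetJobByPayload db op
+-- | jobId <- case existing of
+-- |   Right (Just job) -> pure job.jobId
+-- |   _ -> insertAdminJob db
+-- |     { adminJobType: AdminPackageSetOperation op, rawPayload: Nothing, signature: Nothing }
+-- | ```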
+selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe AdminJobDetails)) +selectPackageSetJobByPayload db payload = do + let payloadStr = stringifyJson Operation.packageSetOperationCodec payload + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobByPayloadImpl db payloadStr + pure $ traverse adminJobDetailsFromJSRep maybeJobDetails + +-- | Returns recent admin jobs since a given timestamp (for scheduler) +selectRecentAdminJobs :: SQLite -> DateTime -> Effect (Either String (Array AdminJobDetails)) +selectRecentAdminJobs db since = do + let sinceStr = DateTime.format Internal.Format.iso8601DateTime since + jobs <- Uncurried.runEffectFn2 selectRecentAdminJobsImpl db sinceStr + pure $ traverse adminJobDetailsFromJSRep jobs + +type InsertAdminJob = + { adminJobType :: AdminJobType + , rawPayload :: Maybe String + , signature :: Maybe Signature + } + +type JSInsertAdminJob = + { jobId :: String + , createdAt :: String + , adminJobType :: String + , payload :: String + , rawPayload :: Nullable String + , signature :: Nullable String + } + +insertAdminJobToJSRep :: JobId -> DateTime -> InsertAdminJob -> JSInsertAdminJob +insertAdminJobToJSRep jobId now { adminJobType, rawPayload, signature } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , adminJobType: API.V1.adminJobTypeKey adminJobType + , payload: stringifyJson API.V1.adminJobTypeCodec adminJobType + , rawPayload: Nullable.toNullable rawPayload + , signature: Nullable.toNullable $ map (\(Signature s) -> s) signature + } + +foreign import insertAdminJobImpl :: EffectFn2 SQLite JSInsertAdminJob Unit + +insertAdminJob :: SQLite -> InsertAdminJob -> Effect JobId +insertAdminJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertAdminJobImpl db $ insertAdminJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- logs table + +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } + +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } - -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob - -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult - -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob - -runningJobForPackage :: 
SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob - -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. +selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = DateTime.format Internal.Format.iso8601DateTime since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + timestamp + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index e2830e29f..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,341 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest 
(LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish CurrentPackage publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." - - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." 
- - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: 
vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret Source.handle - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/AdminJobs.purs b/app/src/App/Server/AdminJobs.purs new file mode 100644 index 000000000..2992d57a2 --- /dev/null +++ b/app/src/App/Server/AdminJobs.purs @@ -0,0 +1,36 @@ +-- | Execution of admin jobs (scheduled tasks and manual package set operations). +-- | The scheduled job implementations are stubbed for now - actual scripts will +-- | be plugged in later. +module Registry.App.Server.AdminJobs + ( executeAdminJob + ) where + +import Registry.App.Prelude + +import Registry.API.V1 (AdminJobType(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects) +import Run (Run) + +-- | Execute an admin job based on its type. The scheduled job implementations +-- | (PackageTransfer, LegacyImport, PackageSetUpdate) are currently stubbed. +-- | Only AdminPackageSetOperation (manual API requests) is fully implemented. +executeAdminJob :: AdminJobType -> Run ServerEffects Unit +executeAdminJob = case _ of + AdminPackageTransfer -> do + Log.info "Running scheduled PackageTransfer job..." + Log.warn "TODO: PackageTransfer execution not yet implemented" + + AdminLegacyImport mode -> do + Log.info $ "Running scheduled LegacyImport job with mode: " <> V1.printLegacyImportMode mode + Log.warn "TODO: LegacyImport execution not yet implemented" + + AdminPackageSetUpdate mode -> do + Log.info $ "Running scheduled PackageSetUpdate job with mode: " <> V1.printPackageSetUpdateMode mode + Log.warn "TODO: PackageSetUpdate execution not yet implemented" + + AdminPackageSetOperation operation -> do + Log.info "Running manual package set operation from API..." 
+ API.packageSetUpdate operation diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..70e5698f5 --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,191 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.PackageSets (PACKAGE_SETS) +import Registry.App.Effect.PackageSets as PackageSets +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + 
+ octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.resetIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + PACKAGE_SETS + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # PackageSets.interpret (PackageSets.handle { workdir: scratchDir }) + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Archive.interpret Archive.handle + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..4120fb65f --- /dev/null +++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,186 @@ +module Registry.App.Server.JobExecutor + ( runJobExecutor + ) where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) +import Control.Parallel as Parallel +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Map as Map +import Data.Set as Set +import Data.Time.Duration (Hours(..), Minutes(..), Seconds(..), fromDuration) +import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Server.AdminJobs as AdminJobs +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (Run) +import Run.Except (EXCEPT) + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: 
+  -- whenever a new compiler is published we need to see which
+  -- packages are compatible with it; this is a responsibility of the MatrixBuilder,
+  -- but it needs to be triggered to know there's a new version out.
+  -- To do that, we ask PursVersions what the compilers are, then we look for
+  -- the compatibility list of the latest `prelude` version. If the new compiler
+  -- is missing, then we know that we have not attempted to check compatibility
+  -- with it (since the latest `prelude` has to be compatible by definition),
+  -- and we can enqueue a "compile everything" here, which will be the first
+  -- thing that the JobExecutor picks up.
+  void $ MatrixBuilder.checkIfNewCompiler
+    >>= traverse upgradeRegistryToNewCompiler
+  Db.resetIncompleteJobs
+  loop
+  where
+  loop = do
+    maybeJob <- findNextAvailableJob
+    case maybeJob of
+      Nothing -> do
+        liftAff $ Aff.delay $ fromDuration (Seconds 1.0)
+        loop
+
+      Just job -> do
+        now <- nowUTC
+        let
+          jobId = (V1.jobInfo job).jobId
+
+        Db.startJob { jobId, startedAt: now }
+
+        -- We race the job execution against a timeout; if the timeout happens first,
+        -- we kill the job and move on to the next one.
+        -- Note: we set env.jobId so that logs are written to the database.
+        jobResult <- liftAff do
+          let envWithJobId = env { jobId = Just jobId }
+          let execute = Just <$> (runEffects envWithJobId $ executeJob now job)
+          -- Admin jobs get a long timeout (they can run for a long time,
+          -- e.g. LegacyImporter), while other jobs get a much shorter one.
+          let
+            delay = case job of
+              AdminJob _ -> fromDuration (Hours 4.0)
+              _ -> fromDuration (Minutes 5.0)
+          let timeout = Aff.delay delay $> Nothing
+          Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout
+
+        success <- case jobResult of
+          Nothing -> do
+            Log.error $ "Job " <> unwrap jobId <> " timed out."
+            pure false
+
+          Just (Left err) -> do
+            Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err
+            pure false
+
+          Just (Right _) -> do
+            Log.info $ "Job " <> unwrap jobId <> " succeeded."
+            pure true
+
+        finishedAt <- nowUTC
+        Db.finishJob { jobId, finishedAt, success }
+        loop
+
+-- TODO: here we only get a single package for each operation, but really we should
+-- have all of them and toposort them. There is something in ManifestIndex, but we're
+-- not sure that's what we need.
+findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Job)
+findNextAvailableJob = runMaybeT
+  $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob)
+  <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob)
+  <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob)
+  <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob)
+  <|> (AdminJob <<< Record.merge { logs: [], jobType: Proxy :: _ "admin" } <$> MaybeT Db.selectNextAdminJob)
+
+executeJob :: DateTime -> Job -> Run ServerEffects Unit
+executeJob _ = case _ of
+  PublishJob { payload: payload@{ name } } -> do
+    maybeResult <- API.publish Nothing payload
+    -- The above operation will throw if not successful, and return a map of
+    -- dependencies of the package only if it has not been published before.
+    for_ maybeResult \{ dependencies, version } -> do
+      -- At this point this package has been verified with one compiler only.
+      -- So we need to enqueue compilation jobs for (1) the same package with all
+      -- the other compilers, and (2) the same compiler with all the packages that
+      -- depend on this one.
+      -- TODO here we are building the compiler index, but we should really cache it
+      compilerIndex <- MatrixBuilder.readCompilerIndex
+      let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex }
+      samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData
+      sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+      for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants)
+        \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+          Log.info $ "Enqueuing matrix job: compiler "
+            <> Version.print solvedCompiler
+            <> ", package "
+            <> PackageName.print solvedPackage
+            <> "@"
+            <> Version.print solvedVersion
+          Db.insertMatrixJob
+            { payload: resolutions
+            , compilerVersion: solvedCompiler
+            , packageName: solvedPackage
+            , packageVersion: solvedVersion
+            }
+  UnpublishJob { payload } -> API.authenticated payload
+  TransferJob { payload } -> API.authenticated payload
+  MatrixJob details@{ packageName, packageVersion } -> do
+    maybeDependencies <- MatrixBuilder.runMatrixJob details
+    -- Unlike the publishing case, after verifying a compilation here we only need
+    -- to follow up by trying to compile the packages that depend on this one.
+    for_ maybeDependencies \dependencies -> do
+      -- TODO here we are building the compiler index, but we should really cache it
+      compilerIndex <- MatrixBuilder.readCompilerIndex
+      let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex }
+      sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+      for (Array.fromFoldable sameCompilerAllDependants)
+        \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+          Log.info $ "Enqueuing matrix job: compiler "
+            <> Version.print solvedCompiler
+            <> ", package "
+            <> PackageName.print solvedPackage
+            <> "@"
+            <> Version.print solvedVersion
+          Db.insertMatrixJob
+            { payload: resolutions
+            , compilerVersion: solvedCompiler
+            , packageName: solvedPackage
+            , packageVersion: solvedVersion
+            }
+  AdminJob { adminJobType } -> AdminJobs.executeAdminJob adminJobType
+
+upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit
+upgradeRegistryToNewCompiler newCompilerVersion = do
+  Log.info $ "New compiler found: " <> Version.print newCompilerVersion
+  Log.info "Starting upgrade of the whole registry to the new compiler..."
+  allManifests <- Registry.readAllManifests
+  for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do
+    -- Note: we enqueue compilation jobs only for packages with no dependencies,
+    -- because from them we should be able to reach the whole of the registry:
+    -- as they complete, new jobs for their dependants will be queued up.
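+    -- For example, `prelude` has no dependencies, so it is enqueued right away;
+    -- when its matrix job succeeds, `executeJob` enqueues jobs for the packages
+    -- that depend on it, and the wave continues outward from there.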
+ when (Map.isEmpty manifest.dependencies) do + Log.info $ "Enqueuing matrix job for _new_ compiler " + <> Version.print newCompilerVersion + <> ", package " + <> PackageName.print manifest.name + <> "@" + <> Version.print manifest.version + void $ Db.insertMatrixJob + { payload: Map.empty + , compilerVersion: newCompilerVersion + , packageName: manifest.name + , packageVersion: manifest.version + } diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs new file mode 100644 index 000000000..34aba9ba0 --- /dev/null +++ b/app/src/App/Server/MatrixBuilder.purs @@ -0,0 +1,234 @@ +module Registry.App.Server.MatrixBuilder + ( checkIfNewCompiler + , installBuildPlan + , printCompilerFailure + , readCompilerIndex + , runMatrixJob + , solveForAllCompilers + , solveDependantsForCompiler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray +import Data.Map as Map +import Data.Set as Set +import Data.Set.NonEmpty as NonEmptySet +import Data.String as String +import Effect.Aff as Aff +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.API.V1 (MatrixJobData) +import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Tmp as Tmp +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata as Metadata +import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver as Solver +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +runMatrixJob :: forall r. 
+  MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range))
+runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do
+  workdir <- Tmp.mkTmpDir
+  let installed = Path.concat [ workdir, ".registry" ]
+  FS.Extra.ensureDirectory installed
+  installBuildPlan (Map.insert packageName packageVersion buildPlan) installed
+  result <- Run.liftAff $ Purs.callCompiler
+    { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] }
+    , version: Just compilerVersion
+    , cwd: Just workdir
+    }
+  FS.Extra.remove workdir
+  case result of
+    Left err -> do
+      Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion
+        <> ":\n"
+        <> printCompilerFailure compilerVersion err
+      pure Nothing
+    Right _ -> do
+      Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion
+
+      Registry.readMetadata packageName >>= case _ of
+        Nothing -> do
+          Log.error $ "No existing metadata for " <> PackageName.print packageName
+          pure Nothing
+        Just (Metadata metadata) -> do
+          let
+            metadataWithCompilers = metadata
+              { published = Map.update
+                  ( \publishedMetadata@{ compilers } ->
+                      Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers }
+                  )
+                  packageVersion
+                  metadata.published
+              }
+          Registry.writeMetadata packageName (Metadata metadataWithCompilers)
+          Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers)
+
+          Log.info "Wrote completed metadata to the registry!"
+          Registry.readManifest packageName packageVersion >>= case _ of
+            Just (Manifest manifest) -> pure (Just manifest.dependencies)
+            Nothing -> do
+              Log.error $ "No existing manifest for " <> PackageName.print packageName <> "@" <> Version.print packageVersion
+              pure Nothing
+
+-- TODO feels like we should be doing this at startup and use the cache instead
+-- of reading files all over again
+readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex
+readCompilerIndex = do
+  metadata <- Registry.readAllMetadata
+  manifests <- Registry.readAllManifests
+  allCompilers <- PursVersions.pursVersions
+  pure $ Solver.buildCompilerIndex allCompilers manifests metadata
+
+-- | Install all dependencies indicated by the build plan to the specified
+-- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
+installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
+installBuildPlan resolutions dependenciesDir = do
+  Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir
+  -- We fetch every dependency at its resolved version, unpack the tarball, and
+  -- store the resulting source code in a specified directory for dependencies.
+  forWithIndex_ resolutions \name version -> do
+    let
+      -- This filename uses the format the directory name will have once
+      -- unpacked, i.e. package-name-major.minor.patch
+      filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
+      filepath = Path.concat [ dependenciesDir, filename ]
+    Storage.download name version filepath
+    Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
+      Left error -> do
+        Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
+        Except.throw "Failed to unpack dependency tarball, cannot continue."
+      Right _ ->
+        Log.debug $ "Unpacked " <> filename
+    Run.liftAff $ FS.Aff.unlink filepath
+    Log.debug $ "Installed " <> formatPackageVersion name version
+
+printCompilerFailure :: Version -> CompilerFailure -> String
+printCompilerFailure compiler = case _ of
+  MissingCompiler -> Array.fold
+    [ "Compilation failed because the build plan compiler version "
+    , Version.print compiler
+    , " is not supported. Please try again with a different compiler."
+    ]
+  CompilationError errs -> String.joinWith "\n"
+    [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:"
+    , "```"
+    , Purs.printCompilerErrors errs
+    , "```"
+    ]
+  UnknownError err -> String.joinWith "\n"
+    [ "Compilation failed with version " <> Version.print compiler <> " because of an error:"
+    , "```"
+    , err
+    , "```"
+    ]
+
+type MatrixSolverData =
+  { compilerIndex :: Solver.CompilerIndex
+  , compiler :: Version
+  , name :: PackageName
+  , version :: Version
+  , dependencies :: Map PackageName Range
+  }
+
+type MatrixSolverResult =
+  { name :: PackageName
+  , version :: Version
+  , compiler :: Version
+  , resolutions :: Map PackageName Version
+  }
+
+solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult)
+solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do
+  -- remove the compiler we tested with from the set of all of them
+  compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions
+  newJobs <- for compilers \target -> do
+    Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name
+    case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of
+      Left _solverErrors -> do
+        Log.info $ "Failed to solve with compiler " <> Version.print target
+        -- Log.debug $ Solver.printSolverError solverErrors
+        pure Nothing
+      Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of
+        true -> do
+          Log.debug $ "Solved with compiler " <> Version.print solvedCompiler
+          pure $ Just { compiler: target, resolutions, name, version }
+        false -> do
+          Log.debug $ Array.fold
+            [ "Produced a compiler-derived build plan that selects a compiler ("
+            , Version.print solvedCompiler
+            , ") that differs from the target compiler ("
+            , Version.print target
+            , ")."
+            ]
+          pure Nothing
+  pure $ Set.fromFoldable $ Array.catMaybes newJobs
+
+solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult)
+solveDependantsForCompiler { compilerIndex, name, version, compiler } = do
+  manifestIndex <- Registry.readAllManifests
+  let dependentManifests = ManifestIndex.dependants manifestIndex name version
+  newJobs <- for dependentManifests \(Manifest manifest) -> do
+    -- we first check whether we have already attempted this package with this
+    -- compiler, either because the metadata already records it or because the
+    -- compilation was attempted and failed (i.e. if the metadata lists compilers
+    -- for this version, we only try this compiler when it is newer than all of
+    -- them, because every older one has already been attempted)
+    shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of
+      Nothing -> pure false
+      Just metadata -> pure $ case Map.lookup manifest.version (un Metadata metadata).published of
+        Nothing -> false
+        Just { compilers } -> all (_ < compiler) compilers
+    case shouldAttemptToCompile of
+      false -> pure Nothing
+      true -> do
+        -- if all good then run the solver
+        Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name
+        case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of
+          Left _solverErrors -> do
+            Log.info $ "Failed to solve with compiler " <> Version.print compiler
+            -- Log.debug $ Solver.printSolverError solverErrors
+            pure Nothing
+          Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of
+            true -> do
+              Log.debug $ "Solved with compiler " <> Version.print solvedCompiler
+              pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version }
+            false -> do
+              Log.debug $ Array.fold
+                [ "Produced a compiler-derived build plan that selects a compiler ("
+                , Version.print solvedCompiler
+                , ") that differs from the target compiler ("
+                , Version.print compiler
+                , ")."
+                ]
+              pure Nothing
+  pure $ Set.fromFoldable $ Array.catMaybes newJobs
+
+checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version)
+checkIfNewCompiler = do
+  Log.info "Checking if there's a new compiler in town..."
+  latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions
+  maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude"
+  pure $ maybeMetadata >>= \(Metadata metadata) ->
+    Map.findMax metadata.published
+      >>= \{ key: _version, value: { compilers } } -> do
+        case all (_ < latestCompiler) compilers of
+          -- all compilers compatible with the latest prelude are older than this one
+          true -> Just latestCompiler
+          false -> Nothing
diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
new file mode 100644
index 000000000..2035d542c
--- /dev/null
+++ b/app/src/App/Server/Router.purs
@@ -0,0 +1,178 @@
+module Registry.App.Server.Router where
+
+import Registry.App.Prelude hiding ((/))
+
+import Data.Codec.JSON as CJ
+import Data.DateTime as DateTime
+import Data.Time.Duration (Hours(..), negateDuration)
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import HTTPurple (Method(..), Request, Response)
+import HTTPurple as HTTPurple
+import HTTPurple.Status as Status
+import Registry.API.V1 (AdminJobType(..), Route(..))
+import Registry.API.V1 as V1
+import Registry.App.API as API
+import Registry.App.Auth as Auth
+import Registry.App.Effect.Db as Db
+import Registry.App.Effect.Env as Env
+import Registry.App.Effect.Log as Log
+import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects)
+import Registry.Operation (PackageSetOperation(..))
+import Registry.Operation as Operation
+import Run (Run)
+import Run as Run
+import Run.Except as Run.Except
+
+runRouter :: ServerEnv -> Effect Unit
+runRouter env = do
+  -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080)
+  port <- liftEffect $ Env.lookupOptional Env.serverPort
+  void $ HTTPurple.serve
+    { hostname: "0.0.0.0"
+    , port
+    }
+    { route: V1.routes
+    , router: runServer
+    }
+  where
+  runServer :: Request Route -> Aff Response
runServer request = do + result <- runEffects env (router request) + case result of + Left error -> do + Console.log $ "Bad request: " <> Aff.message error + HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + + jobId <- lift (Db.selectPublishJob publish.name publish.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate publish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPublishJob { payload: publish } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + + jobId <- lift (Db.selectUnpublishJob payload.name payload.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate unpublish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + + jobId <- lift (Db.selectTransferJob payload.name) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate transfer job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected transfer operation." 
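+  -- NOTE (illustrative sketch, with hypothetical names): Publish, Unpublish,
+  -- and Transfer above all share the same idempotent enqueue shape: look up
+  -- an existing job for the payload and reuse its id, or insert a new one.
+  --
+  --   enqueueOnce select insert = select >>= case _ of
+  --     Just job -> pure job.jobId  -- duplicate request: reuse existing job
+  --     Nothing -> insert           -- first request: create the job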
+ + Jobs { since, include_completed }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift + ( Run.Except.runExcept $ Db.selectJobs + { includeCompleted: fromMaybe false include_completed + , since: fromMaybe oneHourAgo since + } + ) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching jobs: " <> err + HTTPurple.internalServerError $ "Error while fetching jobs: " <> err + Right jobs -> jsonOk (CJ.array V1.jobCodec) jobs + + Job jobId { level: maybeLogLevel, since }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe oneHourAgo since }) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.internalServerError $ "Error while fetching job: " <> err + Right Nothing -> do + HTTPurple.notFound + Right (Just job) -> jsonOk V1.jobCodec job + + PackageSets, Post -> do + request <- HTTPurple.fromJson (jsonDecoder Operation.packageSetUpdateRequestCodec) body + lift $ Log.info $ "Received PackageSet request: " <> request.rawPayload + + -- Check if the operation requires authentication (compiler change or package removal) + let + PackageSetUpdate payload = request.payload + didChangeCompiler = isJust payload.compiler + didRemovePackages = any isNothing payload.packages + requiresAuth = didChangeCompiler || didRemovePackages + + -- If restricted operation, verify pacchettibotti signature + authResult <- + if requiresAuth then do + pacchettiBotti <- lift API.getPacchettiBotti + lift $ Run.liftAff $ Auth.verifyPackageSetPayload pacchettiBotti request + else + pure (Right unit) + + case authResult of + Left err -> do + lift $ Log.error $ "Package set authentication failed: " <> err + HTTPurple.badRequest err + Right _ -> do + when requiresAuth do + lift $ Log.info "Package set authentication successful." + + -- Check for duplicate pending job with the same payload + jobId <- lift (Db.selectPackageSetJobByPayload request.payload) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate package set job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertAdminJob + { adminJobType: AdminPackageSetOperation request.payload + , rawPayload: Just request.rawPayload + , signature: request.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound diff --git a/app/src/App/Server/Scheduler.purs b/app/src/App/Server/Scheduler.purs new file mode 100644 index 000000000..e0f66150f --- /dev/null +++ b/app/src/App/Server/Scheduler.purs @@ -0,0 +1,78 @@ +-- | Scheduler for admin jobs (PackageTransfer, LegacyImport, PackageSetUpdate). 
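+-- NOTE (hypothetical wiring, not shown in this diff): the scheduler is meant
+-- to run concurrently with the HTTP router, e.g. something like:
+--
+--   main :: Effect Unit
+--   main = do
+--     env <- mkServerEnv  -- hypothetical constructor for the ServerEnv
+--     Aff.launchAff_ (void (runScheduler env))
+--     runRouter env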
+module Registry.App.Server.Scheduler + ( runScheduler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.DateTime (DateTime, Time(..)) +import Data.DateTime as DateTime +import Data.Enum (fromEnum) +import Data.Time.Duration (Hours(..), fromDuration, negateDuration) +import Effect.Aff as Aff +import Registry.API.V1 (AdminJobType(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (AdminJobDetails) +import Registry.App.Server.Env (ServerEnv, runEffects) + +-- | The three admin job types that run on schedule. +-- | They are enqueued in this order: PackageTransfer -> LegacyImport -> PackageSetUpdate +scheduledAdminJobs :: Array AdminJobType +scheduledAdminJobs = + [ AdminPackageTransfer + , AdminLegacyImport V1.UpdateRegistry + , AdminPackageSetUpdate V1.CommitPackageSet + ] + +-- | Run the scheduler loop. Checks every hour if jobs should be enqueued. +-- | We run things in a window instead of a precise time, so that restarts and/or +-- | delays don't prevent jobs from happening. +runScheduler :: ServerEnv -> Aff (Either Aff.Error Unit) +runScheduler env = runEffects env do + Log.info "Starting Admin Job Scheduler" + loop + where + loop = do + liftAff $ Aff.delay $ fromDuration (Hours 1.0) + now <- nowUTC + + when (inScheduleWindow now) do + Log.info "In schedule window (00:00-04:00 UTC) - checking if admin jobs should be scheduled..." + -- Get jobs from last 12h + let twelveHoursAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 12.0)) now + recentJobs <- Db.selectRecentAdminJobs twelveHoursAgo + for_ scheduledAdminJobs \jobType -> do + when (shouldEnqueue jobType recentJobs) do + Log.info $ "Scheduling admin job: " <> V1.adminJobTypeKey jobType + void $ Db.insertAdminJob + { adminJobType: jobType + , rawPayload: Nothing + , signature: Nothing + } + + loop + +-- | Check if current time is in the schedule window +inScheduleWindow :: DateTime -> Boolean +inScheduleWindow dt = + let + Time hour _ _ _ = DateTime.time dt + in + fromEnum hour >= 0 && fromEnum hour < 4 + +-- | Determine if we should enqueue a job of the given type. +-- | Returns true if: +-- | 1. No incomplete job of that type exists (prevents duplicates) +-- | 2. 
Either never run, or last completed job was >12 hours ago
+shouldEnqueue :: AdminJobType -> Array AdminJobDetails -> Boolean
+shouldEnqueue jobType recentJobs =
+  let
+    jobsOfType = Array.filter (\j -> V1.adminJobTypeKey j.adminJobType == V1.adminJobTypeKey jobType) recentJobs
+    hasIncomplete = Array.any (\j -> isNothing j.finishedAt) jobsOfType
+    lastCompleted = Array.last $ Array.sortBy (comparing _.createdAt) $
+      Array.filter (\j -> isJust j.finishedAt) jobsOfType
+  in
+    -- Enqueue only when nothing is pending and no job of this type completed
+    -- within the 12-hour lookback window (i.e. it never ran, or last ran more
+    -- than 12 hours ago).
+    not hasIncomplete && isNothing lastCompleted
diff --git a/app/src/Fetch/Retry.purs b/app/src/Fetch/Retry.purs
index 4260f6e46..cd182385a 100644
--- a/app/src/Fetch/Retry.purs
+++ b/app/src/Fetch/Retry.purs
@@ -43,10 +43,8 @@ withRetryRequest url opts = withRetry retry do
     if response.status >= 400 then Left $ StatusError response
     else Right response

-  retry =
-    { timeout: defaultRetry.timeout
-    , retryOnCancel: defaultRetry.retryOnCancel
-    , retryOnFailure: \attempt -> case _ of
+  retry = defaultRetry
+    { retryOnFailure = \attempt -> case _ of
        FetchError _ -> false
        StatusError { status } ->
          -- We retry on 500-level errors in case the server is temporarily
diff --git a/app/test/App/API.purs b/app/test/App/API.purs
index faf4df3a3..28f17f90e 100644
--- a/app/test/App/API.purs
+++ b/app/test/App/API.purs
@@ -9,6 +9,7 @@ import Data.Set as Set
 import Data.String as String
 import Data.String.NonEmpty as NonEmptyString
 import Effect.Aff as Aff
+import Effect.Class.Console as Console
 import Effect.Ref as Ref
 import Node.FS.Aff as FS.Aff
 import Node.Path as Path
@@ -27,8 +28,10 @@ import Registry.Foreign.FastGlob as FastGlob
 import Registry.Foreign.Tmp as Tmp
 import Registry.Internal.Codec as Internal.Codec
 import Registry.Manifest as Manifest
+import Registry.ManifestIndex as ManifestIndex
 import Registry.PackageName as PackageName
 import Registry.Range as Range
+import Registry.Solver as Solver
 import Registry.Test.Assert as Assert
 import Registry.Test.Assert.Run as Assert.Run
 import Registry.Test.Utils as Utils
@@ -45,6 +48,7 @@ type PipelineEnv =
   , metadata :: Ref (Map PackageName Metadata)
   , index :: Ref ManifestIndex
   , storageDir :: FilePath
+  , archiveDir :: FilePath
   , githubDir :: FilePath
   }
@@ -57,31 +61,31 @@ spec = do
     removeIgnoredTarballFiles
     copySourceFiles

-  Spec.describe "Parses installed paths" do
-    Spec.it "Parses install path /my-package-1.0.0/..."
do - tmp <- Tmp.mkTmpDir - let moduleA = Path.concat [ tmp, "my-package-1.0.0", "src", "ModuleA.purs" ] - case API.parseInstalledModulePath { prefix: tmp, path: moduleA } of - Left err -> Assert.fail $ "Expected to parse " <> moduleA <> " but got error: " <> err - Right { name, version } -> do - Assert.shouldEqual name (Utils.unsafePackageName "my-package") - Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") - FS.Extra.remove tmp - Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do - Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do + Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do + logs <- liftEffect (Ref.new []) + let + toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry + toLegacyIndex = + Solver.exploreAllTransitiveDependencies + <<< Solver.initializeRegistry + <<< map (map (_.dependencies <<< un Manifest)) + <<< ManifestIndex.toMap + testEnv = { workdir + , logs , index , metadata , pursuitExcludes: Set.singleton (Utils.unsafePackageName "type-equality") , username: "jon" , storage: storageDir + , archive: archiveDir , github: githubDir } - Assert.Run.runTestEffects testEnv do + result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do -- We'll publish effect@4.0.0 from the fixtures/github-packages -- directory, which has an unnecessary dependency on 'type-equality' -- inserted into it. @@ -90,15 +94,17 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version: version , resolutions: Nothing } -- First, we publish the package. - API.publish CurrentPackage publishArgs + Registry.readAllManifests >>= \idx -> + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -127,9 +133,22 @@ spec = do , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies ] + -- We should verify the resulting metadata file is correct + Metadata effectMetadata <- Registry.readMetadata name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print name <> " to be in metadata." + Just m -> pure m + + case Map.lookup version effectMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + -- Finally, we can verify that publishing the package again should fail -- since it already exists. - Except.runExcept (API.publish CurrentPackage publishArgs) >>= case _ of + Except.runExcept (API.publish Nothing publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." @@ -138,14 +157,66 @@ spec = do -- but did not have documentation make it to Pursuit. 
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" + , resolutions: Nothing + } + Registry.readAllManifests >>= \idx -> + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + + -- We can also verify that transitive dependencies are added for legacy + -- packages. + let + transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } + transitivePublishArgs = + { compiler: Utils.unsafeVersion "0.15.10" + , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } + , name: transitive.name + , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } - API.publish CurrentPackage pursuitOnlyPublishArgs + Registry.readAllManifests >>= \idx -> + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + + -- We should verify the resulting metadata file is correct + Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in metadata." + Just m -> pure m + + case Map.lookup transitive.version transitiveMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + + Registry.readManifest transitive.name transitive.version >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in manifest index." + Just (Manifest manifest) -> do + let expectedDeps = Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeRange ">=6.0.0 <7.0.0") + when (manifest.dependencies /= expectedDeps) do + Except.throw $ String.joinWith "\n" + [ "Expected transitive@1.0.0 to have dependencies" + , printJson (Internal.Codec.packageMap Range.codec) expectedDeps + , "\nbut got" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + ] + case result of + Left exn -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Got an Aff exception! 
" <> Aff.message exn + Right (Left err) -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err + Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -178,7 +249,12 @@ spec = do copyFixture "registry-index" copyFixture "registry" copyFixture "registry-storage" + copyFixture "registry-archive" copyFixture "github-packages" + -- FIXME: This is a bit hacky, but we remove effect-4.0.0.tar.gz since the unit test publishes + -- it from scratch and will fail if effect-4.0.0 is already in storage. We have it in storage + -- for the separate integration tests. + FS.Extra.remove $ Path.concat [ testFixtures, "registry-storage", "effect-4.0.0.tar.gz" ] let readFixtures = do @@ -199,6 +275,7 @@ spec = do , metadata: fixtures.metadata , index: fixtures.index , storageDir: Path.concat [ testFixtures, "registry-storage" ] + , archiveDir: Path.concat [ testFixtures, "registry-archive" ] , githubDir: Path.concat [ testFixtures, "github-packages" ] } @@ -207,7 +284,7 @@ checkBuildPlanToResolutions = do Spec.it "buildPlanToResolutions produces expected resolutions file format" do Assert.shouldEqual generatedResolutions expectedResolutions where - dependenciesDir = "testDir" + installedResolutions = "testDir" resolutions = Map.fromFoldable [ Tuple (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "1.0.0") @@ -218,14 +295,14 @@ checkBuildPlanToResolutions = do generatedResolutions = API.formatPursuitResolutions { resolutions - , dependenciesDir + , installedResolutions } expectedResolutions = Map.fromFoldable do packageName /\ version <- (Map.toUnfoldable resolutions :: Array _) let bowerName = RawPackageName ("purescript-" <> PackageName.print packageName) - path = Path.concat [ dependenciesDir, PackageName.print packageName <> "-" <> Version.print version ] + path = Path.concat [ installedResolutions, PackageName.print packageName <> "-" <> Version.print version ] pure $ Tuple bowerName { path, version } removeIgnoredTarballFiles :: Spec.Spec Unit diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing @@ -103,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": 
"5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -121,6 +125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 8e8207974..2d4a7a2dc 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,7 +2,6 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude -import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map import Data.Set as Set @@ -13,7 +12,6 @@ import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (legacyPackageSetCodec) import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName -import Registry.Sha256 as Sha256 import Registry.Test.Assert as Assert import Registry.Test.Utils as Utils import Registry.Version as Version @@ -92,24 +90,17 @@ packageSet = PackageSet convertedPackageSet :: ConvertedLegacyPackageSet convertedPackageSet = - case Legacy.PackageSet.convertPackageSet index metadata packageSet of + case Legacy.PackageSet.convertPackageSet index packageSet of Left err -> unsafeCrashWith err Right value -> value where - index = unsafeFromRight $ ManifestIndex.fromSet $ Set.fromFoldable + index = unsafeFromRight $ ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable [ mkManifest assert [ console, effect, prelude ] , mkManifest console [ effect, prelude ] , mkManifest effect [ prelude ] , mkManifest prelude [] ] - metadata = Map.fromFoldable - [ unsafeMetadataEntry assert - , unsafeMetadataEntry console - , unsafeMetadataEntry effect - , unsafeMetadataEntry prelude - ] - legacyPackageSetJson :: String legacyPackageSetJson = """{ @@ -200,22 +191,3 @@ mkManifest (Tuple name version) deps = do (PackageName.print name) (LenientVersion.print version) (map (bimap PackageName.print (LenientVersion.version >>> toRange)) deps) - -unsafeMetadataEntry :: Tuple PackageName LenientVersion -> Tuple PackageName Metadata -unsafeMetadataEntry (Tuple name version) = do - let - published = - { ref: LenientVersion.raw version - , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" - , bytes: 0.0 - , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom - } - - metadata = Metadata - { location: GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print name, subdir: Nothing } - , owners: Nothing - , published: Map.singleton (LenientVersion.version version) published - , unpublished: Map.empty - } - - Tuple name metadata diff --git a/app/test/App/Manifest/SpagoYaml.purs b/app/test/App/Manifest/SpagoYaml.purs index 973af0a99..52174063c 100644 --- a/app/test/App/Manifest/SpagoYaml.purs +++ b/app/test/App/Manifest/SpagoYaml.purs @@ -19,6 +19,6 @@ spec = do config <- SpagoYaml.readSpagoYaml (Path.concat [ fixturesPath, path ]) >>= case _ of Left err -> Aff.throwError $ Aff.error err Right config -> pure config - case SpagoYaml.spagoYamlToManifest config of + case SpagoYaml.spagoYamlToManifest "v1.0.0" config of Left err -> Assert.fail $ path <> " failed: " <> err Right _ -> pure unit diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 
2eaca689d..a858dc675 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -11,22 +11,28 @@ module Registry.Test.Assert.Run import Registry.App.Prelude import Data.Array as Array +import Data.Exists as Exists import Data.Foldable (class Foldable) import Data.Foldable as Foldable import Data.FunctorWithIndex (mapWithIndex) import Data.Map as Map import Data.Set as Set import Data.String as String +import Dodo as Dodo import Effect.Aff as Aff import Effect.Now as Now import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (LogLevel) +import Registry.App.API (COMPILER_CACHE) +import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -39,7 +45,7 @@ import Registry.App.Effect.Pursuit (PURSUIT, Pursuit(..)) import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry (REGISTRY, Registry(..)) import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE, Source(..)) +import Registry.App.Effect.Source (FetchError(..), SOURCE, Source(..)) import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE, Storage) import Registry.App.Effect.Storage as Storage @@ -48,6 +54,7 @@ import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Prelude as Either import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (GitHubError(..), IssueNumber(..)) +import Registry.Foreign.Tar as Foreign.Tar import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName import Registry.Test.Utils as Utils @@ -77,13 +84,14 @@ type TEST_EFFECTS = + PACKAGE_SETS + STORAGE + SOURCE + + ARCHIVE + GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + RESOURCE_ENV + GITHUB_CACHE + LEGACY_CACHE - + COMMENT + + COMPILER_CACHE + LOG + EXCEPT String + AFF @@ -93,16 +101,18 @@ type TEST_EFFECTS = type TestEnv = { workdir :: FilePath + , logs :: Ref (Array (Tuple LogLevel String)) , metadata :: Ref (Map PackageName Metadata) , index :: Ref ManifestIndex , pursuitExcludes :: Set PackageName , storage :: FilePath + , archive :: FilePath , github :: FilePath , username :: String } -runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff a -runTestEffects env operation = do +runTestEffects :: forall a. 
TestEnv -> Run TEST_EFFECTS a -> Aff (Either Aff.Error a) +runTestEffects env operation = Aff.attempt do resourceEnv <- Env.lookupResourceEnv githubCache <- liftEffect Cache.newCacheRef legacyCache <- liftEffect Cache.newCacheRef @@ -112,24 +122,25 @@ runTestEffects env operation = do # PackageSets.interpret handlePackageSetsMock # Storage.interpret (handleStorageMock { storage: env.storage }) # Source.interpret (handleSourceMock { github: env.github }) + # Archive.interpret (handleArchiveMock { metadataRef: env.metadata, archive: env.archive }) # GitHub.interpret (handleGitHubMock { github: env.github }) -- Environments # Env.runGitHubEventEnv { username: env.username, issue: IssueNumber 1 } # Env.runPacchettiBottiEnv { publicKey: "Unimplemented", privateKey: "Unimplemented" } # Env.runResourceEnv resourceEnv -- Caches + # runCompilerCacheMock # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog - # Log.interpret (\(Log _ _ next) -> pure next) + # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) # Run.runBaseAff' -- | For testing simple Run functions that don't need the whole environment. runBaseEffects :: forall a. Run (LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff a -runBaseEffects = +runBaseEffects = do Log.interpret (\(Log _ _ next) -> pure next) -- Base effects >>> Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) @@ -141,6 +152,22 @@ runLegacyCacheMemory = Cache.interpret Legacy.Manifest._legacyCache <<< Cache.ha runGitHubCacheMemory :: forall r a. CacheRef -> Run (GITHUB_CACHE + LOG + EFFECT + r) a -> Run (LOG + EFFECT + r) a runGitHubCacheMemory = Cache.interpret GitHub._githubCache <<< Cache.handleMemory +runCompilerCacheMock :: forall r a. Run (COMPILER_CACHE + LOG + r) a -> Run (LOG + r) a +runCompilerCacheMock = Cache.interpret API._compilerCache case _ of + Cache.Get key -> Exists.runExists getImpl (Cache.encodeFs key) + Cache.Put _ next -> pure next + Cache.Delete key -> Exists.runExists deleteImpl (Cache.encodeFs key) + where + getImpl :: forall x z. Cache.FsEncoding Cache.Reply x z -> Run _ x + getImpl = case _ of + Cache.AsBuffer _ (Cache.Reply reply) -> pure $ reply Nothing + Cache.AsJson _ _ (Cache.Reply reply) -> pure $ reply Nothing + + deleteImpl :: forall x z. 
Cache.FsEncoding Cache.Ignore x z -> Run _ x + deleteImpl = case _ of + Cache.AsBuffer _ (Cache.Ignore next) -> pure next + Cache.AsJson _ _ (Cache.Ignore next) -> pure next + type PursuitMockEnv = { excludes :: Set PackageName , metadataRef :: Ref (Map PackageName Metadata) @@ -179,7 +206,7 @@ handleRegistryMock env = case _ of WriteManifest manifest reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left err -> pure $ reply $ Left $ "Failed to insert manifest:\n" <> Utils.unsafeStringify manifest <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -187,7 +214,7 @@ handleRegistryMock env = case _ of DeleteManifest name version reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left err -> pure $ reply $ Left $ "Failed to delete entry for :\n" <> Utils.formatPackageVersion name version <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -282,12 +309,12 @@ type SourceMockEnv = { github :: FilePath } handleSourceMock :: forall r a. SourceMockEnv -> Source a -> Run (EXCEPT String + AFF + EFFECT + r) a handleSourceMock env = case _ of - Fetch _source destination location ref reply -> do + Fetch destination location ref reply -> do now <- Run.liftEffect Now.nowDateTime case location of - Git _ -> pure $ reply $ Left "Packages cannot be published from Git yet (only GitHub)." - GitHub { subdir } | isJust subdir -> pure $ reply $ Left "Packages cannot use the 'subdir' key yet." - GitHub { repo } -> do + Git _ -> pure $ reply $ Left GitHubOnly + GitHub { subdir } | isJust subdir -> pure $ reply $ Left NoSubdir + GitHub { owner, repo } -> do let name = stripPureScriptPrefix repo fixedRef = fromMaybe ref $ String.stripPrefix (String.Pattern "v") ref @@ -295,7 +322,7 @@ handleSourceMock env = case _ of localPath = Path.concat [ env.github, dirname ] destinationPath = Path.concat [ destination, dirname <> "-checkout" ] Run.liftAff (Aff.attempt (FS.Aff.stat localPath)) >>= case _ of - Left _ -> pure $ reply $ Left $ "Cannot copy " <> localPath <> " because it does not exist." + Left _ -> pure $ reply $ Left $ InaccessibleRepo { owner, repo } Right _ -> do Run.liftAff $ FS.Extra.copy { from: localPath, to: destinationPath, preserveTimestamps: true } case pursPublishMethod of @@ -367,3 +394,73 @@ handleGitHubMock env = case _ of -- currently used in tests. GetCommitDate _address _ref reply -> pure $ reply $ Left $ UnexpectedError "Unimplemented" + +type ArchiveMockEnv = + { metadataRef :: Ref (Map PackageName Metadata) + , archive :: FilePath + } + +-- | A mock implementation for the ARCHIVE effect that uses the registry-archive +-- | fixtures as the archive source. Archive tarballs are expected to be in the +-- | same format as storage tarballs (name-version.tar.gz). +handleArchiveMock :: forall r a. ArchiveMockEnv -> Archive.Archive a -> Run (AFF + EFFECT + r) a +handleArchiveMock env = case _ of + Archive.Fetch destination name version reply -> map (map reply) Except.runExcept do + -- For testing, we look up publishedTime from metadata if available, but + -- fall back to current time if not (to support tests where metadata has + -- been modified but tarballs still exist). 
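+    -- NOTE (illustrative walk-through, not part of the original change):
+    -- fetching a hypothetical effect@4.0.0 copies
+    --   <archive>/effect-4.0.0.tar.gz  ->  <destination>/4.0.0.tar.gz
+    -- then extracts the tarball and renames its top-level directory to
+    -- <destination>/effect-4.0.0-archive so it cannot collide with the
+    -- packaging directory.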
+ now <- Run.liftEffect Now.nowDateTime + metadata <- Run.liftEffect (Ref.read env.metadataRef) + let + publishedTime = fromMaybe now do + Metadata m <- Map.lookup name metadata + publishedMeta <- Map.lookup version m.published + pure publishedMeta.publishedTime + + let + tarballName = Version.print version <> ".tar.gz" + sourcePath = Path.concat [ env.archive, PackageName.print name <> "-" <> Version.print version <> ".tar.gz" ] + absoluteTarballPath = Path.concat [ destination, tarballName ] + + Run.liftAff (Aff.attempt (FS.Aff.stat sourcePath)) >>= case _ of + Left _ -> + Except.throw $ Archive.DownloadFailed name version "Tarball not found in mock archive" + Right _ -> + Run.liftAff (Aff.attempt (FS.Aff.copyFile sourcePath absoluteTarballPath)) >>= case _ of + Left error -> + Except.throw $ Archive.DownloadFailed name version (Aff.message error) + Right _ -> + pure unit + + extractedPath <- Run.liftAff $ Foreign.Tar.getToplevelDir absoluteTarballPath + case extractedPath of + Nothing -> + Except.throw $ Archive.ExtractionFailed name version "Tarball has no top-level directory" + Just path -> do + Run.liftAff $ Tar.extract { cwd: destination, archive: tarballName } + -- Rename to avoid conflict with packaging directory (same as source mock's "-checkout" suffix) + -- Strip trailing slash if present + let cleanPath = fromMaybe path $ String.stripSuffix (String.Pattern "/") path + let extractedDir = Path.concat [ destination, cleanPath ] + let finalPath = Path.concat [ destination, cleanPath <> "-archive" ] + Run.liftAff $ FS.Aff.rename extractedDir finalPath + + -- Initialize a git repo for purs publish (same as source mock) + -- We do this inside liftAff to avoid EXCEPT type mismatch with Git.withGit + Run.liftAff $ case pursPublishMethod of + LegacyPursPublish -> do + FS.Aff.writeTextFile UTF8 (Path.concat [ finalPath, ".gitignore" ]) "output" + let exec args = void $ Git.gitCLI args (Just finalPath) + let ref = "v" <> Version.print version + exec [ "init" ] + exec [ "config", "user.name", "test-user" ] + exec [ "config", "user.email", "test-user@aol.com" ] + exec [ "config", "commit.gpgSign", "false" ] + exec [ "config", "tag.gpgSign", "false" ] + exec [ "add", "." 
] + exec [ "commit", "-m", "Initial commit" ] + exec [ "tag", "-m", ref, ref ] + PursPublish -> + pure unit + + pure { path: finalPath, published: publishedTime } diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..35b43b5c0 --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,78 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Publishing jobs +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Unpublishing jobs +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package transfer jobs +CREATE TABLE transfer_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
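+  -- NOTE (illustrative): the payload stores the solved resolutions as a JSON
+  -- object of package names to versions, e.g. (hypothetical contents):
+  --   {"effect": "4.0.0", "prelude": "6.0.1"}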
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Admin jobs (scheduled tasks and manual package set operations) +CREATE TABLE admin_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + adminJobType TEXT NOT NULL, -- 'package_transfer', 'legacy_import', 'package_set_update', 'package_set_operation' + payload JSON NOT NULL, + -- Keep these for manual package set operations (authenticated API requests) + rawPayload TEXT, + signature TEXT, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE publish_jobs; +DROP TABLE unpublish_jobs; +DROP TABLE transfer_jobs; +DROP TABLE matrix_jobs; +DROP TABLE admin_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..803b621c7 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,61 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE transfer_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE admin_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + adminJobType TEXT NOT NULL, -- 'package_transfer', 'legacy_import', 'package_set_update', 'package_set_operation' + payload JSON NOT NULL, + -- Keep these for manual package set operations (authenticated API requests) + rawPayload TEXT, + signature TEXT, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/flake.lock b/flake.lock index c7ffaaad8..456e35c15 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1765855794, - "narHash": "sha256-aG/E/kJ5PpEbrlVU+QHaCFm3ULOwL0ni85ONQef35pk=", + "lastModified": 1765976197, + "narHash": "sha256-KiX3eomD6ajjJ8ByA/cM1G7RbOjFbr3b+aX909i8K3o=", "owner": "nixos", "repo": "nixpkgs", - "rev": "33c80e50d3d783a58107326539e15181971272ed", + "rev": "5e38e4851ce6c82c2409b2b5616b4f3c69d6497e", "type": "github" }, "original": { @@ -83,11 +83,11 @@ ] }, "locked": { - "lastModified": 1765858580, - "narHash": "sha256-KEDJMxXKSEgywe3I7PTc2m5dAI2dTQwzzylo2cnU3+U=", + "lastModified": 1765940228, + "narHash": "sha256-G21SwmQsdMLfBIyhLtlPiAHkqOSJzNXTqnFGtMYGxAU=", "owner": "thomashoneyman", "repo": "purescript-overlay", - "rev": "45bbb4f5e657080adfd01d629ee6150c4b8c36f8", + "rev": "acca6cfb1b9605b8755b238285fe69ee4090a510", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 6546852a9..bbec41157 100644 --- a/flake.nix +++ b/flake.nix @@ -51,15 +51,19 @@ fileset.fileFilter (f: f.name == "package.json" || f.name == "package-lock.json") ./. ); + # The location of the Dhall type specifications, used to type-check manifests. DHALL_TYPES = ./types; - GIT_LFS_SKIP_SMUDGE = 1; DHALL_PRELUDE = "${ builtins.fetchGit { url = "https://github.com/dhall-lang/dhall-lang"; - rev = "e35f69d966f205fdc0d6a5e8d0209e7b600d90b3"; + rev = "25cf020ab307cb2d66826b0d1ddac8bc89241e27"; } }/Prelude/package.dhall"; + # We disable git from entering interactive mode at any time, as there is no + # one there to answer prompts. 
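+  # NOTE (illustrative): with prompts disabled, an authenticated fetch fails
+  # fast instead of hanging, e.g.:
+  #   $ GIT_TERMINAL_PROMPT=0 git clone https://github.com/some-org/private-repo
+  #   fatal: could not read Username for 'https://github.com': terminal prompts disabled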
+ GIT_TERMINAL_PROMPT = 0; + # Build sources with filesets spagoSrc = fileset.toSource { root = ./.; @@ -128,6 +132,7 @@ spago-test = pkgs.runCommand "spago-test" { + inherit DHALL_TYPES DHALL_PRELUDE; nativeBuildInputs = with pkgs; [ @@ -193,7 +198,8 @@ # Integration test - exercises the server API integration = import ./nix/test/integration.nix { - inherit pkgs spagoSrc testEnv; + inherit pkgs spagoSrc; + testSupport = testEnv; }; # VM smoke test - verifies deployment without full API testing @@ -205,12 +211,17 @@ devShells.default = pkgs.mkShell { name = "registry-dev"; - inherit GIT_LFS_SKIP_SMUDGE; - # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; + # Dhall environment variables needed for manifest typechecking + inherit DHALL_TYPES DHALL_PRELUDE GIT_TERMINAL_PROMPT; + + # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*) + # are NOT set here to avoid conflicting with .env files used by production scripts + # like legacy-importer. Use `nix run .#test-env` to run E2E tests with mocked services. + packages = with pkgs; registry-runtime-deps @@ -222,11 +233,19 @@ nodejs jq dbmate + sqlite purs spago purs-tidy-unstable purs-backend-es-unstable process-compose + + # E2E test runner script - uses same fixed test environment as test-env + (writeShellScriptBin "spago-test-e2e" '' + set -euo pipefail + ${testEnv.envToExports testEnv.testEnv} + exec spago run -p registry-app-e2e + '') ]; }; } @@ -261,7 +280,11 @@ # These env vars are known to Nix so we set them in advance. # Others, like credentials, must be set in a .env file in # the state directory, unless there are viable defaults. - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + inherit + DHALL_PRELUDE + DHALL_TYPES + GIT_TERMINAL_PROMPT + ; }; }; system.stateVersion = "24.05"; diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index c0258b096..41c882a97 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -28,6 +28,7 @@ module Registry.Foreign.Octokit , getRefCommitRequest , githubApiErrorCodec , githubErrorCodec + , isPermanentGitHubError , listTagsRequest , listTeamMembersRequest , newOctokit @@ -207,12 +208,17 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "Commit" $ CJ.Record.object - { committer: CJ.Record.object { date: Internal.Codec.iso8601DateTime } } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "CommitData" $ CJ.Record.object + { data: CJ.named "Commit" $ CJ.Record.object + { committer: CJ.named "Commit.committer" $ CJ.Record.object + { date: Internal.Codec.iso8601DateTime + } + } + } } where - toJsonRep date = { committer: { date } } - fromJsonRep = _.committer.date + toJsonRep date = { data: { committer: { date } } } + fromJsonRep = _.data.committer.date -- | Create a comment on an issue. Requires authentication. -- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md @@ -385,6 +391,25 @@ printGitHubError = case _ of , error ] +-- | Returns true if the error represents a permanent failure that is safe to +-- | cache across runs. Transient errors (rate limits, network issues, server +-- | errors) return false and should be retried. 
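+-- |
+-- | A sketch of the intended use when caching results, assuming a cache with
+-- | a `put` action (hypothetical API):
+-- |
+-- |   case result of
+-- |     Left err | isPermanentGitHubError err -> Cache.put key (Left err)
+-- |     Left _transient -> pure unit -- not cached; retried on a later run
+-- |     Right value -> Cache.put key (Right value)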
+-- | +-- | Permanent errors: +-- | - 404 Not Found: Resource doesn't exist at this ref/path +-- | - DecodeError: Content exists but is malformed (immutable at a given tag) +-- | +-- | Transient errors (should NOT be cached): +-- | - UnexpectedError: Network issues, DNS, TLS problems +-- | - 401/403: Auth or rate limit issues +-- | - 5xx: Server-side problems +-- | - Any other status codes +isPermanentGitHubError :: GitHubError -> Boolean +isPermanentGitHubError = case _ of + APIError { statusCode: 404 } -> true + DecodeError _ -> true + _ -> false + atKey :: forall a. String -> CJ.Codec a -> JSON.JObject -> Either CJ.DecodeError a atKey key codec object = Maybe.maybe diff --git a/foreign/src/Foreign/Tmp.js b/foreign/src/Foreign/Tmp.js index b11d10232..8995afdfc 100644 --- a/foreign/src/Foreign/Tmp.js +++ b/foreign/src/Foreign/Tmp.js @@ -3,6 +3,6 @@ import { setGracefulCleanup, dirSync } from "tmp"; setGracefulCleanup(); export const mkTmpDirImpl = () => { - const tmpobj = dirSync(); + const tmpobj = dirSync({ template: 'XXXXXX' }); return tmpobj.name; }; diff --git a/lib/fixtures/manifests/aff-5.1.2.json b/lib/fixtures/manifests/aff-5.1.2.json index 22684f05c..77bb331dd 100644 --- a/lib/fixtures/manifests/aff-5.1.2.json +++ b/lib/fixtures/manifests/aff-5.1.2.json @@ -6,6 +6,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-aff" }, + "ref": "v5.1.2", "dependencies": { "datetime": ">=4.0.0 <5.0.0", "effect": ">=2.0.0 <3.0.0", diff --git a/lib/fixtures/manifests/mysql-4.1.1.json b/lib/fixtures/manifests/mysql-4.1.1.json index 6f9703b61..e0e8c70fe 100644 --- a/lib/fixtures/manifests/mysql-4.1.1.json +++ b/lib/fixtures/manifests/mysql-4.1.1.json @@ -6,6 +6,7 @@ "githubOwner": "oreshinya", "githubRepo": "purescript-mysql" }, + "ref": "v4.1.1", "dependencies": { "aff": ">=5.0.2 <6.0.0", "js-date": ">=6.0.0 <7.0.0", diff --git a/lib/fixtures/manifests/prelude-4.1.1.json b/lib/fixtures/manifests/prelude-4.1.1.json index 3dd47411c..56ac6db20 100644 --- a/lib/fixtures/manifests/prelude-4.1.1.json +++ b/lib/fixtures/manifests/prelude-4.1.1.json @@ -7,6 +7,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-prelude" }, + "ref": "v4.1.1", "owners": [ { "keytype": "ed-25519", diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..978ccdac3 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,40 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( AdminJobData + , AdminJobType(..) + , JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) + , Job(..) + , LegacyImportMode(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetUpdateMode(..) + , PublishJobData + , Route(..) 
+ , TransferJobData + , UnpublishJobData + , adminJobTypeCodec + , adminJobTypeKey + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLegacyImportMode + , printLogLevel + , printPackageSetUpdateMode + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,23 +43,33 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) -import Data.Maybe (Maybe) +import Data.Map (Map) +import Data.Maybe (Maybe(..)) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy (Proxy(..)) data Route = Publish | Unpublish | Transfer - | Jobs + | PackageSets + | Jobs { since :: Maybe DateTime, include_completed :: Maybe Boolean } | Job JobId { level :: Maybe LogLevel, since :: Maybe DateTime } | Status @@ -37,7 +80,11 @@ routes = Routing.root $ Routing.prefix "api" $ Routing.prefix "v1" $ RoutingG.su { "Publish": "publish" / RoutingG.noArgs , "Unpublish": "unpublish" / RoutingG.noArgs , "Transfer": "transfer" / RoutingG.noArgs - , "Jobs": "jobs" / RoutingG.noArgs + , "PackageSets": "package-sets" / RoutingG.noArgs + , "Jobs": "jobs" ? + { since: Routing.optional <<< timestampP <<< Routing.string + , include_completed: Routing.optional <<< Routing.boolean + } , "Job": "jobs" / ( jobIdS ? 
{ level: Routing.optional <<< logLevelP <<< Routing.string @@ -64,55 +111,279 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | AdminJob AdminJobData + +type JobInfo r = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +-- | Admin job types for scheduled operations and manual package set updates +data AdminJobType + = AdminPackageTransfer + | AdminLegacyImport LegacyImportMode + | AdminPackageSetUpdate PackageSetUpdateMode + | AdminPackageSetOperation PackageSetOperation -- For manual API requests + +derive instance Eq AdminJobType + +data LegacyImportMode = DryRun | GenerateRegistry | UpdateRegistry + +derive instance Eq LegacyImportMode + +data PackageSetUpdateMode = GeneratePackageSet | CommitPackageSet + +derive instance Eq PackageSetUpdateMode + +-- | Returns the key used in the database for an admin job type +adminJobTypeKey :: AdminJobType -> String +adminJobTypeKey = case _ of + AdminPackageTransfer -> "package_transfer" + AdminLegacyImport _ -> "legacy_import" + AdminPackageSetUpdate _ -> "package_set_update" + AdminPackageSetOperation _ -> "package_set_operation" + +type AdminJobData = JobInfo + ( adminJobType :: AdminJobType + , jobType :: Proxy "admin" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map AdminJob (Codec.decode adminJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + AdminJob j -> CJ.encode adminJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec , 
packageName: PackageName.codec - , ref: CJ.string + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +adminJobDataCodec :: CJ.Codec AdminJobData +adminJobDataCodec = CJ.named "AdminJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "admin") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , adminJobType: adminJobTypeCodec + } + +adminJobTypeCodec :: CJ.Codec AdminJobType +adminJobTypeCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError AdminJobType + decode json = do + obj <- Codec.decode (CJ.Record.object { type: CJ.string }) json + case obj.type of + "package_transfer" -> pure AdminPackageTransfer + "legacy_import" -> + map (\{ mode } -> AdminLegacyImport mode) + (Codec.decode (CJ.Record.object { mode: legacyImportModeCodec }) json) + "package_set_update" -> + map (\{ mode } -> AdminPackageSetUpdate mode) + (Codec.decode (CJ.Record.object { mode: packageSetUpdateModeCodec }) json) + "package_set_operation" -> + map (\{ payload } -> AdminPackageSetOperation payload) + (Codec.decode (CJ.Record.object { payload: Operation.packageSetOperationCodec }) json) + other -> except $ Left $ CJ.DecodeError.basic $ "Unknown admin job type: " <> other + + encode :: AdminJobType -> JSON + 
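+  -- NOTE (illustrative): e.g. `AdminLegacyImport UpdateRegistry` round-trips
+  -- through this codec as:
+  --   { "type": "legacy_import", "mode": "update_registry" }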
+adminJobTypeCodec :: CJ.Codec AdminJobType
+adminJobTypeCodec = Codec.codec' decode encode
+  where
+  decode :: JSON -> Except CJ.DecodeError AdminJobType
+  decode json = do
+    obj <- Codec.decode (CJ.Record.object { type: CJ.string }) json
+    case obj.type of
+      "package_transfer" -> pure AdminPackageTransfer
+      "legacy_import" ->
+        map (\{ mode } -> AdminLegacyImport mode)
+          (Codec.decode (CJ.Record.object { mode: legacyImportModeCodec }) json)
+      "package_set_update" ->
+        map (\{ mode } -> AdminPackageSetUpdate mode)
+          (Codec.decode (CJ.Record.object { mode: packageSetUpdateModeCodec }) json)
+      "package_set_operation" ->
+        map (\{ payload } -> AdminPackageSetOperation payload)
+          (Codec.decode (CJ.Record.object { payload: Operation.packageSetOperationCodec }) json)
+      other -> except $ Left $ CJ.DecodeError.basic $ "Unknown admin job type: " <> other
+
+  encode :: AdminJobType -> JSON
+  encode = case _ of
+    AdminPackageTransfer ->
+      CJ.encode (CJ.Record.object { type: CJ.string }) { type: "package_transfer" }
+    AdminLegacyImport mode ->
+      CJ.encode (CJ.Record.object { type: CJ.string, mode: legacyImportModeCodec })
+        { type: "legacy_import", mode }
+    AdminPackageSetUpdate mode ->
+      CJ.encode (CJ.Record.object { type: CJ.string, mode: packageSetUpdateModeCodec })
+        { type: "package_set_update", mode }
+    AdminPackageSetOperation payload ->
+      CJ.encode (CJ.Record.object { type: CJ.string, payload: Operation.packageSetOperationCodec })
+        { type: "package_set_operation", payload }
+
+legacyImportModeCodec :: CJ.Codec LegacyImportMode
+legacyImportModeCodec = CJ.Sum.enumSum printLegacyImportMode parseLegacyImportMode
+  where
+  parseLegacyImportMode = case _ of
+    "dry_run" -> Just DryRun
+    "generate_registry" -> Just GenerateRegistry
+    "update_registry" -> Just UpdateRegistry
+    _ -> Nothing
+
+printLegacyImportMode :: LegacyImportMode -> String
+printLegacyImportMode = case _ of
+  DryRun -> "dry_run"
+  GenerateRegistry -> "generate_registry"
+  UpdateRegistry -> "update_registry"
+
+packageSetUpdateModeCodec :: CJ.Codec PackageSetUpdateMode
+packageSetUpdateModeCodec = CJ.Sum.enumSum printPackageSetUpdateMode parsePackageSetUpdateMode
+  where
+  parsePackageSetUpdateMode = case _ of
+    "generate" -> Just GeneratePackageSet
+    "commit" -> Just CommitPackageSet
+    _ -> Nothing
+
+printPackageSetUpdateMode :: PackageSetUpdateMode -> String
+printPackageSetUpdateMode = case _ of
+  GeneratePackageSet -> "generate"
+  CommitPackageSet -> "commit"
+
+jobInfo :: Job -> JobInfo ()
+jobInfo = case _ of
+  PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  AdminJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+
 newtype JobId = JobId String
 
 derive instance Newtype JobId _
+derive newtype instance Eq JobId
 
 jobIdCodec :: CJ.Codec JobId
 jobIdCodec = Profunctor.wrapIso JobId CJ.string
 
-data JobType = PublishJob | UnpublishJob | TransferJob
+data JobType
+  = PublishJobType
+  | UnpublishJobType
+  | TransferJobType
+  | MatrixJobType
+  | AdminJobType
 
 derive instance Eq JobType
 
-parseJobType :: String -> Either String JobType
-parseJobType = case _ of
-  "publish" -> Right PublishJob
-  "unpublish" -> Right UnpublishJob
-  "transfer" -> Right TransferJob
-  j -> Left $ "Invalid job type " <> show j
-
 printJobType :: JobType -> String
 printJobType = case _ of
-  PublishJob -> "publish"
-  UnpublishJob -> "unpublish"
-  TransferJob -> "transfer"
-
-jobTypeCodec :: CJ.Codec JobType
-jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType)
+  PublishJobType -> "publish"
+  UnpublishJobType -> "unpublish"
+  TransferJobType -> "transfer"
+  MatrixJobType -> "matrix"
+  AdminJobType -> "admin"
 
 type LogLine =
   { level :: LogLevel
@@ -129,7 +400,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object
   , timestamp: Internal.Codec.iso8601DateTime
   }
 
-data LogLevel = Debug | Info | Warn | Error
+data LogLevel = Debug | Info | Warn | Notice | Error
 
 derive instance Eq LogLevel
 derive instance Ord LogLevel
 
@@ -139,6 +410,7 @@ printLogLevel = case _ of
   Debug -> "DEBUG"
   Info -> "INFO"
   Warn -> "WARN"
+  Notice -> "NOTICE"
   Error -> "ERROR"
 
 -- These numbers are not consecutive so that we can insert new log levels if need be
@@ -147,6 +419,7 @@ logLevelToPriority = case _ of
   Debug -> 0
   Info -> 10
   Warn -> 20
+  Notice -> 25
   Error -> 30
 
 logLevelFromPriority :: Int -> Either String LogLevel
@@ -154,6 +427,7 @@ logLevelFromPriority = case _ of
   0 -> Right Debug
   10 -> Right Info
   20 -> Right Warn
+  25 -> Right Notice
   30 -> Right Error
   other -> Left $ "Invalid log level priority: " <> show other
 
@@ -162,5 +436,6 @@ parseLogLevel = case _ of
   "DEBUG" -> Right Debug
   "INFO" -> Right Info
   "WARN" -> Right Warn
+  "NOTICE" -> Right Notice
   "ERROR" -> Right Error
   other -> Left $ "Invalid log level: " <> other
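Because `Notice` slots into the deliberately non-consecutive priority scale at 25, between `Warn` (20) and `Error` (30), the priority round-trip should still hold for every constructor. A small property sketch (not part of the diff), using only the functions defined above:

```purescript
-- Expected to hold for Debug, Info, Warn, Notice, and Error alike.
priorityRoundTrips :: LogLevel -> Boolean
priorityRoundTrips level =
  logLevelFromPriority (logLevelToPriority level) == Right level
```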
diff --git a/lib/src/Manifest.purs b/lib/src/Manifest.purs
index d660b459b..49bb62f2c 100644
--- a/lib/src/Manifest.purs
+++ b/lib/src/Manifest.purs
@@ -48,6 +48,7 @@ newtype Manifest = Manifest
   , version :: Version
   , license :: License
   , location :: Location
+  , ref :: String
   , owners :: Maybe (NonEmptyArray Owner)
   , description :: Maybe String
   , includeFiles :: Maybe (NonEmptyArray NonEmptyString)
@@ -77,6 +78,7 @@ codec = Profunctor.wrapIso Manifest $ CJ.named "Manifest" $ CJ.object
   $ CJ.recordProp @"license" License.codec
   $ CJ.recordPropOptional @"description" (Internal.Codec.limitedString 300)
   $ CJ.recordProp @"location" Location.codec
+  $ CJ.recordProp @"ref" CJ.string
   $ CJ.recordPropOptional @"owners" (CJ.Common.nonEmptyArray Owner.codec)
   $ CJ.recordPropOptional @"includeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString)
   $ CJ.recordPropOptional @"excludeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString)
diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs
index c867b5d9b..eb3b08480 100644
--- a/lib/src/ManifestIndex.purs
+++ b/lib/src/ManifestIndex.purs
@@ -7,11 +7,13 @@
 -- | https://github.com/purescript/registry-index
 module Registry.ManifestIndex
   ( ManifestIndex
+  , IncludeRanges(..)
+  , delete
+  , dependants
   , empty
   , fromSet
   , insert
   , insertIntoEntryFile
-  , delete
   , lookup
   , maximalIndex
   , packageEntryDirectory
@@ -20,10 +22,10 @@ module Registry.ManifestIndex
   , printEntry
   , readEntryFile
   , removeFromEntryFile
+  , toArray
   , toMap
-  , toSortedArray
   , topologicalSort
-  , IncludeRanges(..)
+  , toSortedArray
   , writeEntryFile
   ) where
 
@@ -87,13 +89,18 @@ empty = ManifestIndex Map.empty
 toMap :: ManifestIndex -> Map PackageName (Map Version Manifest)
 toMap (ManifestIndex index) = index
 
--- | Produce an array of manifests topologically sorted by dependencies.
-toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest
-toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do
+-- | Produce an array of all the manifests.
+toArray :: ManifestIndex -> Array Manifest
+toArray (ManifestIndex index) = do
   Tuple _ versions <- Map.toUnfoldableUnordered index
   Tuple _ manifest <- Map.toUnfoldableUnordered versions
   [ manifest ]
 
+-- | Produce an array of all the manifests, topologically sorted by dependencies.
+toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest
+toSortedArray includeRanges index =
+  topologicalSort includeRanges $ Set.fromFoldable $ toArray index
+
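A short sketch of how the split traversals relate (hypothetical consumer code, not from the diff): `toArray` is the new order-agnostic listing, while `toSortedArray` layers the topological sort on top of it, exactly as its definition above shows:

```purescript
-- Every manifest in the index, with no ordering guarantees.
allManifests :: ManifestIndex -> Array Manifest
allManifests = ManifestIndex.toArray

-- The same manifests, topologically sorted by their dependencies, taking
-- declared version ranges into account when building dependency edges.
sortedManifests :: ManifestIndex -> Array Manifest
sortedManifests = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges
```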
 -- | Look up a package version's manifest in the manifest index.
 lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest
 lookup name version (ManifestIndex index) =
@@ -103,25 +110,18 @@ lookup name version (ManifestIndex index) =
 -- | Insert a new manifest into the manifest index, failing if the manifest
 -- | indicates dependencies that cannot be satisfied. Dependencies are not
 -- | satisfied if the package is not in the index.
-insert :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex
-insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do
+insert :: IncludeRanges -> Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex
+insert consider manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do
   let
     unsatisfied :: Map PackageName Range
     unsatisfied = Map.fromFoldable do
       Tuple dependency range <- Map.toUnfoldable dependencies
       case Map.lookup dependency index of
-        Just _versions ->
-          -- Ideally we would enforce that inserting a manifest requires that
-          -- at least one version exists in the index in the given range already
-          -- Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) ->
-          --
-          -- However, to be somewhat lenient on what packages can be admitted to
-          -- the official index, we just look to see the package name exists.
-          --
-          -- Note that if we _do_ add this check later on, we will need to
-          -- produce an alternate version that does not check version bounds for
-          -- use in validatiing package sets, ie. 'maximalIndexIgnoringBounds'
-          []
+        Just versions -> case consider of
+          IgnoreRanges -> []
+          ConsiderRanges
+            | Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> []
+            | otherwise -> [ Tuple dependency range ]
         _ ->
           [ Tuple dependency range ]
 
@@ -137,12 +137,12 @@ insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index)
 -- | package names (and not package versions), it is always acceptable to delete
 -- | a package version so long as it has at least 2 versions. However, removing
 -- | a package altogether incurs a full validation check.
-delete :: PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
-delete name version (ManifestIndex index) = do
+delete :: IncludeRanges -> PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
+delete consider name version (ManifestIndex index) = do
   case Map.lookup name index of
     Nothing -> pure (ManifestIndex index)
     Just versionsMap | Map.size versionsMap == 1 ->
-      fromSet $ Set.fromFoldable do
+      fromSet consider $ Set.fromFoldable do
         Tuple _ versions <- Map.toUnfoldableUnordered (Map.delete name index)
         Tuple _ manifest <- Map.toUnfoldableUnordered versions
         [ manifest ]
@@ -151,21 +151,21 @@ delete name version (ManifestIndex index) = do
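A hedged illustration of the new `IncludeRanges` parameter to `insert` (hypothetical call sites): under `ConsiderRanges` a dependency is satisfied only if some indexed version actually falls inside the declared range, while `IgnoreRanges` keeps the older, lenient behavior of checking that the dependency's name exists at all:

```purescript
insertStrict :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex
insertStrict = ManifestIndex.insert ManifestIndex.ConsiderRanges

insertLenient :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex
insertLenient = ManifestIndex.insert ManifestIndex.IgnoreRanges
```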
 -- | Convert a set of manifests into a `ManifestIndex`. Reports all failures
 -- | encountered rather than short-circuiting.
-fromSet :: Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
-fromSet manifests = do
-  let Tuple failed index = maximalIndex manifests
+fromSet :: IncludeRanges -> Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
+fromSet consider manifests = do
+  let Tuple failed index = maximalIndex consider manifests
   if Map.isEmpty failed then Right index else Left failed
 
 -- | Produce the maximal `ManifestIndex` possible for the given set of
 -- | `Manifest`s, collecting failures along the way.
-maximalIndex :: Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
-maximalIndex manifests = do
+maximalIndex :: IncludeRanges -> Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex
+maximalIndex consider manifests = do
   let
-    insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert manifest index of
+    insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert consider manifest index of
       Left errors -> Tuple (Map.insertWith Map.union name (Map.singleton version errors) failed) index
      Right newIndex -> Tuple failed newIndex
 
-  Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort IgnoreRanges manifests)
+  Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort consider manifests)
 
 data IncludeRanges
   = ConsiderRanges
@@ -206,6 +206,13 @@ topologicalSort includeRanges manifests =
       IgnoreRanges -> versions
     [ Tuple dependency included ]
 
+dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest
+dependants idx packageName version = idx
+  # toSortedArray ConsiderRanges
+  # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of
+      Nothing -> false
+      Just range -> Range.includes range version
+
 -- | Calculate the directory containing this package in the registry index,
 -- | using the following format:
 -- |
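A sketch of the new `dependants` helper (the package and version values are hypothetical, built here with the test suite's unsafe constructors from `Registry.Test.Utils`): it answers "who would be affected" by filtering for manifests whose declared range on the given package admits the given version:

```purescript
-- Manifests that depend on prelude@6.0.1 through their declared ranges,
-- e.g. the packages that would break if that version were unpublished.
affectedByPrelude :: ManifestIndex -> Array Manifest
affectedByPrelude index =
  ManifestIndex.dependants index (unsafePackageName "prelude") (unsafeVersion "6.0.1")
```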
diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs
index 62fe3c5e8..3235661de 100644
--- a/lib/src/Metadata.purs
+++ b/lib/src/Metadata.purs
@@ -37,6 +37,7 @@ import Registry.Owner as Owner
 import Registry.Sha256 (Sha256)
 import Registry.Sha256 as Sha256
 import Registry.Version (Version)
+import Registry.Version as Version
 
 -- | A record of all published and unpublished versions of a package, along with
 -- | the last-used location and any owners (public keys) authorized to take
@@ -62,22 +63,19 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object $ CJ.record
 
 -- | Metadata about a published package version.
--- |
--- | NOTE: The `ref` field is UNSPECIFIED and WILL BE REMOVED in the future. Do
--- | not rely on its presence!
 type PublishedMetadata =
   { bytes :: Number
+  , compilers :: NonEmptyArray Version
   , hash :: Sha256
   , publishedTime :: DateTime
-  , ref :: String
   }
 
 publishedMetadataCodec :: CJ.Codec PublishedMetadata
 publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object
   { bytes: CJ.number
+  , compilers: CJ.Common.nonEmptyArray Version.codec
   , hash: Sha256.codec
   , publishedTime: Internal.Codec.iso8601DateTime
-  , ref: CJ.string
   }
 
 -- | Metadata about an unpublished package version.
diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs
index 98c35f092..7327001e6 100644
--- a/lib/src/Operation.purs
+++ b/lib/src/Operation.purs
@@ -14,16 +14,21 @@
 -- | are well-formed, and JSON codecs package managers can use to construct the
 -- | requests necessary to send to the Registry API or publish in a GitHub issue.
 module Registry.Operation
-  ( AuthenticatedPackageOperation(..)
-  , AuthenticatedData
+  ( AuthenticatedData
+  , AuthenticatedPackageOperation(..)
   , PackageOperation(..)
   , PackageSetOperation(..)
   , PackageSetUpdateData
+  , PackageSetUpdateRequest
   , PublishData
   , TransferData
   , UnpublishData
   , authenticatedCodec
+  , packageName
+  , packageOperationCodec
+  , packageSetOperationCodec
   , packageSetUpdateCodec
+  , packageSetUpdateRequestCodec
   , publishCodec
   , transferCodec
   , unpublishCodec
@@ -58,6 +63,25 @@ data PackageOperation
 
 derive instance Eq PackageOperation
 
+packageName :: PackageOperation -> PackageName
+packageName = case _ of
+  Publish { name } -> name
+  Authenticated { payload } -> case payload of
+    Unpublish { name } -> name
+    Transfer { name } -> name
+
+-- | A codec for encoding and decoding a `PackageOperation` as JSON.
+packageOperationCodec :: CJ.Codec PackageOperation
+packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode
+  where
+  decode json =
+    map Publish (Codec.decode publishCodec json)
+      <|> map Authenticated (Codec.decode authenticatedCodec json)
+
+  encode = case _ of
+    Publish publish -> CJ.encode publishCodec publish
+    Authenticated authenticated -> CJ.encode authenticatedCodec authenticated
+
 -- | An operation supported by the registry HTTP API for package operations and
 -- | which must be authenticated.
 data AuthenticatedPackageOperation
@@ -74,6 +98,7 @@ type PublishData =
   { name :: PackageName
   , location :: Maybe Location
   , ref :: String
+  , version :: Version
   , compiler :: Version
   , resolutions :: Maybe (Map PackageName Version)
   }
@@ -84,6 +109,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object
   { name: PackageName.codec
   , location: CJ.Record.optional Location.codec
   , ref: CJ.string
+  , version: Version.codec
   , compiler: Version.codec
   , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec)
   }
@@ -178,6 +204,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData
 
 derive instance Eq PackageSetOperation
 
+-- | A codec for encoding and decoding a `PackageSetOperation` as JSON.
+packageSetOperationCodec :: CJ.Codec PackageSetOperation
+packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode
+  where
+  decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json)
+  encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update
+
 -- | Submit a batch update to the most recent package set.
 -- |
 -- | For full details, see the registry spec:
@@ -197,3 +230,33 @@ packageSetUpdateCodec = CJ.named "PackageSetUpdate" $ CJ.Record.object
     -- `Compat` version of the `maybe` codec.
   , packages: Internal.Codec.packageMap (CJ.Common.nullable Version.codec)
   }
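Because the `packageOperationCodec` added above tries the publish shape first and falls back to the authenticated shape, a single endpoint can accept either kind of payload. A minimal sketch (not part of the diff), assuming `CJ.decode` returns `Either` as elsewhere in this module and a hypothetical `json` input:

```purescript
describeOperation :: JSON -> String
describeOperation json = case CJ.decode packageOperationCodec json of
  Right op -> "package operation for " <> PackageName.print (packageName op)
  Left _ -> "not a recognized package operation"
```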
+
+-- | A package set update request that can be optionally authenticated.
+-- |
+-- | Non-trustees can submit add/upgrade operations without authentication.
+-- | Trustees must sign requests for restricted operations (compiler changes,
+-- | package removals) with pacchettibotti's key.
+type PackageSetUpdateRequest =
+  { payload :: PackageSetOperation
+  , rawPayload :: String
+  , signature :: Maybe Signature
+  }
+
+-- | A codec for encoding and decoding a `PackageSetUpdateRequest` as JSON.
+packageSetUpdateRequestCodec :: CJ.Codec PackageSetUpdateRequest
+packageSetUpdateRequestCodec = CJ.named "PackageSetUpdateRequest" $ Codec.codec' decode encode
+  where
+  decode json = do
+    rep <- Codec.decode repCodec json
+    payloadJson <- except $ lmap JSON.DecodeError.basic $ JSON.parse rep.payload
+    operation <- Codec.decode packageSetOperationCodec payloadJson
+    pure { payload: operation, rawPayload: rep.payload, signature: map Signature rep.signature }
+
+  encode { rawPayload, signature } =
+    CJ.encode repCodec { payload: rawPayload, signature: map (\(Signature s) -> s) signature }
+
+  repCodec :: CJ.Codec { payload :: String, signature :: Maybe String }
+  repCodec = CJ.named "PackageSetUpdateRequestRep" $ CJ.Record.object
+    { payload: CJ.string
+    , signature: CJ.Record.optional CJ.string
+    }
diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs
index 0dc31e283..c842145d9 100644
--- a/lib/src/Operation/Validation.purs
+++ b/lib/src/Operation/Validation.purs
@@ -5,10 +5,10 @@ import Prelude
 import Data.Array as Array
 import Data.Array.NonEmpty (NonEmptyArray)
 import Data.Array.NonEmpty as NEA
+import Data.Bifunctor as Bifunctor
 import Data.DateTime (DateTime)
 import Data.DateTime as DateTime
 import Data.Either (Either(..))
-import Data.List.NonEmpty (NonEmptyList)
 import Data.Map (Map)
 import Data.Map as Map
 import Data.Maybe (Maybe(..), maybe)
@@ -20,7 +20,7 @@ import Data.Set.NonEmpty as NonEmptySet
 import Data.String as String
 import Data.Time.Duration (Hours(..))
 import Data.Traversable (traverse)
-import Data.Tuple (Tuple(..), uncurry)
+import Data.Tuple (Tuple(..), snd, uncurry)
 import Data.Tuple.Nested (type (/\), (/\))
 import Effect.Aff as Aff
 import Effect.Aff.Class (class MonadAff, liftAff)
@@ -32,14 +32,15 @@ import PureScript.CST.Errors as CST.Errors
 import PureScript.CST.Types as CST.Types
 import Registry.Location (Location)
 import Registry.Manifest (Manifest(..))
-import Registry.ManifestIndex (ManifestIndex)
-import Registry.ManifestIndex as ManifestIndex
 import Registry.Metadata (Metadata(..), PublishedMetadata, UnpublishedMetadata)
 import Registry.Operation (PublishData)
 import Registry.PackageName (PackageName)
 import Registry.PackageName as PackageName
+import Registry.PursGraph (AssociatedError, ModuleName, PursGraph)
+import Registry.PursGraph as PursGraph
 import Registry.Range (Range)
 import Registry.Range as Range
+import Registry.Solver (CompilerIndex)
 import Registry.Solver as Solver
 import Registry.Version (Version)
 
@@ -71,11 +72,63 @@ isNotUnpublished :: Manifest -> Metadata -> Maybe UnpublishedMetadata
 isNotUnpublished (Manifest { version }) (Metadata { unpublished }) =
   Map.lookup version unpublished
 
+data ValidateDepsError
+  = UnusedDependencies (NonEmptySet PackageName)
+  | MissingDependencies (NonEmptySet PackageName)
+  | UnusedAndMissing { unused :: NonEmptySet PackageName, missing :: NonEmptySet PackageName }
+
+derive instance Eq ValidateDepsError
+
+printValidateDepsError :: ValidateDepsError -> String
+printValidateDepsError = case _ of
+  UnusedDependencies unused ->
+    "Unused dependencies (" <> printPackages unused <> ")"
+  MissingDependencies missing ->
+    "Missing dependencies (" <> printPackages missing <> ")"
+  UnusedAndMissing { unused, missing } ->
+    "Unused dependencies (" <> printPackages unused <> ") and missing dependencies (" <> printPackages missing <> ")"
+  where
+  printPackages :: NonEmptySet PackageName -> String
+  printPackages = String.joinWith ", " <<< map PackageName.print <<< NonEmptySet.toUnfoldable
+
+-- | Verifies that the manifest lists dependencies imported in the source code,
+-- | no more (ie. unused) and no less (ie. transitive). The graph passed to this
+-- | function should be the output of 'purs graph' executed on the 'output'
+-- | directory of the package compiled with its dependencies.
+noTransitiveOrMissingDeps :: Manifest -> PursGraph -> (FilePath -> Either String PackageName) -> Either (Either (NonEmptyArray AssociatedError) ValidateDepsError) Unit
+noTransitiveOrMissingDeps (Manifest manifest) graph parser = do
+  associated <- Bifunctor.lmap Left $ PursGraph.associateModules parser graph
+
+  let
+    packageModules :: Set ModuleName
+    packageModules = Map.keys $ Map.filter (_ == manifest.name) associated
+
+    directImportModules :: Set ModuleName
+    directImportModules = PursGraph.directDependenciesOf packageModules graph
+
+    directImportPackages :: Set PackageName
+    directImportPackages = Set.mapMaybe (flip Map.lookup associated) directImportModules
+
+    -- Unused packages are those which are listed in the manifest dependencies
+    -- but which are not imported by the package source code.
+    unusedDependencies :: Set PackageName
+    unusedDependencies = Set.filter (not <<< flip Set.member directImportPackages) (Map.keys manifest.dependencies)
+
+    -- Missing packages are those which are imported by the package source code
+    -- but which are not listed in its dependencies.
+    missingDependencies :: Set PackageName
+    missingDependencies = Set.filter (not <<< flip Map.member manifest.dependencies) directImportPackages
+
+  case NonEmptySet.fromSet unusedDependencies, NonEmptySet.fromSet missingDependencies of
+    Nothing, Nothing -> Right unit
+    Just unused, Nothing -> Left $ Right $ UnusedDependencies unused
+    Nothing, Just missing -> Left $ Right $ MissingDependencies missing
+    Just unused, Just missing -> Left $ Right $ UnusedAndMissing { unused, missing }
+
 -- | Verifies that the manifest dependencies are solvable by the registry solver.
-validateDependenciesSolve :: Manifest -> ManifestIndex -> Either (NonEmptyList Solver.SolverError) (Map PackageName Version)
-validateDependenciesSolve manifest manifestIndex = do
-  let getDependencies = _.dependencies <<< un Manifest
-  Solver.solve (map (map getDependencies) (ManifestIndex.toMap manifestIndex)) (getDependencies manifest)
+validateDependenciesSolve :: Version -> Manifest -> CompilerIndex -> Either Solver.SolverErrors (Map PackageName Version)
+validateDependenciesSolve compiler (Manifest manifest) compilerIndex =
+  map snd $ Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies
 
 -- | Verifies that all dependencies in the manifest are present in the build
 -- | plan, and the version listed in the build plan is within the range provided
@@ -97,23 +150,6 @@ getUnresolvedDependencies (Manifest { dependencies }) resolutions =
       | not (Range.includes dependencyRange version) -> Just $ Right $ dependencyName /\ dependencyRange /\ version
       | otherwise -> Nothing
 
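A hedged sketch of driving the new check (the `parser` below is a stand-in: real callers must map a compiled module's file path back to its owning package, for example by inspecting the output directory layout):

```purescript
checkManifestDeps :: Manifest -> PursGraph -> Either String Unit
checkManifestDeps manifest graph =
  case noTransitiveOrMissingDeps manifest graph parser of
    Right _ -> Right unit
    Left (Left _moduleErrors) -> Left "some modules could not be associated with a package"
    Left (Right depsError) -> Left (printValidateDepsError depsError)
  where
  -- Placeholder path parser; a real implementation would first extract the
  -- package-name segment from the path before parsing it.
  parser :: FilePath -> Either String PackageName
  parser = PackageName.parse
```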
--- | Discovers dependencies listed in the manifest that are not actually used
--- | by the solved dependencies. This should not produce an error, but it
--- | indicates an over-constrained manifest.
-getUnusedDependencies :: Manifest -> Map PackageName Version -> Set PackageName -> Maybe (NonEmptySet PackageName)
-getUnusedDependencies (Manifest { dependencies }) resolutions discovered = do
-  let
-    -- There may be too many resolved dependencies because the manifest includes
-    -- e.g. test dependencies, so we start by only considering resolved deps
-    -- that are actually used.
-    inUse = Set.filter (flip Set.member discovered) (Map.keys resolutions)
-
-    -- Next, we can determine which dependencies are unused by looking at the
-    -- difference between the manifest dependencies and the resolved packages
-    unused = Set.filter (not <<< flip Set.member inUse) (Map.keys dependencies)
-
-  NonEmptySet.fromSet unused
-
 data TarballSizeResult
   = ExceedsMaximum Number
   | WarnPackageSize Number
diff --git a/lib/src/PursGraph.purs b/lib/src/PursGraph.purs
index 5ed1e512b..d95bff119 100644
--- a/lib/src/PursGraph.purs
+++ b/lib/src/PursGraph.purs
@@ -79,7 +79,17 @@ associateModules parse graph = do
 
 -- | Find direct dependencies of the given module, according to the given graph.
 directDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName)
-directDependencies name = map (Set.fromFoldable <<< _.depends) <<< Map.lookup name
+directDependencies start graph = Map.lookup start graph <#> \_ -> directDependenciesOf (Set.singleton start) graph
+
+-- | Find direct dependencies of a set of input modules according to the given
+-- | graph, excluding the input modules themselves.
+directDependenciesOf :: Set ModuleName -> PursGraph -> Set ModuleName
+directDependenciesOf sources graph = do
+  let
+    foldFn prev name = case Map.lookup name graph of
+      Nothing -> prev
+      Just { depends } -> Set.union prev (Array.foldl (\acc mod -> if Set.member mod sources then acc else Set.insert mod acc) Set.empty depends)
+  Array.foldl foldFn Set.empty $ Set.toUnfoldable sources
 
 -- | Find all dependencies of the given module, according to the given graph,
 -- | excluding the module itself.
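The refactor re-expresses the single-module lookup in terms of the new set-based traversal, which suggests a simple consistency property. A sketch (not from the diff), relying only on the definitions above and the `Ord ModuleName` instance that `Set` already requires:

```purescript
-- For any module present in the graph, the two entry points agree; for an
-- absent module, directDependencies returns Nothing and the check passes.
singletonAgrees :: ModuleName -> PursGraph -> Boolean
singletonAgrees name graph = case PursGraph.directDependencies name graph of
  Nothing -> true
  Just deps -> deps == PursGraph.directDependenciesOf (Set.singleton name) graph
```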
diff --git a/lib/src/Range.purs b/lib/src/Range.purs
index b5dbdcf59..0f707c578 100644
--- a/lib/src/Range.purs
+++ b/lib/src/Range.purs
@@ -4,17 +4,17 @@
 module Registry.Range
   ( Range
   , caret
-  , exact
   , codec
+  , exact
   , greaterThanOrEq
   , includes
   , intersect
   , lessThan
+  , mk
   , parse
   , parser
   , print
   , union
-  , mk
   ) where
 
 import Prelude
diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs
index ac0086c76..d3dcec10c 100644
--- a/lib/src/Solver.purs
+++ b/lib/src/Solver.purs
@@ -5,9 +5,12 @@ import Prelude
 
 import Control.Alternative (guard)
 import Data.Array as Array
+import Data.Array.NonEmpty (NonEmptyArray)
 import Data.Array.NonEmpty as NEA
+import Data.Array.NonEmpty as NonEmptyArray
 import Data.Bifunctor (lmap)
 import Data.Either (Either(..))
+import Data.Either as Either
 import Data.Foldable (fold, foldMap, intercalate)
 import Data.FoldableWithIndex (anyWithIndex, foldMapWithIndex, foldlWithIndex, forWithIndex_)
 import Data.Functor.App (App(..))
@@ -16,10 +19,12 @@ import Data.List.NonEmpty as NEL
 import Data.Map (Map, SemigroupMap(..))
 import Data.Map as Map
 import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe')
+import Data.Maybe as Maybe
 import Data.Monoid.Disj (Disj(..))
 import Data.Monoid.Endo (Endo(..))
 import Data.Newtype (class Newtype, over, un, unwrap, wrap)
 import Data.Semigroup.Foldable (intercalateMap)
+import Data.Semigroup.Foldable as Foldable1
 import Data.Set (Set)
 import Data.Set as Set
 import Data.Set.NonEmpty (NonEmptySet)
@@ -27,6 +32,11 @@ import Data.Set.NonEmpty as NES
 import Data.Traversable (for, sequence, traverse)
 import Data.TraversableWithIndex (forWithIndex, traverseWithIndex)
 import Data.Tuple (Tuple(..), fst, snd)
+import Partial.Unsafe as Partial
+import Registry.Manifest (Manifest(..))
+import Registry.ManifestIndex (ManifestIndex)
+import Registry.ManifestIndex as ManifestIndex
+import Registry.Metadata (Metadata(..))
 import Registry.PackageName (PackageName)
 import Registry.PackageName as PackageName
 import Registry.Range (Range)
@@ -39,6 +49,46 @@ import Safe.Coerce (coerce)
 -- Public API
 --------------------------------------------------------------------------------
 
+-- | A 'DependencyIndex' enriched to include the compiler versions supported by
+-- | each package version as a dependency.
+newtype CompilerIndex = CompilerIndex DependencyIndex
+
+derive instance Newtype CompilerIndex _
+
+-- | Associate the compiler versions supported by each package version by
+-- | inserting them as a range in the version's dependencies.
+buildCompilerIndex :: NonEmptyArray Version -> ManifestIndex -> Map PackageName Metadata -> CompilerIndex
+buildCompilerIndex pursCompilers index metadata = CompilerIndex do
+  let
+    purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs")
+
+    getDependencies (Manifest manifest) = fromMaybe manifest.dependencies do
+      Metadata { published } <- Map.lookup manifest.name metadata
+      { compilers } <- Map.lookup manifest.version published
+      -- Construct a maximal range for the compilers the
+      -- indicated package version supports.
+      let
+        min = Foldable1.minimum compilers
+        max = Version.bumpPatch $ Foldable1.maximum compilers
+      pursRange <- Range.mk min max
+      pure $ Map.insert purs pursRange manifest.dependencies
+
+    newPurs version = Map.singleton purs (Map.singleton version Map.empty)
+    pursVersions = Array.foldl (\acc compiler -> Map.unionWith Map.union (newPurs compiler) acc) Map.empty (NonEmptyArray.toArray pursCompilers)
+    dependencyIndex = map (map getDependencies) (ManifestIndex.toMap index)
+
+  Map.unionWith Map.union pursVersions dependencyIndex
+
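The range construction in `getDependencies` can be read as a worked example: if a version's metadata lists compilers `0.15.10` through `0.15.12`, the derived constraint is `>=0.15.10 <0.15.13`, i.e. the patch-bumped maximum. Factored out as a standalone sketch using only calls that appear above:

```purescript
-- Maximal "purs" range spanning every compiler a package version is known
-- to support; Nothing only if Range.mk rejects the computed bounds.
compilerRange :: NonEmptyArray Version -> Maybe Range
compilerRange compilers =
  Range.mk (Foldable1.minimum compilers) (Version.bumpPatch (Foldable1.maximum compilers))
```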
+-- | Solve the given dependencies using a dependency index that includes compiler
+-- | versions, such that the solution prunes results that would fall outside
+-- | a compiler range accepted by all dependencies.
+solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version))
+solveWithCompiler pursRange (CompilerIndex index) required = do
+  let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs")
+  results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) }
+  let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results
+  pure $ Tuple pursVersion $ Map.delete purs results
+
 -- | Data from the registry index, listing dependencies for each version of
 -- | each package
 type DependencyIndex = Map PackageName (Map Version (Map PackageName Range))
@@ -146,6 +196,7 @@ intersectionFromRange' package range =
 --------------------------------------------------------------------------------
 
 type SolverErrors = NEL.NonEmptyList SolverError
+
 data SolverError
   = Conflicts (Map PackageName Intersection)
   | WhileSolving PackageName (Map Version SolverError)
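`solveWithCompiler` is pinned to a single compiler in `validateDependenciesSolve` via `Range.exact`, but nothing requires an exact range. A hedged sketch (hypothetical `minCompiler`/`maxCompiler` values) that instead searches across a compiler window, using `Range.mk` as above:

```purescript
solveWithinCompilerWindow
  :: Version
  -> Version
  -> CompilerIndex
  -> Map PackageName Range
  -> Maybe (Either SolverErrors (Tuple Version (Map PackageName Version)))
solveWithinCompilerWindow minCompiler maxCompiler index required = do
  compilerRange <- Range.mk minCompiler maxCompiler
  pure (solveWithCompiler compilerRange index required)
```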
diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs
index c37d6875a..1fb7e13a6 100644
--- a/lib/test/Registry/ManifestIndex.purs
+++ b/lib/test/Registry/ManifestIndex.purs
@@ -74,8 +74,8 @@ spec = do
       manifest1 = unsafeManifest "prelude" "1.0.0" []
       manifest2 = Newtype.over Manifest (_ { description = Just "My prelude description." }) manifest1
       index =
-        ManifestIndex.insert manifest1 ManifestIndex.empty
-          >>= ManifestIndex.insert manifest2
+        ManifestIndex.insert ManifestIndex.ConsiderRanges manifest1 ManifestIndex.empty
+          >>= ManifestIndex.insert ManifestIndex.ConsiderRanges manifest2
 
     case index of
       Left errors ->
@@ -103,17 +103,20 @@ spec = do
        tinyIndex :: Array Manifest
        tinyIndex = [ unsafeManifest "prelude" "1.0.0" [] ]
 
-      testIndex { satisfied: tinyIndex, unsatisfied: [] }
+      testIndex ManifestIndex.ConsiderRanges { satisfied: tinyIndex, unsatisfied: [] }
 
     Spec.it "Fails to parse non-self-contained index" do
       let
-        satisfied :: Array Manifest
-        satisfied =
+        satisfiedStrict :: Array Manifest
+        satisfiedStrict =
           [ unsafeManifest "prelude" "1.0.0" []
           , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ]
-          -- It is OK for the version bounds to not exist, although we may
-          -- choose to make this more strict in the future.
-          , unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ]
+          ]
+
+        -- Packages with dependencies that exist, but not at the proper bounds.
+        satisfiedLoose :: Array Manifest
+        satisfiedLoose = satisfiedStrict <>
+          [ unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ]
           ]
 
         unsatisfied :: Array Manifest
@@ -121,7 +124,8 @@ spec = do
           [ unsafeManifest "control" "3.0.0" [ Tuple "tuples" ">=2.0.0 <3.0.0" ]
          ]
 
-      testIndex { satisfied, unsatisfied }
+      testIndex ManifestIndex.ConsiderRanges { satisfied: satisfiedStrict, unsatisfied }
+      testIndex ManifestIndex.IgnoreRanges { satisfied: satisfiedLoose, unsatisfied }
 
     Spec.it "Parses cyclical but acceptable index" do
       let
@@ -133,7 +137,7 @@ spec = do
       , unsafeManifest "control" "2.0.0" []
       ]
 
-      testIndex { satisfied, unsatisfied: [] }
+      testIndex ManifestIndex.ConsiderRanges { satisfied, unsatisfied: [] }
 
     Spec.it "Does not parse unacceptable cyclical index" do
      let
@@ -143,21 +147,22 @@ spec = do
       , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ]
       ]
 
-      testIndex { satisfied: [], unsatisfied }
+      testIndex ManifestIndex.ConsiderRanges { satisfied: [], unsatisfied }
 
 contextEntry :: String
 contextEntry =
-  """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}}
-{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}}
-{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}}
+  """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.1","dependencies":{}}
+{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.2","dependencies":{}}
+{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.3","dependencies":{}}
 """
 
 testIndex
   :: forall m
    . MonadThrow Error m
-  => { satisfied :: Array Manifest, unsatisfied :: Array Manifest }
+  => ManifestIndex.IncludeRanges
+  -> { satisfied :: Array Manifest, unsatisfied :: Array Manifest }
   -> m Unit
-testIndex { satisfied, unsatisfied } = case ManifestIndex.maximalIndex (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of
+testIndex consider { satisfied, unsatisfied } = case ManifestIndex.maximalIndex consider (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of
   Tuple errors result -> do
     let
       { fail: shouldHaveErrors } =
@@ -237,6 +242,7 @@ manifestCodec' = Profunctor.dimap to from $ CJ.named "ManifestRep" $ CJ.Record.o
       { url: "https://github.com/purescript/purescript-" <> PackageName.print name <> ".git"
       , subdir: Nothing
       }
+  , ref: "v" <> Version.print version
   , description: Nothing
   , owners: Nothing
   , includeFiles: Nothing
diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs
index eff61e185..8daffc02c 100644
--- a/lib/test/Registry/Metadata.purs
+++ b/lib/test/Registry/Metadata.purs
@@ -25,27 +25,36 @@ recordStudio =
   "published": {
     "0.1.0": {
       "bytes": 3438,
+      "compilers": [
+        "0.13.0"
+      ],
       "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=",
-      "publishedTime": "2021-03-27T10:03:46.000Z",
-      "ref": "v0.1.0"
+      "publishedTime": "2021-03-27T10:03:46.000Z"
     },
     "0.2.1": {
       "bytes": 3365,
+      "compilers": [
+        "0.13.0"
+      ],
       "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=",
-      "publishedTime": "2022-05-15T10:51:57.000Z",
-      "ref": "v0.2.1"
+      "publishedTime": "2022-05-15T10:51:57.000Z"
     },
     "1.0.0": {
       "bytes": 5155,
+      "compilers": [
+        "0.13.0"
+      ],
       "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=",
-      "publishedTime": "2022-11-03T17:30:28.000Z",
-      "ref": "v1.0.0"
+      "publishedTime": "2022-11-03T17:30:28.000Z"
     },
     "1.0.1": {
       "bytes": 5635,
+      "compilers": [
+        "0.13.0",
+        "0.13.1"
+      ],
       "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=",
-      "publishedTime": "2022-11-04T12:21:09.000Z",
-      "ref": "v1.0.1"
+      "publishedTime": "2022-11-04T12:21:09.000Z"
     }
   },
   "unpublished": {
diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs
index 2ccb4075a..1400e70ee 100644
--- a/lib/test/Registry/Operation.purs
+++ b/lib/test/Registry/Operation.purs
@@ -54,7 +54,8 @@ minimalPublish =
   { "compiler": "0.15.6",
     "name": "my-package",
-    "ref": "v1.0.0"
+    "ref": "v1.0.0",
+    "version": "1.0.0"
   }"""
 
 fullPublish :: String
 fullPublish =
@@ -67,7 +68,8 @@ fullPublish =
       "subdir": "core"
     },
     "name": "my-package",
-    "ref": "c23snabhsrib39"
+    "ref": "c23snabhsrib39",
+    "version": "1.0.0"
   }"""
 
 unpublish :: String
diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs
index 2e5cb47aa..955b08164 100644
--- a/lib/test/Registry/Operation/Validation.purs
+++ b/lib/test/Registry/Operation/Validation.purs
@@ -2,6 +2,7 @@ module Test.Registry.Operation.Validation where
 
 import Prelude
 
+import Data.Array.NonEmpty as NonEmptyArray
 import Data.Either (Either(..))
 import Data.Either as Either
 import Data.Foldable (for_)
@@ -14,7 +15,8 @@ import Registry.Manifest (Manifest(..))
 import Registry.Metadata (Metadata(..))
 import Registry.Operation.Validation (UnpublishError(..), forbiddenModules, getUnresolvedDependencies, validatePursModule, validateUnpublish)
 import Registry.Test.Assert as Assert
-import Registry.Test.Utils (defaultHash, defaultLocation, fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion)
+import Registry.Test.Fixtures (defaultHash, defaultLocation)
+import Registry.Test.Utils (fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion)
 import Test.Spec (Spec)
 import Test.Spec as Spec
 
@@ -63,8 +65,9 @@ spec = do
       now = unsafeDateTime "2022-12-12T12:00:00.000Z"
       outOfRange = unsafeDateTime "2022-12-10T11:00:00.000Z"
       inRange = unsafeDateTime "2022-12-11T12:00:00.000Z"
+      compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0")
 
-      publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, ref: "" }
+      publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers }
 
       metadata = Metadata
         { location: defaultLocation
diff --git a/lib/test/Registry/Solver.purs b/lib/test/Registry/Solver.purs
index bfc0e31b9..a45cf92f9 100644
--- a/lib/test/Registry/Solver.purs
+++ b/lib/test/Registry/Solver.purs
@@ -7,18 +7,19 @@ import Data.Either (Either(..))
 import Data.Foldable (for_)
 import Data.FoldableWithIndex (foldMapWithIndex)
 import Data.List.NonEmpty as NonEmptyList
-import Data.Map (Map)
+import Data.Map (Map, SemigroupMap(..))
 import Data.Map as Map
-import Data.Maybe (Maybe(..))
-import Data.Newtype (wrap)
+import Data.Maybe (Maybe(..), fromMaybe')
+import Data.Newtype (un, wrap)
 import Data.Semigroup.Foldable (intercalateMap)
 import Data.Set as Set
 import Data.Set.NonEmpty as NES
 import Data.Tuple (Tuple(..))
 import Data.Tuple.Nested ((/\))
+import Partial.Unsafe (unsafeCrashWith)
 import Registry.PackageName as PackageName
 import Registry.Range as Range
-import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), printSolverError, solve)
+import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), initializeRegistry, initializeRequired, lowerBound, printSolverError, solve, solveSeed, solveSteps, upperBound)
 import Registry.Test.Assert as Assert
 import Registry.Test.Utils (fromRight)
 import Registry.Types (PackageName, Range, Version)
@@ -31,6 +32,11 @@ spec = do
     shouldSucceed goals result = pure unit >>= \_ ->
       solve solverIndex (Map.fromFoldable goals) `Assert.shouldContain` (Map.fromFoldable result)
 
+    shouldSucceedSteps goals result = pure unit >>= \_ -> do
+      let solved = solveSteps (solveSeed { registry: initializeRegistry solverIndex, required: initializeRequired (Map.fromFoldable goals) })
+      let toRange intersect = fromMaybe' (\_ -> unsafeCrashWith "Bad intersection") (Range.mk (lowerBound intersect) (upperBound intersect))
+      map toRange (un SemigroupMap solved.required) `Assert.shouldEqual` Map.fromFoldable result
+
     shouldFail goals errors = pure unit >>= \_ -> case solve solverIndex (Map.fromFoldable goals) of
       Left solverErrors -> do
        let expectedErrorCount = Array.length errors
@@ -103,6 +109,22 @@ spec = do
         , prelude.package /\ version 1
         ]
 
+  Spec.describe "Single-step expands bounds" do
+    Spec.it "Simple range" do
+      shouldSucceedSteps
+        [ simple.package /\ range 0 1 ]
+        [ simple.package /\ range 0 1, prelude.package /\ range 0 1 ]
+
+    Spec.it "Multi-version range" do
+      shouldSucceedSteps
+        [ simple.package /\ range 0 2 ]
+        [ simple.package /\ range 0 2, prelude.package /\ range 0 2 ]
+
+    Spec.it "Transitive" do
+      shouldSucceedSteps
+        [ onlySimple.package /\ range 0 1 ]
+        [ onlySimple.package /\ range 0 1, simple.package /\ range 0 1, prelude.package /\ range 0 1 ]
+
   Spec.describe "Valid dependency ranges containing some invalid versions solve" do
     Spec.it "Proceeds past broken ranges to find a later valid range" do
       -- 'broken-fixed' cannot be solved at the broken version 0, but it can be
diff --git a/nix/overlay.nix b/nix/overlay.nix
index 71049d6e8..8ec743a39 100644
--- a/nix/overlay.nix
+++ b/nix/overlay.nix
@@ -50,6 +50,10 @@ let
   # Map of script name -> { module, description }
   scripts = {
+    archive-seeder = {
+      module = "Registry.Scripts.ArchiveSeeder";
+      description = "Seed the registry archive with tarballs for deleted GitHub repos";
+    };
     legacy-importer = {
       module = "Registry.Scripts.LegacyImporter";
       description = "Import packages from legacy registries (bower, psc-package, etc.)";
     };
@@ -181,8 +185,9 @@ in
     ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ];
 
-    # To update: run `nix build .#server` and copy the hash from the error
-    npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs=";
+    # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the
+    # hash from the error
+    npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI=";
 
     installPhase = ''
       mkdir -p $out
@@ -235,7 +240,7 @@ in
   registry-server = prev.callPackage (buildRegistryPackage {
     name = "registry-server";
-    module = "Registry.App.Server";
+    module = "Registry.App.Main";
     description = "PureScript Registry API server";
     src = ../app;
     spagoLock = app;
diff --git a/nix/test-vm.nix b/nix/test-vm.nix
new file mode 100644
index 000000000..f77ef574a
--- /dev/null
+++ b/nix/test-vm.nix
@@ -0,0 +1,39 @@
+# Machine configuration for the NixOS virtual machine suitable for testing.
+{
+  lib,
+  pkgs,
+  modulesPath,
+  ...
+}:
+{
+  imports = [
+    "${modulesPath}/virtualisation/qemu-vm.nix"
+    ./module.nix
+  ];
+
+  config = {
+    # https://github.com/utmapp/UTM/issues/2353
+    networking.nameservers = lib.mkIf pkgs.stdenv.isDarwin [ "8.8.8.8" ];
+
+    # NOTE: Use 'shutdown now' to exit the VM.
+    services.getty.autologinUser = "root";
+
+    virtualisation = {
+      forwardPorts = [
+        {
+          from = "host";
+          guest.port = 80;
+          host.port = 8080;
+        }
+      ];
+      graphics = false;
+      host = {
+        inherit pkgs;
+      };
+      # Can be adjusted if necessary for test systems (default is 1024)
+      memorySize = 2048;
+    };
+
+    system.stateVersion = "24.05";
+  };
+}
diff --git a/nix/test/config.nix b/nix/test/config.nix
index af973d2c8..07917444f 100644
--- a/nix/test/config.nix
+++ b/nix/test/config.nix
@@ -19,35 +19,51 @@ let
   ports = {
     server = serverPort;
     github = serverPort + 1;
-    bucket = serverPort + 2;
-    s3 = serverPort + 3;
-    pursuit = serverPort + 4;
-    healthchecks = serverPort + 5;
+    # Single storage WireMock instance for bucket + s3 + pursuit (merged for stateful scenarios)
+    storage = serverPort + 2;
+    healthchecks = serverPort + 3;
   };
 
-  # Default state directory for tests
-  defaultStateDir = "/var/lib/registry-server";
+  # Fixed state directory for tests - not configurable to avoid mismatch between
+  # test-env and spago-test-e2e shells. The test-env script cleans this up on start.
+  stateDir = "/tmp/registry-test-env";
 
   # Mock service URLs for test environment
+  # All storage-related APIs (s3, bucket, pursuit) now share a single WireMock instance
   mockUrls = {
+    registry = "http://localhost:${toString ports.server}/api";
     github = "http://localhost:${toString ports.github}";
-    s3 = "http://localhost:${toString ports.s3}";
-    bucket = "http://localhost:${toString ports.bucket}";
-    pursuit = "http://localhost:${toString ports.pursuit}";
+    storage = "http://localhost:${toString ports.storage}";
     healthchecks = "http://localhost:${toString ports.healthchecks}";
   };
 
+  # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations).
+  # These are test-only keys, not used in production.
+  testKeys = {
+    # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org
+    public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK";
+    # OpenSSH format private key
+    private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo=";
+  };
+
   # Complete test environment - starts with .env.example defaults which include
-  # mock secrets, then overrides external services with mock URLs. The DATABASE_URL
-  # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are
-  # implemented in the script directly.
+  # mock secrets, then overrides external services with mock URLs.
+  # All storage-related APIs share a single WireMock instance for stateful scenarios.
   testEnv = envDefaults // {
+    # State directory and derived paths
+    STATE_DIR = stateDir;
+    REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures";
+    DATABASE_URL = "sqlite:${stateDir}/db/registry.sqlite3";
     # Mock service URLs (override production endpoints)
+    REGISTRY_API_URL = mockUrls.registry;
     GITHUB_API_URL = mockUrls.github;
-    S3_API_URL = mockUrls.s3;
-    S3_BUCKET_URL = mockUrls.bucket;
-    PURSUIT_API_URL = mockUrls.pursuit;
+    # All storage-related APIs share a single base URL for stateful scenarios
+    S3_API_URL = mockUrls.storage;
+    S3_BUCKET_URL = mockUrls.storage;
+    PURSUIT_API_URL = mockUrls.storage;
     HEALTHCHECKS_URL = mockUrls.healthchecks;
+    PACCHETTIBOTTI_ED25519_PUB = testKeys.public;
+    PACCHETTIBOTTI_ED25519 = testKeys.private;
   };
 
   envToExports =
@@ -61,17 +77,22 @@
       exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@"
     '';
 
-  # Apply git mock overlay to get registry packages with mocked git.
+  # Test overlay: mocks git and limits compilers for faster tests.
   # Using pkgs.extend avoids a second nixpkgs instantiation (more efficient).
-  # This substitutes gitMock for git in registry-runtime-deps, which causes
-  # registry-server to be rebuilt with the mock baked into its PATH wrapper.
-  gitMockOverlay = _: prev: {
+  testOverlay = _: prev: {
+    # Substitute gitMock for git in registry-runtime-deps
     registry-runtime-deps = map (
      pkg: if pkg == prev.git then gitMock else pkg
    ) prev.registry-runtime-deps;
+
+    # Limit to 2 compilers for faster matrix job tests.
+    # These versions match the compilers referenced in app/fixtures.
+    registry-supported-compilers = lib.filterAttrs (
+      name: _: name == "purs-0_15_10" || name == "purs-0_15_11"
+    ) prev.registry-supported-compilers;
   };
 
-  registryPkgs = pkgs.extend gitMockOverlay;
+  registryPkgs = pkgs.extend testOverlay;
 
   # Helper to create GitHub contents API response, as it returns base64-encoded content
   base64Response =
@@ -127,6 +148,30 @@
     };
   };
 
+  # Console package helpers (console@6.1.0)
+  consoleBase64Response =
+    fileName:
+    base64Response {
+      url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0";
+      inherit fileName;
+      filePath = rootPath + "/app/fixtures/github-packages/console-6.1.0/${fileName}";
+    };
+
+  console404Response = fileName: {
+    request = {
+      method = "GET";
+      url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0";
+    };
+    response = {
+      status = 404;
+      headers."Content-Type" = "application/json";
+      jsonBody = {
+        message = "Not Found";
+        documentation_url = "https://docs.github.com/rest/repos/contents#get-repository-content";
+      };
+    };
+  };
+
   # GitHub API wiremock mappings
   githubMappings = [
     (effectBase64Response "bower.json")
@@ -136,6 +181,13 @@
     (effect404Response "spago.dhall")
     (effect404Response "purs.json")
     (effect404Response "package.json")
+    # Console package (console@6.1.0)
+    (consoleBase64Response "bower.json")
+    (consoleBase64Response "LICENSE")
+    (console404Response "spago.yaml")
+    (console404Response "spago.dhall")
+    (console404Response "purs.json")
+    (console404Response "package.json")
     {
       request = {
        method = "GET";
@@ -153,85 +205,503 @@
        };
      };
    }
+    # Accept issue comment creation (used by GitHubIssue workflow)
+    {
+      request = {
+        method = "POST";
+        urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments";
+      };
+      response = {
+        status = 201;
+        headers."Content-Type" = "application/json";
+        jsonBody = {
+          id = 1;
+          body = "ok";
+        };
+      };
+    }
+    # Accept issue closing (used by GitHubIssue workflow)
+    {
+      request = {
+        method = "PATCH";
+        urlPattern = "/repos/purescript/registry/issues/[0-9]+";
+      };
+      response = {
+        status = 200;
+        headers."Content-Type" = "application/json";
+        jsonBody = {
+          id = 1;
+          state = "closed";
+        };
+      };
+    }
+    # GitHub Teams API for trustee verification (used by GitHubIssue workflow)
+ # GitHub Teams API for trustee verification (used by GitHubIssue workflow) { request = { method = "GET"; - urlPattern = "/\\?.*prefix=.*"; - }; - response = { - status = 200; - headers."Content-Type" = "application/xml"; - body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; - }; - } - # Upload effect@4.0.0 - virtual-hosted style (path is /effect/4.0.0.tar.gz) - { - request = { - method = "PUT"; - urlPattern = "/effect/4\\.0\\.0\\.tar\\.gz.*"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; }; response = { status = 200; - headers."ETag" = ''"abc123"''; - headers."Content-Type" = "application/xml"; - body = ""; - }; - } - # Fail upload for prelude (to test error handling) - { - request = { - method = "PUT"; - urlPattern = "/prelude/6\\.0\\.1\\.tar\\.gz.*"; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { + login = "packaging-team-user"; + id = 1; + } + ]; }; - response.status = 500; } ]; + # Fixture directory for storage (tarballs) + storageFixturesDir = rootPath + "/app/fixtures/registry-storage"; + + # Parse tarball filename into package name and version + # e.g. "effect-4.0.0.tar.gz" -> { name = "effect"; version = "4.0.0"; fileName = "effect-4.0.0.tar.gz"; } + # e.g. "type-equality-4.0.1.tar.gz" -> { name = "type-equality"; version = "4.0.1"; ... } + parseTarball = + fileName: + let + base = lib.removeSuffix ".tar.gz" fileName; + parts = lib.splitString "-" base; + # Version is the last part; name is everything before + version = lib.last parts; + name = lib.concatStringsSep "-" (lib.init parts); + in + { + inherit name version fileName; + }; + + # List all .tar.gz files in storage fixtures + storageTarballs = map parseTarball ( + builtins.filter (f: lib.hasSuffix ".tar.gz" f) ( + builtins.attrNames (builtins.readDir storageFixturesDir) + ) + ); + + # Metadata fixtures directory (to determine which packages are "published") + metadataFixturesDir = rootPath + "/app/fixtures/registry/metadata"; + metadataFiles = builtins.attrNames (builtins.readDir metadataFixturesDir); + publishedPackageNames = map (f: lib.removeSuffix ".json" f) metadataFiles; + + # ============================================================================ + # UNIFIED STORAGE MAPPINGS WITH WIREMOCK SCENARIOS + # ============================================================================ + # + # All storage-related APIs (S3 downloads, bucket uploads, Pursuit) are now served + # by a single WireMock instance with stateful scenarios. This enables proper + # read-after-write semantics - when a test publishes a package, subsequent + # downloads will succeed. 
+  #
+  # Scenario design:
+  # - One scenario per package-version (e.g., "effect-4.0.0")
+  # - WireMock scenarios always start at state "Started"
+  #   - Published packages (has metadata): "Started" means Present (tarball available)
+  #     - After DELETE, transitions to "Deleted" state (404 on GET)
+  #   - Unpublished packages (no metadata): "Started" means Absent (tarball 404)
+  #     - After PUT upload, transitions to "Present" state
+  #     - After DELETE, transitions to "Deleted" state (404 on GET)
+  #
+  # State machine:
+  #   Published:   Started(Present) --DELETE--> Deleted(404)
+  #   Unpublished: Started(404) --PUT--> Present(200) --DELETE--> Deleted(404)
+  #
+  # Reset between tests via POST /__admin/scenarios/reset
+  # ============================================================================
+
+  # Generate S3 GET mappings with scenario support
+  s3Mappings = lib.concatMap (
+    pkg:
+    let
+      scenario = "${pkg.name}-${pkg.version}";
+      isPublished = builtins.elem pkg.name publishedPackageNames;
+      tarPath = "/${pkg.name}/${pkg.version}.tar.gz";
+    in
+    if isPublished then
+      # Published package: tarball available in Started state, 404 in Deleted state
+      [
+        {
+          request = {
+            method = "GET";
+            url = tarPath;
+          };
+          response = {
+            status = 200;
+            headers."Content-Type" = "application/octet-stream";
+            bodyFileName = pkg.fileName;
+          };
+          scenarioName = scenario;
+          requiredScenarioState = "Started";
+        }
+        {
+          request = {
+            method = "GET";
+            url = tarPath;
+          };
+          response = {
+            status = 404;
+            body = "Not Found";
+          };
+          scenarioName = scenario;
+          requiredScenarioState = "Deleted";
+        }
+      ]
+    else
+      # Unpublished package: 404 in Started, 200 in Present, 404 in Deleted
+      [
+        {
+          request = {
+            method = "GET";
+            url = tarPath;
+          };
+          response = {
+            status = 404;
+            body = "Not Found";
+          };
+          scenarioName = scenario;
+          requiredScenarioState = "Started";
+        }
+        {
+          request = {
+            method = "GET";
+            url = tarPath;
+          };
+          response = {
+            status = 200;
+            headers."Content-Type" = "application/octet-stream";
+            bodyFileName = pkg.fileName;
+          };
+          scenarioName = scenario;
+          requiredScenarioState = "Present";
+        }
+        {
+          request = {
+            method = "GET";
+            url = tarPath;
+          };
+          response = {
+            status = 404;
+            body = "Not Found";
+          };
+          scenarioName = scenario;
+          requiredScenarioState = "Deleted";
+        }
+      ]
+  ) storageTarballs;
+
+  # Generate s3Files list from fixtures (tarballs for bodyFileName references)
+  s3Files = map (pkg: {
+    name = pkg.fileName;
+    path = storageFixturesDir + "/${pkg.fileName}";
+  }) storageTarballs;
+
+  # Generate bucket PUT/DELETE/listObjects mappings with scenario support
+  # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name
+  # is in the hostname (purescript-registry.localhost:9002) and the path contains
+  # only the key.
+  bucketMappings =
+    # Generate per-package listObjects mappings with scenario support
+    (lib.concatMap (
+      pkg:
+      let
+        scenario = "${pkg.name}-${pkg.version}";
+        isPublished = builtins.elem pkg.name publishedPackageNames;
+        escapedName = lib.replaceStrings [ "-" ] [ "\\-" ] pkg.name;
+        listUrlPattern = "/\\?.*prefix=${escapedName}.*";
+        presentContents = ''${pkg.name}/${pkg.version}.tar.gz1000"abc123"'';
+      in
+      if isPublished then
+        # Published package: listObjects returns contents in Started, empty in Deleted
+        [
+          {
+            request = {
+              method = "GET";
+              urlPattern = listUrlPattern;
+            };
+            response = {
+              status = 200;
+              headers."Content-Type" = "application/xml";
+              body = "${presentContents}";
+            };
+            scenarioName = scenario;
+            requiredScenarioState = "Started";
+          }
+          {
+            request = {
+              method = "GET";
+              urlPattern = listUrlPattern;
+            };
+            response = {
+              status = 200;
+              headers."Content-Type" = "application/xml";
+              body = "";
+            };
+            scenarioName = scenario;
+            requiredScenarioState = "Deleted";
+          }
+        ]
+      else
+        # Unpublished package: listObjects returns empty in Started, contents in Present, empty in Deleted
+        [
+          {
+            request = {
+              method = "GET";
+              urlPattern = listUrlPattern;
+            };
+            response = {
+              status = 200;
+              headers."Content-Type" = "application/xml";
+              body = "";
+            };
+            scenarioName = scenario;
+            requiredScenarioState = "Started";
+          }
+          {
+            request = {
+              method = "GET";
+              urlPattern = listUrlPattern;
+            };
+            response = {
+              status = 200;
+              headers."Content-Type" = "application/xml";
+              body = "${presentContents}";
+            };
+            scenarioName = scenario;
+            requiredScenarioState = "Present";
+          }
+          {
+            request = {
+              method = "GET";
+              urlPattern = listUrlPattern;
+            };
+            response = {
+              status = 200;
+              headers."Content-Type" = "application/xml";
+              body = "";
+            };
+            scenarioName = scenario;
+            requiredScenarioState = "Deleted";
+          }
+        ]
+    ) storageTarballs)
+    ++ (
+      # Generate PUT/DELETE mappings for all packages with scenario support
+      lib.concatMap (
+        pkg:
+        let
+          scenario = "${pkg.name}-${pkg.version}";
+          isPublished = builtins.elem pkg.name publishedPackageNames;
+          escapedVersion = lib.replaceStrings [ "." ] [ "\\." ] pkg.version;
] pkg.version; + urlPattern = "/${pkg.name}/${escapedVersion}\\.tar\\.gz.*"; + in + if isPublished then + # Published package: PUT fails (already exists), DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already published"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Started state (package exists) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: PUT succeeds and transitions to Present, DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 200; + headers."ETag" = ''"abc123"''; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Present"; + } + # PUT in Present state fails (already uploaded) + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already uploaded"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + # DELETE in Started state fails (doesn't exist yet) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Present state (after publish) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs + ); + + # Pursuit API mappings with scenario support + pursuitMappings = + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + versionsUrl = "/packages/purescript-${pkg.name}/available-versions"; + publishedVersionsBody = ''[["${pkg.version}","https://pursuit.purescript.org/packages/purescript-${pkg.name}/${pkg.version}"]]''; + in + if isPublished then + # Published package: versions available in Started, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: empty in Started, has version in Present, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = 
"Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ [ + # Accept documentation uploads (POST /packages) + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; + # Healthchecks API wiremock mappings (simple ping endpoint) healthchecksMappings = [ { @@ -246,46 +716,9 @@ let } ]; - # Pursuit API wiremock mappings - pursuitMappings = [ - { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-effect/available-versions"; - }; - response = { - status = 200; - body = ''[]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-type-equality/available-versions"; - }; - response = { - status = 200; - body = ''[["4.0.1","https://pursuit.purescript.org/packages/purescript-type-equality/4.0.1"]]''; - }; - } - { - request = { - method = "POST"; - url = "/packages"; - }; - response.status = 201; - } - ]; + # Combined storage mappings (S3 + bucket + Pursuit) + storageMappings = s3Mappings ++ bucketMappings ++ pursuitMappings; + storageFiles = s3Files; # Wiremock root directory builder mkWiremockRoot = @@ -304,7 +737,9 @@ let ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") files} ''; - # All wiremock configurations + # All WireMock configurations. + # Add new WireMock services here; both test-env.nix and integration.nix + # derive their processes from this attribute set automatically. 
wiremockConfigs = { github = { port = ports.github; @@ -313,26 +748,13 @@ let mappings = githubMappings; }; }; - s3 = { - port = ports.s3; + # Single storage WireMock instance with stateful scenarios + storage = { + port = ports.storage; rootDir = mkWiremockRoot { - name = "s3"; - mappings = s3Mappings; - files = s3Files; - }; - }; - bucket = { - port = ports.bucket; - rootDir = mkWiremockRoot { - name = "bucket"; - mappings = bucketMappings; - }; - }; - pursuit = { - port = ports.pursuit; - rootDir = mkWiremockRoot { - name = "pursuit"; - mappings = pursuitMappings; + name = "storage"; + mappings = storageMappings; + files = storageFiles; }; }; healthchecks = { @@ -357,42 +779,50 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" - export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . && git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${stateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + cp -r ${rootPath}/app/fixtures/github-packages/console-6.1.0 "$FIXTURES_DIR/purescript/purescript-console" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do + cd "$repo" + git init -b master && git add . 
+ gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + # Tag the initial commit so we can reset to it for test isolation + gitbot tag -m "initial-fixture" initial-fixture + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + gitbot -C "$FIXTURES_DIR/purescript/purescript-console" tag -m "v6.1.0" v6.1.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( builtins.toJSON { name = "effect"; ref = "v4.0.0"; - compiler = "0.15.9"; + compiler = "0.15.10"; location = { githubOwner = "purescript"; githubRepo = "purescript-effect"; @@ -470,12 +900,12 @@ in { inherit ports - defaultStateDir + stateDir mockUrls testEnv envToExports gitMock - gitMockOverlay + testOverlay wiremockConfigs combinedWiremockRoot setupGitFixtures @@ -484,10 +914,8 @@ in serverStartScript # For custom wiremock setups githubMappings - s3Mappings - s3Files - bucketMappings - pursuitMappings + storageMappings + storageFiles mkWiremockRoot ; } diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..75b6e6487 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -1,7 +1,9 @@ { pkgs, spagoSrc, - testEnv, + # Test support module from test-env.nix. Named 'testSupport' to avoid confusion + # with testSupport.testEnv (the environment variables attribute set). + testSupport, }: if pkgs.stdenv.isDarwin then pkgs.runCommand "integration-skip" { } '' @@ -29,7 +31,7 @@ else ''; }; - ports = testEnv.ports; + ports = testSupport.ports; in pkgs.runCommand "e2e-integration" { @@ -38,10 +40,11 @@ else pkgs.curl pkgs.jq pkgs.git + pkgs.sqlite pkgs.nss_wrapper - testEnv.wiremockStartScript - testEnv.serverStartScript - testEnv.setupGitFixtures + testSupport.wiremockStartScript + testSupport.serverStartScript + testSupport.setupGitFixtures ]; NODE_PATH = "${pkgs.registry-package-lock}/node_modules"; # Use nss_wrapper to resolve S3 bucket subdomain in the Nix sandbox. @@ -57,7 +60,11 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + export REPO_FIXTURES_DIR="$STATE_DIR/repo-fixtures" + + # Export test environment variables for E2E test runners + ${testSupport.envToExports testSupport.testEnv} + mkdir -p $STATE_DIR # Start wiremock services @@ -65,8 +72,8 @@ else start-wiremock & WIREMOCK_PID=$! 
- # Wait for wiremock (github, bucket, s3, pursuit) - for port in ${toString ports.github} ${toString ports.bucket} ${toString ports.s3} ${toString ports.pursuit}; do + # Wait for wiremock (github, storage, healthchecks) + for port in ${toString ports.github} ${toString ports.storage} ${toString ports.healthchecks}; do until curl -s "http://localhost:$port/__admin" > /dev/null 2>&1; do sleep 0.5 done diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..d754f36b6 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -42,11 +46,14 @@ else timeout=30 ) - # Verify we get a valid JSON response (empty array for jobs) + # Verify we get a valid JSON response (the jobs endpoint responds) result = registry.succeed( "curl -s http://localhost:${envVars.SERVER_PORT}/api/v1/jobs" ) - assert result.strip() == "[]", f"Expected empty jobs array, got: {result}" + + # The server may create matrix jobs on startup for new compilers, so we just verify + # the response is valid JSON (starts with '[') + assert result.strip().startswith("["), f"Expected JSON array, got: {result}" # Verify the database was created and migrations ran registry.succeed("test -f ${stateDir}/db/registry.sqlite3") @@ -54,6 +61,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. 
Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +77,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.testOverlay ]; virtualisation = { graphics = false; @@ -70,12 +86,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..764d01c47 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -59,18 +59,15 @@ let version = "0.5"; processes = { wiremock-github = mkWiremockProcess "github" ports.github; - wiremock-s3 = mkWiremockProcess "s3" ports.s3; - wiremock-bucket = mkWiremockProcess "bucket" ports.bucket; - wiremock-pursuit = mkWiremockProcess "pursuit" ports.pursuit; + # Unified storage WireMock instance for S3 + bucket + Pursuit with stateful scenarios + wiremock-storage = mkWiremockProcess "storage" ports.storage; wiremock-healthchecks = mkWiremockProcess "healthchecks" ports.healthchecks; registry-server = { command = "${serverStartScript}/bin/start-server"; depends_on = { wiremock-github.condition = "process_healthy"; - wiremock-s3.condition = "process_healthy"; - wiremock-bucket.condition = "process_healthy"; - wiremock-pursuit.condition = "process_healthy"; + wiremock-storage.condition = "process_healthy"; wiremock-healthchecks.condition = "process_healthy"; }; readiness_probe = { @@ -92,21 +89,21 @@ let processComposeYaml = pkgs.writeText "process-compose.yaml" (builtins.toJSON processComposeConfig); + testEnvExports = testConfig.envToExports testConfig.testEnv; + + # The state directory is fixed (not configurable) to avoid mismatch between + # the test-env and spago-test-e2e shells. 
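+  # Illustrative consequence (assuming STATE_DIR is the fixed value defined in
+  # testConfig.testEnv in config.nix): tooling in either shell resolves the
+  # same directory without exporting anything, e.g. `ls "$STATE_DIR"` shows
+  # identical contents from the test-env and spago-test-e2e shells.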
+ stateDir = testConfig.testEnv.STATE_DIR; + testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" - - if [ -z "''${STATE_DIR:-}" ]; then - STATE_DIR="$(mktemp -d)" - export STATE_DIR - echo "Using temporary directory: $STATE_DIR" - trap 'echo "Cleaning up $STATE_DIR..."; rm -rf "$STATE_DIR"' EXIT - else - export STATE_DIR - fi + # Clean up previous test state and create fresh directory + rm -rf ${stateDir} + mkdir -p ${stateDir} - mkdir -p "$STATE_DIR" + # Export all test environment variables + ${testEnvExports} exec ${pkgs.process-compose}/bin/process-compose up \ -f ${processComposeYaml} \ @@ -130,8 +127,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/package-lock.json b/package-lock.json index 3e868b0c6..5c5c89ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -253,65 +253,65 @@ } }, "node_modules/@aws-sdk/client-s3": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz", - "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz", + "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.948.0", - "@aws-sdk/middleware-bucket-endpoint": "3.936.0", - "@aws-sdk/middleware-expect-continue": "3.936.0", - "@aws-sdk/middleware-flexible-checksums": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-location-constraint": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/middleware-ssec": "3.936.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", 
+ "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -319,47 +319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - 
"@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -368,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -392,15 +392,15 @@ } }, 
"node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -408,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -429,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + 
"@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -454,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -473,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -496,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -513,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - "integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -532,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": "sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -550,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": 
"sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -568,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -583,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -607,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": 
"sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -622,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -636,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -650,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": "sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -666,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + 
"version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -691,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -705,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -723,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": 
"3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -772,15 +772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + "integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -788,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -805,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": "sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -823,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -836,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -848,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -864,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -876,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -912,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ 
-1169,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1207,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1224,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1245,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": 
"^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1261,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1276,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1290,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1303,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": "sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1317,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": "sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + 
"version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1331,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1347,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": "Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1362,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1377,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": "sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1391,12 +1391,12 @@ } }, 
"node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1416,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1430,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1444,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + "integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1463,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": 
"sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1483,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1497,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1510,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1525,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": 
"sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1541,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1554,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1567,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1581,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1594,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": 
"4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1619,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1638,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1656,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": 
"sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1668,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1745,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1760,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1778,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + 
"integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1804,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1817,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1831,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + "@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1875,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -2193,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - 
"integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", diff --git a/scripts/src/ArchiveSeeder.purs b/scripts/src/ArchiveSeeder.purs new file mode 100644 index 000000000..fe0ae805f --- /dev/null +++ b/scripts/src/ArchiveSeeder.purs @@ -0,0 +1,360 @@ +-- | This script populates the purescript/registry-archive repository with +-- | tarballs for packages whose GitHub sources have been deleted (404). +-- | +-- | The archive is a temporary measure for the legacy importer migration. +-- | Once packages are re-uploaded to the registry, the archive can be deleted. +-- | +-- | The script is designed to be re-run safely: +-- | - Caches 404 and accessible status to disk (scratch/.cache) +-- | - Skips tarballs that already exist in the archive +-- | - Reports transient errors (rate limits, network) separately +-- | - Exits with error code 1 if any packages had transient errors +module Registry.Scripts.ArchiveSeeder where + +import Registry.App.Prelude + +import ArgParse.Basic (ArgParser) +import ArgParse.Basic as Arg +import Control.Apply (lift2) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Exists as Exists +import Data.Formatter.DateTime as Formatter.DateTime +import Data.Map as Map +import Data.Set as Set +import Data.String as String +import Effect.Class.Console as Console +import Effect.Ref as Ref +import Node.FS.Aff as FS.Aff +import Node.FS.Sync as FS.Sync +import Node.Path as Path +import Node.Process as Process +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type Args = + { archivePath :: FilePath + , dryRun :: Boolean + , package :: Maybe PackageName + } + +parser :: ArgParser Args +parser = Arg.fromRecord + { archivePath: + Arg.argument [ "--archive-path" ] + "Path to local checkout of purescript/registry-archive" + # Arg.default (Path.concat [ scratchDir, "registry-archive" ]) + , dryRun: + Arg.flag [ "--dry-run" ] + "Run without writing tarballs or committing to the registry-archive repo." + # Arg.boolean + # Arg.default false + , package: + Arg.argument [ "--package" ] + "Only process the given package (by registry package name)." 
+ # Arg.unformat "PACKAGE" PackageName.parse + # Arg.optional + } + +main :: Effect Unit +main = launchAff_ do + args <- Array.drop 2 <$> liftEffect Process.argv + + let description = "A script for seeding the registry archive with tarballs for deleted GitHub repos." + parsedArgs <- case Arg.parseArgs "archive-seeder" description parser args of + Left err -> Console.log (Arg.printArgError err) *> liftEffect (Process.exit' 1) + Right a -> pure a + + Env.loadEnvFile ".env" + resourceEnv <- Env.lookupResourceEnv + + githubCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + seederCacheRef <- Cache.newCacheRef + let cache = Path.concat [ scratchDir, ".cache" ] + FS.Extra.ensureDirectory cache + + runAppEffects <- do + debouncer <- Registry.newDebouncer + let registryEnv = { pull: Git.Autostash, write: Registry.ReadOnly, repos: Registry.defaultRepos, workdir: scratchDir, debouncer, cacheRef: registryCacheRef } + + token <- Env.lookupRequired Env.githubToken + s3 <- lift2 { key: _, secret: _ } (Env.lookupRequired Env.spacesKey) (Env.lookupRequired Env.spacesSecret) + octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl + pure do + Registry.interpret (Registry.handle registryEnv) + >>> Storage.interpret (Storage.handleS3 { s3, cache }) + >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) + >>> Cache.interpret _seederCache (Cache.handleMemoryFs { cache, ref: seederCacheRef }) + + -- Logging setup + let logDir = Path.concat [ scratchDir, "logs" ] + FS.Extra.ensureDirectory logDir + now <- nowUTC + + let + logFile = "archive-seeder-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" + logPath = Path.concat [ logDir, logFile ] + + hasErrors <- runArchiveSeeder parsedArgs logPath + # runAppEffects + # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) + # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) + # Env.runResourceEnv resourceEnv + # Run.runBaseAff' + + when hasErrors do + liftEffect $ Process.exit' 1 + +-- | The status of a GitHub repo: either accessible or returns 404. +-- | We only cache these definitive states, not transient errors. 
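+-- |
+-- | On disk each cached entry is the JSON string produced by `repoStatusCodec`
+-- | below ("accessible" or "404"), stored under a key derived from the package name.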
+data RepoStatus = RepoAccessible | Repo404
+
+derive instance Eq RepoStatus
+
+repoStatusCodec :: CJ.Codec RepoStatus
+repoStatusCodec = CJ.prismaticCodec "RepoStatus" decode encode CJ.string
+  where
+  decode = case _ of
+    "accessible" -> Just RepoAccessible
+    "404" -> Just Repo404
+    _ -> Nothing
+  encode = case _ of
+    RepoAccessible -> "accessible"
+    Repo404 -> "404"
+
+type SEEDER_CACHE r = (seederCache :: Cache SeederCache | r)
+
+_seederCache :: Proxy "seederCache"
+_seederCache = Proxy
+
+data SeederCache :: (Type -> Type -> Type) -> Type -> Type
+data SeederCache c a = RepoStatusCache PackageName (c RepoStatus a)
+
+instance Functor2 c => Functor (SeederCache c) where
+  map k (RepoStatusCache name a) = RepoStatusCache name (map2 k a)
+
+instance MemoryEncodable SeederCache where
+  encodeMemory = case _ of
+    RepoStatusCache name next ->
+      Exists.mkExists $ Key ("RepoStatus__" <> PackageName.print name) next
+
+instance FsEncodable SeederCache where
+  encodeFs = case _ of
+    RepoStatusCache name next ->
+      Exists.mkExists $ AsJson ("RepoStatus__" <> PackageName.print name) repoStatusCodec next
+
+type Stats =
+  { packagesChecked :: Int
+  , versionsChecked :: Int
+  , packagesNeedingArchive :: Int
+  , versionsNeedingArchive :: Int
+  , tarballsWritten :: Int
+  , tarballsSkipped :: Int
+  , tarballsMissing :: Int
+  , transientErrors :: Int
+  }
+
+emptyStats :: Stats
+emptyStats =
+  { packagesChecked: 0
+  , versionsChecked: 0
+  , packagesNeedingArchive: 0
+  , versionsNeedingArchive: 0
+  , tarballsWritten: 0
+  , tarballsSkipped: 0
+  , tarballsMissing: 0
+  , transientErrors: 0
+  }
+
+type SeedEffects r = (SEEDER_CACHE + REGISTRY + STORAGE + GITHUB + LOG + EXCEPT String + AFF + EFFECT + r)
+
+-- | Returns true if there were transient errors that require re-running
+runArchiveSeeder :: forall r. Args -> FilePath -> Run (SeedEffects r) Boolean
+runArchiveSeeder args logPath = do
+  Log.info "Starting archive seeder!"
+  Log.info $ "Logs available at " <> logPath
+  Log.info $ "Archive path: " <> args.archivePath
+  when args.dryRun do
+    Log.info "Running in dry-run mode (no writes will be performed)"
+  case args.package of
+    Nothing -> Log.info "Processing all packages"
+    Just name -> Log.info $ "Processing single package: " <> PackageName.print name
+
+  -- Ensure archive directory exists (unless dry-run)
+  unless args.dryRun do
+    Run.liftAff $ FS.Extra.ensureDirectory args.archivePath
+
+  statsRef <- liftEffect $ Ref.new emptyStats
+  transientErrorsRef <- liftEffect $ Ref.new ([] :: Array String)
+
+  let
+    processPackage name (Metadata metadata) = do
+      liftEffect $ Ref.modify_ (\s -> s { packagesChecked = s.packagesChecked + 1 }) statsRef
+
+      let publishedVersions = Map.keys metadata.published
+      let versionCount = Set.size publishedVersions
+      liftEffect $ Ref.modify_ (\s -> s { versionsChecked = s.versionsChecked + versionCount }) statsRef
+
+      -- Extract GitHub address from location
+      case metadata.location of
+        Git _ -> do
+          Log.debug $ PackageName.print name <> ": Git location, skipping (only GitHub packages supported)"
+        GitHub { owner, repo } -> do
+          let address = { owner, repo }
+
+          -- Check cache first for definitive status
+          Cache.get _seederCache (RepoStatusCache name) >>= case _ of
+            Just RepoAccessible -> do
+              Log.debug $ PackageName.print name <> ": Cached as accessible, skipping"
+            Just Repo404 -> do
+              Log.debug $ PackageName.print name <> ": Cached as 404, processing..."
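+              -- Re-processing a cached 404 on every run is safe: tarballs that
+              -- already exist in the archive are skipped during processing.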
+              processDeletedPackage args statsRef name publishedVersions versionCount
+            Nothing -> do
+              -- Probe GitHub to check if the repo is accessible
+              GitHub.listTags address >>= case _ of
+                Right _ -> do
+                  Log.debug $ PackageName.print name <> ": GitHub repo accessible, caching and skipping"
+                  Cache.put _seederCache (RepoStatusCache name) RepoAccessible
+                Left (Octokit.APIError err) | err.statusCode == 404 -> do
+                  Log.info $ PackageName.print name <> ": GitHub repo returns 404, caching and processing..."
+                  Cache.put _seederCache (RepoStatusCache name) Repo404
+                  processDeletedPackage args statsRef name publishedVersions versionCount
+                Left otherErr -> do
+                  -- Transient error - do NOT cache, log for re-run
+                  let errMsg = PackageName.print name <> ": " <> Octokit.printGitHubError otherErr
+                  Log.warn $ errMsg <> " (transient, will retry on next run)"
+                  liftEffect $ Ref.modify_ (\s -> s { transientErrors = s.transientErrors + 1 }) statsRef
+                  liftEffect $ Ref.modify_ (Array.snoc <@> errMsg) transientErrorsRef
+
+  -- Process either single package or all packages
+  case args.package of
+    Just targetName -> Registry.readMetadata targetName >>= case _ of
+      Nothing -> Except.throw $ "Package " <> PackageName.print targetName <> " not found in registry metadata."
+      Just metadata -> processPackage targetName metadata
+    Nothing -> do
+      allMetadata <- Registry.readAllMetadata
+      Log.info $ "Read metadata for " <> show (Map.size allMetadata) <> " packages."
+      forWithIndex_ allMetadata processPackage
+
+  -- Generate summary report
+  stats <- liftEffect $ Ref.read statsRef
+  transientErrors <- liftEffect $ Ref.read transientErrorsRef
+  let report = formatReport stats transientErrors
+  Log.info report
+
+  let reportPath = Path.concat [ scratchDir, "archive-seeder-report.txt" ]
+  Run.liftAff $ FS.Aff.writeTextFile UTF8 reportPath report
+  Log.info $ "Report written to " <> reportPath
+
+  let hadTransientErrors = stats.transientErrors > 0
+  let wroteAnything = stats.tarballsWritten > 0
+
+  if hadTransientErrors then do
+    Log.warn $ "There were " <> show stats.transientErrors <> " transient errors. Re-run the script to retry."
+    pure true
+  else if args.dryRun then do
+    Log.info $ String.joinWith "\n"
+      [ ""
+      , "Dry run complete!"
+      , "Run without --dry-run to write tarballs and commit."
+      ]
+    pure false
+  else if wroteAnything then do
+    Log.warn "Make sure to commit and push archive changes!"
+    pure false
+  else do
+    Log.info "Archive seeding complete! No new tarballs were written."
+    pure false
+
+processDeletedPackage
+  :: forall r
+   . Args
+  -> Ref.Ref Stats
+  -> PackageName
+  -> Set Version
+  -> Int
+  -> Run (SeedEffects r) Unit
+processDeletedPackage args statsRef name publishedVersions versionCount = do
+  liftEffect $ Ref.modify_ (\s -> s { packagesNeedingArchive = s.packagesNeedingArchive + 1 }) statsRef
+  liftEffect $ Ref.modify_ (\s -> s { versionsNeedingArchive = s.versionsNeedingArchive + versionCount }) statsRef
+
+  Log.info $ PackageName.print name <> ": Checking S3 for tarballs..."
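+  -- Storage.query reports which versions of this package have tarballs in the
+  -- registry's storage backend; only those versions can be copied to the archive.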
+ + -- Check S3 for available versions + Except.runExcept (Storage.query name) >>= case _ of + Left queryErr -> do + Log.warn $ PackageName.print name <> ": Failed to query S3: " <> queryErr + Right s3Versions -> do + Log.debug $ PackageName.print name <> ": S3 has " <> show (Set.size s3Versions) <> " versions" + + -- For each published version, try to download and write to archive + for_ publishedVersions \version -> do + let formatted = formatPackageVersion name version + let archiveSubdir = Path.concat [ args.archivePath, PackageName.print name ] + let archiveFile = Path.concat [ archiveSubdir, Version.print version <> ".tar.gz" ] + + -- Check if already exists in archive (skip check in dry-run since we don't ensure dir exists) + exists <- if args.dryRun then pure false else liftEffect $ FS.Sync.exists archiveFile + if exists then do + Log.debug $ formatted <> ": Already exists in archive, skipping" + liftEffect $ Ref.modify_ (\s -> s { tarballsSkipped = s.tarballsSkipped + 1 }) statsRef + else if Set.member version s3Versions then do + if args.dryRun then do + Log.info $ formatted <> ": Would download from S3 and write to archive (dry run)" + liftEffect $ Ref.modify_ (\s -> s { tarballsWritten = s.tarballsWritten + 1 }) statsRef + else do + Log.info $ formatted <> ": Downloading from S3..." + Run.liftAff $ FS.Extra.ensureDirectory archiveSubdir + Except.runExcept (Storage.download name version archiveFile) >>= case _ of + Left downloadErr -> do + Log.warn $ formatted <> ": Failed to download: " <> downloadErr + liftEffect $ Ref.modify_ (\s -> s { tarballsMissing = s.tarballsMissing + 1 }) statsRef + Right _ -> do + Log.info $ formatted <> ": Written to archive" + liftEffect $ Ref.modify_ (\s -> s { tarballsWritten = s.tarballsWritten + 1 }) statsRef + else do + Log.warn $ formatted <> ": Not available in S3" + liftEffect $ Ref.modify_ (\s -> s { tarballsMissing = s.tarballsMissing + 1 }) statsRef + +formatReport :: Stats -> Array String -> String +formatReport stats transientErrors = String.joinWith "\n" (header <> transients) + where + header = + [ "=== Archive Seeder Report ===" + , "" + , "Packages checked: " <> show stats.packagesChecked + , "Versions checked: " <> show stats.versionsChecked + , "" + , "Packages needing archive (GitHub 404): " <> show stats.packagesNeedingArchive + , "Versions needing archive: " <> show stats.versionsNeedingArchive + , "" + , "Tarballs written: " <> show stats.tarballsWritten + , "Tarballs skipped (already exist): " <> show stats.tarballsSkipped + , "Tarballs missing (not in S3): " <> show stats.tarballsMissing + , "" + , "Transient errors (re-run to retry): " <> show stats.transientErrors + ] + + transients = do + guard $ Array.null transientErrors + [ "", "Packages with transient errors:" ] <> map (" - " <> _) transientErrors diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index a9f0079b5..05e73ae84 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -4,34 +4,90 @@ -- | It can be run in different modes depending on whether you want to generate -- | the registry from scratch, including uploading packages to the backend or -- | you just want to iteratively pick up new releases. +-- | +-- | The legacy importer clones the registry and registry-index repositories into +-- | `scratch/registry` and `scratch/registry-index`. 
After a run, you can diff +-- | against the upstream to see what changed: +-- | +-- | ```sh +-- | cd scratch/registry +-- | git diff origin/main -- metadata/ +-- | ``` +-- | +-- | For a fresh re-upload, reset the local clones first, then delete the +-- | metadata and index contents so the importer sees an empty registry: +-- | +-- | ```sh +-- | cd scratch/registry && git reset --hard origin/main +-- | rm -rf scratch/registry/metadata/* +-- | cd scratch/registry-index && git reset --hard origin/main +-- | rm -rf scratch/registry-index/*/ +-- | ``` +-- | +-- | To also recompute all cached manifests and compilation results, remove the +-- | cache directory, or you can remove specific cache files as needed. +-- | +-- | ```sh +-- | rm -rf scratch/.cache +-- | ``` +-- | +-- | The script writes several files to `scratch`: +-- | +-- | - `import-stats.txt` - Aggregate counts of import results +-- | - `package-failures.json` - Package-level failures (e.g., repo not found) +-- | - `version-failures.json` - Version-level failures (e.g., invalid manifest) +-- | - `publish-failures.json` - Publish-level failures (e.g., no valid compiler) +-- | - `publish-stats.txt` - Aggregate counts of publish results +-- | - `reserved-packages.txt` - Packages reserved due to 0.13 or org status +-- | - `removed-packages.txt` - Packages that fully failed and will be removed +-- | - `sorted-packages.txt` - All packages in topological order for publishing +-- | - `logs/` - Detailed logs from each run module Registry.Scripts.LegacyImporter where import Registry.App.Prelude import ArgParse.Basic (ArgParser) import ArgParse.Basic as Arg +import Codec.JSON.DecodeError as CJ.DecodeError import Control.Apply (lift2) import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Compactable (separate) +import Data.DateTime (Date, Month(..)) +import Data.DateTime as DateTime +import Data.Enum (toEnum) import Data.Exists as Exists import Data.Filterable (partition) import Data.Foldable (foldMap) import Data.Foldable as Foldable import Data.Formatter.DateTime as Formatter.DateTime +import Data.Function (on) import Data.FunctorWithIndex (mapWithIndex) import Data.List as List +import Data.List.NonEmpty as NonEmptyList import Data.Map as Map import Data.Ordering (invert) import Data.Profunctor as Profunctor import Data.Set as Set +import Data.Set.NonEmpty (NonEmptySet) +import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits +import Data.These (These(..)) +import Data.Tuple (uncurry) import Data.Variant as Variant +import Effect.Aff as Aff import Effect.Class.Console as Console +import Effect.Exception as Exception +import Fetch.Retry as Fetch +import JSON as JSON +import JSON.Object as JSON.Object +import Node.Buffer as Buffer +import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process import Parsing (Parser) @@ -40,39 +96,54 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic +import Registry.App.API (COMPILER_CACHE) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) +import Registry.App.CLI.Purs as Purs +import 
Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.LenientVersion (LenientVersion) import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestValidationError) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) +import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.Constants as Constants import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tar as Foreign.Tar +import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec (packageMap, versionMap) +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.Location as Location import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex -import Registry.Operation (PublishData) import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver (CompilerIndex(..)) +import Registry.Solver as Solver import Registry.Version as Version -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Run.Except -import Spago.Generated.BuildInfo as BuildInfo import Type.Proxy (Proxy(..)) data ImportMode = DryRun | GenerateRegistry | UpdateRegistry @@ -125,9 +196,10 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) GenerateRegistry -> do @@ -136,9 +208,10 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle (registryEnv Git.Autostash (Registry.CommitAs (Git.pacchettibottiCommitter token)))) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) UpdateRegistry -> do @@ -147,9 +220,10 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle 
(registryEnv Git.ForceClean (Registry.CommitAs (Git.pacchettibottiCommitter token)))) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret (Pursuit.handleAff token) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Recent) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) -- Logging setup @@ -161,18 +235,18 @@ main = launchAff_ do logFile = "legacy-importer-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" logPath = Path.concat [ logDir, logFile ] - runLegacyImport mode logPath + runLegacyImport logPath # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' -runLegacyImport :: forall r. ImportMode -> FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit -runLegacyImport mode logs = do +runLegacyImport :: forall r. FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit +runLegacyImport logs = do Log.info "Starting legacy import!" Log.info $ "Logs available at " <> logs @@ -204,108 +278,315 @@ runLegacyImport mode logs = do pure $ fixupNames allPackages Log.info $ "Read " <> show (Set.size (Map.keys legacyRegistry)) <> " package names from the legacy registry." - importedIndex <- importLegacyRegistry legacyRegistry - Log.info "Writing package and version failures to disk..." - Run.liftAff $ writePackageFailures importedIndex.failedPackages - Run.liftAff $ writeVersionFailures importedIndex.failedVersions - - Log.info "Writing empty metadata files for legacy packages that can't be registered..." - void $ forWithIndex importedIndex.reservedPackages \package location -> do + Log.info "Reading packages eligible for reservation (post-0.13 or trusted orgs)..." + eligibleForReservation <- readPackagesMetadata >>= case _ of + Left err -> do + Log.warn $ "Could not read reserved packages: " <> err + Log.warn $ "Determining reserved packages..." + metadata <- getPackagesMetadata legacyRegistry + let cutoff = filterPackages_0_13 metadata + writePackagesMetadata cutoff + pure cutoff + Right cutoff -> pure cutoff + + -- Reserve the 'metadata', 'purs', and 'purescript' package names + let metadataPackage = unsafeFromRight (PackageName.parse "metadata") + let pursPackage = unsafeFromRight (PackageName.parse "purs") + let purescriptPackage = unsafeFromRight (PackageName.parse "purescript") + for_ [ metadataPackage, pursPackage, purescriptPackage ] \package -> Registry.readMetadata package >>= case _ of Nothing -> do - let metadata = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } - Registry.writeMetadata package metadata + Log.info $ "Writing empty metadata file for " <> PackageName.print package + let location = GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print package, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry Just _ -> pure unit - Log.info "Ready for upload!" 
- - Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex - - Log.info "Sorting packages for upload..." - let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges importedIndex.registryIndex + importedIndex <- importLegacyRegistry legacyRegistry - Log.info "Removing packages that previously failed publish" - indexPackages <- allIndexPackages # Array.filterA \(Manifest { name, version }) -> - isNothing <$> Cache.get _importCache (PublishFailure name version) + -- Reserve metadata files for post-0.13 packages that failed to import (no usable versions). + -- Pre-0.13 packages and explicitly freed packages are NOT reserved. + Log.info "Reserving metadata files for post-0.13 packages that failed import..." + let + packagesToReserve = Map.filterWithKey (\name _ -> Map.member name eligibleForReservation) importedIndex.removedPackages + forWithIndex_ packagesToReserve \package location -> Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for reserved package " <> PackageName.print package + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> Log.debug $ PackageName.print package <> " already reserved." - allMetadata <- Registry.readAllMetadata + Log.info "Writing package and version failures to disk..." + Run.liftAff $ writePackageFailures importedIndex.failedPackages + Run.liftAff $ writeVersionFailures importedIndex.failedVersions - -- This record comes from the build directory (.spago) and records information - -- from the most recent build. - let compiler = unsafeFromRight (Version.parse BuildInfo.pursVersion) + Log.info "Ready for upload!" + let importStats = calculateImportStats legacyRegistry importedIndex + let formattedStats = formatImportStats importStats + Log.info formattedStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) formattedStats - -- Just a safety check to ensure the compiler used in the pipeline is not too - -- low. Should be bumped from time to time to the latest compiler. - let minCompiler = unsafeFromRight (Version.parse "0.15.7") - when (compiler < minCompiler) do - Run.Except.throw $ "Local compiler " <> Version.print compiler <> " is too low (min: " <> Version.print minCompiler <> ")." + Log.info "Sorting packages for upload..." 
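The sort in the next hunk matters because a package can only be published once everything it depends on is already in the registry. As an illustration of the property the sorted index is expected to provide (this is not the ManifestIndex implementation), a dependencies-first check over a proposed upload order:

```purescript
module Sketch.UploadOrder where

import Prelude

import Data.Array as Array
import Data.Map (Map)
import Data.Map as Map
import Data.Set (Set)
import Data.Set as Set

-- True when every package appears only after all of its dependencies.
-- Packages missing from the dependency map are treated as dependency-free.
isDependencyFirst :: forall name. Ord name => Map name (Set name) -> Array name -> Boolean
isDependencyFirst deps order = Array.all identity (Array.mapWithIndex check order)
  where
  check i name = case Map.lookup name deps of
    Nothing -> true
    Just required -> required `Set.subset` Set.fromFoldable (Array.take i order)
```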
+ let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "sorted-packages.txt" ]) $ String.joinWith "\n" $ map (\(Manifest { name, version }) -> PackageName.print name <> "@" <> Version.print version) allIndexPackages - Log.info $ "Using compiler " <> Version.print compiler + Log.info "Removing packages that previously failed to publish or have already been published" + publishable <- do + allMetadata <- Registry.readAllMetadata + allIndexPackages # Array.filterA \(Manifest { name, version }) -> do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure $ not $ hasMetadata allMetadata name version + Just _ -> pure false + + allCompilers <- PursVersions.pursVersions + allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (Version.bumpPatch (NonEmptyArray.last allCompilers)) of + Nothing -> Run.Except.throw $ "Failed to construct a compiler range from " <> Version.print (NonEmptyArray.head allCompilers) <> " and " <> Version.print (NonEmptyArray.last allCompilers) + Just range -> do + Log.info $ "All available compilers range: " <> Range.print range + pure range let + publishLegacyPackage :: Solver.TransitivizedRegistry -> Set PackageName -> Manifest -> Run _ Unit + publishLegacyPackage legacyIndex archivePackages (Manifest manifest) = do + let formatted = formatPackageVersion manifest.name manifest.version + let isArchiveBacked = manifest.name `Set.member` archivePackages + Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" + RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of + Nothing -> Run.Except.throw $ "Unable to recover package ref for " <> formatted + Just ref -> pure ref + + Log.debug "Building dependency index with compiler versions..."
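The range built above is half-open, so bumping the patch of the newest compiler makes that compiler itself admissible: registry ranges have the shape `>=lower <upper`. A standalone sketch with hypothetical compiler versions, assuming the `Version.parse`, `Version.bumpPatch`, and `Range.mk` signatures used in this module:

```purescript
module Sketch.CompilerRange where

import Prelude

import Data.Either (hush)
import Data.Maybe (Maybe)
import Registry.Range (Range)
import Registry.Range as Range
import Registry.Version as Version

-- ">=0.13.0 <0.15.11": the exclusive upper bound is one patch above the
-- newest released compiler (0.15.10 here), so that compiler is included.
allCompilersRange :: Maybe Range
allCompilersRange = do
  lowest <- hush (Version.parse "0.13.0")
  highest <- hush (Version.parse "0.15.10")
  Range.mk lowest (Version.bumpPatch highest)
```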
+ compilerIndex <- MatrixBuilder.readCompilerIndex + + Log.debug $ "Solving dependencies for " <> formatted + eitherResolutions <- do + let toErrors = map Solver.printSolverError <<< NonEmptyList.toUnfoldable + let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") + let partitionIsCompiler = partitionEithers <<< map (\error -> if isCompilerSolveError error then Right error else Left error) + + legacySolution <- case Solver.solveFull { registry: legacyIndex, required: Solver.initializeRequired manifest.dependencies } of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with legacy index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with legacy index." + -- The solutions do us no good if the dependencies don't exist. Note + -- the compiler index is updated on every publish. + let lookupInRegistry res = maybe (Left res) (\_ -> Right res) (Map.lookup (fst res) (un CompilerIndex compilerIndex) >>= Map.lookup (snd res)) + let { fail: notRegistered } = partitionEithers $ map lookupInRegistry $ Map.toUnfoldable resolutions + if (Array.null notRegistered) then + pure $ Right resolutions + else do + let missing = "Some resolutions from legacy index are not registered: " <> String.joinWith ", " (map (uncurry formatPackageVersion) notRegistered) + Log.warn missing + Log.warn "Not using legacy index resolutions for this package." + pure $ Left $ SolveFailedDependencies missing + + currentSolution <- case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with current index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right (Tuple _ resolutions) -> do + Log.debug $ "Solved " <> formatted <> " with contemporary index." + pure $ Right resolutions + + pure $ case legacySolution, currentSolution of + Left err, Left _ -> Left err + Right resolutions, Left _ -> Right $ This resolutions + Left _, Right resolutions -> Right $ That resolutions + Right legacyResolutions, Right currentResolutions -> Right $ Both legacyResolutions currentResolutions + + case eitherResolutions of + -- We skip if we couldn't solve (but we write the error to cache). + Left err -> + Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right resolutionOptions -> do + Log.info "Selecting usable compiler from resolutions..." + + let + findFirstFromResolutions :: Map PackageName Version -> Run _ (Either (Map Version CompilerFailure) Version) + findFirstFromResolutions resolutions = do + Log.debug $ "Finding compiler for " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + possibleCompilers <- + if Map.isEmpty manifest.dependencies then do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." 
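The two solver outcomes above are merged into a These-shaped value so that compiler selection can distinguish legacy-only, current-only, and both-solved cases. Condensed into a standalone helper (Data.These comes from the `these` package already in the build plan):

```purescript
module Sketch.CombineSolutions where

import Data.Either (Either(..))
import Data.These (These(..))

-- Keep whichever solutions succeeded; fail only when both solver runs
-- failed, surfacing the legacy error in that case, mirroring the case
-- expression above.
combineSolutions :: forall e a. Either e a -> Either e a -> Either e (These a a)
combineSolutions legacy current = case legacy, current of
  Left err, Left _ -> Left err
  Right l, Left _ -> Right (This l)
  Left _, Right c -> Right (That c)
  Right l, Right c -> Right (Both l c)
```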
+ pure $ NonEmptySet.fromFoldable1 allCompilers + else do + Log.debug "Determining compatible compilers from the metadata of the resolved dependencies..." + allMetadata <- Registry.readAllMetadata + case compatibleCompilers allMetadata resolutions of + Left [] -> do + Log.debug "No resolved dependencies had compiler metadata, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Log.warn $ Array.fold + [ "Resolutions admit no overlapping compiler versions:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + pure $ NonEmptySet.fromFoldable1 allCompilers + Right compilers -> do + Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CJ.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + + cached <- do + cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> + Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of + Nothing -> pure Nothing + Just { result: Left _ } -> pure Nothing + Just { target, result: Right _ } -> pure $ Just target + pure $ NonEmptyArray.fromArray $ Array.catMaybes cached + + case cached of + Just prev -> do + let selected = NonEmptyArray.last prev + Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected + pure $ Right selected + Nothing -> do + Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find the first working one." + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + path <- + if isArchiveBacked then do + Log.info $ "Using registry archive for " <> formatted <> " instead of GitHub clone." + { path: archivePath } <- Archive.fetch tmp manifest.name manifest.version + pure archivePath + else do + { path: sourcePath } <- Source.fetch tmp manifest.location ref + pure sourcePath + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + MatrixBuilder.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Trying compilers one-by-one..." + selected <- findFirstCompiler + { source: path + , installed: installDir + , compilers: NonEmptySet.toUnfoldable possibleCompilers + , resolutions + , manifest: Manifest manifest + } + FS.Extra.remove tmp + pure selected + + let + collectCompilerErrors :: Map Version CompilerFailure -> Map (NonEmptyArray Version) CompilerFailure + collectCompilerErrors failures = do + let + foldFn prev xs = do + let Tuple _ failure = NonEmptyArray.head xs + let key = map fst xs + Map.insert key failure prev + Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures) + + reportFailures :: forall a.
_ -> Run _ (Either PublishError a) + reportFailures failures = do + let collected = collectCompilerErrors failures + Log.error $ "Failed to find any valid compilers for publishing:\n" <> printJson compilerFailureMapCodec collected + pure $ Left $ NoCompilersFound collected + + -- Here, we finally attempt to find a suitable compiler. If we only + -- got one set of working resolutions that's what we use. If we got + -- solutions with both the legacy and adjusted-manifest indices, then + -- we try the adjusted index first since that's what is used in the + -- publish pipeline. + eitherCompiler <- case resolutionOptions of + This legacyResolutions -> do + selected <- findFirstFromResolutions legacyResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + That currentResolutions -> do + selected <- findFirstFromResolutions currentResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Both legacyResolutions currentResolutions -> do + selectedCurrent <- findFirstFromResolutions currentResolutions + case selectedCurrent of + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Left currentFailures | legacyResolutions == currentResolutions -> reportFailures currentFailures + Left _ -> do + Log.info $ "Could not find suitable compiler from current index, trying legacy solution..." + selectedLegacy <- findFirstFromResolutions legacyResolutions + case selectedLegacy of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + + case eitherCompiler of + Left err -> Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right (Tuple compiler resolutions) -> do + Log.debug $ "Selected " <> Version.print compiler <> " for publishing." + let + payload = + { name: manifest.name + , location: Just manifest.location + , ref + , version: manifest.version + , compiler + , resolutions: Just resolutions + } + Run.Except.runExcept (API.publish (Just legacyIndex) payload) >>= case _ of + Left error -> do + Log.error $ "Failed to publish " <> formatted <> ": " <> error + Cache.put _importCache (PublishFailure manifest.name manifest.version) (PublishError error) + Right _ -> do + Log.info $ "Published " <> formatted + + case publishable of [] -> Log.info "No packages to publish." 
manifests -> do - let printPackage (Manifest { name, version }) = formatPackageVersion name version Log.info $ Array.foldMap (append "\n") [ "----------" , "AVAILABLE TO PUBLISH" - , "" - , " using purs " <> Version.print compiler - , "" + , Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) manifests , "----------" - , Array.foldMap (append "\n - " <<< printPackage) manifests ] - let - source = case mode of - DryRun -> LegacyPackage - GenerateRegistry -> LegacyPackage - UpdateRegistry -> CurrentPackage - - void $ for notPublished \(Manifest manifest) -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.info $ Array.foldMap (append "\n") - [ "----------" - , "PUBLISHING: " <> formatted - , stringifyJson Location.codec manifest.location - , "----------" - ] - operation <- mkOperation (Manifest manifest) - - result <- Run.Except.runExcept $ API.publish source operation - -- TODO: Some packages will fail because the legacy importer does not - -- perform all the same validation checks that the publishing flow does. - -- What should we do when a package has a valid manifest but fails for - -- other reasons? Should they be added to the package validation - -- failures and we defer writing the package failures until the import - -- has completed? - case result of - Left error -> do - Log.error $ "Failed to publish " <> formatted <> ": " <> error - Cache.put _importCache (PublishFailure manifest.name manifest.version) error - Right _ -> do - Log.info $ "Published " <> formatted + legacyIndex <- do + Log.info "Transitivizing legacy registry..." + pure + $ Solver.exploreAllTransitiveDependencies + $ Solver.initializeRegistry + $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) + + let archivePackages = importedIndex.archivePackages + void $ for manifests (publishLegacyPackage legacyIndex archivePackages) + + Log.info "Finished publishing! Collecting all publish failures and writing to disk." + let + collectError prev (Manifest { name, version }) = do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure prev + Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev + failures <- Array.foldM collectError Map.empty allIndexPackages + Run.liftAff $ writePublishFailures failures + + let publishStats = collectPublishFailureStats importStats (map _.address eligibleForReservation) importedIndex.registryIndex failures + let publishStatsMessage = formatPublishFailureStats publishStats + Log.info publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "reserved-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.reserved))) + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable (Set.difference publishStats.packages.failed publishStats.packages.reserved)))) + +-- | Record all publish failures to the 'publish-failures.json' file.
+writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit +writePublishFailures = + writeJsonFile (packageMap (versionMap jsonValidationErrorCodec)) (Path.concat [ scratchDir, "publish-failures.json" ]) + <<< map (map formatPublishError) -- | Record all package failures to the 'package-failures.json' file. writePackageFailures :: Map RawPackageName PackageValidationError -> Aff Unit @@ -324,9 +605,10 @@ type LegacyRegistry = Map RawPackageName String type ImportedIndex = { failedPackages :: Map RawPackageName PackageValidationError , failedVersions :: Map RawPackageName (Map RawVersion VersionValidationError) - , reservedPackages :: Map PackageName Location + , removedPackages :: Map PackageName Location , registryIndex :: ManifestIndex , packageRefs :: Map PackageName (Map Version RawVersion) + , archivePackages :: Set PackageName } -- | Construct a valid registry index containing manifests for all packages from @@ -339,13 +621,16 @@ importLegacyRegistry legacyRegistry = do manifests <- forWithIndex legacyRegistry buildLegacyPackageManifests let - separatedPackages :: { left :: Map RawPackageName PackageValidationError, right :: Map RawPackageName (Map RawVersion _) } + separatedPackages :: { left :: Map RawPackageName PackageValidationError, right :: Map RawPackageName PackageManifests } separatedPackages = separate manifests + archiveBackedByRaw :: Map RawPackageName Boolean + archiveBackedByRaw = separatedPackages.right <#> _.archiveBacked + separatedVersions :: { left :: Map RawPackageName (Map RawVersion VersionValidationError), right :: Map RawPackageName (Map RawVersion Manifest) } separatedVersions = separatedPackages.right # flip foldlWithIndex { left: Map.empty, right: Map.empty } \key acc next -> do - let { left, right } = separate next + let { left, right } = separate next.versions { left: if Map.isEmpty left then acc.left else Map.insert key left acc.left , right: if Map.isEmpty right then acc.right else Map.insert key right acc.right } @@ -362,16 +647,15 @@ importLegacyRegistry legacyRegistry = do -- A 'checked' index is one where we have verified that all dependencies -- are self-contained within the registry. - Tuple unsatisfied validIndex = ManifestIndex.maximalIndex validLegacyManifests + Tuple unsatisfied validIndex = ManifestIndex.maximalIndex ManifestIndex.ConsiderRanges validLegacyManifests -- The list of all packages that were present in the legacy registry files, - -- but which have no versions present in the fully-imported registry. These - -- packages still need to have empty metadata files written for them. - reservedPackages :: Map PackageName Location - reservedPackages = - Map.fromFoldable $ Array.mapMaybe reserved $ Map.toUnfoldable legacyRegistry + -- but which have no versions present in the fully-imported registry. 
+ removedPackages :: Map PackageName Location + removedPackages = + Map.fromFoldable $ Array.mapMaybe removed $ Map.toUnfoldable legacyRegistry where - reserved (Tuple (RawPackageName name) address) = do + removed (Tuple (RawPackageName name) address) = do packageName <- hush $ PackageName.parse name guard $ isNothing $ Map.lookup packageName $ ManifestIndex.toMap validIndex { owner, repo } <- hush $ Parsing.runParser address legacyRepoParser @@ -400,14 +684,29 @@ importLegacyRegistry legacyRegistry = do [ { package: RawPackageName (PackageName.print name), version: ref, dependencies: Array.fromFoldable $ Map.keys deps } ] Map.unionWith Map.union separatedVersions.left dependencyFailures + archivePackages :: Set PackageName + archivePackages = + Set.fromFoldable do + Tuple rawName isArchive <- Map.toUnfoldable archiveBackedByRaw + guard isArchive + let (RawPackageName raw) = rawName + name <- Array.fromFoldable (hush $ PackageName.parse raw) + pure name + pure { failedPackages: packageFailures , failedVersions: versionFailures - , reservedPackages: reservedPackages + , removedPackages: removedPackages , registryIndex: validIndex , packageRefs + , archivePackages } +type PackageManifests = + { archiveBacked :: Boolean + , versions :: Map RawVersion (Either VersionValidationError Manifest) + } + -- | Attempt to build valid manifests for all releases associated with the given -- | legacy package. This will result in a package error if versions could not -- | be fetched in the first place. Otherwise, it will produce errors for all @@ -416,7 +715,7 @@ buildLegacyPackageManifests :: forall r . RawPackageName -> String - -> Run (API.PublishEffects + IMPORT_CACHE + r) (Either PackageValidationError (Map RawVersion (Either VersionValidationError Manifest))) + -> Run (API.PublishEffects + IMPORT_CACHE + r) (Either PackageValidationError PackageManifests) buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPackage do Log.info $ "Processing " <> un RawPackageName rawPackage package <- validatePackage rawPackage rawUrl @@ -428,37 +727,191 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa buildManifestForVersion :: Tag -> Run _ (Either VersionValidationError Manifest) buildManifestForVersion tag = Run.Except.runExceptAt _exceptVersion do version <- exceptVersion $ validateVersion tag - - -- TODO: This will use the manifest for the package version from the - -- registry, without trying to produce a legacy manifest. However, we may - -- want to always attempt to produce a legacy manifest. If we can produce - -- one we compare it to the existing entry, failing if there is a - -- difference; if we can't, we warn and fall back to the existing entry. 
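The manifest builders below all funnel through the import cache: a hit short-circuits, and a miss stores whatever was built, success or failure, before returning it. That shape, extracted as a hypothetical helper (the `ImportCache` key and `IMPORT_CACHE` row are defined at the bottom of this module; the helper itself is not in the source):

```purescript
-- Hypothetical helper relying on this module's imports; it is not part of
-- the actual change, just the cache-through shape the builders follow.
cachedBuild
  :: forall r
   . PackageName
  -> RawVersion
  -> Run (IMPORT_CACHE + r) (Either VersionValidationError Manifest)
  -> Run (IMPORT_CACHE + r) (Either VersionValidationError Manifest)
cachedBuild name version build =
  Cache.get _importCache (ImportManifest name version) >>= case _ of
    Just cached -> pure cached
    Nothing -> do
      result <- build
      Cache.put _importCache (ImportManifest name version) result
      pure result
```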
- Registry.readManifest package.name (LenientVersion.version version) >>= case _ of + Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + Just cached -> exceptVersion cached Nothing -> do - Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of - Nothing -> do - Log.debug $ "Building manifest in legacy import because it was not found in cache: " <> formatPackageVersion package.name (LenientVersion.version version) - manifest <- Run.Except.runExceptAt _exceptVersion do - exceptVersion $ validateVersionDisabled package.name version - legacyManifest <- do - Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of - Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } - Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest - Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest - exceptVersion manifest - Just cached -> - exceptVersion cached - - Just manifest -> - exceptVersion $ Right manifest + -- For archive-backed packages (where GitHub repo is unavailable), + -- fetch the manifest directly from the registry archive tarball. + if package.archiveBacked then do + Log.debug $ "Package is archive-backed, fetching manifest from archive for " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + Run.Except.runExcept (fetchManifestFromArchive package.name (LenientVersion.version version)) >>= case _ of + Left error -> throwVersion { error: InvalidManifest { error: NoManifests, reason: error }, reason: "Failed to fetch manifest from archive." } + Right result -> pure result + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest from archive for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest + else do + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. + fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest + Nothing -> do + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + legacyManifest <- do + Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of + Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." 
} + Right result -> pure result + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location tag.name legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag pure (Tuple (RawVersion tag.name) manifest) - pure $ Map.fromFoldable manifests + pure { archiveBacked: package.archiveBacked, versions: Map.fromFoldable manifests } + +data PublishError + = SolveFailedDependencies String + | SolveFailedCompiler String + | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) + | UnsolvableDependencyCompilers (Array GroupedByCompilers) + | PublishError String + +derive instance Eq PublishError + +publishErrorCodec :: CJ.Codec PublishError +publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CJ.Variant.variantMatch + { solveFailedCompiler: Right CJ.string + , solveFailedDependencies: Right CJ.string + , noCompilersFound: Right compilerFailureMapCodec + , unsolvableDependencyCompilers: Right (CJ.array groupedByCompilersCodec) + , publishError: Right CJ.string + } + where + toVariant = case _ of + SolveFailedDependencies error -> Variant.inj (Proxy :: _ "solveFailedDependencies") error + SolveFailedCompiler error -> Variant.inj (Proxy :: _ "solveFailedCompiler") error + NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed + UnsolvableDependencyCompilers group -> Variant.inj (Proxy :: _ "unsolvableDependencyCompilers") group + PublishError error -> Variant.inj (Proxy :: _ "publishError") error + + fromVariant = Variant.match + { solveFailedDependencies: SolveFailedDependencies + , solveFailedCompiler: SolveFailedCompiler + , noCompilersFound: NoCompilersFound + , unsolvableDependencyCompilers: UnsolvableDependencyCompilers + , publishError: PublishError + } + +type PublishFailureStats = + { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName, reserved :: Set PackageName } + , versions :: { total :: Int, considered :: Int, failed :: Int, reason :: Map String Int } + } + +collectPublishFailureStats :: ImportStats -> Map PackageName Address -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importStats eligibleForReservation importedIndex failures = do + let + index :: Map PackageName (Map Version Manifest) + index = ManifestIndex.toMap importedIndex + + countVersions :: forall a. 
Map PackageName (Map Version a) -> Int + countVersions = Array.foldl (\prev (Tuple _ versions) -> prev + Map.size versions) 0 <<< Map.toUnfoldable + + startPackages :: Int + startPackages = importStats.packagesProcessed + + consideredPackages :: Int + consideredPackages = Map.size index + + startVersions :: Int + startVersions = importStats.versionsProcessed + + consideredVersions :: Int + consideredVersions = countVersions index + + failedPackages :: Int + failedPackages = Map.size failures + + failedVersions :: Int + failedVersions = countVersions failures + + removedPackages :: Set PackageName + removedPackages = do + let + foldFn package prev versions = fromMaybe prev do + allVersions <- Map.lookup package index + guard (Map.keys allVersions == Map.keys versions) + pure $ Set.insert package prev + + foldlWithIndex foldFn Set.empty failures + + -- Packages that are eligible for removal, but are reserved due to 0.13 or + -- organization status, are the 'reserved packages'. + reservedPackages :: Set PackageName + reservedPackages = Set.intersection removedPackages (Map.keys eligibleForReservation) + + countByFailure :: Map String Int + countByFailure = do + let + toKey = case _ of + SolveFailedDependencies _ -> "Solving failed (dependencies)" + SolveFailedCompiler _ -> "Solving failed (compiler)" + NoCompilersFound _ -> "No compilers usable for publishing" + UnsolvableDependencyCompilers _ -> "Dependency compiler conflict" + PublishError _ -> "Publishing failed" + + foldFn prev (Tuple _ versions) = + Array.foldl (\prevCounts (Tuple _ error) -> Map.insertWith (+) (toKey error) 1 prevCounts) prev (Map.toUnfoldable versions) + + Array.foldl foldFn Map.empty (Map.toUnfoldable failures) + + { packages: + { total: startPackages + , considered: consideredPackages + , partial: failedPackages + , reserved: reservedPackages + , failed: removedPackages + } + , versions: + { total: startVersions + , considered: consideredVersions + , failed: failedVersions + , reason: countByFailure + } + } + +formatPublishFailureStats :: PublishFailureStats -> String +formatPublishFailureStats { packages, versions } = String.joinWith "\n" + [ "--------------------" + , "PUBLISH FAILURES" + , "--------------------" + , "" + , show packages.considered <> " of " <> show packages.total <> " total packages were considered for publishing (others had no manifests imported)." + , " - " <> show (packages.total - packages.partial - (Set.size packages.failed)) <> " out of " <> show packages.considered <> " packages fully succeeded." + , " - " <> show packages.partial <> " packages partially succeeded." + , " - " <> show (Set.size packages.reserved) <> " packages fully failed, but are reserved due to 0.13 or organization status." + , " - " <> show (Set.size packages.failed - Set.size packages.reserved) <> " packages had all versions fail and will be removed." + , "" + , show versions.considered <> " of " <> show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed."
+ , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) + ] + +compilerFailureMapCodec :: CJ.Codec (Map (NonEmptyArray Version) CompilerFailure) +compilerFailureMapCodec = do + let + print = NonEmptyArray.intercalate "," <<< map Version.print + parse input = do + let versions = String.split (String.Pattern ",") input + let { fail, success } = partitionEithers $ map Version.parse versions + case NonEmptyArray.fromArray success of + Nothing | Array.null fail -> Left "No versions" + Nothing -> Left $ "No versions parsed, some failed: " <> String.joinWith ", " fail + Just result -> pure result + Internal.Codec.strMap "CompilerFailureMap" parse print compilerFailureCodec type EXCEPT_VERSION :: Row (Type -> Type) -> Row (Type -> Type) type EXCEPT_VERSION r = (exceptVersion :: Run.Except.Except VersionValidationError | r) @@ -567,28 +1020,105 @@ type PackageResult = { name :: PackageName , address :: Address , tags :: Array Tag + , archiveBacked :: Boolean } -validatePackage :: forall r. RawPackageName -> String -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult +type FetchTagsResult = + { tags :: Array Tag + , archiveBacked :: Boolean + } + +type PackagesMetadata = { address :: Address, lastPublished :: Date } + +packagesMetadataCodec :: CJ.Codec PackagesMetadata +packagesMetadataCodec = CJ.named "PackagesMetadata" $ CJ.Record.object + { address: CJ.named "Address" $ CJ.Record.object { owner: CJ.string, repo: CJ.string } + , lastPublished: Internal.Codec.iso8601Date + } + +getPackagesMetadata :: forall r. Map RawPackageName String -> Run (REGISTRY + LOG + EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) +getPackagesMetadata legacyRegistry = do + associated <- for (Map.toUnfoldableUnordered legacyRegistry) \(Tuple rawName rawUrl) -> do + Run.Except.runExceptAt (Proxy :: _ "exceptPackage") (validatePackage rawName rawUrl) >>= case _ of + Left _ -> pure Nothing + Right { name, address, tags } -> case Array.head tags of + Nothing -> pure Nothing + Just tag -> do + result <- GitHub.getCommitDate address tag.sha + case result of + Left error -> unsafeCrashWith ("Failed to get commit date for " <> PackageName.print name <> "@" <> tag.name <> ": " <> Octokit.printGitHubError error) + Right date -> pure $ Just $ Tuple name { address, lastPublished: DateTime.date date } + pure $ Map.fromFoldable $ Array.catMaybes associated + +filterPackages_0_13 :: Map PackageName PackagesMetadata -> Map PackageName PackagesMetadata +filterPackages_0_13 = do + let + -- 0.13 release date + cutoff = DateTime.canonicalDate (unsafeFromJust (toEnum 2019)) May (unsafeFromJust (toEnum 29)) + organizations = + [ "purescript" + , "purescript-contrib" + , "purescript-node" + , "purescript-web" + , "rowtype-yoga" + , "purescript-halogen" + , "purescript-deprecated" + ] + + Map.filterWithKey \_ metadata -> do + let { owner } = metadata.address + owner `Array.elem` organizations || metadata.lastPublished >= cutoff + +writePackagesMetadata :: forall r. Map PackageName PackagesMetadata -> Run (LOG + AFF + r) Unit +writePackagesMetadata pkgs = do + let path = Path.concat [ scratchDir, "packages-metadata.json" ] + Log.info $ "Writing packages metadata to " <> path + Run.liftAff $ writeJsonFile (packageMap packagesMetadataCodec) path pkgs + +readPackagesMetadata :: forall r. 
Run (AFF + r) (Either String (Map PackageName PackagesMetadata)) +readPackagesMetadata = Run.liftAff $ readJsonFile (packageMap packagesMetadataCodec) (Path.concat [ scratchDir, "packages-metadata.json" ]) + +validatePackage :: forall r. RawPackageName -> String -> Run (REGISTRY + LOG + GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult validatePackage rawPackage rawUrl = do name <- exceptPackage $ validatePackageName rawPackage exceptPackage $ validatePackageDisabled name address <- exceptPackage $ validatePackageAddress rawUrl - tags <- fetchPackageTags address + { tags, archiveBacked } <- fetchPackageTags name address -- We do not allow packages that redirect from their registered location elsewhere. The package -- transferrer will handle automatically transferring these packages. + -- Skip URL redirect validation for archive-backed packages since they have no valid tag URLs. case Array.head tags of - Nothing -> pure { name, address, tags } + Nothing -> pure { name, address, tags, archiveBacked } + Just _ | archiveBacked -> pure { name, address, tags, archiveBacked } Just tag -> do tagAddress <- exceptPackage case tagUrlToRepoUrl tag.url of Nothing -> Left { error: InvalidPackageURL tag.url, reason: "Failed to format redirected " <> tag.url <> " as a GitHub.Address." } Just formatted -> Right formatted exceptPackage $ validatePackageLocation { registered: address, received: tagAddress } - pure { name, address, tags } + pure { name, address, tags, archiveBacked } -fetchPackageTags :: forall r. Address -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) (Array Tag) -fetchPackageTags address = GitHub.listTags address >>= case _ of +fetchPackageTags :: forall r. PackageName -> Address -> Run (REGISTRY + LOG + GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) FetchTagsResult +fetchPackageTags name address = GitHub.listTags address >>= case _ of Left err -> case err of + Octokit.APIError apiError | apiError.statusCode == 404 -> do + let printed = PackageName.print name + Log.debug $ "GitHub 404 for " <> printed <> ", attempting to synthesize tags from metadata (local, then remote)" + versions <- Registry.readMetadata name >>= case _ of + Just (Metadata metadata) -> do + Log.debug $ "Found metadata for " <> printed <> " in local registry checkout." + pure $ Set.toUnfoldable $ Map.keys metadata.published + Nothing -> do + Log.debug $ "No local metadata for " <> printed <> ", fetching versions from remote registry main branch..." + fetchRemoteRegistryVersions name + case versions of + [] -> do + let error = CannotAccessRepo address + let reason = "GitHub 404 and no metadata found locally or in remote purescript/registry main branch to construct archive tags." + throwPackage { error, reason } + _ -> do + let tags = versions <#> \v -> { name: "v" <> Version.print v, sha: "", url: "" } + Log.info $ "Synthesized " <> show (Array.length tags) <> " tags from metadata for archive-backed package " <> printed + pure { tags, archiveBacked: true } Octokit.APIError apiError | apiError.statusCode >= 400 -> do let error = CannotAccessRepo address let reason = "GitHub API error with status code " <> show apiError.statusCode @@ -599,7 +1129,42 @@ fetchPackageTags address = GitHub.listTags address >>= case _ of , Octokit.printGitHubError err ] Right tags -> - pure tags + pure { tags, archiveBacked: false } + +-- | Fetch published versions for a package directly from the remote registry repo (main branch). +-- | Used as a fallback when the local registry checkout has been cleared (e.g., during reuploads). 
+-- | Only extracts the version keys from the "published" field without fully parsing metadata, +-- | since the remote registry may have a different schema (e.g., missing 'compilers' field). +fetchRemoteRegistryVersions :: forall r. PackageName -> Run (GITHUB + LOG + r) (Array Version) +fetchRemoteRegistryVersions name = do + let + printed = PackageName.print name + path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] + Log.debug $ "Fetching published versions for " <> printed <> " from remote registry" + GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of + Left err -> do + case err of + Octokit.APIError apiError | apiError.statusCode == 404 -> + Log.debug $ "No metadata found in remote registry for " <> printed <> " (404)" + _ -> + Log.warn $ "Failed to fetch remote metadata for " <> printed <> ": " <> Octokit.printGitHubError err + pure [] + Right content -> do + let + parsed = do + json <- hush $ JSON.parse content + obj <- JSON.toJObject json + publishedJson <- JSON.Object.lookup "published" obj + publishedObj <- JSON.toJObject publishedJson + let versionStrings = JSON.Object.keys publishedObj + pure $ Array.mapMaybe (hush <<< Version.parse) versionStrings + case parsed of + Nothing -> do + Log.warn $ "Could not extract versions from remote metadata for " <> printed + pure [] + Just versions -> do + Log.debug $ "Extracted " <> show (Array.length versions) <> " versions from remote metadata for " <> printed + pure versions validatePackageLocation :: { registered :: Address, received :: Address } -> Either PackageValidationError Unit validatePackageLocation addresses = do @@ -641,14 +1206,53 @@ validatePackageDisabled package = disabledPackages :: Map String String disabledPackages = Map.fromFoldable [ Tuple "metadata" reservedPackage + , Tuple "purs" reservedPackage + , Tuple "bitstrings" noSrcDirectory , Tuple "purveyor" noSrcDirectory , Tuple "styled-components" noSrcDirectory , Tuple "styled-system" noSrcDirectory + + , Tuple "arb-instances" freedPackage + , Tuple "big-integer" freedPackage + , Tuple "chosen" freedPackage + , Tuple "chosen-halogen" freedPackage + , Tuple "combinators" freedPackage + , Tuple "constraint-kanren" freedPackage + , Tuple "datareify" freedPackage + , Tuple "dynamic" freedPackage + , Tuple "flux-store" freedPackage + , Tuple "focus-ui" freedPackage + , Tuple "fussy" freedPackage + , Tuple "globals-safe" freedPackage + , Tuple "hashable" freedPackage + , Tuple "hubot" freedPackage + , Tuple "mdcss" freedPackage + , Tuple "node-args" freedPackage + , Tuple "node-readline-question" freedPackage + , Tuple "nunjucks" freedPackage + , Tuple "org" freedPackage + , Tuple "phantomjs" freedPackage + , Tuple "photons" freedPackage + , Tuple "pouchdb-ffi" freedPackage + , Tuple "pux-router" freedPackage + , Tuple "reactive" freedPackage + , Tuple "reactive-jquery" freedPackage + , Tuple "skull" freedPackage + , Tuple "slack" freedPackage + , Tuple "stablename" freedPackage + , Tuple "stm" freedPackage + , Tuple "stuff" freedPackage + , Tuple "subtype" freedPackage + , Tuple "toastr" freedPackage + , Tuple "uport" freedPackage + , Tuple "yaml" freedPackage + , Tuple "zmq" freedPackage ] where reservedPackage = "Reserved package which cannot be uploaded." noSrcDirectory = "No version contains a 'src' directory." + freedPackage = "Abandoned package whose name has been freed for reuse." -- | Validate that a package name parses. Expects the package to already have -- | had its 'purescript-' prefix removed. 
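The hunk below widens the `value` field of `JsonValidationError` from `Maybe String` to `Maybe JSON`, so failure payloads keep their structure when written to disk. A sketch of what that enables, reusing codecs imported in this module (the helper and its inputs are illustrative):

```purescript
-- Illustrative fragment relying on this module's imports; `names` is
-- example data, not something the importer produces here.
unregisteredExample :: Array PackageName -> JsonValidationError
unregisteredExample names =
  { tag: "UnregisteredDependencies"
  , value: Just (CJ.encode (CJ.array PackageName.codec) names)
  , reason: "Dependencies not present in the registry index."
  }
```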
@@ -661,14 +1265,14 @@ validatePackageName (RawPackageName name) = type JsonValidationError = { tag :: String - , value :: Maybe String + , value :: Maybe JSON , reason :: String } jsonValidationErrorCodec :: CJ.Codec JsonValidationError jsonValidationErrorCodec = CJ.named "JsonValidationError" $ CJ.Record.object { tag: CJ.string - , value: CJ.Record.optional CJ.string + , value: CJ.Record.optional CJ.json , reason: CJ.string } @@ -677,31 +1281,43 @@ formatPackageValidationError { error, reason } = case error of InvalidPackageName -> { tag: "InvalidPackageName", value: Nothing, reason } InvalidPackageURL url -> - { tag: "InvalidPackageURL", value: Just url, reason } + { tag: "InvalidPackageURL", value: Just (CJ.encode CJ.string url), reason } PackageURLRedirects { registered } -> - { tag: "PackageURLRedirects", value: Just (registered.owner <> "/" <> registered.repo), reason } + { tag: "PackageURLRedirects", value: Just (CJ.encode CJ.string (registered.owner <> "/" <> registered.repo)), reason } CannotAccessRepo address -> - { tag: "CannotAccessRepo", value: Just (address.owner <> "/" <> address.repo), reason } + { tag: "CannotAccessRepo", value: Just (CJ.encode CJ.string (address.owner <> "/" <> address.repo)), reason } DisabledPackage -> { tag: "DisabledPackage", value: Nothing, reason } formatVersionValidationError :: VersionValidationError -> JsonValidationError formatVersionValidationError { error, reason } = case error of InvalidTag tag -> - { tag: "InvalidTag", value: Just tag.name, reason } + { tag: "InvalidTag", value: Just (CJ.encode CJ.string tag.name), reason } DisabledVersion -> { tag: "DisabledVersion", value: Nothing, reason } InvalidManifest err -> do let errorValue = Legacy.Manifest.printLegacyManifestError err.error - { tag: "InvalidManifest", value: Just errorValue, reason } - UnregisteredDependencies names -> do - let errorValue = String.joinWith ", " $ map PackageName.print names - { tag: "UnregisteredDependencies", value: Just errorValue, reason } + { tag: "InvalidManifest", value: Just (CJ.encode CJ.string errorValue), reason } + UnregisteredDependencies names -> + { tag: "UnregisteredDependencies", value: Just (CJ.encode (CJ.array PackageName.codec) names), reason } + +formatPublishError :: PublishError -> JsonValidationError +formatPublishError = case _ of + SolveFailedCompiler error -> + { tag: "SolveFailedCompiler", value: Nothing, reason: error } + SolveFailedDependencies error -> + { tag: "SolveFailedDependencies", value: Nothing, reason: error } + NoCompilersFound versions -> + { tag: "NoCompilersFound", value: Just (CJ.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." 
} + UnsolvableDependencyCompilers failed -> + { tag: "UnsolvableDependencyCompilers", value: Just (CJ.encode (CJ.array groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } + PublishError error -> + { tag: "PublishError", value: Nothing, reason: error } type ImportStats = { packagesProcessed :: Int , versionsProcessed :: Int - , packageNamesReserved :: Int + , packageNamesRemoved :: Int , packageResults :: { success :: Int, partial :: Int, fail :: Int } , versionResults :: { success :: Int, fail :: Int } , packageErrors :: Map String Int @@ -714,7 +1330,7 @@ formatImportStats stats = String.joinWith "\n" , show stats.packagesProcessed <> " packages processed:" , indent $ show stats.packageResults.success <> " fully successful" , indent $ show stats.packageResults.partial <> " partially successful" - , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " reserved (no usable versions)" + , indent $ show (stats.packageNamesRemoved - stats.packageResults.fail) <> " omitted (no usable versions)" , indent $ show stats.packageResults.fail <> " fully failed" , indent "---" , formatErrors stats.packageErrors @@ -747,8 +1363,8 @@ calculateImportStats legacyRegistry imported = do packagesProcessed = Map.size legacyRegistry - packageNamesReserved = - Map.size imported.reservedPackages + packageNamesRemoved = + Map.size imported.removedPackages packageResults = do let succeeded = Map.keys registryIndex @@ -801,7 +1417,7 @@ calculateImportStats legacyRegistry imported = do { packagesProcessed , versionsProcessed - , packageNamesReserved + , packageNamesRemoved , packageResults , versionResults , packageErrors @@ -830,12 +1446,129 @@ legacyRepoParser = do pure { owner, repo } +fetchSpagoYaml :: forall r. Address -> RawVersion -> Run (GITHUB + LOG + EXCEPT String + r) (Maybe Manifest) +fetchSpagoYaml address ref = do + eitherSpagoYaml <- GitHub.getContent address ref "spago.yaml" + case eitherSpagoYaml of + Left err -> do + Log.debug $ "No spago.yaml found: " <> Octokit.printGitHubError err + pure Nothing + Right contents -> do + Log.debug $ "Found spago.yaml file\n" <> contents + case parseYaml SpagoYaml.spagoYamlCodec contents of + Left error -> do + Log.warn $ "Failed to parse spago.yaml file:\n" <> contents <> "\nwith errors:\n" <> error + pure Nothing + Right { package: Just { publish: Just { location: Just location } } } + | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do + Log.warn "spago.yaml file does not use the same location it was fetched from, which is disallowed..." + pure Nothing + Right config -> case SpagoYaml.spagoYamlToManifest (un RawVersion ref) config of + Left err -> do + Log.warn $ "Failed to convert parsed spago.yaml file to a purs.json manifest:\n" <> contents <> "\nwith errors:\n" <> err + pure Nothing + Right manifest -> do + Log.debug "Successfully converted a spago.yaml into a purs.json manifest" + pure $ Just manifest + +-- | Find the first compiler that can compile the package source code and +-- | installed resolutions from the given array of compilers. Begins with the +-- | latest compiler and works backwards to older compilers. +findFirstCompiler + :: forall r + .
{ compilers :: Array Version + , manifest :: Manifest + , resolutions :: Map PackageName Version + , source :: FilePath + , installed :: FilePath + } + -> Run (COMPILER_CACHE + STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) +findFirstCompiler { source, manifest, resolutions, compilers, installed } = do + search <- Run.Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do + result <- Cache.get API._compilerCache (API.Compilation manifest resolutions target) >>= case _ of + Nothing -> do + Log.info $ "Not cached, trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + let cache = { result: map (const unit) result, target } + Cache.put API._compilerCache (API.Compilation manifest resolutions target) cache + pure cache.result + Just cached -> + pure cached.result + + case result of + Left error -> pure $ Tuple target error + Right _ -> Run.Except.throw target + + case search of + Left worked -> pure $ Right worked + Right others -> pure $ Left $ Map.fromFoldable others + +type GroupedByCompilers = + { packages :: Map PackageName Version + , compilers :: NonEmptySet Version + } + +groupedByCompilersCodec :: CJ.Codec GroupedByCompilers +groupedByCompilersCodec = CJ.named "GroupedByCompilers" $ CJ.Record.object + { compilers: CJ.Common.nonEmptySet Version.codec + , packages: Internal.Codec.packageMap Version.codec + } + +-- | Given a set of package versions, determine the set of compilers that can be +-- | used for all packages. +compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) +compatibleCompilers allMetadata resolutions = do + let + associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } + associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do + Metadata metadata <- Map.lookup name allMetadata + published <- Map.lookup version metadata.published + Just { name, version, compilers: published.compilers } + + case Array.uncons associated of + Nothing -> + Left [] + Just { head, tail: [] } -> + Right $ NonEmptySet.fromFoldable1 head.compilers + Just { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers + case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of + -- An empty intersection means there are no shared compilers among the + -- resolved dependencies. + Nothing -> do + let + grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) + grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) + + collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers + collect vals = + { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) + -- We've already grouped by compilers, so those must all be equal + -- and we can take just the first value. 
+ , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers + } + Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped + + Just set -> + Right set + +type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) + +_importCache :: Proxy "importCache" +_importCache = Proxy + -- | A key type for the storage cache. Only supports packages identified by -- | their name and version. data ImportCache :: (Type -> Type -> Type) -> Type -> Type data ImportCache c a = ImportManifest PackageName RawVersion (c (Either VersionValidationError Manifest) a) - | PublishFailure PackageName Version (c String a) + | PublishFailure PackageName Version (c PublishError a) instance Functor2 c => Functor (ImportCache c) where map k (ImportManifest name version a) = ImportManifest name version (map2 k a) @@ -846,7 +1579,7 @@ instance MemoryEncodable ImportCache where ImportManifest name (RawVersion version) next -> Exists.mkExists $ Key ("ImportManifest__" <> PackageName.print name <> "__" <> version) next PublishFailure name version next -> do - Exists.mkExists $ Key ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) next + Exists.mkExists $ Key ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) next instance FsEncodable ImportCache where encodeFs = case _ of @@ -854,10 +1587,76 @@ instance FsEncodable ImportCache where let codec = CJ.Common.either versionValidationErrorCodec Manifest.codec Exists.mkExists $ AsJson ("ImportManifest__" <> PackageName.print name <> "__" <> version) codec next PublishFailure name version next -> do - let codec = CJ.string - Exists.mkExists $ AsJson ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) codec next - -type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) - -_importCache :: Proxy "importCache" -_importCache = Proxy + let codec = publishErrorCodec + Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next + +-- | Fetch a manifest directly from the registry archive tarball. +-- | Used for archive-backed packages where the original GitHub repo is unavailable. +fetchManifestFromArchive :: forall r. 
PackageName -> Version -> Run (LOG + EXCEPT String + AFF + EFFECT + r) Manifest +fetchManifestFromArchive name version = do + let formatted = formatPackageVersion name version + Log.debug $ "Fetching manifest from archive for " <> formatted + tmp <- Tmp.mkTmpDir + let + nameStr = PackageName.print name + versionStr = Version.print version + tarballName = versionStr <> ".tar.gz" + absoluteTarballPath = Path.concat [ tmp, tarballName ] + archiveUrl = Archive.registryArchiveUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" + + Log.debug $ "Fetching archive tarball from: " <> archiveUrl + response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} + + case response of + Cancelled -> do + FS.Extra.remove tmp + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.FetchError error) -> do + FS.Extra.remove tmp + Log.error $ "HTTP error when fetching archive: " <> Exception.message error + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) + FS.Extra.remove tmp + Log.error $ "Bad status (" <> show status <> ") when fetching archive with body: " <> bodyString + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl <> " (status " <> show status <> ")" + Succeeded { arrayBuffer: arrayBufferAff } -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of + Left error -> do + FS.Extra.remove tmp + Log.error $ "Downloaded archive but failed to write to " <> absoluteTarballPath <> ": " <> Aff.message error + Run.Except.throw $ "Could not save archive tarball for " <> formatted + Right _ -> + Log.debug $ "Tarball downloaded to " <> absoluteTarballPath + + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> do + FS.Extra.remove tmp + Run.Except.throw $ "Downloaded archive tarball for " <> formatted <> " has no top-level directory." + Just extractedPath -> do + Log.debug "Extracting archive tarball..." 
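The extract-and-decode steps that follow reduce to a parse-then-decode pipeline over the purs.json text. A standalone sketch, assuming the `JSON` and codec-json APIs imported in this file (`lmap` is from `Data.Bifunctor`; the helper is illustrative):

```purescript
-- Illustrative fragment relying on this module's imports.
decodeManifest :: String -> Either String Manifest
decodeManifest contents = do
  json <- JSON.parse contents
  lmap CJ.DecodeError.print (CJ.decode Manifest.codec json)
```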
+ Tar.extract { cwd: tmp, archive: tarballName } + let pursJsonPath = Path.concat [ tmp, extractedPath, "purs.json" ] + Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 pursJsonPath)) >>= case _ of + Left error -> do + FS.Extra.remove tmp + Log.error $ "Failed to read purs.json from archive: " <> Aff.message error + Run.Except.throw $ "No purs.json found in archive for " <> formatted + Right contents -> case JSON.parse contents of + Left parseErr -> do + FS.Extra.remove tmp + Log.error $ "Failed to parse purs.json as JSON: " <> parseErr + Run.Except.throw $ "Invalid purs.json in archive for " <> formatted + Right json -> case CJ.decode Manifest.codec json of + Left decodeErr -> do + FS.Extra.remove tmp + Log.error $ "Failed to decode purs.json manifest: " <> CJ.DecodeError.print decodeErr + Run.Except.throw $ "Could not decode purs.json manifest for " <> formatted + Right manifest -> do + FS.Extra.remove tmp + Log.debug $ "Successfully fetched manifest from archive for " <> formatted + pure manifest diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 0bcacc643..257a7b1a2 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -16,10 +16,11 @@ import Effect.Class.Console (log) import Effect.Class.Console as Console import Node.Path as Path import Node.Process as Process +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -151,12 +152,13 @@ main = launchAff_ do let interpret = Registry.interpret (Registry.handle registryEnv) + >>> Archive.interpret Archive.handle >>> Storage.interpret (if arguments.upload then Storage.handleS3 { s3, cache } else Storage.handleReadOnly cache) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) - >>> Comment.interpret Comment.handleLog + >>> Cache.interpret _compilerCache (Cache.handleFs cache) >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv >>> Run.runBaseAff' @@ -226,21 +228,25 @@ deleteVersion arguments name version = do Just published, Nothing -> pure (Just (Right published)) Nothing, Just unpublished -> pure (Just (Left unpublished)) Nothing, Nothing -> pure Nothing + -- Read manifest before deleting it (needed for reimport) + maybeManifest <- Registry.readManifest name version let newMetadata = Metadata $ oldMetadata { published = Map.delete version oldMetadata.published, unpublished = Map.delete version oldMetadata.unpublished } Registry.writeMetadata name newMetadata Registry.deleteManifest name version -- --reimport when arguments.reimport do - case publishment of - Nothing -> Log.error "Cannot reimport a version that was not published" - Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" - Just (Right specificPackageMetadata) -> do + case publishment, maybeManifest of + Nothing, _ -> Log.error "Cannot reimport a version that was not published" + Just (Left _), _ -> Log.error "Cannot reimport a version that was specifically 
unpublished" + Just (Right _), Nothing -> Log.error $ "Cannot reimport: manifest not found for " <> formatted + Just (Right _), Just (Manifest manifest) -> do -- Obtains `newMetadata` via cache - API.publish LegacyPackage + void $ API.publish Nothing { location: Just oldMetadata.location , name: name - , ref: specificPackageMetadata.ref + , ref: manifest.ref + , version: version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs index 95053eed1..29423cf7b 100644 --- a/scripts/src/PackageSetUpdater.purs +++ b/scripts/src/PackageSetUpdater.purs @@ -19,7 +19,6 @@ import Node.Path as Path import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) @@ -114,7 +113,6 @@ main = Aff.launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs index d203c66de..31e859197 100644 --- a/scripts/src/PackageTransferrer.purs +++ b/scripts/src/PackageTransferrer.purs @@ -16,7 +16,6 @@ import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -87,7 +86,6 @@ main = launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runPacchettiBottiEnv { privateKey, publicKey } # Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index ffd66dbd2..ce615b5a9 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant import Data.Foldable (foldMap) import Data.Formatter.DateTime as Formatter.DateTime import Data.Map as Map -import Data.Newtype (unwrap) import Data.String as String import Data.Time.Duration (Milliseconds(..)) import Effect.Class.Console as Aff @@ -28,10 +27,11 @@ import Node.Path as Path import Node.Process as Node.Process import Node.Process as Process import Parsing as Parsing +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -125,9 +125,10 @@ main = launchAff_ do let runAppEffects = Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) + >>> Archive.interpret Archive.handle >>> 
Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) let @@ -148,8 +149,8 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Run.runBaseAff' diff --git a/spago.lock b/spago.lock index 83d2afb8d..ea939ddeb 100644 --- a/spago.lock +++ b/spago.lock @@ -227,7 +227,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -239,14 +238,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -254,7 +249,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -299,11 +293,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -313,20 +303,32 @@ "dependencies": [ "aff", "arrays", + "codec-json", "console", "datetime", - "effect", - "either", - "maybe", - "prelude", + "exceptions", + "fetch", + "integers", + "json", + "node-child-process", + "node-execa", + "node-fs", + "node-path", + "node-process", + "ordered-collections", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", + "routing-duplex", "spec", "spec-node", - "strings" + "strings", + "transformers" ], "build_plan": [ "aff", + "aff-promise", "ansi", "argonaut-codecs", "argonaut-core", @@ -334,6 +336,7 @@ "arrays", "assert", "avar", + "b64", "bifunctors", "catenable-lists", "codec", @@ -342,15 +345,21 @@ "const", "contravariant", "control", + "convertable-options", "datetime", + "debug", "distributive", + "dodo-printer", + "dotenv", "effect", "either", + "encoding", "enums", "exceptions", "exists", "exitcodes", "fetch", + "filterable", "fixed-points", "foldable-traversable", "foreign", @@ -362,7 +371,9 @@ "functors", "gen", "graphs", + "heterogeneous", "http-methods", + "httpurple", "identity", "integers", "invariant", @@ -370,27 +381,39 @@ "js-fetch", "js-promise", "js-promise-aff", + "js-timers", "js-uri", "json", + "justifill", "language-cst-parser", "lazy", "lcg", "lists", + "literals", "maybe", "media-types", "mmorph", "newtype", "node-buffer", + "node-child-process", "node-event-emitter", + "node-execa", "node-fs", + "node-http", + "node-human-signals", + "node-net", + "node-os", "node-path", "node-process", "node-streams", + "node-tls", + "node-url", "nonempty", "now", "nullable", "numbers", "open-memoize", + "options", "optparse", "ordered-collections", "orders", @@ -402,19 +425,26 @@ "prelude", "profunctor", "profunctor-lenses", + "psci-support", "quickcheck", + "quickcheck-laws", "random", "record", + "record-studio", "refs", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "routing-duplex", + "run", "safe-coerce", "spec", "spec-node", "st", "strings", "tailrec", + "these", "transformers", "tuples", "type-equality", 
@@ -422,6 +452,9 @@ "unfoldable", "unicode", "unsafe-coerce", + "unsafe-reference", + "untagged-union", + "uuidv4", "variant", "web-dom", "web-events", @@ -604,7 +637,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -616,14 +648,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -632,7 +660,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -682,11 +709,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -846,7 +869,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -858,14 +880,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -874,7 +892,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -924,11 +941,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -1104,22 +1117,16 @@ "path": "test-utils", "core": { "dependencies": [ - "aff", "arrays", "bifunctors", "codec-json", "datetime", - "effect", "either", "exceptions", - "fetch", "foldable-traversable", "formatters", - "integers", "json", "maybe", - "newtype", - "node-process", "ordered-collections", "partial", "prelude", @@ -1152,7 +1159,6 @@ "enums", "exceptions", "exists", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -1164,14 +1170,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -1179,14 +1181,12 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", "node-event-emitter", "node-fs", "node-path", - "node-process", "node-streams", "nonempty", "now", @@ -1198,7 +1198,6 @@ "parsing", "partial", "pipes", - "posix-types", "prelude", "profunctor", "profunctor-lenses", @@ -1220,11 +1219,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] }, "test": { diff --git a/test-utils/spago.yaml b/test-utils/spago.yaml index d85190964..4362f8e77 100644 --- a/test-utils/spago.yaml +++ b/test-utils/spago.yaml @@ -3,22 +3,16 @@ package: build: pedanticPackages: true dependencies: - - aff - arrays - bifunctors - codec-json - datetime - - effect - either - exceptions - - fetch - foldable-traversable - formatters - - integers - json - maybe - - newtype - - node-process - ordered-collections - partial - prelude diff --git a/test-utils/src/Registry/Test/Assert.purs b/test-utils/src/Registry/Test/Assert.purs index 55c0f2277..2d15e7a74 100644 --- a/test-utils/src/Registry/Test/Assert.purs +++ b/test-utils/src/Registry/Test/Assert.purs @@ -38,6 +38,18 @@ shouldNotContain container elem = when (elem `Foldable.elem` container) do fail (Utils.unsafeStringify elem <> "\n\nshould not be a member of\n\n" <> Utils.unsafeStringify container) +-- | Assert that all elements in `expected` are present in `actual`. +-- | This is a subset check, not an equality check - `actual` may contain +-- | additional elements. 
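+-- | For example, `shouldContainAll [ "a", "b", "c" ] [ "a", "c" ]` succeeds,
+-- | while `shouldContainAll [ "a" ] [ "a", "b" ]` fails.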
+-- | +-- | Useful for E2E tests where a shared database means we can't predict +-- | exact contents, only that certain expected items are present. +shouldContainAll :: forall m a. MonadThrow Error m => Eq a => Array a -> Array a -> m Unit +shouldContainAll actual expected = + Foldable.for_ expected \elem -> + when (elem `Foldable.notElem` actual) do + fail ("Expected element not found:\n" <> Utils.unsafeStringify elem <> "\n\nin array:\n" <> Utils.unsafeStringify actual) + shouldSatisfy :: forall m a. MonadThrow Error m => a -> (a -> Boolean) -> m Unit shouldSatisfy a predicate = unless (predicate a) do diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs deleted file mode 100644 index 960484609..000000000 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ /dev/null @@ -1,180 +0,0 @@ --- | HTTP client for making requests to the registry server during E2E tests. --- | This module provides typed helpers for interacting with the Registry API. -module Registry.Test.E2E.Client - ( Config - , ClientError(..) - , defaultConfig - , configFromEnv - , getJobs - , getJob - , getStatus - , publish - , pollJob - , printClientError - ) where - -import Prelude - -import Codec.JSON.DecodeError as CJ.DecodeError -import Control.Monad.Error.Class (class MonadThrow, throwError) -import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Class (lift) -import Data.Array as Array -import Data.Bifunctor (lmap) -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime) -import Data.Either (Either(..)) -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Int as Int -import Data.Maybe (Maybe(..)) -import Data.Newtype (unwrap) -import Effect (Effect) -import Effect.Aff (Aff, Milliseconds(..), delay) -import Effect.Aff.Class (class MonadAff, liftAff) -import Effect.Exception (Error, error) -import Effect.Exception as Effect.Exception -import Fetch (Method(..)) -import Fetch as Fetch -import JSON as JSON -import Node.Process as Process -import Registry.API.V1 (Job, JobId(..), LogLevel) -import Registry.API.V1 as V1 -import Registry.Internal.Format as Internal.Format -import Registry.Operation (PublishData) -import Registry.Operation as Operation - --- | Configuration for the E2E test client -type Config = - { baseUrl :: String - , timeout :: Milliseconds - , pollInterval :: Milliseconds - , maxPollAttempts :: Int - } - --- | Default configuration for production use (port 8080 matches HTTPurple default) -defaultConfig :: Config -defaultConfig = - { baseUrl: "http://localhost:8080" - , timeout: Milliseconds 30000.0 - , pollInterval: Milliseconds 2000.0 - , maxPollAttempts: 30 - } - --- | Create config from environment, reading SERVER_PORT. --- | --- | SERVER_PORT is required and must be set by the test environment. --- | See `nix/lib/env.nix` for the centralized environment configuration. -configFromEnv :: Effect Config -configFromEnv = do - maybePort <- Process.lookupEnv "SERVER_PORT" - case maybePort of - Nothing -> Effect.Exception.throw "SERVER_PORT environment variable is not set. Run tests via 'nix run .#test-env' or 'nix build .#checks.x86_64-linux.integration'." 
- Just port -> pure $ defaultConfig { baseUrl = "http://localhost:" <> port } - --- | Errors that can occur during client operations -data ClientError - = HttpError { status :: Int, body :: String } - | ParseError String - | Timeout String - | NetworkError String - -printClientError :: ClientError -> String -printClientError = case _ of - HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError msg -> "Parse Error: " <> msg - Timeout msg -> "Timeout: " <> msg - NetworkError msg -> "Network Error: " <> msg - --- | Convert a ClientError to an Effect Error for throwing -toError :: ClientError -> Error -toError = error <<< printClientError - --- | Parse JSON response body using a codec -parseResponse :: forall a. CJ.Codec a -> String -> Either String a -parseResponse codec body = do - json <- lmap (append "JSON parse error: ") $ JSON.parse body - lmap CJ.DecodeError.print $ CJ.decode codec json - --- | Make a GET request and decode the response -get :: forall a. CJ.Codec a -> Config -> String -> Aff (Either ClientError a) -get codec config path = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> path) { method: GET } - body <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse codec body of - Left err -> throwError $ ParseError err - Right a -> pure a - else - throwError $ HttpError { status: response.status, body } - --- | Make a POST request with JSON body and decode the response -post :: forall req res. CJ.Codec req -> CJ.Codec res -> Config -> String -> req -> Aff (Either ClientError res) -post reqCodec resCodec config path reqBody = runExceptT do - let jsonBody = JSON.print $ CJ.encode reqCodec reqBody - response <- lift $ Fetch.fetch (config.baseUrl <> path) - { method: POST - , headers: { "Content-Type": "application/json" } - , body: jsonBody - } - responseBody <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError err - Right a -> pure a - else - throwError $ HttpError { status: response.status, body: responseBody } - --- | Get the list of jobs -getJobs :: Config -> Aff (Either ClientError (Array Job)) -getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs" - --- | Get a specific job by ID, with optional log filtering -getJob :: Config -> JobId -> Maybe LogLevel -> Maybe DateTime -> Aff (Either ClientError Job) -getJob config (JobId jobId) level since = do - let - params = Array.catMaybes - [ level <#> \l -> "level=" <> V1.printLogLevel l - , since <#> \s -> "since=" <> Formatter.DateTime.format Internal.Format.iso8601DateTime s - ] - query = case params of - [] -> "" - ps -> "?" <> Array.intercalate "&" ps - get V1.jobCodec config ("/api/v1/jobs/" <> jobId <> query) - --- | Check if the server is healthy -getStatus :: Config -> Aff (Either ClientError Unit) -getStatus config = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> "/api/v1/status") { method: GET } - if response.status == 200 then - pure unit - else do - body <- lift response.text - throwError $ HttpError { status: response.status, body } - --- | Publish a package -publish :: Config -> PublishData -> Aff (Either ClientError V1.JobCreatedResponse) -publish config publishData = - post Operation.publishCodec V1.jobCreatedResponseCodec config "/api/v1/publish" publishData - --- | Poll a job until it completes or times out -pollJob - :: forall m - . 
MonadAff m - => MonadThrow Error m - => Config - -> JobId - -> m Job -pollJob config jobId = go 1 - where - go attempt - | attempt > config.maxPollAttempts = - throwError $ toError $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" - | otherwise = do - liftAff $ delay config.pollInterval - result <- liftAff $ getJob config jobId (Just V1.Debug) Nothing - case result of - Left err -> throwError $ toError err - Right job -> - case job.finishedAt of - Just _ -> pure job - Nothing -> go (attempt + 1) diff --git a/test-utils/src/Registry/Test/Fixtures.purs b/test-utils/src/Registry/Test/Fixtures.purs new file mode 100644 index 000000000..28692c13c --- /dev/null +++ b/test-utils/src/Registry/Test/Fixtures.purs @@ -0,0 +1,18 @@ +module Registry.Test.Fixtures where + +import Prelude + +import Data.Either as Either +import Data.Maybe (Maybe(..)) +import Partial.Unsafe as Partial +import Registry.Location (Location(..)) +import Registry.Sha256 (Sha256) +import Registry.Sha256 as Sha256 + +-- | A Location for use within tests. +defaultLocation :: Location +defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } + +-- | A Sha256 for use within tests. +defaultHash :: Sha256 +defaultHash = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Failed to parse Sha256") $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/test-utils/src/Registry/Test/Utils.purs b/test-utils/src/Registry/Test/Utils.purs index 2db7280e5..57f177890 100644 --- a/test-utils/src/Registry/Test/Utils.purs +++ b/test-utils/src/Registry/Test/Utils.purs @@ -25,7 +25,6 @@ import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range as Range import Registry.SSH as SSH -import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) import Registry.Version as Version @@ -149,6 +148,7 @@ unsafeManifest name version dependencies = Manifest { url: "https://github.com/purescript/purescript-" <> name <> ".git" , subdir: Nothing } + , ref: "v" <> version , description: Nothing , owners: Nothing , includeFiles: Nothing @@ -158,11 +158,3 @@ unsafeManifest name version dependencies = Manifest -- | Format a package version as a string in the form 'name@X.Y.Z' formatPackageVersion :: PackageName -> Version -> String formatPackageVersion name version = PackageName.print name <> "@" <> Version.print version - --- | A Location for use within tests. -defaultLocation :: Location -defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } - --- | A Sha256 for use within tests. 
-defaultHash :: Sha256 -defaultHash = fromRight "Failed to parse Sha256" $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/types/v1/Manifest.dhall b/types/v1/Manifest.dhall index e9fe88850..2f1a6fa5b 100644 --- a/types/v1/Manifest.dhall +++ b/types/v1/Manifest.dhall @@ -13,6 +13,7 @@ let Manifest = , license : License , version : Version , location : ./Location.dhall + , ref : Text , owners : Optional (List ./Owner.dhall) , description : Optional Text , includeFiles : Optional (List Text) diff --git a/types/v1/Metadata.dhall b/types/v1/Metadata.dhall index 84685290c..083960152 100644 --- a/types/v1/Metadata.dhall +++ b/types/v1/Metadata.dhall @@ -1,4 +1,5 @@ let Map = (./Prelude.dhall).Map.Type +let NonEmpty = (./Prelude.dhall).NonEmpty.Type let Owner = ./Owner.dhall @@ -14,6 +15,7 @@ let PublishedMetadata = { hash : Sha256 , bytes : Natural , publishedTime : ISO8601String + , compilers : NonEmpty Version } let UnpublishedMetadata = diff --git a/types/v1/Prelude.dhall b/types/v1/Prelude.dhall index 8b05657c4..d86e105e1 100644 --- a/types/v1/Prelude.dhall +++ b/types/v1/Prelude.dhall @@ -2,4 +2,4 @@ -- remote hosts in an offline environment (such as Nix in CI). DHALL_PRELUDE is -- automatically set in your Nix shell, but if you are not using a Nix shell and -- want to run this locally then the URL will be used instead. -env:DHALL_PRELUDE ? https://prelude.dhall-lang.org/v19.0.0/package.dhall sha256:eb693342eb769f782174157eba9b5924cf8ac6793897fc36a31ccbd6f56dafe2 +env:DHALL_PRELUDE