From 0b6dd4e33114edc8c084bf2a4ae670afdffb50a4 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Wed, 8 Nov 2023 17:29:11 -0500
Subject: [PATCH 01/64] Add 'compilers' field to metadata

---
 SPEC.md                                     |  3 ++-
 app/fixtures/registry/metadata/prelude.json |  1 +
 .../registry/metadata/type-equality.json    |  1 +
 app/src/App/API.purs                        |  2 +-
 app/test/App/Legacy/PackageSet.purs         |  2 ++
 lib/src/Metadata.purs                       | 18 ++++++++++++++++++
 lib/test/Registry/Metadata.purs             |  7 +++++++
 lib/test/Registry/Operation/Validation.purs |  3 ++-
 types/v1/Metadata.dhall                     |  1 +
 9 files changed, 35 insertions(+), 3 deletions(-)

diff --git a/SPEC.md b/SPEC.md
index 99ea2f476..2af908582 100644
--- a/SPEC.md
+++ b/SPEC.md
@@ -234,11 +234,12 @@ For example:

 All packages in the registry have an associated metadata file, which is located in the `metadata` directory of the `registry` repository under the package name. For example, the metadata for the `aff` package is located at: https://github.com/purescript/registry/blob/main/metadata/aff.json. Metadata files are the source of truth on all published and unpublished versions of a particular package: what their content is and where the package is located. Metadata files are produced by the registry, not by package authors, though they take some information from package manifests.

-Each published version of a package records three fields:
+Each published version of a package records four fields:

 - `hash`: a [`Sha256`](#Sha256) of the compressed archive fetched by the registry for the given version
 - `bytes`: the size of the tarball in bytes
 - `publishedTime`: the time the package was published as an `ISO8601` string
+- `compilers`: compiler versions this package is known to work with. This field can be in one of two states: a single version indicates that the package worked with a specific compiler on upload but has not yet been tested with all compilers, whereas a non-empty array of versions indicates the package has been tested with all compilers the registry supports.

 Each unpublished version of a package records three fields:

diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json
index 0cffc4ab8..24537ed0b 100644
--- a/app/fixtures/registry/metadata/prelude.json
+++ b/app/fixtures/registry/metadata/prelude.json
@@ -6,6 +6,7 @@
   "published": {
     "6.0.1": {
       "bytes": 31142,
+      "compilers": ["0.15.0", "0.15.2"],
       "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=",
       "publishedTime": "2022-08-18T20:04:00.000Z",
       "ref": "v6.0.1"
diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json
index 68f250604..d473c73e4 100644
--- a/app/fixtures/registry/metadata/type-equality.json
+++ b/app/fixtures/registry/metadata/type-equality.json
@@ -6,6 +6,7 @@
   "published": {
     "4.0.1": {
       "bytes": 2184,
+      "compilers": ["0.15.0", "0.15.2"],
       "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=",
       "publishedTime": "2022-04-27T18:00:18.000Z",
       "ref": "v4.0.1"
diff --git a/app/src/App/API.purs b/app/src/App/API.purs
index e2d125614..11a164330 100644
--- a/app/src/App/API.purs
+++ b/app/src/App/API.purs
@@ -756,7 +756,7 @@ publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manife
   Comment.comment "Package is verified! Uploading it to the storage backend..."
   Storage.upload manifest.name manifest.version tarballPath
   Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file."
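The two states of the new `compilers` field as they appear on the wire, sketched with the test suite's `Utils.unsafeVersion` helper (versions illustrative; the codec for this shape, `compilersCodec`, is added in `lib/src/Metadata.purs` below):

```purescript
-- Freshly published: only the compiler used for the upload is known.
-- Serializes as:  "compilers": "0.15.10"
justPublished :: Either Version (NonEmptyArray Version)
justPublished = Left (Utils.unsafeVersion "0.15.10")

-- After the registry has tested every supported compiler: the full set.
-- Serializes as:  "compilers": ["0.15.10", "0.15.12"]
fullyTested :: Either Version (NonEmptyArray Version)
fullyTested = Right (NonEmptyArray.cons' (Utils.unsafeVersion "0.15.10") [ Utils.unsafeVersion "0.15.12" ])
```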
- let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, publishedTime, bytes } metadata.published } + let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } metadata.published } Registry.writeMetadata manifest.name (Metadata newMetadata) Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 8e8207974..5fd4a801a 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,6 +2,7 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude +import Data.Array.NonEmpty as NonEmptyArray import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map @@ -208,6 +209,7 @@ unsafeMetadataEntry (Tuple name version) = do { ref: LenientVersion.raw version , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" , bytes: 0.0 + , compilers: Right (NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2")) , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom } diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index f8e774176..e124dee6b 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,12 +20,14 @@ module Registry.Metadata import Prelude +import Control.Alt ((<|>)) import Data.Array.NonEmpty (NonEmptyArray) import Data.Codec.Argonaut (JsonCodec) import Data.Codec.Argonaut as CA import Data.Codec.Argonaut.Common as CA.Common import Data.Codec.Argonaut.Record as CA.Record import Data.DateTime (DateTime) +import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) @@ -38,6 +40,7 @@ import Registry.Owner as Owner import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) +import Registry.Version as Version import Type.Proxy (Proxy(..)) -- | A record of all published and unpublished versions of a package, along with @@ -69,18 +72,33 @@ codec = Profunctor.wrapIso Metadata $ CA.object "Metadata" -- | not rely on its presence! type PublishedMetadata = { bytes :: Number + , compilers :: Either Version (NonEmptyArray Version) , hash :: Sha256 , publishedTime :: DateTime + + -- UNSPECIFIED: Will be removed in the future. , ref :: String } publishedMetadataCodec :: JsonCodec PublishedMetadata publishedMetadataCodec = CA.Record.object "PublishedMetadata" { bytes: CA.number + , compilers: compilersCodec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime , ref: CA.string } + where + compilersCodec :: JsonCodec (Either Version (NonEmptyArray Version)) + compilersCodec = CA.codec' decode encode + where + decode json = + map Left (CA.decode Version.codec json) + <|> map Right (CA.decode (CA.Common.nonEmptyArray Version.codec) json) + + encode = case _ of + Left version -> CA.encode Version.codec version + Right versions -> CA.encode (CA.Common.nonEmptyArray Version.codec) versions -- | Metadata about an unpublished package version. 
type UnpublishedMetadata = diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index eff61e185..08c12d887 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -25,24 +25,31 @@ recordStudio = "published": { "0.1.0": { "bytes": 3438, + "compilers": "0.13.0", "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", "publishedTime": "2021-03-27T10:03:46.000Z", "ref": "v0.1.0" }, "0.2.1": { "bytes": 3365, + "compilers": "0.13.0", "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", "publishedTime": "2022-05-15T10:51:57.000Z", "ref": "v0.2.1" }, "1.0.0": { "bytes": 5155, + "compilers": "0.13.0", "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", "publishedTime": "2022-11-03T17:30:28.000Z", "ref": "v1.0.0" }, "1.0.1": { "bytes": 5635, + "compilers": [ + "0.13.0", + "0.13.1" + ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", "publishedTime": "2022-11-04T12:21:09.000Z", "ref": "v1.0.1" diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index 2e5cb47aa..338fe4266 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -63,8 +63,9 @@ spec = do now = unsafeDateTime "2022-12-12T12:00:00.000Z" outOfRange = unsafeDateTime "2022-12-10T11:00:00.000Z" inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" + compilers = Left (unsafeVersion "0.13.0") - publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } metadata = Metadata { location: defaultLocation diff --git a/types/v1/Metadata.dhall b/types/v1/Metadata.dhall index 84685290c..2f50decf6 100644 --- a/types/v1/Metadata.dhall +++ b/types/v1/Metadata.dhall @@ -14,6 +14,7 @@ let PublishedMetadata = { hash : Sha256 , bytes : Natural , publishedTime : ISO8601String + , compilers : < Single : Version | Many : List Version > } let UnpublishedMetadata = From e15e4a874892b0d00c65d0ef0c840dada84b4410 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 11 Nov 2023 14:20:10 -0500 Subject: [PATCH 02/64] Add utilities for building with many compilers --- app/src/App/API.purs | 158 ++++++++++++++++++++++++++++++------------- 1 file changed, 111 insertions(+), 47 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 11a164330..1b43a3342 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -4,6 +4,8 @@ module Registry.App.API , PublishEffects , authenticated , copyPackageSourceFiles + , findAllCompilers + , findFirstCompiler , formatPursuitResolutions , packageSetUpdate , packagingTeam @@ -16,7 +18,6 @@ import Registry.App.Prelude import Data.Argonaut.Parser as Argonaut.Parser import Data.Array as Array -import Data.Array.NonEmpty as NEA import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.Argonaut as CA import Data.Codec.Argonaut.Record as CA.Record @@ -76,6 +77,7 @@ import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Octokit (IssueNumber(..), Team) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest @@ -253,7 +255,7 @@ authenticated auth = case auth.payload of pure published pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata 
metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Unpublishing is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -291,7 +293,7 @@ authenticated auth = case auth.payload of Just value -> pure value pacchettiBotti <- getPacchettiBotti - let owners = maybe [] NEA.toArray (un Metadata metadata).owners + let owners = maybe [] NonEmptyArray.toArray (un Metadata metadata).owners Run.liftAff (Auth.verifyPayload pacchettiBotti owners auth) >>= case _ of Left _ | [] <- owners -> do Log.error $ "Transferring is an authenticated operation, but no owners were listed in the metadata: " <> stringifyJson Metadata.codec metadata @@ -510,7 +512,7 @@ publish source payload = do , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions - compilationResult <- compilePackage { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } + compilationResult <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } case compilationResult of Left error -> do Log.error $ "Compilation failed, cannot upload to pursuit: " <> error @@ -734,9 +736,15 @@ publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manife -- Now that we have the package source contents we can verify we can compile -- the package. We skip failures when the package is a legacy package. - Log.info "Verifying package compiles (this may take a while)..." + Comment.comment $ Array.fold + [ "Verifying package compiles using compiler " + , Version.print payload.compiler + , " and resolutions:\n\n```json" + , printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions + , "\n```" + ] compilationResult <- compilePackage - { packageSourceDir: packageDirectory + { source: packageDirectory , compiler: payload.compiler , resolutions: verifiedResolutions } @@ -851,55 +859,111 @@ validateResolutions manifest resolutions = do ] type CompilePackage = - { packageSourceDir :: FilePath + { source :: FilePath , compiler :: Version , resolutions :: Map PackageName Version } compilePackage :: forall r. CompilePackage -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either String FilePath) -compilePackage { packageSourceDir, compiler, resolutions } = Except.runExcept do +compilePackage { source, compiler, resolutions } = Except.runExcept do tmp <- Tmp.mkTmpDir - let dependenciesDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory dependenciesDir - + output <- do + if Map.isEmpty resolutions then do + Log.debug "Compiling source code (no dependencies to install)..." + Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs" ] } + , version: Just compiler + , cwd: Just source + } + else do + Log.debug "Installing build plan..." + installBuildPlan resolutions tmp + Log.debug "Compiling..." + Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ tmp, "*/src/**/*.purs" ] ] } + , version: Just compiler + , cwd: Just source + } + + case output of + Left err -> Except.throw $ printCompilerFailure compiler err + Right _ -> pure tmp + +-- | Given a set of package versions, determine the set of compilers that can be +-- | used for all packages. 
+compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Set Version +compatibleCompilers allMetadata resolutions = do let - globs = - if Map.isEmpty resolutions then - [ "src/**/*.purs" ] - else - [ "src/**/*.purs" - , Path.concat [ dependenciesDir, "*/src/**/*.purs" ] - ] - - Log.debug "Installing build plan..." - installBuildPlan resolutions dependenciesDir - - Log.debug "Compiling..." - compilerOutput <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs } - , version: Just compiler - , cwd: Just packageSourceDir - } + associated :: Array (NonEmptyArray Version) + associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do + Metadata metadata <- Map.lookup name allMetadata + published <- Map.lookup version metadata.published + case published.compilers of + Left _ -> Nothing + Right all -> Just all + + Array.foldl (\prev next -> Set.intersection prev (Set.fromFoldable next)) Set.empty associated + +type DiscoverCompilers = + { source :: FilePath + , compilers :: Array Version + , installed :: FilePath + } - case compilerOutput of - Left MissingCompiler -> Except.throw $ Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Compilation failed for your package due to a compiler error:" - , "```" - , err - , "```" - ] - Right _ -> pure dependenciesDir +-- | Find all compilers that can compile the package source code and installed +-- | resolutions from the given array of compilers. +findAllCompilers :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) { failed :: Map Version CompilerFailure, succeeded :: Set Version } +findAllCompilers { source, compilers, installed } = do + checkedCompilers <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + pure $ bimap (Tuple target) (const target) result + let results = partitionEithers checkedCompilers + pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } + +-- | Find the first compiler that can compile the package source code and +-- | installed resolutions from the given array of compilers. +findFirstCompiler :: forall r. 
DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Maybe Version) +findFirstCompiler { source, compilers, installed } = do + search <- Except.runExcept $ for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + either (\_ -> Except.throw target) (\_ -> pure unit) result + case search of + Left found -> pure $ Just found + Right _ -> pure Nothing + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." + ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed due to a compiler error:" + , "```" + , err + , "```" + ] -- | Install all dependencies indicated by the build plan to the specified -- | directory. Packages will be installed at 'dir/package-name-x.y.z'. From d8e7e41d0cd092083b2bf8d73ca939c9df1ade58 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 11 Nov 2023 14:44:14 -0500 Subject: [PATCH 03/64] Remove PackageSource and require all packages to solve/compile --- app/src/App/API.purs | 91 +++++++++++---------------------- app/src/App/Effect/Source.purs | 25 +++++---- app/src/App/GitHubIssue.purs | 4 +- app/src/App/Prelude.purs | 14 ----- app/src/App/Server.purs | 4 +- app/test/App/API.purs | 10 ++-- app/test/Test/Assert/Run.purs | 2 +- scripts/src/LegacyImporter.purs | 20 +++----- scripts/src/PackageDeleter.purs | 4 +- scripts/src/Solver.purs | 2 +- 10 files changed, 65 insertions(+), 111 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 1b43a3342..5a04b876e 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -323,11 +323,11 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- | published before then it will be registered and the given version will be -- | upload. If it has been published before then the existing metadata will be -- | updated with the new version. -publish :: forall r. PackageSource -> PublishData -> Run (PublishEffects + r) Unit -publish source payload = do +publish :: forall r. PublishData -> Run (PublishEffects + r) Unit +publish payload = do let printedName = PackageName.print payload.name - Log.debug $ "Publishing " <> printPackageSource source <> " package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload + Log.debug $ "Publishing package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload Log.debug $ "Verifying metadata..." Metadata existingMetadata <- Registry.readMetadata payload.name >>= case _ of @@ -370,7 +370,7 @@ publish source payload = do -- the package directory along with its detected publish time. Log.debug "Metadata validated. Fetching package source code..." 
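Two observations on the compiler-discovery helpers introduced in patch 02 above. First, `compatibleCompilers` seeds its fold with `Set.empty`, so every intersection comes out empty; patch 04 below reworks the fold to start from the first dependency's compiler set. Second, the `either` branches in `findFirstCompiler` look swapped: `Except.throw target` fires on the `Left` (compilation failure) case, so the search returns the first compiler that fails rather than the first that succeeds. A corrected sketch of the search loop, using only names from the patch:

```purescript
findFirstCompiler' :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Maybe Version)
findFirstCompiler' { source, compilers, installed } = do
  search <- Except.runExcept $ for compilers \target -> do
    workdir <- Tmp.mkTmpDir
    result <- Run.liftAff $ Purs.callCompiler
      { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] }
      , version: Just target
      , cwd: Just workdir
      }
    FS.Extra.remove workdir
    -- Short-circuit with the target version on the first SUCCESSFUL build;
    -- failures fall through to the next candidate.
    either (\_ -> pure unit) (\_ -> Except.throw target) result
  case search of
    Left found -> pure (Just found)
    Right _ -> pure Nothing
```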
tmp <- Tmp.mkTmpDir - { path: packageDirectory, published: publishedTime } <- Source.fetch source tmp existingMetadata.location payload.ref + { path: packageDirectory, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref Log.debug $ "Package downloaded to " <> packageDirectory <> ", verifying it contains a src directory with valid modules..." Internal.Path.readPursFiles (Path.concat [ packageDirectory, "src" ]) >>= case _ of @@ -517,9 +517,9 @@ publish source payload = do Left error -> do Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right dependenciesDir -> do + Right installedResolutions -> do Log.debug "Uploading to Pursuit" - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } + publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } Just url -> do Except.throw $ String.joinWith "\n" @@ -540,8 +540,7 @@ publish source payload = do -- No need to verify the generated manifest because nothing was generated, -- and no need to write a file (it's already in the package source.) publishRegistry - { source - , manifest: Manifest manifest + { manifest: Manifest manifest , metadata: Metadata metadata , payload , publishedTime @@ -555,8 +554,7 @@ publish source payload = do -- dependencies we can skip those checks. Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) publishRegistry - { source - , manifest: Manifest manifest + { manifest: Manifest manifest , metadata: Metadata metadata , payload , publishedTime @@ -587,16 +585,10 @@ publish source payload = do Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of Left err -> do let prefix = "Failed to discover unused dependencies because purs graph failed: " - Log.error $ prefix <> case err of + Except.throw $ prefix <> case err of UnknownError str -> str - CompilationError errs -> Purs.printCompilerErrors errs + CompilationError errs -> "\n" <> Purs.printCompilerErrors errs MissingCompiler -> "missing compiler " <> Version.print payload.compiler - -- We allow legacy packages through even if we couldn't run purs graph, - -- because we can't be sure we chose the correct compiler version. - if source == LegacyPackage then - Comment.comment "Failed to prune dependencies for legacy package, continuing anyway..." - else do - Except.throw "purs graph failed; cannot verify unused dependencies." Right output -> case Argonaut.Parser.jsonParser output of Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr Right json -> case CA.decode PursGraph.pursGraphCodec json of @@ -609,7 +601,6 @@ publish source payload = do -- We need access to a graph that _doesn't_ include the package -- source, because we only care about dependencies of the package. noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ } case PursGraph.associateModules pathParser noSrcGraph of @@ -640,8 +631,7 @@ publish source payload = do Log.debug "No unused dependencies! This manifest is good to go." 
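The unused-dependency check above walks the `purs graph` output, associates each module with the package that provides it, and drops manifest dependencies that the module graph never reaches. A toy sketch of the final set arithmetic (helper names are hypothetical; in the real pipeline the `used` set is derived via `PursGraph.associateModules`):

```purescript
import Data.Map (Map)
import Data.Map as Map
import Data.Set (Set)
import Data.Set as Set
import Registry.PackageName (PackageName)
import Registry.Range (Range)

-- Keep only dependencies whose modules are actually reached by the graph.
pruneUnused :: Set PackageName -> Map PackageName Range -> Map PackageName Range
pruneUnused used = Map.filterWithKey (\name _ -> Set.member name used)

-- Anything declared but unreached is reported before being dropped.
unusedDependencies :: Set PackageName -> Map PackageName Range -> Set PackageName
unusedDependencies used declared = Set.difference (Map.keys declared) used
```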
Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) publishRegistry - { source - , manifest: Manifest manifest + { manifest: Manifest manifest , metadata: Metadata metadata , payload , publishedTime @@ -656,8 +646,7 @@ publish source payload = do Log.debug "Writing updated, pruned manifest." Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified) publishRegistry - { source - , manifest: Manifest verified + { manifest: Manifest verified , metadata: Metadata metadata , payload , publishedTime @@ -666,8 +655,7 @@ publish source payload = do } type PublishRegistry = - { source :: PackageSource - , manifest :: Manifest + { manifest :: Manifest , metadata :: Metadata , payload :: PublishData , publishedTime :: DateTime @@ -680,7 +668,7 @@ type PublishRegistry = -- publish to Pursuit only (in the case the package has been pushed to the -- registry, but docs have not been uploaded). publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit -publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do +publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do Log.debug "Verifying the package build plan..." verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions @@ -743,23 +731,10 @@ publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manife , printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions , "\n```" ] - compilationResult <- compilePackage - { source: packageDirectory - , compiler: payload.compiler - , resolutions: verifiedResolutions - } - case compilationResult of - Left error - -- We allow legacy packages to fail compilation because we do not - -- necessarily know what compiler to use with them. - | source == LegacyPackage -> do - Log.debug error - Log.warn "Failed to compile, but continuing because this package is a legacy package." - | otherwise -> - Except.throw error - Right _ -> - pure unit + installedResolutions <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } >>= case _ of + Left error -> Except.throw error + Right installed -> pure installed Comment.comment "Package is verified! Uploading it to the storage backend..." Storage.upload manifest.name manifest.version tarballPath @@ -768,21 +743,13 @@ publishRegistry { source, payload, metadata: Metadata metadata, manifest: Manife Registry.writeMetadata manifest.name (Metadata newMetadata) Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" - -- After a package has been uploaded we add it to the registry index, we - -- upload its documentation to Pursuit, and we can now process it for package - -- sets when the next batch goes out. - -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. + Log.debug "Adding the new version to the registry index" Registry.writeManifest (Manifest manifest) - when (source == CurrentPackage) $ case compilationResult of - Left error -> do - Log.error $ "Compilation failed, cannot upload to pursuit: " <> error - Except.throw "Cannot publish to Pursuit because this package failed to compile." 
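For reference, the resolutions block in the publishing comment above is rendered with the package-map codec; a sketch with illustrative pins (exact whitespace depends on `printJson`'s formatter):

```purescript
import Registry.Internal.Codec as Internal.Codec
import Registry.Version as Version

-- Renders a build plan the way the publishing comment does, e.g.
-- { "prelude": "6.0.1", "type-equality": "4.0.1" }
renderPlan :: Map PackageName Version -> String
renderPlan = printJson (Internal.Codec.packageMap Version.codec)
```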
- Right dependenciesDir -> do - Log.debug "Uploading to Pursuit" - publishToPursuit { packageSourceDir: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, dependenciesDir } + Log.debug "Uploading package documentation to pursuit" + publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } Registry.mirrorLegacyRegistry payload.name newMetadata.location Comment.comment "Mirrored registry operation to the legacy registry." @@ -1009,10 +976,10 @@ parseInstalledModulePath { prefix, path } = do pure { name, version } type PublishToPursuit = - { packageSourceDir :: FilePath - , dependenciesDir :: FilePath + { source :: FilePath , compiler :: Version , resolutions :: Map PackageName Version + , installedResolutions :: FilePath } -- | Publishes a package to Pursuit. @@ -1023,12 +990,12 @@ publishToPursuit :: forall r . PublishToPursuit -> Run (PURSUIT + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) Unit -publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = do +publishToPursuit { source, compiler, resolutions, installedResolutions } = do Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir let - resolvedPaths = formatPursuitResolutions { resolutions, dependenciesDir } + resolvedPaths = formatPursuitResolutions { resolutions, installedResolutions } resolutionsFilePath = Path.concat [ tmp, "resolutions.json" ] Run.liftAff $ writeJsonFile pursuitResolutionsCodec resolutionsFilePath resolvedPaths @@ -1040,7 +1007,7 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = -- file and an output directory from compilation) before calling purs publish. -- https://git-scm.com/docs/gitignore Log.debug "Adding output and purs.json to local git excludes..." - Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ packageSourceDir, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) + Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ source, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ]) -- NOTE: The compatibility version of purs publish appends 'purescript-' to the -- package name in the manifest file: @@ -1051,7 +1018,7 @@ publishToPursuit { packageSourceDir, dependenciesDir, compiler, resolutions } = compilerOutput <- Run.liftAff $ Purs.callCompiler { command: Purs.Publish { resolutions: resolutionsFilePath } , version: Just compiler - , cwd: Just packageSourceDir + , cwd: Just source } publishJson <- case compilerOutput of @@ -1104,13 +1071,13 @@ pursuitResolutionsCodec = rawPackageNameMapCodec $ CA.Record.object "Resolution" -- -- Note: This interfaces with Pursuit, and therefore we must add purescript- -- prefixes to all package names for compatibility with the Bower naming format. 
-formatPursuitResolutions :: { resolutions :: Map PackageName Version, dependenciesDir :: FilePath } -> PursuitResolutions -formatPursuitResolutions { resolutions, dependenciesDir } = +formatPursuitResolutions :: { resolutions :: Map PackageName Version, installedResolutions :: FilePath } -> PursuitResolutions +formatPursuitResolutions { resolutions, installedResolutions } = Map.fromFoldable do Tuple name version <- Map.toUnfoldable resolutions let bowerPackageName = RawPackageName ("purescript-" <> PackageName.print name) - packagePath = Path.concat [ dependenciesDir, PackageName.print name <> "-" <> Version.print version ] + packagePath = Path.concat [ installedResolutions, PackageName.print name <> "-" <> Version.print version ] [ Tuple bowerPackageName { path: packagePath, version } ] -- | Copy files from the package source directory to the destination directory diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index a9479d3f5..f1da6f7e8 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -28,8 +28,15 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +-- | Packages can be published via the legacy importer or a user via the API. We +-- | determine some information differently in these cases, such as the time the +-- | package was published. +data ImportType = Old | Recent + +derive instance Eq ImportType + -- | An effect for fetching package sources -data Source a = Fetch PackageSource FilePath Location String (Either String FetchedSource -> a) +data Source a = Fetch FilePath Location String (Either String FetchedSource -> a) derive instance Functor Source @@ -41,17 +48,17 @@ _source = Proxy type FetchedSource = { path :: FilePath, published :: DateTime } -- | Fetch the provided location to the provided destination path. -fetch :: forall r. PackageSource -> FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch source destination location ref = Except.rethrow =<< Run.lift _source (Fetch source destination location ref identity) +fetch :: forall r. FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource +fetch destination location ref = Except.rethrow =<< Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a interpret handler = Run.interpret (Run.on _source handler Run.send) -- | Handle the SOURCE effect by downloading package source to the file system. -handle :: forall r a. Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a -handle = case _ of - Fetch source destination location ref reply -> map (map reply) Except.runExcept do +handle :: forall r a. ImportType -> Source a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle importType = case _ of + Fetch destination location ref reply -> map (map reply) Except.runExcept do Log.info $ "Fetching " <> printJson Location.codec location case location of Git _ -> do @@ -92,15 +99,15 @@ handle = case _ of Log.debug $ "Getting published time..." 
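With `PackageSource` removed from the effect, every caller fetches the same way, and the import flavour (`Old` vs. `Recent`) is fixed once, when the handler is installed. A usage sketch against the `fetch` signature above (location and ref illustrative):

```purescript
fetchExample :: forall r. Run (SOURCE + EXCEPT String + r) FetchedSource
fetchExample = Source.fetch "/tmp/work" location "v4.0.0"
  where
  location = GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing }
```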
let - getRefTime = case source of - LegacyPackage -> do + getRefTime = case importType of + Old -> do timestamp <- Except.rethrow =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) jsDate <- Run.liftEffect $ JSDate.parse timestamp dateTime <- case JSDate.toDateTime jsDate of Nothing -> Except.throw $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate Just parsed -> pure parsed pure dateTime - CurrentPackage -> + Recent -> Run.liftEffect Now.nowDateTime -- Cloning will result in the `repo` name as the directory name diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index f8d7738d2..63dc1bcb6 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -57,7 +57,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish CurrentPackage payload + API.publish payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the @@ -97,7 +97,7 @@ main = launchAff_ $ do # Registry.interpret (Registry.handle registryEnv) # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret Source.handle + # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) -- Caching & logging # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index e42fddabf..ed4c5d110 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -1,6 +1,5 @@ module Registry.App.Prelude ( LogVerbosity(..) - , PackageSource(..) , PursPublishMethod(..) , Retry , RetryResult(..) @@ -22,7 +21,6 @@ module Registry.App.Prelude , parseJson , partitionEithers , printJson - , printPackageSource , pursPublishMethod , readJsonFile , scratchDir @@ -241,15 +239,3 @@ data PursPublishMethod = LegacyPursPublish | PursPublish -- | The current purs publish method pursPublishMethod :: PursPublishMethod pursPublishMethod = LegacyPursPublish - --- | Operations can be exercised for old, pre-registry packages, or for packages --- | which are on the 0.15 compiler series. If a true legacy package is uploaded --- | then we do not require compilation to succeed and we don't publish docs. 
-data PackageSource = LegacyPackage | CurrentPackage - -derive instance Eq PackageSource - -printPackageSource :: PackageSource -> String -printPackageSource = case _ of - LegacyPackage -> "legacy" - CurrentPackage -> "current" diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index 3f8132705..783e4d1dc 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -68,7 +68,7 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish forkPipelineJob publish.name publish.ref PublishJob \jobId -> do Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish CurrentPackage publish + API.publish publish Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body @@ -292,7 +292,7 @@ runEffects env operation = Aff.attempt do ) # Pursuit.interpret (Pursuit.handleAff env.vars.token) # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret Source.handle + # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) # Except.catch diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 827ed7aa4..bfc3ed247 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -88,7 +88,7 @@ spec = do } -- First, we publish the package. - API.publish CurrentPackage publishArgs + API.publish publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -119,7 +119,7 @@ spec = do -- Finally, we can verify that publishing the package again should fail -- since it already exists. - Except.runExcept (API.publish CurrentPackage publishArgs) >>= case _ of + Except.runExcept (API.publish publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." @@ -184,7 +184,7 @@ checkBuildPlanToResolutions = do Spec.it "buildPlanToResolutions produces expected resolutions file format" do Assert.shouldEqual generatedResolutions expectedResolutions where - dependenciesDir = "testDir" + installedResolutions = "testDir" resolutions = Map.fromFoldable [ Tuple (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "1.0.0") @@ -195,14 +195,14 @@ checkBuildPlanToResolutions = do generatedResolutions = API.formatPursuitResolutions { resolutions - , dependenciesDir + , installedResolutions } expectedResolutions = Map.fromFoldable do packageName /\ version <- (Map.toUnfoldable resolutions :: Array _) let bowerName = RawPackageName ("purescript-" <> PackageName.print packageName) - path = Path.concat [ dependenciesDir, PackageName.print packageName <> "-" <> Version.print version ] + path = Path.concat [ installedResolutions, PackageName.print packageName <> "-" <> Version.print version ] pure $ Tuple bowerName { path, version } removeIgnoredTarballFiles :: Spec.Spec Unit diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 44426d2f9..5865ee1df 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -266,7 +266,7 @@ type SourceMockEnv = { github :: FilePath } handleSourceMock :: forall r a. 
SourceMockEnv -> Source a -> Run (EXCEPT String + AFF + EFFECT + r) a handleSourceMock env = case _ of - Fetch _source destination location ref reply -> do + Fetch destination location ref reply -> do now <- Run.liftEffect Now.nowDateTime case location of Git _ -> pure $ reply $ Left "Packages cannot be published from Git yet (only GitHub)." diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 36102e92a..9409a9588 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -128,7 +128,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) GenerateRegistry -> do @@ -139,7 +139,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash (Registry.CommitAs (Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) UpdateRegistry -> do @@ -150,7 +150,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.ForceClean (Registry.CommitAs (Git.pacchettibottiCommitter token)))) >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret (Pursuit.handleAff token) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Recent) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) -- Logging setup @@ -162,7 +162,7 @@ main = launchAff_ do logFile = "legacy-importer-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" logPath = Path.concat [ logDir, logFile ] - runLegacyImport mode logPath + runLegacyImport logPath # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) @@ -172,8 +172,8 @@ main = launchAff_ do # Env.runResourceEnv resourceEnv # Run.runBaseAff' -runLegacyImport :: forall r. ImportMode -> FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit -runLegacyImport mode logs = do +runLegacyImport :: forall r. FilePath -> Run (API.PublishEffects + IMPORT_CACHE + r) Unit +runLegacyImport logs = do Log.info "Starting legacy import!" Log.info $ "Logs available at " <> logs @@ -278,12 +278,6 @@ runLegacyImport mode logs = do , Array.foldMap (append "\n - " <<< printPackage) manifests ] - let - source = case mode of - DryRun -> LegacyPackage - GenerateRegistry -> LegacyPackage - UpdateRegistry -> CurrentPackage - void $ for notPublished \(Manifest manifest) -> do let formatted = formatPackageVersion manifest.name manifest.version Log.info $ Array.foldMap (append "\n") @@ -294,7 +288,7 @@ runLegacyImport mode logs = do ] operation <- mkOperation (Manifest manifest) - result <- Except.runExcept $ API.publish source operation + result <- Except.runExcept $ API.publish operation -- TODO: Some packages will fail because the legacy importer does not -- perform all the same validation checks that the publishing flow does. 
-- What should we do when a package has a valid manifest but fails for diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index a8c8339b2..04b0b9954 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -152,7 +152,7 @@ main = launchAff_ do interpret = Registry.interpret (Registry.handle registryEnv) >>> Storage.interpret (if arguments.upload then Storage.handleS3 { s3, cache } else Storage.handleReadOnly cache) - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) @@ -237,7 +237,7 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` via cache - API.publish LegacyPackage + API.publish { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 517be4fbb..a0ac67398 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -127,7 +127,7 @@ main = launchAff_ do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure - >>> Source.interpret Source.handle + >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) let From 8e069b6aeb6bc3ca651f2ba7ff380a11f51f50e9 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 11 Nov 2023 17:25:27 -0500 Subject: [PATCH 04/64] Determine all compilers for package in publish pipeline --- app/fixtures/registry/metadata/prelude.json | 2 +- .../registry/metadata/type-equality.json | 2 +- app/src/App/API.purs | 51 +++++++++++++++-- app/test/App/API.purs | 57 ++++++++++++++++++- app/test/Test/Assert/Run.purs | 5 +- flake.nix | 2 +- 6 files changed, 108 insertions(+), 11 deletions(-) diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 24537ed0b..cab65f7b1 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,7 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, - "compilers": ["0.15.0", "0.15.2"], + "compilers": ["0.15.10", "0.15.12"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index d473c73e4..aed5ea89f 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -6,7 +6,7 @@ "published": { "4.0.1": { "bytes": 2184, - "compilers": ["0.15.0", "0.15.2"], + "compilers": ["0.15.9", "0.15.10", "0.15.11"], "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 5a04b876e..d50563c41 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -3,6 +3,7 @@ module Registry.App.API , PackageSetUpdateEffects , PublishEffects , authenticated + , compatibleCompilers , copyPackageSourceFiles , findAllCompilers , findFirstCompiler @@ -28,6 +29,7 @@ 
import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format import Data.Set as Set +import Data.Set.NonEmpty (NonEmptySet) import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits @@ -739,7 +741,8 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Comment.comment "Package is verified! Uploading it to the storage backend..." Storage.upload manifest.name manifest.version tarballPath Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." - let newMetadata = metadata { published = Map.insert manifest.version { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } metadata.published } + let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } + let newMetadata = metadata { published = Map.insert manifest.version newPublishedVersion metadata.published } Registry.writeMetadata manifest.name (Metadata newMetadata) Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" @@ -752,7 +755,42 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry." + Comment.comment "Mirrored registry operation to the legacy registry!" + + allMetadata <- Registry.readAllMetadata + compatible <- case compatibleCompilers allMetadata verifiedResolutions of + Nothing -> do + let msg = "Dependencies admit no overlapping compiler versions! This should not be possible. Resolutions: " <> printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions + Log.error msg *> Except.throw msg + Just result -> pure result + + Comment.comment $ Array.fold + [ "The following compilers are compatible with this package according to its dependency resolutions: " + , String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") $ NonEmptySet.toUnfoldable compatible) + , ".\n\n" + , "Computing the list of compilers usable with your package version..." + ] + + { failed: invalidCompilers, succeeded: validCompilers } <- findAllCompilers + { source: packageDirectory + , installed: installedResolutions + , compilers: Array.fromFoldable $ NonEmptySet.filter (notEq payload.compiler) compatible + } + + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + + let + allVerified = case NonEmptySet.fromFoldable validCompilers of + Nothing -> NonEmptyArray.singleton payload.compiler + Just verified -> NonEmptyArray.fromFoldable1 $ NonEmptySet.insert payload.compiler verified + + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptyArray.toArray allVerified)) + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right allVerified })) manifest.version newMetadata.published } + Registry.writeMetadata manifest.name (Metadata compilersMetadata) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + + Comment.comment "Wrote completed metadata to the registry!" -- | Verify the build plan for the package. 
If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -858,7 +896,7 @@ compilePackage { source, compiler, resolutions } = Except.runExcept do -- | Given a set of package versions, determine the set of compilers that can be -- | used for all packages. -compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Set Version +compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Maybe (NonEmptySet Version) compatibleCompilers allMetadata resolutions = do let associated :: Array (NonEmptyArray Version) @@ -869,7 +907,12 @@ compatibleCompilers allMetadata resolutions = do Left _ -> Nothing Right all -> Just all - Array.foldl (\prev next -> Set.intersection prev (Set.fromFoldable next)) Set.empty associated + Array.uncons associated >>= case _ of + { head, tail: [] } -> + pure $ NonEmptySet.fromFoldable1 head + { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable + NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head) tail type DiscoverCompilers = { source :: FilePath diff --git a/app/test/App/API.purs b/app/test/App/API.purs index bfc3ed247..b930a1600 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -6,6 +6,7 @@ import Data.Array.NonEmpty as NonEmptyArray import Data.Foldable (traverse_) import Data.Map as Map import Data.Set as Set +import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.NonEmpty as NonEmptyString import Effect.Aff as Aff @@ -68,10 +69,39 @@ spec = do Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") FS.Extra.remove tmp + Spec.describe "Finds compatible compilers from dependencies" do + Spec.it "Finds intersect of single package" do + Assert.Run.runBaseEffects do + metadata <- Registry.readAllMetadataFromDisk $ Path.concat [ "app", "fixtures", "registry", "metadata" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.12" ] + case API.compatibleCompilers metadata (Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "6.0.1")) of + Nothing -> Except.throw $ "Got no compatible compilers, but expected " <> Utils.unsafeStringify (map Version.print expected) + Just set -> do + let actual = NonEmptySet.toUnfoldable set + unless (actual == expected) do + Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print actual) + + Spec.it "Finds intersect of multiple packages" do + Assert.Run.runBaseEffects do + metadata <- Registry.readAllMetadataFromDisk $ Path.concat [ "app", "fixtures", "registry", "metadata" ] + let + expected = map Utils.unsafeVersion [ "0.15.10" ] + resolutions = Map.fromFoldable $ map (bimap Utils.unsafePackageName Utils.unsafeVersion) + [ Tuple "prelude" "6.0.1" + , Tuple "type-equality" "4.0.1" + ] + case API.compatibleCompilers metadata resolutions of + Nothing -> Except.throw $ "Got no compatible compilers, but expected " <> Utils.unsafeStringify (map Version.print expected) + Just set -> do + let actual = NonEmptySet.toUnfoldable set + unless (actual == expected) do + Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print actual) + Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do - let testEnv = { workdir, index, metadata, 
username: "jon", storage: storageDir, github: githubDir } - Assert.Run.runTestEffects testEnv do + logs <- liftEffect (Ref.new []) + let testEnv = { workdir, logs, index, metadata, username: "jon", storage: storageDir, github: githubDir } + result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do -- We'll publish effect@4.0.0 from the fixtures/github-packages -- directory, which has an unnecessary dependency on 'type-equality' -- inserted into it. @@ -80,7 +110,7 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref @@ -117,12 +147,33 @@ spec = do , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies ] + -- We should verify the resulting metadata file is correct + Metadata effectMetadata <- Registry.readMetadata name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print name <> " to be in metadata." + Just m -> pure m + + case Map.lookup version effectMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." + Just published -> case published.compilers of + Left one -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix but unfinished single version: " <> Version.print one + Right many -> do + let many' = NonEmptyArray.toArray many + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.12" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + -- Finally, we can verify that publishing the package again should fail -- since it already exists. Except.runExcept (API.publish publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." 
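These expectations follow directly from the fixtures: prelude@6.0.1 lists `["0.15.10", "0.15.12"]` and type-equality@4.0.1 lists `["0.15.9", "0.15.10", "0.15.11"]`, so their intersection is `["0.15.10"]`, while effect@4.0.0 (depending only on prelude once the unused type-equality dependency is pruned) ends up with `["0.15.10", "0.15.12"]`. The final matrix is assembled the way patch 04's `publishRegistry` does it, sketched here:

```purescript
-- The publish compiler is always part of the matrix; every other entry must
-- have survived the findAllCompilers matrix build.
assembleMatrix :: Version -> Set Version -> NonEmptyArray Version
assembleMatrix publishCompiler succeeded = case NonEmptySet.fromFoldable succeeded of
  Nothing -> NonEmptyArray.singleton publishCompiler
  Just verified -> NonEmptyArray.fromFoldable1 (NonEmptySet.insert publishCompiler verified)
```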
+ case result of + Left err -> do + recorded <- liftEffect (Ref.read logs) + Assert.fail $ "Expected to publish effect@4.0.0 but got error: " <> err <> "\n\nLogs:\n" <> String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Right _ -> pure unit + where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 5865ee1df..18ba03016 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -17,11 +17,13 @@ import Data.FunctorWithIndex (mapWithIndex) import Data.Map as Map import Data.Set as Set import Data.String as String +import Dodo as Dodo import Effect.Aff as Aff import Effect.Now as Now import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (LogLevel) import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache @@ -93,6 +95,7 @@ type TEST_EFFECTS = type TestEnv = { workdir :: FilePath + , logs :: Ref (Array (Tuple LogLevel String)) , metadata :: Ref (Map PackageName Metadata) , index :: Ref ManifestIndex , storage :: FilePath @@ -121,7 +124,7 @@ runTestEffects env operation = do # runLegacyCacheMemory legacyCache -- Other effects # Comment.interpret Comment.handleLog - # Log.interpret (\(Log _ _ next) -> pure next) + # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) # Run.runBaseAff' diff --git a/flake.nix b/flake.nix index 66c3dba9d..064ad3ff3 100644 --- a/flake.nix +++ b/flake.nix @@ -592,7 +592,7 @@ { "name": "effect", "ref": "v4.0.0", - "compiler": "0.15.4", + "compiler": "0.15.10", "location": { "githubOwner": "purescript", "githubRepo": "purescript-effect" From 5348ee2f9646b9ed1daf2f1b97d2a85c0ae36283 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 11 Nov 2023 19:29:01 -0500 Subject: [PATCH 05/64] Initial cut at discovering compiler in legacy import --- app/src/App/API.purs | 6 ++ lib/src/Solver.purs | 1 + scripts/src/LegacyImporter.purs | 152 +++++++++++++++++++++++--------- 3 files changed, 115 insertions(+), 44 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index d50563c41..720d5e2ae 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -8,6 +8,7 @@ module Registry.App.API , findAllCompilers , findFirstCompiler , formatPursuitResolutions + , installBuildPlan , packageSetUpdate , packagingTeam , parseInstalledModulePath @@ -50,6 +51,7 @@ import Parsing.String as Parsing.String import Registry.App.Auth as Auth import Registry.App.CLI.Purs (CompilerFailure(..)) import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Comment (COMMENT) import Registry.App.Effect.Comment as Comment @@ -759,6 +761,10 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif allMetadata <- Registry.readAllMetadata compatible <- case compatibleCompilers allMetadata verifiedResolutions of + Nothing | Map.isEmpty verifiedResolutions -> do + Log.debug "No dependencies, so all compilers are potentially compatible." + allCompilers <- PursVersions.pursVersions + pure $ NonEmptySet.fromFoldable1 allCompilers Nothing -> do let msg = "Dependencies admit no overlapping compiler versions! This should not be possible. 
Resolutions: " <> printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions Log.error msg *> Except.throw msg diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index ac0086c76..fcb6f6edb 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -146,6 +146,7 @@ intersectionFromRange' package range = -------------------------------------------------------------------------------- type SolverErrors = NEL.NonEmptyList SolverError + data SolverError = Conflicts (Map PackageName Intersection) | WhileSolving PackageName (Map Version SolverError) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 9409a9588..eb518da33 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -24,10 +24,12 @@ import Data.Foldable as Foldable import Data.Formatter.DateTime as Formatter.DateTime import Data.FunctorWithIndex (mapWithIndex) import Data.List as List +import Data.List.NonEmpty as NonEmptyList import Data.Map as Map import Data.Ordering (invert) import Data.Profunctor as Profunctor import Data.Set as Set +import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits import Data.Variant as Variant @@ -42,6 +44,7 @@ import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment @@ -61,19 +64,22 @@ import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackage import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format import Registry.Location as Location import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex import Registry.Operation (PublishData) import Registry.PackageName as PackageName +import Registry.Solver (DependencyIndex) +import Registry.Solver as Solver import Registry.Version as Version import Run (Run) import Run as Run import Run.Except (EXCEPT, Except) import Run.Except as Except import Run.Except as Run.Except -import Spago.Generated.BuildInfo as BuildInfo import Type.Proxy (Proxy(..)) data ImportMode = DryRun | GenerateRegistry | UpdateRegistry @@ -230,65 +236,109 @@ runLegacyImport logs = do indexPackages <- allIndexPackages # Array.filterA \(Manifest { name, version }) -> isNothing <$> Cache.get _importCache (PublishFailure name version) - allMetadata <- Registry.readAllMetadata - - -- This record comes from the build directory (.spago) and records information - -- from the most recent build. - let compiler = unsafeFromRight (Version.parse BuildInfo.buildInfo.pursVersion) - - -- Just a safety check to ensure the compiler used in the pipeline is not too - -- low. Should be bumped from time to time to the latest compiler. - let minCompiler = unsafeFromRight (Version.parse "0.15.7") - when (compiler < minCompiler) do - Except.throw $ "Local compiler " <> Version.print compiler <> " is too low (min: " <> Version.print minCompiler <> ")." 
- - Log.info $ "Using compiler " <> Version.print compiler - - let - isPublished { name, version } = hasMetadata allMetadata name version - notPublished = indexPackages # Array.filter \(Manifest manifest) -> not (isPublished manifest) + notPublished <- do + allMetadata <- Registry.readAllMetadata + let isPublished { name, version } = hasMetadata allMetadata name version + pure $ indexPackages # Array.filter \(Manifest manifest) -> not (isPublished manifest) - mkOperation :: Manifest -> Run _ PublishData - mkOperation (Manifest manifest) = - case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of - Nothing -> do - let formatted = formatPackageVersion manifest.name manifest.version - Log.error $ "Unable to recover package ref for " <> formatted - Except.throw $ "Failed to create publish operation for " <> formatted - Just ref -> - pure - { location: Just manifest.location - , name: manifest.name - , ref: un RawVersion ref - , compiler - , resolutions: Nothing - } - - case notPublished of + Tuple _ operations <- do + let + buildOperation + :: Tuple DependencyIndex (Array (Tuple Manifest PublishData)) + -> Manifest + -> Run _ (Tuple DependencyIndex (Array (Tuple Manifest PublishData))) + buildOperation (Tuple prevIndex prevData) (Manifest manifest) = do + let formatted = formatPackageVersion manifest.name manifest.version + RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of + Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted + Just ref -> pure ref + + Log.debug $ "Solving for " <> formatted + case Solver.solve prevIndex manifest.dependencies of + Left unsolvable -> do + Log.warn $ "Could not solve " <> formatted + let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable + Log.debug $ String.joinWith "\n" errors + Cache.put _importCache (ImportManifest manifest.name (RawVersion ref)) (Left { error: SolveFailed, reason: String.joinWith " " errors }) + pure $ Tuple prevIndex prevData + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions + Log.debug "Determining a compiler version suitable for publishing..." + allMetadata <- Registry.readAllMetadata + possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of + Nothing | Map.isEmpty resolutions -> do + Log.debug "No resolutions, so all compilers could be compatible." + allCompilers <- PursVersions.pursVersions + pure $ NonEmptySet.fromFoldable1 allCompilers + Nothing -> + Except.throw "No overlapping compilers found in dependencies; this should not happen!" + Just compilers -> do + Log.debug $ "Compatible compilers for dependencies of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Finding first compiler that can build the package..." 
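Each candidate compiler is then tried by compiling the package's sources together with the installed build plan. A standalone sketch of one such attempt — the helper and its `Aff` context are illustrative, `version` and `cwd` mirror the `Purs.callCompiler` call later in this series, and the `Purs.Compile` globs are an assumption about the payload's shape:

```purescript
-- Try one candidate: compile the package source plus every installed
-- dependency's sources; a Right result means the candidate compiler works.
tryCandidate :: Version -> FilePath -> FilePath -> Aff Boolean
tryCandidate candidate source installed = do
  result <- Purs.callCompiler
    { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] }
    , version: Just candidate
    , cwd: Nothing
    }
  pure $ isRight result
```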
+              selected <- API.findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers }
+              FS.Extra.remove tmp
+              case selected of
+                Nothing -> do
+                  Log.warn "Could not find any valid compilers for this package."
+                  Log.debug "Skipping this package."
+                  pure $ Tuple prevIndex prevData
+                Just compiler -> do
+                  Log.debug $ "Selected " <> Version.print compiler <> " for publishing."
+                  let
+                    operation :: PublishData
+                    operation =
+                      { name: manifest.name
+                      , location: Just manifest.location
+                      , ref
+                      , compiler
+                      , resolutions: Just resolutions
+                      }
+
+                  -- FIXME: Can't actually accumulate dependency index, and need to publish
+                  -- packages before moving to the next. Replace this implementation such that
+                  -- we publish each package, then read the manifest / metadata indices again
+                  -- on every iteration.
+                  pure $ Tuple (Map.insertWith Map.union manifest.name (Map.singleton manifest.version manifest.dependencies) prevIndex) (Array.snoc prevData (Tuple (Manifest manifest) operation))
+
+    Array.foldM buildOperation (Tuple Map.empty []) notPublished
+
+  case operations of
     [] -> Log.info "No packages to publish."
-    manifests -> do
-      let printPackage (Manifest { name, version }) = formatPackageVersion name version
+    ops -> do
       Log.info $ Array.foldMap (append "\n")
         [ "----------"
         , "AVAILABLE TO PUBLISH"
-        , ""
-        , "  using purs " <> Version.print compiler
-        , ""
+        , Array.foldMap (\(Tuple _ { name, ref }) -> "\n  - " <> PackageName.print name <> " " <> ref) ops
         , "----------"
-        , Array.foldMap (append "\n  - " <<< printPackage) manifests
         ]
 
-  void $ for notPublished \(Manifest manifest) -> do
+  void $ for ops \(Tuple (Manifest manifest) publish) -> do
     let formatted = formatPackageVersion manifest.name manifest.version
+
+    -- Never happens, just a safety check.
+    when (manifest.name /= publish.name) do
+      Except.throw $ "Package name mismatch: " <> formatted <> " is being published as " <> PackageName.print publish.name
+
     Log.info $ Array.foldMap (append "\n")
       [ "----------"
      , "PUBLISHING: " <> formatted
      , stringifyJson Location.codec manifest.location
      , "----------"
      ]
-    operation <- mkOperation (Manifest manifest)
-    result <- Except.runExcept $ API.publish operation
+    result <- Except.runExcept $ API.publish publish
 
+    -- TODO: Some packages will fail because the legacy importer does not
     -- perform all the same validation checks that the publishing flow does.
-- What should we do when a package has a valid manifest but fails for @@ -480,6 +530,8 @@ data VersionError | DisabledVersion | InvalidManifest LegacyManifestValidationError | UnregisteredDependencies (Array PackageName) + | SolveFailed + | NoCompilerFound versionErrorCodec :: JsonCodec VersionError versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch @@ -494,6 +546,8 @@ versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM , reason: CA.string } , unregisteredDependencies: Right (CA.array PackageName.codec) + , solveFailed: Left unit + , noCompilerFound: Left unit } where toVariant = case _ of @@ -501,12 +555,16 @@ versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM DisabledVersion -> Variant.inj (Proxy :: _ "disabledVersion") unit InvalidManifest inner -> Variant.inj (Proxy :: _ "invalidManifest") inner UnregisteredDependencies inner -> Variant.inj (Proxy :: _ "unregisteredDependencies") inner + SolveFailed -> Variant.inj (Proxy :: _ "solveFailed") unit + NoCompilerFound -> Variant.inj (Proxy :: _ "noCompilerFound") unit fromVariant = Variant.match { invalidTag: InvalidTag , disabledVersion: \_ -> DisabledVersion , invalidManifest: InvalidManifest , unregisteredDependencies: UnregisteredDependencies + , solveFailed: \_ -> SolveFailed + , noCompilerFound: \_ -> NoCompilerFound } validateVersionDisabled :: PackageName -> LenientVersion -> Either VersionValidationError Unit @@ -692,6 +750,10 @@ formatVersionValidationError { error, reason } = case error of UnregisteredDependencies names -> do let errorValue = String.joinWith ", " $ map PackageName.print names { tag: "UnregisteredDependencies", value: Just errorValue, reason } + SolveFailed -> + { tag: "SolveFailed", value: Nothing, reason } + NoCompilerFound -> + { tag: "NoCompilerFound", value: Nothing, reason } type ImportStats = { packagesProcessed :: Int @@ -787,6 +849,8 @@ calculateImportStats legacyRegistry imported = do DisabledVersion -> "Disabled Version" InvalidManifest err -> "Invalid Manifest (" <> innerKey err <> ")" UnregisteredDependencies _ -> "Unregistered Dependencies" + SolveFailed -> "Solve Failed" + NoCompilerFound -> "No Compiler Found" innerKey = _.error >>> case _ of NoManifests -> "No Manifests" From 630c0bff5068a10149504652b6f623467ec7bd1e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 11 Nov 2023 21:35:03 -0500 Subject: [PATCH 06/64] Always look up metadata / manifests in each publishing step --- scripts/src/LegacyImporter.purs | 174 ++++++++++++-------------------- 1 file changed, 67 insertions(+), 107 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index eb518da33..9fdd36d99 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -67,12 +67,9 @@ import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.Location as Location import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex -import Registry.Operation (PublishData) import Registry.PackageName as PackageName -import Registry.Solver (DependencyIndex) import Registry.Solver as Solver import Registry.Version as Version import Run (Run) @@ -241,116 +238,79 @@ runLegacyImport logs = do let isPublished { name, version } = hasMetadata allMetadata name version pure $ indexPackages # Array.filter \(Manifest manifest) -> not (isPublished 
manifest)
 
-  Tuple _ operations <- do
-    let
-      buildOperation
-        :: Tuple DependencyIndex (Array (Tuple Manifest PublishData))
-        -> Manifest
-        -> Run _ (Tuple DependencyIndex (Array (Tuple Manifest PublishData)))
-      buildOperation (Tuple prevIndex prevData) (Manifest manifest) = do
-        let formatted = formatPackageVersion manifest.name manifest.version
-        RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of
-          Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted
-          Just ref -> pure ref
-
-        Log.debug $ "Solving for " <> formatted
-        case Solver.solve prevIndex manifest.dependencies of
-          Left unsolvable -> do
-            Log.warn $ "Could not solve " <> formatted
-            let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable
-            Log.debug $ String.joinWith "\n" errors
-            Cache.put _importCache (ImportManifest manifest.name (RawVersion ref)) (Left { error: SolveFailed, reason: String.joinWith " " errors })
-            pure $ Tuple prevIndex prevData
-          Right resolutions -> do
-            Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions
-            Log.debug "Determining a compiler version suitable for publishing..."
-            allMetadata <- Registry.readAllMetadata
-            possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of
-              Nothing | Map.isEmpty resolutions -> do
-                Log.debug "No resolutions, so all compilers could be compatible."
-                allCompilers <- PursVersions.pursVersions
-                pure $ NonEmptySet.fromFoldable1 allCompilers
-              Nothing ->
-                Except.throw "No overlapping compilers found in dependencies; this should not happen!"
-              Just compilers -> do
-                Log.debug $ "Compatible compilers for dependencies of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers)
-                pure compilers
-
-            Log.debug "Fetching source and installing dependencies to test compilers"
-            tmp <- Tmp.mkTmpDir
-            { path } <- Source.fetch tmp manifest.location ref
-            Log.debug $ "Downloaded source to " <> path
-            Log.debug "Downloading dependencies..."
-            let installDir = Path.concat [ tmp, ".registry" ]
-            FS.Extra.ensureDirectory installDir
-            API.installBuildPlan resolutions installDir
-            Log.debug $ "Installed to " <> installDir
-            Log.debug "Finding first compiler that can build the package..."
-            selected <- API.findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers }
-            FS.Extra.remove tmp
-            case selected of
-              Nothing -> do
-                Log.warn "Could not find any valid compilers for this package."
-                Log.debug "Skipping this package."
-                pure $ Tuple prevIndex prevData
-              Just compiler -> do
-                Log.debug $ "Selected " <> Version.print compiler <> " for publishing."
-                let
-                  operation :: PublishData
-                  operation =
-                    { name: manifest.name
-                    , location: Just manifest.location
-                    , ref
-                    , compiler
-                    , resolutions: Just resolutions
-                    }
-
-                -- FIXME: Can't actually accumulate dependency index, and need to publish
-                -- packages before moving to the next. Replace this implementation such that
-                -- we publish each package, then read the manifest / metadata indices again
-                -- on every iteration.
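The replacement approach sketched in the FIXME above re-reads the manifest index on every iteration and derives the solver's input from it. In isolation, that derivation is (assuming `ManifestIndex.toMap` yields manifests keyed by package name and version):

```purescript
-- Strip each manifest down to its dependency ranges to produce the
-- DependencyIndex that Solver.solve consumes.
toSolverIndex :: ManifestIndex -> Map PackageName (Map Version (Map PackageName Range))
toSolverIndex = map (map (_.dependencies <<< un Manifest)) <<< ManifestIndex.toMap
```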
- pure $ Tuple (Map.insertWith Map.union manifest.name (Map.singleton manifest.version manifest.dependencies) prevIndex) (Array.snoc prevData (Tuple (Manifest manifest) operation)) - - Array.foldM buildOperation (Tuple Map.empty []) notPublished - - case operations of + let + publishLegacyPackage :: Manifest -> Run _ Unit + publishLegacyPackage (Manifest manifest) = do + let formatted = formatPackageVersion manifest.name manifest.version + Log.info $ "PUBLISHING: " <> formatted + RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of + Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted + Just ref -> pure ref + + Log.debug $ "Solving dependencies for " <> formatted + index <- Registry.readAllManifests + let solverIndex = map (map (_.dependencies <<< un Manifest)) $ ManifestIndex.toMap index + case Solver.solve solverIndex manifest.dependencies of + Left unsolvable -> do + Log.warn $ "Could not solve " <> formatted + let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable + Log.debug $ String.joinWith "\n" errors + Cache.put _importCache (ImportManifest manifest.name (RawVersion ref)) (Left { error: SolveFailed, reason: String.joinWith " " errors }) + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions + Log.debug "Determining a compiler version suitable for publishing..." + allMetadata <- Registry.readAllMetadata + possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of + Nothing | Map.isEmpty resolutions -> do + Log.debug "No resolutions, so all compilers could be compatible." + allCompilers <- PursVersions.pursVersions + pure $ NonEmptySet.fromFoldable1 allCompilers + Nothing -> + Except.throw "No overlapping compilers found in dependencies; this should not happen!" + Just compilers -> do + Log.debug $ "Compatible compilers for dependencies of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Finding first compiler that can build the package..." + selected <- API.findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } + FS.Extra.remove tmp + case selected of + Nothing -> Log.error "Could not find any valid compilers for this package." + Just compiler -> do + Log.debug $ "Selected " <> Version.print compiler <> " for publishing." + let + payload = + { name: manifest.name + , location: Just manifest.location + , ref + , compiler + , resolutions: Just resolutions + } + Except.runExcept (API.publish payload) >>= case _ of + Left error -> do + Log.error $ "Failed to publish " <> formatted <> ": " <> error + Cache.put _importCache (PublishFailure manifest.name manifest.version) error + Right _ -> do + Log.info $ "Published " <> formatted + + case notPublished of [] -> Log.info "No packages to publish." 
- ops -> do + manifests -> do Log.info $ Array.foldMap (append "\n") [ "----------" , "AVAILABLE TO PUBLISH" - , Array.foldMap (\(Tuple _ { name, ref }) -> "\n - " <> PackageName.print name <> " " <> ref) ops + , Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) manifests , "----------" ] - - void $ for ops \(Tuple (Manifest manifest) publish) -> do - let formatted = formatPackageVersion manifest.name manifest.version - - -- Never happens, just a safety check. - when (manifest.name /= publish.name) do - Except.throw $ "Package name mismatch: " <> formatted <> " is being published as " <> PackageName.print publish.name - - Log.info $ Array.foldMap (append "\n") - [ "----------" - , "PUBLISHING: " <> formatted - , stringifyJson Location.codec manifest.location - , "----------" - ] - - result <- Except.runExcept $ API.publish publish - - -- TODO: Some packages will fail because the legacy importer does not - -- perform all the same validation checks that the publishing flow does. - -- What should we do when a package has a valid manifest but fails for - -- other reasons? Should they be added to the package validation - -- failures and we defer writing the package failures until the import - -- has completed? - case result of - Left error -> do - Log.error $ "Failed to publish " <> formatted <> ": " <> error - Cache.put _importCache (PublishFailure manifest.name manifest.version) error - Right _ -> do - Log.info $ "Published " <> formatted + void $ for manifests publishLegacyPackage -- | Record all package failures to the 'package-failures.json' file. writePackageFailures :: Map RawPackageName PackageValidationError -> Aff Unit From 77d6e681f2e37cfa29734fa8e159e1b83c8e8b7f Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 13 Nov 2023 10:18:53 -0500 Subject: [PATCH 07/64] Testing the pipeline... --- app/src/App/API.purs | 87 ++++++++++++++++++-------------- app/src/App/CLI/Git.purs | 9 ++-- app/src/App/Effect/Cache.purs | 15 +++--- app/src/App/Effect/Registry.purs | 2 +- scripts/src/LegacyImporter.purs | 19 ++----- 5 files changed, 69 insertions(+), 63 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 720d5e2ae..2ef936643 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -384,13 +384,18 @@ publish payload = do , "All package sources must be in the `src` directory, with any additional " , "sources indicated by the `files` key in your manifest." ] - Just files -> do + Just files -> + -- The 'validatePursModules' function uses language-cst-parser, which only + -- supports syntax back to 0.14.0. We'll still try to validate the package + -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of + Left formattedError | payload.compiler < unsafeFromRight (Version.parse "0.14.0") -> do + Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError + Log.debug $ "Skipping check because package is published with a pre-0.14.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> Except.throw $ Array.fold [ "This package has either malformed or disallowed PureScript module names " - , "in its `src` directory. All package sources must be in the `src` directory, " - , "with any additional sources indicated by the `files` key in your manifest." 
+ , "in its source: " , formattedError ] Right _ -> @@ -510,6 +515,12 @@ publish payload = do Right versions -> pure versions case Map.lookup manifest.version published of + Nothing | payload.compiler < unsafeFromRight (Version.parse "0.14.7") -> do + Comment.comment $ Array.fold + [ "This version has already been published to the registry, but the docs have not been " + , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " + , "registry using compiler versions prior to 0.14.7. Please try with a later compiler." + ] Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " @@ -523,7 +534,11 @@ publish payload = do Except.throw "Cannot publish to Pursuit because this package failed to compile." Right installedResolutions -> do Log.debug "Uploading to Pursuit" - publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } + publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + Left publishErr -> Except.throw publishErr + Right _ -> do + Log.debug "Package docs publish succeeded" + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" Just url -> do Except.throw $ String.joinWith "\n" @@ -731,7 +746,8 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Comment.comment $ Array.fold [ "Verifying package compiles using compiler " , Version.print payload.compiler - , " and resolutions:\n\n```json" + , " and resolutions:\n" + , "```json\n" , printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions , "\n```" ] @@ -753,12 +769,24 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Log.debug "Adding the new version to the registry index" Registry.writeManifest (Manifest manifest) - Log.debug "Uploading package documentation to pursuit" - publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } - Registry.mirrorLegacyRegistry payload.name newMetadata.location Comment.comment "Mirrored registry operation to the legacy registry!" + Log.debug "Uploading package documentation to Pursuit" + if payload.compiler >= unsafeFromRight (Version.parse "0.14.7") then + publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + Left publishErr -> do + Log.error publishErr + Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Right _ -> + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + else do + Comment.comment $ Array.fold + [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" + , Version.print payload.compiler + , "). If you want to publish documentation, please try again with a later compiler." + ] + allMetadata <- Registry.readAllMetadata compatible <- case compatibleCompilers allMetadata verifiedResolutions of Nothing | Map.isEmpty verifiedResolutions -> do @@ -944,10 +972,11 @@ findAllCompilers { source, compilers, installed } = do pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -- | Find the first compiler that can compile the package source code and --- | installed resolutions from the given array of compilers. 
+-- | installed resolutions from the given array of compilers. Begins with the
+-- | latest compiler and works backwards to older compilers.
 findFirstCompiler :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Maybe Version)
 findFirstCompiler { source, compilers, installed } = do
-  search <- Except.runExcept $ for compilers \target -> do
+  search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do
     Log.debug $ "Trying compiler " <> Version.print target
     workdir <- Tmp.mkTmpDir
     result <- Run.liftAff $ Purs.callCompiler
@@ -956,7 +985,7 @@ findFirstCompiler { source, compilers, installed } = do
       , cwd: Just workdir
       }
     FS.Extra.remove workdir
-    either (\_ -> Except.throw target) (\_ -> pure unit) result
+    for_ result (\_ -> Except.throw target)
   case search of
     Left found -> pure $ Just found
     Right _ -> pure Nothing
@@ -975,7 +1004,7 @@ printCompilerFailure compiler = case _ of
       , "```"
       ]
     UnknownError err -> String.joinWith "\n"
-      [ "Compilation failed due to a compiler error:"
+      [ "Compilation failed with version " <> Version.print compiler <> " because of an error:"
       , "```"
       , err
      , "```"
      ]
@@ -1034,12 +1063,13 @@ type PublishToPursuit =
 -- | Publishes a package to Pursuit.
 -- |
 -- | ASSUMPTIONS: This function should not be run on legacy packages or on
--- | packages where the `purescript-` prefix is still present.
+-- | packages where the `purescript-` prefix is still present. Cannot be used
+-- | on packages prior to 0.14.7.
 publishToPursuit
   :: forall r
   . PublishToPursuit
-  -> Run (PURSUIT + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) Unit
-publishToPursuit { source, compiler, resolutions, installedResolutions } = do
+  -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit)
+publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do
   Log.debug "Generating a resolutions file"
   tmp <- Tmp.mkTmpDir
@@ -1050,10 +1080,8 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = do
   Run.liftAff $ writeJsonFile pursuitResolutionsCodec resolutionsFilePath resolvedPaths
 
   -- The 'purs publish' command requires a clean working tree, but it isn't
-  -- guaranteed that packages have an adequate .gitignore file; compilers prior
-  -- to 0.14.7 did not ignore the purs.json file when publishing. So we stash
-  -- changes made during the publishing process (ie. inclusion of a new purs.json
-  -- file and an output directory from compilation) before calling purs publish.
+  -- guaranteed that packages have an adequate .gitignore file. So we stash
+  -- changes made during the publishing process before calling publish.
   -- https://git-scm.com/docs/gitignore
   Log.debug "Adding output and purs.json to local git excludes..."
   Run.liftAff $ FS.Aff.appendTextFile UTF8 (Path.concat [ source, ".git", "info", "exclude" ]) (String.joinWith "\n" [ "output", "purs.json" ])
@@ -1071,23 +1099,8 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = do
     }
 
   publishJson <- case compilerOutput of
-    Left MissingCompiler -> Except.throw $ Array.fold
-      [ "Publishing failed because the build plan compiler version "
-      , Version.print compiler
-      , " is not supported. Please try again with a different compiler."
- ] - Left (CompilationError errs) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - Left (UnknownError err) -> Except.throw $ String.joinWith "\n" - [ "Publishing failed for your package due to an unknown compiler error:" - , "```" - , err - , "```" - ] + Left error -> + Except.throw $ printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1109,7 +1122,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = do Left error -> Except.throw $ "Could not publish your package to Pursuit because an error was encountered (cc: @purescript/packaging): " <> error Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure unit type PursuitResolutions = Map RawPackageName { version :: Version, path :: FilePath } diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ce046282d..410fdaca1 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,10 +111,11 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do ] pure true Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + -- FIXME + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." diff --git a/app/src/App/Effect/Cache.purs b/app/src/App/Effect/Cache.purs index 04f64c302..3a13de35c 100644 --- a/app/src/App/Effect/Cache.purs +++ b/app/src/App/Effect/Cache.purs @@ -168,7 +168,7 @@ handleMemoryFs env = case _ of case inFs of Nothing -> pure $ reply Nothing Just entry -> do - Log.debug $ "Fell back to on-disk entry for " <> memory + -- Log.debug $ "Fell back to on-disk entry for " <> memory putMemoryImpl env.ref unit (Key memory (Const entry)) pure $ reply $ Just $ unCache entry Just cached -> @@ -227,7 +227,8 @@ getMemoryImpl ref (Key id (Reply reply)) = do cache <- Run.liftEffect $ Ref.read ref case Map.lookup id cache of Nothing -> do - Log.debug $ "No cache entry found for " <> id <> " in memory." + -- FIXME: Re-enable these (?) + -- Log.debug $ "No cache entry found for " <> id <> " in memory." pure $ reply Nothing Just cached -> do pure $ reply $ Just $ unCache cached @@ -236,7 +237,7 @@ putMemoryImpl :: forall x r a. CacheRef -> a -> MemoryEncoding Const a x -> Run putMemoryImpl ref next (Key id (Const value)) = do let (toCache :: x -> CacheValue) = unsafeCoerce Run.liftEffect $ Ref.modify_ (Map.insert id (toCache value)) ref - Log.debug $ "Wrote cache entry for " <> id <> " in memory." + -- Log.debug $ "Wrote cache entry for " <> id <> " in memory." pure next deleteMemoryImpl :: forall x r a. 
CacheRef -> MemoryEncoding Ignore a x -> Run (LOG + EFFECT + r) a @@ -275,7 +276,7 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readFile path)) >>= case _ of Left _ -> do - Log.debug $ "No cache found for " <> id <> " at path " <> path + -- Log.debug $ "No cache found for " <> id <> " at path " <> path pure $ reply Nothing Right buf -> do pure $ reply $ Just buf @@ -284,7 +285,7 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 path)) >>= case _ of Left _ -> do - Log.debug $ "No cache file found for " <> id <> " at path " <> path + -- Log.debug $ "No cache file found for " <> id <> " at path " <> path pure $ reply Nothing Right content -> case Argonaut.Parser.jsonParser content of Left parseError -> do @@ -307,7 +308,7 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as a buffer: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path + -- Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path pure next AsJson id codec (Const value) -> do @@ -317,7 +318,7 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as JSON: " <> Aff.message fsError pure next Right _ -> do - Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." + -- Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." pure next deleteFsImpl :: forall a b r. FilePath -> FsEncoding Ignore a b -> Run (LOG + AFF + r) a diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index 05da5d983..91bc3537f 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -358,7 +358,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Just metadata -> do Log.debug $ "Successfully read metadata for " <> printedName <> " from path " <> path - Log.debug $ "Setting metadata cache to singleton entry (as cache was previosuly empty)." + Log.debug $ "Setting metadata cache to singleton entry (as cache was previously empty)." Cache.put _registryCache AllMetadata (Map.singleton name metadata) pure $ Just metadata diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 9fdd36d99..7738a1269 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -214,16 +214,7 @@ runLegacyImport logs = do Run.liftAff $ writePackageFailures importedIndex.failedPackages Run.liftAff $ writeVersionFailures importedIndex.failedVersions - Log.info "Writing empty metadata files for legacy packages that can't be registered..." - void $ forWithIndex importedIndex.reservedPackages \package location -> do - Registry.readMetadata package >>= case _ of - Nothing -> do - let metadata = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } - Registry.writeMetadata package metadata - Just _ -> pure unit - Log.info "Ready for upload!" - Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex Log.info "Sorting packages for upload..." 
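Packages are sorted before upload because a package can only be published once its dependencies exist in the registry; a sketch of the ordering call as used at this point in the series (the sort presumably places dependencies before their dependents):

```purescript
-- Topologically order the imported manifests for publishing, ignoring
-- version ranges when computing the dependency ordering.
uploadOrder :: ManifestIndex -> Array Manifest
uploadOrder = ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges
```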
@@ -242,20 +233,20 @@ runLegacyImport logs = do publishLegacyPackage :: Manifest -> Run _ Unit publishLegacyPackage (Manifest manifest) = do let formatted = formatPackageVersion manifest.name manifest.version - Log.info $ "PUBLISHING: " <> formatted + Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted Just ref -> pure ref Log.debug $ "Solving dependencies for " <> formatted index <- Registry.readAllManifests + Log.debug $ "Read all manifests: " <> String.joinWith ", " (map (\(Manifest m) -> formatPackageVersion m.name m.version) $ ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges index) let solverIndex = map (map (_.dependencies <<< un Manifest)) $ ManifestIndex.toMap index case Solver.solve solverIndex manifest.dependencies of Left unsolvable -> do Log.warn $ "Could not solve " <> formatted let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable Log.debug $ String.joinWith "\n" errors - Cache.put _importCache (ImportManifest manifest.name (RawVersion ref)) (Left { error: SolveFailed, reason: String.joinWith " " errors }) Right resolutions -> do Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions Log.debug "Determining a compiler version suitable for publishing..." @@ -297,7 +288,7 @@ runLegacyImport logs = do Except.runExcept (API.publish payload) >>= case _ of Left error -> do Log.error $ "Failed to publish " <> formatted <> ": " <> error - Cache.put _importCache (PublishFailure manifest.name manifest.version) error + -- Cache.put _importCache (PublishFailure manifest.name manifest.version) error Right _ -> do Log.info $ "Published " <> formatted @@ -865,7 +856,7 @@ instance MemoryEncodable ImportCache where ImportManifest name (RawVersion version) next -> Exists.mkExists $ Key ("ImportManifest__" <> PackageName.print name <> "__" <> version) next PublishFailure name version next -> do - Exists.mkExists $ Key ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) next + Exists.mkExists $ Key ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) next instance FsEncodable ImportCache where encodeFs = case _ of @@ -874,7 +865,7 @@ instance FsEncodable ImportCache where Exists.mkExists $ AsJson ("ImportManifest__" <> PackageName.print name <> "__" <> version) codec next PublishFailure name version next -> do let codec = CA.string - Exists.mkExists $ AsJson ("PublishFailureCache__" <> PackageName.print name <> "__" <> Version.print version) codec next + Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) From 8749bea037d91e39719854759963b2374b42ca62 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 13 Nov 2023 16:13:27 -0500 Subject: [PATCH 08/64] Better reporting of failures --- app/src/App/API.purs | 21 ++--- app/src/App/CLI/Git.purs | 9 +- app/src/App/CLI/Purs.purs | 16 ++++ app/src/App/Effect/Cache.purs | 10 +-- scripts/src/LegacyImporter.purs | 140 ++++++++++++++++++++++---------- 5 files changed, 130 insertions(+), 66 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 2ef936643..06ee64a7c 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -389,9 +389,9 @@ 
publish payload = do -- supports syntax back to 0.14.0. We'll still try to validate the package -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of - Left formattedError | payload.compiler < unsafeFromRight (Version.parse "0.14.0") -> do + Left formattedError | payload.compiler < unsafeFromRight (Version.parse "0.15.0") -> do Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError - Log.debug $ "Skipping check because package is published with a pre-0.14.0 compiler (" <> Version.print payload.compiler <> ")." + Log.debug $ "Skipping check because package is published with a pre-0.15.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> Except.throw $ Array.fold [ "This package has either malformed or disallowed PureScript module names " @@ -801,8 +801,7 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Comment.comment $ Array.fold [ "The following compilers are compatible with this package according to its dependency resolutions: " , String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") $ NonEmptySet.toUnfoldable compatible) - , ".\n\n" - , "Computing the list of compilers usable with your package version..." + , ". Computing the list of compilers usable with your package version..." ] { failed: invalidCompilers, succeeded: validCompilers } <- findAllCompilers @@ -949,8 +948,8 @@ compatibleCompilers allMetadata resolutions = do NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head) tail type DiscoverCompilers = - { source :: FilePath - , compilers :: Array Version + { compilers :: Array Version + , source :: FilePath , installed :: FilePath } @@ -974,7 +973,7 @@ findAllCompilers { source, compilers, installed } = do -- | Find the first compiler that can compile the package source code and -- | installed resolutions from the given array of compilers. Begins with the -- | latest compiler and works backwards to older compilers. -findFirstCompiler :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Maybe Version) +findFirstCompiler :: forall r. 
DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) findFirstCompiler { source, compilers, installed } = do search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do Log.debug $ "Trying compiler " <> Version.print target @@ -985,10 +984,12 @@ findFirstCompiler { source, compilers, installed } = do , cwd: Just workdir } FS.Extra.remove workdir - for_ result (\_ -> Except.throw target) + case result of + Left error -> pure $ Tuple target error + Right _ -> Except.throw target case search of - Left found -> pure $ Just found - Right _ -> pure Nothing + Left worked -> pure $ Right worked + Right others -> pure $ Left $ Map.fromFoldable others printCompilerFailure :: Version -> CompilerFailure -> String printCompilerFailure compiler = case _ of diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index 410fdaca1..ce046282d 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,11 +111,10 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do ] pure true Just files -> do - -- FIXME - -- Log.debug $ Array.fold - -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - -- , NonEmptyArray.foldMap1 (append "\n - ") files - -- ] + Log.debug $ Array.fold + [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + , NonEmptyArray.foldMap1 (append "\n - ") files + ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs index 38fa0f19c..2034286f2 100644 --- a/app/src/App/CLI/Purs.purs +++ b/app/src/App/CLI/Purs.purs @@ -21,6 +21,22 @@ data CompilerFailure | MissingCompiler derive instance Eq CompilerFailure +derive instance Ord CompilerFailure + +compilerFailureCodec :: JsonCodec CompilerFailure +compilerFailureCodec = CA.codec' decode encode + where + decode :: Json -> Either JsonDecodeError CompilerFailure + decode json = + map CompilationError (CA.decode (CA.array compilerErrorCodec) json) + <|> map UnknownError (CA.decode CA.string json) + <|> map (const MissingCompiler) (CA.decode CA.null json) + + encode :: CompilerFailure -> Json + encode = case _ of + CompilationError errors -> CA.encode (CA.array compilerErrorCodec) errors + UnknownError message -> CA.encode CA.string message + MissingCompiler -> CA.encode CA.null unit type CompilerError = { position :: SourcePosition diff --git a/app/src/App/Effect/Cache.purs b/app/src/App/Effect/Cache.purs index 3a13de35c..1688f3ff2 100644 --- a/app/src/App/Effect/Cache.purs +++ b/app/src/App/Effect/Cache.purs @@ -168,7 +168,6 @@ handleMemoryFs env = case _ of case inFs of Nothing -> pure $ reply Nothing Just entry -> do - -- Log.debug $ "Fell back to on-disk entry for " <> memory putMemoryImpl env.ref unit (Key memory (Const entry)) pure $ reply $ Just $ unCache entry Just cached -> @@ -226,9 +225,7 @@ getMemoryImpl ref (Key id (Reply reply)) = do let (unCache :: CacheValue -> b) = unsafeCoerce cache <- Run.liftEffect $ Ref.read ref case Map.lookup id cache of - Nothing -> do - -- FIXME: Re-enable these (?) - -- Log.debug $ "No cache entry found for " <> id <> " in memory." + Nothing -> pure $ reply Nothing Just cached -> do pure $ reply $ Just $ unCache cached @@ -237,7 +234,6 @@ putMemoryImpl :: forall x r a. 
CacheRef -> a -> MemoryEncoding Const a x -> Run putMemoryImpl ref next (Key id (Const value)) = do let (toCache :: x -> CacheValue) = unsafeCoerce Run.liftEffect $ Ref.modify_ (Map.insert id (toCache value)) ref - -- Log.debug $ "Wrote cache entry for " <> id <> " in memory." pure next deleteMemoryImpl :: forall x r a. CacheRef -> MemoryEncoding Ignore a x -> Run (LOG + EFFECT + r) a @@ -276,7 +272,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readFile path)) >>= case _ of Left _ -> do - -- Log.debug $ "No cache found for " <> id <> " at path " <> path pure $ reply Nothing Right buf -> do pure $ reply $ Just buf @@ -285,7 +280,6 @@ getFsImpl cacheDir = case _ of let path = Path.concat [ cacheDir, safePath id ] Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 path)) >>= case _ of Left _ -> do - -- Log.debug $ "No cache file found for " <> id <> " at path " <> path pure $ reply Nothing Right content -> case Argonaut.Parser.jsonParser content of Left parseError -> do @@ -308,7 +302,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as a buffer: " <> Aff.message fsError pure next Right _ -> do - -- Log.debug $ "Wrote cache entry for " <> id <> " as a buffer at path " <> path pure next AsJson id codec (Const value) -> do @@ -318,7 +311,6 @@ putFsImpl cacheDir next = case _ of Log.warn $ "Failed to write cache entry for " <> id <> " at path " <> path <> " as JSON: " <> Aff.message fsError pure next Right _ -> do - -- Log.debug $ "Wrote cache entry for " <> id <> " at path " <> path <> " as JSON." pure next deleteFsImpl :: forall a b r. FilePath -> FsEncoding Ignore a b -> Run (LOG + AFF + r) a diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 7738a1269..43cc7f94b 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -12,6 +12,7 @@ import ArgParse.Basic (ArgParser) import ArgParse.Basic as Arg import Control.Apply (lift2) import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.Argonaut as CA import Data.Codec.Argonaut.Common as CA.Common import Data.Codec.Argonaut.Record as CA.Record @@ -22,6 +23,7 @@ import Data.Filterable (partition) import Data.Foldable (foldMap) import Data.Foldable as Foldable import Data.Formatter.DateTime as Formatter.DateTime +import Data.Function (on) import Data.FunctorWithIndex (mapWithIndex) import Data.List as List import Data.List.NonEmpty as NonEmptyList @@ -44,6 +46,7 @@ import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache @@ -61,10 +64,12 @@ import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestValidationError) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) +import Registry.App.Prelude as Either import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit import 
Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec (packageMap, versionMap) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format import Registry.Manifest as Manifest @@ -218,16 +223,15 @@ runLegacyImport logs = do Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex Log.info "Sorting packages for upload..." - let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges importedIndex.registryIndex + let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex - Log.info "Removing packages that previously failed publish" - indexPackages <- allIndexPackages # Array.filterA \(Manifest { name, version }) -> - isNothing <$> Cache.get _importCache (PublishFailure name version) - - notPublished <- do + Log.info "Removing packages that previously failed publish or have been published" + publishable <- do allMetadata <- Registry.readAllMetadata - let isPublished { name, version } = hasMetadata allMetadata name version - pure $ indexPackages # Array.filter \(Manifest manifest) -> not (isPublished manifest) + allIndexPackages # Array.filterA \(Manifest { name, version }) -> do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure $ not $ hasMetadata allMetadata name version + Just _ -> pure false let publishLegacyPackage :: Manifest -> Run _ Unit @@ -237,16 +241,15 @@ runLegacyImport logs = do RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted Just ref -> pure ref - Log.debug $ "Solving dependencies for " <> formatted index <- Registry.readAllManifests Log.debug $ "Read all manifests: " <> String.joinWith ", " (map (\(Manifest m) -> formatPackageVersion m.name m.version) $ ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges index) let solverIndex = map (map (_.dependencies <<< un Manifest)) $ ManifestIndex.toMap index case Solver.solve solverIndex manifest.dependencies of Left unsolvable -> do - Log.warn $ "Could not solve " <> formatted let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable - Log.debug $ String.joinWith "\n" errors + Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors + Cache.put _importCache (PublishFailure manifest.name manifest.version) (SolveFailed $ String.joinWith " " errors) Right resolutions -> do Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions Log.debug "Determining a compiler version suitable for publishing..." @@ -274,8 +277,19 @@ runLegacyImport logs = do selected <- API.findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } FS.Extra.remove tmp case selected of - Nothing -> Log.error "Could not find any valid compilers for this package." 
-          Just compiler -> do
+          Left failures -> do
+            let
+              collected :: Map (NonEmptyArray Version) CompilerFailure
+              collected = do
+                let
+                  foldFn prev xs = do
+                    let Tuple _ failure = NonEmptyArray.head xs
+                    let key = map fst xs
+                    Map.insert key failure prev
+                Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures)
+            Log.error $ "Failed to find any valid compilers for publishing:\n" <> printJson compilerFailureMapCodec collected
+            Cache.put _importCache (PublishFailure manifest.name manifest.version) (NoCompilersFound collected)
+          Right compiler -> do
             Log.debug $ "Selected " <> Version.print compiler <> " for publishing."
             let
               payload =
                 { name: manifest.name
                 , location: Just manifest.location
                 , ref
                 , compiler
                 , resolutions: Just resolutions
                 }
             Except.runExcept (API.publish payload) >>= case _ of
               Left error -> do
                 Log.error $ "Failed to publish " <> formatted <> ": " <> error
                 Cache.put _importCache (PublishFailure manifest.name manifest.version) (PublishError error)
               Right _ -> do
                 Log.info $ "Published " <> formatted
 
-  case notPublished of
+  case publishable of
     [] -> Log.info "No packages to publish."
     manifests -> do
       Log.info $ Array.foldMap (append "\n")
         [ "----------"
         , "AVAILABLE TO PUBLISH"
         , Array.foldMap (\(Manifest { name, version }) -> "\n  - " <> formatPackageVersion name version) manifests
         , "----------"
         ]
 
-      void $ for manifests publishLegacyPackage
+      void $ for (Array.take 150 manifests) publishLegacyPackage
+
+  Log.info "Finished publishing! Collecting all publish failures and writing to disk."
+  let
+    collectError prev (Manifest { name, version }) = do
+      Cache.get _importCache (PublishFailure name version) >>= case _ of
+        Nothing -> pure prev
+        Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev
+  failures <- Array.foldM collectError Map.empty allIndexPackages
+  Run.liftAff $ writePublishFailures failures
+
+-- | Record all publish failures to the 'publish-failures.json' file.
+writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit
+writePublishFailures =
+  writeJsonFile (packageMap (versionMap jsonValidationErrorCodec)) (Path.concat [ scratchDir, "publish-failures.json" ])
+    <<< map (map formatPublishError)
 
 -- | Record all package failures to the 'package-failures.json' file.
writePackageFailures :: Map RawPackageName PackageValidationError -> Aff Unit @@ -456,6 +486,38 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa pure $ Map.fromFoldable manifests +data PublishError = SolveFailed String | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) | PublishError String + +derive instance Eq PublishError + +publishErrorCodec :: JsonCodec PublishError +publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch + { solveFailed: Right CA.string + , noCompilersFound: Right compilerFailureMapCodec + , publishError: Right CA.string + } + where + toVariant = case _ of + SolveFailed error -> Variant.inj (Proxy :: _ "solveFailed") error + NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed + PublishError error -> Variant.inj (Proxy :: _ "publishError") error + + fromVariant = Variant.match + { solveFailed: SolveFailed + , noCompilersFound: NoCompilersFound + , publishError: PublishError + } + +compilerFailureMapCodec :: JsonCodec (Map (NonEmptyArray Version) CompilerFailure) +compilerFailureMapCodec = do + let + print = NonEmptyArray.intercalate "," <<< map Version.print + parse input = do + let versions = String.split (String.Pattern ",") input + let parsed = Array.mapMaybe (Either.hush <<< Version.parse) versions + NonEmptyArray.fromArray parsed + Internal.Codec.strMap "CompilerFailureMap" parse print compilerFailureCodec + type EXCEPT_VERSION :: Row (Type -> Type) -> Row (Type -> Type) type EXCEPT_VERSION r = (exceptVersion :: Except VersionValidationError | r) @@ -481,8 +543,6 @@ data VersionError | DisabledVersion | InvalidManifest LegacyManifestValidationError | UnregisteredDependencies (Array PackageName) - | SolveFailed - | NoCompilerFound versionErrorCodec :: JsonCodec VersionError versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch @@ -497,8 +557,6 @@ versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM , reason: CA.string } , unregisteredDependencies: Right (CA.array PackageName.codec) - , solveFailed: Left unit - , noCompilerFound: Left unit } where toVariant = case _ of @@ -506,16 +564,12 @@ versionErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM DisabledVersion -> Variant.inj (Proxy :: _ "disabledVersion") unit InvalidManifest inner -> Variant.inj (Proxy :: _ "invalidManifest") inner UnregisteredDependencies inner -> Variant.inj (Proxy :: _ "unregisteredDependencies") inner - SolveFailed -> Variant.inj (Proxy :: _ "solveFailed") unit - NoCompilerFound -> Variant.inj (Proxy :: _ "noCompilerFound") unit fromVariant = Variant.match { invalidTag: InvalidTag , disabledVersion: \_ -> DisabledVersion , invalidManifest: InvalidManifest , unregisteredDependencies: UnregisteredDependencies - , solveFailed: \_ -> SolveFailed - , noCompilerFound: \_ -> NoCompilerFound } validateVersionDisabled :: PackageName -> LenientVersion -> Either VersionValidationError Unit @@ -665,14 +719,14 @@ validatePackageName (RawPackageName name) = type JsonValidationError = { tag :: String - , value :: Maybe String + , value :: Maybe Json , reason :: String } jsonValidationErrorCodec :: JsonCodec JsonValidationError jsonValidationErrorCodec = CA.Record.object "JsonValidationError" { tag: CA.string - , value: CA.Record.optional CA.string + , value: CA.Record.optional CA.json , reason: CA.string } @@ -681,30 +735,34 @@ formatPackageValidationError { error, reason } = case error of InvalidPackageName -> { tag: 
"InvalidPackageName", value: Nothing, reason } InvalidPackageURL url -> - { tag: "InvalidPackageURL", value: Just url, reason } + { tag: "InvalidPackageURL", value: Just (CA.encode CA.string url), reason } PackageURLRedirects { registered } -> - { tag: "PackageURLRedirects", value: Just (registered.owner <> "/" <> registered.repo), reason } + { tag: "PackageURLRedirects", value: Just (CA.encode CA.string (registered.owner <> "/" <> registered.repo)), reason } CannotAccessRepo address -> - { tag: "CannotAccessRepo", value: Just (address.owner <> "/" <> address.repo), reason } + { tag: "CannotAccessRepo", value: Just (CA.encode CA.string (address.owner <> "/" <> address.repo)), reason } DisabledPackage -> { tag: "DisabledPackage", value: Nothing, reason } formatVersionValidationError :: VersionValidationError -> JsonValidationError formatVersionValidationError { error, reason } = case error of InvalidTag tag -> - { tag: "InvalidTag", value: Just tag.name, reason } + { tag: "InvalidTag", value: Just (CA.encode CA.string tag.name), reason } DisabledVersion -> { tag: "DisabledVersion", value: Nothing, reason } InvalidManifest err -> do let errorValue = Legacy.Manifest.printLegacyManifestError err.error - { tag: "InvalidManifest", value: Just errorValue, reason } - UnregisteredDependencies names -> do - let errorValue = String.joinWith ", " $ map PackageName.print names - { tag: "UnregisteredDependencies", value: Just errorValue, reason } - SolveFailed -> - { tag: "SolveFailed", value: Nothing, reason } - NoCompilerFound -> - { tag: "NoCompilerFound", value: Nothing, reason } + { tag: "InvalidManifest", value: Just (CA.encode CA.string errorValue), reason } + UnregisteredDependencies names -> + { tag: "UnregisteredDependencies", value: Just (CA.encode (CA.array PackageName.codec) names), reason } + +formatPublishError :: PublishError -> JsonValidationError +formatPublishError = case _ of + SolveFailed error -> + { tag: "SolveFailed", value: Nothing, reason: error } + NoCompilersFound versions -> + { tag: "NoCompilersFound", value: Just (CA.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." 
} + PublishError error -> + { tag: "PublishError", value: Nothing, reason: error } type ImportStats = { packagesProcessed :: Int @@ -800,8 +858,6 @@ calculateImportStats legacyRegistry imported = do DisabledVersion -> "Disabled Version" InvalidManifest err -> "Invalid Manifest (" <> innerKey err <> ")" UnregisteredDependencies _ -> "Unregistered Dependencies" - SolveFailed -> "Solve Failed" - NoCompilerFound -> "No Compiler Found" innerKey = _.error >>> case _ of NoManifests -> "No Manifests" @@ -845,7 +901,7 @@ legacyRepoParser = do data ImportCache :: (Type -> Type -> Type) -> Type -> Type data ImportCache c a = ImportManifest PackageName RawVersion (c (Either VersionValidationError Manifest) a) - | PublishFailure PackageName Version (c String a) + | PublishFailure PackageName Version (c PublishError a) instance Functor2 c => Functor (ImportCache c) where map k (ImportManifest name version a) = ImportManifest name version (map2 k a) @@ -864,7 +920,7 @@ instance FsEncodable ImportCache where let codec = CA.Common.either versionValidationErrorCodec Manifest.codec Exists.mkExists $ AsJson ("ImportManifest__" <> PackageName.print name <> "__" <> version) codec next PublishFailure name version next -> do - let codec = CA.string + let codec = publishErrorCodec Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) From be93d18cb68b29dd01d9051a1a67c5f959c31f68 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 14 Nov 2023 12:27:18 -0500 Subject: [PATCH 09/64] Update union of package set / spago / bower deps, consider ranges in most manifest index ops --- app/src/App/Effect/PackageSets.purs | 2 +- app/src/App/Effect/Registry.purs | 7 +++-- app/src/App/Legacy/Manifest.purs | 33 ++++++++++------------ app/test/App/Legacy/PackageSet.purs | 2 +- app/test/Test/Assert/Run.purs | 4 +-- lib/spago.yaml | 1 - lib/src/ManifestIndex.purs | 41 ++++++++++++---------------- lib/test/Registry/ManifestIndex.purs | 31 ++++++++++++--------- scripts/src/LegacyImporter.purs | 29 ++++++++++++++------ spago.lock | 2 +- 10 files changed, 79 insertions(+), 73 deletions(-) diff --git a/app/src/App/Effect/PackageSets.purs b/app/src/App/Effect/PackageSets.purs index 5a250ba22..ccd78e1c2 100644 --- a/app/src/App/Effect/PackageSets.purs +++ b/app/src/App/Effect/PackageSets.purs @@ -428,7 +428,7 @@ validatePackageSet (PackageSet set) = do -- We can now attempt to produce a self-contained manifest index from the -- collected manifests. If this fails then the package set is not -- self-contained. - Tuple unsatisfied _ = ManifestIndex.maximalIndex (Set.fromFoldable success) + Tuple unsatisfied _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges (Set.fromFoldable success) -- Otherwise, we can check if we were able to produce an index from the -- package set alone, without errors. 
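This patch threads an `IncludeRanges` mode through the index operations: package set validation above keeps the lenient `IgnoreRanges`, while the registry effect below moves to `ConsiderRanges`. A minimal sketch of what the two modes accept, reusing the `unsafeManifest` helper from the library test suite (package names and versions are hypothetical):

    -- Only prelude@1.0.0 exists, but control@2.0.0 requires prelude >=2.0.0 <3.0.0.
    manifests :: Set Manifest
    manifests = Set.fromFoldable
      [ unsafeManifest "prelude" "1.0.0" []
      , unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ]
      ]

    -- IgnoreRanges only requires each dependency name to exist in the index,
    -- so no manifest is rejected:
    Tuple looseFailed _ = ManifestIndex.maximalIndex ManifestIndex.IgnoreRanges manifests
    -- ConsiderRanges additionally requires some indexed version to satisfy the
    -- declared range, so control@2.0.0 is reported with its unsatisfied range:
    Tuple strictFailed _ = ManifestIndex.maximalIndex ManifestIndex.ConsiderRanges manifests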
diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index 91bc3537f..6590ae37f 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -251,7 +251,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Writing manifest for " <> formatted <> ":\n" <> printJson Manifest.codec manifest index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left error -> Except.throw $ Array.fold [ "Can't insert " <> formatted <> " into manifest index because it has unsatisfied dependencies:" @@ -274,7 +274,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << let formatted = formatPackageVersion name version Log.info $ "Deleting manifest for " <> formatted index <- Except.rethrow =<< handle env (ReadAllManifests identity) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left error -> Except.throw $ Array.fold [ "Can't delete " <> formatted <> " from manifest index because it would produce unsatisfied dependencies:" @@ -835,8 +835,9 @@ readManifestIndexFromDisk root = do entries <- map partitionEithers $ for packages.success (ManifestIndex.readEntryFile root) case entries.fail of - [] -> case ManifestIndex.fromSet $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of + [] -> case ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable $ Array.foldMap NonEmptyArray.toArray entries.success of Left errors -> do + Log.debug $ "Could not read a valid manifest index from entry files: " <> Array.foldMap (Array.foldMap (\(Manifest { name, version }) -> "\n - " <> formatPackageVersion name version) <<< NonEmptyArray.toArray) entries.success Except.throw $ append "Unable to read manifest index (some packages are not satisfiable): " $ Array.foldMap (append "\n - ") do Tuple name versions <- Map.toUnfoldable errors Tuple version dependency <- Map.toUnfoldable versions diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index be4c34cec..dd689fb2c 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -9,7 +9,6 @@ import Data.Codec.Argonaut.Record as CA.Record import Data.Codec.Argonaut.Variant as CA.Variant import Data.Either as Either import Data.Exists as Exists -import Data.FunctorWithIndex (mapWithIndex) import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Ord.Max (Max(..)) @@ -35,7 +34,7 @@ import Registry.App.Legacy.LenientRange as LenientRange import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (LegacyPackageSet(..), LegacyPackageSetEntry, LegacyPackageSetUnion, RawPackageName(..), RawVersion(..), RawVersionRange(..), legacyPackageSetCodec, legacyPackageSetUnionCodec, rawPackageNameMapCodec, rawVersionCodec, rawVersionRangeCodec) -import Registry.Foreign.Octokit (Address, GitHubError) +import Registry.Foreign.Octokit (Address, GitHubError(..)) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp import Registry.License as License @@ -137,21 +136,13 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr Left bowerError, Left _ -> Left bowerError Right bowerDeps, 
Left _ -> Right bowerDeps Left _, Right spagoDeps -> Right spagoDeps - Right bowerDeps, Right spagoDeps -> Right do - bowerDeps # mapWithIndex \package range -> - case Map.lookup package spagoDeps of - Nothing -> range - Just spagoRange -> Range.union range spagoRange + Right bowerDeps, Right spagoDeps -> Right $ Map.unionWith Range.union bowerDeps spagoDeps unionPackageSets = case maybePackageSetDeps, unionManifests of Nothing, Left manifestError -> Left manifestError Nothing, Right manifestDeps -> Right manifestDeps Just packageSetDeps, Left _ -> Right packageSetDeps - Just packageSetDeps, Right manifestDeps -> Right do - packageSetDeps # mapWithIndex \package range -> - case Map.lookup package manifestDeps of - Nothing -> range - Just manifestRange -> Range.union range manifestRange + Just packageSetDeps, Right manifestDeps -> Right $ Map.unionWith Range.union manifestDeps packageSetDeps Run.Except.rethrowAt _legacyManifestError unionPackageSets @@ -221,16 +212,22 @@ fetchLegacyManifestFiles :: forall r . Address -> RawVersion - -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) + -> Run (GITHUB + LOG + AFF + EFFECT + EXCEPT String + r) (Either LegacyManifestValidationError (These Bowerfile SpagoDhallJson)) fetchLegacyManifestFiles address ref = do eitherBower <- fetchBowerfile address ref - void $ flip ltraverse eitherBower \error -> - Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherBower case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch bowerfile: " <> Octokit.printGitHubError error eitherSpago <- fetchSpagoDhallJson address ref - void $ flip ltraverse eitherSpago \error -> - Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error + void $ flip ltraverse eitherSpago case _ of + APIError { statusCode } | statusCode == 401 -> + Except.throw "Permission error on token used to fetch manifests!" + error -> + Log.debug $ "Failed to fetch spago.dhall: " <> Octokit.printGitHubError error pure $ case eitherBower, eitherSpago of - Left _, Left _ -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available." 
} + Left errL, Left errR -> Left { error: NoManifests, reason: "No bower.json or spago.dhall files available: " <> Octokit.printGitHubError errL <> ", " <> Octokit.printGitHubError errR } Right bower, Left _ -> Right $ This bower Left _, Right spago -> Right $ That spago Right bower, Right spago -> Right $ Both bower spago diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 5fd4a801a..e3279f68b 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -97,7 +97,7 @@ convertedPackageSet = Left err -> unsafeCrashWith err Right value -> value where - index = unsafeFromRight $ ManifestIndex.fromSet $ Set.fromFoldable + index = unsafeFromRight $ ManifestIndex.fromSet ManifestIndex.ConsiderRanges $ Set.fromFoldable [ mkManifest assert [ console, effect, prelude ] , mkManifest console [ effect, prelude ] , mkManifest effect [ prelude ] diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 18ba03016..0daf28264 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -166,7 +166,7 @@ handleRegistryMock env = case _ of WriteManifest manifest reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.insert manifest index of + case ManifestIndex.insert ManifestIndex.ConsiderRanges manifest index of Left err -> pure $ reply $ Left $ "Failed to insert manifest:\n" <> Utils.unsafeStringify manifest <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) @@ -174,7 +174,7 @@ handleRegistryMock env = case _ of DeleteManifest name version reply -> do index <- Run.liftEffect (Ref.read env.indexRef) - case ManifestIndex.delete name version index of + case ManifestIndex.delete ManifestIndex.ConsiderRanges name version index of Left err -> pure $ reply $ Left $ "Failed to delete entry for :\n" <> Utils.formatPackageVersion name version <> " due to an error:\n" <> Utils.unsafeStringify err Right index' -> do Run.liftEffect (Ref.write index' env.indexRef) diff --git a/lib/spago.yaml b/lib/spago.yaml index 561b8231e..d334f9c87 100644 --- a/lib/spago.yaml +++ b/lib/spago.yaml @@ -44,7 +44,6 @@ package: test: main: Test.Registry dependencies: - - argonaut-core - exceptions - node-child-process - node-execa diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 6029b8e34..413201f83 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -103,25 +103,18 @@ lookup name version (ManifestIndex index) = -- | Insert a new manifest into the manifest index, failing if the manifest -- | indicates dependencies that cannot be satisfied. Dependencies are not -- | satisfied if the package is not in the index. 
-insert :: Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex -insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do +insert :: IncludeRanges -> Manifest -> ManifestIndex -> Either (Map PackageName Range) ManifestIndex +insert consider manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) = do let unsatisfied :: Map PackageName Range unsatisfied = Map.fromFoldable do Tuple dependency range <- Map.toUnfoldable dependencies case Map.lookup dependency index of - Just _versions -> - -- Ideally we would enforce that inserting a manifest requires that - -- at least one version exists in the index in the given range already - -- Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> - -- - -- However, to be somewhat lenient on what packages can be admitted to - -- the official index, we just look to see the package name exists. - -- - -- Note that if we _do_ add this check later on, we will need to - -- produce an alternate version that does not check version bounds for - -- use in validatiing package sets, ie. 'maximalIndexIgnoringBounds' - [] + Just versions -> case consider of + IgnoreRanges -> [] + ConsiderRanges + | Array.any (Range.includes range) (Set.toUnfoldable (Map.keys versions)) -> [] + | otherwise -> [ Tuple dependency range ] _ -> [ Tuple dependency range ] @@ -137,12 +130,12 @@ insert manifest@(Manifest { name, version, dependencies }) (ManifestIndex index) -- | package names (and not package versions), it is always acceptable to delete -- | a package version so long as it has at least 2 versions. However, removing -- | a package altogether incurs a full validation check. -delete :: PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -delete name version (ManifestIndex index) = do +delete :: IncludeRanges -> PackageName -> Version -> ManifestIndex -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +delete consider name version (ManifestIndex index) = do case Map.lookup name index of Nothing -> pure (ManifestIndex index) Just versionsMap | Map.size versionsMap == 1 -> - fromSet $ Set.fromFoldable do + fromSet consider $ Set.fromFoldable do Tuple _ versions <- Map.toUnfoldableUnordered (Map.delete name index) Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] @@ -151,21 +144,21 @@ delete name version (ManifestIndex index) = do -- | Convert a set of manifests into a `ManifestIndex`. Reports all failures -- | encountered rather than short-circuiting. -fromSet :: Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -fromSet manifests = do - let Tuple failed index = maximalIndex manifests +fromSet :: IncludeRanges -> Set Manifest -> Either (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +fromSet consider manifests = do + let Tuple failed index = maximalIndex consider manifests if Map.isEmpty failed then Right index else Left failed -- | Produce the maximal `ManifestIndex` possible for the given set of -- | `Manifest`s, collecting failures along the way. 
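+-- | The same `IncludeRanges` mode used for insertion checks is also passed to
+-- | the topological sort below, so strict and lenient builds may admit
+-- | manifests in a different order.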
-maximalIndex :: Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex -maximalIndex manifests = do +maximalIndex :: IncludeRanges -> Set Manifest -> Tuple (Map PackageName (Map Version (Map PackageName Range))) ManifestIndex +maximalIndex consider manifests = do let - insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert manifest index of + insertManifest (Tuple failed index) manifest@(Manifest { name, version }) = case insert consider manifest index of Left errors -> Tuple (Map.insertWith Map.union name (Map.singleton version errors) failed) index Right newIndex -> Tuple failed newIndex - Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort IgnoreRanges manifests) + Array.foldl insertManifest (Tuple Map.empty empty) (topologicalSort consider manifests) data IncludeRanges = ConsiderRanges diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index b66395289..2d19504ef 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -75,8 +75,8 @@ spec = do manifest1 = unsafeManifest "prelude" "1.0.0" [] manifest2 = Newtype.over Manifest (_ { description = Just "My prelude description." }) manifest1 index = - ManifestIndex.insert manifest1 ManifestIndex.empty - >>= ManifestIndex.insert manifest2 + ManifestIndex.insert ManifestIndex.ConsiderRanges manifest1 ManifestIndex.empty + >>= ManifestIndex.insert ManifestIndex.ConsiderRanges manifest2 case index of Left errors -> @@ -104,17 +104,20 @@ spec = do tinyIndex :: Array Manifest tinyIndex = [ unsafeManifest "prelude" "1.0.0" [] ] - testIndex { satisfied: tinyIndex, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied: tinyIndex, unsatisfied: [] } Spec.it "Fails to parse non-self-contained index" do let - satisfied :: Array Manifest - satisfied = + satisfiedStrict :: Array Manifest + satisfiedStrict = [ unsafeManifest "prelude" "1.0.0" [] , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] - -- It is OK for the version bounds to not exist, although we may - -- choose to make this more strict in the future. - , unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] + ] + + -- Packages with dependencies that exist, but not at the proper bounds. + satisfiedLoose :: Array Manifest + satisfiedLoose = satisfiedStrict <> + [ unsafeManifest "control" "2.0.0" [ Tuple "prelude" ">=2.0.0 <3.0.0" ] ] unsatisfied :: Array Manifest @@ -122,7 +125,8 @@ spec = do [ unsafeManifest "control" "3.0.0" [ Tuple "tuples" ">=2.0.0 <3.0.0" ] ] - testIndex { satisfied, unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: satisfiedStrict, unsatisfied } + testIndex ManifestIndex.IgnoreRanges { satisfied: satisfiedLoose, unsatisfied } Spec.it "Parses cyclical but acceptable index" do let @@ -134,7 +138,7 @@ spec = do , unsafeManifest "control" "2.0.0" [] ] - testIndex { satisfied, unsatisfied: [] } + testIndex ManifestIndex.ConsiderRanges { satisfied, unsatisfied: [] } Spec.it "Does not parse unacceptable cyclical index" do let @@ -144,7 +148,7 @@ spec = do , unsafeManifest "control" "1.0.0" [ Tuple "prelude" ">=1.0.0 <2.0.0" ] ] - testIndex { satisfied: [], unsatisfied } + testIndex ManifestIndex.ConsiderRanges { satisfied: [], unsatisfied } contextEntry :: String contextEntry = @@ -156,9 +160,10 @@ contextEntry = testIndex :: forall m . 
MonadThrow Error m - => { satisfied :: Array Manifest, unsatisfied :: Array Manifest } + => ManifestIndex.IncludeRanges + -> { satisfied :: Array Manifest, unsatisfied :: Array Manifest } -> m Unit -testIndex { satisfied, unsatisfied } = case ManifestIndex.maximalIndex (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of +testIndex consider { satisfied, unsatisfied } = case ManifestIndex.maximalIndex consider (Set.fromFoldable (Array.fold [ satisfied, unsatisfied ])) of Tuple errors result -> do let { fail: shouldHaveErrors } = diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 43cc7f94b..27bdf09a9 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -64,7 +64,6 @@ import Registry.App.Legacy.LenientVersion as LenientVersion import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestValidationError) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) -import Registry.App.Prelude as Either import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit @@ -75,6 +74,7 @@ import Registry.Internal.Format as Internal.Format import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName +import Registry.Range as Range import Registry.Solver as Solver import Registry.Version as Version import Run (Run) @@ -219,6 +219,15 @@ runLegacyImport logs = do Run.liftAff $ writePackageFailures importedIndex.failedPackages Run.liftAff $ writeVersionFailures importedIndex.failedVersions + let metadataPackage = unsafeFromRight (PackageName.parse "metadata") + Registry.readMetadata metadataPackage >>= case _ of + Nothing -> do + Log.info "Writing empty metadata file for the 'metadata' package" + let location = GitHub { owner: "purescript", repo: "purescript-metadata", subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata metadataPackage entry + Just _ -> pure unit + Log.info "Ready for upload!" 
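+  -- Note: the 'metadata' name is presumably claimed up front because it is
+  -- reserved for the registry itself; writing an empty metadata file ensures
+  -- no upstream repository can later register a package under that name.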
Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex @@ -243,7 +252,7 @@ runLegacyImport logs = do Just ref -> pure ref Log.debug $ "Solving dependencies for " <> formatted index <- Registry.readAllManifests - Log.debug $ "Read all manifests: " <> String.joinWith ", " (map (\(Manifest m) -> formatPackageVersion m.name m.version) $ ManifestIndex.toSortedArray ManifestIndex.IgnoreRanges index) + Log.debug $ "Read all manifests: " <> String.joinWith ", " (map (\(Manifest m) -> formatPackageVersion m.name m.version) $ ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges index) let solverIndex = map (map (_.dependencies <<< un Manifest)) $ ManifestIndex.toMap index case Solver.solve solverIndex manifest.dependencies of Left unsolvable -> do @@ -251,7 +260,7 @@ runLegacyImport logs = do Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors Cache.put _importCache (PublishFailure manifest.name manifest.version) (SolveFailed $ String.joinWith " " errors) Right resolutions -> do - Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions + Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies Log.debug "Determining a compiler version suitable for publishing..." allMetadata <- Registry.readAllMetadata possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of @@ -316,7 +325,7 @@ runLegacyImport logs = do , "----------" ] - void $ for (Array.take 150 manifests) publishLegacyPackage + void $ for (Array.take 500 manifests) publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." let @@ -388,11 +397,10 @@ importLegacyRegistry legacyRegistry = do -- A 'checked' index is one where we have verified that all dependencies -- are self-contained within the registry. - Tuple unsatisfied validIndex = ManifestIndex.maximalIndex validLegacyManifests + Tuple unsatisfied validIndex = ManifestIndex.maximalIndex ManifestIndex.ConsiderRanges validLegacyManifests -- The list of all packages that were present in the legacy registry files, - -- but which have no versions present in the fully-imported registry. These - -- packages still need to have empty metadata files written for them. + -- but which have no versions present in the fully-imported registry. reservedPackages :: Map PackageName Location reservedPackages = Map.fromFoldable $ Array.mapMaybe reserved $ Map.toUnfoldable legacyRegistry @@ -472,6 +480,9 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." 
} Right result -> pure result pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest exceptVersion manifest Just cached -> @@ -514,7 +525,7 @@ compilerFailureMapCodec = do print = NonEmptyArray.intercalate "," <<< map Version.print parse input = do let versions = String.split (String.Pattern ",") input - let parsed = Array.mapMaybe (Either.hush <<< Version.parse) versions + let parsed = Array.mapMaybe (hush <<< Version.parse) versions NonEmptyArray.fromArray parsed Internal.Codec.strMap "CompilerFailureMap" parse print compilerFailureCodec @@ -780,7 +791,7 @@ formatImportStats stats = String.joinWith "\n" , show stats.packagesProcessed <> " packages processed:" , indent $ show stats.packageResults.success <> " fully successful" , indent $ show stats.packageResults.partial <> " partially successful" - , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " reserved (no usable versions)" + , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " omitted (no usable versions)" , indent $ show stats.packageResults.fail <> " fully failed" , indent "---" , formatErrors stats.packageErrors diff --git a/spago.lock b/spago.lock index ec20419f6..2546a1f1c 100644 --- a/spago.lock +++ b/spago.lock @@ -153,7 +153,7 @@ workspace: - transformers - tuples test_dependencies: - - argonaut-core + - debug - exceptions - node-child-process - node-execa From 5a154333909df3f4ee29d195dc11fb7487403c97 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 14 Nov 2023 19:13:30 -0500 Subject: [PATCH 10/64] Include spago.yaml files in legacy import --- app/src/App/API.purs | 14 +++---- app/src/App/Effect/GitHub.purs | 4 +- scripts/src/LegacyImporter.purs | 69 +++++++++++++++++++++++---------- 3 files changed, 56 insertions(+), 31 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 06ee64a7c..7dd7cb5f3 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -14,6 +14,7 @@ module Registry.App.API , parseInstalledModulePath , publish , removeIgnoredTarballFiles + , spagoToManifest ) where import Registry.App.Prelude @@ -104,8 +105,7 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except import Spago.Core.Config as Spago.Config -import Spago.Core.Prelude as Spago.Prelude -import Spago.Log as Spago.Log +import Spago.FS as Spago.FS type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) @@ -429,16 +429,14 @@ publish payload = do else if hasSpagoYaml then do Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." 
- -- Need to make a Spago log env first, disable the logging - let spagoEnv = { logOptions: { color: false, verbosity: Spago.Log.LogQuiet } } - Spago.Prelude.runSpago spagoEnv (Spago.Config.readConfig packageSpagoYaml) >>= case _ of - Left readErr -> Except.throw $ String.joinWith "\n" + Run.liftAff (Spago.FS.readYamlDocFile Spago.Config.configCodec packageSpagoYaml) >>= case _ of + Left readError -> Except.throw $ String.joinWith "\n" [ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:" - , readErr + , readError ] Right { yaml: config } -> do -- Once we have the config we are still not entirely sure it fits into a Manifest - -- E.g. need to make sure all the ranges are present + -- e.g. need to make sure all the ranges are present case spagoToManifest config of Left err -> Except.throw $ String.joinWith "\n" [ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:" diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 2a30a8f87..0c489d009 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -241,8 +241,8 @@ request octokit githubRequest@{ route: route@(GitHubRoute method _ _), codec } = -- auto-expire cache entries. We will be behind GitHub at most this amount per repo. -- -- TODO: This 'diff' check should be removed once we have conditional requests. - Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 4.0 -> do - Log.debug $ "Found cache entry but it was modified more than 4 hours ago, refetching " <> printedRoute + Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 24.0 -> do + Log.debug $ "Found cache entry but it was modified more than 24 hours ago, refetching " <> printedRoute result <- requestWithBackoff octokit githubRequest Cache.put _githubCache (Request route) (result <#> \resp -> { response: CA.encode codec resp, modified: now, etag: Nothing }) pure result diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 27bdf09a9..2d29ca851 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -54,6 +54,7 @@ import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry as Registry @@ -82,6 +83,8 @@ import Run as Run import Run.Except (EXCEPT, Except) import Run.Except as Except import Run.Except as Run.Except +import Spago.Core.Config as Spago.Config +import Spago.Yaml as Yaml import Type.Proxy (Proxy(..)) data ImportMode = DryRun | GenerateRegistry | UpdateRegistry @@ -469,27 +472,32 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa -- one we compare it to the existing entry, failing if there is a -- difference; if we can't, we warn and fall back to the existing entry. 
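+      -- In order, the code below prefers: an existing registry manifest, then a
+      -- cached import result, then a manifest built from a discovered spago.yaml,
+      -- and only then the legacy bower.json / spago.dhall pipeline.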
Registry.readManifest package.name (LenientVersion.version version) >>= case _ of - Nothing -> do - Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of - Nothing -> do - Log.debug $ "Building manifest in legacy import because it was not found in cache: " <> formatPackageVersion package.name (LenientVersion.version version) - manifest <- Run.Except.runExceptAt _exceptVersion do - exceptVersion $ validateVersionDisabled package.name version - legacyManifest <- do - Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of - Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } - Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest - case manifest of - Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err - Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val - Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest - exceptVersion manifest - Just cached -> - exceptVersion cached - - Just manifest -> - exceptVersion $ Right manifest + Just manifest -> pure manifest + Nothing -> Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + Just cached -> exceptVersion cached + Nothing -> do + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. + fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest + Nothing -> do + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + legacyManifest <- do + Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of + Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } + Right result -> pure result + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag @@ -907,6 +915,25 @@ legacyRepoParser = do pure { owner, repo } +fetchSpagoYaml :: forall r. 
Address -> RawVersion -> Run (GITHUB + LOG + EXCEPT String + r) (Maybe Manifest) +fetchSpagoYaml address ref = do + eitherSpagoYaml <- GitHub.getContent address ref "spago.yaml" + case eitherSpagoYaml of + Left err -> do + Log.debug $ "No spago.yaml found: " <> Octokit.printGitHubError err + pure Nothing + Right file -> do + Log.debug $ "Found spago.yaml file\n" <> file + case Yaml.parseYamlDoc Spago.Config.configCodec file of + Left error -> Except.throw $ "Failed to parse spago.yaml file:\n" <> file <> "\nwith errors:\n" <> CA.printJsonDecodeError error + Right { yaml: parsed } -> case API.spagoToManifest parsed of + Left err -> do + Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> file <> "\nwith errors:\n" <> err + pure Nothing + Right manifest -> do + Log.debug "Successfully converted a spago.yaml into a purs.json manifest" + pure $ Just manifest + -- | A key type for the storage cache. Only supports packages identified by -- | their name and version. data ImportCache :: (Type -> Type -> Type) -> Type -> Type From 559275c05c08e62b0f895974dbf8d901af3ac360 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 15 Nov 2023 17:16:37 -0500 Subject: [PATCH 11/64] Retain compilation in cache --- app/src/App/API.purs | 151 ++++++++++++++++++++++++++------ app/src/App/CLI/Git.purs | 8 +- app/src/App/GitHubIssue.purs | 1 + app/src/App/Server.purs | 4 +- app/test/App/API.purs | 9 +- app/test/Test/Assert/Run.purs | 21 +++++ scripts/src/LegacyImporter.purs | 44 +++++++--- scripts/src/PackageDeleter.purs | 2 + scripts/src/Solver.purs | 2 + spago.lock | 1 - 10 files changed, 195 insertions(+), 48 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 7dd7cb5f3..56f669a1d 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1,13 +1,18 @@ module Registry.App.API ( AuthenticatedEffects + , COMPILER_CACHE + , CompilerCache + , GroupedByCompilers , PackageSetUpdateEffects , PublishEffects + , _compilerCache , authenticated , compatibleCompilers , copyPackageSourceFiles , findAllCompilers , findFirstCompiler , formatPursuitResolutions + , groupedByCompilersCodec , installBuildPlan , packageSetUpdate , packagingTeam @@ -23,10 +28,13 @@ import Data.Argonaut.Parser as Argonaut.Parser import Data.Array as Array import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.Argonaut as CA +import Data.Codec.Argonaut.Common as CA.Common import Data.Codec.Argonaut.Record as CA.Record import Data.DateTime (DateTime) +import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.Function (on) import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format @@ -40,6 +48,7 @@ import Data.String.NonEmpty.Internal (toString) as NonEmptyString import Data.String.Regex as Regex import Effect.Aff as Aff import Effect.Ref as Ref +import Effect.Unsafe (unsafePerformEffect) import Node.FS.Aff as FS.Aff import Node.FS.Stats as FS.Stats import Node.FS.Sync as FS.Sync @@ -50,10 +59,12 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Registry.App.Auth as Auth -import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Cache (class FsEncodable, Cache) 
+import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment (COMMENT) import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) @@ -82,6 +93,7 @@ import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Octokit (IssueNumber(..), Team) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp +import Registry.Internal.Codec (versionMap) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location @@ -321,7 +333,7 @@ authenticated auth = case auth.payload of Registry.mirrorLegacyRegistry payload.name payload.newLocation Comment.comment "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be @@ -386,7 +398,7 @@ publish payload = do ] Just files -> -- The 'validatePursModules' function uses language-cst-parser, which only - -- supports syntax back to 0.14.0. We'll still try to validate the package + -- supports syntax back to 0.15.0. We'll still try to validate the package -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of Left formattedError | payload.compiler < unsafeFromRight (Version.parse "0.15.0") -> do @@ -787,14 +799,21 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif allMetadata <- Registry.readAllMetadata compatible <- case compatibleCompilers allMetadata verifiedResolutions of - Nothing | Map.isEmpty verifiedResolutions -> do - Log.debug "No dependencies, so all compilers are potentially compatible." + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." allCompilers <- PursVersions.pursVersions pure $ NonEmptySet.fromFoldable1 allCompilers - Nothing -> do - let msg = "Dependencies admit no overlapping compiler versions! This should not be possible. Resolutions: " <> printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions - Log.error msg *> Except.throw msg - Just result -> pure result + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Except.throw $ Array.fold + [ "Dependencies admit no overlapping compiler versions, so your package cannot be compiled:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + Right result -> pure result Comment.comment $ Array.fold [ "The following compilers are compatible with this package according to its dependency resolutions: " @@ -802,11 +821,23 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif , ". Computing the list of compilers usable with your package version..." 
] - { failed: invalidCompilers, succeeded: validCompilers } <- findAllCompilers - { source: packageDirectory - , installed: installedResolutions - , compilers: Array.fromFoldable $ NonEmptySet.filter (notEq payload.compiler) compatible - } + let tryCompilers = Array.fromFoldable $ NonEmptySet.filter (notEq payload.compiler) compatible + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromArray tryCompilers of + Nothing -> pure { failed: Map.empty, succeeded: Set.empty } + Just try -> Cache.get _compilerCache (Compilation (Manifest manifest) verifiedResolutions try) >>= case _ of + Nothing -> do + intermediate <- findAllCompilers + { source: packageDirectory + , installed: installedResolutions + , compilers: tryCompilers + } + -- We need to insert the payload compiler, which we previously omitted + -- from the list of compilers to try for efficiency's sake. + let result = intermediate { succeeded = Set.insert payload.compiler intermediate.succeeded } + Cache.put _compilerCache (Compilation (Manifest manifest) verifiedResolutions try) result + pure result + Just cached -> + pure cached unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) @@ -814,7 +845,7 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif let allVerified = case NonEmptySet.fromFoldable validCompilers of Nothing -> NonEmptyArray.singleton payload.compiler - Just verified -> NonEmptyArray.fromFoldable1 $ NonEmptySet.insert payload.compiler verified + Just verified -> NonEmptyArray.fromFoldable1 verified Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptyArray.toArray allVerified)) let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right allVerified })) manifest.version newMetadata.published } @@ -822,6 +853,8 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) Comment.comment "Wrote completed metadata to the registry!" + FS.Extra.remove tmp + FS.Extra.remove packageDirectory -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -925,25 +958,56 @@ compilePackage { source, compiler, resolutions } = Except.runExcept do Left err -> Except.throw $ printCompilerFailure compiler err Right _ -> pure tmp +type GroupedByCompilers = + { packages :: Map PackageName Version + , compilers :: NonEmptySet Version + } + +groupedByCompilersCodec :: JsonCodec GroupedByCompilers +groupedByCompilersCodec = CA.Record.object "GroupedByCompilers" + { compilers: CA.Common.nonEmptySet Version.codec + , packages: Internal.Codec.packageMap Version.codec + } + -- | Given a set of package versions, determine the set of compilers that can be -- | used for all packages. 
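+-- | The result is the intersection of the known-good compilers of every
+-- | dependency that has a fully-tested compiler set. An empty intersection is
+-- | reported as `Left`, grouping the dependencies by their mutually
+-- | incompatible compiler sets; `Left []` means no dependency had tested
+-- | compilers at all.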
-compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Maybe (NonEmptySet Version) +compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) compatibleCompilers allMetadata resolutions = do let - associated :: Array (NonEmptyArray Version) + associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do Metadata metadata <- Map.lookup name allMetadata published <- Map.lookup version metadata.published case published.compilers of Left _ -> Nothing - Right all -> Just all + Right compilers -> Just { name, version, compilers: compilers } - Array.uncons associated >>= case _ of - { head, tail: [] } -> - pure $ NonEmptySet.fromFoldable1 head - { head, tail } -> do - let foldFn prev = Set.intersection prev <<< Set.fromFoldable - NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head) tail + case Array.uncons associated of + Nothing -> + Left [] + Just { head, tail: [] } -> + Right $ NonEmptySet.fromFoldable1 head.compilers + Just { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers + case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of + -- An empty intersection means there are no shared compilers among the + -- resolved dependencies. + Nothing -> do + let + grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) + grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) + + collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers + collect vals = + { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) + -- We've already grouped by compilers, so those must all be equal + -- and we can take just the first value. + , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers + } + Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped + + Just set -> + Right set type DiscoverCompilers = { compilers :: Array Version @@ -951,9 +1015,14 @@ type DiscoverCompilers = , installed :: FilePath } +type FindAllCompilersResult = + { failed :: Map Version CompilerFailure + , succeeded :: Set Version + } + -- | Find all compilers that can compile the package source code and installed -- | resolutions from the given array of compilers. -findAllCompilers :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) { failed :: Map Version CompilerFailure, succeeded :: Set Version } +findAllCompilers :: forall r. 
DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) FindAllCompilersResult findAllCompilers { source, compilers, installed } = do checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target @@ -1121,7 +1190,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep Left error -> Except.throw $ "Could not publish your package to Pursuit because an error was encountered (cc: @purescript/packaging): " <> error Right _ -> - pure unit + FS.Extra.remove tmp type PursuitResolutions = Map RawPackageName { version :: Version, path :: FilePath } @@ -1273,3 +1342,33 @@ spagoToManifest config = do , includeFiles , excludeFiles } + +type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) + +_compilerCache :: Proxy "compilerCache" +_compilerCache = Proxy + +data CompilerCache :: (Type -> Type -> Type) -> Type -> Type +data CompilerCache c a = Compilation Manifest (Map PackageName Version) (NonEmptyArray Version) (c FindAllCompilersResult a) + +instance Functor2 c => Functor (CompilerCache c) where + map k (Compilation manifest resolutions compilers a) = Compilation manifest resolutions compilers (map2 k a) + +instance FsEncodable CompilerCache where + encodeFs = case _ of + Compilation (Manifest manifest) resolutions compilers next -> do + let + baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" + hashKey = do + let resolutions' = foldlWithIndex (\name prev version -> formatPackageVersion name version <> prev) "" resolutions + let compilers' = NonEmptyArray.foldMap1 Version.print compilers + unsafePerformEffect $ Sha256.hashString $ resolutions' <> compilers' + cacheKey = baseKey <> Sha256.print hashKey + + let + codec = CA.Record.object "FindAllCompilersResult" + { failed: versionMap compilerFailureCodec + , succeeded: CA.Common.set Version.codec + } + + Exists.mkExists $ Cache.AsJson cacheKey codec next diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ce046282d..89f6ea49f 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,10 +111,10 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do ] pure true Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." 
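One note on the `CompilerCache` introduced earlier in this patch: entries are keyed by package name and version plus a Sha256 over the formatted resolutions and candidate compilers, so any change to the build plan produces a distinct cache entry rather than a stale hit. A rough walkthrough of the key construction with hypothetical inputs:

    -- Publishing aff@7.0.0 with resolutions { prelude: 6.0.1 } and candidate
    -- compilers [0.15.0, 0.15.2] produces, per the FsEncodable instance:
    --   baseKey   = "Compilation__aff__7.0.0__"
    --   hashInput = "prelude@6.0.1" <> "0.15.0" <> "0.15.2"
    --   cacheKey  = baseKey <> Sha256.print (Sha256.hashString hashInput)
    -- Hashing keeps the key filesystem-safe no matter how many packages appear
    -- in the resolutions.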
diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 63dc1bcb6..c4b678436 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -101,6 +101,7 @@ main = launchAff_ $ do # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) -- Caching & logging # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) # Log.interpret (Log.handleTerminal Verbose) diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index 783e4d1dc..83ffcbfc1 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -19,6 +19,7 @@ import Node.Process as Process import Record as Record import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) import Registry.API.V1 as V1 +import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) @@ -216,7 +217,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response runServer env router' request = do @@ -295,6 +296,7 @@ runEffects env operation = Aff.attempt do # Source.interpret (Source.handle Source.Recent) # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) # Except.catch ( \msg -> do finishedAt <- nowUTC diff --git a/app/test/App/API.purs b/app/test/App/API.purs index b930a1600..49c41cba8 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -3,6 +3,7 @@ module Test.Registry.App.API (spec) where import Registry.App.Prelude import Data.Array.NonEmpty as NonEmptyArray +import Data.Codec.Argonaut as CA import Data.Foldable (traverse_) import Data.Map as Map import Data.Set as Set @@ -75,8 +76,8 @@ spec = do metadata <- Registry.readAllMetadataFromDisk $ Path.concat [ "app", "fixtures", "registry", "metadata" ] let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.12" ] case API.compatibleCompilers metadata (Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "6.0.1")) of - Nothing -> Except.throw $ "Got no compatible compilers, but expected " <> Utils.unsafeStringify (map Version.print expected) - Just set -> do + Left failed -> Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> printJson (CA.array API.groupedByCompilersCodec) failed + Right set -> do let actual = NonEmptySet.toUnfoldable set unless (actual == expected) do Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print 
actual) @@ -91,8 +92,8 @@ spec = do , Tuple "type-equality" "4.0.1" ] case API.compatibleCompilers metadata resolutions of - Nothing -> Except.throw $ "Got no compatible compilers, but expected " <> Utils.unsafeStringify (map Version.print expected) - Just set -> do + Left failed -> Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> printJson (CA.array API.groupedByCompilersCodec) failed + Right set -> do let actual = NonEmptySet.toUnfoldable set unless (actual == expected) do Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print actual) diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 0daf28264..4361670d1 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -11,6 +11,7 @@ module Registry.Test.Assert.Run import Registry.App.Prelude import Data.Array as Array +import Data.Exists as Exists import Data.Foldable (class Foldable) import Data.Foldable as Foldable import Data.FunctorWithIndex (mapWithIndex) @@ -24,6 +25,8 @@ import Effect.Ref as Ref import Node.FS.Aff as FS.Aff import Node.Path as Path import Registry.API.V1 (LogLevel) +import Registry.App.API (COMPILER_CACHE) +import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache @@ -85,6 +88,7 @@ type TEST_EFFECTS = + RESOURCE_ENV + GITHUB_CACHE + LEGACY_CACHE + + COMPILER_CACHE + COMMENT + LOG + EXCEPT String @@ -120,6 +124,7 @@ runTestEffects env operation = do # Env.runPacchettiBottiEnv { publicKey: "Unimplemented", privateKey: "Unimplemented" } # Env.runResourceEnv resourceEnv -- Caches + # runCompilerCacheMock # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects @@ -143,6 +148,22 @@ runLegacyCacheMemory = Cache.interpret Legacy.Manifest._legacyCache <<< Cache.ha runGitHubCacheMemory :: forall r a. CacheRef -> Run (GITHUB_CACHE + LOG + EFFECT + r) a -> Run (LOG + EFFECT + r) a runGitHubCacheMemory = Cache.interpret GitHub._githubCache <<< Cache.handleMemory +runCompilerCacheMock :: forall r a. Run (COMPILER_CACHE + LOG + r) a -> Run (LOG + r) a +runCompilerCacheMock = Cache.interpret API._compilerCache case _ of + Cache.Get key -> Exists.runExists getImpl (Cache.encodeFs key) + Cache.Put _ next -> pure next + Cache.Delete key -> Exists.runExists deleteImpl (Cache.encodeFs key) + where + getImpl :: forall x z. Cache.FsEncoding Cache.Reply x z -> Run _ x + getImpl = case _ of + Cache.AsBuffer _ (Cache.Reply reply) -> pure $ reply Nothing + Cache.AsJson _ _ (Cache.Reply reply) -> pure $ reply Nothing + + deleteImpl :: forall x z. Cache.FsEncoding Cache.Ignore x z -> Run _ x + deleteImpl = case _ of + Cache.AsBuffer _ (Cache.Ignore next) -> pure next + Cache.AsJson _ _ (Cache.Ignore next) -> pure next + handlePursuitMock :: forall r a. 
Ref (Map PackageName Metadata) -> Pursuit a -> Run (EFFECT + r) a handlePursuitMock metadataRef = case _ of Publish _json reply -> diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 2d29ca851..ce5741624 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -44,6 +44,7 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic +import Registry.App.API (GroupedByCompilers, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) @@ -177,6 +178,7 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit 1)) # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) @@ -267,13 +269,22 @@ runLegacyImport logs = do Log.debug "Determining a compiler version suitable for publishing..." allMetadata <- Registry.readAllMetadata possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of - Nothing | Map.isEmpty resolutions -> do - Log.debug "No resolutions, so all compilers could be compatible." + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." allCompilers <- PursVersions.pursVersions pure $ NonEmptySet.fromFoldable1 allCompilers - Nothing -> - Except.throw "No overlapping compilers found in dependencies; this should not happen!" - Just compilers -> do + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) + Except.throw $ Array.fold + [ "Dependencies admit no overlapping compiler versions so your package cannot be compiled:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + Right compilers -> do Log.debug $ "Compatible compilers for dependencies of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) pure compilers Log.debug "Fetching source and installing dependencies to test compilers" @@ -328,7 +339,7 @@ runLegacyImport logs = do , "----------" ] - void $ for (Array.take 500 manifests) publishLegacyPackage + void $ for (Array.take 1000 manifests) publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." 
let @@ -505,7 +516,11 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa pure $ Map.fromFoldable manifests -data PublishError = SolveFailed String | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) | PublishError String +data PublishError + = SolveFailed String + | NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure) + | UnsolvableDependencyCompilers (Array GroupedByCompilers) + | PublishError String derive instance Eq PublishError @@ -513,17 +528,20 @@ publishErrorCodec :: JsonCodec PublishError publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch { solveFailed: Right CA.string , noCompilersFound: Right compilerFailureMapCodec + , unsolvableDependencyCompilers: Right (CA.array API.groupedByCompilersCodec) , publishError: Right CA.string } where toVariant = case _ of SolveFailed error -> Variant.inj (Proxy :: _ "solveFailed") error NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed + UnsolvableDependencyCompilers group -> Variant.inj (Proxy :: _ "unsolvableDependencyCompilers") group PublishError error -> Variant.inj (Proxy :: _ "publishError") error fromVariant = Variant.match { solveFailed: SolveFailed , noCompilersFound: NoCompilersFound + , unsolvableDependencyCompilers: UnsolvableDependencyCompilers , publishError: PublishError } @@ -780,6 +798,8 @@ formatPublishError = case _ of { tag: "SolveFailed", value: Nothing, reason: error } NoCompilersFound versions -> { tag: "NoCompilersFound", value: Just (CA.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." } + UnsolvableDependencyCompilers failed -> + { tag: "UnsolvableDependencyCompilers", value: Just (CA.encode (CA.array API.groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } PublishError error -> { tag: "PublishError", value: Nothing, reason: error } @@ -934,6 +954,11 @@ fetchSpagoYaml address ref = do Log.debug "Successfully converted a spago.yaml into a purs.json manifest" pure $ Just manifest +type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) + +_importCache :: Proxy "importCache" +_importCache = Proxy + -- | A key type for the storage cache. Only supports packages identified by -- | their name and version. 
data ImportCache :: (Type -> Type -> Type) -> Type -> Type @@ -960,8 +985,3 @@ instance FsEncodable ImportCache where PublishFailure name version next -> do let codec = publishErrorCodec Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next - -type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) - -_importCache :: Proxy "importCache" -_importCache = Proxy diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 04b0b9954..67c8f0d6f 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -16,6 +16,7 @@ import Effect.Class.Console (log) import Effect.Class.Console as Console import Node.Path as Path import Node.Process as Process +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -156,6 +157,7 @@ main = launchAff_ do >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) + >>> Cache.interpret _compilerCache (Cache.handleFs cache) >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index a0ac67398..49e864176 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -28,6 +28,7 @@ import Node.Path as Path import Node.Process as Node.Process import Node.Process as Process import Parsing as Parsing +import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache @@ -148,6 +149,7 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit 1)) # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv diff --git a/spago.lock b/spago.lock index 2546a1f1c..4a96f68e7 100644 --- a/spago.lock +++ b/spago.lock @@ -153,7 +153,6 @@ workspace: - transformers - tuples test_dependencies: - - debug - exceptions - node-child-process - node-execa From 09d515ac8382f8dba54714f3d12bbc3587064f4f Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 15 Nov 2023 19:05:48 -0500 Subject: [PATCH 12/64] Consider compilers when solving --- app/src/App/API.purs | 141 +++++++++++++++--------------- app/src/App/CLI/Git.purs | 4 +- lib/src/Operation/Validation.purs | 13 ++- lib/src/Range.purs | 10 ++- lib/src/Solver.purs | 53 +++++++++++ scripts/src/LegacyImporter.purs | 98 +++++++++++++++------ 6 files changed, 209 insertions(+), 110 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 56f669a1d..d346db4b6 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -10,7 +10,6 @@ module Registry.App.API , compatibleCompilers , copyPackageSourceFiles , findAllCompilers - , findFirstCompiler , formatPursuitResolutions , groupedByCompilersCodec , installBuildPlan @@ -18,6 +17,7 @@ module Registry.App.API , packagingTeam , parseInstalledModulePath , publish + , readCompilerIndex , removeIgnoredTarballFiles , spagoToManifest ) where @@ -93,7 +93,6 @@ 
import Registry.Foreign.FastGlob as FastGlob import Registry.Foreign.Octokit (IssueNumber(..), Team) import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tmp as Tmp -import Registry.Internal.Codec (versionMap) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location @@ -110,6 +109,7 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 +import Registry.Solver (SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) @@ -536,7 +536,7 @@ publish payload = do [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions + verifiedResolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions compilationResult <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } case compilationResult of Left error -> do @@ -598,7 +598,7 @@ publish payload = do Log.debug "Pruning unused dependencies from legacy package manifest..." Log.debug "Solving manifest to get all transitive dependencies." - resolutions <- verifyResolutions (Manifest manifest) payload.resolutions + resolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions Log.debug "Installing dependencies." tmpDepsDir <- Tmp.mkTmpDir @@ -699,7 +699,7 @@ type PublishRegistry = publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do Log.debug "Verifying the package build plan..." - verifiedResolutions <- verifyResolutions (Manifest manifest) payload.resolutions + verifiedResolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions Log.debug "Verifying that the package dependencies are all registered..." unregisteredRef <- Run.liftEffect $ Ref.new Map.empty @@ -824,20 +824,15 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif let tryCompilers = Array.fromFoldable $ NonEmptySet.filter (notEq payload.compiler) compatible { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromArray tryCompilers of Nothing -> pure { failed: Map.empty, succeeded: Set.empty } - Just try -> Cache.get _compilerCache (Compilation (Manifest manifest) verifiedResolutions try) >>= case _ of - Nothing -> do - intermediate <- findAllCompilers - { source: packageDirectory - , installed: installedResolutions - , compilers: tryCompilers - } - -- We need to insert the payload compiler, which we previously omitted - -- from the list of compilers to try for efficiency's sake. - let result = intermediate { succeeded = Set.insert payload.compiler intermediate.succeeded } - Cache.put _compilerCache (Compilation (Manifest manifest) verifiedResolutions try) result - pure result - Just cached -> - pure cached + Just try -> do + intermediate <- findAllCompilers + { source: packageDirectory + , manifest: Manifest manifest + , compilers: try + } + -- We need to insert the payload compiler, which we previously omitted + -- from the list of compilers to try for efficiency's sake. 
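+      -- Note: the payload compiler already compiled this package during the
+      -- earlier compilation step, so re-testing it here would only repeat
+      -- that work.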
+ pure $ intermediate { succeeded = Set.insert payload.compiler intermediate.succeeded } unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) @@ -859,12 +854,12 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the -- | manifest. If not, we solve their manifest to produce a build plan. -verifyResolutions :: forall r. Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + EXCEPT String + r) (Map PackageName Version) -verifyResolutions manifest resolutions = do +verifyResolutions :: forall r. Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) +verifyResolutions compiler manifest resolutions = do Log.debug "Check the submitted build plan matches the manifest" - manifestIndex <- Registry.readAllManifests + compilerIndex <- readCompilerIndex case resolutions of - Nothing -> case Operation.Validation.validateDependenciesSolve manifest manifestIndex of + Nothing -> case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of Left errors -> do let printedError = String.joinWith "\n" @@ -1009,54 +1004,54 @@ compatibleCompilers allMetadata resolutions = do Just set -> Right set -type DiscoverCompilers = - { compilers :: Array Version - , source :: FilePath - , installed :: FilePath - } - type FindAllCompilersResult = - { failed :: Map Version CompilerFailure + { failed :: Map Version (Either SolverErrors CompilerFailure) , succeeded :: Set Version } -- | Find all compilers that can compile the package source code and installed -- | resolutions from the given array of compilers. -findAllCompilers :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) FindAllCompilersResult -findAllCompilers { source, compilers, installed } = do +findAllCompilers + :: forall r + . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } + -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult +findAllCompilers { source, manifest, compilers } = do + compilerIndex <- readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target - workdir <- Tmp.mkTmpDir - result <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } - , version: Just target - , cwd: Just workdir - } - FS.Extra.remove workdir - pure $ bimap (Tuple target) (const target) result - let results = partitionEithers checkedCompilers - pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } + case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of + Left solverErrors -> pure $ Left $ Tuple target (Left solverErrors) + Right (Tuple mbCompiler resolutions) -> do + Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions + case mbCompiler of + Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" 
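+          -- This case should be unreachable: the solve above pinned purs to
+          -- `Range.exact target`, so the only compiler the solver can select
+          -- is `target` itself.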
+ Just selected | selected /= target -> Except.throw $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print selected + , ") that differs from the target compiler (" + , Version.print target + , ")." + ] + Just _ -> pure unit + Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of + Nothing -> do + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan resolutions installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } + pure $ bimap (Tuple target <<< Right) (const target) result + Just { result } -> + pure $ bimap (Tuple target <<< Right) (const target) result --- | Find the first compiler that can compile the package source code and --- | installed resolutions from the given array of compilers. Begins with the --- | latest compiler and works backwards to older compilers. -findFirstCompiler :: forall r. DiscoverCompilers -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) -findFirstCompiler { source, compilers, installed } = do - search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do - Log.debug $ "Trying compiler " <> Version.print target - workdir <- Tmp.mkTmpDir - result <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } - , version: Just target - , cwd: Just workdir - } - FS.Extra.remove workdir - case result of - Left error -> pure $ Tuple target error - Right _ -> Except.throw target - case search of - Left worked -> pure $ Right worked - Right others -> pure $ Left $ Map.fromFoldable others + let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers + pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } printCompilerFailure :: Version -> CompilerFailure -> String printCompilerFailure compiler = case _ of @@ -1343,32 +1338,38 @@ spagoToManifest config = do , excludeFiles } +readCompilerIndex :: forall r. 
Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) _compilerCache :: Proxy "compilerCache" _compilerCache = Proxy data CompilerCache :: (Type -> Type -> Type) -> Type -> Type -data CompilerCache c a = Compilation Manifest (Map PackageName Version) (NonEmptyArray Version) (c FindAllCompilersResult a) +data CompilerCache c a = Compilation Manifest (Map PackageName Version) Version (c { target :: Version, result :: Either CompilerFailure Unit } a) instance Functor2 c => Functor (CompilerCache c) where - map k (Compilation manifest resolutions compilers a) = Compilation manifest resolutions compilers (map2 k a) + map k (Compilation manifest resolutions compiler a) = Compilation manifest resolutions compiler (map2 k a) instance FsEncodable CompilerCache where encodeFs = case _ of - Compilation (Manifest manifest) resolutions compilers next -> do + Compilation (Manifest manifest) resolutions compiler next -> do let - baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" + baseKey = "Compilation__" <> PackageName.print manifest.name <> "__" <> Version.print manifest.version <> "__" <> Version.print compiler <> "__" hashKey = do let resolutions' = foldlWithIndex (\name prev version -> formatPackageVersion name version <> prev) "" resolutions - let compilers' = NonEmptyArray.foldMap1 Version.print compilers - unsafePerformEffect $ Sha256.hashString $ resolutions' <> compilers' + unsafePerformEffect $ Sha256.hashString resolutions' cacheKey = baseKey <> Sha256.print hashKey let codec = CA.Record.object "FindAllCompilersResult" - { failed: versionMap compilerFailureCodec - , succeeded: CA.Common.set Version.codec + { target: Version.codec + , result: CA.Common.either compilerFailureCodec CA.null } Exists.mkExists $ Cache.AsJson cacheKey codec next diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index 89f6ea49f..ce4e05c67 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -110,10 +110,10 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." 
] pure true - Just files -> do + Just _files -> do -- Log.debug $ Array.fold -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- , NonEmptyArray.foldMap1 (append "\n - ") _files -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs index 0dc31e283..7e1fad8a2 100644 --- a/lib/src/Operation/Validation.purs +++ b/lib/src/Operation/Validation.purs @@ -8,7 +8,6 @@ import Data.Array.NonEmpty as NEA import Data.DateTime (DateTime) import Data.DateTime as DateTime import Data.Either (Either(..)) -import Data.List.NonEmpty (NonEmptyList) import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..), maybe) @@ -20,7 +19,7 @@ import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.Time.Duration (Hours(..)) import Data.Traversable (traverse) -import Data.Tuple (Tuple(..), uncurry) +import Data.Tuple (Tuple(..), snd, uncurry) import Data.Tuple.Nested (type (/\), (/\)) import Effect.Aff as Aff import Effect.Aff.Class (class MonadAff, liftAff) @@ -32,14 +31,13 @@ import PureScript.CST.Errors as CST.Errors import PureScript.CST.Types as CST.Types import Registry.Location (Location) import Registry.Manifest (Manifest(..)) -import Registry.ManifestIndex (ManifestIndex) -import Registry.ManifestIndex as ManifestIndex import Registry.Metadata (Metadata(..), PublishedMetadata, UnpublishedMetadata) import Registry.Operation (PublishData) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) import Registry.Range as Range +import Registry.Solver (CompilerIndex) import Registry.Solver as Solver import Registry.Version (Version) @@ -72,10 +70,9 @@ isNotUnpublished (Manifest { version }) (Metadata { unpublished }) = Map.lookup version unpublished -- | Verifies that the manifest dependencies are solvable by the registry solver. -validateDependenciesSolve :: Manifest -> ManifestIndex -> Either (NonEmptyList Solver.SolverError) (Map PackageName Version) -validateDependenciesSolve manifest manifestIndex = do - let getDependencies = _.dependencies <<< un Manifest - Solver.solve (map (map getDependencies) (ManifestIndex.toMap manifestIndex)) (getDependencies manifest) +validateDependenciesSolve :: Version -> Manifest -> CompilerIndex -> Either Solver.SolverErrors (Map PackageName Version) +validateDependenciesSolve compiler (Manifest manifest) compilerIndex = + map snd $ Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies -- | Verifies that all dependencies in the manifest are present in the build -- | plan, and the version listed in the build plan is within the range provided diff --git a/lib/src/Range.purs b/lib/src/Range.purs index 11e50b74a..ac5d38298 100644 --- a/lib/src/Range.purs +++ b/lib/src/Range.purs @@ -5,15 +5,16 @@ module Registry.Range ( Range , caret , codec + , exact , greaterThanOrEq , includes , intersect , lessThan + , mk , parse , parser , print , union - , mk ) where import Prelude @@ -138,6 +139,11 @@ mk lhs rhs | lhs < rhs = Just (Range { lhs, rhs }) mk _ _ = Nothing -- | Produce a "caret range" from a version. --- | I.e. "^0.15.6" ==> ">=0.15.6 > 0.16.0" +-- | i.e. "^0.15.6" ==> ">=0.15.6 > 0.16.0" caret :: Version -> Range caret v = Range { lhs: v, rhs: Version.bumpHighest v } + +-- | Produce an exact range from a version. +-- | i.e. 
"0.15.6" ==> ">=0.15.6 <0.15.7" +exact :: Version -> Range +exact v = Range { lhs: v, rhs: Version.bumpPatch v } diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index fcb6f6edb..fad71e937 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -5,9 +5,12 @@ import Prelude import Control.Alternative (guard) import Data.Array as Array +import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Array.NonEmpty as NonEmptyArray import Data.Bifunctor (lmap) import Data.Either (Either(..)) +import Data.Either as Either import Data.Foldable (fold, foldMap, intercalate) import Data.FoldableWithIndex (anyWithIndex, foldMapWithIndex, foldlWithIndex, forWithIndex_) import Data.Functor.App (App(..)) @@ -20,6 +23,7 @@ import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) import Data.Semigroup.Foldable (intercalateMap) +import Data.Semigroup.Foldable as Foldable1 import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -27,6 +31,11 @@ import Data.Set.NonEmpty as NES import Data.Traversable (for, sequence, traverse) import Data.TraversableWithIndex (forWithIndex, traverseWithIndex) import Data.Tuple (Tuple(..), fst, snd) +import Partial.Unsafe as Partial +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex (ManifestIndex) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata(..)) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) @@ -39,6 +48,50 @@ import Safe.Coerce (coerce) -- Public API -------------------------------------------------------------------------------- +-- | A 'DependencyIndex' enriched to include the compiler versions supported by +-- | each package version as a dependency. +newtype CompilerIndex = CompilerIndex DependencyIndex + +derive instance Newtype CompilerIndex _ + +-- | Associate the compiler versions supported by each package version by +-- | inserting them as a range in the version's dependencies. +buildCompilerIndex :: NonEmptyArray Version -> ManifestIndex -> Map PackageName Metadata -> CompilerIndex +buildCompilerIndex pursCompilers index metadata = CompilerIndex do + let + purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + + getDependencies (Manifest manifest) = fromMaybe manifest.dependencies do + Metadata { published } <- Map.lookup manifest.name metadata + { compilers: eitherCompilers } <- Map.lookup manifest.version published + -- If the dependency hasn't yet had all compilers computed for it, + -- then we don't add it to the dependencies to avoid over- + -- constraining the solver. + compilers <- Either.hush eitherCompilers + -- Otherwise, we construct a maximal range for the compilers the + -- indicated package version supports. 
+ let + min = Foldable1.minimum compilers + max = Version.bumpPatch $ Foldable1.maximum compilers + pursRange <- Range.mk min max + pure $ Map.insert purs pursRange manifest.dependencies + + newPurs version = Map.singleton purs (Map.singleton version Map.empty) + pursVersions = Array.foldl (\acc compiler -> Map.unionWith Map.union (newPurs compiler) acc) Map.empty (NonEmptyArray.toArray pursCompilers) + dependencyIndex = map (map getDependencies) (ManifestIndex.toMap index) + + Map.unionWith Map.union pursVersions dependencyIndex + +-- | Solve the given dependencies using a dependency index that includes compiler +-- | versions, such that the solution prunes results that would fall outside +-- | a compiler range accepted by all dependencies. +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler pursRange (CompilerIndex index) required = do + let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") + results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } + let pursVersion = Map.lookup purs results + pure $ Tuple pursVersion $ Map.delete purs results + -- | Data from the registry index, listing dependencies for each version of -- | each package type DependencyIndex = Map PackageName (Map Version (Map PackageName Range)) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index ce5741624..b5d007271 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -48,6 +48,7 @@ import Registry.App.API (GroupedByCompilers, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) +import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache @@ -60,6 +61,7 @@ import Registry.App.Effect.Log as Log import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.LenientVersion (LenientVersion) import Registry.App.Legacy.LenientVersion as LenientVersion @@ -79,7 +81,7 @@ import Registry.PackageName as PackageName import Registry.Range as Range import Registry.Solver as Solver import Registry.Version as Version -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT, Except) import Run.Except as Except @@ -247,6 +249,13 @@ runLegacyImport logs = do Nothing -> pure $ not $ hasMetadata allMetadata name version Just _ -> pure false + allCompilers <- PursVersions.pursVersions + allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (NonEmptyArray.last allCompilers) of + Nothing -> Except.throw $ "Failed to construct a compiler range from " <> Version.print (NonEmptyArray.head allCompilers) <> " and " <> Version.print (NonEmptyArray.last allCompilers) + Just range -> do + Log.info $ "All available compilers range: " <> Range.print range + pure range + let publishLegacyPackage :: Manifest -> Run _ Unit publishLegacyPackage (Manifest manifest) = do @@ -255,38 +264,44 @@ runLegacyImport logs = do RawVersion ref <- 
case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of Nothing -> Except.throw $ "Unable to recover package ref for " <> formatted Just ref -> pure ref + + Log.debug "Building dependency index with compiler versions..." + compilerIndex <- API.readCompilerIndex + Log.debug $ "Solving dependencies for " <> formatted - index <- Registry.readAllManifests - Log.debug $ "Read all manifests: " <> String.joinWith ", " (map (\(Manifest m) -> formatPackageVersion m.name m.version) $ ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges index) - let solverIndex = map (map (_.dependencies <<< un Manifest)) $ ManifestIndex.toMap index - case Solver.solve solverIndex manifest.dependencies of + case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of Left unsolvable -> do let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors Cache.put _importCache (PublishFailure manifest.name manifest.version) (SolveFailed $ String.joinWith " " errors) - Right resolutions -> do + Right (Tuple mbCompiler resolutions) -> do Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - Log.debug "Determining a compiler version suitable for publishing..." - allMetadata <- Registry.readAllMetadata - possibleCompilers <- case API.compatibleCompilers allMetadata resolutions of - Left [] -> do - Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." - allCompilers <- PursVersions.pursVersions - pure $ NonEmptySet.fromFoldable1 allCompilers - Left errors -> do - let - printError { packages, compilers } = do - let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages - let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers - key <> " support compilers " <> val - Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) - Except.throw $ Array.fold - [ "Dependencies admit no overlapping compiler versions so your package cannot be compiled:\n" - , Array.foldMap (append "\n - " <<< printError) errors - ] - Right compilers -> do - Log.debug $ "Compatible compilers for dependencies of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) - pure compilers + possibleCompilers <- case mbCompiler of + Just one -> do + Log.info $ "Solver produced a compiler version suitable for publishing: " <> Version.print one + pure $ NonEmptySet.singleton one + Nothing -> do + Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." + allMetadata <- Registry.readAllMetadata + case API.compatibleCompilers allMetadata resolutions of + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." 
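+                -- With no dependency constraints there is nothing to narrow
+                -- the candidate set; the compile step below determines which
+                -- compilers actually work.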
+ pure $ NonEmptySet.fromFoldable1 allCompilers + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) + Except.throw $ Array.fold + [ "Resolutions admit no overlapping compiler versions so your package cannot be compiled:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + Right compilers -> do + Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + Log.debug "Fetching source and installing dependencies to test compilers" tmp <- Tmp.mkTmpDir { path } <- Source.fetch tmp manifest.location ref @@ -297,7 +312,7 @@ runLegacyImport logs = do API.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Finding first compiler that can build the package..." - selected <- API.findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } + selected <- findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } FS.Extra.remove tmp case selected of Left failures -> do @@ -954,6 +969,33 @@ fetchSpagoYaml address ref = do Log.debug "Successfully converted a spago.yaml into a purs.json manifest" pure $ Just manifest +-- | Find the first compiler that can compile the package source code and +-- | installed resolutions from the given array of compilers. Begins with the +-- | latest compiler and works backwards to older compilers. +findFirstCompiler + :: forall r + . 
{ compilers :: Array Version + , source :: FilePath + , installed :: FilePath + } + -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) +findFirstCompiler { source, compilers, installed } = do + search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do + Log.debug $ "Trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left error -> pure $ Tuple target error + Right _ -> Except.throw target + case search of + Left worked -> pure $ Right worked + Right others -> pure $ Left $ Map.fromFoldable others + type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) _importCache :: Proxy "importCache" From 98ef8924e13c54a2eb37d37457bc7eaca5b2102e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Thu, 16 Nov 2023 12:03:29 -0500 Subject: [PATCH 13/64] Rely on solver per-compiler instead of looking at metadata for compatible compilers from deps --- app/fixtures/registry/metadata/prelude.json | 2 +- app/src/App/API.purs | 92 ++------------------- app/test/App/API.purs | 32 +------ scripts/src/LegacyImporter.purs | 89 ++++++++++++++++---- 4 files changed, 78 insertions(+), 137 deletions(-) diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index cab65f7b1..4421ec79b 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,7 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, - "compilers": ["0.15.10", "0.15.12"], + "compilers": ["0.15.10", "0.15.11", "0.15.12"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index d346db4b6..889dbdb31 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -2,16 +2,13 @@ module Registry.App.API ( AuthenticatedEffects , COMPILER_CACHE , CompilerCache - , GroupedByCompilers , PackageSetUpdateEffects , PublishEffects , _compilerCache , authenticated - , compatibleCompilers , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , groupedByCompilersCodec , installBuildPlan , packageSetUpdate , packagingTeam @@ -34,12 +31,10 @@ import Data.DateTime (DateTime) import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) -import Data.Function (on) import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format import Data.Set as Set -import Data.Set.NonEmpty (NonEmptySet) import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits @@ -797,42 +792,16 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif , "). If you want to publish documentation, please try again with a later compiler." ] - allMetadata <- Registry.readAllMetadata - compatible <- case compatibleCompilers allMetadata verifiedResolutions of - Left [] -> do - Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." 
- allCompilers <- PursVersions.pursVersions - pure $ NonEmptySet.fromFoldable1 allCompilers - Left errors -> do - let - printError { packages, compilers } = do - let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages - let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers - key <> " support compilers " <> val - Except.throw $ Array.fold - [ "Dependencies admit no overlapping compiler versions, so your package cannot be compiled:\n" - , Array.foldMap (append "\n - " <<< printError) errors - ] - Right result -> pure result - - Comment.comment $ Array.fold - [ "The following compilers are compatible with this package according to its dependency resolutions: " - , String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") $ NonEmptySet.toUnfoldable compatible) - , ". Computing the list of compilers usable with your package version..." - ] - - let tryCompilers = Array.fromFoldable $ NonEmptySet.filter (notEq payload.compiler) compatible - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromArray tryCompilers of - Nothing -> pure { failed: Map.empty, succeeded: Set.empty } + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.filter (notEq payload.compiler) allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: Set.singleton payload.compiler } Just try -> do - intermediate <- findAllCompilers + found <- findAllCompilers { source: packageDirectory , manifest: Manifest manifest , compilers: try } - -- We need to insert the payload compiler, which we previously omitted - -- from the list of compilers to try for efficiency's sake. - pure $ intermediate { succeeded = Set.insert payload.compiler intermediate.succeeded } + pure $ found { succeeded = Set.insert payload.compiler found.succeeded } unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) @@ -953,57 +922,6 @@ compilePackage { source, compiler, resolutions } = Except.runExcept do Left err -> Except.throw $ printCompilerFailure compiler err Right _ -> pure tmp -type GroupedByCompilers = - { packages :: Map PackageName Version - , compilers :: NonEmptySet Version - } - -groupedByCompilersCodec :: JsonCodec GroupedByCompilers -groupedByCompilersCodec = CA.Record.object "GroupedByCompilers" - { compilers: CA.Common.nonEmptySet Version.codec - , packages: Internal.Codec.packageMap Version.codec - } - --- | Given a set of package versions, determine the set of compilers that can be --- | used for all packages. 
-compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) -compatibleCompilers allMetadata resolutions = do - let - associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } - associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do - Metadata metadata <- Map.lookup name allMetadata - published <- Map.lookup version metadata.published - case published.compilers of - Left _ -> Nothing - Right compilers -> Just { name, version, compilers: compilers } - - case Array.uncons associated of - Nothing -> - Left [] - Just { head, tail: [] } -> - Right $ NonEmptySet.fromFoldable1 head.compilers - Just { head, tail } -> do - let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers - case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of - -- An empty intersection means there are no shared compilers among the - -- resolved dependencies. - Nothing -> do - let - grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) - grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) - - collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers - collect vals = - { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) - -- We've already grouped by compilers, so those must all be equal - -- and we can take just the first value. - , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers - } - Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped - - Just set -> - Right set - type FindAllCompilersResult = { failed :: Map Version (Either SolverErrors CompilerFailure) , succeeded :: Set Version diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 49c41cba8..9b0dada47 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -3,11 +3,9 @@ module Test.Registry.App.API (spec) where import Registry.App.Prelude import Data.Array.NonEmpty as NonEmptyArray -import Data.Codec.Argonaut as CA import Data.Foldable (traverse_) import Data.Map as Map import Data.Set as Set -import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.NonEmpty as NonEmptyString import Effect.Aff as Aff @@ -70,34 +68,6 @@ spec = do Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") FS.Extra.remove tmp - Spec.describe "Finds compatible compilers from dependencies" do - Spec.it "Finds intersect of single package" do - Assert.Run.runBaseEffects do - metadata <- Registry.readAllMetadataFromDisk $ Path.concat [ "app", "fixtures", "registry", "metadata" ] - let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.12" ] - case API.compatibleCompilers metadata (Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeVersion "6.0.1")) of - Left failed -> Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> printJson (CA.array API.groupedByCompilersCodec) failed - Right set -> do - let actual = NonEmptySet.toUnfoldable set - unless (actual == expected) do - Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print actual) - - Spec.it "Finds intersect of multiple packages" do - Assert.Run.runBaseEffects do - metadata <- Registry.readAllMetadataFromDisk $ 
Path.concat [ "app", "fixtures", "registry", "metadata" ] - let - expected = map Utils.unsafeVersion [ "0.15.10" ] - resolutions = Map.fromFoldable $ map (bimap Utils.unsafePackageName Utils.unsafeVersion) - [ Tuple "prelude" "6.0.1" - , Tuple "type-equality" "4.0.1" - ] - case API.compatibleCompilers metadata resolutions of - Left failed -> Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> printJson (CA.array API.groupedByCompilersCodec) failed - Right set -> do - let actual = NonEmptySet.toUnfoldable set - unless (actual == expected) do - Except.throw $ "Expected " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print actual) - Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do logs <- liftEffect (Ref.new []) @@ -159,7 +129,7 @@ spec = do Left one -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix but unfinished single version: " <> Version.print one Right many -> do let many' = NonEmptyArray.toArray many - let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.12" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11", "0.15.12" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index b5d007271..238e550f5 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -31,6 +31,7 @@ import Data.Map as Map import Data.Ordering (invert) import Data.Profunctor as Profunctor import Data.Set as Set +import Data.Set.NonEmpty (NonEmptySet) import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits @@ -44,7 +45,6 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic -import Registry.App.API (GroupedByCompilers, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) @@ -180,7 +180,7 @@ main = launchAff_ do # runAppEffects # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs cache) + # Cache.interpret API._compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit 1)) # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) @@ -227,13 +227,15 @@ runLegacyImport logs = do Run.liftAff $ writeVersionFailures importedIndex.failedVersions let metadataPackage = unsafeFromRight (PackageName.parse "metadata") - Registry.readMetadata metadataPackage >>= case _ of - Nothing -> do - Log.info "Writing empty metadata file for the 'metadata' package" - let location = GitHub { owner: "purescript", repo: "purescript-metadata", subdir: Nothing } - let entry = Metadata { location, owners: Nothing, published: 
Map.empty, unpublished: Map.empty } - Registry.writeMetadata metadataPackage entry - Just _ -> pure unit + let pursPackage = unsafeFromRight (PackageName.parse "purs") + for_ [ metadataPackage, pursPackage ] \package -> + Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for " <> PackageName.print package + let location = GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print package, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> pure unit Log.info "Ready for upload!" Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex @@ -274,16 +276,16 @@ runLegacyImport logs = do let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors Cache.put _importCache (PublishFailure manifest.name manifest.version) (SolveFailed $ String.joinWith " " errors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple _ resolutions) -> do Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - possibleCompilers <- case mbCompiler of - Just one -> do - Log.info $ "Solver produced a compiler version suitable for publishing: " <> Version.print one - pure $ NonEmptySet.singleton one - Nothing -> do + possibleCompilers <- + if Map.isEmpty manifest.dependencies then do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + else do Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." allMetadata <- Registry.readAllMetadata - case API.compatibleCompilers allMetadata resolutions of + case compatibleCompilers allMetadata resolutions of Left [] -> do Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." pure $ NonEmptySet.fromFoldable1 allCompilers @@ -543,7 +545,7 @@ publishErrorCodec :: JsonCodec PublishError publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch { solveFailed: Right CA.string , noCompilersFound: Right compilerFailureMapCodec - , unsolvableDependencyCompilers: Right (CA.array API.groupedByCompilersCodec) + , unsolvableDependencyCompilers: Right (CA.array groupedByCompilersCodec) , publishError: Right CA.string } where @@ -814,7 +816,7 @@ formatPublishError = case _ of NoCompilersFound versions -> { tag: "NoCompilersFound", value: Just (CA.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." 
} UnsolvableDependencyCompilers failed -> - { tag: "UnsolvableDependencyCompilers", value: Just (CA.encode (CA.array API.groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } + { tag: "UnsolvableDependencyCompilers", value: Just (CA.encode (CA.array groupedByCompilersCodec) failed), reason: "Resolved dependencies cannot compile together" } PublishError error -> { tag: "PublishError", value: Nothing, reason: error } @@ -996,6 +998,57 @@ findFirstCompiler { source, compilers, installed } = do Left worked -> pure $ Right worked Right others -> pure $ Left $ Map.fromFoldable others +type GroupedByCompilers = + { packages :: Map PackageName Version + , compilers :: NonEmptySet Version + } + +groupedByCompilersCodec :: JsonCodec GroupedByCompilers +groupedByCompilersCodec = CA.Record.object "GroupedByCompilers" + { compilers: CA.Common.nonEmptySet Version.codec + , packages: Internal.Codec.packageMap Version.codec + } + +-- | Given a set of package versions, determine the set of compilers that can be +-- | used for all packages. +compatibleCompilers :: Map PackageName Metadata -> Map PackageName Version -> Either (Array GroupedByCompilers) (NonEmptySet Version) +compatibleCompilers allMetadata resolutions = do + let + associated :: Array { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } + associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do + Metadata metadata <- Map.lookup name allMetadata + published <- Map.lookup version metadata.published + case published.compilers of + Left _ -> Nothing + Right compilers -> Just { name, version, compilers: compilers } + + case Array.uncons associated of + Nothing -> + Left [] + Just { head, tail: [] } -> + Right $ NonEmptySet.fromFoldable1 head.compilers + Just { head, tail } -> do + let foldFn prev = Set.intersection prev <<< Set.fromFoldable <<< _.compilers + case NonEmptySet.fromFoldable $ Array.foldl foldFn (Set.fromFoldable head.compilers) tail of + -- An empty intersection means there are no shared compilers among the + -- resolved dependencies. + Nothing -> do + let + grouped :: Array (NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version }) + grouped = Array.groupAllBy (compare `on` _.compilers) (Array.cons head tail) + + collect :: NonEmptyArray { name :: PackageName, version :: Version, compilers :: NonEmptyArray Version } -> GroupedByCompilers + collect vals = + { packages: Map.fromFoldable (map (\{ name, version } -> Tuple name version) vals) + -- We've already grouped by compilers, so those must all be equal + -- and we can take just the first value. 
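+          -- (NonEmptyArray.head is safe here: Array.groupAllBy only returns
+          -- non-empty groups.)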
+ , compilers: NonEmptySet.fromFoldable1 (NonEmptyArray.head vals).compilers + } + Left $ Array.foldl (\prev -> Array.snoc prev <<< collect) [] grouped + + Just set -> + Right set + type IMPORT_CACHE r = (importCache :: Cache ImportCache | r) _importCache :: Proxy "importCache" From ae621daa46d6a308825ab0d3a213ae3e3100e7c1 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Thu, 16 Nov 2023 20:54:28 -0500 Subject: [PATCH 14/64] Adjust unused dependency pruning to replace used transitive deps --- app/src/App/API.purs | 325 +++++++++++++++++++++----------- lib/src/PursGraph.purs | 12 +- lib/test/Registry/Solver.purs | 30 ++- scripts/src/LegacyImporter.purs | 43 +++-- 4 files changed, 283 insertions(+), 127 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 889dbdb31..c8fda387c 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1,7 +1,7 @@ module Registry.App.API ( AuthenticatedEffects , COMPILER_CACHE - , CompilerCache + , CompilerCache(..) , PackageSetUpdateEffects , PublishEffects , _compilerCache @@ -31,9 +31,11 @@ import Data.DateTime (DateTime) import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.Map (SemigroupMap(..)) import Data.Map as Map import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format +import Data.Semigroup.Foldable as Foldable1 import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet import Data.String as String @@ -104,13 +106,14 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 -import Registry.Solver (SolverErrors) +import Registry.Solver (CompilerIndex(..), SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Safe.Coerce as Safe.Coerce import Spago.Core.Config as Spago.Config import Spago.FS as Spago.FS @@ -531,7 +534,8 @@ publish payload = do [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] - verifiedResolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions + compilerIndex <- readCompilerIndex + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest manifest) payload.resolutions compilationResult <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } case compilationResult of Left error -> do @@ -591,92 +595,24 @@ publish payload = do -- manifest does not contain unused dependencies before writing it. else do Log.debug "Pruning unused dependencies from legacy package manifest..." + compilerIndex <- readCompilerIndex + Tuple fixedManifest fixedResolutions <- fixManifestDependencies + { source: packageDirectory + , compiler: payload.compiler + , manifest: Manifest manifest + , index: compilerIndex + , resolutions: payload.resolutions + } - Log.debug "Solving manifest to get all transitive dependencies." - resolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions - - Log.debug "Installing dependencies." - tmpDepsDir <- Tmp.mkTmpDir - installBuildPlan resolutions tmpDepsDir - - Log.debug "Discovering used dependencies from source." 
- let srcGlobs = Path.concat [ packageDirectory, "src", "**", "*.purs" ] - let depGlobs = Path.concat [ tmpDepsDir, "*", "src", "**", "*.purs" ] - let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } - -- We need to use the minimum compiler version that supports 'purs graph' - let minGraphCompiler = unsafeFromRight (Version.parse "0.13.8") - let callCompilerVersion = if payload.compiler >= minGraphCompiler then payload.compiler else minGraphCompiler - Run.liftAff (Purs.callCompiler { command, version: Just callCompilerVersion, cwd: Nothing }) >>= case _ of - Left err -> do - let prefix = "Failed to discover unused dependencies because purs graph failed: " - Except.throw $ prefix <> case err of - UnknownError str -> str - CompilationError errs -> "\n" <> Purs.printCompilerErrors errs - MissingCompiler -> "missing compiler " <> Version.print payload.compiler - Right output -> case Argonaut.Parser.jsonParser output of - Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr - Right json -> case CA.decode PursGraph.pursGraphCodec json of - Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CA.printJsonDecodeError decodeErr - Right graph -> do - Log.debug "Got a valid graph of source and dependencies. Removing install dir and associating discovered modules with their packages..." - FS.Extra.remove tmpDepsDir - - let - -- We need access to a graph that _doesn't_ include the package - -- source, because we only care about dependencies of the package. - noSrcGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmpDepsDir, path: _ } - - case PursGraph.associateModules pathParser noSrcGraph of - Left errs -> - Except.throw $ String.joinWith "\n" - [ "Failed to associate modules with packages while finding unused dependencies:" - , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } -> - " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n" - ] - Right modulePackageMap -> do - Log.debug "Associated modules with their package names. Finding all modules used in package source..." - -- The modules used in the package source code are any that have - -- a path beginning with the package source directory. We only - -- care about dependents of these modules. - let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern packageDirectory) <<< _.path) graph - - Log.debug "Found all modules used in package source. Finding all modules used by those modules..." - let allReachableModules = PursGraph.allDependenciesOf sourceModules graph - - -- Then we can associate each reachable module with its package - -- name to get the full set of used package names. - let allUsedPackages = Set.mapMaybe (flip Map.lookup modulePackageMap) allReachableModules - - -- Finally, we can use this to find the unused dependencies. - Log.debug "Found all packages reachable by the project source code. Determining unused dependencies..." - case Operation.Validation.getUnusedDependencies (Manifest manifest) resolutions allUsedPackages of - Nothing -> do - Log.debug "No unused dependencies! This manifest is good to go." 
- Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } - Just isUnused -> do - let printed = String.joinWith ", " (PackageName.print <$> NonEmptySet.toUnfoldable isUnused) - Log.debug $ "Found unused dependencies: " <> printed - Comment.comment $ "Generated legacy manifest contains unused dependencies which will be removed: " <> printed - let verified = manifest { dependencies = Map.filterKeys (not <<< flip NonEmptySet.member isUnused) manifest.dependencies } - Log.debug "Writing updated, pruned manifest." - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest verified) - publishRegistry - { manifest: Manifest verified - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + Run.liftAff $ writeJsonFile Manifest.codec packagePursJson fixedManifest + publishRegistry + { manifest: fixedManifest + , metadata: Metadata metadata + , payload: payload { resolutions = Just fixedResolutions } + , publishedTime + , tmp + , packageDirectory + } type PublishRegistry = { manifest :: Manifest @@ -694,7 +630,8 @@ type PublishRegistry = publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do Log.debug "Verifying the package build plan..." - verifiedResolutions <- verifyResolutions payload.compiler (Manifest manifest) payload.resolutions + compilerIndex <- readCompilerIndex + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest manifest) payload.resolutions Log.debug "Verifying that the package dependencies are all registered..." unregisteredRef <- Run.liftEffect $ Ref.new Map.empty @@ -766,8 +703,10 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } let newMetadata = metadata { published = Map.insert manifest.version newPublishedVersion metadata.published } - Registry.writeMetadata manifest.name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + + -- FIXME: Re-enable. + -- Registry.writeMetadata manifest.name (Metadata newMetadata) + -- Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. 
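
-- The `compilers` field written above starts out as `Left payload.compiler`
-- and, once every compiler has been checked, is replaced via the `Map.update`
-- in the next hunk. That second step isolated as a pure helper, as a sketch
-- only: `promoteCompilers` is hypothetical and assumes the
-- `PublishedMetadata` record is importable from the library.

module Sketch.PromoteCompilers where

import Prelude

import Data.Array.NonEmpty (NonEmptyArray)
import Data.Either (Either(..))
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (Maybe(..))
import Registry.Metadata (PublishedMetadata)
import Registry.Version (Version)

-- Mark a published version as verified against the full compiler set.
promoteCompilers :: Version -> NonEmptyArray Version -> Map Version PublishedMetadata -> Map Version PublishedMetadata
promoteCompilers version verified = Map.update (\meta -> Just (meta { compilers = Right verified })) version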
@@ -811,6 +750,11 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Nothing -> NonEmptyArray.singleton payload.compiler Just verified -> NonEmptyArray.fromFoldable1 verified + -- FIXME: Remove + case NonEmptyArray.length allVerified of + 1 -> unsafeCrashWith $ "Only one compiler verified (this is odd)" <> Version.print (NonEmptyArray.head allVerified) + _ -> pure unit + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptyArray.toArray allVerified)) let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right allVerified })) manifest.version newMetadata.published } Registry.writeMetadata manifest.name (Metadata compilersMetadata) @@ -823,25 +767,25 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the -- | manifest. If not, we solve their manifest to produce a build plan. -verifyResolutions :: forall r. Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) -verifyResolutions compiler manifest resolutions = do +verifyResolutions :: forall r. CompilerIndex -> Version -> Manifest -> Maybe (Map PackageName Version) -> Run (REGISTRY + LOG + AFF + EXCEPT String + r) (Map PackageName Version) +verifyResolutions compilerIndex compiler manifest resolutions = do Log.debug "Check the submitted build plan matches the manifest" - compilerIndex <- readCompilerIndex case resolutions of - Nothing -> case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of - Left errors -> do - let - printedError = String.joinWith "\n" - [ "Could not produce valid dependencies for manifest." - , "```" - , errors # foldMapWithIndex \index error -> String.joinWith "\n" - [ "[Error " <> show (index + 1) <> "]" - , Solver.printSolverError error - ] - , "```" - ] - Except.throw printedError - Right solved -> pure solved + Nothing -> do + case Operation.Validation.validateDependenciesSolve compiler manifest compilerIndex of + Left errors -> do + let + printedError = String.joinWith "\n" + [ "Could not produce valid dependencies for manifest." 
+ , "```" + , errors # foldMapWithIndex \index error -> String.joinWith "\n" + [ "[Error " <> show (index + 1) <> "]" + , Solver.printSolverError error + ] + , "```" + ] + Except.throw printedError + Right solved -> pure solved Just provided -> do validateResolutions manifest provided pure provided @@ -938,7 +882,10 @@ findAllCompilers { source, manifest, compilers } = do checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of - Left solverErrors -> pure $ Left $ Tuple target (Left solverErrors) + Left solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + Log.debug $ Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) solverErrors + pure $ Left $ Tuple target (Left solverErrors) Right (Tuple mbCompiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions case mbCompiler of @@ -953,6 +900,7 @@ findAllCompilers { source, manifest, compilers } = do Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do + Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed @@ -963,6 +911,11 @@ findAllCompilers { source, manifest, compilers } = do , cwd: Just workdir } FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Right _ -> do + Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } pure $ bimap (Tuple target <<< Right) (const target) result Just { result } -> @@ -1263,6 +1216,160 @@ readCompilerIndex = do allCompilers <- PursVersions.pursVersions pure $ Solver.buildCompilerIndex allCompilers manifests metadata +type AdjustManifest = + { source :: FilePath + , compiler :: Version + , manifest :: Manifest + , index :: CompilerIndex + , resolutions :: Maybe (Map PackageName Version) + } + +-- other TODOs: +-- - make sure that we're handling 'verified resolutions' appropriately +-- - if we changed the manifest then don't trust our initial compile, +-- do it over again with the new resolutions (maybe just always redo +-- it for simplicity's sake? like findAllCompilers just tries them all?) +-- - delete the validation 'unused dependencies' check since we have +-- this whole dedicated function? +-- - test this function (a bitch, i know) + +-- | Check the given manifest to determine dependencies that are unused and can +-- | be removed, as well as dependencies that are used but not listed in the +-- | manifest dependencies. +fixManifestDependencies + :: forall r + . AdjustManifest + -> Run (COMMENT + REGISTRY + STORAGE + LOG + EXCEPT String + AFF + EFFECT + r) (Tuple Manifest (Map PackageName Version)) +fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, resolutions } = do + verified <- verifyResolutions index compiler (Manifest manifest) resolutions + + Log.debug "Fixing manifest dependencies if needed..." + tmp <- Tmp.mkTmpDir + installBuildPlan verified tmp + + Log.debug "Discovering used dependencies from source." 
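
-- The discovery below assumes `installBuildPlan` unpacks each dependency to
-- <tmp>/<name>-<version>/, which is what the dep globs and
-- `parseInstalledModulePath` rely on. A rough sketch of the idea under that
-- assumption (`packageFromDir` is hypothetical; the real parser also
-- recovers the version):

module Sketch.InstalledPath where

import Prelude

import Data.Either (hush)
import Data.Maybe (Maybe)
import Data.String (Pattern(..))
import Data.String as String
import Registry.PackageName (PackageName)
import Registry.PackageName as PackageName

-- Recover "type-equality" from a directory name such as "type-equality-4.0.1"
-- by splitting on the final hyphen, since versions contain no hyphens.
packageFromDir :: String -> Maybe PackageName
packageFromDir dir = do
  index <- String.lastIndexOf (Pattern "-") dir
  hush $ PackageName.parse (String.take index dir)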
+ let srcGlobs = Path.concat [ source, "src", "**", "*.purs" ]
+ let depGlobs = Path.concat [ tmp, "*", "src", "**", "*.purs" ]
+ let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] }
+
+ -- We need to use the minimum compiler version that supports 'purs graph'.
+ -- Technically that's 0.13.8, but that version had a bug with transitive
+ -- dependencies, so we start from 0.14.0.
+ let minGraphCompiler = unsafeFromRight (Version.parse "0.14.0")
+ let compiler' = if compiler >= minGraphCompiler then compiler else minGraphCompiler
+ result <- Run.liftAff (Purs.callCompiler { command, version: Just compiler', cwd: Nothing })
+ FS.Extra.remove tmp
+ case result of
+ Left err -> case err of
+ UnknownError str -> Except.throw str
+ MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print compiler'
+ CompilationError errs -> do
+ Log.warn $ Array.fold
+ [ "Failed to discover unused dependencies because purs graph failed:\n"
+ , Purs.printCompilerErrors errs
+ ]
+ -- purs graph fails if the source code is malformed or if the package uses
+ -- syntax that predates the oldest usable purs graph compiler (i.e. 0.14.0).
+ -- In either case we can't determine unused dependencies and should leave
+ -- the manifest untouched.
+ pure $ Tuple (Manifest manifest) verified
+ Right output -> do
+ graph <- case Argonaut.Parser.jsonParser output of
+ Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr
+ Right json -> case CA.decode PursGraph.pursGraphCodec json of
+ Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CA.printJsonDecodeError decodeErr
+ Right graph -> do
+ Log.debug "Got a valid graph of source and dependencies."
+ pure graph
+
+ let
+ depsGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern source) <<< _.path) graph
+ pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmp, path: _ }
+
+ associated <- case PursGraph.associateModules pathParser depsGraph of
+ Left errs -> do
+ Except.throw $ String.joinWith "\n"
+ [ "Failed to associate modules with packages while finding unused dependencies:"
+ , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } ->
+ " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n"
+ ]
+ Right modules -> pure modules
+
+ let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern source) <<< _.path) graph
+ let directImports = PursGraph.directDependenciesOf sourceModules graph
+ Log.debug $ "Found modules directly imported by project source code: " <> String.joinWith ", " (map unwrap (Set.toUnfoldable directImports))
+ let directPackages = Set.mapMaybe (flip Map.lookup associated) directImports
+ Log.debug $ "Found packages directly imported by project source code: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages))
+
+ -- Unused packages are those which are listed in the manifest dependencies
+ -- but which are not imported by the package source code.
+ let unusedInManifest = Set.filter (not <<< flip Set.member directPackages) (Map.keys manifest.dependencies)
+
+ if Set.isEmpty unusedInManifest then
+ -- If there are no unused dependencies then we don't need to fix anything.
+ pure $ Tuple (Manifest manifest) verified + else do + Log.debug $ "Found unused dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) + + let + registry :: Solver.TransitivizedRegistry + registry = Solver.initializeRegistry $ un CompilerIndex index + + prune :: Map PackageName Range -> Map PackageName Range + prune deps = do + let + partition = partitionEithers $ map (\entry -> entry # if Set.member (fst entry) directPackages then Right else Left) $ Map.toUnfoldable deps + unusedDeps = Map.fromFoldable partition.fail + + if Map.isEmpty unusedDeps then + deps + else do + let + usedDeps :: Map PackageName Range + usedDeps = Map.fromFoldable partition.success + + unusedTransitive :: Map PackageName Range + unusedTransitive = + Map.mapMaybeWithKey (\key intersect -> if Map.member key unusedDeps then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps (Solver.solveSeed { registry, required: Solver.initializeRequired unusedDeps }) + + prune $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive + + prunedDependencies = prune manifest.dependencies + + -- Missing packages are those which are imported by the package source + -- but which are not listed in the manifest dependencies. + let missing = Set.filter (not <<< flip Set.member (Map.keys prunedDependencies)) directPackages + when (Set.size missing > 0) do + let path = Path.concat [ scratchDir, "missing-deps" ] + FS.Extra.ensureDirectory path + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ path, formatPackageVersion manifest.name manifest.version <> "-unused-dependencies.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable missing))) + Log.warn $ "Found missing dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing)) + + case Solver.solveFull { registry, required: Solver.initializeRequired prunedDependencies } of + Left failure -> + Except.throw $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure + Right new' -> do + let purs = unsafeFromRight (PackageName.parse "purs") + let newResolutions = Map.delete purs new' + let removed = Map.keys $ Map.difference manifest.dependencies prunedDependencies + let added = Map.difference prunedDependencies manifest.dependencies + Comment.comment $ String.joinWith "\n" + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones. 
Your dependency list was:" + , "```json" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "```" + , " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) + , " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) + , "Your new dependency list is:" + , "```json" + , printJson (Internal.Codec.packageMap Range.codec) prunedDependencies + , "```" + ] + pure $ Tuple (Manifest (manifest { dependencies = prunedDependencies })) newResolutions + type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) _compilerCache :: Proxy "compilerCache" diff --git a/lib/src/PursGraph.purs b/lib/src/PursGraph.purs index fdcef5268..1029515ce 100644 --- a/lib/src/PursGraph.purs +++ b/lib/src/PursGraph.purs @@ -80,7 +80,17 @@ associateModules parse graph = do -- | Find direct dependencies of the given module, according to the given graph. directDependencies :: ModuleName -> PursGraph -> Maybe (Set ModuleName) -directDependencies name = map (Set.fromFoldable <<< _.depends) <<< Map.lookup name +directDependencies start graph = Map.lookup start graph <#> \_ -> directDependenciesOf (Set.singleton start) graph + +-- | Find direct dependencies of a set of input modules according to the given +-- | graph, excluding the input modules themselves. +directDependenciesOf :: Set ModuleName -> PursGraph -> Set ModuleName +directDependenciesOf sources graph = do + let + foldFn prev name = case Map.lookup name graph of + Nothing -> prev + Just { depends } -> Set.union prev (Array.foldl (\acc mod -> if Set.member mod sources then acc else Set.insert mod acc) Set.empty depends) + Array.foldl foldFn Set.empty $ Set.toUnfoldable sources -- | Find all dependencies of the given module, according to the given graph, -- | excluding the module itself. 
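
-- A usage sketch for the new `directDependenciesOf`: dependencies that are
-- themselves input modules are excluded, so only edges leaving the input set
-- remain. The module names and paths here are invented for illustration.

module Sketch.DirectDeps where

import Prelude

import Data.Map as Map
import Data.Set as Set
import Data.Tuple (Tuple(..))
import Registry.PursGraph (ModuleName(..))
import Registry.PursGraph as PursGraph

-- `App.Util` is imported by `App.Main`, but it is an input module, so it
-- does not appear in the result; this expression evaluates to true.
example :: Boolean
example = do
  let
    graph = Map.fromFoldable
      [ Tuple (ModuleName "App.Main") { path: "src/Main.purs", depends: [ ModuleName "App.Util", ModuleName "Data.Maybe" ] }
      , Tuple (ModuleName "App.Util") { path: "src/Util.purs", depends: [ ModuleName "Prelude" ] }
      ]
    sources = Set.fromFoldable [ ModuleName "App.Main", ModuleName "App.Util" ]
  PursGraph.directDependenciesOf sources graph == Set.fromFoldable [ ModuleName "Data.Maybe", ModuleName "Prelude" ]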
diff --git a/lib/test/Registry/Solver.purs b/lib/test/Registry/Solver.purs index bfc0e31b9..a45cf92f9 100644 --- a/lib/test/Registry/Solver.purs +++ b/lib/test/Registry/Solver.purs @@ -7,18 +7,19 @@ import Data.Either (Either(..)) import Data.Foldable (for_) import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList -import Data.Map (Map) +import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map -import Data.Maybe (Maybe(..)) -import Data.Newtype (wrap) +import Data.Maybe (Maybe(..), fromMaybe') +import Data.Newtype (un, wrap) import Data.Semigroup.Foldable (intercalateMap) import Data.Set as Set import Data.Set.NonEmpty as NES import Data.Tuple (Tuple(..)) import Data.Tuple.Nested ((/\)) +import Partial.Unsafe (unsafeCrashWith) import Registry.PackageName as PackageName import Registry.Range as Range -import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), printSolverError, solve) +import Registry.Solver (Intersection(..), LocalSolverPosition(..), SolverError(..), SolverPosition(..), Sourced(..), initializeRegistry, initializeRequired, lowerBound, printSolverError, solve, solveSeed, solveSteps, upperBound) import Registry.Test.Assert as Assert import Registry.Test.Utils (fromRight) import Registry.Types (PackageName, Range, Version) @@ -31,6 +32,11 @@ spec = do shouldSucceed goals result = pure unit >>= \_ -> solve solverIndex (Map.fromFoldable goals) `Assert.shouldContain` (Map.fromFoldable result) + shouldSucceedSteps goals result = pure unit >>= \_ -> do + let solved = solveSteps (solveSeed { registry: initializeRegistry solverIndex, required: initializeRequired (Map.fromFoldable goals) }) + let toRange intersect = fromMaybe' (\_ -> unsafeCrashWith "Bad intersection") (Range.mk (lowerBound intersect) (upperBound intersect)) + map toRange (un SemigroupMap solved.required) `Assert.shouldEqual` Map.fromFoldable result + shouldFail goals errors = pure unit >>= \_ -> case solve solverIndex (Map.fromFoldable goals) of Left solverErrors -> do let expectedErrorCount = Array.length errors @@ -103,6 +109,22 @@ spec = do , prelude.package /\ version 1 ] + Spec.describe "Single-step expands bounds" do + Spec.it "Simple range" do + shouldSucceedSteps + [ simple.package /\ range 0 1 ] + [ simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + + Spec.it "Multi-version range" do + shouldSucceedSteps + [ simple.package /\ range 0 2 ] + [ simple.package /\ range 0 2, prelude.package /\ range 0 2 ] + + Spec.it "Transitive" do + shouldSucceedSteps + [ onlySimple.package /\ range 0 1 ] + [ onlySimple.package /\ range 0 1, simple.package /\ range 0 1, prelude.package /\ range 0 1 ] + Spec.describe "Valid dependency ranges containing some invalid versions solve" do Spec.it "Proceeds past broken ranges to find a later valid range" do -- 'broken-fixed' cannot be solved at the broken version 0, but it can be diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 238e550f5..726c9399e 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -304,18 +304,35 @@ runLegacyImport logs = do Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) pure compilers - Log.debug "Fetching source and installing dependencies to test compilers" - tmp <- Tmp.mkTmpDir - { path } <- Source.fetch tmp manifest.location ref - Log.debug $ "Downloaded source to " <> path - 
Log.debug "Downloading dependencies..." - let installDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir - Log.debug $ "Installed to " <> installDir - Log.debug "Finding first compiler that can build the package..." - selected <- findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } - FS.Extra.remove tmp + cached <- do + cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> + Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of + Nothing -> pure Nothing + Just { result: Left _ } -> pure Nothing + Just { target, result: Right _ } -> pure $ Just target + pure $ NonEmptyArray.fromArray $ Array.catMaybes cached + + selected <- case cached of + Just prev -> do + let selected = NonEmptyArray.last prev + Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected + pure $ Right selected + Nothing -> do + Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Trying compilers one-by-one..." + selected <- findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } + FS.Extra.remove tmp + pure selected + case selected of Left failures -> do let @@ -356,7 +373,7 @@ runLegacyImport logs = do , "----------" ] - void $ for (Array.take 1000 manifests) publishLegacyPackage + void $ for (Array.take 100 manifests) publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." let From 5c5410375ead077fd8935ffc314fbf4f789ac620 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 17 Nov 2023 15:35:27 -0500 Subject: [PATCH 15/64] Remove unused functions --- app/src/App/API.purs | 9 --------- lib/src/Operation/Validation.purs | 17 ----------------- 2 files changed, 26 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index c8fda387c..df8bef50d 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1224,15 +1224,6 @@ type AdjustManifest = , resolutions :: Maybe (Map PackageName Version) } --- other TODOs: --- - make sure that we're handling 'verified resolutions' appropriately --- - if we changed the manifest then don't trust our initial compile, --- do it over again with the new resolutions (maybe just always redo --- it for simplicity's sake? like findAllCompilers just tries them all?) --- - delete the validation 'unused dependencies' check since we have --- this whole dedicated function? --- - test this function (a bitch, i know) - -- | Check the given manifest to determine dependencies that are unused and can -- | be removed, as well as dependencies that are used but not listed in the -- | manifest dependencies. 
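
-- The cached fast path in the importer above walks `possibleCompilers` in
-- ascending order and takes `NonEmptyArray.last`, i.e. the newest compiler
-- with a cached successful build. The same selection as a pure function
-- (a sketch; the Map-of-results shape is assumed for illustration):

module Sketch.NewestWorking where

import Prelude

import Data.Either (Either, isRight)
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (Maybe)
import Registry.Version (Version)

-- Map keys are ordered, so the maximum key surviving the success filter is
-- the newest compiler known to build the package.
newestWorking :: forall err. Map Version (Either err Unit) -> Maybe Version
newestWorking results = _.key <$> Map.findMax (Map.filter isRight results)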
diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs
index 7e1fad8a2..d25b47064 100644
--- a/lib/src/Operation/Validation.purs
+++ b/lib/src/Operation/Validation.purs
@@ -94,23 +94,6 @@ getUnresolvedDependencies (Manifest { dependencies }) resolutions =
| not (Range.includes dependencyRange version) -> Just $ Right $ dependencyName /\ dependencyRange /\ version
| otherwise -> Nothing

--- | Discovers dependencies listed in the manifest that are not actually used
--- | by the solved dependencies. This should not produce an error, but it
--- | indicates an over-constrained manifest.
-getUnusedDependencies :: Manifest -> Map PackageName Version -> Set PackageName -> Maybe (NonEmptySet PackageName)
-getUnusedDependencies (Manifest { dependencies }) resolutions discovered = do
- let
- -- There may be too many resolved dependencies because the manifest includes
- -- e.g. test dependencies, so we start by only considering resolved deps
- -- that are actually used.
- inUse = Set.filter (flip Set.member discovered) (Map.keys resolutions)
-
- -- Next, we can determine which dependencies are unused by looking at the
- -- difference between the manifest dependencies and the resolved packages
- unused = Set.filter (not <<< flip Set.member inUse) (Map.keys dependencies)
-
- NonEmptySet.fromSet unused
-
data TarballSizeResult
= ExceedsMaximum Number
| WarnPackageSize Number

From 441b960c67c8e35204e48f94fd642edecb197d7e Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Fri, 17 Nov 2023 18:13:54 -0500
Subject: [PATCH 16/64] Fix missing-dependency report naming and raise import limit
---
 app/src/App/API.purs | 4 ++--
 scripts/src/LegacyImporter.purs | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/app/src/App/API.purs b/app/src/App/API.purs
index df8bef50d..14e6c1bf7 100644
--- a/app/src/App/API.purs
+++ b/app/src/App/API.purs
@@ -1336,8 +1336,8 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest,
when (Set.size missing > 0) do
let path = Path.concat [ scratchDir, "missing-deps" ]
FS.Extra.ensureDirectory path
- Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ path, formatPackageVersion manifest.name manifest.version <> "-unused-dependencies.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable missing)))
- Log.warn $ "Found missing dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing))
+ Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ path, formatPackageVersion manifest.name manifest.version <> "-missing-dependencies.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable missing)))
+ Log.warn $ "Missing direct imports: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing))

case Solver.solveFull { registry, required: Solver.initializeRequired prunedDependencies } of
Left failure ->
diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs
index 726c9399e..cf27533e0 100644
--- a/scripts/src/LegacyImporter.purs
+++ b/scripts/src/LegacyImporter.purs
@@ -373,7 +373,7 @@ runLegacyImport logs = do
, "----------"
]

- void $ for (Array.take 100 manifests) publishLegacyPackage
+ void $ for (Array.take 1500 manifests) publishLegacyPackage

Log.info "Finished publishing! Collecting all publish failures and writing to disk."
let From 3495edb3b287b4dd4fde91c86565e427664234e8 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 12:22:40 -0500 Subject: [PATCH 17/64] Use cache when finding first suitable compiler --- app/src/App/API.purs | 5 ---- app/src/App/Effect/Source.purs | 19 ++++++++++++--- scripts/src/LegacyImporter.purs | 42 +++++++++++++++++++++++---------- 3 files changed, 46 insertions(+), 20 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 14e6c1bf7..4a069bd5d 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -750,11 +750,6 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Nothing -> NonEmptyArray.singleton payload.compiler Just verified -> NonEmptyArray.fromFoldable1 verified - -- FIXME: Remove - case NonEmptyArray.length allVerified of - 1 -> unsafeCrashWith $ "Only one compiler verified (this is odd)" <> Version.print (NonEmptyArray.head allVerified) - _ -> pure unit - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptyArray.toArray allVerified)) let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right allVerified })) manifest.version newMetadata.published } Registry.writeMetadata manifest.name (Metadata compilersMetadata) diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index f1da6f7e8..38d27b580 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,6 +6,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate +import Data.String as String import Effect.Aff as Aff import Effect.Exception as Exception import Effect.Now as Now @@ -20,6 +21,7 @@ import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Location as Location @@ -90,11 +92,22 @@ handle importType = case _ of Failed err -> Aff.throwError $ Aff.error err Succeeded _ -> pure unit + alreadyExists = String.contains (String.Pattern "already exists and is not an empty directory") + Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of - Left error -> do - Log.error $ "Failed to clone git tag: " <> Aff.message error - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Right _ -> Log.debug $ "Cloned package source to " <> repoDir + Left error -> do + Log.error $ "Failed to clone git tag: " <> Aff.message error <> ", retrying..." + when (alreadyExists (Aff.message error)) $ FS.Extra.remove repoDir + Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of + Right _ -> Log.debug $ "Cloned package source to " <> repoDir + Left error2 -> do + Log.error $ "Failed to clone git tag (attempt 2): " <> Aff.message error2 <> ", retrying..." + Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of + Right _ -> Log.debug $ "Cloned package source to " <> repoDir + Left error3 -> do + Log.error $ "Failed to clone git tag (attempt 3): " <> Aff.message error3 + unsafeCrashWith $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." 
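
-- The clone recovery above unrolls three attempts by hand. A bounded retry
-- helper would express the same behavior more directly; this is a sketch of
-- such a refactor, not code from the patch:

module Sketch.Retry where

import Prelude

import Data.Either (Either(..))
import Effect.Aff (Aff, Error)
import Effect.Aff as Aff

-- Run an Aff action up to `attempts` times, returning the first success or
-- the error from the final attempt. `retry 3 clonePackageAtTag` would mirror
-- the three attempts above.
retry :: forall a. Int -> Aff a -> Aff (Either Error a)
retry attempts action = Aff.attempt action >>= case _ of
  Right a -> pure (Right a)
  Left err
    | attempts <= 1 -> pure (Left err)
    | otherwise -> retry (attempts - 1) action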
diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index cf27533e0..c9925d4a2 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -45,6 +45,7 @@ import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String import Parsing.String.Basic as Parsing.String.Basic +import Registry.App.API (COMPILER_CACHE) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) @@ -329,7 +330,13 @@ runLegacyImport logs = do API.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Trying compilers one-by-one..." - selected <- findFirstCompiler { source: path, installed: installDir, compilers: NonEmptySet.toUnfoldable possibleCompilers } + selected <- findFirstCompiler + { source: path + , installed: installDir + , compilers: NonEmptySet.toUnfoldable possibleCompilers + , resolutions + , manifest: Manifest manifest + } FS.Extra.remove tmp pure selected @@ -373,7 +380,7 @@ runLegacyImport logs = do , "----------" ] - void $ for (Array.take 1500 manifests) publishLegacyPackage + void $ for manifests publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." let @@ -994,23 +1001,34 @@ fetchSpagoYaml address ref = do findFirstCompiler :: forall r . { compilers :: Array Version + , manifest :: Manifest + , resolutions :: Map PackageName Version , source :: FilePath , installed :: FilePath } - -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) -findFirstCompiler { source, compilers, installed } = do + -> Run (COMPILER_CACHE + STORAGE + LOG + AFF + EFFECT + r) (Either (Map Version CompilerFailure) Version) +findFirstCompiler { source, manifest, resolutions, compilers, installed } = do search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do - Log.debug $ "Trying compiler " <> Version.print target - workdir <- Tmp.mkTmpDir - result <- Run.liftAff $ Purs.callCompiler - { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } - , version: Just target - , cwd: Just workdir - } - FS.Extra.remove workdir + result <- Cache.get API._compilerCache (API.Compilation manifest resolutions target) >>= case _ of + Nothing -> do + Log.debug $ "Trying compiler " <> Version.print target + workdir <- Tmp.mkTmpDir + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just target + , cwd: Just workdir + } + FS.Extra.remove workdir + let cache = { result: map (const unit) result, target } + Cache.put API._compilerCache (API.Compilation manifest resolutions target) cache + pure cache.result + Just cached -> + pure cached.result + case result of Left error -> pure $ Tuple target error Right _ -> Except.throw target + case search of Left worked -> pure $ Right worked Right others -> pure $ Left $ Map.fromFoldable others From 7ceab4ca7e28f45c845fab953be36feca696523a Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 16:24:47 -0500 Subject: [PATCH 18/64] WIP: Include missing direct imports --- app/src/App/API.purs | 94 ++++++++++++++++++++------------- scripts/src/LegacyImporter.purs | 83 +++++++++++++++++++++++++---- 2 files changed, 132 insertions(+), 45 
deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 4a069bd5d..8b84cb253 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -33,6 +33,7 @@ import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) import Data.Map (SemigroupMap(..)) import Data.Map as Map +import Data.Monoid as Monoid import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format import Data.Semigroup.Foldable as Foldable1 @@ -1287,27 +1288,39 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, let directPackages = Set.mapMaybe (flip Map.lookup associated) directImports Log.debug $ "Found packages directly imported by project source code: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages)) - -- Unused packages are those which are listed in the manifest dependencies - -- but which are not imported by the package source code. let unusedInManifest = Set.filter (not <<< flip Set.member directPackages) (Map.keys manifest.dependencies) + when (Set.size unusedInManifest > 0) do + Log.warn $ "Manifest includes unused packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) - if Set.isEmpty unusedInManifest then - -- If there are no unused dependencies then we don't need to fix anything. + let missingInManifest = Set.filter (not <<< flip Map.member manifest.dependencies) directPackages + when (Set.size missingInManifest > 0) do + Log.warn $ "Manifest does not include imported packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missingInManifest)) + + if Set.isEmpty unusedInManifest && Set.isEmpty missingInManifest then pure $ Tuple (Manifest manifest) verified else do - Log.debug $ "Found unused dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) - let registry :: Solver.TransitivizedRegistry registry = Solver.initializeRegistry $ un CompilerIndex index - prune :: Map PackageName Range -> Map PackageName Range - prune deps = do + solveSteps :: Map PackageName Range -> Map PackageName Range + solveSteps init = + Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps + $ Solver.solveSeed { registry, required: Solver.initializeRequired init } + + expandedManifest :: Map PackageName Range + expandedManifest = solveSteps manifest.dependencies + + pruneUnused :: Map PackageName Range -> Map PackageName Range + pruneUnused deps = do let partition = partitionEithers $ map (\entry -> entry # if Set.member (fst entry) directPackages then Right else Left) $ Map.toUnfoldable deps - unusedDeps = Map.fromFoldable partition.fail + remainingUnused = Map.fromFoldable partition.fail - if Map.isEmpty unusedDeps then + if Map.isEmpty remainingUnused then deps else do let @@ -1316,45 +1329,54 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, unusedTransitive :: Map PackageName Range unusedTransitive = - Map.mapMaybeWithKey (\key intersect -> if Map.member key unusedDeps then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + Map.mapMaybeWithKey (\key intersect -> if Map.member key remainingUnused then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) $ Safe.Coerce.coerce $ _.required - $ Solver.solveSteps (Solver.solveSeed { registry, required: Solver.initializeRequired unusedDeps }) + $ Solver.solveSteps 
(Solver.solveSeed { registry, required: Solver.initializeRequired remainingUnused }) - prune $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive + pruneUnused $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive - prunedDependencies = prune manifest.dependencies + fixedDependencies = pruneUnused expandedManifest -- Missing packages are those which are imported by the package source -- but which are not listed in the manifest dependencies. - let missing = Set.filter (not <<< flip Set.member (Map.keys prunedDependencies)) directPackages - when (Set.size missing > 0) do - let path = Path.concat [ scratchDir, "missing-deps" ] - FS.Extra.ensureDirectory path - Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ path, formatPackageVersion manifest.name manifest.version <> "-missing-dependencies.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable missing))) - Log.warn $ "Missing direct imports: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing)) - - case Solver.solveFull { registry, required: Solver.initializeRequired prunedDependencies } of + let missing = Set.filter (not <<< flip Set.member (Map.keys fixedDependencies)) directPackages + case Set.size missing of + 0 -> pure unit + n -> do + Log.warn $ show n <> " packages still missing!" + unsafeCrashWith $ String.joinWith "\n\n" + [ "ORIGINAL DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "EXPANDED DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) expandedManifest + , "PRUNED DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + , "DIRECT IMPORTS: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages)) + , "MISSING : " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing)) + , "RESOLUTIONS : " <> printJson (Internal.Codec.packageMap Version.codec) verified + ] + + case Solver.solveFull { registry, required: Solver.initializeRequired fixedDependencies } of Left failure -> - Except.throw $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure + unsafeCrashWith $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure Right new' -> do let purs = unsafeFromRight (PackageName.parse "purs") let newResolutions = Map.delete purs new' - let removed = Map.keys $ Map.difference manifest.dependencies prunedDependencies - let added = Map.difference prunedDependencies manifest.dependencies - Comment.comment $ String.joinWith "\n" - [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones. Your dependency list was:" - , "```json" + let removed = Map.keys $ Map.difference manifest.dependencies fixedDependencies + let added = Map.difference fixedDependencies manifest.dependencies + Comment.comment $ Array.fold + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add directly-imported ones. 
Your dependency list was:\n" + , "```json\n" , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - , "```" - , " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) - , " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) - , "Your new dependency list is:" - , "```json" - , printJson (Internal.Codec.packageMap Range.codec) prunedDependencies - , "```" + , "\n```\n" + , Monoid.guard (not (Set.isEmpty removed)) do + " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) <> "\n" + , Monoid.guard (not (Map.isEmpty added)) do + " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) <> "\n" + , "Your new dependency list is:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + , "\n```\n" ] - pure $ Tuple (Manifest (manifest { dependencies = prunedDependencies })) newResolutions + pure $ Tuple (Manifest (manifest { dependencies = fixedDependencies })) newResolutions type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index c9925d4a2..da79e5e4e 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -37,6 +37,7 @@ import Data.String as String import Data.String.CodeUnits as String.CodeUnits import Data.Variant as Variant import Effect.Class.Console as Console +import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process import Parsing (Parser) @@ -239,7 +240,9 @@ runLegacyImport logs = do Just _ -> pure unit Log.info "Ready for upload!" - Log.info $ formatImportStats $ calculateImportStats legacyRegistry importedIndex + let importStats = formatImportStats $ calculateImportStats legacyRegistry importedIndex + Log.info importStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) importStats Log.info "Sorting packages for upload..." 
let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex @@ -276,7 +279,10 @@ runLegacyImport logs = do Left unsolvable -> do let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors - Cache.put _importCache (PublishFailure manifest.name manifest.version) (SolveFailed $ String.joinWith " " errors) + let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") + let { fail: nonCompiler } = partitionEithers $ map (\error -> if isCompilerSolveError error then Right error else Left error) errors + let joined = String.joinWith " " errors + Cache.put _importCache (PublishFailure manifest.name manifest.version) (if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined) Right (Tuple _ resolutions) -> do Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies possibleCompilers <- @@ -380,7 +386,7 @@ runLegacyImport logs = do , "----------" ] - void $ for manifests publishLegacyPackage + void $ for (Array.take 1000 manifests) publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." let @@ -391,6 +397,10 @@ runLegacyImport logs = do failures <- Array.foldM collectError Map.empty allIndexPackages Run.liftAff $ writePublishFailures failures + let publishStats = formatPublishFailureStats importedIndex.registryIndex failures + Log.info publishStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStats + -- | Record all package failures to the 'package-failures.json' file. 
writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit
writePublishFailures =
@@ -558,7 +568,8 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa
pure $ Map.fromFoldable manifests

data PublishError
- = SolveFailed String
+ = SolveFailedDependencies String
+ | SolveFailedCompiler String
| NoCompilersFound (Map (NonEmptyArray Version) CompilerFailure)
| UnsolvableDependencyCompilers (Array GroupedByCompilers)
| PublishError String
@@ -567,25 +578,77 @@ derive instance Eq PublishError

publishErrorCodec :: JsonCodec PublishError
publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantMatch
- { solveFailed: Right CA.string
+ { solveFailedCompiler: Right CA.string
+ , solveFailedDependencies: Right CA.string
, noCompilersFound: Right compilerFailureMapCodec
, unsolvableDependencyCompilers: Right (CA.array groupedByCompilersCodec)
, publishError: Right CA.string
}
where
toVariant = case _ of
- SolveFailed error -> Variant.inj (Proxy :: _ "solveFailed") error
+ SolveFailedDependencies error -> Variant.inj (Proxy :: _ "solveFailedDependencies") error
+ SolveFailedCompiler error -> Variant.inj (Proxy :: _ "solveFailedCompiler") error
NoCompilersFound failed -> Variant.inj (Proxy :: _ "noCompilersFound") failed
UnsolvableDependencyCompilers group -> Variant.inj (Proxy :: _ "unsolvableDependencyCompilers") group
PublishError error -> Variant.inj (Proxy :: _ "publishError") error

fromVariant = Variant.match
- { solveFailed: SolveFailed
+ { solveFailedDependencies: SolveFailedDependencies
+ , solveFailedCompiler: SolveFailedCompiler
, noCompilersFound: NoCompilersFound
, unsolvableDependencyCompilers: UnsolvableDependencyCompilers
, publishError: PublishError
}

+formatPublishFailureStats :: ManifestIndex -> Map PackageName (Map Version PublishError) -> String
+formatPublishFailureStats importedIndex results = do
+ let
+ index :: Map PackageName (Map Version Manifest)
+ index = ManifestIndex.toMap importedIndex
+
+ countVersions :: forall a. Map PackageName (Map Version a) -> Int
+ countVersions = Array.foldl (\prev (Tuple _ versions) -> prev + Map.size versions) 0 <<< Map.toUnfoldable
+
+ startPackages :: Int
+ startPackages = Map.size index
+
+ startVersions :: Int
+ startVersions = countVersions index
+
+ failedPackages :: Int
+ failedPackages = Map.size results
+
+ failedVersions :: Int
+ failedVersions = countVersions results
+
+ -- A package has zero usable versions when every one of its indexed
+ -- versions failed to publish.
+ removedPackages :: Int
+ removedPackages = Map.size $ Map.filterWithKey (\name failed -> maybe false (\versions -> Map.size failed >= Map.size versions) (Map.lookup name index)) results
+
+ countByFailure :: Map String Int
+ countByFailure = do
+ let
+ toKey = case _ of
+ SolveFailedDependencies _ -> "Solving failed (dependencies)"
+ SolveFailedCompiler _ -> "Solving failed (compiler)"
+ NoCompilersFound _ -> "No compilers usable for publishing"
+ UnsolvableDependencyCompilers _ -> "Dependency compiler conflict"
+ PublishError _ -> "Publishing failed"
+
+ foldFn prev (Tuple _ versions) =
+ Array.foldl (\prevCounts (Tuple _ error) -> Map.insertWith (+) (toKey error) 1 prevCounts) prev (Map.toUnfoldable versions)
+
+ Array.foldl foldFn Map.empty (Map.toUnfoldable results)
+
+ String.joinWith "\n"
+ [ "--------------------"
+ , "PUBLISH FAILURES"
+ , "--------------------"
+ , ""
+ , "PACKAGES: " <> show failedPackages <> " out of " <> show startPackages <> " failed (" <> show removedPackages <> " packages have zero usable versions)."
+ , "VERSIONS: " <> show failedVersions <> " out of " <> show startVersions <> " failed."
+ , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Map.toUnfoldable countByFailure) + ] + compilerFailureMapCodec :: JsonCodec (Map (NonEmptyArray Version) CompilerFailure) compilerFailureMapCodec = do let @@ -835,8 +898,10 @@ formatVersionValidationError { error, reason } = case error of formatPublishError :: PublishError -> JsonValidationError formatPublishError = case _ of - SolveFailed error -> - { tag: "SolveFailed", value: Nothing, reason: error } + SolveFailedCompiler error -> + { tag: "SolveFailedCompiler", value: Nothing, reason: error } + SolveFailedDependencies error -> + { tag: "SolveFailedDependencies", value: Nothing, reason: error } NoCompilersFound versions -> { tag: "NoCompilersFound", value: Just (CA.encode compilerFailureMapCodec versions), reason: "No valid compilers found for publishing." } UnsolvableDependencyCompilers failed -> From 3b85cd573ac9966099c7e51419277f1d6720d146 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 18:13:05 -0500 Subject: [PATCH 19/64] No longer try to insert missing dependencies --- app/src/App/API.purs | 86 +++++++++++---------------------- scripts/src/LegacyImporter.purs | 2 +- 2 files changed, 29 insertions(+), 59 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8b84cb253..2fe4eb2bf 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1288,39 +1288,27 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, let directPackages = Set.mapMaybe (flip Map.lookup associated) directImports Log.debug $ "Found packages directly imported by project source code: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages)) + -- Unused packages are those which are listed in the manifest dependencies + -- but which are not imported by the package source code. let unusedInManifest = Set.filter (not <<< flip Set.member directPackages) (Map.keys manifest.dependencies) - when (Set.size unusedInManifest > 0) do - Log.warn $ "Manifest includes unused packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) - let missingInManifest = Set.filter (not <<< flip Map.member manifest.dependencies) directPackages - when (Set.size missingInManifest > 0) do - Log.warn $ "Manifest does not include imported packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missingInManifest)) - - if Set.isEmpty unusedInManifest && Set.isEmpty missingInManifest then + if Set.isEmpty unusedInManifest then + -- If there are no unused dependencies then we don't need to fix anything. 
pure $ Tuple (Manifest manifest) verified else do + Log.debug $ "Found unused dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) + let registry :: Solver.TransitivizedRegistry registry = Solver.initializeRegistry $ un CompilerIndex index - solveSteps :: Map PackageName Range -> Map PackageName Range - solveSteps init = - Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) - $ Safe.Coerce.coerce - $ _.required - $ Solver.solveSteps - $ Solver.solveSeed { registry, required: Solver.initializeRequired init } - - expandedManifest :: Map PackageName Range - expandedManifest = solveSteps manifest.dependencies - - pruneUnused :: Map PackageName Range -> Map PackageName Range - pruneUnused deps = do + prune :: Map PackageName Range -> Map PackageName Range + prune deps = do let partition = partitionEithers $ map (\entry -> entry # if Set.member (fst entry) directPackages then Right else Left) $ Map.toUnfoldable deps - remainingUnused = Map.fromFoldable partition.fail + unusedDeps = Map.fromFoldable partition.fail - if Map.isEmpty remainingUnused then + if Map.isEmpty unusedDeps then deps else do let @@ -1329,54 +1317,36 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, unusedTransitive :: Map PackageName Range unusedTransitive = - Map.mapMaybeWithKey (\key intersect -> if Map.member key remainingUnused then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + Map.mapMaybeWithKey (\key intersect -> if Map.member key unusedDeps then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) $ Safe.Coerce.coerce $ _.required - $ Solver.solveSteps (Solver.solveSeed { registry, required: Solver.initializeRequired remainingUnused }) - - pruneUnused $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive - - fixedDependencies = pruneUnused expandedManifest - - -- Missing packages are those which are imported by the package source - -- but which are not listed in the manifest dependencies. - let missing = Set.filter (not <<< flip Set.member (Map.keys fixedDependencies)) directPackages - case Set.size missing of - 0 -> pure unit - n -> do - Log.warn $ show n <> " packages still missing!" 
- unsafeCrashWith $ String.joinWith "\n\n" - [ "ORIGINAL DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - , "EXPANDED DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) expandedManifest - , "PRUNED DEPS:\n" <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies - , "DIRECT IMPORTS: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages)) - , "MISSING : " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable missing)) - , "RESOLUTIONS : " <> printJson (Internal.Codec.packageMap Version.codec) verified - ] + $ Solver.solveSteps (Solver.solveSeed { registry, required: Solver.initializeRequired unusedDeps }) + + prune $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive + + prunedDependencies = prune manifest.dependencies - case Solver.solveFull { registry, required: Solver.initializeRequired fixedDependencies } of + case Solver.solveFull { registry, required: Solver.initializeRequired prunedDependencies } of Left failure -> - unsafeCrashWith $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure + Except.throw $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure Right new' -> do let purs = unsafeFromRight (PackageName.parse "purs") let newResolutions = Map.delete purs new' - let removed = Map.keys $ Map.difference manifest.dependencies fixedDependencies - let added = Map.difference fixedDependencies manifest.dependencies + let removed = Map.keys $ Map.difference manifest.dependencies prunedDependencies + let added = Map.difference prunedDependencies manifest.dependencies Comment.comment $ Array.fold - [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add directly-imported ones. Your dependency list was:\n" - , "```json\n" + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones. 
Your dependency list was:" + , "\n```json\n" , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies , "\n```\n" - , Monoid.guard (not (Set.isEmpty removed)) do - " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) <> "\n" - , Monoid.guard (not (Map.isEmpty added)) do - " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) <> "\n" - , "Your new dependency list is:\n" - , "```json\n" - , printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + , Monoid.guard (not (Set.isEmpty removed)) $ " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) <> "\n" + , Monoid.guard (not (Map.isEmpty added)) $ " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) <> "\n" + , "Your new dependency list is:" + , "\n```json\n" + , printJson (Internal.Codec.packageMap Range.codec) prunedDependencies , "\n```\n" ] - pure $ Tuple (Manifest (manifest { dependencies = fixedDependencies })) newResolutions + pure $ Tuple (Manifest (manifest { dependencies = prunedDependencies })) newResolutions type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index da79e5e4e..0a9d671a3 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -386,7 +386,7 @@ runLegacyImport logs = do , "----------" ] - void $ for (Array.take 1000 manifests) publishLegacyPackage + void $ for manifests publishLegacyPackage Log.info "Finished publishing! Collecting all publish failures and writing to disk." let From 3fa90b5086b9f7162e060fe36305902e558435fa Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 20:31:36 -0500 Subject: [PATCH 20/64] Address internal comments --- app/src/App/API.purs | 5 ++--- app/src/App/Effect/Source.purs | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 2fe4eb2bf..32c04acc2 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -705,9 +705,8 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } let newMetadata = metadata { published = Map.insert manifest.version newPublishedVersion metadata.published } - -- FIXME: Re-enable. - -- Registry.writeMetadata manifest.name (Metadata newMetadata) - -- Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + Registry.writeMetadata manifest.name (Metadata newMetadata) + Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. 
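A side note on the comment assembly in the hunk above: `Data.Monoid.guard` keeps a section of the message only when its condition holds and returns `mempty` otherwise, which is what lets the removed/added bullets be folded in without nested conditionals. A standalone sketch of the pattern (the values are made up):

```purescript
module GuardSketch where

import Prelude

import Data.Array as Array
import Data.Foldable (fold)
import Data.Monoid as Monoid
import Data.String as String

-- Each bullet appears in the output only when its list is non-empty.
report :: Array String -> Array String -> String
report removed added = fold
  [ "We adjusted your dependencies.\n"
  , Monoid.guard (not (Array.null removed)) (" - Removed: " <> String.joinWith ", " removed <> "\n")
  , Monoid.guard (not (Array.null added)) (" - Added: " <> String.joinWith ", " added <> "\n")
  ]
```

For example, `report ["effect"] []` produces only the header and the "Removed" bullet; the "Added" line contributes `mempty` and disappears from the fold.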
diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index 38d27b580..c7e6dfcf9 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -107,7 +107,7 @@ handle importType = case _ of Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error3 -> do Log.error $ "Failed to clone git tag (attempt 3): " <> Aff.message error3 - unsafeCrashWith $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref + Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." From 0d3cef9e4f5e71370ed84051b7e98ba37d1be581 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 20:39:15 -0500 Subject: [PATCH 21/64] Re-enable comment --- app/src/App/CLI/Git.purs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ce4e05c67..ce046282d 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -110,11 +110,11 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." ] pure true - Just _files -> do - -- Log.debug $ Array.fold - -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - -- , NonEmptyArray.foldMap1 (append "\n - ") _files - -- ] + Just files -> do + Log.debug $ Array.fold + [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + , NonEmptyArray.foldMap1 (append "\n - ") files + ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." From 4e8cb8786a905ba0467fc9fb15a1580b27dd7743 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 19 Nov 2023 20:41:54 -0500 Subject: [PATCH 22/64] Remove unnecessary --- app/src/App/API.purs | 1 - 1 file changed, 1 deletion(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 32c04acc2..3b03850e0 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -757,7 +757,6 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp - FS.Extra.remove packageDirectory -- | Verify the build plan for the package. 
If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the From 81c85a410e478dd386a6c3db88fc71a5fe18a628 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 1 Dec 2023 12:06:57 -0500 Subject: [PATCH 23/64] Fix 'removed packages' stats --- scripts/src/LegacyImporter.purs | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index e7ff714f9..b450c51d1 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -600,7 +600,7 @@ publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM } formatPublishFailureStats :: ManifestIndex -> Map PackageName (Map Version PublishError) -> String -formatPublishFailureStats importedIndex results = do +formatPublishFailureStats importedIndex failures = do let index :: Map PackageName (Map Version Manifest) index = ManifestIndex.toMap importedIndex @@ -615,13 +615,20 @@ formatPublishFailureStats importedIndex results = do startVersions = countVersions index failedPackages :: Int - failedPackages = Map.size results + failedPackages = Map.size failures failedVersions :: Int - failedVersions = countVersions results + failedVersions = countVersions failures - removedPackages :: Int - removedPackages = Map.size index - Map.size results + removedPackages :: Set PackageName + removedPackages = do + let + foldFn package prev versions = fromMaybe prev do + allVersions <- Map.lookup package index + guard (Map.keys allVersions == Map.keys versions) + pure $ Set.insert package prev + + foldlWithIndex foldFn Set.empty failures countByFailure :: Map String Int countByFailure = do @@ -636,16 +643,16 @@ formatPublishFailureStats importedIndex results = do foldFn prev (Tuple _ versions) = Array.foldl (\prevCounts (Tuple _ error) -> Map.insertWith (+) (toKey error) 1 prevCounts) prev (Map.toUnfoldable versions) - Array.foldl foldFn Map.empty (Map.toUnfoldable results) + Array.foldl foldFn Map.empty (Map.toUnfoldable failures) String.joinWith "\n" [ "--------------------" , "PUBLISH FAILURES" , "--------------------" , "" - , "PACKAGES: " <> show failedPackages <> " out of " <> show startPackages <> " failed (" <> show removedPackages <> " packages have zero usable versions)." + , "PACKAGES: " <> show failedPackages <> " out of " <> show startPackages <> " had at least 1 version fail (" <> show (Set.size removedPackages) <> " packages have zero usable versions)." , "VERSIONS: " <> show failedVersions <> " out of " <> show startVersions <> " failed." - , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Map.toUnfoldable countByFailure) + , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable countByFailure)) ] compilerFailureMapCodec :: JsonCodec (Map (NonEmptyArray Version) CompilerFailure) From 10bccee825278dead514e81543a4ee13f377b79c Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 1 Dec 2023 15:21:50 -0500 Subject: [PATCH 24/64] Feedback --- app/src/App/API.purs | 34 ++++++++++++++------------------ app/src/App/CLI/Purs.purs | 10 ++++++++++ app/src/App/Effect/Registry.purs | 4 ++-- 3 files changed, 27 insertions(+), 21 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 319d4e233..b717ec460 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -399,7 +399,7 @@ publish payload = do -- supports syntax back to 0.15.0. 
We'll still try to validate the package -- but it may fail to parse. Operation.Validation.validatePursModules files >>= case _ of - Left formattedError | payload.compiler < unsafeFromRight (Version.parse "0.15.0") -> do + Left formattedError | payload.compiler < Purs.minLanguageCSTParser -> do Log.debug $ "Package failed to parse in validatePursModules: " <> formattedError Log.debug $ "Skipping check because package is published with a pre-0.15.0 compiler (" <> Version.print payload.compiler <> ")." Left formattedError -> @@ -538,11 +538,12 @@ publish payload = do , url ] - Nothing | payload.compiler < unsafeFromRight (Version.parse "0.14.7") -> do + Nothing | payload.compiler < Purs.minPursuitPublish -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " - , "registry using compiler versions prior to 0.14.7. Please try with a later compiler." + , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish + , ". Please try with a later compiler." ] Nothing -> do @@ -727,7 +728,7 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif Comment.comment "Mirrored registry operation to the legacy registry!" Log.debug "Uploading package documentation to Pursuit" - if payload.compiler >= unsafeFromRight (Version.parse "0.14.7") then + if payload.compiler >= Purs.minPursuitPublish then publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of Left publishErr -> do Log.error publishErr @@ -742,26 +743,21 @@ publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manif ] allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.filter (notEq payload.compiler) allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: Set.singleton payload.compiler } + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } Just try -> do found <- findAllCompilers { source: packageDirectory , manifest: Manifest manifest , compilers: try } - pure $ found { succeeded = Set.insert payload.compiler found.succeeded } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - let - allVerified = case NonEmptySet.fromFoldable validCompilers of - Nothing -> NonEmptyArray.singleton payload.compiler - Just verified -> NonEmptyArray.fromFoldable1 verified - - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptyArray.toArray allVerified)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right allVerified })) manifest.version newMetadata.published } + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right 
(NonEmptySet.toUnfoldable1 validCompilers) })) manifest.version newMetadata.published } Registry.writeMetadata manifest.name (Metadata compilersMetadata) Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) @@ -1002,7 +998,7 @@ type PublishToPursuit = -- | -- | ASSUMPTIONS: This function should not be run on legacy packages or on -- | packages where the `purescript-` prefix is still present. Cannot be used --- | on packages prior to 0.14.7. +-- | on packages prior to 'Purs.minPursuitPublish' publishToPursuit :: forall r . PublishToPursuit @@ -1011,6 +1007,9 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir + when (compiler < Purs.minPursuitPublish) do + Except.throw $ "Cannot publish to Pursuit because this package was published with a pre-0.14.7 compiler (" <> Version.print compiler <> "). If you want to publish documentation, please try again with a later compiler." + let resolvedPaths = formatPursuitResolutions { resolutions, installedResolutions } resolutionsFilePath = Path.concat [ tmp, "resolutions.json" ] @@ -1220,10 +1219,7 @@ fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } -- We need to use the minimum compiler version that supports 'purs graph'. - -- Technically that's 0.13.8, but that version had a bug wrt transitive - -- dependencies, so we start from 0.14.0. - let minGraphCompiler = unsafeFromRight (Version.parse "0.14.0") - let compiler' = if compiler >= minGraphCompiler then compiler else minGraphCompiler + let compiler' = if compiler >= Purs.minPursGraph then compiler else Purs.minPursGraph result <- Run.liftAff (Purs.callCompiler { command, version: Just compiler', cwd: Nothing }) FS.Extra.remove tmp case result of diff --git a/app/src/App/CLI/Purs.purs b/app/src/App/CLI/Purs.purs index 65723f88c..5bdae74a4 100644 --- a/app/src/App/CLI/Purs.purs +++ b/app/src/App/CLI/Purs.purs @@ -12,6 +12,16 @@ import Node.ChildProcess.Types (Exit(..)) import Node.Library.Execa as Execa import Registry.Version as Version +-- | The minimum compiler version that supports 'purs graph' +minPursGraph :: Version +minPursGraph = unsafeFromRight (Version.parse "0.14.0") + +minPursuitPublish :: Version +minPursuitPublish = unsafeFromRight (Version.parse "0.14.7") + +minLanguageCSTParser :: Version +minLanguageCSTParser = unsafeFromRight (Version.parse "0.15.0") + -- | Call a specific version of the PureScript compiler callCompiler_ :: { version :: Maybe Version, command :: PursCommand, cwd :: Maybe FilePath } -> Aff Unit callCompiler_ = void <<< callCompiler diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index 6590ae37f..40f2c68b6 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -878,10 +878,10 @@ readAllMetadataFromDisk metadataDir = do entries <- Run.liftAff $ map partitionEithers $ for packages.success \name -> do result <- readJsonFile Metadata.codec (Path.concat [ metadataDir, PackageName.print name <> ".json" ]) - pure $ map (Tuple name) result + pure $ bimap (Tuple name) (Tuple name) result unless (Array.null entries.fail) do - Except.throw $ append "Could not read metadata for all packages because the metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (append "\n - ") entries.fail + Except.throw $ append "Could not read metadata for all packages because the 
metadata directory is invalid (some package metadata cannot be decoded):" $ Array.foldMap (\(Tuple name err) -> "\n - " <> PackageName.print name <> ": " <> err) entries.fail Log.debug "Successfully read metadata entries." pure $ Map.fromFoldable entries.success From 26c5aa06b526d39e558f6f4ffcf23986775a07f7 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 1 Dec 2023 16:13:27 -0500 Subject: [PATCH 25/64] Always print publish stats --- scripts/src/LegacyImporter.purs | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index b450c51d1..15ec00f9b 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -387,18 +387,18 @@ runLegacyImport logs = do void $ for manifests publishLegacyPackage - Log.info "Finished publishing! Collecting all publish failures and writing to disk." - let - collectError prev (Manifest { name, version }) = do - Cache.get _importCache (PublishFailure name version) >>= case _ of - Nothing -> pure prev - Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev - failures <- Array.foldM collectError Map.empty allIndexPackages - Run.liftAff $ writePublishFailures failures - - let publishStats = formatPublishFailureStats importedIndex.registryIndex failures - Log.info publishStats - Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStats + Log.info "Finished publishing! Collecting all publish failures and writing to disk." + let + collectError prev (Manifest { name, version }) = do + Cache.get _importCache (PublishFailure name version) >>= case _ of + Nothing -> pure prev + Just error -> pure $ Map.insertWith Map.union name (Map.singleton version error) prev + failures <- Array.foldM collectError Map.empty allIndexPackages + Run.liftAff $ writePublishFailures failures + + let publishStats = formatPublishFailureStats importedIndex.registryIndex failures + Log.info publishStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStats -- | Record all package failures to the 'package-failures.json' file. 
writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit From b11917ee107522c837018f243f804b5cc781a8c2 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 3 Dec 2023 19:44:35 -0500 Subject: [PATCH 26/64] tweaks --- app/src/App/API.purs | 8 ++++---- scripts/src/LegacyImporter.purs | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index b717ec460..d7a070b28 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -446,9 +446,9 @@ publish payload = do Right manifest -> do Comment.comment $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:\n" - , "```json" + , "```json\n" , printJson Manifest.codec manifest - , "```" + , "```\n" ] pure manifest @@ -475,9 +475,9 @@ publish payload = do let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest Comment.comment $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:\n" - , "```json" + , "```json\n" , printJson Manifest.codec manifest - , "```" + , "```\n" ] pure manifest diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 15ec00f9b..d0d0f6916 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -245,6 +245,7 @@ runLegacyImport logs = do Log.info "Sorting packages for upload..." let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "sorted-packages.txt" ]) $ String.joinWith "\n" $ map (\(Manifest { name, version }) -> PackageName.print name <> "@" <> Version.print version) allIndexPackages Log.info "Removing packages that previously failed publish or have been published" publishable <- do @@ -255,7 +256,7 @@ runLegacyImport logs = do Just _ -> pure false allCompilers <- PursVersions.pursVersions - allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (NonEmptyArray.last allCompilers) of + allCompilersRange <- case Range.mk (NonEmptyArray.head allCompilers) (Version.bumpPatch (NonEmptyArray.last allCompilers)) of Nothing -> Except.throw $ "Failed to construct a compiler range from " <> Version.print (NonEmptyArray.head allCompilers) <> " and " <> Version.print (NonEmptyArray.last allCompilers) Just range -> do Log.info $ "All available compilers range: " <> Range.print range From 3ddde828ffd82cfa24a02e018cdb1dfae103c4d4 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 3 Dec 2023 20:53:28 -0500 Subject: [PATCH 27/64] Better publish stats formatting and write removals --- scripts/src/LegacyImporter.purs | 48 +++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index d0d0f6916..30b5dc9eb 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -397,9 +397,11 @@ runLegacyImport logs = do failures <- Array.foldM collectError Map.empty allIndexPackages Run.liftAff $ writePublishFailures failures - let publishStats = formatPublishFailureStats importedIndex.registryIndex failures - Log.info publishStats - Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStats + let publishStats = collectPublishFailureStats importedIndex.registryIndex failures + let publishStatsMessage = formatPublishFailureStats publishStats + Log.info publishStatsMessage + 
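The `collectPublishFailureStats` call above (defined later in this patch) decides which packages were removed outright by checking whether a package's failed versions cover every version the index knows about. The core of that check is a `foldlWithIndex` whose body short-circuits through `Maybe`; a standalone sketch over simplified types (version sets instead of full metadata maps):

```purescript
module StatsSketch where

import Prelude

import Control.Alternative (guard)
import Data.FoldableWithIndex (foldlWithIndex)
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (fromMaybe)
import Data.Set (Set)
import Data.Set as Set

-- A package counts as removed when every known version of it failed.
allVersionsFailed
  :: Map String (Set String) -- all known versions, per package
  -> Map String (Set String) -- failed versions, per package
  -> Set String
allVersionsFailed index failures = foldlWithIndex foldFn Set.empty failures
  where
  foldFn package acc failedVersions = fromMaybe acc do
    allVersions <- Map.lookup package index
    guard (allVersions == failedVersions)
    pure (Set.insert package acc)
```

If a package has no entry in `index`, or some of its versions succeeded, the `Maybe` computation fails and `fromMaybe` falls back to the accumulator unchanged.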
Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.failed))) -- | Record all package failures to the 'package-failures.json' file. writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit @@ -600,8 +602,13 @@ publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM , publishError: PublishError } -formatPublishFailureStats :: ManifestIndex -> Map PackageName (Map Version PublishError) -> String -formatPublishFailureStats importedIndex failures = do +type PublishFailureStats = + { packages :: { total :: Int, partial :: Int, failed :: Set PackageName } + , versions :: { total :: Int, failed :: Int, reason :: Map String Int } + } + +collectPublishFailureStats :: ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importedIndex failures = do let index :: Map PackageName (Map Version Manifest) index = ManifestIndex.toMap importedIndex @@ -646,15 +653,28 @@ formatPublishFailureStats importedIndex failures = do Array.foldl foldFn Map.empty (Map.toUnfoldable failures) - String.joinWith "\n" - [ "--------------------" - , "PUBLISH FAILURES" - , "--------------------" - , "" - , "PACKAGES: " <> show failedPackages <> " out of " <> show startPackages <> " had at least 1 version fail (" <> show (Set.size removedPackages) <> " packages have zero usable versions)." - , "VERSIONS: " <> show failedVersions <> " out of " <> show startVersions <> " failed." - , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable countByFailure)) - ] + { packages: + { total: startPackages + , partial: failedPackages + , failed: removedPackages + } + , versions: + { total: startVersions + , failed: failedVersions + , reason: countByFailure + } + } + +formatPublishFailureStats :: PublishFailureStats -> String +formatPublishFailureStats { packages, versions } = String.joinWith "\n" + [ "--------------------" + , "PUBLISH FAILURES" + , "--------------------" + , "" + , show packages.partial <> " out of " <> show packages.total <> " packages had at least 1 version fail (" <> show (Set.size packages.failed) <> " packages had all versions fail)." + , show versions.failed <> " out of " <> show versions.total <> " versions failed." 
+ , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) + ] compilerFailureMapCodec :: JsonCodec (Map (NonEmptyArray Version) CompilerFailure) compilerFailureMapCodec = do From 5b17cb31f2a65eb839617ffb789300350590fefd Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 4 Dec 2023 19:24:22 -0500 Subject: [PATCH 28/64] Update flake --- flake.lock | 26 ++++++++++++++------------ flake.nix | 2 +- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/flake.lock b/flake.lock index e32b3662f..1ecbf5e9c 100644 --- a/flake.lock +++ b/flake.lock @@ -17,17 +17,19 @@ } }, "flake-compat_2": { + "flake": false, "locked": { "lastModified": 1696426674, "narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=", + "owner": "edolstra", + "repo": "flake-compat", "rev": "0f9255e01c2351cc7d116c072cb317785dd33b33", - "revCount": 57, - "type": "tarball", - "url": "https://api.flakehub.com/f/pinned/edolstra/flake-compat/1.0.1/018afb31-abd1-7bff-a5e4-cff7e18efb7a/source.tar.gz" + "type": "github" }, "original": { - "type": "tarball", - "url": "https://flakehub.com/f/edolstra/flake-compat/1.tar.gz" + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" } }, "flake-utils": { @@ -50,16 +52,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1701699333, - "narHash": "sha256-ePa4oynwTNXuc4bqbi5ZMrO72yGuTPukptuMmgXPM5k=", + "lastModified": 1701730523, + "narHash": "sha256-WWgooXBkjXukyZzMUhkPJvvngKed2VW5yv+i8Qtpldc=", "owner": "nixos", "repo": "nixpkgs", - "rev": "42499b9f6515dbca54cec1cae78165fd4e5eccfe", + "rev": "8078ceb2777d790d3fbc53589ed3753532185d77", "type": "github" }, "original": { "owner": "nixos", - "ref": "release-23.05", + "ref": "release-23.11", "repo": "nixpkgs", "type": "github" } @@ -73,11 +75,11 @@ "slimlock": "slimlock" }, "locked": { - "lastModified": 1701720691, - "narHash": "sha256-BaQ+UyYSqNezOnM6OtR/dcC3Iwa95k+2ojEcvs82MoQ=", + "lastModified": 1701732039, + "narHash": "sha256-0KBXWRUgWKIS1oE0qFfCNXTbttozzS97gv0pW2GplAg=", "owner": "thomashoneyman", "repo": "purescript-overlay", - "rev": "8260c6819df0814c0cc1e7fc262d60910399c89f", + "rev": "249f9042299dfd4a6f77ddff4a2849651a8211e5", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 3f40b0848..b0fc05e4c 100644 --- a/flake.nix +++ b/flake.nix @@ -2,7 +2,7 @@ description = "The PureScript Registry"; inputs = { - nixpkgs.url = "github:nixos/nixpkgs/release-23.05"; + nixpkgs.url = "github:nixos/nixpkgs/release-23.11"; flake-utils.url = "github:numtide/flake-utils"; flake-compat.url = "github:edolstra/flake-compat"; From f924b31af5fbb68d2c9929e06df3fcc6665066ea Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 6 Dec 2023 22:07:48 -0500 Subject: [PATCH 29/64] Integrate inserting missing dependencies --- .../transitive-1.0.0/bower.json | 12 + .../transitive-1.0.0/src/Transitive.purs | 6 + app/src/App/API.purs | 724 +++++++++--------- app/src/App/Effect/GitHub.purs | 4 +- app/src/App/GitHubIssue.purs | 2 +- app/src/App/Server.purs | 2 +- app/test/App/API.purs | 81 +- app/test/Test/Assert/Run.purs | 6 +- lib/src/Operation/Validation.purs | 56 ++ scripts/src/LegacyImporter.purs | 66 +- scripts/src/PackageDeleter.purs | 2 +- 11 files changed, 533 insertions(+), 428 deletions(-) create mode 100644 app/fixtures/github-packages/transitive-1.0.0/bower.json create mode 100644 app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs diff --git a/app/fixtures/github-packages/transitive-1.0.0/bower.json 
b/app/fixtures/github-packages/transitive-1.0.0/bower.json new file mode 100644 index 000000000..d0d4d0bd1 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/bower.json @@ -0,0 +1,12 @@ +{ + "name": "purescript-transitive", + "homepage": "https://github.com/purescript/purescript-transitive", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-transitive.git" + }, + "dependencies": { + "purescript-effect": "^4.0.0" + } +} diff --git a/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs new file mode 100644 index 000000000..71d771f62 --- /dev/null +++ b/app/fixtures/github-packages/transitive-1.0.0/src/Transitive.purs @@ -0,0 +1,6 @@ +module Transitive where + +import Prelude + +uno :: Int +uno = one diff --git a/app/src/App/API.purs b/app/src/App/API.purs index d7a070b28..f6dab9efa 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -12,7 +12,6 @@ module Registry.App.API , installBuildPlan , packageSetUpdate , packagingTeam - , parseInstalledModulePath , publish , readCompilerIndex , removeIgnoredTarballFiles @@ -26,16 +25,14 @@ import Data.Array.NonEmpty as NonEmptyArray import Data.Codec.Argonaut as CA import Data.Codec.Argonaut.Common as CA.Common import Data.Codec.Argonaut.Record as CA.Record -import Data.DateTime (DateTime) import Data.Exists as Exists import Data.Foldable (traverse_) import Data.FoldableWithIndex (foldMapWithIndex) +import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Monoid as Monoid import Data.Newtype (over, unwrap) import Data.Number.Format as Number.Format -import Data.Semigroup.Foldable as Foldable1 import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet import Data.String as String @@ -43,7 +40,6 @@ import Data.String.CodeUnits as String.CodeUnits import Data.String.NonEmpty as NonEmptyString import Data.String.Regex as Regex import Effect.Aff as Aff -import Effect.Ref as Ref import Effect.Unsafe (unsafePerformEffect) import Node.ChildProcess.Types (Exit(..)) import Node.FS.Aff as FS.Aff @@ -99,7 +95,7 @@ import Registry.Manifest as Manifest import Registry.Metadata as Metadata import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) import Registry.Operation as Operation -import Registry.Operation.Validation (UnpublishError(..), validateNoExcludedObligatoryFiles) +import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation import Registry.Owner as Owner import Registry.PackageName as PackageName @@ -108,7 +104,7 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 -import Registry.Solver (CompilerIndex(..), SolverErrors) +import Registry.Solver (CompilerIndex(..), DependencyIndex, Intersection, SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) @@ -337,8 +333,12 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- | published before then it will be registered and the given version will be -- | upload. If it has been published before then the existing metadata will be -- | updated with the new version. -publish :: forall r. 
PublishData -> Run (PublishEffects + r) Unit -publish payload = do +-- +-- The legacyIndex argument contains the unverified manifests produced by the +-- legacy importer; these manifests can be used on legacy packages to conform +-- them to the registry rule that transitive dependencies are not allowed. +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name Log.debug $ "Publishing package " <> printedName <> " with payload:\n" <> stringifyJson Operation.publishCodec payload @@ -384,10 +384,10 @@ publish payload = do -- the package directory along with its detected publish time. Log.debug "Metadata validated. Fetching package source code..." tmp <- Tmp.mkTmpDir - { path: packageDirectory, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref + { path: downloadedPackage, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref - Log.debug $ "Package downloaded to " <> packageDirectory <> ", verifying it contains a src directory with valid modules..." - Internal.Path.readPursFiles (Path.concat [ packageDirectory, "src" ]) >>= case _ of + Log.debug $ "Package downloaded to " <> downloadedPackage <> ", verifying it contains a src directory with valid modules..." + Internal.Path.readPursFiles (Path.concat [ downloadedPackage, "src" ]) >>= case _ of Nothing -> Except.throw $ Array.fold [ "This package has no PureScript files in its `src` directory. " @@ -414,13 +414,13 @@ publish payload = do -- If the package doesn't have a purs.json we can try to make one - possible scenarios: -- - in case it has a spago.yaml then we know how to read that, and have all the info to move forward -- - if it's a legacy import then we can try to infer as much info as possible to make a manifest - let packagePursJson = Path.concat [ packageDirectory, "purs.json" ] + let packagePursJson = Path.concat [ downloadedPackage, "purs.json" ] hadPursJson <- Run.liftEffect $ FS.Sync.exists packagePursJson - let packageSpagoYaml = Path.concat [ packageDirectory, "spago.yaml" ] + let packageSpagoYaml = Path.concat [ downloadedPackage, "spago.yaml" ] hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml - Manifest manifest <- + Manifest receivedManifest <- if hadPursJson then Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of Left error -> do @@ -483,51 +483,51 @@ publish payload = do -- We trust the manifest for any changes to the 'owners' field, but for all -- other fields we trust the registry metadata. - let metadata = existingMetadata { owners = manifest.owners } - unless (Operation.Validation.nameMatches (Manifest manifest) payload) do + let metadata = existingMetadata { owners = receivedManifest.owners } + unless (Operation.Validation.nameMatches (Manifest receivedManifest) payload) do Except.throw $ Array.fold [ "The manifest file specifies a package name (" - , PackageName.print manifest.name + , PackageName.print receivedManifest.name , ") that differs from the package name submitted to the API (" , PackageName.print payload.name , "). The manifest and API request must match." 
] - unless (Operation.Validation.locationMatches (Manifest manifest) (Metadata metadata)) do + unless (Operation.Validation.locationMatches (Manifest receivedManifest) (Metadata metadata)) do Except.throw $ Array.fold [ "The manifest file specifies a location (" - , stringifyJson Location.codec manifest.location + , stringifyJson Location.codec receivedManifest.location , ") that differs from the location in the registry metadata (" , stringifyJson Location.codec metadata.location , "). If you would like to change the location of your package you should " , "submit a transfer operation." ] - when (Operation.Validation.isMetadataPackage (Manifest manifest)) do + when (Operation.Validation.isMetadataPackage (Manifest receivedManifest)) do Except.throw "The `metadata` package cannot be uploaded to the registry because it is a protected package." - for_ (Operation.Validation.isNotUnpublished (Manifest manifest) (Metadata metadata)) \info -> do + for_ (Operation.Validation.isNotUnpublished (Manifest receivedManifest) (Metadata metadata)) \info -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that has been unpublished: " <> Version.print manifest.version + [ "You tried to upload a version that has been unpublished: " <> Version.print receivedManifest.version , "" , "```json" , printJson Metadata.unpublishedMetadataCodec info , "```" ] - case Operation.Validation.isNotPublished (Manifest manifest) (Metadata metadata) of + case Operation.Validation.isNotPublished (Manifest receivedManifest) (Metadata metadata) of -- If the package has been published already, then we check whether the published -- version has made it to Pursuit or not. If it has, then we terminate here. If -- it hasn't then we publish to Pursuit and then terminate. Just info -> do - published <- Pursuit.getPublishedVersions manifest.name >>= case _ of + published <- Pursuit.getPublishedVersions receivedManifest.name >>= case _ of Left error -> Except.throw error Right versions -> pure versions - case Map.lookup manifest.version published of + case Map.lookup receivedManifest.version published of Just url -> do Except.throw $ String.joinWith "\n" - [ "You tried to upload a version that already exists: " <> Version.print manifest.version + [ "You tried to upload a version that already exists: " <> Version.print receivedManifest.version , "" , "Its metadata is:" , "```json" @@ -552,217 +552,236 @@ publish payload = do , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] compilerIndex <- readCompilerIndex - verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest manifest) payload.resolutions - compilationResult <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } + verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + let installedResolutions = Path.concat [ tmp, ".registry" ] + installBuildPlan verifiedResolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just downloadedPackage + } case compilationResult of - Left error -> do + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." - Right installedResolutions -> do + Right _ -> do Log.debug "Uploading to Pursuit" -- While we have created a manifest from the package source, we -- still need to ensure a purs.json file exists for 'purs publish'. unless hadPursJson do - existingManifest <- ManifestIndex.readManifest manifest.name manifest.version + existingManifest <- ManifestIndex.readManifest receivedManifest.name receivedManifest.version case existingManifest of Nothing -> Except.throw "Version was previously published, but we could not find a purs.json file in the package source, and no existing manifest was found in the registry." Just existing -> Run.liftAff $ writeJsonFile Manifest.codec packagePursJson existing - publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of Left publishErr -> Except.throw publishErr Right _ -> Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" -- In this case the package version has not been published, so we proceed -- with ordinary publishing. - Nothing -> - -- Now that we've verified the package we can write the manifest to the source - -- directory and then publish it. - if hadPursJson then do - -- No need to verify the generated manifest because nothing was generated, - -- and no need to write a file (it's already in the package source.) - publishRegistry - { manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + Nothing -> do + Log.info "Verifying the package build plan..." + compilerIndex <- readCompilerIndex + validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions + + Comment.comment "Verifying unused and/or missing dependencies..." + + -- First we install the resolutions and call 'purs graph' to adjust the + -- manifest as needed, but we defer compilation until after this check + -- in case the package manifest and resolutions are adjusted. 
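The check below attributes every module in the `purs graph` output to an owning package by parsing its installed path (the `pathParser` binding below, together with `parseModulePath` later in this series). A rough standalone sketch of that attribution using plain string operations, assuming the `name-x.y.z/...` install layout:

```purescript
module PathSketch where

import Prelude

import Data.Array as Array
import Data.Maybe (Maybe)
import Data.String (Pattern(..))
import Data.String as String

-- Given a path like "type-equality-4.0.1/src/Type/Equality.purs"
-- (relative to the install directory), split the leading directory
-- into a package name and version at its last hyphen.
packageOfPath :: String -> Maybe { name :: String, version :: String }
packageOfPath path = do
  dir <- Array.head (String.split (Pattern "/") path)
  ix <- String.lastIndexOf (Pattern "-") dir
  pure { name: String.take ix dir, version: String.drop (ix + 1) dir }
```

Splitting at the last hyphen keeps hyphenated names intact: the example above yields the name `type-equality` and the version string `4.0.1`. The real `parseModulePath` parses the leading segment with the `parsing` library and returns `Either` with a descriptive error instead of `Maybe`.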
+ let installedResolutions = Path.concat [ tmp, ".registry" ]
+ installBuildPlan validatedResolutions installedResolutions
+
+ let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ]
+ let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ]
+ let pursGraph = Purs.Graph { globs: [ srcGlobs, depGlobs ] }
+
+ -- We need to use the minimum compiler version that supports 'purs graph'.
+ let pursGraphCompiler = if payload.compiler >= Purs.minPursGraph then payload.compiler else Purs.minPursGraph
+
+ -- In this step we run 'purs graph' to get a graph of the package source
+ -- and installed dependencies and use that to determine if the manifest
+ -- contains any unused or missing dependencies. If it does and is a legacy
+ -- manifest then we fix it and return the result. If it does and is a modern
+ -- manifest (spago.yaml, purs.json, etc.) then we reject it. If it doesn't
+ -- then we simply return the manifest and resolutions we already had.
+ Tuple manifest resolutions <- Run.liftAff (Purs.callCompiler { command: pursGraph, version: Just pursGraphCompiler, cwd: Nothing }) >>= case _ of
+ Left err -> case err of
+ UnknownError str -> Except.throw str
+ MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print pursGraphCompiler
+ CompilationError errs -> do
+ Log.warn $ Array.fold
+ [ "Failed to discover unused dependencies because purs graph failed:\n"
+ , Purs.printCompilerErrors errs
+ ]
+ -- The purs graph command will fail if the source code uses syntax
+ -- before the oldest usable purs graph compiler (ie. 0.14.0). In
+ -- this case we simply accept the dependencies as-is, even though
+ -- they could technically violate Registry rules around missing and
+ -- unused dependencies. This only affects old packages and we know
+ -- they compile, so we've decided it's an acceptable exception.
+ pure $ Tuple (Manifest receivedManifest) validatedResolutions
+ Right output -> case Argonaut.Parser.jsonParser output of
+ Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr
+ Right json -> case CA.decode PursGraph.pursGraphCodec json of
+ Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CA.printJsonDecodeError decodeErr
+ Right graph -> do
+ Log.debug "Got a valid graph of source and dependencies."
+ let
+ pathParser path = map _.name $ case String.stripPrefix (String.Pattern installedResolutions) path of
+ Just trimmed -> parseModulePath trimmed
+ Nothing -> case String.stripPrefix (String.Pattern downloadedPackage) path of
+ Just _ -> Right { name: receivedManifest.name, version: receivedManifest.version }
+ Nothing -> Left $ "Failed to parse module path " <> path <> " because it is not in the package source or installed dependencies."
+
+ case Operation.Validation.noTransitiveOrMissingDeps (Manifest receivedManifest) graph pathParser of
+ -- Association failures should always throw
+ Left (Left assocErrors) ->
+ Except.throw $ Array.fold
+ [ "Failed to validate unused / missing dependencies because modules could not be associated with package names:"
+ , flip NonEmptyArray.foldMap1 assocErrors \{ error, module: ModuleName moduleName, path } ->
+ "\n - " <> moduleName <> " (" <> path <> "): " <> error
+ ]
+
+ Left (Right depError)
+ -- If the package fails the transitive / missing check and uses
+ -- a contemporary manifest then it should be rejected.
+ | (hadPursJson || hasSpagoYaml) -> + Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError + -- If the package fails, is legacy, and we have a legacy index + -- then we can try to fix it. + | Just legacyIndex <- maybeLegacyIndex -> do + Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError + conformLegacyManifest (Manifest receivedManifest) compilerIndex legacyIndex depError + | otherwise -> + Except.throw $ "Failed to validate unused / missing dependencies and no legacy index was provided to attempt a fix: " <> Operation.Validation.printValidateDepsError depError + + -- If the check passes then we can simply return the manifest and + -- resolutions. + Right _ -> pure $ Tuple (Manifest receivedManifest) validatedResolutions + + -- Now that we've verified the package we can write the manifest to the + -- source directory. + Run.liftAff $ writeJsonFile Manifest.codec packagePursJson manifest + + Log.info "Creating packaging directory" + let packageDirname = PackageName.print receivedManifest.name <> "-" <> Version.print receivedManifest.version + let packageSource = Path.concat [ tmp, packageDirname ] + FS.Extra.ensureDirectory packageSource + -- We copy over all files that are always included (ie. src dir, purs.json file), + -- and any files the user asked for via the 'files' key, and remove all files + -- that should never be included (even if the user asked for them). + copyPackageSourceFiles { includeFiles: receivedManifest.includeFiles, excludeFiles: receivedManifest.excludeFiles, source: downloadedPackage, destination: packageSource } + removeIgnoredTarballFiles packageSource + + -- Now that we have the package source contents we can verify we can compile + -- the package with exactly what is going to be uploaded. + Comment.comment $ Array.fold + [ "Verifying package compiles using compiler " + , Version.print payload.compiler + , " and resolutions:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Version.codec) resolutions + , "\n```" + ] - else if hasSpagoYaml then do - -- We need to write the generated purs.json file, but because spago-next - -- already does unused dependency checks and supports explicit test-only - -- dependencies we can skip those checks. - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson (Manifest manifest) - publishRegistry - { manifest: Manifest manifest - , metadata: Metadata metadata - , payload - , publishedTime - , tmp - , packageDirectory - } + -- We clear the installation directory so that no old installed resolutions + -- stick around. + Run.liftAff $ FS.Extra.remove installedResolutions + installBuildPlan validatedResolutions installedResolutions + compilationResult <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } + , version: Just payload.compiler + , cwd: Just tmp + } - -- Otherwise this is a legacy package, generated from a combination of bower, - -- spago.dhall, and package set files, so we need to verify the generated - -- manifest does not contain unused dependencies before writing it. + case compilationResult of + Left compileFailure -> do + let error = printCompilerFailure payload.compiler compileFailure + Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error + Right _ -> pure unit + + Comment.comment "Package source is verified! 
Packaging tarball and uploading to the storage backend..." + let tarballName = packageDirname <> ".tar.gz" + let tarballPath = Path.concat [ tmp, tarballName ] + Tar.create { cwd: tmp, folderName: packageDirname } + + Log.info "Tarball created. Verifying its size..." + bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath + for_ (Operation.Validation.validateTarballSize bytes) case _ of + Operation.Validation.ExceedsMaximum maxPackageBytes -> + Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." + Operation.Validation.WarnPackageSize maxWarnBytes -> + Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + + -- If a package has under ~30 bytes it's about guaranteed that packaging the + -- tarball failed. This can happen if the system running the API has a non- + -- GNU tar installed, for example. + let minBytes = 30.0 + when (bytes < minBytes) do + Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." + + hash <- Sha256.hashFile tarballPath + Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." + Log.info $ "Tarball hash: " <> Sha256.print hash + + Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath + Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." + let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } + let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) + Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + + -- We write to the registry index if possible. If this fails, the packaging + -- team should manually insert the entry. + Log.debug "Adding the new version to the registry index" + Registry.writeManifest manifest + + Registry.mirrorLegacyRegistry payload.name newMetadata.location + Comment.comment "Mirrored registry operation to the legacy registry!" + + Log.debug "Uploading package documentation to Pursuit" + if payload.compiler >= Purs.minPursuitPublish then + -- TODO: We must use the 'downloadedPackage' instead of 'packageSource' + -- because Pursuit requires a git repository, and our tarball directory + -- is not one. This should be changed once Pursuit no longer needs git. + publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of + Left publishErr -> do + Log.error publishErr + Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Right _ -> + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Log.debug "Pruning unused dependencies from legacy package manifest..." 
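One detail in the metadata written above: `compilers` is recorded as `Left payload.compiler` (the single compiler the package was published with) and later upgraded to `Right` with the full set of known-good compilers once `findAllCompilers` has run. A minimal sketch of that transition over simplified types (version strings instead of `Version`):

```purescript
module CompilersSketch where

import Prelude

import Data.Array.NonEmpty (NonEmptyArray)
import Data.Either (Either(..))
import Data.Map (Map)
import Data.Map as Map
import Data.Maybe (Maybe(..))

-- Simplified published-version metadata keyed by version string.
type Published = Map String { compilers :: Either String (NonEmptyArray String) }

-- Replace the provisional single compiler (Left) with the full tested
-- set (Right) for one published version, leaving other versions alone.
recordTestedCompilers :: String -> NonEmptyArray String -> Published -> Published
recordTestedCompilers version tested =
  Map.update (Just <<< (_ { compilers = Right tested })) version
```

This mirrors the `Map.update (Just <<< (_ { compilers = Right ... }))` call used when the compatible-compiler check completes later in the function.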
- compilerIndex <- readCompilerIndex - Tuple fixedManifest fixedResolutions <- fixManifestDependencies - { source: packageDirectory - , compiler: payload.compiler - , manifest: Manifest manifest - , index: compilerIndex - , resolutions: payload.resolutions - } - - Run.liftAff $ writeJsonFile Manifest.codec packagePursJson fixedManifest - publishRegistry - { manifest: fixedManifest - , metadata: Metadata metadata - , payload: payload { resolutions = Just fixedResolutions } - , publishedTime - , tmp - , packageDirectory - } - -type PublishRegistry = - { manifest :: Manifest - , metadata :: Metadata - , payload :: PublishData - , publishedTime :: DateTime - , tmp :: FilePath - , packageDirectory :: FilePath - } - --- A private helper function for publishing to the registry. Separated out of --- the main 'publish' function because we sometimes use the publish function to --- publish to Pursuit only (in the case the package has been pushed to the --- registry, but docs have not been uploaded). -publishRegistry :: forall r. PublishRegistry -> Run (PublishEffects + r) Unit -publishRegistry { payload, metadata: Metadata metadata, manifest: Manifest manifest, publishedTime, tmp, packageDirectory } = do - Log.debug "Verifying the package build plan..." - compilerIndex <- readCompilerIndex - verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest manifest) payload.resolutions - - Log.debug "Verifying that the package dependencies are all registered..." - unregisteredRef <- Run.liftEffect $ Ref.new Map.empty - forWithIndex_ verifiedResolutions \name version -> do - Registry.readMetadata name >>= case _ of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just (Metadata { published }) -> case Map.lookup version published of - Nothing -> Run.liftEffect $ Ref.modify_ (Map.insert name version) unregisteredRef - Just _ -> pure unit - unregistered <- Run.liftEffect $ Ref.read unregisteredRef - unless (Map.isEmpty unregistered) do - Except.throw $ Array.fold - [ "Cannot register this package because it has unregistered dependencies: " - , Array.foldMap (\(Tuple name version) -> "\n - " <> formatPackageVersion name version) (Map.toUnfoldable unregistered) - ] - - Log.info "Packaging tarball for upload..." - let newDir = PackageName.print manifest.name <> "-" <> Version.print manifest.version - let packageSourceDir = Path.concat [ tmp, newDir ] - Log.debug $ "Creating packaging directory at " <> packageSourceDir - FS.Extra.ensureDirectory packageSourceDir - -- We copy over all files that are always included (ie. src dir, purs.json file), - -- and any files the user asked for via the 'files' key, and remove all files - -- that should never be included (even if the user asked for them). - copyPackageSourceFiles { includeFiles: manifest.includeFiles, excludeFiles: manifest.excludeFiles, source: packageDirectory, destination: packageSourceDir } - Log.debug "Removing always-ignored files from the packaging directory." - removeIgnoredTarballFiles packageSourceDir - - let tarballName = newDir <> ".tar.gz" - let tarballPath = Path.concat [ tmp, tarballName ] - Tar.create { cwd: tmp, folderName: newDir } - - Log.info "Tarball created. Verifying its size..." 
- bytes <- Run.liftAff $ map FS.Stats.size $ FS.Aff.stat tarballPath - for_ (Operation.Validation.validateTarballSize bytes) case _ of - Operation.Validation.ExceedsMaximum maxPackageBytes -> - Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." - Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." - - -- If a package has under ~30 bytes it's about guaranteed that packaging the - -- tarball failed. This can happen if the system running the API has a non- - -- GNU tar installed, for example. - let minBytes = 30.0 - when (bytes < minBytes) do - Except.throw $ "Package tarball is only " <> Number.Format.toString bytes <> " bytes, which indicates the source was not correctly packaged." - - hash <- Sha256.hashFile tarballPath - Log.info $ "Tarball size of " <> show bytes <> " bytes is acceptable." - Log.info $ "Tarball hash: " <> Sha256.print hash - - -- Now that we have the package source contents we can verify we can compile - -- the package. We skip failures when the package is a legacy package. - Comment.comment $ Array.fold - [ "Verifying package compiles using compiler " - , Version.print payload.compiler - , " and resolutions:\n" - , "```json\n" - , printJson (Internal.Codec.packageMap Version.codec) verifiedResolutions - , "\n```" - ] - - installedResolutions <- compilePackage { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions } >>= case _ of - Left error -> Except.throw error - Right installed -> pure installed - - Comment.comment "Package is verified! Uploading it to the storage backend..." - Storage.upload manifest.name manifest.version tarballPath - Log.debug $ "Adding the new version " <> Version.print manifest.version <> " to the package metadata file." - let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } - let newMetadata = metadata { published = Map.insert manifest.version newPublishedVersion metadata.published } - - Registry.writeMetadata manifest.name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" - - -- We write to the registry index if possible. If this fails, the packaging - -- team should manually insert the entry. - Log.debug "Adding the new version to the registry index" - Registry.writeManifest (Manifest manifest) - - Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry!" - - Log.debug "Uploading package documentation to Pursuit" - if payload.compiler >= Purs.minPursuitPublish then - publishToPursuit { source: packageDirectory, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of - Left publishErr -> do - Log.error publishErr - Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr - Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" - else do - Comment.comment $ Array.fold - [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" - , Version.print payload.compiler - , "). If you want to publish documentation, please try again with a later compiler." 
- ] + Comment.comment $ Array.fold + [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" + , Version.print payload.compiler + , "). If you want to publish documentation, please try again with a later compiler." + ] - allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } - Just try -> do - found <- findAllCompilers - { source: packageDirectory - , manifest: Manifest manifest - , compilers: try - } - pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + Comment.comment "Determining all valid compiler versions for this package..." + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right (NonEmptySet.toUnfoldable1 validCompilers) })) manifest.version newMetadata.published } - Registry.writeMetadata manifest.name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right (NonEmptySet.toUnfoldable1 validCompilers) })) (un Manifest manifest).version newMetadata.published } + Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) - Comment.comment "Wrote completed metadata to the registry!" - FS.Extra.remove tmp + Comment.comment "Wrote completed metadata to the registry!" + FS.Extra.remove tmp -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -835,37 +854,6 @@ validateResolutions manifest resolutions = do , incorrectVersionsError ] -type CompilePackage = - { source :: FilePath - , compiler :: Version - , resolutions :: Map PackageName Version - } - -compilePackage :: forall r. 
CompilePackage -> Run (STORAGE + LOG + AFF + EFFECT + r) (Either String FilePath)
-compilePackage { source, compiler, resolutions } = Except.runExcept do
-  tmp <- Tmp.mkTmpDir
-  output <- do
-    if Map.isEmpty resolutions then do
-      Log.debug "Compiling source code (no dependencies to install)..."
-      Run.liftAff $ Purs.callCompiler
-        { command: Purs.Compile { globs: [ "src/**/*.purs" ] }
-        , version: Just compiler
-        , cwd: Just source
-        }
-    else do
-      Log.debug "Installing build plan..."
-      installBuildPlan resolutions tmp
-      Log.debug "Compiling..."
-      Run.liftAff $ Purs.callCompiler
-        { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ tmp, "*/src/**/*.purs" ] ] }
-        , version: Just compiler
-        , cwd: Just source
-        }
-
-  case output of
-    Left err -> Except.throw $ printCompilerFailure compiler err
-    Right _ -> pure tmp
-
 type FindAllCompilersResult =
   { failed :: Map Version (Either SolverErrors CompilerFailure)
   , succeeded :: Set Version
@@ -884,7 +872,6 @@ findAllCompilers { source, manifest, compilers } = do
     case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of
       Left solverErrors -> do
         Log.info $ "Failed to solve with compiler " <> Version.print target
-        Log.debug $ Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) solverErrors
         pure $ Left $ Tuple target (Left solverErrors)
       Right (Tuple mbCompiler resolutions) -> do
         Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions
@@ -948,6 +935,7 @@ printCompilerFailure compiler = case _ of
 -- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
 installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
 installBuildPlan resolutions dependenciesDir = do
+  Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir
   -- We fetch every dependency at its resolved version, unpack the tarball, and
   -- store the resulting source code in a specified directory for dependencies.
   forWithIndex_ resolutions \name version -> do
@@ -967,11 +955,10 @@ installBuildPlan resolutions dependenciesDir = do
     Log.debug $ "Installed " <> formatPackageVersion name version
 
 -- | Parse the name and version from a path to a module installed in the standard
--- | form: '<prefix>/<package-name>-<version>/...'
-parseInstalledModulePath :: { prefix :: FilePath, path :: FilePath } -> Either String { name :: PackageName, version :: Version }
-parseInstalledModulePath { prefix, path } = do
+-- | form: '<package-name>-<version>...'
+parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version }
+parseModulePath path = do
   packageVersion <- lmap Parsing.parseErrorMessage $ Parsing.runParser path do
-    _ <- Parsing.String.string prefix
     _ <- Parsing.Combinators.optional (Parsing.Combinators.try (Parsing.String.string Path.sep))
     Tuple packageVersionChars _ <- Parsing.Combinators.Array.manyTill_ Parsing.String.anyChar (Parsing.String.string Path.sep)
     pure $ String.CodeUnits.fromCharArray (Array.fromFoldable packageVersionChars)
@@ -1195,134 +1182,135 @@ type AdjustManifest =
   { source :: FilePath
   , compiler :: Version
   , manifest :: Manifest
-  , index :: CompilerIndex
+  , legacyIndex :: Maybe DependencyIndex
+  , currentIndex :: CompilerIndex
  , resolutions :: Maybe (Map PackageName Version)
  }
 
--- | Check the given manifest to determine dependencies that are unused and can
--- | be removed, as well as dependencies that are used but not listed in the
--- | manifest dependencies.
-fixManifestDependencies +-- | Conform a legacy manifest to the Registry requirements for dependencies, +-- | ie. that all direct imports are listed (no transitive dependencies) and no +-- | unused dependencies are listed. +conformLegacyManifest :: forall r - . AdjustManifest - -> Run (COMMENT + REGISTRY + STORAGE + LOG + EXCEPT String + AFF + EFFECT + r) (Tuple Manifest (Map PackageName Version)) -fixManifestDependencies { source, compiler, index, manifest: Manifest manifest, resolutions } = do - verified <- verifyResolutions index compiler (Manifest manifest) resolutions - - Log.debug "Fixing manifest dependencies if needed..." - tmp <- Tmp.mkTmpDir - installBuildPlan verified tmp + . Manifest + -> CompilerIndex + -> Solver.TransitivizedRegistry + -> ValidateDepsError + -> Run (COMMENT + LOG + r) (Tuple Manifest (Map PackageName Version)) +conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = Except.catch (\e -> unsafeCrashWith e) do + let + purs :: PackageName + purs = unsafeFromRight (PackageName.parse "purs") - Log.debug "Discovering used dependencies from source." - let srcGlobs = Path.concat [ source, "src", "**", "*.purs" ] - let depGlobs = Path.concat [ tmp, "*", "src", "**", "*.purs" ] - let command = Purs.Graph { globs: [ srcGlobs, depGlobs ] } + manifestRequired :: SemigroupMap PackageName Intersection + manifestRequired = Solver.initializeRequired manifest.dependencies - -- We need to use the minimum compiler version that supports 'purs graph'. - let compiler' = if compiler >= Purs.minPursGraph then compiler else Purs.minPursGraph - result <- Run.liftAff (Purs.callCompiler { command, version: Just compiler', cwd: Nothing }) - FS.Extra.remove tmp - case result of - Left err -> case err of - UnknownError str -> Except.throw str - MissingCompiler -> Except.throw $ "Missing compiler " <> Version.print compiler' - CompilationError errs -> do - Log.warn $ Array.fold - [ "Failed to discover unused dependencies because purs graph failed:\n" - , Purs.printCompilerErrors errs - ] - -- purs graph will fail if the source code is malformed or because the - -- package uses syntax before the oldest usable purs graph compiler (ie. - -- 0.14.0). In this case we can't determine unused dependencies and should - -- leave the manifest untouched. - pure $ Tuple (Manifest manifest) verified - Right output -> do - graph <- case Argonaut.Parser.jsonParser output of - Left parseErr -> Except.throw $ "Failed to parse purs graph output as JSON while finding unused dependencies: " <> parseErr - Right json -> case CA.decode PursGraph.pursGraphCodec json of - Left decodeErr -> Except.throw $ "Failed to decode JSON from purs graph output while finding unused dependencies: " <> CA.printJsonDecodeError decodeErr - Right graph -> do - Log.debug "Got a valid graph of source and dependencies." 
- pure graph + legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of + Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right solved -> pure solved - let - depsGraph = Map.filter (isNothing <<< String.stripPrefix (String.Pattern source) <<< _.path) graph - pathParser = map _.name <<< parseInstalledModulePath <<< { prefix: tmp, path: _ } + Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) legacyResolutions - associated <- case PursGraph.associateModules pathParser depsGraph of - Left errs -> do - Except.throw $ String.joinWith "\n" - [ "Failed to associate modules with packages while finding unused dependencies:" - , flip NonEmptyArray.foldMap1 errs \{ error, module: ModuleName moduleName, path } -> - " - " <> moduleName <> " (" <> path <> "): " <> error <> "\n" - ] - Right modules -> pure modules + let + legacyTransitive :: Map PackageName Range + legacyTransitive = + Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) + $ Safe.Coerce.coerce + $ _.required + $ Solver.solveSteps (Solver.solveSeed { registry: legacyRegistry, required: manifestRequired }) - let sourceModules = Map.keys $ Map.filter (isJust <<< String.stripPrefix (String.Pattern source) <<< _.path) graph - let directImports = PursGraph.directDependenciesOf sourceModules graph - Log.debug $ "Found modules directly imported by project source code: " <> String.joinWith ", " (map unwrap (Set.toUnfoldable directImports)) - let directPackages = Set.mapMaybe (flip Map.lookup associated) directImports - Log.debug $ "Found packages directly imported by project source code: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable directPackages)) + Log.debug $ "Got transitive solution:\n" <> printJson (Internal.Codec.packageMap Range.codec) legacyTransitive - -- Unused packages are those which are listed in the manifest dependencies - -- but which are not imported by the package source code. - let unusedInManifest = Set.filter (not <<< flip Set.member directPackages) (Map.keys manifest.dependencies) + let + associateMissing :: Array PackageName -> Map PackageName Range + associateMissing packages = do + -- First we look up the package in the produced transitive ranges, as those + -- are the most likely to be correct. + let associateTransitive pkg = maybe (Left pkg) (\range -> Right (Tuple pkg range)) (Map.lookup pkg legacyTransitive) + let associated = partitionEithers (map associateTransitive packages) + let foundFromTransitive = Map.fromFoldable associated.success + + -- If not found, we search for the ranges described for this dependency + -- in the manifests of the packages in the resolutions. 
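+          -- (Editorial illustration with hypothetical names: if the resolutions
+          -- pin foo@1.2.0, and foo@1.2.0's manifest lists bar: ">=2.0.0 <3.0.0",
+          -- then a missing dependency 'bar' receives that range; when several
+          -- resolved manifests mention 'bar', their ranges are intersected.)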
+ let + resolutionRanges :: Map PackageName Range + resolutionRanges = do + let + foldFn name prev version = fromMaybe prev do + versions <- Map.lookup name (un SemigroupMap legacyRegistry) + deps <- Map.lookup version (un SemigroupMap versions) + let deps' = Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) (un SemigroupMap deps) + pure $ Map.unionWith (\l r -> fromMaybe l (Range.intersect l r)) prev deps' + + foldlWithIndex foldFn Map.empty legacyResolutions + + foundFromResolutions :: Map PackageName Range + foundFromResolutions = Map.fromFoldable do + associated.fail <#> \pkg -> case Map.lookup pkg resolutionRanges of + Nothing -> unsafeCrashWith $ "Package " <> PackageName.print pkg <> " not found in resolution ranges" + Just range -> Tuple pkg range + + Map.union foundFromTransitive foundFromResolutions + + fixUnused names (Manifest m) resolutions = do + let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names + let fixedDependencies = Map.difference m.dependencies unused + let fixedResolutions = Map.difference resolutions unused + Tuple fixedDependencies fixedResolutions + + fixMissing names (Manifest m) = do + let fixedDependencies = Map.union m.dependencies (associateMissing (NonEmptySet.toUnfoldable names)) + -- Once we've fixed the missing dependencies we need to be sure we can still + -- produce a viable solution with the current index. + case Solver.solve (un CompilerIndex currentIndex) fixedDependencies of + Left unsolvable -> unsafeCrashWith $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right solved -> Tuple fixedDependencies (Map.delete purs solved) + + previousDepsMessage = Array.fold + [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add direct-imported ones. " + , "Your dependency list was:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + , "\n```\n" + ] - if Set.isEmpty unusedInManifest then - -- If there are no unused dependencies then we don't need to fix anything. 
- pure $ Tuple (Manifest manifest) verified - else do - Log.debug $ "Found unused dependencies: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable unusedInManifest)) + newDepsMessage (Manifest new) = Array.fold + [ "\nYour new dependency list is:\n" + , "```json\n" + , printJson (Internal.Codec.packageMap Range.codec) new.dependencies + , "\n```\n" + ] - let - registry :: Solver.TransitivizedRegistry - registry = Solver.initializeRegistry $ un CompilerIndex index - - prune :: Map PackageName Range -> Map PackageName Range - prune deps = do - let - partition = partitionEithers $ map (\entry -> entry # if Set.member (fst entry) directPackages then Right else Left) $ Map.toUnfoldable deps - unusedDeps = Map.fromFoldable partition.fail - - if Map.isEmpty unusedDeps then - deps - else do - let - usedDeps :: Map PackageName Range - usedDeps = Map.fromFoldable partition.success - - unusedTransitive :: Map PackageName Range - unusedTransitive = - Map.mapMaybeWithKey (\key intersect -> if Map.member key unusedDeps then Nothing else Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) - $ Safe.Coerce.coerce - $ _.required - $ Solver.solveSteps (Solver.solveSeed { registry, required: Solver.initializeRequired unusedDeps }) - - prune $ Map.unionWith (\used unused -> fromMaybe used (Range.intersect used unused)) usedDeps unusedTransitive - - prunedDependencies = prune manifest.dependencies - - case Solver.solveFull { registry, required: Solver.initializeRequired prunedDependencies } of - Left failure -> - Except.throw $ "Failed to solve for dependencies while fixing manifest: " <> Foldable1.foldMap1 (append "\n" <<< Solver.printSolverError) failure - Right new' -> do - let purs = unsafeFromRight (PackageName.parse "purs") - let newResolutions = Map.delete purs new' - let removed = Map.keys $ Map.difference manifest.dependencies prunedDependencies - let added = Map.difference prunedDependencies manifest.dependencies - Comment.comment $ Array.fold - [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones. 
Your dependency list was:" - , "\n```json\n" - , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - , "\n```\n" - , Monoid.guard (not (Set.isEmpty removed)) $ " - We have removed the following packages: " <> String.joinWith ", " (map PackageName.print (Set.toUnfoldable removed)) <> "\n" - , Monoid.guard (not (Map.isEmpty added)) $ " - We have added the following packages: " <> String.joinWith ", " (map (\(Tuple name range) -> PackageName.print name <> "(" <> Range.print range <> ")") (Map.toUnfoldable added)) <> "\n" - , "Your new dependency list is:" - , "\n```json\n" - , printJson (Internal.Codec.packageMap Range.codec) prunedDependencies - , "\n```\n" - ] - pure $ Tuple (Manifest (manifest { dependencies = prunedDependencies })) newResolutions + case problem of + UnusedDependencies names -> do + let (Tuple deps resolutions) = fixUnused names (Manifest manifest) legacyResolutions + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + MissingDependencies names -> do + let (Tuple deps resolutions) = fixMissing names (Manifest manifest) + let newManifest = Manifest (manifest { dependencies = deps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest resolutions + UnusedAndMissing { missing, unused } -> do + let result = fixMissing missing (Manifest manifest) + let (Tuple newDeps newResolutions) = fixUnused unused (Manifest (manifest { dependencies = (fst result) })) (snd result) + let newManifest = Manifest (manifest { dependencies = newDeps }) + Comment.comment $ Array.fold + [ previousDepsMessage + , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" + , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" + , newDepsMessage newManifest + ] + pure $ Tuple newManifest newResolutions type COMPILER_CACHE r = (compilerCache :: Cache CompilerCache | r) diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 0c489d009..e4d3ebf68 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -241,8 +241,8 @@ request octokit githubRequest@{ route: route@(GitHubRoute method _ _), codec } = -- auto-expire cache entries. We will be behind GitHub at most this amount per repo. -- -- TODO: This 'diff' check should be removed once we have conditional requests. 
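 -- (Editorial note: a conditional request would send the stored ETag in an
 -- If-None-Match header and treat a "304 Not Modified" response as a cache
 -- hit; the cache entry written below already reserves an 'etag' field.)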
- Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 24.0 -> do
-          Log.debug $ "Found cache entry but it was modified more than 24 hours ago, refetching " <> printedRoute
+        Right _ | DateTime.diff now prevResponse.modified >= Duration.Hours 23.0 -> do
+          Log.debug $ "Found cache entry but it was modified more than 23 hours ago, refetching " <> printedRoute
           result <- requestWithBackoff octokit githubRequest
           Cache.put _githubCache (Request route) (result <#> \resp -> { response: CA.encode codec resp, modified: now, etag: Nothing })
           pure result
diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs
index 4ef868af5..bf5e3eab0 100644
--- a/app/src/App/GitHubIssue.purs
+++ b/app/src/App/GitHubIssue.purs
@@ -57,7 +57,7 @@ main = launchAff_ $ do
       Right packageOperation -> case packageOperation of
         Publish payload ->
-          API.publish payload
+          API.publish Nothing payload
         Authenticated payload -> do
           -- If we receive an authenticated operation via GitHub, then we
           -- re-sign it with pacchettibotti credentials if and only if the
diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs
index c44f2d958..7655bb0bc 100644
--- a/app/src/App/Server.purs
+++ b/app/src/App/Server.purs
@@ -69,7 +69,7 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of
     lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish
     forkPipelineJob publish.name publish.ref PublishJob \jobId -> do
       Log.info $ "Received Publish request, job id: " <> unwrap jobId
-      API.publish publish
+      API.publish Nothing publish

   Unpublish, Post -> do
     auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body
diff --git a/app/test/App/API.purs b/app/test/App/API.purs
index c61c67292..9206b4ac8 100644
--- a/app/test/App/API.purs
+++ b/app/test/App/API.purs
@@ -9,6 +9,7 @@ import Data.Set as Set
 import Data.String as String
 import Data.String.NonEmpty as NonEmptyString
 import Effect.Aff as Aff
+import Effect.Class.Console as Console
 import Effect.Ref as Ref
 import Node.FS.Aff as FS.Aff
 import Node.Path as Path
@@ -27,8 +28,10 @@ import Registry.Foreign.FastGlob as FastGlob
 import Registry.Foreign.Tmp as Tmp
 import Registry.Internal.Codec as Internal.Codec
 import Registry.Manifest as Manifest
+import Registry.ManifestIndex as ManifestIndex
 import Registry.PackageName as PackageName
 import Registry.Range as Range
+import Registry.Solver as Solver
 import Registry.Test.Assert as Assert
 import Registry.Test.Assert.Run as Assert.Run
 import Registry.Test.Utils as Utils
@@ -57,22 +60,18 @@ spec = do
     removeIgnoredTarballFiles
     copySourceFiles

-  Spec.describe "Parses installed paths" do
-    Spec.it "Parses install path <tmp>/my-package-1.0.0/..."
do - tmp <- Tmp.mkTmpDir - let moduleA = Path.concat [ tmp, "my-package-1.0.0", "src", "ModuleA.purs" ] - case API.parseInstalledModulePath { prefix: tmp, path: moduleA } of - Left err -> Assert.fail $ "Expected to parse " <> moduleA <> " but got error: " <> err - Right { name, version } -> do - Assert.shouldEqual name (Utils.unsafePackageName "my-package") - Assert.shouldEqual version (Utils.unsafeVersion "1.0.0") - FS.Extra.remove tmp - Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do logs <- liftEffect (Ref.new []) let + toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry + toLegacyIndex = + Solver.exploreAllTransitiveDependencies + <<< Solver.initializeRegistry + <<< map (map (_.dependencies <<< un Manifest)) + <<< ManifestIndex.toMap + testEnv = { workdir , logs @@ -101,7 +100,8 @@ spec = do } -- First, we publish the package. - API.publish publishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -147,7 +147,7 @@ spec = do -- Finally, we can verify that publishing the package again should fail -- since it already exists. - Except.runExcept (API.publish publishArgs) >>= case _ of + Except.runExcept (API.publish Nothing publishArgs) >>= case _ of Left _ -> pure unit Right _ -> Except.throw $ "Expected publishing " <> formatPackageVersion name version <> " twice to fail." @@ -162,13 +162,60 @@ spec = do , ref: "v4.0.1" , resolutions: Nothing } - API.publish pursuitOnlyPublishArgs + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + + -- We can also verify that transitive dependencies are added for legacy + -- packages. + let + transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } + transitivePublishArgs = + { compiler: Utils.unsafeVersion "0.15.10" + , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } + , name: transitive.name + , ref: "v" <> Version.print transitive.version + , resolutions: Nothing + } + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + + -- We should verify the resulting metadata file is correct + Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in metadata." + Just m -> pure m + + case Map.lookup transitive.version transitiveMetadata.published of + Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." 
+ Just published -> case published.compilers of + Left one -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix but unfinished single version: " <> Version.print one + Right many -> do + let many' = NonEmptyArray.toArray many + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11", "0.15.12" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + + Registry.readManifest transitive.name transitive.version >>= case _ of + Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in manifest index." + Just (Manifest manifest) -> do + let expectedDeps = Map.singleton (Utils.unsafePackageName "prelude") (Utils.unsafeRange ">=6.0.0 <7.0.0") + when (manifest.dependencies /= expectedDeps) do + Except.throw $ String.joinWith "\n" + [ "Expected transitive@1.0.0 to have dependencies" + , printJson (Internal.Codec.packageMap Range.codec) expectedDeps + , "\nbut got" + , printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + ] case result of - Left err -> do + Left exn -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Got an Aff exception! " <> Aff.message exn + Right (Left err) -> do recorded <- liftEffect (Ref.read logs) - Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 but got error: " <> err <> "\n\nLogs:\n" <> String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Right _ -> pure unit + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err + Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index b69b2f304..9d3c27c9e 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -108,8 +108,8 @@ type TestEnv = , username :: String } -runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff a -runTestEffects env operation = do +runTestEffects :: forall a. TestEnv -> Run TEST_EFFECTS a -> Aff (Either Aff.Error a) +runTestEffects env operation = Aff.attempt do resourceEnv <- Env.lookupResourceEnv githubCache <- liftEffect Cache.newCacheRef legacyCache <- liftEffect Cache.newCacheRef @@ -137,7 +137,7 @@ runTestEffects env operation = do -- | For testing simple Run functions that don't need the whole environment. runBaseEffects :: forall a. 
Run (LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff a -runBaseEffects = +runBaseEffects = do Log.interpret (\(Log _ _ next) -> pure next) -- Base effects >>> Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/lib/src/Operation/Validation.purs b/lib/src/Operation/Validation.purs index d25b47064..c842145d9 100644 --- a/lib/src/Operation/Validation.purs +++ b/lib/src/Operation/Validation.purs @@ -5,6 +5,7 @@ import Prelude import Data.Array as Array import Data.Array.NonEmpty (NonEmptyArray) import Data.Array.NonEmpty as NEA +import Data.Bifunctor as Bifunctor import Data.DateTime (DateTime) import Data.DateTime as DateTime import Data.Either (Either(..)) @@ -35,6 +36,8 @@ import Registry.Metadata (Metadata(..), PublishedMetadata, UnpublishedMetadata) import Registry.Operation (PublishData) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.PursGraph (AssociatedError, ModuleName, PursGraph) +import Registry.PursGraph as PursGraph import Registry.Range (Range) import Registry.Range as Range import Registry.Solver (CompilerIndex) @@ -69,6 +72,59 @@ isNotUnpublished :: Manifest -> Metadata -> Maybe UnpublishedMetadata isNotUnpublished (Manifest { version }) (Metadata { unpublished }) = Map.lookup version unpublished +data ValidateDepsError + = UnusedDependencies (NonEmptySet PackageName) + | MissingDependencies (NonEmptySet PackageName) + | UnusedAndMissing { unused :: NonEmptySet PackageName, missing :: NonEmptySet PackageName } + +derive instance Eq ValidateDepsError + +printValidateDepsError :: ValidateDepsError -> String +printValidateDepsError = case _ of + UnusedDependencies unused -> + "Unused dependencies (" <> printPackages unused <> ")" + MissingDependencies missing -> + "Missing dependencies (" <> printPackages missing <> ")" + UnusedAndMissing { unused, missing } -> + "Unused dependencies (" <> printPackages unused <> ") and missing dependencies (" <> printPackages missing <> ")" + where + printPackages :: NonEmptySet PackageName -> String + printPackages = String.joinWith ", " <<< map PackageName.print <<< NonEmptySet.toUnfoldable + +-- | Verifies that the manifest lists dependencies imported in the source code, +-- | no more (ie. unused) and no less (ie. transitive). The graph passed to this +-- | function should be the output of 'purs graph' executed on the 'output' +-- | directory of the package compiled with its dependencies. +noTransitiveOrMissingDeps :: Manifest -> PursGraph -> (FilePath -> Either String PackageName) -> Either (Either (NonEmptyArray AssociatedError) ValidateDepsError) Unit +noTransitiveOrMissingDeps (Manifest manifest) graph parser = do + associated <- Bifunctor.lmap Left $ PursGraph.associateModules parser graph + + let + packageModules :: Set ModuleName + packageModules = Map.keys $ Map.filter (_ == manifest.name) associated + + directImportModules :: Set ModuleName + directImportModules = PursGraph.directDependenciesOf packageModules graph + + directImportPackages :: Set PackageName + directImportPackages = Set.mapMaybe (flip Map.lookup associated) directImportModules + + -- Unused packages are those which are listed in the manifest dependencies + -- but which are not imported by the package source code. 
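+    -- (Editorial illustration with hypothetical packages: given dependencies
+    -- { prelude, arrays } and direct imports resolving to { prelude, maybe },
+    -- unusedDependencies = { arrays } and missingDependencies = { maybe }.)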
+ unusedDependencies :: Set PackageName + unusedDependencies = Set.filter (not <<< flip Set.member directImportPackages) (Map.keys manifest.dependencies) + + -- Missing packages are those which are imported by the package source code + -- but which are not listed in its dependencies. + missingDependencies :: Set PackageName + missingDependencies = Set.filter (not <<< flip Map.member manifest.dependencies) directImportPackages + + case NonEmptySet.fromSet unusedDependencies, NonEmptySet.fromSet missingDependencies of + Nothing, Nothing -> Right unit + Just unused, Nothing -> Left $ Right $ UnusedDependencies unused + Nothing, Just missing -> Left $ Right $ MissingDependencies missing + Just unused, Just missing -> Left $ Right $ UnusedAndMissing { unused, missing } + -- | Verifies that the manifest dependencies are solvable by the registry solver. validateDependenciesSolve :: Version -> Manifest -> CompilerIndex -> Either Solver.SolverErrors (Map PackageName Version) validateDependenciesSolve compiler (Manifest manifest) compilerIndex = diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 30b5dc9eb..946f1f43d 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -369,7 +369,11 @@ runLegacyImport logs = do , compiler , resolutions: Just resolutions } - Except.runExcept (API.publish payload) >>= case _ of + legacyIndex = + Solver.exploreAllTransitiveDependencies + $ Solver.initializeRegistry + $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) + Except.runExcept (API.publish (Just legacyIndex) payload) >>= case _ of Left error -> do Log.error $ "Failed to publish " <> formatted <> ": " <> error Cache.put _importCache (PublishFailure manifest.name manifest.version) (PublishError error) @@ -529,39 +533,31 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa buildManifestForVersion :: Tag -> Run _ (Either VersionValidationError Manifest) buildManifestForVersion tag = Run.Except.runExceptAt _exceptVersion do version <- exceptVersion $ validateVersion tag - - -- TODO: This will use the manifest for the package version from the - -- registry, without trying to produce a legacy manifest. However, we may - -- want to always attempt to produce a legacy manifest. If we can produce - -- one we compare it to the existing entry, failing if there is a - -- difference; if we can't, we warn and fall back to the existing entry. - Registry.readManifest package.name (LenientVersion.version version) >>= case _ of - Just manifest -> pure manifest - Nothing -> Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of - Just cached -> exceptVersion cached - Nothing -> do - -- While technically not 'legacy', we do need to handle packages with - -- spago.yaml files because they've begun to pop up since the registry - -- alpha began and we don't want to drop them when doing a re-import. - fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of - Just manifest -> do - Log.debug $ "Built manifest from discovered spago.yaml file." 
- Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) - pure manifest - Nothing -> do - Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) - manifest <- Run.Except.runExceptAt _exceptVersion do - exceptVersion $ validateVersionDisabled package.name version - legacyManifest <- do - Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of - Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } - Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest - case manifest of - Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err - Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val - Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest - exceptVersion manifest + Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of + Just cached -> exceptVersion cached + Nothing -> do + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. + fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest + Nothing -> do + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + legacyManifest <- do + Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of + Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." 
} + Right result -> pure result + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag @@ -1103,7 +1099,7 @@ findFirstCompiler { source, manifest, resolutions, compilers, installed } = do search <- Except.runExcept $ for (Array.reverse (Array.sort compilers)) \target -> do result <- Cache.get API._compilerCache (API.Compilation manifest resolutions target) >>= case _ of Nothing -> do - Log.debug $ "Trying compiler " <> Version.print target + Log.info $ "Not cached, trying compiler " <> Version.print target workdir <- Tmp.mkTmpDir result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 0d26e8128..dc3321d39 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -239,7 +239,7 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` via cache - API.publish + API.publish Nothing { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref From 3cdb9b94ffdbecc1b14ef5700c928f822231e38d Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Thu, 7 Dec 2023 12:44:27 -0500 Subject: [PATCH 30/64] Tweaks for efficiency --- app/src/App/API.purs | 16 +++++++++------- app/src/App/CLI/Git.purs | 8 ++++---- scripts/src/LegacyImporter.purs | 17 ++++++++++------- 3 files changed, 23 insertions(+), 18 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index f6dab9efa..85abf1c79 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -445,10 +445,10 @@ publish maybeLegacyIndex payload = do Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do Comment.comment $ Array.fold - [ "Converted your spago.yaml into a purs.json manifest to use for publishing:\n" - , "```json\n" + [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```\n" + , "\n```\n" ] pure manifest @@ -474,10 +474,10 @@ publish maybeLegacyIndex payload = do Log.debug $ "Successfully produced a legacy manifest from the package source." 
let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest Comment.comment $ Array.fold - [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:\n" - , "```json\n" + [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" + , "\n```json\n" , printJson Manifest.codec manifest - , "```\n" + , "\n```\n" ] pure manifest @@ -1217,7 +1217,9 @@ conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = Map.mapMaybe (\intersect -> Range.mk (Solver.lowerBound intersect) (Solver.upperBound intersect)) $ Safe.Coerce.coerce $ _.required - $ Solver.solveSteps (Solver.solveSeed { registry: legacyRegistry, required: manifestRequired }) + $ Solver.solveSteps + $ Solver.solveSeed + $ Solver.withReachable { registry: legacyRegistry, required: manifestRequired } Log.debug $ "Got transitive solution:\n" <> printJson (Internal.Codec.packageMap Range.codec) legacyTransitive diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac64c8e65..891d8419f 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -112,10 +112,10 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do ] pure true Just files -> do - Log.debug $ Array.fold - [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " - , NonEmptyArray.foldMap1 (append "\n - ") files - ] + -- Log.debug $ Array.fold + -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " + -- , NonEmptyArray.foldMap1 (append "\n - ") files + -- ] Log.warn $ Array.fold [ "Local checkout of " <> formatted , " has untracked or dirty files, it may not be safe to pull the latest." diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 946f1f43d..84f5b420b 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -263,8 +263,8 @@ runLegacyImport logs = do pure range let - publishLegacyPackage :: Manifest -> Run _ Unit - publishLegacyPackage (Manifest manifest) = do + publishLegacyPackage :: Solver.TransitivizedRegistry -> Manifest -> Run _ Unit + publishLegacyPackage legacyIndex (Manifest manifest) = do let formatted = formatPackageVersion manifest.name manifest.version Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of @@ -369,10 +369,6 @@ runLegacyImport logs = do , compiler , resolutions: Just resolutions } - legacyIndex = - Solver.exploreAllTransitiveDependencies - $ Solver.initializeRegistry - $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) Except.runExcept (API.publish (Just legacyIndex) payload) >>= case _ of Left error -> do Log.error $ "Failed to publish " <> formatted <> ": " <> error @@ -390,7 +386,14 @@ runLegacyImport logs = do , "----------" ] - void $ for manifests publishLegacyPackage + legacyIndex <- do + Log.info "Transitivizing legacy registry..." + pure + $ Solver.exploreAllTransitiveDependencies + $ Solver.initializeRegistry + $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) + + void $ for manifests (publishLegacyPackage legacyIndex) Log.info "Finished publishing! Collecting all publish failures and writing to disk." 
let From d0181e51b29fadba9988eef00051c0af013a03d0 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 8 Dec 2023 17:16:05 -0500 Subject: [PATCH 31/64] (hopefully) final run of the importer --- app/src/App/API.purs | 77 ++++++++++++++++++---------------- app/src/App/Effect/Source.purs | 5 ++- 2 files changed, 44 insertions(+), 38 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 85abf1c79..caf74ec9f 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -104,7 +104,7 @@ import Registry.PursGraph (ModuleName(..)) import Registry.PursGraph as PursGraph import Registry.Range as Range import Registry.Sha256 as Sha256 -import Registry.Solver (CompilerIndex(..), DependencyIndex, Intersection, SolverErrors) +import Registry.Solver (CompilerIndex, DependencyIndex, Intersection, SolverErrors) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) @@ -576,7 +576,9 @@ publish maybeLegacyIndex payload = do Just existing -> Run.liftAff $ writeJsonFile Manifest.codec packagePursJson existing publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions: verifiedResolutions, installedResolutions } >>= case _ of Left publishErr -> Except.throw publishErr - Right _ -> Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Right _ -> do + FS.Extra.remove tmp + Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" -- In this case the package version has not been published, so we proceed -- with ordinary publishing. @@ -644,18 +646,15 @@ publish maybeLegacyIndex payload = do "\n - " <> moduleName <> " (" <> path <> "): " <> error ] - Left (Right depError) - -- If the package fails the transitive / missing check and uses - -- a contemporary manifest then it should be rejected. - | (hadPursJson || hasSpagoYaml) -> - Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError - -- If the package fails, is legacy, and we have a legacy index - -- then we can try to fix it. - | Just legacyIndex <- maybeLegacyIndex -> do - Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError - conformLegacyManifest (Manifest receivedManifest) compilerIndex legacyIndex depError - | otherwise -> - Except.throw $ "Failed to validate unused / missing dependencies and no legacy index was provided to attempt a fix: " <> Operation.Validation.printValidateDepsError depError + -- FIXME: For now we attempt to fix packages if a legacy index + -- is provided (ie. the publish is via the importer) but we + -- should at some point make this a hard error. + Left (Right depError) -> case maybeLegacyIndex of + Nothing -> + Except.throw $ "Failed to validate unused / missing dependencies: " <> Operation.Validation.printValidateDepsError depError + Just legacyIndex -> do + Log.info $ "Found fixable dependency errors: " <> Operation.Validation.printValidateDepsError depError + conformLegacyManifest (Manifest receivedManifest) payload.compiler compilerIndex legacyIndex depError -- If the check passes then we can simply return the manifest and -- resolutions. @@ -689,7 +688,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. 
Run.liftAff $ FS.Extra.remove installedResolutions - installBuildPlan validatedResolutions installedResolutions + installBuildPlan resolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -1193,23 +1192,27 @@ type AdjustManifest = conformLegacyManifest :: forall r . Manifest + -> Version -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + r) (Tuple Manifest (Map PackageName Version)) -conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = Except.catch (\e -> unsafeCrashWith e) do + -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) +conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let - purs :: PackageName - purs = unsafeFromRight (PackageName.parse "purs") - manifestRequired :: SemigroupMap PackageName Intersection manifestRequired = Solver.initializeRequired manifest.dependencies legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of - Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable - Right solved -> pure solved - - Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) legacyResolutions + Left unsolvableLegacy -> do + Log.error $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy + case Solver.solveWithCompiler (Range.exact compiler) currentIndex manifest.dependencies of + Left unsolvableCurrent -> Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableCurrent + Right (Tuple _ solved) -> do + Log.debug $ "Got current resolutions as a fallback to unsolvable legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved + Right solved -> do + Log.debug $ "Got legacy resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) solved + pure solved let legacyTransitive :: Map PackageName Range @@ -1248,25 +1251,24 @@ conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = foundFromResolutions :: Map PackageName Range foundFromResolutions = Map.fromFoldable do - associated.fail <#> \pkg -> case Map.lookup pkg resolutionRanges of - Nothing -> unsafeCrashWith $ "Package " <> PackageName.print pkg <> " not found in resolution ranges" - Just range -> Tuple pkg range + associated.fail # Array.mapMaybe \pkg -> map (Tuple pkg) (Map.lookup pkg resolutionRanges) Map.union foundFromTransitive foundFromResolutions - fixUnused names (Manifest m) resolutions = do + fixUnused names (Manifest m) = do let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names let fixedDependencies = Map.difference m.dependencies unused - let fixedResolutions = Map.difference resolutions unused - Tuple fixedDependencies fixedResolutions + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved fixMissing names (Manifest 
m) = do let fixedDependencies = Map.union m.dependencies (associateMissing (NonEmptySet.toUnfoldable names)) -- Once we've fixed the missing dependencies we need to be sure we can still -- produce a viable solution with the current index. - case Solver.solve (un CompilerIndex currentIndex) fixedDependencies of - Left unsolvable -> unsafeCrashWith $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable - Right solved -> Tuple fixedDependencies (Map.delete purs solved) + case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of + Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved previousDepsMessage = Array.fold [ "Your package is using a legacy manifest format, so we have adjusted your dependencies to remove unused ones and add direct-imported ones. " @@ -1285,7 +1287,7 @@ conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = case problem of UnusedDependencies names -> do - let (Tuple deps resolutions) = fixUnused names (Manifest manifest) legacyResolutions + Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) Comment.comment $ Array.fold [ previousDepsMessage @@ -1294,7 +1296,7 @@ conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = ] pure $ Tuple newManifest resolutions MissingDependencies names -> do - let (Tuple deps resolutions) = fixMissing names (Manifest manifest) + Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) Comment.comment $ Array.fold [ previousDepsMessage @@ -1303,8 +1305,9 @@ conformLegacyManifest (Manifest manifest) currentIndex legacyRegistry problem = ] pure $ Tuple newManifest resolutions UnusedAndMissing { missing, unused } -> do - let result = fixMissing missing (Manifest manifest) - let (Tuple newDeps newResolutions) = fixUnused unused (Manifest (manifest { dependencies = (fst result) })) (snd result) + let unused' = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) unused + let trimmed = Map.difference manifest.dependencies unused' + Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) Comment.comment $ Array.fold [ previousDepsMessage diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index c7e6dfcf9..d172e0dee 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -82,7 +82,7 @@ handle importType = case _ of Log.debug $ "Using legacy Git clone to fetch package source at tag: " <> show { owner, repo, ref } let - repoDir = Path.concat [ destination, repo ] + repoDir = Path.concat [ destination, repo <> "-" <> ref ] clonePackageAtTag = do let url = Array.fold [ "https://github.com/", owner, "/", repo ] @@ -99,10 +99,13 @@ handle importType = case _ of Left error -> do Log.error $ "Failed to clone git tag: " <> Aff.message error <> ", retrying..." 
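             -- (Editorial note: 'alreadyExists' is defined elsewhere in this
             -- module; it presumably matches git's "destination path already
             -- exists" error so a partial clone is removed before retrying.)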
when (alreadyExists (Aff.message error)) $ FS.Extra.remove repoDir
+            Run.liftAff (Aff.delay (Aff.Milliseconds 1000.0))
             Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of
               Right _ -> Log.debug $ "Cloned package source to " <> repoDir
               Left error2 -> do
                 Log.error $ "Failed to clone git tag (attempt 2): " <> Aff.message error2 <> ", retrying..."
+                when (alreadyExists (Aff.message error2)) $ FS.Extra.remove repoDir
+                Run.liftAff (Aff.delay (Aff.Milliseconds 1000.0))
                 Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of
                   Right _ -> Log.debug $ "Cloned package source to " <> repoDir
                   Left error3 -> do

From 6f9f0cdcbf2ea138ab10dea3eb32cf76a68065f7 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Fri, 8 Dec 2023 17:16:05 -0500
Subject: [PATCH 32/64] Update spec to note transitive dependencies
 requirement.

---
 SPEC.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/SPEC.md b/SPEC.md
index 2af908582..f68de5575 100644
--- a/SPEC.md
+++ b/SPEC.md
@@ -207,7 +207,7 @@ Note:
 
 - Globs you provide at the `includeFiles` and `excludeFiles` keys must contain only `*`, `**`, `/`, `.`, `..`, and characters for Linux file paths. It is not possible to negate a glob (ie. the `!` character), and globs cannot represent a path out of the package source directory.
 - When packaging your project source, the registry will first "include" your `src` directory and always-included files such as your `purs.json` file. Then it will include files which match globs indicated by the `includeFiles` key ([always-ignored files](#always-ignored-files) cannot be included). Finally, it will apply the excluding globs indicated by the `excludeFiles` key to the included files ([always-included files](#always-included-files) cannot be excluded).
--- Dependencies you provide at the `dependencies` key must exist in the registry, and the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies).
+- Dependencies you provide at the `dependencies` key must exist in the registry, and the dependency ranges must be solvable (ie. it must be possible to produce a single version of each dependency that satisfies the provided version bounds, including any transitive dependencies). In addition, you may not rely on transitive dependencies: every module you import in your code must come from a package listed directly at the `dependencies` key (see the sketch below).
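A minimal sketch of the transitive-dependency rule (editorial; the package names and ranges below are illustrative, not taken from this patch): if a manifest lists only `lists`, importing a module provided by `maybe` is rejected, because `maybe` is merely a transitive dependency of `lists`.

```purescript
-- purs.json (hypothetical): { "dependencies": { "lists": ">=7.0.0 <8.0.0" } }
module Example where

import Data.List (List)   -- accepted: Data.List comes from the listed 'lists' package
import Data.Maybe (Maybe) -- rejected: Data.Maybe comes from 'maybe', which is only a
                          -- transitive dependency of 'lists' and is not listed directly
```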
For example: From 2721c6ac2be8ee04449d71b8bcbbdc13ecf5eee1 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 8 Dec 2023 18:41:19 -0500 Subject: [PATCH 33/64] attempt to discover publish compiler with both legacy and current indices --- scripts/src/LegacyImporter.purs | 229 +++++++++++++++++++++----------- 1 file changed, 148 insertions(+), 81 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 84f5b420b..1ff86da9a 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -35,6 +35,7 @@ import Data.Set.NonEmpty (NonEmptySet) import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits +import Data.These (These(..)) import Data.Variant as Variant import Effect.Class.Console as Console import Node.FS.Aff as FS.Aff @@ -275,91 +276,157 @@ runLegacyImport logs = do compilerIndex <- API.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted - case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of - Left unsolvable -> do - let errors = map Solver.printSolverError $ NonEmptyList.toUnfoldable unsolvable - Log.warn $ "Could not solve " <> formatted <> Array.foldMap (append "\n") errors - let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") - let { fail: nonCompiler } = partitionEithers $ map (\error -> if isCompilerSolveError error then Right error else Left error) errors - let joined = String.joinWith " " errors - Cache.put _importCache (PublishFailure manifest.name manifest.version) (if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined) - Right (Tuple _ resolutions) -> do - Log.debug $ "Solved " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies - possibleCompilers <- - if Map.isEmpty manifest.dependencies then do - Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." - pure $ NonEmptySet.fromFoldable1 allCompilers - else do - Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." - allMetadata <- Registry.readAllMetadata - case compatibleCompilers allMetadata resolutions of - Left [] -> do + eitherResolutions <- do + let toErrors = map Solver.printSolverError <<< NonEmptyList.toUnfoldable + let isCompilerSolveError = String.contains (String.Pattern "Conflict in version ranges for purs:") + let partitionIsCompiler = partitionEithers <<< map (\error -> if isCompilerSolveError error then Right error else Left error) + + legacySolution <- case Solver.solveFull { registry: legacyIndex, required: Solver.initializeRequired manifest.dependencies } of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with legacy index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right resolutions -> do + Log.debug $ "Solved " <> formatted <> " with legacy index." 
+ pure $ Right resolutions + + currentSolution <- case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of + Left unsolvable -> do + let errors = toErrors unsolvable + let joined = String.joinWith " " errors + let { fail: nonCompiler } = partitionIsCompiler errors + Log.warn $ "Could not solve with current index " <> formatted <> Array.foldMap (append "\n") errors + pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined + Right (Tuple _ resolutions) -> do + Log.debug $ "Solved " <> formatted <> " with contemporary index." + pure $ Right resolutions + + pure $ case legacySolution, currentSolution of + Left err, Left _ -> Left err + Right resolutions, Left _ -> Right $ This resolutions + Left _, Right resolutions -> Right $ That resolutions + Right legacyResolutions, Right currentResolutions -> Right $ Both legacyResolutions currentResolutions + + case eitherResolutions of + -- We skip if we couldn't solve (but we write the error to cache). + Left err -> + Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right resolutionOptions -> do + Log.info "Selecting usable compiler from resolutions..." + + let + findFirstFromResolutions :: Map PackageName Version -> Run _ (Either (Map Version CompilerFailure) Version) + findFirstFromResolutions resolutions = do + Log.debug $ "Finding compiler for " <> formatted <> " with resolutions " <> printJson (Internal.Codec.packageMap Version.codec) resolutions <> "\nfrom dependency list\n" <> printJson (Internal.Codec.packageMap Range.codec) manifest.dependencies + possibleCompilers <- + if Map.isEmpty manifest.dependencies then do Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." pure $ NonEmptySet.fromFoldable1 allCompilers - Left errors -> do - let - printError { packages, compilers } = do - let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages - let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers - key <> " support compilers " <> val - Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) - Except.throw $ Array.fold - [ "Resolutions admit no overlapping compiler versions so your package cannot be compiled:\n" - , Array.foldMap (append "\n - " <<< printError) errors - ] - Right compilers -> do - Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) - pure compilers - - cached <- do - cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> - Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of - Nothing -> pure Nothing - Just { result: Left _ } -> pure Nothing - Just { target, result: Right _ } -> pure $ Just target - pure $ NonEmptyArray.fromArray $ Array.catMaybes cached - - selected <- case cached of - Just prev -> do - let selected = NonEmptyArray.last prev - Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected - pure $ Right selected - Nothing -> do - Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." 
- Log.debug "Fetching source and installing dependencies to test compilers" - tmp <- Tmp.mkTmpDir - { path } <- Source.fetch tmp manifest.location ref - Log.debug $ "Downloaded source to " <> path - Log.debug "Downloading dependencies..." - let installDir = Path.concat [ tmp, ".registry" ] - FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir - Log.debug $ "Installed to " <> installDir - Log.debug "Trying compilers one-by-one..." - selected <- findFirstCompiler - { source: path - , installed: installDir - , compilers: NonEmptySet.toUnfoldable possibleCompilers - , resolutions - , manifest: Manifest manifest - } - FS.Extra.remove tmp - pure selected - - case selected of - Left failures -> do + else do + Log.debug "No compiler version was produced by the solver, so all compilers are potentially compatible." + allMetadata <- Registry.readAllMetadata + case compatibleCompilers allMetadata resolutions of + Left [] -> do + Log.debug "No dependencies to determine ranges, so all compilers are potentially compatible." + pure $ NonEmptySet.fromFoldable1 allCompilers + Left errors -> do + let + printError { packages, compilers } = do + let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages + let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers + key <> " support compilers " <> val + Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) + Except.throw $ Array.fold + [ "Resolutions admit no overlapping compiler versions so your package cannot be compiled:\n" + , Array.foldMap (append "\n - " <<< printError) errors + ] + Right compilers -> do + Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) + pure compilers + + cached <- do + cached <- for (NonEmptySet.toUnfoldable possibleCompilers) \compiler -> + Cache.get API._compilerCache (API.Compilation (Manifest manifest) resolutions compiler) >>= case _ of + Nothing -> pure Nothing + Just { result: Left _ } -> pure Nothing + Just { target, result: Right _ } -> pure $ Just target + pure $ NonEmptyArray.fromArray $ Array.catMaybes cached + + case cached of + Just prev -> do + let selected = NonEmptyArray.last prev + Log.debug $ "Found successful cached compilation for " <> formatted <> " and chose " <> Version.print selected + pure $ Right selected + Nothing -> do + Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." + Log.debug "Fetching source and installing dependencies to test compilers" + tmp <- Tmp.mkTmpDir + { path } <- Source.fetch tmp manifest.location ref + Log.debug $ "Downloaded source to " <> path + Log.debug "Downloading dependencies..." + let installDir = Path.concat [ tmp, ".registry" ] + FS.Extra.ensureDirectory installDir + API.installBuildPlan resolutions installDir + Log.debug $ "Installed to " <> installDir + Log.debug "Trying compilers one-by-one..." 
+ selected <- findFirstCompiler + { source: path + , installed: installDir + , compilers: NonEmptySet.toUnfoldable possibleCompilers + , resolutions + , manifest: Manifest manifest + } + FS.Extra.remove tmp + pure selected + + let + collectCompilerErrors :: Map Version CompilerFailure -> Map (NonEmptyArray Version) CompilerFailure + collectCompilerErrors failures = do let - collected :: Map (NonEmptyArray Version) CompilerFailure - collected = do - let - foldFn prev xs = do - let Tuple _ failure = NonEmptyArray.head xs - let key = map fst xs - Map.insert key failure prev - Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures) + foldFn prev xs = do + let Tuple _ failure = NonEmptyArray.head xs + let key = map fst xs + Map.insert key failure prev + Array.foldl foldFn Map.empty $ Array.groupAllBy (compare `on` snd) (Map.toUnfoldable failures) + + reportFailures :: forall a. _ -> Run _ (Either PublishError a) + reportFailures failures = do + let collected = collectCompilerErrors failures Log.error $ "Failed to find any valid compilers for publishing:\n" <> printJson compilerFailureMapCodec collected - Cache.put _importCache (PublishFailure manifest.name manifest.version) (NoCompilersFound collected) - Right compiler -> do + pure $ Left $ NoCompilersFound collected + + -- Here, we finally attempt to find a suitable compiler. If we only + -- got one set of working resolutions that's what we use. If we got + -- solutions with both the legacy and adjusted-manifest indices, then + -- we try the adjusted index first since that's what is used in the + -- publish pipeline. + eitherCompiler <- case resolutionOptions of + This legacyResolutions -> do + selected <- findFirstFromResolutions legacyResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + That currentResolutions -> do + selected <- findFirstFromResolutions currentResolutions + case selected of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Both legacyResolutions currentResolutions -> do + selectedCurrent <- findFirstFromResolutions currentResolutions + case selectedCurrent of + Right compiler -> pure $ Right $ Tuple compiler currentResolutions + Left currentFailures | legacyResolutions == currentResolutions -> reportFailures currentFailures + Left _ -> do + selectedLegacy <- findFirstFromResolutions legacyResolutions + case selectedLegacy of + Left failures -> reportFailures failures + Right compiler -> pure $ Right $ Tuple compiler legacyResolutions + + case eitherCompiler of + Left err -> Cache.put _importCache (PublishFailure manifest.name manifest.version) err + Right (Tuple compiler resolutions) -> do Log.debug $ "Selected " <> Version.print compiler <> " for publishing." 
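The `This`/`That`/`Both` plumbing above is `Data.These` used to keep whichever solver results succeeded. A standalone sketch of the combination step (`err` and `res` are stand-ins; the importer itself works with solver errors and `Map PackageName Version` resolutions):

    module CombineSolutions where

    import Data.Either (Either(..))
    import Data.These (These(..))

    -- Combine the legacy-index and current-index solver results, keeping
    -- every success: only when both fail is an error surfaced (the legacy
    -- one, matching the importer above).
    combineSolutions
      :: forall err res
       . Either err res -- solved against the legacy index
      -> Either err res -- solved against the current index
      -> Either err (These res res)
    combineSolutions legacy current = case legacy, current of
      Left err, Left _ -> Left err
      Right l, Left _ -> Right (This l)
      Left _, Right c -> Right (That c)
      Right l, Right c -> Right (Both l c)

The `Both` case is what lets the selection logic above try the current-index resolutions first and fall back to the legacy ones.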
let payload = From f8d0f80cd7e85e9e34c7b15b7ab7ee91b2fb6f34 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 9 Dec 2023 21:09:13 -0500 Subject: [PATCH 34/64] Tweaks --- app/src/App/API.purs | 10 +++++++--- app/src/App/CLI/Git.purs | 3 ++- scripts/src/LegacyImporter.purs | 20 ++++++++++++++++---- 3 files changed, 25 insertions(+), 8 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index caf74ec9f..4051e0638 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -1204,7 +1204,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p legacyResolutions <- case Solver.solveFull { registry: legacyRegistry, required: manifestRequired } of Left unsolvableLegacy -> do - Log.error $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy + Log.warn $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableLegacy case Solver.solveWithCompiler (Range.exact compiler) currentIndex manifest.dependencies of Left unsolvableCurrent -> Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvableCurrent Right (Tuple _ solved) -> do @@ -1259,7 +1259,9 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let unused = Map.fromFoldable $ NonEmptySet.map (\name -> Tuple name unit) names let fixedDependencies = Map.difference m.dependencies unused case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of - Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved fixMissing names (Manifest m) = do @@ -1267,7 +1269,9 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p -- Once we've fixed the missing dependencies we need to be sure we can still -- produce a viable solution with the current index. case Solver.solveWithCompiler (Range.exact compiler) currentIndex fixedDependencies of - Left unsolvable -> Except.throw $ "Legacy resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable + Left unsolvable -> do + Log.warn $ "Fixed dependencies cannot be used to produce a viable solution: " <> printJson (Internal.Codec.packageMap Range.codec) fixedDependencies + Except.throw $ "Resolutions not solvable\n" <> NonEmptyList.foldMap (append "\n - " <<< Solver.printSolverError) unsolvable Right (Tuple _ solved) -> pure $ Tuple fixedDependencies solved previousDepsMessage = Array.fold diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index 891d8419f..ac9ffc398 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -111,7 +111,8 @@ gitPull { address: { owner, repo }, pullMode } cwd = Except.runExcept do , " has no untracked or dirty files, it is safe to pull the latest." ] pure true - Just files -> do + Just _files -> do + -- This is a bit noisy, so commenting it out for now. 
-- Log.debug $ Array.fold -- [ "Some files are untracked or dirty in local checkout of " <> cwd <> ": " -- , NonEmptyArray.foldMap1 (append "\n - ") files diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 1ff86da9a..8a430bddd 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -36,6 +36,7 @@ import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Data.String.CodeUnits as String.CodeUnits import Data.These (These(..)) +import Data.Tuple (uncurry) import Data.Variant as Variant import Effect.Class.Console as Console import Node.FS.Aff as FS.Aff @@ -83,6 +84,7 @@ import Registry.Manifest as Manifest import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName import Registry.Range as Range +import Registry.Solver (CompilerIndex(..)) import Registry.Solver as Solver import Registry.Version as Version import Run (AFF, EFFECT, Run) @@ -290,7 +292,17 @@ runLegacyImport logs = do pure $ Left $ if Array.null nonCompiler then SolveFailedCompiler joined else SolveFailedDependencies joined Right resolutions -> do Log.debug $ "Solved " <> formatted <> " with legacy index." - pure $ Right resolutions + -- The solutions do us no good if the dependencies don't exist. Note + -- the compiler index is updated on every publish. + let lookupInRegistry res = maybe (Left res) (\_ -> Right res) (Map.lookup (fst res) (un CompilerIndex compilerIndex) >>= Map.lookup (snd res)) + let { fail: notRegistered } = partitionEithers $ map lookupInRegistry $ Map.toUnfoldable resolutions + if (Array.null notRegistered) then + pure $ Right resolutions + else do + let missing = "Some resolutions from legacy index are not registered: " <> String.joinWith ", " (map (uncurry formatPackageVersion) notRegistered) + Log.warn missing + Log.warn "Not using legacy index resolutions for this package." 
+ pure $ Left $ SolveFailedDependencies missing currentSolution <- case Solver.solveWithCompiler allCompilersRange compilerIndex manifest.dependencies of Left unsolvable -> do @@ -337,11 +349,11 @@ runLegacyImport logs = do let key = String.joinWith ", " $ foldlWithIndex (\name prev version -> Array.cons (formatPackageVersion name version) prev) [] packages let val = String.joinWith ", " $ map Version.print $ NonEmptySet.toUnfoldable compilers key <> " support compilers " <> val - Cache.put _importCache (PublishFailure manifest.name manifest.version) (UnsolvableDependencyCompilers errors) - Except.throw $ Array.fold - [ "Resolutions admit no overlapping compiler versions so your package cannot be compiled:\n" + Log.warn $ Array.fold + [ "Resolutions admit no overlapping compiler versions:\n" , Array.foldMap (append "\n - " <<< printError) errors ] + pure $ NonEmptySet.fromFoldable1 allCompilers Right compilers -> do Log.debug $ "Compatible compilers for resolutions of " <> formatted <> ": " <> stringifyJson (CA.array Version.codec) (NonEmptySet.toUnfoldable compilers) pure compilers From e2d6e875a6bc2d39d6b02564629900014f42158d Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 10 Dec 2023 12:23:25 -0500 Subject: [PATCH 35/64] Patch some legacy manifests --- app/src/App/Legacy/Manifest.purs | 36 +++++++++++++++++++++++++++++++- 1 file changed, 35 insertions(+), 1 deletion(-) diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index c61e3d81d..57eefa208 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -58,7 +58,8 @@ type LegacyManifest = } toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location { license, description, dependencies } = do +toManifest name version location legacy = do + let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing @@ -162,6 +163,39 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr pure { license, dependencies, description } +-- | Some legacy manifests must be patched to be usable. 
+patchLegacyManifest :: PackageName -> Version -> LegacyManifest -> LegacyManifest +patchLegacyManifest name version legacy = do + let hyruleName = unsafeFromRight (PackageName.parse "hyrule") + -- hyrule v2.2.0 removes a module that breaks all versions of bolson + -- prior to the versions below + let earlyBolsonLimit = unsafeFromRight (Version.parse "0.3.0") + let earlyDekuLimit = unsafeFromRight (Version.parse "0.7.0") + let earlyRitoLimit = unsafeFromRight (Version.parse "0.3.0") + let earlyHyruleFixedRange = unsafeFromJust (Range.mk (unsafeFromRight (Version.parse "1.6.4")) (unsafeFromRight (Version.parse "2.2.0"))) + let earlyFixHyrule = Map.update (\_ -> Just earlyHyruleFixedRange) hyruleName + + -- hyrule v2.4.0 removes a module that breaks all versions of bolson, deku, + -- and rito prior to the versions below + let hyruleFixedRange = unsafeFromJust (Range.mk (unsafeFromRight (Version.parse "2.0.0")) (unsafeFromRight (Version.parse "2.4.0"))) + let bolsonLimit = unsafeFromRight (Version.parse "0.4.0") + let dekuLimit = unsafeFromRight (Version.parse "0.9.25") + let ritoLimit = unsafeFromRight (Version.parse "0.3.5") + let fixHyrule = Map.update (\_ -> Just hyruleFixedRange) hyruleName + + case PackageName.print name of + "bolson" + | version < earlyBolsonLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < bolsonLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + "deku" + | version < earlyDekuLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < dekuLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + "rito" + | version < earlyRitoLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < ritoLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + _ -> + legacy + _legacyManifestError :: Proxy "legacyManifestError" _legacyManifestError = Proxy From b8a21a86502e914679bbb1e23239ddd240203435 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 10 Dec 2023 19:27:16 -0500 Subject: [PATCH 36/64] Range tweaks for bolson/deku/rito --- app/src/App/Legacy/Manifest.purs | 39 ++++++++++++++++++-------------- scripts/src/LegacyImporter.purs | 1 + 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 57eefa208..8d99d7ddf 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -166,33 +166,38 @@ fetchLegacyManifest name address ref = Run.Except.runExceptAt _legacyManifestErr -- | Some legacy manifests must be patched to be usable. 
patchLegacyManifest :: PackageName -> Version -> LegacyManifest -> LegacyManifest patchLegacyManifest name version legacy = do - let hyruleName = unsafeFromRight (PackageName.parse "hyrule") + let bolson = unsafeFromRight (PackageName.parse "bolson") + let hyrule = unsafeFromRight (PackageName.parse "hyrule") + + let unsafeVersion = unsafeFromRight <<< Version.parse + let unsafeRange a b = unsafeFromJust (Range.mk (unsafeVersion a) (unsafeVersion b)) + let fixRange pkg range = Map.update (\_ -> Just range) pkg + -- hyrule v2.2.0 removes a module that breaks all versions of bolson -- prior to the versions below - let earlyBolsonLimit = unsafeFromRight (Version.parse "0.3.0") - let earlyDekuLimit = unsafeFromRight (Version.parse "0.7.0") - let earlyRitoLimit = unsafeFromRight (Version.parse "0.3.0") - let earlyHyruleFixedRange = unsafeFromJust (Range.mk (unsafeFromRight (Version.parse "1.6.4")) (unsafeFromRight (Version.parse "2.2.0"))) - let earlyFixHyrule = Map.update (\_ -> Just earlyHyruleFixedRange) hyruleName + let earlyHyruleFixedRange = unsafeRange "1.6.4" "2.2.0" + let earlyFixHyrule = fixRange hyrule earlyHyruleFixedRange -- hyrule v2.4.0 removes a module that breaks all versions of bolson, deku, -- and rito prior to the versions below - let hyruleFixedRange = unsafeFromJust (Range.mk (unsafeFromRight (Version.parse "2.0.0")) (unsafeFromRight (Version.parse "2.4.0"))) - let bolsonLimit = unsafeFromRight (Version.parse "0.4.0") - let dekuLimit = unsafeFromRight (Version.parse "0.9.25") - let ritoLimit = unsafeFromRight (Version.parse "0.3.5") - let fixHyrule = Map.update (\_ -> Just hyruleFixedRange) hyruleName + let hyruleFixedRange = unsafeRange "2.0.0" "2.4.0" + let fixHyrule = fixRange hyrule hyruleFixedRange + + -- bolson v0.3.1 changes the type of a function that breaks deku until 0.9.21 + let bolsonFixedRange = unsafeRange "0.1.0" "0.3.2" + let fixBolson = fixRange bolson bolsonFixedRange case PackageName.print name of "bolson" - | version < earlyBolsonLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } - | version < bolsonLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.4.0" -> legacy { dependencies = fixHyrule legacy.dependencies } "deku" - | version < earlyDekuLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } - | version < dekuLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + | version < unsafeVersion "0.7.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.9.21" -> legacy { dependencies = fixBolson (fixHyrule legacy.dependencies) } + | version < unsafeVersion "0.9.25" -> legacy { dependencies = fixHyrule legacy.dependencies } "rito" - | version < earlyRitoLimit -> legacy { dependencies = earlyFixHyrule legacy.dependencies } - | version < ritoLimit -> legacy { dependencies = fixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.0" -> legacy { dependencies = earlyFixHyrule legacy.dependencies } + | version < unsafeVersion "0.3.5" -> legacy { dependencies = fixHyrule legacy.dependencies } _ -> legacy diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 8a430bddd..5dc12de2e 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -431,6 +431,7 @@ runLegacyImport logs = do Right compiler -> pure $ Right $ Tuple compiler currentResolutions Left currentFailures | 
legacyResolutions == currentResolutions -> reportFailures currentFailures Left _ -> do + Log.info $ "Could not find suitable compiler from current index, trying legacy solution..." selectedLegacy <- findFirstFromResolutions legacyResolutions case selectedLegacy of Left failures -> reportFailures failures From 3d7ab49fe103a9e368dd568f1e2692a6a01aa3ad Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 18 Dec 2023 15:19:48 -0500 Subject: [PATCH 37/64] Update to fix darwin support for spago builds --- flake.lock | 20 ++++++++++---------- flake.nix | 13 ++++++++----- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/flake.lock b/flake.lock index 1ecbf5e9c..7a03037e2 100644 --- a/flake.lock +++ b/flake.lock @@ -52,16 +52,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1701730523, - "narHash": "sha256-WWgooXBkjXukyZzMUhkPJvvngKed2VW5yv+i8Qtpldc=", + "lastModified": 1702882221, + "narHash": "sha256-L/uOrBqkGsa45EvQk4DLq/aR6JeomW+7Mwe0mC/dVUM=", "owner": "nixos", "repo": "nixpkgs", - "rev": "8078ceb2777d790d3fbc53589ed3753532185d77", + "rev": "25fef6e30d8ad48f47a8411ccfe986d8baed8a15", "type": "github" }, "original": { "owner": "nixos", - "ref": "release-23.11", + "ref": "release-23.05", "repo": "nixpkgs", "type": "github" } @@ -75,11 +75,11 @@ "slimlock": "slimlock" }, "locked": { - "lastModified": 1701732039, - "narHash": "sha256-0KBXWRUgWKIS1oE0qFfCNXTbttozzS97gv0pW2GplAg=", + "lastModified": 1702928412, + "narHash": "sha256-h6ep8PVTWHw3Hf7SSlxxvy3ephcJg8wHvu9HrMvqYJc=", "owner": "thomashoneyman", "repo": "purescript-overlay", - "rev": "249f9042299dfd4a6f77ddff4a2849651a8211e5", + "rev": "41983080acb2095d00fbdf3ec78c65d65e5f21c7", "type": "github" }, "original": { @@ -125,11 +125,11 @@ ] }, "locked": { - "lastModified": 1688756706, - "narHash": "sha256-xzkkMv3neJJJ89zo3o2ojp7nFeaZc2G0fYwNXNJRFlo=", + "lastModified": 1702828829, + "narHash": "sha256-tL/ThLAk5JgYdKXy3MIZYnDavemjpemF17dVgbfbYM8=", "owner": "thomashoneyman", "repo": "slimlock", - "rev": "cf72723f59e2340d24881fd7bf61cb113b4c407c", + "rev": "a7ce81f35d236a5e58bce30e34825013e19ffade", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index b0fc05e4c..495da1785 100644 --- a/flake.nix +++ b/flake.nix @@ -2,7 +2,7 @@ description = "The PureScript Registry"; inputs = { - nixpkgs.url = "github:nixos/nixpkgs/release-23.11"; + nixpkgs.url = "github:nixos/nixpkgs/release-23.05"; flake-utils.url = "github:numtide/flake-utils"; flake-compat.url = "github:edolstra/flake-compat"; @@ -47,6 +47,9 @@ GIT_LFS_SKIP_SMUDGE = 1; registryOverlay = final: prev: rec { nodejs = prev.nodejs_20; + spago = prev.spago-bin.spago-0_93_19; + purs-tidy = prev.purs-tidy-unstable; + purs-backend-es = prev.purs-backend-es-unstable; # We don't want to force everyone to update their configs if they aren't # normally on flakes. 
@@ -203,7 +206,7 @@ pushd $WORKDIR export HEALTHCHECKS_URL=${defaultEnv.HEALTHCHECKS_URL} - ${pkgs.spago-unstable}/bin/spago test + ${pkgs.spago}/bin/spago test popd ''; @@ -694,9 +697,9 @@ # Development tooling purs - spago-unstable - purs-tidy-unstable - purs-backend-es-unstable + spago + purs-tidy + purs-backend-es ]; }; }; From 6bc8d094b528884e786b00633c405184f01e6c2f Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 18 Dec 2023 18:13:23 -0500 Subject: [PATCH 38/64] Clean up publish stats --- scripts/src/LegacyImporter.purs | 37 ++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 5dc12de2e..3da120c35 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -242,9 +242,10 @@ runLegacyImport logs = do Just _ -> pure unit Log.info "Ready for upload!" - let importStats = formatImportStats $ calculateImportStats legacyRegistry importedIndex - Log.info importStats - Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) importStats + let importStats = calculateImportStats legacyRegistry importedIndex + let formattedStats = formatImportStats importStats + Log.info formattedStats + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "import-stats.txt" ]) formattedStats Log.info "Sorting packages for upload..." let allIndexPackages = ManifestIndex.toSortedArray ManifestIndex.ConsiderRanges importedIndex.registryIndex @@ -484,7 +485,7 @@ runLegacyImport logs = do failures <- Array.foldM collectError Map.empty allIndexPackages Run.liftAff $ writePublishFailures failures - let publishStats = collectPublishFailureStats importedIndex.registryIndex failures + let publishStats = collectPublishFailureStats importStats importedIndex.registryIndex failures let publishStatsMessage = formatPublishFailureStats publishStats Log.info publishStatsMessage Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage @@ -682,12 +683,12 @@ publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM } type PublishFailureStats = - { packages :: { total :: Int, partial :: Int, failed :: Set PackageName } - , versions :: { total :: Int, failed :: Int, reason :: Map String Int } + { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName } + , versions :: { total :: Int, considered :: Int, failed :: Int, reason :: Map String Int } } -collectPublishFailureStats :: ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats -collectPublishFailureStats importedIndex failures = do +collectPublishFailureStats :: ImportStats -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importStats importedIndex failures = do let index :: Map PackageName (Map Version Manifest) index = ManifestIndex.toMap importedIndex @@ -696,11 +697,17 @@ collectPublishFailureStats importedIndex failures = do countVersions = Array.foldl (\prev (Tuple _ versions) -> prev + Map.size versions) 0 <<< Map.toUnfoldable startPackages :: Int - startPackages = Map.size index + startPackages = importStats.packagesProcessed + + consideredPackages :: Int + consideredPackages = Map.size index startVersions :: Int startVersions = countVersions index + consideredVersions :: Int + consideredVersions = countVersions index + failedPackages :: Int failedPackages = Map.size failures @@ 
-734,11 +741,13 @@ collectPublishFailureStats importedIndex failures = do { packages: { total: startPackages + , considered: consideredPackages , partial: failedPackages , failed: removedPackages } , versions: { total: startVersions + , considered: consideredVersions , failed: failedVersions , reason: countByFailure } @@ -750,9 +759,13 @@ formatPublishFailureStats { packages, versions } = String.joinWith "\n" , "PUBLISH FAILURES" , "--------------------" , "" - , show packages.partial <> " out of " <> show packages.total <> " packages had at least 1 version fail (" <> show (Set.size packages.failed) <> " packages had all versions fail)." - , show versions.failed <> " out of " <> show versions.total <> " versions failed." - , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) + , show packages.considered <> " of " <> show packages.total <> " total packages were considered for publishing (others had no manifests imported.)" + , " - " <> show (packages.total - packages.partial - (Set.size packages.failed)) <> " out of " <> show packages.total <> " packages fully succeeded." + , " - " <> show packages.partial <> " packages partially succeeded." + , " - " <> show (Set.size packages.failed) <> " packages had all versions fail and are subject to removal." + , "" + , show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed." + , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) ] compilerFailureMapCodec :: JsonCodec (Map (NonEmptyArray Version) CompilerFailure) From 9acbc940e92c403d7bbc3c09c4e6b193d39fb89f Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 18 Dec 2023 19:48:50 -0500 Subject: [PATCH 39/64] Enforce an explicit 0.13 date cutoff / core org cutoff --- foreign/src/Foreign/Octokit.purs | 13 ++- scripts/src/LegacyImporter.purs | 131 +++++++++++++++++++++++++------ 2 files changed, 116 insertions(+), 28 deletions(-) diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index a826cf5b4..91e8fec66 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -198,12 +198,17 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CA.Record.object "Commit" - { committer: CA.Record.object "Commit.committer" { date: Internal.Codec.iso8601DateTime } } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CA.Record.object "CommitData" + { data: CA.Record.object "Commit" + { committer: CA.Record.object "Commit.committer" + { date: Internal.Codec.iso8601DateTime + } + } + } } where - toJsonRep date = { committer: { date } } - fromJsonRep = _.committer.date + toJsonRep date = { data: { committer: { date } } } + fromJsonRep = _.data.committer.date -- | Create a comment on an issue. Requires authentication. 
-- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 3da120c35..b3ea22bd6 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -18,6 +18,9 @@ import Data.Codec.Argonaut.Common as CA.Common import Data.Codec.Argonaut.Record as CA.Record import Data.Codec.Argonaut.Variant as CA.Variant import Data.Compactable (separate) +import Data.DateTime (Date, Month(..)) +import Data.DateTime as DateTime +import Data.Enum (toEnum) import Data.Exists as Exists import Data.Filterable (partition) import Data.Foldable (foldMap) @@ -224,11 +227,26 @@ runLegacyImport logs = do pure $ fixupNames allPackages Log.info $ "Read " <> show (Set.size (Map.keys legacyRegistry)) <> " package names from the legacy registry." - importedIndex <- importLegacyRegistry legacyRegistry - Log.info "Writing package and version failures to disk..." - Run.liftAff $ writePackageFailures importedIndex.failedPackages - Run.liftAff $ writeVersionFailures importedIndex.failedVersions + Log.info "Reading reserved 0.13 packages..." + reserved0_13 <- readPackagesMetadata >>= case _ of + Left err -> do + Log.warn $ "Could not read reserved packages: " <> err + Log.warn $ "Determining reserved packages..." + metadata <- getPackagesMetadata legacyRegistry + let cutoff = filterPackages_0_13 metadata + writePackagesMetadata cutoff + pure cutoff + Right cutoff -> pure cutoff + + Log.info $ "Reserving metadata files for 0.13 and purs/metadata packages" + forWithIndex_ reserved0_13 \package { address } -> Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for reserved 0.13 package " <> PackageName.print package + let location = GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> Log.debug $ PackageName.print package <> " already reserved." let metadataPackage = unsafeFromRight (PackageName.parse "metadata") let pursPackage = unsafeFromRight (PackageName.parse "purs") @@ -241,6 +259,12 @@ runLegacyImport logs = do Registry.writeMetadata package entry Just _ -> pure unit + importedIndex <- importLegacyRegistry legacyRegistry + + Log.info "Writing package and version failures to disk..." + Run.liftAff $ writePackageFailures importedIndex.failedPackages + Run.liftAff $ writeVersionFailures importedIndex.failedVersions + Log.info "Ready for upload!" 
let importStats = calculateImportStats legacyRegistry importedIndex let formattedStats = formatImportStats importStats @@ -485,11 +509,12 @@ runLegacyImport logs = do failures <- Array.foldM collectError Map.empty allIndexPackages Run.liftAff $ writePublishFailures failures - let publishStats = collectPublishFailureStats importStats importedIndex.registryIndex failures + let publishStats = collectPublishFailureStats importStats (map _.address reserved0_13) importedIndex.registryIndex failures let publishStatsMessage = formatPublishFailureStats publishStats Log.info publishStatsMessage Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage - Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.failed))) + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "reserved-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable publishStats.packages.reserved))) + Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "removed-packages.txt" ]) (String.joinWith "\n" (map PackageName.print (Set.toUnfoldable (Set.difference publishStats.packages.failed publishStats.packages.reserved)))) -- | Record all package failures to the 'package-failures.json' file. writePublishFailures :: Map PackageName (Map Version PublishError) -> Aff Unit @@ -514,7 +539,7 @@ type LegacyRegistry = Map RawPackageName String type ImportedIndex = { failedPackages :: Map RawPackageName PackageValidationError , failedVersions :: Map RawPackageName (Map RawVersion VersionValidationError) - , reservedPackages :: Map PackageName Location + , removedPackages :: Map PackageName Location , registryIndex :: ManifestIndex , packageRefs :: Map PackageName (Map Version RawVersion) } @@ -556,11 +581,11 @@ importLegacyRegistry legacyRegistry = do -- The list of all packages that were present in the legacy registry files, -- but which have no versions present in the fully-imported registry. 
- reservedPackages :: Map PackageName Location - reservedPackages = - Map.fromFoldable $ Array.mapMaybe reserved $ Map.toUnfoldable legacyRegistry + removedPackages :: Map PackageName Location + removedPackages = + Map.fromFoldable $ Array.mapMaybe removed $ Map.toUnfoldable legacyRegistry where - reserved (Tuple (RawPackageName name) address) = do + removed (Tuple (RawPackageName name) address) = do packageName <- hush $ PackageName.parse name guard $ isNothing $ Map.lookup packageName $ ManifestIndex.toMap validIndex { owner, repo } <- hush $ Parsing.runParser address legacyRepoParser @@ -592,7 +617,7 @@ importLegacyRegistry legacyRegistry = do pure { failedPackages: packageFailures , failedVersions: versionFailures - , reservedPackages: reservedPackages + , removedPackages: removedPackages , registryIndex: validIndex , packageRefs } @@ -683,12 +708,12 @@ publishErrorCodec = Profunctor.dimap toVariant fromVariant $ CA.Variant.variantM } type PublishFailureStats = - { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName } + { packages :: { total :: Int, considered :: Int, partial :: Int, failed :: Set PackageName, reserved :: Set PackageName } , versions :: { total :: Int, considered :: Int, failed :: Int, reason :: Map String Int } } -collectPublishFailureStats :: ImportStats -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats -collectPublishFailureStats importStats importedIndex failures = do +collectPublishFailureStats :: ImportStats -> Map PackageName Address -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats +collectPublishFailureStats importStats reserved0_13 importedIndex failures = do let index :: Map PackageName (Map Version Manifest) index = ManifestIndex.toMap importedIndex @@ -703,7 +728,7 @@ collectPublishFailureStats importStats importedIndex failures = do consideredPackages = Map.size index startVersions :: Int - startVersions = countVersions index + startVersions = importStats.versionsProcessed consideredVersions :: Int consideredVersions = countVersions index @@ -724,6 +749,11 @@ collectPublishFailureStats importStats importedIndex failures = do foldlWithIndex foldFn Set.empty failures + -- Packages that are eligible for removal — but are reserved due to 0.13 or + -- organization status — are the 'reserved packages'. + reservedPackages :: Set PackageName + reservedPackages = Set.intersection removedPackages (Map.keys reserved0_13) + countByFailure :: Map String Int countByFailure = do let @@ -743,6 +773,7 @@ collectPublishFailureStats importStats importedIndex failures = do { total: startPackages , considered: consideredPackages , partial: failedPackages + , reserved: reservedPackages , failed: removedPackages } , versions: @@ -760,11 +791,12 @@ formatPublishFailureStats { packages, versions } = String.joinWith "\n" , "--------------------" , "" , show packages.considered <> " of " <> show packages.total <> " total packages were considered for publishing (others had no manifests imported.)" - , " - " <> show (packages.total - packages.partial - (Set.size packages.failed)) <> " out of " <> show packages.total <> " packages fully succeeded." + , " - " <> show (packages.total - packages.partial - (Set.size packages.failed)) <> " out of " <> show packages.considered <> " packages fully succeeded." , " - " <> show packages.partial <> " packages partially succeeded." - , " - " <> show (Set.size packages.failed) <> " packages had all versions fail and are subject to removal." 
+ , " - " <> show (Set.size packages.reserved) <> " packages fully failed, but are reserved due to 0.13 or organization status." + , " - " <> show (Set.size packages.failed - Set.size packages.reserved) <> " packages had all versions fail and will be removed." , "" - , show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed." + , show versions.considered <> " of " <> show versions.total <> " total versions were considered for publishing.\n - " <> show versions.failed <> " out of " <> show versions.total <> " versions failed." , Array.foldMap (\(Tuple key val) -> "\n - " <> key <> ": " <> show val) (Array.sortBy (comparing snd) (Map.toUnfoldable versions.reason)) ] @@ -887,6 +919,56 @@ type PackageResult = , tags :: Array Tag } +type PackagesMetadata = { address :: Address, lastPublished :: Date } + +packagesMetadataCodec :: JsonCodec PackagesMetadata +packagesMetadataCodec = CA.Record.object "PackagesMetadata" + { address: CA.Record.object "Address" { owner: CA.string, repo: CA.string } + , lastPublished: Internal.Codec.iso8601Date + } + +getPackagesMetadata :: forall r. Map RawPackageName String -> Run (EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) +getPackagesMetadata legacyRegistry = do + associated <- for (Map.toUnfoldableUnordered legacyRegistry) \(Tuple rawName rawUrl) -> do + Except.runExceptAt (Proxy :: _ "exceptPackage") (validatePackage rawName rawUrl) >>= case _ of + Left _ -> pure Nothing + Right { name, address, tags } -> case Array.head tags of + Nothing -> pure Nothing + Just tag -> do + result <- GitHub.getCommitDate address tag.sha + case result of + Left error -> unsafeCrashWith ("Failed to get commit date for " <> PackageName.print name <> "@" <> tag.name <> ": " <> Octokit.printGitHubError error) + Right date -> pure $ Just $ Tuple name { address, lastPublished: DateTime.date date } + pure $ Map.fromFoldable $ Array.catMaybes associated + +filterPackages_0_13 :: Map PackageName PackagesMetadata -> Map PackageName PackagesMetadata +filterPackages_0_13 = do + let + -- 0.13 release date + cutoff = DateTime.canonicalDate (unsafeFromJust (toEnum 2019)) May (unsafeFromJust (toEnum 29)) + organizations = + [ "purescript" + , "purescript-contrib" + , "purescript-node" + , "purescript-web" + , "rowtype-yoga" + , "purescript-halogen" + , "purescript-deprecated" + ] + + Map.filterWithKey \_ metadata -> do + let { owner } = metadata.address + owner `Array.elem` organizations || metadata.lastPublished >= cutoff + +writePackagesMetadata :: forall r. Map PackageName PackagesMetadata -> Run (LOG + AFF + r) Unit +writePackagesMetadata pkgs = do + let path = Path.concat [ scratchDir, "packages-metadata.json" ] + Log.info $ "Writing packages metadata to " <> path + Run.liftAff $ writeJsonFile (packageMap packagesMetadataCodec) path pkgs + +readPackagesMetadata :: forall r. Run (AFF + r) (Either String (Map PackageName PackagesMetadata)) +readPackagesMetadata = Run.liftAff $ readJsonFile (packageMap packagesMetadataCodec) (Path.concat [ scratchDir, "packages-metadata.json" ]) + validatePackage :: forall r. 
RawPackageName -> String -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult validatePackage rawPackage rawUrl = do name <- exceptPackage $ validatePackageName rawPackage @@ -959,6 +1041,7 @@ validatePackageDisabled package = disabledPackages :: Map String String disabledPackages = Map.fromFoldable [ Tuple "metadata" reservedPackage + , Tuple "purs" reservedPackage , Tuple "bitstrings" noSrcDirectory , Tuple "purveyor" noSrcDirectory , Tuple "styled-components" noSrcDirectory @@ -1031,7 +1114,7 @@ formatPublishError = case _ of type ImportStats = { packagesProcessed :: Int , versionsProcessed :: Int - , packageNamesReserved :: Int + , packageNamesRemoved :: Int , packageResults :: { success :: Int, partial :: Int, fail :: Int } , versionResults :: { success :: Int, fail :: Int } , packageErrors :: Map String Int @@ -1044,7 +1127,7 @@ formatImportStats stats = String.joinWith "\n" , show stats.packagesProcessed <> " packages processed:" , indent $ show stats.packageResults.success <> " fully successful" , indent $ show stats.packageResults.partial <> " partially successful" - , indent $ show (stats.packageNamesReserved - stats.packageResults.fail) <> " omitted (no usable versions)" + , indent $ show (stats.packageNamesRemoved - stats.packageResults.fail) <> " omitted (no usable versions)" , indent $ show stats.packageResults.fail <> " fully failed" , indent "---" , formatErrors stats.packageErrors @@ -1077,8 +1160,8 @@ calculateImportStats legacyRegistry imported = do packagesProcessed = Map.size legacyRegistry - packageNamesReserved = - Map.size imported.reservedPackages + packageNamesRemoved = + Map.size imported.removedPackages packageResults = do let succeeded = Map.keys registryIndex @@ -1131,7 +1214,7 @@ calculateImportStats legacyRegistry imported = do { packagesProcessed , versionsProcessed - , packageNamesReserved + , packageNamesRemoved , packageResults , versionResults , packageErrors From bea20133494fd93ce2dfcb65fbce3aceb10c2e65 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 17 Jan 2024 10:51:02 -0500 Subject: [PATCH 40/64] Move location check above manifest parse --- app/src/App/API.purs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 4051e0638..200460fe2 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -420,6 +420,11 @@ publish maybeLegacyIndex payload = do let packageSpagoYaml = Path.concat [ downloadedPackage, "spago.yaml" ] hasSpagoYaml <- Run.liftEffect $ FS.Sync.exists packageSpagoYaml + address <- case existingMetadata.location of + Git _ -> Except.throw "Packages can only come from GitHub for now." + GitHub { subdir: Just subdir } -> Except.throw $ "Packages cannot yet use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." + GitHub { owner, repo } -> pure { owner, repo } + Manifest receivedManifest <- if hadPursJson then Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 packagePursJson)) >>= case _ of @@ -454,10 +459,6 @@ publish maybeLegacyIndex payload = do else do Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." - address <- case existingMetadata.location of - Git _ -> Except.throw "Legacy packages can only come from GitHub." - GitHub { subdir: Just subdir } -> Except.throw $ "Legacy packages cannot use the 'subdir' key, but this package specifies a " <> subdir <> " subdir." 
- GitHub { owner, repo } -> pure { owner, repo } version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." From 637a757aed0d3fb40b4dddf91da2c45a6b5f2510 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 14:22:49 -0400 Subject: [PATCH 41/64] format --- lib/src/Metadata.purs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index e59934277..a5bed639f 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -97,7 +97,7 @@ publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object decode :: JSON -> Except CJ.DecodeError (Either Version (NonEmptyArray Version)) decode json = except do map Left (CJ.decode Version.codec json) - <|> map Right (CJ.decode (CJ.Common.nonEmptyArray Version.codec) json) + <|> map Right (CJ.decode (CJ.Common.nonEmptyArray Version.codec) json) encode = case _ of Left version -> CJ.encode Version.codec version From ab184f2544daaf8c6b02eee89c16e73f9d2dd1c9 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 15:50:34 -0400 Subject: [PATCH 42/64] Fix octokit codec merge error --- foreign/src/Foreign/Octokit.purs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index d7787466b..f618bc4c0 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -207,17 +207,14 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "CommitData" $ CJ.Record.object - { data: CJ.named "Commit" $ CJ.Record.object - { committer: CJ.named "Commit.committer" $ CJ.Record.object - { date: Internal.Codec.iso8601DateTime - } - } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "Commit" $ CJ.Record.object + { committer: CJ.named "Commit.committer" $ CJ.Record.object + { date: Internal.Codec.iso8601DateTime } } } where - toJsonRep date = { data: { committer: { date } } } - fromJsonRep = _.data.committer.date + toJsonRep date = { committer: { date } } + fromJsonRep = _.committer.date -- | Create a comment on an issue. Requires authentication. -- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md From 9cc56e793068c19cb690a41e243fcda634a24496 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 15:55:01 -0400 Subject: [PATCH 43/64] Revert "Fix octokit codec merge error" This reverts commit ab184f2544daaf8c6b02eee89c16e73f9d2dd1c9. 
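The shape at issue can be sketched standalone (types are stand-ins; the real codec decodes an ISO8601 `DateTime` rather than a `String`, and the `data` wrapper appears to be how the request plugin surfaces this endpoint's response):

    module CommitDateShape where

    type Committer = { date :: String }
    type Commit = { committer :: Committer }
    type CommitData = { data :: Commit }

    -- Restored (wrapped) shape: { "data": { "committer": { "date": ... } } }
    -- The reverted commit had assumed the flat shape:
    --                           { "committer": { "date": ... } }
    toJsonRep :: String -> CommitData
    toJsonRep date = { data: { committer: { date } } }

    fromJsonRep :: CommitData -> String
    fromJsonRep = _.data.committer.date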
--- foreign/src/Foreign/Octokit.purs | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index f618bc4c0..d7787466b 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -207,14 +207,17 @@ getCommitDateRequest { address, commitSha } = , headers: Object.empty , args: noArgs , paginate: false - , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "Commit" $ CJ.Record.object - { committer: CJ.named "Commit.committer" $ CJ.Record.object - { date: Internal.Codec.iso8601DateTime } + , codec: Profunctor.dimap toJsonRep fromJsonRep $ CJ.named "CommitData" $ CJ.Record.object + { data: CJ.named "Commit" $ CJ.Record.object + { committer: CJ.named "Commit.committer" $ CJ.Record.object + { date: Internal.Codec.iso8601DateTime + } + } } } where - toJsonRep date = { committer: { date } } - fromJsonRep = _.committer.date + toJsonRep date = { data: { committer: { date } } } + fromJsonRep = _.data.committer.date -- | Create a comment on an issue. Requires authentication. -- | https://github.com/octokit/plugin-rest-endpoint-methods.js/blob/v5.16.0/docs/issues/createComment.md From c05fcb95743d8312c00f0b153b5d3e20bba1e0e4 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 16:29:55 -0400 Subject: [PATCH 44/64] Set compiler explicitly to 0.15.5 --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 7784bce83..e88c18bf2 100644 --- a/flake.nix +++ b/flake.nix @@ -852,7 +852,7 @@ dbmate # Development tooling - purs + purs-bin.purs-0_15_5 spago-bin.spago-0_93_19 # until new lockfile format supported by overlay purs-tidy-unstable purs-backend-es-unstable From 637488d80b084f9d62ec5ab2583cfa42fbc4ec0e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 16:35:37 -0400 Subject: [PATCH 45/64] Tweaks --- app/fixtures/registry/metadata/prelude.json | 2 +- app/fixtures/registry/metadata/type-equality.json | 2 +- flake.nix | 2 +- nix/test-vm.nix | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 4421ec79b..cb635ba04 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,7 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, - "compilers": ["0.15.10", "0.15.11", "0.15.12"], + "compilers": ["0.15.13", "0.15.14", "0.15.15"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index aed5ea89f..5a07ac762 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -6,7 +6,7 @@ "published": { "4.0.1": { "bytes": 2184, - "compilers": ["0.15.9", "0.15.10", "0.15.11"], + "compilers": ["0.15.12", "0.15.13", "0.15.14"], "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" diff --git a/flake.nix b/flake.nix index e88c18bf2..7784bce83 100644 --- a/flake.nix +++ b/flake.nix @@ -852,7 +852,7 @@ dbmate # Development tooling - purs-bin.purs-0_15_5 + purs spago-bin.spago-0_93_19 # until new lockfile format supported by overlay purs-tidy-unstable purs-backend-es-unstable diff --git a/nix/test-vm.nix b/nix/test-vm.nix index 916866579..dadf32fa4 
100644 --- a/nix/test-vm.nix +++ b/nix/test-vm.nix @@ -32,6 +32,6 @@ ]; }; - system.stateVersion = "23.11"; + system.stateVersion = "24.05"; }; } From 662dd002d55e6969d1aaf15ead0e047a2d613ff0 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 29 Jul 2024 16:45:44 -0400 Subject: [PATCH 46/64] Set all purs test compilers to 0.15.4 range --- app/fixtures/registry/metadata/prelude.json | 2 +- app/fixtures/registry/metadata/type-equality.json | 2 +- app/test/App/API.purs | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index cb635ba04..965567c83 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -6,7 +6,7 @@ "published": { "6.0.1": { "bytes": 31142, - "compilers": ["0.15.13", "0.15.14", "0.15.15"], + "compilers": ["0.15.3", "0.15.4", "0.15.5"], "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index 5a07ac762..b5d5a86ea 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -6,7 +6,7 @@ "published": { "4.0.1": { "bytes": 2184, - "compilers": ["0.15.12", "0.15.13", "0.15.14"], + "compilers": ["0.15.2", "0.15.3", "0.15.4"], "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 9206b4ac8..fca1f14c0 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -92,7 +92,7 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.10" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref @@ -141,7 +141,7 @@ spec = do Left one -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix but unfinished single version: " <> Version.print one Right many -> do let many' = NonEmptyArray.toArray many - let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11", "0.15.12" ] + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -156,7 +156,7 @@ spec = do -- but did not have documentation make it to Pursuit. 
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" @@ -170,7 +170,7 @@ spec = do let transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } transitivePublishArgs = - { compiler: Utils.unsafeVersion "0.15.10" + { compiler: Utils.unsafeVersion "0.15.4" , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version @@ -190,7 +190,7 @@ spec = do Left one -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix but unfinished single version: " <> Version.print one Right many -> do let many' = NonEmptyArray.toArray many - let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11", "0.15.12" ] + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') From 8156aa2e46e82abdeae2b8045552f642d3181802 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 30 Jul 2024 11:25:13 -0400 Subject: [PATCH 47/64] Update retry logic to fix integration test --- app/src/App/Effect/GitHub.purs | 6 ++---- app/src/App/Effect/Source.purs | 29 +++++++++++------------------ app/src/App/Prelude.purs | 14 +++++++++++--- app/src/Fetch/Retry.purs | 6 ++---- flake.nix | 9 +++++++-- foreign/src/Foreign/Tmp.js | 2 +- nix/test-vm.nix | 10 ++++++---- 7 files changed, 40 insertions(+), 36 deletions(-) diff --git a/app/src/App/Effect/GitHub.purs b/app/src/App/Effect/GitHub.purs index 8d0b313a8..914a3aa92 100644 --- a/app/src/App/Effect/GitHub.purs +++ b/app/src/App/Effect/GitHub.purs @@ -265,10 +265,8 @@ requestWithBackoff octokit githubRequest = do Log.debug $ "Making request to " <> route <> " with base URL " <> githubApiUrl result <- Run.liftAff do let - retryOptions = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt err -> case err of + retryOptions = defaultRetry + { retryOnFailure = \attempt err -> case err of UnexpectedError _ -> false DecodeError _ -> false -- https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#exceeding-the-rate-limit diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index d172e0dee..d0c44b6ce 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,7 +6,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate -import Data.String as String +import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff import Effect.Exception as Exception import Effect.Now as Now @@ -84,33 +84,26 @@ handle importType = case _ of let repoDir = Path.concat [ destination, repo <> "-" <> ref ] + -- If a git clone is cancelled by the timeout, but had partially-cloned, then it will + -- leave behind files that prevent a retry. 
+ retryOpts = defaultRetry + { cleanupOnCancel = FS.Extra.remove repoDir + , timeout = Milliseconds 15_000.0 + } + clonePackageAtTag = do let url = Array.fold [ "https://github.com/", owner, "/", repo ] let args = [ "clone", url, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] - withRetryOnTimeout (Git.gitCLI args Nothing) >>= case _ of + withRetry retryOpts (Git.gitCLI args Nothing) >>= case _ of Cancelled -> Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> url <> " " <> ref Failed err -> Aff.throwError $ Aff.error err Succeeded _ -> pure unit - alreadyExists = String.contains (String.Pattern "already exists and is not an empty directory") - Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error -> do - Log.error $ "Failed to clone git tag: " <> Aff.message error <> ", retrying..." - when (alreadyExists (Aff.message error)) $ FS.Extra.remove repoDir - Run.liftAff (Aff.delay (Aff.Milliseconds 1000.0)) - Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of - Right _ -> Log.debug $ "Cloned package source to " <> repoDir - Left error2 -> do - Log.error $ "Failed to clone git tag (attempt 2): " <> Aff.message error2 <> ", retrying..." - when (alreadyExists (Aff.message error)) $ FS.Extra.remove repoDir - Run.liftAff (Aff.delay (Aff.Milliseconds 1000.0)) - Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of - Right _ -> Log.debug $ "Cloned package source to " <> repoDir - Left error3 -> do - Log.error $ "Failed to clone git tag (attempt 3): " <> Aff.message error3 - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref + Log.error $ "Failed to clone git tag: " <> Aff.message error + Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index a4a5864cc..7a046414d 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -171,7 +171,9 @@ withRetryOnTimeout = withRetry defaultRetry type Retry err = { timeout :: Aff.Milliseconds + , cleanupOnCancel :: Extra.Aff Unit , retryOnCancel :: Int -> Boolean + , cleanupOnFailure :: err -> Extra.Aff Unit , retryOnFailure :: Int -> err -> Boolean } @@ -180,7 +182,9 @@ type Retry err = defaultRetry :: forall err. Retry err defaultRetry = { timeout: Aff.Milliseconds 5000.0 + , cleanupOnCancel: pure unit , retryOnCancel: \attempt -> attempt <= 3 + , cleanupOnFailure: \_ -> pure unit , retryOnFailure: \_ _ -> false } @@ -194,7 +198,7 @@ derive instance (Eq err, Eq a) => Eq (RetryResult err a) -- | Attempt an effectful computation that can fail by specifying how to retry -- | the request and whether it should time out. withRetry :: forall err a. 
Retry err -> Extra.Aff (Either.Either err a) -> Extra.Aff (RetryResult err a) -withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } action = do +withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure, cleanupOnCancel, cleanupOnFailure } action = do let runAction :: Extra.Aff (Either.Either err a) -> Int -> Extra.Aff (RetryResult err a) runAction action' ms = do @@ -215,14 +219,18 @@ withRetry { timeout: Aff.Milliseconds timeout, retryOnCancel, retryOnFailure } a Cancelled -> if retryOnCancel attempt then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnCancel retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnCancel pure Cancelled Failed err -> if retryOnFailure attempt err then do let newTimeout = Int.floor timeout `Int.pow` (attempt + 1) + cleanupOnFailure err retry (attempt + 1) =<< runAction action newTimeout - else + else do + cleanupOnFailure err pure (Failed err) Succeeded result -> pure (Succeeded result) diff --git a/app/src/Fetch/Retry.purs b/app/src/Fetch/Retry.purs index 4260f6e46..cd182385a 100644 --- a/app/src/Fetch/Retry.purs +++ b/app/src/Fetch/Retry.purs @@ -43,10 +43,8 @@ withRetryRequest url opts = withRetry retry do if response.status >= 400 then Left $ StatusError response else Right response - retry = - { timeout: defaultRetry.timeout - , retryOnCancel: defaultRetry.retryOnCancel - , retryOnFailure: \attempt -> case _ of + retry = defaultRetry + { retryOnFailure = \attempt -> case _ of FetchError _ -> false StatusError { status } -> -- We retry on 500-level errors in case the server is temporarily diff --git a/flake.nix b/flake.nix index 7784bce83..c886c63d3 100644 --- a/flake.nix +++ b/flake.nix @@ -797,7 +797,7 @@ # Then we poll for job results, expecting an eventual 'success'. try_count = 0 - delay_seconds = 3 + delay_seconds = 5 prev_timestamp = "2023-07-29T00:00:00.000Z" log_level = "DEBUG" while True: @@ -809,7 +809,12 @@ success = poll_result['success'] assert success, f"GET /jobs/{job_id} should return success, but it returned {poll_result}" break - elif (try_count * delay_seconds) > 60: + + # A fairly long timeout because of the requirement to compile packages. + # FIXME: once this is split into multiple jobs, the timeout can be adjusted + # to a smaller number, e.g. 45 seconds maximum, but we can allow extra time + # for the subsequent compilation jobs to complete. 
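+          # With delay_seconds = 5 this allows up to 36 polls before giving up.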
+ elif (try_count * delay_seconds) > 180: raise ValueError(f"Cancelling publish request after {try_count * delay_seconds} seconds, this is too long...") else: print(f"Job is still ongoing, retrying in {delay_seconds} seconds...") diff --git a/foreign/src/Foreign/Tmp.js b/foreign/src/Foreign/Tmp.js index b11d10232..8995afdfc 100644 --- a/foreign/src/Foreign/Tmp.js +++ b/foreign/src/Foreign/Tmp.js @@ -3,6 +3,6 @@ import { setGracefulCleanup, dirSync } from "tmp"; setGracefulCleanup(); export const mkTmpDirImpl = () => { - const tmpobj = dirSync(); + const tmpobj = dirSync({ template: 'XXXXXX' }); return tmpobj.name; }; diff --git a/nix/test-vm.nix b/nix/test-vm.nix index dadf32fa4..f77ef574a 100644 --- a/nix/test-vm.nix +++ b/nix/test-vm.nix @@ -19,10 +19,6 @@ services.getty.autologinUser = "root"; virtualisation = { - graphics = false; - host = { - inherit pkgs; - }; forwardPorts = [ { from = "host"; @@ -30,6 +26,12 @@ host.port = 8080; } ]; + graphics = false; + host = { + inherit pkgs; + }; + # Can be adjusted if necessary for test systems (default is 1024) + memorySize = 2048; }; system.stateVersion = "24.05"; From ed7913c1ce3ab9b5e76a6da109b92136cc5ed036 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 26 Aug 2024 18:44:33 -0400 Subject: [PATCH 48/64] Complete run of legacy importer --- app/src/App/API.purs | 25 +++++++---- app/src/App/Effect/Source.purs | 78 +++++++++++++++++++++++---------- app/test/Test/Assert/Run.purs | 8 ++-- flake.nix | 9 +++- scripts/src/LegacyImporter.purs | 8 +++- 5 files changed, 90 insertions(+), 38 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 65009a9cb..9a83187bd 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -496,14 +496,23 @@ publish maybeLegacyIndex payload = do ] unless (Operation.Validation.locationMatches (Manifest receivedManifest) (Metadata metadata)) do - Except.throw $ Array.fold - [ "The manifest file specifies a location (" - , stringifyJson Location.codec receivedManifest.location - , ") that differs from the location in the registry metadata (" - , stringifyJson Location.codec metadata.location - , "). If you would like to change the location of your package you should " - , "submit a transfer operation." - ] + if isJust maybeLegacyIndex then + -- The legacy importer is sometimes run on older packages, some of which have been transferred. Since + -- package metadata only records the latest location, this can cause a problem: the manifest reports + -- the location at the time, but the metadata reports the current location. + Log.warn $ Array.fold + [ "In legacy mode and manifest location differs from existing metadata. This indicates a package that was " + , "transferred from a previous location. Ignoring location match validation..." + ] + else + Except.throw $ Array.fold + [ "The manifest file specifies a location (" + , stringifyJson Location.codec receivedManifest.location + , ") that differs from the location in the registry metadata (" + , stringifyJson Location.codec metadata.location + , "). If you would like to change the location of your package you should " + , "submit a transfer operation." + ] when (Operation.Validation.isMetadataPackage (Manifest receivedManifest)) do Except.throw "The `metadata` package cannot be uploaded to the registry because it is a protected package." 
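To make the transfer scenario above concrete, here is a minimal sketch of the state that triggers the new legacy-mode warning. The owner names are hypothetical; the `GitHub` constructor of `Location` is used as elsewhere in this patch. The manifest still names the repository the version was originally published from, while the metadata records only the latest, post-transfer location, so `Operation.Validation.locationMatches` reports a mismatch:

```purescript
-- Hypothetical illustration only, not actual registry data: an old version's
-- manifest records the location it was published from at the time...
manifestLocation :: Location
manifestLocation = GitHub { owner: "old-owner", repo: "purescript-example", subdir: Nothing }

-- ...while metadata tracks only the current location after a transfer, so the
-- two disagree even though the package contents are identical.
metadataLocation :: Location
metadataLocation = GitHub { owner: "new-owner", repo: "purescript-example", subdir: Nothing }
```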
diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index d0c44b6ce..5f5fd328c 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -6,6 +6,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.JSDate as JSDate +import Data.String as String import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff import Effect.Exception as Exception @@ -38,7 +39,7 @@ data ImportType = Old | Recent derive instance Eq ImportType -- | An effect for fetching package sources -data Source a = Fetch FilePath Location String (Either String FetchedSource -> a) +data Source a = Fetch FilePath Location String (Either FetchError FetchedSource -> a) derive instance Functor Source @@ -49,9 +50,24 @@ _source = Proxy type FetchedSource = { path :: FilePath, published :: DateTime } +data FetchError + = GitHubOnly + | NoSubdir + | InaccessibleRepo Octokit.Address + | NoToplevelDir + | Fatal String + +printFetchError :: FetchError -> String +printFetchError = case _ of + GitHubOnly -> "Packages are only allowed to come from GitHub for now. See issue #15." + NoSubdir -> "Monorepos and the `subdir` key are not supported yet. See issue #16." + InaccessibleRepo { owner, repo } -> "Repository located at https://github.com/" <> owner <> "/" <> repo <> ".git is inaccessible or does not exist." + NoToplevelDir -> "Downloaded tarball has no top-level directory." + Fatal err -> "Unrecoverable error. " <> err + -- | Fetch the provided location to the provided destination path. fetch :: forall r. FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch destination location ref = Except.rethrow =<< Run.lift _source (Fetch destination location ref identity) +fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a @@ -65,11 +81,11 @@ handle importType = case _ of case location of Git _ -> do -- TODO: Support non-GitHub packages. Remember subdir when doing so. (See #15) - Except.throw "Packages are only allowed to come from GitHub for now. See #15" + Except.throw GitHubOnly GitHub { owner, repo, subdir } -> do -- TODO: Support subdir. In the meantime, we verify subdir is not present. (See #16) - when (isJust subdir) $ Except.throw "`subdir` is not supported for now. 
See #16" + when (isJust subdir) $ Except.throw NoSubdir case pursPublishMethod of -- This needs to be removed so that we can support non-GitHub packages (#15) @@ -91,29 +107,45 @@ handle importType = case _ of , timeout = Milliseconds 15_000.0 } - clonePackageAtTag = do - let url = Array.fold [ "https://github.com/", owner, "/", repo ] - let args = [ "clone", url, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] - withRetry retryOpts (Git.gitCLI args Nothing) >>= case _ of - Cancelled -> Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> url <> " " <> ref - Failed err -> Aff.throwError $ Aff.error err - Succeeded _ -> pure unit + cloneUrl = + Array.fold [ "https://github.com/", owner, "/", repo ] + + cloneArgs = + [ "clone", cloneUrl, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] + + clonePackageAtTag = + withRetry retryOpts (Git.gitCLI cloneArgs Nothing) >>= case _ of + Cancelled -> + Aff.throwError $ Aff.error $ "Timed out attempting to clone git tag: " <> cloneUrl <> " " <> ref + Failed err -> + Aff.throwError $ Aff.error err + Succeeded _ -> + pure unit Run.liftAff (Aff.attempt clonePackageAtTag) >>= case _ of Right _ -> Log.debug $ "Cloned package source to " <> repoDir Left error -> do + Log.warn $ "Git clone command failed:\n " <> String.joinWith " " (Array.cons "git" cloneArgs) Log.error $ "Failed to clone git tag: " <> Aff.message error - Except.throw $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref + + -- We'll receive this message if we try to clone a repo which doesn't + -- exist, which is interpreted as an attempt to fetch a private repo. + let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" + + if String.contains (String.Pattern missingRepoErr) (Aff.message error) then + Except.throw $ InaccessibleRepo { owner, repo } + else + Except.throw $ Fatal $ "Failed to clone repository " <> owner <> "/" <> repo <> " at ref " <> ref Log.debug $ "Getting published time..." let getRefTime = case importType of Old -> do - timestamp <- Except.rethrow =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) + timestamp <- (Except.rethrow <<< lmap Fatal) =<< Run.liftAff (Git.gitCLI [ "log", "-1", "--date=iso8601-strict", "--format=%cd", ref ] (Just repoDir)) jsDate <- Run.liftEffect $ JSDate.parse timestamp dateTime <- case JSDate.toDateTime jsDate of - Nothing -> Except.throw $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate + Nothing -> Except.throw $ Fatal $ "Could not parse timestamp of git ref to a datetime given timestamp " <> timestamp <> " and parsed js date " <> JSDate.toUTCString jsDate Just parsed -> pure parsed pure dateTime Recent -> @@ -122,8 +154,8 @@ handle importType = case _ of -- Cloning will result in the `repo` name as the directory name publishedTime <- Except.runExcept getRefTime >>= case _ of Left error -> do - Log.error $ "Failed to get published time: " <> error - Except.throw $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." + Log.error $ "Failed to get published time. " <> printFetchError error + Except.throw $ Fatal $ "Cloned repository " <> owner <> "/" <> repo <> " at ref " <> ref <> ", but could not read the published time from the ref." 
Right value -> pure value pure { path: repoDir, published: publishedTime } @@ -138,12 +170,12 @@ handle importType = case _ of commit <- GitHub.getRefCommit { owner, repo } (RawVersion ref) >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at ref " <> ref <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref + Except.throw $ Fatal $ "Failed to fetch commit data associated with " <> upstream <> " at ref " <> ref Right result -> pure result GitHub.getCommitDate { owner, repo } commit >>= case _ of Left githubError -> do Log.error $ "Failed to fetch " <> upstream <> " at commit " <> commit <> ": " <> Octokit.printGitHubError githubError - Except.throw $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref + Except.throw $ Fatal $ "Unable to get published time for commit " <> commit <> " associated with the given ref " <> ref Right a -> pure a let tarballName = ref <> ".tar.gz" @@ -155,16 +187,16 @@ handle importType = case _ of Run.liftAff $ Fetch.withRetryRequest archiveUrl {} case response of - Cancelled -> Except.throw $ "Could not download " <> archiveUrl + Cancelled -> Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.FetchError error) -> do Log.error $ "Failed to download " <> archiveUrl <> " because of an HTTP error: " <> Exception.message error - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do arrayBuffer <- Run.liftAff arrayBufferAff buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) Log.error $ "Failed to download " <> archiveUrl <> " because of a non-200 status code (" <> show status <> ") with body " <> bodyString - Except.throw $ "Could not download " <> archiveUrl + Except.throw $ Fatal $ "Could not download " <> archiveUrl Succeeded { arrayBuffer: arrayBufferAff } -> do arrayBuffer <- Run.liftAff arrayBufferAff Log.debug $ "Successfully downloaded " <> archiveUrl <> " into a buffer." @@ -172,14 +204,14 @@ handle importType = case _ of Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of Left error -> do Log.error $ "Downloaded " <> archiveUrl <> " but failed to write it to the file at path " <> absoluteTarballPath <> ":\n" <> Aff.message error - Except.throw $ "Could not download " <> archiveUrl <> " due to an internal error." + Except.throw $ Fatal $ "Could not download " <> archiveUrl <> " due to an internal error." Right _ -> Log.debug $ "Tarball downloaded to " <> absoluteTarballPath Log.debug "Verifying tarball..." Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of Nothing -> - Except.throw "Downloaded tarball from GitHub has no top-level directory." + Except.throw NoToplevelDir Just path -> do Log.debug "Extracting the tarball..." 
Tar.extract { cwd: destination, archive: tarballName } diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 9d3c27c9e..42cc7d6ab 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -44,7 +44,7 @@ import Registry.App.Effect.Pursuit (PURSUIT, Pursuit(..)) import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry (REGISTRY, Registry(..)) import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE, Source(..)) +import Registry.App.Effect.Source (FetchError(..), SOURCE, Source(..)) import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE, Storage) import Registry.App.Effect.Storage as Storage @@ -309,8 +309,8 @@ handleSourceMock env = case _ of Fetch destination location ref reply -> do now <- Run.liftEffect Now.nowDateTime case location of - Git _ -> pure $ reply $ Left "Packages cannot be published from Git yet (only GitHub)." - GitHub { subdir } | isJust subdir -> pure $ reply $ Left "Packages cannot use the 'subdir' key yet." + Git _ -> pure $ reply $ Left GitHubOnly + GitHub { subdir } | isJust subdir -> pure $ reply $ Left NoSubdir GitHub { repo } -> do let name = stripPureScriptPrefix repo @@ -319,7 +319,7 @@ handleSourceMock env = case _ of localPath = Path.concat [ env.github, dirname ] destinationPath = Path.concat [ destination, dirname <> "-checkout" ] Run.liftAff (Aff.attempt (FS.Aff.stat localPath)) >>= case _ of - Left _ -> pure $ reply $ Left $ "Cannot copy " <> localPath <> " because it does not exist." + Left _ -> pure $ reply $ Left $ Fatal $ "Cannot copy " <> localPath <> " because it does not exist." Right _ -> do Run.liftAff $ FS.Extra.copy { from: localPath, to: destinationPath, preserveTimestamps: true } case pursPublishMethod of diff --git a/flake.nix b/flake.nix index c886c63d3..44f2c7537 100644 --- a/flake.nix +++ b/flake.nix @@ -70,6 +70,11 @@ # (typically >4GB), and source packgaes really ought not be shipping large # files — just source code. GIT_LFS_SKIP_SMUDGE = 1; + + # We disable git from entering interactive mode at any time, as there is no + # one there to answer prompts. + GIT_TERMINAL_PROMPT = 0; + registryOverlay = final: prev: rec { nodejs = prev.nodejs_20; @@ -284,7 +289,7 @@ # according to the env.example file, or to the values explicitly set below # (e.g. DHALL_PRELUDE and DHALL_TYPES). 
defaultEnv = parseEnv ./.env.example // { - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE; + inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE GIT_TERMINAL_PROMPT; }; # Parse a .env file, skipping empty lines and comments, into Nix attrset @@ -826,7 +831,7 @@ devShells = { default = pkgs.mkShell { - inherit GIT_LFS_SKIP_SMUDGE; + inherit GIT_LFS_SKIP_SMUDGE GIT_TERMINAL_PROMPT; name = "registry-dev"; packages = with pkgs; [ diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 95bbd61f1..fade3c9b0 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -1255,7 +1255,13 @@ fetchSpagoYaml address ref = do Right contents -> do Log.debug $ "Found spago.yaml file\n" <> contents case parseYaml SpagoYaml.spagoYamlCodec contents of - Left error -> Run.Except.throw $ "Failed to parse spago.yaml file:\n" <> contents <> "\nwith errors:\n" <> error + Left error -> do + Log.warn $ "Failed to parse spago.yaml file:\n" <> contents <> "\nwith errors:\n" <> error + pure Nothing + Right { package: Just { publish: Just { location: Just location } } } + | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do + Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..." + pure Nothing Right config -> case SpagoYaml.spagoYamlToManifest config of Left err -> do Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err From ec8e3ff0d8bf79c7372278b9a5be916620d92b69 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 26 Aug 2024 18:47:08 -0400 Subject: [PATCH 49/64] Format --- app/src/App/Effect/Source.purs | 2 +- flake.nix | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index 5f5fd328c..828759792 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -130,7 +130,7 @@ handle importType = case _ of -- We'll receive this message if we try to clone a repo which doesn't -- exist, which is interpreted as an attempt to fetch a private repo. - let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" + let missingRepoErr = "fatal: could not read Username for 'https://github.com': terminal prompts disabled" if String.contains (String.Pattern missingRepoErr) (Aff.message error) then Except.throw $ InaccessibleRepo { owner, repo } diff --git a/flake.nix b/flake.nix index 44f2c7537..191561aa5 100644 --- a/flake.nix +++ b/flake.nix @@ -289,7 +289,12 @@ # according to the env.example file, or to the values explicitly set below # (e.g. DHALL_PRELUDE and DHALL_TYPES). defaultEnv = parseEnv ./.env.example // { - inherit DHALL_PRELUDE DHALL_TYPES GIT_LFS_SKIP_SMUDGE GIT_TERMINAL_PROMPT; + inherit + DHALL_PRELUDE + DHALL_TYPES + GIT_LFS_SKIP_SMUDGE + GIT_TERMINAL_PROMPT + ; }; # Parse a .env file, skipping empty lines and comments, into Nix attrset From a3f086bde9183cf3f57f55ddbc1a6d872ff0537b Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sat, 7 Jun 2025 14:11:47 +0300 Subject: [PATCH 50/64] Update SPEC.md --- SPEC.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SPEC.md b/SPEC.md index bb545f214..423d0d80d 100644 --- a/SPEC.md +++ b/SPEC.md @@ -234,7 +234,7 @@ For example: All packages in the registry have an associated metadata file, which is located in the `metadata` directory of the `registry` repository under the package name. 
For example, the metadata for the `aff` package is located at: https://github.com/purescript/registry/blob/main/metadata/aff.json. Metadata files are the source of truth on all published and unpublished versions for a particular package for what their content is and where the package is located. Metadata files are produced by the registry, not by package authors, though they take some information from package manifests.
 
-Each published version of a package records four fields:
+Each published version of a package records the following fields:
 
 - `hash`: a [`Sha256`](#Sha256) of the compressed archive fetched by the registry for the given version
 - `bytes`: the size of the tarball in bytes

From de7c6e343cb2c9b5784bf71f0e76c3e38ebdf846 Mon Sep 17 00:00:00 2001
From: Fabrizio Ferrai
Date: Sat, 7 Jun 2025 17:26:52 +0300
Subject: [PATCH 51/64] Add 'purescript' to the list of reserved packages

---
 scripts/src/LegacyImporter.purs | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs
index fade3c9b0..e170b9bec 100644
--- a/scripts/src/LegacyImporter.purs
+++ b/scripts/src/LegacyImporter.purs
@@ -249,7 +249,8 @@ runLegacyImport logs = do
 
   let metadataPackage = unsafeFromRight (PackageName.parse "metadata")
   let pursPackage = unsafeFromRight (PackageName.parse "purs")
-  for_ [ metadataPackage, pursPackage ] \package ->
+  let purescriptPackage = unsafeFromRight (PackageName.parse "purescript")
+  for_ [ metadataPackage, pursPackage, purescriptPackage ] \package ->
     Registry.readMetadata package >>= case _ of
       Nothing -> do
         Log.info $ "Writing empty metadata file for " <> PackageName.print package

From 8c8d728054eb00b7c756d7a5e6e5fdb31ee5da96 Mon Sep 17 00:00:00 2001
From: Fabrizio Ferrai
Date: Sat, 7 Jun 2025 17:27:17 +0300
Subject: [PATCH 52/64] Move to NonEmpty in dhall types

---
 flake.nix               | 2 +-
 types/v1/Metadata.dhall | 3 ++-
 types/v1/Prelude.dhall  | 2 +-
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/flake.nix b/flake.nix
index 191561aa5..0857da8b4 100644
--- a/flake.nix
+++ b/flake.nix
@@ -59,7 +59,7 @@
       DHALL_PRELUDE = "${
         builtins.fetchGit {
           url = "https://github.com/dhall-lang/dhall-lang";
-          rev = "e35f69d966f205fdc0d6a5e8d0209e7b600d90b3";
+          rev = "25cf020ab307cb2d66826b0d1ddac8bc89241e27";
         }
       }/Prelude/package.dhall";
 
diff --git a/types/v1/Metadata.dhall b/types/v1/Metadata.dhall
index 2f50decf6..083960152 100644
--- a/types/v1/Metadata.dhall
+++ b/types/v1/Metadata.dhall
@@ -1,4 +1,5 @@
 let Map = (./Prelude.dhall).Map.Type
+let NonEmpty = (./Prelude.dhall).NonEmpty.Type
 
 let Owner = ./Owner.dhall
 
@@ -14,7 +15,7 @@ let PublishedMetadata =
       { hash : Sha256
       , bytes : Natural
       , publishedTime : ISO8601String
-      , compilers : < Single : Version | Many : List Version >
+      , compilers : NonEmpty Version
       }
 
 let UnpublishedMetadata =
diff --git a/types/v1/Prelude.dhall b/types/v1/Prelude.dhall
index 8b05657c4..d86e105e1 100644
--- a/types/v1/Prelude.dhall
+++ b/types/v1/Prelude.dhall
@@ -2,4 +2,4 @@
 -- remote hosts in an offline environment (such as Nix in CI). DHALL_PRELUDE is
 -- automatically set in your Nix shell, but if you are not using a Nix shell and
 -- want to run this locally then the URL will be used instead.
-env:DHALL_PRELUDE ?
https://prelude.dhall-lang.org/v19.0.0/package.dhall sha256:eb693342eb769f782174157eba9b5924cf8ac6793897fc36a31ccbd6f56dafe2 +env:DHALL_PRELUDE From 7b577711536f0963911ce1afa373fdc4e8da262b Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 8 Jun 2025 16:48:14 +0300 Subject: [PATCH 53/64] compilers is a NonEmptyArray --- app/src/App/API.purs | 4 ++-- app/test/App/API.purs | 24 +++++++++------------ app/test/App/Legacy/PackageSet.purs | 2 +- lib/src/Metadata.purs | 16 ++------------ lib/src/Solver.purs | 8 ++----- lib/test/Registry/Operation/Validation.purs | 3 ++- scripts/src/LegacyImporter.purs | 4 +--- 7 files changed, 20 insertions(+), 41 deletions(-) diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 9a83187bd..89322d52b 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -738,7 +738,7 @@ publish maybeLegacyIndex payload = do Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." - let newPublishedVersion = { hash, ref: payload.ref, compilers: Left payload.compiler, publishedTime, bytes } + let newPublishedVersion = { hash, ref: payload.ref, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) @@ -786,7 +786,7 @@ publish maybeLegacyIndex payload = do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = Right (NonEmptySet.toUnfoldable1 validCompilers) })) (un Manifest manifest).version newMetadata.published } + let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) diff --git a/app/test/App/API.purs b/app/test/App/API.purs index fca1f14c0..3a8c66ef2 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -137,13 +137,11 @@ spec = do case Map.lookup version effectMetadata.published of Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." 
- Just published -> case published.compilers of - Left one -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix but unfinished single version: " <> Version.print one - Right many -> do - let many' = NonEmptyArray.toArray many - let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] - unless (many' == expected) do - Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') -- Finally, we can verify that publishing the package again should fail -- since it already exists. @@ -186,13 +184,11 @@ spec = do case Map.lookup transitive.version transitiveMetadata.published of Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." - Just published -> case published.compilers of - Left one -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix but unfinished single version: " <> Version.print one - Right many -> do - let many' = NonEmptyArray.toArray many - let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] - unless (many' == expected) do - Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') + Just published -> do + let many' = NonEmptyArray.toArray published.compilers + let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + unless (many' == expected) do + Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') Registry.readManifest transitive.name transitive.version >>= case _ of Nothing -> Except.throw $ "Expected " <> PackageName.print transitive.name <> " to be in manifest index." diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index e3279f68b..414b09a57 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -209,7 +209,7 @@ unsafeMetadataEntry (Tuple name version) = do { ref: LenientVersion.raw version , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" , bytes: 0.0 - , compilers: Right (NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2")) + , compilers: NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2") , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom } diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index 5079baf0a..ddc39b48b 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -73,7 +73,7 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object -- | not rely on its presence! 
type PublishedMetadata = { bytes :: Number - , compilers :: Either Version (NonEmptyArray Version) + , compilers :: NonEmptyArray Version , hash :: Sha256 , publishedTime :: DateTime @@ -84,23 +84,11 @@ type PublishedMetadata = publishedMetadataCodec :: CJ.Codec PublishedMetadata publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object { bytes: CJ.number - , compilers: compilersCodec + , compilers: CJ.Common.nonEmptyArray Version.codec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime , ref: CJ.string } - where - compilersCodec :: CJ.Codec (Either Version (NonEmptyArray Version)) - compilersCodec = Codec.codec' decode encode - where - decode :: JSON -> Except CJ.DecodeError (Either Version (NonEmptyArray Version)) - decode json = except do - map Left (CJ.decode Version.codec json) - <|> map Right (CJ.decode (CJ.Common.nonEmptyArray Version.codec) json) - - encode = case _ of - Left version -> CJ.encode Version.codec version - Right versions -> CJ.encode (CJ.Common.nonEmptyArray Version.codec) versions -- | Metadata about an unpublished package version. type UnpublishedMetadata = diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index fad71e937..929894645 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -63,12 +63,8 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do getDependencies (Manifest manifest) = fromMaybe manifest.dependencies do Metadata { published } <- Map.lookup manifest.name metadata - { compilers: eitherCompilers } <- Map.lookup manifest.version published - -- If the dependency hasn't yet had all compilers computed for it, - -- then we don't add it to the dependencies to avoid over- - -- constraining the solver. - compilers <- Either.hush eitherCompilers - -- Otherwise, we construct a maximal range for the compilers the + { compilers } <- Map.lookup manifest.version published + -- Construct a maximal range for the compilers the -- indicated package version supports. 
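+      -- For example, known-good compilers [ 0.15.4, 0.15.7 ] produce a range
+      -- whose bounds come from the minimum and maximum supported versions.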
let min = Foldable1.minimum compilers diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index 338fe4266..cf474f103 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -2,6 +2,7 @@ module Test.Registry.Operation.Validation where import Prelude +import Data.Array.NonEmpty as NonEmptyArray import Data.Either (Either(..)) import Data.Either as Either import Data.Foldable (for_) @@ -63,7 +64,7 @@ spec = do now = unsafeDateTime "2022-12-12T12:00:00.000Z" outOfRange = unsafeDateTime "2022-12-10T11:00:00.000Z" inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" - compilers = Left (unsafeVersion "0.13.0") + compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0") publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index e170b9bec..d642d41dc 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -1329,9 +1329,7 @@ compatibleCompilers allMetadata resolutions = do associated = Map.toUnfoldableUnordered resolutions # Array.mapMaybe \(Tuple name version) -> do Metadata metadata <- Map.lookup name allMetadata published <- Map.lookup version metadata.published - case published.compilers of - Left _ -> Nothing - Right compilers -> Just { name, version, compilers: compilers } + Just { name, version, compilers: published.compilers } case Array.uncons associated of Nothing -> From 343b90b126a92bc06686a9aacf2484c64e75c8a0 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 8 Jun 2025 15:12:41 +0000 Subject: [PATCH 54/64] Fix tests --- lib/test/Registry/Metadata.purs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index 08c12d887..3f2f97935 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -25,21 +25,21 @@ recordStudio = "published": { "0.1.0": { "bytes": 3438, - "compilers": "0.13.0", + "compilers": [ "0.13.0" ], "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", "publishedTime": "2021-03-27T10:03:46.000Z", "ref": "v0.1.0" }, "0.2.1": { "bytes": 3365, - "compilers": "0.13.0", + "compilers": [ "0.13.0" ], "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", "publishedTime": "2022-05-15T10:51:57.000Z", "ref": "v0.2.1" }, "1.0.0": { "bytes": 5155, - "compilers": "0.13.0", + "compilers": [ "0.13.0" ], "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", "publishedTime": "2022-11-03T17:30:28.000Z", "ref": "v1.0.0" From 67c7cb556340510de9661d0d0e19ea68a3b51d6c Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 8 Jun 2025 15:38:00 +0000 Subject: [PATCH 55/64] Fix tests --- lib/test/Registry/Metadata.purs | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index 3f2f97935..02e12c053 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -25,21 +25,27 @@ recordStudio = "published": { "0.1.0": { "bytes": 3438, - "compilers": [ "0.13.0" ], + "compilers": [ + "0.13.0" + ], "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", "publishedTime": "2021-03-27T10:03:46.000Z", "ref": "v0.1.0" }, "0.2.1": { "bytes": 3365, - "compilers": [ "0.13.0" ], + "compilers": [ + "0.13.0" + ], "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", "publishedTime": 
"2022-05-15T10:51:57.000Z", "ref": "v0.2.1" }, "1.0.0": { "bytes": 5155, - "compilers": [ "0.13.0" ], + "compilers": [ + "0.13.0" + ], "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", "publishedTime": "2022-11-03T17:30:28.000Z", "ref": "v1.0.0" From e863d6bfb8eec055a8d35580710c8f90b82448e1 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 6 Dec 2025 16:29:13 -0500 Subject: [PATCH 56/64] fix e2e tests --- app/fixtures/github-packages/effect-4.0.0/bower.json | 3 +-- app/fixtures/registry/metadata/prelude.json | 9 ++++++--- app/fixtures/registry/metadata/type-equality.json | 9 ++++++--- app/test/App/API.purs | 10 +++++----- lib/src/Range.purs | 1 - nix/test/config.nix | 3 --- 6 files changed, 18 insertions(+), 17 deletions(-) diff --git a/app/fixtures/github-packages/effect-4.0.0/bower.json b/app/fixtures/github-packages/effect-4.0.0/bower.json index bed5c5ab8..3b520e6ae 100644 --- a/app/fixtures/github-packages/effect-4.0.0/bower.json +++ b/app/fixtures/github-packages/effect-4.0.0/bower.json @@ -16,7 +16,6 @@ "package.json" ], "dependencies": { - "purescript-prelude": "^6.0.0", - "purescript-type-equality": "^4.0.0" + "purescript-prelude": "^6.0.0" } } diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 965567c83..d25e9a0f6 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -5,9 +5,12 @@ }, "published": { "6.0.1": { - "bytes": 31142, - "compilers": ["0.15.3", "0.15.4", "0.15.5"], - "hash": "sha256-o8p6SLYmVPqzXZhQFd2hGAWEwBoXl1swxLG/scpJ0V0=", + "bytes": 31129, + "compilers": [ + "0.15.9", + "0.15.10" + ], + "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", "ref": "v6.0.1" } diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index b5d5a86ea..b57b9fd09 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -5,9 +5,12 @@ }, "published": { "4.0.1": { - "bytes": 2184, - "compilers": ["0.15.2", "0.15.3", "0.15.4"], - "hash": "sha256-Hs9D6Y71zFi/b+qu5NSbuadUQXe5iv5iWx0226vOHUw=", + "bytes": 2179, + "compilers": [ + "0.15.9", + "0.15.10" + ], + "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", "ref": "v4.0.1" } diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 3a8c66ef2..caaf6c215 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -92,7 +92,7 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.4" + { compiler: Utils.unsafeVersion "0.15.9" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref @@ -139,7 +139,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -154,7 +154,7 @@ spec = do -- but did not have documentation make it to Pursuit. 
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.4" + { compiler: Utils.unsafeVersion "0.15.9" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" @@ -168,7 +168,7 @@ spec = do let transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } transitivePublishArgs = - { compiler: Utils.unsafeVersion "0.15.4" + { compiler: Utils.unsafeVersion "0.15.9" , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version @@ -186,7 +186,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.3", "0.15.4", "0.15.5" ] + let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') diff --git a/lib/src/Range.purs b/lib/src/Range.purs index c0e5e1d45..0f707c578 100644 --- a/lib/src/Range.purs +++ b/lib/src/Range.purs @@ -4,7 +4,6 @@ module Registry.Range ( Range , caret - , exact , codec , exact , greaterThanOrEq diff --git a/nix/test/config.nix b/nix/test/config.nix index 84475d895..66813fe5b 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -380,12 +380,9 @@ let for repo in "$FIXTURES_DIR"/purescript/*/; do cd "$repo" git init -b master && git add . 
- GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ git commit -m "Fixture commit" - - && git commit -m "Fixture commit" git config receive.denyCurrentBranch ignore done From beb8d9327dc9b2100ce9db828d15d9a7fc4034b6 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 17 Dec 2025 08:13:00 -0500 Subject: [PATCH 57/64] update flake to latest --- flake.lock | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/flake.lock b/flake.lock index c7ffaaad8..456e35c15 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1765855794, - "narHash": "sha256-aG/E/kJ5PpEbrlVU+QHaCFm3ULOwL0ni85ONQef35pk=", + "lastModified": 1765976197, + "narHash": "sha256-KiX3eomD6ajjJ8ByA/cM1G7RbOjFbr3b+aX909i8K3o=", "owner": "nixos", "repo": "nixpkgs", - "rev": "33c80e50d3d783a58107326539e15181971272ed", + "rev": "5e38e4851ce6c82c2409b2b5616b4f3c69d6497e", "type": "github" }, "original": { @@ -83,11 +83,11 @@ ] }, "locked": { - "lastModified": 1765858580, - "narHash": "sha256-KEDJMxXKSEgywe3I7PTc2m5dAI2dTQwzzylo2cnU3+U=", + "lastModified": 1765940228, + "narHash": "sha256-G21SwmQsdMLfBIyhLtlPiAHkqOSJzNXTqnFGtMYGxAU=", "owner": "thomashoneyman", "repo": "purescript-overlay", - "rev": "45bbb4f5e657080adfd01d629ee6150c4b8c36f8", + "rev": "acca6cfb1b9605b8755b238285fe69ee4090a510", "type": "github" }, "original": { From a55bef668f313070e91caf07cc1e79a5130b5587 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 3 Jan 2026 14:27:21 -0500 Subject: [PATCH 58/64] add an agents file --- AGENTS.md | 129 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 AGENTS.md diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 000000000..dc87b7e4e --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,129 @@ +# AGENTS.md + +The PureScript Registry implements a package registry for PureScript. See SPEC.md for the registry specification and CONTRIBUTING.md for detailed contributor documentation. + +## Development Environment + +This project uses Nix with direnv. You should already be in the Nix shell automatically when entering the directory. If not, run: + +```sh +nix develop +``` + +### Build and Test + +The registry is implemented in PureScript. Use spago to build it and run PureScript tests. These are cheap and fast and should be used when working on the registry packages. + +```sh +spago build # Build all PureScript code +spago test # Run unit tests +``` + +Integration tests require two terminals (or the use of test-env in detached mode). The integration tests are only necessary to run if working on the server (app). + +```sh +# Terminal 1: Start test environment (wiremock mocks + registry server on port 9000) +nix run .#test-env + +# Terminal 2: Run E2E tests once server is ready +spago run -p registry-app-e2e +``` + +Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode. + +#### Smoke Test (Linux only) + +The smoke test verifies that the server comes up properly and tests deployment. Only run this test if you are making changes which could break the deployment of the server. + +```sh +nix build .#checks.x86_64-linux.smoke -L +``` + +#### Continuous Integration via Nix Checks + +There is a full suite of checks implemented with Nix which verify that packages build, formatting is correct, registry types are Dhall-conformant, and more. 
This is the primary check run in CI. + +```sh +nix flake check -L +``` + +## Formatting + +```sh +# Format PureScript +purs-tidy format-in-place app app-e2e foreign lib scripts +purs-tidy check app app-e2e foreign lib scripts + +# Format Nix files +nixfmt *.nix nix/**/*.nix +``` + +## Project Structure + +- `app/` — Registry server implementation. +- `app-e2e/` — E2E tests for the server API. +- `lib/` — **Public library** for consumers (Spago, Pursuit, etc.). Only types and functions useful to external tools belong here. Avoid implementation-specific code. +- `foreign/` — FFI bindings to JavaScript libraries. +- `scripts/` — Runnable modules for registry tasks (LegacyImporter, PackageTransferrer, PackageSetUpdater, etc.). Run via `nix run .#legacy-importer`, etc. +- `test-utils/` — Shared test utilities. +- `db/` — SQLite schemas and migrations (use `dbmate up` to initialize). +- `types/` — Dhall type specifications. +- `nix/` — Nix build and deployment configuration. + +## Scripts & Daily Workflows + +The `scripts/` directory contains modules run as daily jobs by the purescript/registry repository: + +- `LegacyImporter` — imports package versions from legacy Bower registry +- `PackageTransferrer` — handles package transfers +- `PackageSetUpdater` — automatic daily package set updates + +Run scripts via Nix: `nix run .#` (e.g., `nix run .#legacy-importer`). + +## Scratch Directory & Caching + +The `scratch/` directory (gitignored) is used by scripts for: +- `.cache/` — Cached API responses, downloaded packages, etc. +- `logs/` — Log files +- `registry/`, `registry-index/` — Local clones for testing, also modified and optionally committed to by scripts + +Caching is critical for the legacy importer due to the expense of downloading packages. The `Registry.App.Effect.Cache` module handles caching. + +## PureScript Conventions + +### Custom Prelude + +Always use `Registry.App.Prelude` in `app/` and `app-e2e/` directories: + +```purescript +import Registry.App.Prelude +``` + +### Effects via Run + +Use the `run` library for extensible effects. Do NOT perform HTTP calls, console logs, or other effects directly in `Aff`. Check for existing effects in `app/src/App/Effect/` or consider adding one. + +### Import Style + +Import types unqualified, values qualified. Use shortened module names: + +```purescript +import Registry.App.Prelude + +import Data.Array as Array +import Data.String as String +import Node.FS.Aff as FS.Aff +import Parsing (Parser) +import Parsing as Parsing +import Parsing.Combinators as Parsing.Combinators +import Registry.Operation (AuthenticatedData) +import Registry.SSH as SSH +``` + +## Deployment + +Continuous deployment via GitHub Actions on master. 
Manual deploy: + +```sh +colmena apply +``` From 5ab364ae4abb076ea86366e81424d64673f9cd92 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sun, 4 Jan 2026 11:15:51 -0500 Subject: [PATCH 59/64] add archive seeder script --- .gitignore | 2 + AGENTS.md | 4 +- app/src/App/Effect/Source.purs | 17 +- app/src/App/Legacy/Manifest.purs | 7 +- flake.nix | 7 - foreign/src/Foreign/Octokit.purs | 20 ++ lib/src/Metadata.purs | 5 - nix/overlay.nix | 4 + scripts/src/ArchiveSeeder.purs | 362 +++++++++++++++++++++++++++++ scripts/src/LegacyImporter.purs | 383 +++++++++++++++++++++++++++---- 10 files changed, 756 insertions(+), 55 deletions(-) create mode 100644 scripts/src/ArchiveSeeder.purs diff --git a/.gitignore b/.gitignore index 92fc94aae..497ffb046 100644 --- a/.gitignore +++ b/.gitignore @@ -15,6 +15,8 @@ result* *.sqlite3 *.sqlite3-wal *.sqlite3-shm + +TODO.md .spec-results # Keep it secret, keep it safe. diff --git a/AGENTS.md b/AGENTS.md index dc87b7e4e..43e474c2a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,6 +1,6 @@ # AGENTS.md -The PureScript Registry implements a package registry for PureScript. See SPEC.md for the registry specification and CONTRIBUTING.md for detailed contributor documentation. +The PureScript Registry implements a package registry for PureScript. See @SPEC.md for the registry specification and @CONTRIBUTING.md for detailed contributor documentation. ## Development Environment @@ -78,7 +78,7 @@ The `scripts/` directory contains modules run as daily jobs by the purescript/re - `PackageTransferrer` — handles package transfers - `PackageSetUpdater` — automatic daily package set updates -Run scripts via Nix: `nix run .#` (e.g., `nix run .#legacy-importer`). +Run scripts via Nix: `nix run .#` (e.g., `nix run .#legacy-importer`). All scripts support `--help` for usage information. ## Scratch Directory & Caching diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index 828759792..7981daf48 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -110,8 +110,23 @@ handle importType = case _ of cloneUrl = Array.fold [ "https://github.com/", owner, "/", repo ] + -- We disable Git LFS smudging because package sources should not + -- contain large binary files. This avoids downloading LFS objects + -- from misconfigured packages. cloneArgs = - [ "clone", cloneUrl, "--branch", ref, "--single-branch", "-c", "advice.detachedHead=false", repoDir ] + [ "-c" + , "filter.lfs.smudge=cat" + , "-c" + , "filter.lfs.process=cat" + , "clone" + , cloneUrl + , "--branch" + , ref + , "--single-branch" + , "-c" + , "advice.detachedHead=false" + , repoDir + ] clonePackageAtTag = withRetry retryOpts (Git.gitCLI cloneArgs Nothing) >>= case _ of diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 7197a6001..65aad78ec 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -484,7 +484,12 @@ fetchLegacyPackageSets = Run.Except.runExceptAt _legacyPackageSetsError do Nothing -> do Log.debug $ "Cache miss for legacy package set " <> refStr <> ", refetching..." result <- GitHub.getJsonFile Legacy.PackageSet.legacyPackageSetsRepo ref legacyPackageSetCodec "packages.json" - Cache.put _legacyCache (LegacySet ref) result + -- Only cache permanent errors (404, decode errors) and successes. + -- Transient errors (rate limits, network issues) should be retried. 
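+          -- Permanence is determined by `Octokit.isPermanentGitHubError`.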
+ case result of + Right _ -> Cache.put _legacyCache (LegacySet ref) result + Left err | Octokit.isPermanentGitHubError err -> Cache.put _legacyCache (LegacySet ref) result + Left _ -> pure unit pure result Just value -> pure value diff --git a/flake.nix b/flake.nix index 56a98f696..fb9d769cc 100644 --- a/flake.nix +++ b/flake.nix @@ -60,11 +60,6 @@ } }/Prelude/package.dhall"; - # We disable git-lfs files explicitly, as this is intended for large files - # (typically >4GB), and source packgaes really ought not be shipping large - # files — just source code. - GIT_LFS_SKIP_SMUDGE = 1; - # We disable git from entering interactive mode at any time, as there is no # one there to answer prompts. GIT_TERMINAL_PROMPT = 0; @@ -214,7 +209,6 @@ devShells.default = pkgs.mkShell { name = "registry-dev"; - inherit GIT_LFS_SKIP_SMUDGE; # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; @@ -273,7 +267,6 @@ inherit DHALL_PRELUDE DHALL_TYPES - GIT_LFS_SKIP_SMUDGE GIT_TERMINAL_PROMPT ; }; diff --git a/foreign/src/Foreign/Octokit.purs b/foreign/src/Foreign/Octokit.purs index d7787466b..41c882a97 100644 --- a/foreign/src/Foreign/Octokit.purs +++ b/foreign/src/Foreign/Octokit.purs @@ -28,6 +28,7 @@ module Registry.Foreign.Octokit , getRefCommitRequest , githubApiErrorCodec , githubErrorCodec + , isPermanentGitHubError , listTagsRequest , listTeamMembersRequest , newOctokit @@ -390,6 +391,25 @@ printGitHubError = case _ of , error ] +-- | Returns true if the error represents a permanent failure that is safe to +-- | cache across runs. Transient errors (rate limits, network issues, server +-- | errors) return false and should be retried. +-- | +-- | Permanent errors: +-- | - 404 Not Found: Resource doesn't exist at this ref/path +-- | - DecodeError: Content exists but is malformed (immutable at a given tag) +-- | +-- | Transient errors (should NOT be cached): +-- | - UnexpectedError: Network issues, DNS, TLS problems +-- | - 401/403: Auth or rate limit issues +-- | - 5xx: Server-side problems +-- | - Any other status codes +isPermanentGitHubError :: GitHubError -> Boolean +isPermanentGitHubError = case _ of + APIError { statusCode: 404 } -> true + DecodeError _ -> true + _ -> false + atKey :: forall a. 
String -> CJ.Codec a -> JSON.JObject -> Either CJ.DecodeError a atKey key codec object = Maybe.maybe diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index ddc39b48b..c54bed31e 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,20 +20,15 @@ module Registry.Metadata import Prelude -import Control.Alt ((<|>)) -import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) -import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) -import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor -import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location diff --git a/nix/overlay.nix b/nix/overlay.nix index 71049d6e8..24b36afa1 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -50,6 +50,10 @@ let # Map of script name -> { module, description } scripts = { + archive-seeder = { + module = "Registry.Scripts.ArchiveSeeder"; + description = "Seed the registry archive with tarballs for deleted GitHub repos"; + }; legacy-importer = { module = "Registry.Scripts.LegacyImporter"; description = "Import packages from legacy registries (bower, psc-package, etc.)"; diff --git a/scripts/src/ArchiveSeeder.purs b/scripts/src/ArchiveSeeder.purs new file mode 100644 index 000000000..a474876d5 --- /dev/null +++ b/scripts/src/ArchiveSeeder.purs @@ -0,0 +1,362 @@ +-- | This script populates the purescript/registry-archive repository with +-- | tarballs for packages whose GitHub sources have been deleted (404). +-- | +-- | The archive is a temporary measure for the legacy importer migration. +-- | Once packages are re-uploaded to the registry, the archive can be deleted. 
+-- | +-- | The script is designed to be re-run safely: +-- | - Caches 404 and accessible status to disk (scratch/.cache) +-- | - Skips tarballs that already exist in the archive +-- | - Reports transient errors (rate limits, network) separately +-- | - Exits with error code 1 if any packages had transient errors +module Registry.Scripts.ArchiveSeeder where + +import Registry.App.Prelude + +import ArgParse.Basic (ArgParser) +import ArgParse.Basic as Arg +import Control.Apply (lift2) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Exists as Exists +import Data.Formatter.DateTime as Formatter.DateTime +import Data.Map as Map +import Data.Set as Set +import Data.String as String +import Effect.Class.Console as Console +import Effect.Ref as Ref +import Node.FS.Aff as FS.Aff +import Node.FS.Sync as FS.Sync +import Node.Path as Path +import Node.Process as Process +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type Args = + { archivePath :: FilePath + , dryRun :: Boolean + , package :: Maybe PackageName + } + +parser :: ArgParser Args +parser = Arg.fromRecord + { archivePath: + Arg.argument [ "--archive-path" ] + "Path to local checkout of purescript/registry-archive" + # Arg.default (Path.concat [ scratchDir, "registry-archive" ]) + , dryRun: + Arg.flag [ "--dry-run" ] + "Run without writing tarballs or committing to the registry-archive repo." + # Arg.boolean + # Arg.default false + , package: + Arg.argument [ "--package" ] + "Only process the given package (by registry package name)." + # Arg.unformat "PACKAGE" PackageName.parse + # Arg.optional + } + +main :: Effect Unit +main = launchAff_ do + args <- Array.drop 2 <$> liftEffect Process.argv + + let description = "A script for seeding the registry archive with tarballs for deleted GitHub repos." 
+ parsedArgs <- case Arg.parseArgs "archive-seeder" description parser args of + Left err -> Console.log (Arg.printArgError err) *> liftEffect (Process.exit' 1) + Right a -> pure a + + Env.loadEnvFile ".env" + resourceEnv <- Env.lookupResourceEnv + + githubCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + seederCacheRef <- Cache.newCacheRef + let cache = Path.concat [ scratchDir, ".cache" ] + FS.Extra.ensureDirectory cache + + runAppEffects <- do + debouncer <- Registry.newDebouncer + let registryEnv = { pull: Git.Autostash, write: Registry.ReadOnly, repos: Registry.defaultRepos, workdir: scratchDir, debouncer, cacheRef: registryCacheRef } + + token <- Env.lookupRequired Env.githubToken + s3 <- lift2 { key: _, secret: _ } (Env.lookupRequired Env.spacesKey) (Env.lookupRequired Env.spacesSecret) + octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl + pure do + Registry.interpret (Registry.handle registryEnv) + >>> Storage.interpret (Storage.handleS3 { s3, cache }) + >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) + >>> Cache.interpret _seederCache (Cache.handleMemoryFs { cache, ref: seederCacheRef }) + + -- Logging setup + let logDir = Path.concat [ scratchDir, "logs" ] + FS.Extra.ensureDirectory logDir + now <- nowUTC + + let + logFile = "archive-seeder-" <> String.take 19 (Formatter.DateTime.format Internal.Format.iso8601DateTime now) <> ".log" + logPath = Path.concat [ logDir, logFile ] + + hasErrors <- runArchiveSeeder parsedArgs logPath + # runAppEffects + # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) + # Comment.interpret Comment.handleLog + # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) + # Env.runResourceEnv resourceEnv + # Run.runBaseAff' + + when hasErrors do + liftEffect $ Process.exit' 1 + +-- | The status of a GitHub repo: either accessible or returns 404. +-- | We only cache these definitive states, not transient errors. 
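+-- |
+-- | As an illustrative sketch (not part of the script itself), the codec below
+-- | stores each state as a bare JSON string:
+-- |
+-- | ```purescript
+-- | CJ.encode repoStatusCodec RepoAccessible -- JSON string "accessible"
+-- | CJ.encode repoStatusCodec Repo404        -- JSON string "404"
+-- | ```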
+data RepoStatus = RepoAccessible | Repo404 + +derive instance Eq RepoStatus + +repoStatusCodec :: CJ.Codec RepoStatus +repoStatusCodec = CJ.prismaticCodec "RepoStatus" decode encode CJ.string + where + decode = case _ of + "accessible" -> Just RepoAccessible + "404" -> Just Repo404 + _ -> Nothing + encode = case _ of + RepoAccessible -> "accessible" + Repo404 -> "404" + +type SEEDER_CACHE r = (seederCache :: Cache SeederCache | r) + +_seederCache :: Proxy "seederCache" +_seederCache = Proxy + +data SeederCache :: (Type -> Type -> Type) -> Type -> Type +data SeederCache c a = RepoStatusCache PackageName (c RepoStatus a) + +instance Functor2 c => Functor (SeederCache c) where + map k (RepoStatusCache name a) = RepoStatusCache name (map2 k a) + +instance MemoryEncodable SeederCache where + encodeMemory = case _ of + RepoStatusCache name next -> + Exists.mkExists $ Key ("RepoStatus__" <> PackageName.print name) next + +instance FsEncodable SeederCache where + encodeFs = case _ of + RepoStatusCache name next -> + Exists.mkExists $ AsJson ("RepoStatus__" <> PackageName.print name) repoStatusCodec next + +type Stats = + { packagesChecked :: Int + , versionsChecked :: Int + , packagesNeedingArchive :: Int + , versionsNeedingArchive :: Int + , tarballsWritten :: Int + , tarballsSkipped :: Int + , tarballsMissing :: Int + , transientErrors :: Int + } + +emptyStats :: Stats +emptyStats = + { packagesChecked: 0 + , versionsChecked: 0 + , packagesNeedingArchive: 0 + , versionsNeedingArchive: 0 + , tarballsWritten: 0 + , tarballsSkipped: 0 + , tarballsMissing: 0 + , transientErrors: 0 + } + +type SeedEffects r = (SEEDER_CACHE + REGISTRY + STORAGE + GITHUB + LOG + EXCEPT String + AFF + EFFECT + r) + +-- | Returns true if there were transient errors that require re-running +runArchiveSeeder :: forall r. Args -> FilePath -> Run (SeedEffects r) Boolean +runArchiveSeeder args logPath = do + Log.info "Starting archive seeder!" + Log.info $ "Logs available at " <> logPath + Log.info $ "Archive path: " <> args.archivePath + when args.dryRun do + Log.info "Running in dry-run mode (no writes will be performed)" + case args.package of + Nothing -> Log.info "Processing all packages" + Just name -> Log.info $ "Processing single package: " <> PackageName.print name + + -- Ensure archive directory exists (unless dry-run) + unless args.dryRun do + Run.liftAff $ FS.Extra.ensureDirectory args.archivePath + + statsRef <- liftEffect $ Ref.new emptyStats + transientErrorsRef <- liftEffect $ Ref.new ([] :: Array String) + + let + processPackage name (Metadata metadata) = do + liftEffect $ Ref.modify_ (\s -> s { packagesChecked = s.packagesChecked + 1 }) statsRef + + let publishedVersions = Map.keys metadata.published + let versionCount = Set.size publishedVersions + liftEffect $ Ref.modify_ (\s -> s { versionsChecked = s.versionsChecked + versionCount }) statsRef + + -- Extract GitHub address from location + case metadata.location of + Git _ -> do + Log.debug $ PackageName.print name <> ": Git location, skipping (only GitHub packages supported)" + GitHub { owner, repo } -> do + let address = { owner, repo } + + -- Check cache first for definitive status + Cache.get _seederCache (RepoStatusCache name) >>= case _ of + Just RepoAccessible -> do + Log.debug $ PackageName.print name <> ": Cached as accessible, skipping" + Just Repo404 -> do + Log.debug $ PackageName.print name <> ": Cached as 404, processing..." 
+ processDeletedPackage args statsRef name publishedVersions versionCount + Nothing -> do + -- Probe GitHub to check if the repo is accessible + GitHub.listTags address >>= case _ of + Right _ -> do + Log.debug $ PackageName.print name <> ": GitHub repo accessible, caching and skipping" + Cache.put _seederCache (RepoStatusCache name) RepoAccessible + Left (Octokit.APIError err) | err.statusCode == 404 -> do + Log.info $ PackageName.print name <> ": GitHub repo returns 404, caching and processing..." + Cache.put _seederCache (RepoStatusCache name) Repo404 + processDeletedPackage args statsRef name publishedVersions versionCount + Left otherErr -> do + -- Transient error - do NOT cache, log for re-run + let errMsg = PackageName.print name <> ": " <> Octokit.printGitHubError otherErr + Log.warn $ errMsg <> " (transient, will retry on next run)" + liftEffect $ Ref.modify_ (\s -> s { transientErrors = s.transientErrors + 1 }) statsRef + liftEffect $ Ref.modify_ (Array.snoc <@> errMsg) transientErrorsRef + + -- Process either single package or all packages + case args.package of + Just targetName -> Registry.readMetadata targetName >>= case _ of + Nothing -> Except.throw $ "Package " <> PackageName.print targetName <> " not found in registry metadata." + Just metadata -> processPackage targetName metadata + Nothing -> do + allMetadata <- Registry.readAllMetadata + Log.info $ "Read metadata for " <> show (Map.size allMetadata) <> " packages." + forWithIndex_ allMetadata processPackage + + -- Generate summary report + stats <- liftEffect $ Ref.read statsRef + transientErrors <- liftEffect $ Ref.read transientErrorsRef + let report = formatReport stats transientErrors + Log.info report + + let reportPath = Path.concat [ scratchDir, "archive-seeder-report.txt" ] + Run.liftAff $ FS.Aff.writeTextFile UTF8 reportPath report + Log.info $ "Report written to " <> reportPath + + let hadTransientErrors = stats.transientErrors > 0 + let wroteAnything = stats.tarballsWritten > 0 + + if hadTransientErrors then do + Log.warn $ "There were " <> show stats.transientErrors <> " transient errors. Re-run the script to retry." + pure true + else if args.dryRun then do + Log.info $ String.joinWith "\n" + [ "" + , "Dry run complete!" + , "Run without --dry-run to write tarballs and commit." + ] + pure false + else if wroteAnything then do + Log.warn "Make sure to commit and push archive changes!" + pure false + else do + Log.info "Archive seeding complete! No new tarballs were written." + pure false + +processDeletedPackage + :: forall r + . Args + -> Ref.Ref Stats + -> PackageName + -> Set Version + -> Int + -> Run (SeedEffects r) Unit +processDeletedPackage args statsRef name publishedVersions versionCount = do + liftEffect $ Ref.modify_ (\s -> s { packagesNeedingArchive = s.packagesNeedingArchive + 1 }) statsRef + liftEffect $ Ref.modify_ (\s -> s { versionsNeedingArchive = s.versionsNeedingArchive + versionCount }) statsRef + + Log.info $ PackageName.print name <> ": Checking S3 for tarballs..." 
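+  -- Note that a failed S3 query below is only logged; nothing is recorded for
+  -- the package, so it will simply be re-checked on the next run.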
+
+  -- Check S3 for available versions
+  Except.runExcept (Storage.query name) >>= case _ of
+    Left queryErr -> do
+      Log.warn $ PackageName.print name <> ": Failed to query S3: " <> queryErr
+    Right s3Versions -> do
+      Log.debug $ PackageName.print name <> ": S3 has " <> show (Set.size s3Versions) <> " versions"
+
+      -- For each published version, try to download and write to archive
+      for_ publishedVersions \version -> do
+        let formatted = formatPackageVersion name version
+        let archiveSubdir = Path.concat [ args.archivePath, PackageName.print name ]
+        let archiveFile = Path.concat [ archiveSubdir, Version.print version <> ".tar.gz" ]
+
+        -- Check if the tarball already exists in the archive (skip in dry-run,
+        -- since we never create the archive directory in that mode)
+        exists <- if args.dryRun then pure false else liftEffect $ FS.Sync.exists archiveFile
+        if exists then do
+          Log.debug $ formatted <> ": Already exists in archive, skipping"
+          liftEffect $ Ref.modify_ (\s -> s { tarballsSkipped = s.tarballsSkipped + 1 }) statsRef
+        else if Set.member version s3Versions then do
+          if args.dryRun then do
+            Log.info $ formatted <> ": Would download from S3 and write to archive (dry run)"
+            liftEffect $ Ref.modify_ (\s -> s { tarballsWritten = s.tarballsWritten + 1 }) statsRef
+          else do
+            Log.info $ formatted <> ": Downloading from S3..."
+            Run.liftAff $ FS.Extra.ensureDirectory archiveSubdir
+            Except.runExcept (Storage.download name version archiveFile) >>= case _ of
+              Left downloadErr -> do
+                Log.warn $ formatted <> ": Failed to download: " <> downloadErr
+                liftEffect $ Ref.modify_ (\s -> s { tarballsMissing = s.tarballsMissing + 1 }) statsRef
+              Right _ -> do
+                Log.info $ formatted <> ": Written to archive"
+                liftEffect $ Ref.modify_ (\s -> s { tarballsWritten = s.tarballsWritten + 1 }) statsRef
+        else do
+          Log.warn $ formatted <> ": Not available in S3"
+          liftEffect $ Ref.modify_ (\s -> s { tarballsMissing = s.tarballsMissing + 1 }) statsRef
+
+formatReport :: Stats -> Array String -> String
+formatReport stats transientErrors = String.joinWith "\n" (header <> transients)
+  where
+  header =
+    [ "=== Archive Seeder Report ==="
+    , ""
+    , "Packages checked: " <> show stats.packagesChecked
+    , "Versions checked: " <> show stats.versionsChecked
+    , ""
+    , "Packages needing archive (GitHub 404): " <> show stats.packagesNeedingArchive
+    , "Versions needing archive: " <> show stats.versionsNeedingArchive
+    , ""
+    , "Tarballs written: " <> show stats.tarballsWritten
+    , "Tarballs skipped (already exist): " <> show stats.tarballsSkipped
+    , "Tarballs missing (not in S3): " <> show stats.tarballsMissing
+    , ""
+    , "Transient errors (re-run to retry): " <> show stats.transientErrors
+    ]
+
+  -- Only emit this section when there are transient errors to list
+  transients = do
+    guard $ not (Array.null transientErrors)
+    [ "", "Packages with transient errors:" ] <> map ("  - " <> _) transientErrors
diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs
index d642d41dc..8e436f3d5 100644
--- a/scripts/src/LegacyImporter.purs
+++ b/scripts/src/LegacyImporter.purs
@@ -4,12 +4,51 @@
 -- | It can be run in different modes depending on whether you want to generate
 -- | the registry from scratch, including uploading packages to the backend or
 -- | you just want to iteratively pick up new releases.
+-- |
+-- | The legacy importer clones the registry and registry-index repositories into
+-- | `scratch/registry` and `scratch/registry-index`.
After a run, you can diff
+-- | against the upstream to see what changed:
+-- |
+-- | ```sh
+-- | cd scratch/registry
+-- | git diff origin/main -- metadata/
+-- | ```
+-- |
+-- | For a fresh re-upload, reset the local clones first, then delete the
+-- | metadata and index contents so the importer sees an empty registry:
+-- |
+-- | ```sh
+-- | git -C scratch/registry reset --hard origin/main
+-- | rm -rf scratch/registry/metadata/*
+-- | git -C scratch/registry-index reset --hard origin/main
+-- | rm -rf scratch/registry-index/*/
+-- | ```
+-- |
+-- | To also recompute all cached manifests and compilation results, remove the
+-- | cache directory entirely, or delete individual cache files as needed:
+-- |
+-- | ```sh
+-- | rm -rf scratch/.cache
+-- | ```
+-- |
+-- | The script writes several files to `scratch`:
+-- |
+-- | - `import-stats.txt` - Aggregate counts of import results
+-- | - `package-failures.json` - Package-level failures (e.g., repo not found)
+-- | - `version-failures.json` - Version-level failures (e.g., invalid manifest)
+-- | - `publish-failures.json` - Publish-level failures (e.g., no valid compiler)
+-- | - `publish-stats.txt` - Aggregate counts of publish results
+-- | - `reserved-packages.txt` - Packages reserved due to 0.13 or org status
+-- | - `removed-packages.txt` - Packages that fully failed and will be removed
+-- | - `sorted-packages.txt` - All packages in topological order for publishing
+-- | - `logs/` - Detailed logs from each run
 module Registry.Scripts.LegacyImporter where
 
 import Registry.App.Prelude
 
 import ArgParse.Basic (ArgParser)
 import ArgParse.Basic as Arg
+import Codec.JSON.DecodeError as CJ.DecodeError
 import Control.Apply (lift2)
 import Data.Array as Array
 import Data.Array.NonEmpty as NonEmptyArray
@@ -18,7 +57,7 @@ import Data.Codec.JSON.Common as CJ.Common
 import Data.Codec.JSON.Record as CJ.Record
 import Data.Codec.JSON.Variant as CJ.Variant
 import Data.Compactable (separate)
-import Data.DateTime (Date, Month(..))
+import Data.DateTime (Date, DateTime, Month(..))
 import Data.DateTime as DateTime
 import Data.Enum (toEnum)
 import Data.Exists as Exists
@@ -41,7 +80,13 @@ import Data.String.CodeUnits as String.CodeUnits
 import Data.These (These(..))
 import Data.Tuple (uncurry)
 import Data.Variant as Variant
+import Effect.Aff as Aff
 import Effect.Class.Console as Console
+import Effect.Exception as Exception
+import Fetch.Retry as Fetch
+import JSON as JSON
+import JSON.Object as JSON.Object
+import Node.Buffer as Buffer
 import Node.FS.Aff as FS.Aff
 import Node.Path as Path
 import Node.Process as Process
@@ -57,6 +102,7 @@ import Registry.App.CLI.Git as Git
 import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec)
 import Registry.App.CLI.Purs as Purs
 import Registry.App.CLI.PursVersions as PursVersions
+import Registry.App.CLI.Tar as Tar
 import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..))
 import Registry.App.Effect.Cache as Cache
 import Registry.App.Effect.Comment as Comment
@@ -66,6 +112,7 @@ import Registry.App.Effect.GitHub as GitHub
 import Registry.App.Effect.Log (LOG)
 import Registry.App.Effect.Log as Log
 import Registry.App.Effect.Pursuit as Pursuit
+import Registry.App.Effect.Registry (REGISTRY)
 import Registry.App.Effect.Registry as Registry
 import Registry.App.Effect.Source as Source
 import Registry.App.Effect.Storage (STORAGE)
@@ -76,9 +123,11 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali
 import
Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.Constants as Constants import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tar as Foreign.Tar import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec (packageMap, versionMap) import Registry.Internal.Codec as Internal.Codec @@ -291,9 +340,10 @@ runLegacyImport logs = do pure range let - publishLegacyPackage :: Solver.TransitivizedRegistry -> Manifest -> Run _ Unit - publishLegacyPackage legacyIndex (Manifest manifest) = do + publishLegacyPackage :: Solver.TransitivizedRegistry -> Set PackageName -> Manifest -> Run _ Unit + publishLegacyPackage legacyIndex archivePackages (Manifest manifest) = do let formatted = formatPackageVersion manifest.name manifest.version + let isArchiveBacked = manifest.name `Set.member` archivePackages Log.info $ "\n----------\nPUBLISHING: " <> formatted <> "\n----------\n" RawVersion ref <- case Map.lookup manifest.version =<< Map.lookup manifest.name importedIndex.packageRefs of Nothing -> Run.Except.throw $ "Unable to recover package ref for " <> formatted @@ -400,7 +450,14 @@ runLegacyImport logs = do Log.debug $ "No cached compilation for " <> formatted <> ", so compiling with all compilers to find first working one." Log.debug "Fetching source and installing dependencies to test compilers" tmp <- Tmp.mkTmpDir - { path } <- Source.fetch tmp manifest.location ref + path <- + if isArchiveBacked then do + Log.info $ "Using registry archive for " <> formatted <> " instead of GitHub clone." + { path: archivePath } <- fetchFromArchive tmp manifest.name manifest.version + pure archivePath + else do + { path: sourcePath } <- Source.fetch tmp manifest.location ref + pure sourcePath Log.debug $ "Downloaded source to " <> path Log.debug "Downloading dependencies..." let installDir = Path.concat [ tmp, ".registry" ] @@ -498,7 +555,8 @@ runLegacyImport logs = do $ Solver.initializeRegistry $ map (map (un Manifest >>> _.dependencies)) (ManifestIndex.toMap importedIndex.registryIndex) - void $ for manifests (publishLegacyPackage legacyIndex) + let archivePackages = importedIndex.archivePackages + void $ for manifests (publishLegacyPackage legacyIndex archivePackages) Log.info "Finished publishing! Collecting all publish failures and writing to disk." 
let @@ -542,6 +600,7 @@ type ImportedIndex = , removedPackages :: Map PackageName Location , registryIndex :: ManifestIndex , packageRefs :: Map PackageName (Map Version RawVersion) + , archivePackages :: Set PackageName } -- | Construct a valid registry index containing manifests for all packages from @@ -554,13 +613,16 @@ importLegacyRegistry legacyRegistry = do manifests <- forWithIndex legacyRegistry buildLegacyPackageManifests let - separatedPackages :: { left :: Map RawPackageName PackageValidationError, right :: Map RawPackageName (Map RawVersion _) } + separatedPackages :: { left :: Map RawPackageName PackageValidationError, right :: Map RawPackageName PackageManifests } separatedPackages = separate manifests + archiveBackedByRaw :: Map RawPackageName Boolean + archiveBackedByRaw = separatedPackages.right <#> _.archiveBacked + separatedVersions :: { left :: Map RawPackageName (Map RawVersion VersionValidationError), right :: Map RawPackageName (Map RawVersion Manifest) } separatedVersions = separatedPackages.right # flip foldlWithIndex { left: Map.empty, right: Map.empty } \key acc next -> do - let { left, right } = separate next + let { left, right } = separate next.versions { left: if Map.isEmpty left then acc.left else Map.insert key left acc.left , right: if Map.isEmpty right then acc.right else Map.insert key right acc.right } @@ -614,14 +676,29 @@ importLegacyRegistry legacyRegistry = do [ { package: RawPackageName (PackageName.print name), version: ref, dependencies: Array.fromFoldable $ Map.keys deps } ] Map.unionWith Map.union separatedVersions.left dependencyFailures + archivePackages :: Set PackageName + archivePackages = + Set.fromFoldable do + Tuple rawName isArchive <- Map.toUnfoldable archiveBackedByRaw + guard isArchive + let (RawPackageName raw) = rawName + name <- Array.fromFoldable (hush $ PackageName.parse raw) + pure name + pure { failedPackages: packageFailures , failedVersions: versionFailures , removedPackages: removedPackages , registryIndex: validIndex , packageRefs + , archivePackages } +type PackageManifests = + { archiveBacked :: Boolean + , versions :: Map RawVersion (Either VersionValidationError Manifest) + } + -- | Attempt to build valid manifests for all releases associated with the given -- | legacy package. This will result in a package error if versions could not -- | be fetched in the first place. Otherwise, it will produce errors for all @@ -630,7 +707,7 @@ buildLegacyPackageManifests :: forall r . RawPackageName -> String - -> Run (API.PublishEffects + IMPORT_CACHE + r) (Either PackageValidationError (Map RawVersion (Either VersionValidationError Manifest))) + -> Run (API.PublishEffects + IMPORT_CACHE + r) (Either PackageValidationError PackageManifests) buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPackage do Log.info $ "Processing " <> un RawPackageName rawPackage package <- validatePackage rawPackage rawUrl @@ -645,34 +722,49 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Cache.get _importCache (ImportManifest package.name (RawVersion tag.name)) >>= case _ of Just cached -> exceptVersion cached Nothing -> do - -- While technically not 'legacy', we do need to handle packages with - -- spago.yaml files because they've begun to pop up since the registry - -- alpha began and we don't want to drop them when doing a re-import. 
- fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of - Just manifest -> do - Log.debug $ "Built manifest from discovered spago.yaml file." - Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) - pure manifest - Nothing -> do - Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) - manifest <- Run.Except.runExceptAt _exceptVersion do - exceptVersion $ validateVersionDisabled package.name version - legacyManifest <- do - Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of - Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } - Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest - case manifest of - Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err - Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val - Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest - exceptVersion manifest + -- For archive-backed packages (where GitHub repo is unavailable), + -- fetch the manifest directly from the registry archive tarball. + if package.archiveBacked then do + Log.debug $ "Package is archive-backed, fetching manifest from archive for " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + Run.Except.runExcept (fetchManifestFromArchive package.name (LenientVersion.version version)) >>= case _ of + Left error -> throwVersion { error: InvalidManifest { error: NoManifests, reason: error }, reason: "Failed to fetch manifest from archive." } + Right result -> pure result + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest from archive for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest + else do + -- While technically not 'legacy', we do need to handle packages with + -- spago.yaml files because they've begun to pop up since the registry + -- alpha began and we don't want to drop them when doing a re-import. + fetchSpagoYaml package.address (RawVersion tag.name) >>= case _ of + Just manifest -> do + Log.debug $ "Built manifest from discovered spago.yaml file." 
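+ -- Cache the built manifest so later runs skip the spago.yaml fetch for this tag.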
+ Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) (Right manifest) + pure manifest + Nothing -> do + Log.debug $ "Building manifest in legacy import because there is no registry entry, spago.yaml, or cached result: " <> formatPackageVersion package.name (LenientVersion.version version) + manifest <- Run.Except.runExceptAt _exceptVersion do + exceptVersion $ validateVersionDisabled package.name version + legacyManifest <- do + Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of + Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } + Right result -> pure result + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + case manifest of + Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err + Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val + Cache.put _importCache (ImportManifest package.name (RawVersion tag.name)) manifest + exceptVersion manifest manifests <- for package.tags \tag -> do manifest <- buildManifestForVersion tag pure (Tuple (RawVersion tag.name) manifest) - pure $ Map.fromFoldable manifests + pure { archiveBacked: package.archiveBacked, versions: Map.fromFoldable manifests } data PublishError = SolveFailedDependencies String @@ -920,6 +1012,12 @@ type PackageResult = { name :: PackageName , address :: Address , tags :: Array Tag + , archiveBacked :: Boolean + } + +type FetchTagsResult = + { tags :: Array Tag + , archiveBacked :: Boolean } type PackagesMetadata = { address :: Address, lastPublished :: Date } @@ -930,7 +1028,7 @@ packagesMetadataCodec = CJ.named "PackagesMetadata" $ CJ.Record.object , lastPublished: Internal.Codec.iso8601Date } -getPackagesMetadata :: forall r. Map RawPackageName String -> Run (EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) +getPackagesMetadata :: forall r. Map RawPackageName String -> Run (REGISTRY + LOG + EXCEPT String + GITHUB + r) (Map PackageName PackagesMetadata) getPackagesMetadata legacyRegistry = do associated <- for (Map.toUnfoldableUnordered legacyRegistry) \(Tuple rawName rawUrl) -> do Run.Except.runExceptAt (Proxy :: _ "exceptPackage") (validatePackage rawName rawUrl) >>= case _ of @@ -972,26 +1070,47 @@ writePackagesMetadata pkgs = do readPackagesMetadata :: forall r. Run (AFF + r) (Either String (Map PackageName PackagesMetadata)) readPackagesMetadata = Run.liftAff $ readJsonFile (packageMap packagesMetadataCodec) (Path.concat [ scratchDir, "packages-metadata.json" ]) -validatePackage :: forall r. RawPackageName -> String -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult +validatePackage :: forall r. RawPackageName -> String -> Run (REGISTRY + LOG + GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) PackageResult validatePackage rawPackage rawUrl = do name <- exceptPackage $ validatePackageName rawPackage exceptPackage $ validatePackageDisabled name address <- exceptPackage $ validatePackageAddress rawUrl - tags <- fetchPackageTags address + { tags, archiveBacked } <- fetchPackageTags name address -- We do not allow packages that redirect from their registered location elsewhere. The package -- transferrer will handle automatically transferring these packages. 
+ -- Skip URL redirect validation for archive-backed packages since they have no valid tag URLs. case Array.head tags of - Nothing -> pure { name, address, tags } + Nothing -> pure { name, address, tags, archiveBacked } + Just _ | archiveBacked -> pure { name, address, tags, archiveBacked } Just tag -> do tagAddress <- exceptPackage case tagUrlToRepoUrl tag.url of Nothing -> Left { error: InvalidPackageURL tag.url, reason: "Failed to format redirected " <> tag.url <> " as a GitHub.Address." } Just formatted -> Right formatted exceptPackage $ validatePackageLocation { registered: address, received: tagAddress } - pure { name, address, tags } + pure { name, address, tags, archiveBacked } -fetchPackageTags :: forall r. Address -> Run (GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) (Array Tag) -fetchPackageTags address = GitHub.listTags address >>= case _ of +fetchPackageTags :: forall r. PackageName -> Address -> Run (REGISTRY + LOG + GITHUB + EXCEPT_PACKAGE + EXCEPT String + r) FetchTagsResult +fetchPackageTags name address = GitHub.listTags address >>= case _ of Left err -> case err of + Octokit.APIError apiError | apiError.statusCode == 404 -> do + let printed = PackageName.print name + Log.debug $ "GitHub 404 for " <> printed <> ", attempting to synthesize tags from metadata (local, then remote)" + versions <- Registry.readMetadata name >>= case _ of + Just (Metadata metadata) -> do + Log.debug $ "Found metadata for " <> printed <> " in local registry checkout." + pure $ Set.toUnfoldable $ Map.keys metadata.published + Nothing -> do + Log.debug $ "No local metadata for " <> printed <> ", fetching versions from remote registry main branch..." + fetchRemoteRegistryVersions name + case versions of + [] -> do + let error = CannotAccessRepo address + let reason = "GitHub 404 and no metadata found locally or in remote purescript/registry main branch to construct archive tags." + throwPackage { error, reason } + _ -> do + let tags = versions <#> \v -> { name: "v" <> Version.print v, sha: "", url: "" } + Log.info $ "Synthesized " <> show (Array.length tags) <> " tags from metadata for archive-backed package " <> printed + pure { tags, archiveBacked: true } Octokit.APIError apiError | apiError.statusCode >= 400 -> do let error = CannotAccessRepo address let reason = "GitHub API error with status code " <> show apiError.statusCode @@ -1002,7 +1121,49 @@ fetchPackageTags address = GitHub.listTags address >>= case _ of , Octokit.printGitHubError err ] Right tags -> - pure tags + pure { tags, archiveBacked: false } + +-- | Fetch published versions for a package directly from the remote registry repo (main branch). +-- | Used as a fallback when the local registry checkout has been cleared (e.g., during reuploads). +-- | Only extracts the version keys from the "published" field without fully parsing metadata, +-- | since the remote registry may have a different schema (e.g., missing 'compilers' field). +fetchRemoteRegistryVersions :: forall r. 
PackageName -> Run (GITHUB + LOG + r) (Array Version) +fetchRemoteRegistryVersions name = do + let + printed = PackageName.print name + ref = RawVersion "main" + path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] + Log.debug $ "Fetching published versions for " <> printed <> " from remote registry repo at path " <> path + GitHub.getContent Constants.registry ref path >>= case _ of + Left err -> case err of + Octokit.APIError apiError | apiError.statusCode == 404 -> do + Log.debug $ "No metadata found in remote registry repo for " <> printed <> " (404)" + pure [] + _ -> do + Log.warn $ "Failed to fetch metadata for " <> printed <> " from remote registry repo: " <> Octokit.printGitHubError err + pure [] + Right content -> case JSON.parse content of + Left parseErr -> do + Log.warn $ "Failed to parse metadata JSON for " <> printed <> ": " <> parseErr + pure [] + Right json -> case JSON.toJObject json of + Nothing -> do + Log.warn $ "Metadata for " <> printed <> " is not a JSON object" + pure [] + Just obj -> case JSON.Object.lookup "published" obj of + Nothing -> do + Log.debug $ "No 'published' field in metadata for " <> printed + pure [] + Just publishedJson -> case JSON.toJObject publishedJson of + Nothing -> do + Log.warn $ "'published' field for " <> printed <> " is not a JSON object" + pure [] + Just publishedObj -> do + let versionStrings = JSON.Object.keys publishedObj + let parseResults = versionStrings <#> \v -> Version.parse v + let versions = Array.mapMaybe hush parseResults + Log.debug $ "Extracted " <> show (Array.length versions) <> " versions from remote metadata for " <> printed + pure versions validatePackageLocation :: { registered :: Address, received :: Address } -> Either PackageValidationError Unit validatePackageLocation addresses = do @@ -1389,3 +1550,147 @@ instance FsEncodable ImportCache where PublishFailure name version next -> do let codec = publishErrorCodec Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next + +registryArchiveRawUrl :: String +registryArchiveRawUrl = "https://raw.githubusercontent.com/purescript/registry-archive/main" + +-- | Fetch a manifest directly from the registry archive tarball. +-- | Used for archive-backed packages where the original GitHub repo is unavailable. +fetchManifestFromArchive :: forall r. 
PackageName -> Version -> Run (LOG + EXCEPT String + AFF + EFFECT + r) Manifest +fetchManifestFromArchive name version = do + let formatted = formatPackageVersion name version + Log.debug $ "Fetching manifest from archive for " <> formatted + tmp <- Tmp.mkTmpDir + let + nameStr = PackageName.print name + versionStr = Version.print version + tarballName = versionStr <> ".tar.gz" + absoluteTarballPath = Path.concat [ tmp, tarballName ] + archiveUrl = registryArchiveRawUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" + + Log.debug $ "Fetching archive tarball from: " <> archiveUrl + response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} + + case response of + Cancelled -> do + FS.Extra.remove tmp + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.FetchError error) -> do + FS.Extra.remove tmp + Log.error $ "HTTP error when fetching archive: " <> Exception.message error + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) + FS.Extra.remove tmp + Log.error $ "Bad status (" <> show status <> ") when fetching archive with body: " <> bodyString + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl <> " (status " <> show status <> ")" + Succeeded { arrayBuffer: arrayBufferAff } -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of + Left error -> do + FS.Extra.remove tmp + Log.error $ "Downloaded archive but failed to write to " <> absoluteTarballPath <> ": " <> Aff.message error + Run.Except.throw $ "Could not save archive tarball for " <> formatted + Right _ -> + Log.debug $ "Tarball downloaded to " <> absoluteTarballPath + + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> do + FS.Extra.remove tmp + Run.Except.throw $ "Downloaded archive tarball for " <> formatted <> " has no top-level directory." + Just extractedPath -> do + Log.debug "Extracting archive tarball..." + Tar.extract { cwd: tmp, archive: tarballName } + let pursJsonPath = Path.concat [ tmp, extractedPath, "purs.json" ] + Run.liftAff (Aff.attempt (FS.Aff.readTextFile UTF8 pursJsonPath)) >>= case _ of + Left error -> do + FS.Extra.remove tmp + Log.error $ "Failed to read purs.json from archive: " <> Aff.message error + Run.Except.throw $ "No purs.json found in archive for " <> formatted + Right contents -> case JSON.parse contents of + Left parseErr -> do + FS.Extra.remove tmp + Log.error $ "Failed to parse purs.json as JSON: " <> parseErr + Run.Except.throw $ "Invalid purs.json in archive for " <> formatted + Right json -> case CJ.decode Manifest.codec json of + Left decodeErr -> do + FS.Extra.remove tmp + Log.error $ "Failed to decode purs.json manifest: " <> CJ.DecodeError.print decodeErr + Run.Except.throw $ "Could not decode purs.json manifest for " <> formatted + Right manifest -> do + FS.Extra.remove tmp + Log.debug $ "Successfully fetched manifest from archive for " <> formatted + pure manifest + +type ArchiveFetchedSource = + { path :: FilePath + , published :: DateTime + } + +fetchFromArchive + :: forall r + . 
FilePath + -> PackageName + -> Version + -> Run (REGISTRY + LOG + EXCEPT String + AFF + EFFECT + r) ArchiveFetchedSource +fetchFromArchive destination name version = do + let + nameStr = PackageName.print name + versionStr = Version.print version + tarballName = versionStr <> ".tar.gz" + absoluteTarballPath = Path.concat [ destination, tarballName ] + archiveUrl = registryArchiveRawUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" + + Log.debug $ "Fetching archive tarball from: " <> archiveUrl + + response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} + + case response of + Cancelled -> + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.FetchError error) -> do + Log.error $ "HTTP error when fetching archive: " <> Exception.message error + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl + Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) + Log.error $ "Bad status (" <> show status <> ") when fetching archive with body: " <> bodyString + Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl <> " (status " <> show status <> ")" + Succeeded { arrayBuffer: arrayBufferAff } -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of + Left error -> do + Log.error $ "Downloaded archive but failed to write to " <> absoluteTarballPath <> ": " <> Aff.message error + Run.Except.throw $ "Could not save archive tarball for " <> formatPackageVersion name version + Right _ -> + Log.debug $ "Tarball downloaded to " <> absoluteTarballPath + + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> + Run.Except.throw $ "Downloaded archive tarball for " <> formatPackageVersion name version <> " has no top-level directory." + Just extractedPath -> do + Log.debug "Extracting archive tarball..." + Tar.extract { cwd: destination, archive: tarballName } + publishedTime <- lookupPublishedTime name version + pure { path: Path.concat [ destination, extractedPath ], published: publishedTime } + +lookupPublishedTime + :: forall r + . PackageName + -> Version + -> Run (REGISTRY + EXCEPT String + r) DateTime +lookupPublishedTime name version = do + Registry.readMetadata name >>= case _ of + Nothing -> + Run.Except.throw $ "No metadata found for " <> PackageName.print name + Just (Metadata m) -> + case Map.lookup version m.published of + Nothing -> + Run.Except.throw $ "No published metadata for " <> formatPackageVersion name version + Just publishedMeta -> + pure publishedMeta.publishedTime From 5cc662765f973f849646f6d4df7edfe7f85740bb Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 6 Jan 2026 18:03:57 -0500 Subject: [PATCH 60/64] free 404 packages --- scripts/src/LegacyImporter.purs | 181 ++++++++++++++++++++++++-------- 1 file changed, 135 insertions(+), 46 deletions(-) diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 8e436f3d5..c4632e755 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -276,8 +276,8 @@ runLegacyImport logs = do Log.info $ "Read " <> show (Set.size (Map.keys legacyRegistry)) <> " package names from the legacy registry." 
- Log.info "Reading reserved 0.13 packages..." - reserved0_13 <- readPackagesMetadata >>= case _ of + Log.info "Reading packages eligible for reservation (post-0.13 or trusted orgs)..." + eligibleForReservation <- readPackagesMetadata >>= case _ of Left err -> do Log.warn $ "Could not read reserved packages: " <> err Log.warn $ "Determining reserved packages..." @@ -287,15 +287,7 @@ runLegacyImport logs = do pure cutoff Right cutoff -> pure cutoff - Log.info $ "Reserving metadata files for 0.13 and purs/metadata packages" - forWithIndex_ reserved0_13 \package { address } -> Registry.readMetadata package >>= case _ of - Nothing -> do - Log.info $ "Writing empty metadata file for reserved 0.13 package " <> PackageName.print package - let location = GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } - let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } - Registry.writeMetadata package entry - Just _ -> Log.debug $ PackageName.print package <> " already reserved." - + -- Reserve the 'metadata', 'purs', and 'purescript' package names let metadataPackage = unsafeFromRight (PackageName.parse "metadata") let pursPackage = unsafeFromRight (PackageName.parse "purs") let purescriptPackage = unsafeFromRight (PackageName.parse "purescript") @@ -310,6 +302,18 @@ runLegacyImport logs = do importedIndex <- importLegacyRegistry legacyRegistry + -- Reserve metadata files for post-0.13 packages that failed to import (no usable versions). + -- Pre-0.13 packages and explicitly freed packages are NOT reserved. + Log.info "Reserving metadata files for post-0.13 packages that failed import..." + let + packagesToReserve = Map.filterWithKey (\name _ -> Map.member name eligibleForReservation) importedIndex.removedPackages + forWithIndex_ packagesToReserve \package location -> Registry.readMetadata package >>= case _ of + Nothing -> do + Log.info $ "Writing empty metadata file for reserved package " <> PackageName.print package + let entry = Metadata { location, owners: Nothing, published: Map.empty, unpublished: Map.empty } + Registry.writeMetadata package entry + Just _ -> Log.debug $ PackageName.print package <> " already reserved." + Log.info "Writing package and version failures to disk..." 
Run.liftAff $ writePackageFailures importedIndex.failedPackages Run.liftAff $ writeVersionFailures importedIndex.failedVersions @@ -567,7 +571,7 @@ runLegacyImport logs = do failures <- Array.foldM collectError Map.empty allIndexPackages Run.liftAff $ writePublishFailures failures - let publishStats = collectPublishFailureStats importStats (map _.address reserved0_13) importedIndex.registryIndex failures + let publishStats = collectPublishFailureStats importStats (map _.address eligibleForReservation) importedIndex.registryIndex failures let publishStatsMessage = formatPublishFailureStats publishStats Log.info publishStatsMessage Run.liftAff $ FS.Aff.writeTextFile UTF8 (Path.concat [ scratchDir, "publish-stats.txt" ]) publishStatsMessage @@ -805,7 +809,7 @@ type PublishFailureStats = } collectPublishFailureStats :: ImportStats -> Map PackageName Address -> ManifestIndex -> Map PackageName (Map Version PublishError) -> PublishFailureStats -collectPublishFailureStats importStats reserved0_13 importedIndex failures = do +collectPublishFailureStats importStats eligibleForReservation importedIndex failures = do let index :: Map PackageName (Map Version Manifest) index = ManifestIndex.toMap importedIndex @@ -844,7 +848,7 @@ collectPublishFailureStats importStats reserved0_13 importedIndex failures = do -- Packages that are eligible for removal — but are reserved due to 0.13 or -- organization status — are the 'reserved packages'. reservedPackages :: Set PackageName - reservedPackages = Set.intersection removedPackages (Map.keys reserved0_13) + reservedPackages = Set.intersection removedPackages (Map.keys eligibleForReservation) countByFailure :: Map String Int countByFailure = do @@ -1131,39 +1135,32 @@ fetchRemoteRegistryVersions :: forall r. PackageName -> Run (GITHUB + LOG + r) ( fetchRemoteRegistryVersions name = do let printed = PackageName.print name - ref = RawVersion "main" path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] - Log.debug $ "Fetching published versions for " <> printed <> " from remote registry repo at path " <> path - GitHub.getContent Constants.registry ref path >>= case _ of - Left err -> case err of - Octokit.APIError apiError | apiError.statusCode == 404 -> do - Log.debug $ "No metadata found in remote registry repo for " <> printed <> " (404)" - pure [] - _ -> do - Log.warn $ "Failed to fetch metadata for " <> printed <> " from remote registry repo: " <> Octokit.printGitHubError err - pure [] - Right content -> case JSON.parse content of - Left parseErr -> do - Log.warn $ "Failed to parse metadata JSON for " <> printed <> ": " <> parseErr - pure [] - Right json -> case JSON.toJObject json of + Log.debug $ "Fetching published versions for " <> printed <> " from remote registry" + GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of + Left err -> do + case err of + Octokit.APIError apiError | apiError.statusCode == 404 -> + Log.debug $ "No metadata found in remote registry for " <> printed <> " (404)" + _ -> + Log.warn $ "Failed to fetch remote metadata for " <> printed <> ": " <> Octokit.printGitHubError err + pure [] + Right content -> do + let + parsed = do + json <- hush $ JSON.parse content + obj <- JSON.toJObject json + publishedJson <- JSON.Object.lookup "published" obj + publishedObj <- JSON.toJObject publishedJson + let versionStrings = JSON.Object.keys publishedObj + pure $ Array.mapMaybe (hush <<< Version.parse) versionStrings + case parsed of Nothing -> do - Log.warn $ "Metadata for " <> printed <> " is not a JSON 
object" + Log.warn $ "Could not extract versions from remote metadata for " <> printed pure [] - Just obj -> case JSON.Object.lookup "published" obj of - Nothing -> do - Log.debug $ "No 'published' field in metadata for " <> printed - pure [] - Just publishedJson -> case JSON.toJObject publishedJson of - Nothing -> do - Log.warn $ "'published' field for " <> printed <> " is not a JSON object" - pure [] - Just publishedObj -> do - let versionStrings = JSON.Object.keys publishedObj - let parseResults = versionStrings <#> \v -> Version.parse v - let versions = Array.mapMaybe hush parseResults - Log.debug $ "Extracted " <> show (Array.length versions) <> " versions from remote metadata for " <> printed - pure versions + Just versions -> do + Log.debug $ "Extracted " <> show (Array.length versions) <> " versions from remote metadata for " <> printed + pure versions validatePackageLocation :: { registered :: Address, received :: Address } -> Either PackageValidationError Unit validatePackageLocation addresses = do @@ -1206,14 +1203,52 @@ validatePackageDisabled package = disabledPackages = Map.fromFoldable [ Tuple "metadata" reservedPackage , Tuple "purs" reservedPackage + , Tuple "bitstrings" noSrcDirectory , Tuple "purveyor" noSrcDirectory , Tuple "styled-components" noSrcDirectory , Tuple "styled-system" noSrcDirectory + + , Tuple "arb-instances" freedPackage + , Tuple "big-integer" freedPackage + , Tuple "chosen" freedPackage + , Tuple "chosen-halogen" freedPackage + , Tuple "combinators" freedPackage + , Tuple "constraint-kanren" freedPackage + , Tuple "datareify" freedPackage + , Tuple "dynamic" freedPackage + , Tuple "flux-store" freedPackage + , Tuple "focus-ui" freedPackage + , Tuple "fussy" freedPackage + , Tuple "globals-safe" freedPackage + , Tuple "hashable" freedPackage + , Tuple "hubot" freedPackage + , Tuple "mdcss" freedPackage + , Tuple "node-args" freedPackage + , Tuple "node-readline-question" freedPackage + , Tuple "nunjucks" freedPackage + , Tuple "org" freedPackage + , Tuple "phantomjs" freedPackage + , Tuple "photons" freedPackage + , Tuple "pouchdb-ffi" freedPackage + , Tuple "pux-router" freedPackage + , Tuple "reactive" freedPackage + , Tuple "reactive-jquery" freedPackage + , Tuple "skull" freedPackage + , Tuple "slack" freedPackage + , Tuple "stablename" freedPackage + , Tuple "stm" freedPackage + , Tuple "stuff" freedPackage + , Tuple "subtype" freedPackage + , Tuple "toastr" freedPackage + , Tuple "uport" freedPackage + , Tuple "yaml" freedPackage + , Tuple "zmq" freedPackage ] where reservedPackage = "Reserved package which cannot be uploaded." noSrcDirectory = "No version contains a 'src' directory." + freedPackage = "Abandoned package whose name has been freed for reuse." -- | Validate that a package name parses. Expects the package to already have -- | had its 'purescript-' prefix removed. @@ -1635,7 +1670,7 @@ fetchFromArchive . FilePath -> PackageName -> Version - -> Run (REGISTRY + LOG + EXCEPT String + AFF + EFFECT + r) ArchiveFetchedSource + -> Run (REGISTRY + GITHUB + LOG + EXCEPT String + AFF + EFFECT + r) ArchiveFetchedSource fetchFromArchive destination name version = do let nameStr = PackageName.print name @@ -1676,7 +1711,8 @@ fetchFromArchive destination name version = do Just extractedPath -> do Log.debug "Extracting archive tarball..." 
Tar.extract { cwd: destination, archive: tarballName } - publishedTime <- lookupPublishedTime name version + -- Archive-backed packages may not have local metadata, so fetch from remote registry + publishedTime <- lookupRemotePublishedTime name version pure { path: Path.concat [ destination, extractedPath ], published: publishedTime } lookupPublishedTime @@ -1694,3 +1730,56 @@ lookupPublishedTime name version = do Run.Except.throw $ "No published metadata for " <> formatPackageVersion name version Just publishedMeta -> pure publishedMeta.publishedTime + +-- | Look up published time, falling back to remote registry if local metadata is missing. +-- | Used for archive-backed packages where the local checkout may not have metadata. +lookupRemotePublishedTime + :: forall r + . PackageName + -> Version + -> Run (REGISTRY + GITHUB + LOG + EXCEPT String + r) DateTime +lookupRemotePublishedTime name version = do + Registry.readMetadata name >>= case _ of + Just (Metadata m) -> + case Map.lookup version m.published of + Nothing -> + Run.Except.throw $ "No published metadata for " <> formatPackageVersion name version + Just publishedMeta -> + pure publishedMeta.publishedTime + Nothing -> do + Log.debug $ "No local metadata for " <> PackageName.print name <> ", fetching from remote registry..." + fetchRemotePublishedTime name version >>= case _ of + Nothing -> Run.Except.throw $ "No metadata found for " <> PackageName.print name + Just time -> pure time + +-- | Fetch the published time for a specific version from the remote registry repo (main branch). +-- | Used as a fallback when the local registry checkout doesn't have metadata for archive-backed packages. +fetchRemotePublishedTime :: forall r. PackageName -> Version -> Run (GITHUB + LOG + r) (Maybe DateTime) +fetchRemotePublishedTime name version = do + let + printed = PackageName.print name + path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] + Log.debug $ "Fetching published time for " <> formatPackageVersion name version <> " from remote registry" + GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of + Left err -> do + Log.warn $ "Failed to fetch remote metadata for " <> printed <> ": " <> Octokit.printGitHubError err + pure Nothing + Right content -> do + let + parsed = do + json <- hush $ JSON.parse content + obj <- JSON.toJObject json + publishedJson <- JSON.Object.lookup "published" obj + publishedObj <- JSON.toJObject publishedJson + versionJson <- JSON.Object.lookup (Version.print version) publishedObj + versionObj <- JSON.toJObject versionJson + timeJson <- JSON.Object.lookup "publishedTime" versionObj + timeStr <- JSON.toString timeJson + hush $ Formatter.DateTime.unformat Internal.Format.iso8601DateTime timeStr + case parsed of + Nothing -> do + Log.warn $ "Could not extract publishedTime for " <> formatPackageVersion name version <> " from remote metadata" + pure Nothing + Just dt -> do + Log.debug $ "Fetched published time for " <> formatPackageVersion name version <> " from remote registry" + pure $ Just dt From 902bea1d52313482d85d7caf7cc01ee7f620c39e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Tue, 6 Jan 2026 20:47:58 -0500 Subject: [PATCH 61/64] add ARCHIVE effect to fix getting archived pkgs --- .../registry-archive/prelude-6.0.2.tar.gz | Bin 0 -> 31025 bytes app/src/App/API.purs | 32 +- app/src/App/Effect/Archive.purs | 288 ++++++++++++++++++ app/src/App/Effect/Source.purs | 6 +- app/src/App/GitHubIssue.purs | 2 + app/src/App/Server.purs | 5 +- app/test/App/API.purs | 65 
+++-
 app/test/Test/Assert/Run.purs   |  81 ++++-
 flake.nix                       |   3 +
 scripts/src/LegacyImporter.purs | 137 +--------
 scripts/src/PackageDeleter.purs |   2 +
 scripts/src/Solver.purs         |   2 +
 12 files changed, 486 insertions(+), 137 deletions(-)
 create mode 100644 app/fixtures/registry-archive/prelude-6.0.2.tar.gz
 create mode 100644 app/src/App/Effect/Archive.purs

diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..2ef880dff4d0b29735276e267a483f25607e3387
GIT binary patch
literal 31025
[base85-encoded binary payload elided: the 31025-byte prelude-6.0.2.tar.gz test fixture]

literal 0
HcmV?d00001

diff --git a/app/src/App/API.purs b/app/src/App/API.purs
index 89322d52b..8d78d7ca7 100644
--- a/app/src/App/API.purs
+++ b/app/src/App/API.purs
@@ -57,6 +57,8 @@ import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec)
 import Registry.App.CLI.Purs as Purs
 import Registry.App.CLI.PursVersions as PursVersions
 import Registry.App.CLI.Tar as Tar
+import Registry.App.Effect.Archive (ARCHIVE)
+import Registry.App.Effect.Archive as Archive
 import Registry.App.Effect.Cache (class FsEncodable, Cache)
 import Registry.App.Effect.Cache as Cache
 import Registry.App.Effect.Comment (COMMENT)
@@ -328,7 +330,7 @@ authenticated auth = case auth.payload of
     Registry.mirrorLegacyRegistry payload.name payload.newLocation
     Comment.comment "Mirrored registry operation to the legacy registry."
 
-type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r)
+type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r)
 
 -- | Publish a package via the 'publish' operation. If the package has not been
 -- | published before then it will be registered and the given version will be
@@ -385,7 +387,33 @@ publish maybeLegacyIndex payload = do
   -- the package directory along with its detected publish time.
   Log.debug "Metadata validated. Fetching package source code..."
   tmp <- Tmp.mkTmpDir
-  { path: downloadedPackage, published: publishedTime } <- Source.fetch tmp existingMetadata.location payload.ref
+
+  -- Legacy imports may encounter packages whose GitHub repositories no longer
+  -- exist but whose tarballs are stored in the registry-archive. When Source.fetch
+  -- fails with InaccessibleRepo during a legacy import, we fall back to fetching
+  -- from the registry-archive instead.
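+  -- For example: during a legacy import of prelude@6.0.2 (the fixture added in
+  -- this patch), an inaccessible purescript/purescript-prelude repository sends
+  -- us to https://raw.githubusercontent.com/purescript/registry-archive/main/prelude/6.0.2.tar.gz.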
+ { path: downloadedPackage, published: publishedTime } <- + Source.fetchEither tmp existingMetadata.location payload.ref >>= case _ of + Right result -> + pure result + Left (Source.InaccessibleRepo address) | isJust maybeLegacyIndex -> do + Log.warn $ Array.fold + [ "GitHub repository inaccessible during legacy import: " + , address.owner + , "/" + , address.repo + ] + Log.info "Falling back to registry-archive tarball..." + version <- case LenientVersion.parse payload.ref of + Left _ -> Except.throw $ Array.fold + [ "Cannot fall back to archive: ref " + , payload.ref + , " is not a valid version" + ] + Right v -> pure $ LenientVersion.version v + Archive.fetch tmp payload.name version + Left err -> + Except.throw $ Source.printFetchError err Log.debug $ "Package downloaded to " <> downloadedPackage <> ", verifying it contains a src directory with valid modules..." Internal.Path.readPursFiles (Path.concat [ downloadedPackage, "src" ]) >>= case _ of diff --git a/app/src/App/Effect/Archive.purs b/app/src/App/Effect/Archive.purs new file mode 100644 index 000000000..8c26092ad --- /dev/null +++ b/app/src/App/Effect/Archive.purs @@ -0,0 +1,288 @@ +-- | An effect for fetching packages from the registry-archive. +-- | +-- | The registry-archive stores tarballs for packages whose original GitHub +-- | repositories are no longer available. This effect provides operations to +-- | fetch source code and metadata from that archive. +-- | +-- | This effect can be removed when the legacy importer is no longer in use. +module Registry.App.Effect.Archive + ( ARCHIVE + , Archive(..) + , ArchiveError(..) + , FetchedSource + , _archive + , fetch + , fetchEither + , handle + , handleMock + , interpret + , printArchiveError + , registryArchiveUrl + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as Formatter.DateTime +import Data.Map as Map +import Effect.Aff as Aff +import Effect.Exception as Exception +import Fetch.Retry as Fetch +import JSON as JSON +import JSON.Object as JSON.Object +import Node.Buffer as Buffer +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.App.CLI.Tar as Tar +import Registry.Foreign.FSExtra as FS.Extra +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Legacy.Types (RawVersion(..)) +import Registry.Constants as Constants +import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Tar as Foreign.Tar +import Registry.Internal.Format as Internal.Format +import Registry.Metadata (Metadata(..)) +import Registry.PackageName (PackageName) +import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +-- | The base URL for fetching tarballs from the registry archive. +registryArchiveUrl :: String +registryArchiveUrl = "https://raw.githubusercontent.com/purescript/registry-archive/main" + +-- | The result of fetching source code from the registry archive. +type FetchedSource = + { path :: FilePath + , published :: DateTime + } + +-- | Errors that can occur when fetching from the archive. 
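+-- | Each failure mode has its own constructor so that callers can tell a failed
+-- | download apart from a corrupt tarball or from missing registry metadata.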
+data ArchiveError + = DownloadFailed PackageName Version String + | ExtractionFailed PackageName Version String + | PublishedTimeNotFound PackageName Version + +printArchiveError :: ArchiveError -> String +printArchiveError = case _ of + DownloadFailed name version reason -> Array.fold + [ "Failed to download " + , formatPackageVersion name version + , " from the registry archive: " + , reason + ] + ExtractionFailed name version reason -> Array.fold + [ "Failed to extract " + , formatPackageVersion name version + , " from the registry archive: " + , reason + ] + PublishedTimeNotFound name version -> Array.fold + [ "Could not find published time for " + , formatPackageVersion name version + ] + +-- | The Archive effect, which describes fetching package tarballs from the +-- | registry-archive repository. +data Archive a = Fetch FilePath PackageName Version (Either ArchiveError FetchedSource -> a) + +derive instance Functor Archive + +type ARCHIVE r = (archive :: Archive | r) + +_archive :: Proxy "archive" +_archive = Proxy + +-- | Fetch a package tarball from the registry archive, extracting it to the +-- | given destination directory. Returns the path to the extracted source and +-- | the published time. +fetch :: forall r. FilePath -> PackageName -> Version -> Run (ARCHIVE + EXCEPT String + r) FetchedSource +fetch destination name version = (Except.rethrow <<< lmap printArchiveError) =<< fetchEither destination name version + +-- | Fetch a package tarball from the registry archive, returning the typed +-- | ArchiveError on failure. +fetchEither :: forall r. FilePath -> PackageName -> Version -> Run (ARCHIVE + r) (Either ArchiveError FetchedSource) +fetchEither destination name version = Run.lift _archive (Fetch destination name version identity) + +-- | Run the ARCHIVE effect given a handler. +interpret :: forall r a. (Archive ~> Run r) -> Run (ARCHIVE + r) a -> Run r a +interpret handler = Run.interpret (Run.on _archive handler Run.send) + +-- | Handle the ARCHIVE effect by fetching from the real registry-archive on GitHub. +handle :: forall r a. Archive a -> Run (GITHUB + LOG + AFF + EFFECT + r) a +handle = case _ of + Fetch destination name version reply -> do + result <- fetchFromArchiveImpl destination name version + pure $ reply result + +-- | Internal implementation that fetches from the registry-archive and looks up +-- | the published time from the remote registry metadata. +fetchFromArchiveImpl + :: forall r + . FilePath + -> PackageName + -> Version + -> Run (GITHUB + LOG + AFF + EFFECT + r) (Either ArchiveError FetchedSource) +fetchFromArchiveImpl destination name version = do + let + nameStr = PackageName.print name + versionStr = Version.print version + tarballName = versionStr <> ".tar.gz" + -- Extract to a subdirectory to avoid path collisions with the packaging + -- directory (which uses the name-version format that archive tarballs + -- also use internally). 
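+    -- For example, prelude/6.0.2.tar.gz unpacks to a top-level prelude-6.0.2
+    -- directory, the same name the packaging step creates under this parent.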
+ extractDir = Path.concat [ destination, "archive" ] + absoluteTarballPath = Path.concat [ extractDir, tarballName ] + archiveUrl = Array.fold + [ registryArchiveUrl + , "/" + , nameStr + , "/" + , versionStr + , ".tar.gz" + ] + + Log.debug $ "Fetching archive tarball from: " <> archiveUrl + FS.Extra.ensureDirectory extractDir + + response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} + + case response of + Cancelled -> + pure $ Left $ DownloadFailed name version "Request was cancelled" + Failed (Fetch.FetchError error) -> do + Log.error $ "HTTP error when fetching archive: " <> Exception.message error + pure $ Left $ DownloadFailed name version (Exception.message error) + Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) + Log.error $ Array.fold + [ "Bad status (" + , show status + , ") when fetching archive with body: " + , bodyString + ] + pure $ Left $ DownloadFailed name version ("HTTP status " <> show status) + Succeeded { arrayBuffer: arrayBufferAff } -> do + arrayBuffer <- Run.liftAff arrayBufferAff + buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer + Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of + Left error -> do + Log.error $ Array.fold + [ "Downloaded archive but failed to write to " + , absoluteTarballPath + , ": " + , Aff.message error + ] + pure $ Left $ DownloadFailed name version "Failed to write tarball to disk" + Right _ -> do + Log.debug $ "Tarball downloaded to " <> absoluteTarballPath + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> + pure $ Left $ ExtractionFailed name version "Tarball has no top-level directory" + Just extractedPath -> do + Log.debug "Extracting archive tarball..." + Tar.extract { cwd: extractDir, archive: tarballName } + fetchRemotePublishedTime name version >>= case _ of + Nothing -> pure $ Left $ PublishedTimeNotFound name version + Just publishedTime -> + pure $ Right { path: Path.concat [ extractDir, extractedPath ], published: publishedTime } + +-- | Fetch the published time for a specific version from the remote registry +-- | repo (main branch). Used as a fallback when the local registry checkout +-- | doesn't have metadata for archive-backed packages. +fetchRemotePublishedTime :: forall r. 
PackageName -> Version -> Run (GITHUB + LOG + r) (Maybe DateTime) +fetchRemotePublishedTime name version = do + let + printed = PackageName.print name + path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] + Log.debug $ Array.fold + [ "Fetching published time for " + , formatPackageVersion name version + , " from remote registry" + ] + GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of + Left err -> do + Log.warn $ Array.fold + [ "Failed to fetch remote metadata for " + , printed + , ": " + , Octokit.printGitHubError err + ] + pure Nothing + Right content -> do + let + parsed = do + json <- hush $ JSON.parse content + obj <- JSON.toJObject json + publishedJson <- JSON.Object.lookup "published" obj + publishedObj <- JSON.toJObject publishedJson + versionJson <- JSON.Object.lookup (Version.print version) publishedObj + versionObj <- JSON.toJObject versionJson + timeJson <- JSON.Object.lookup "publishedTime" versionObj + timeStr <- JSON.toString timeJson + hush $ Formatter.DateTime.unformat Internal.Format.iso8601DateTime timeStr + case parsed of + Nothing -> do + Log.warn $ Array.fold + [ "Could not extract publishedTime for " + , formatPackageVersion name version + , " from remote metadata" + ] + pure Nothing + Just dt -> do + Log.debug $ Array.fold + [ "Fetched published time for " + , formatPackageVersion name version + , " from remote registry" + ] + pure $ Just dt + +-- | A mock handler for testing that uses a local directory of tarballs instead +-- | of fetching from the remote registry-archive. +handleMock + :: forall r a + . { archiveDir :: FilePath, metadata :: Map PackageName Metadata } + -> Archive a + -> Run (LOG + AFF + EFFECT + r) a +handleMock env = case _ of + Fetch destination name version reply -> map (map reply) Except.runExcept do + let + tarballName = Version.print version <> ".tar.gz" + sourcePath = Path.concat [ env.archiveDir, PackageName.print name <> "-" <> Version.print version <> ".tar.gz" ] + absoluteTarballPath = Path.concat [ destination, tarballName ] + + Run.liftAff (Aff.attempt (FS.Aff.stat sourcePath)) >>= case _ of + Left _ -> + Except.throw $ DownloadFailed name version "Tarball not found in mock archive" + Right _ -> do + Run.liftAff (Aff.attempt (FS.Aff.copyFile sourcePath absoluteTarballPath)) >>= case _ of + Left error -> + Except.throw $ DownloadFailed name version (Aff.message error) + Right _ -> + Log.debug $ "Copied mock tarball to " <> absoluteTarballPath + + Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of + Nothing -> + Except.throw $ ExtractionFailed name version "Tarball has no top-level directory" + Just extractedPath -> do + Log.debug "Extracting mock archive tarball..." + Tar.extract { cwd: destination, archive: tarballName } + case Map.lookup name env.metadata of + Nothing -> + Except.throw $ PublishedTimeNotFound name version + Just (Metadata m) -> + case Map.lookup version m.published of + Nothing -> + Except.throw $ PublishedTimeNotFound name version + Just publishedMeta -> + pure { path: Path.concat [ destination, extractedPath ], published: publishedMeta.publishedTime } diff --git a/app/src/App/Effect/Source.purs b/app/src/App/Effect/Source.purs index 7981daf48..f9fe3444d 100644 --- a/app/src/App/Effect/Source.purs +++ b/app/src/App/Effect/Source.purs @@ -67,7 +67,11 @@ printFetchError = case _ of -- | Fetch the provided location to the provided destination path. fetch :: forall r. 
FilePath -> Location -> String -> Run (SOURCE + EXCEPT String + r) FetchedSource -fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< Run.lift _source (Fetch destination location ref identity) +fetch destination location ref = (Except.rethrow <<< lmap printFetchError) =<< fetchEither destination location ref + +-- | Fetch the provided location, returning the typed FetchError on failure. +fetchEither :: forall r. FilePath -> Location -> String -> Run (SOURCE + r) (Either FetchError FetchedSource) +fetchEither destination location ref = Run.lift _source (Fetch destination location ref identity) -- | Run the SOURCE effect given a handler. interpret :: forall r a. (Source ~> Run r) -> Run (SOURCE + r) a -> Run r a diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 56422ab64..c0bb2750b 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -18,6 +18,7 @@ import Node.Process as Process import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) @@ -96,6 +97,7 @@ main = launchAff_ $ do -- App effects # PackageSets.interpret (PackageSets.handle { workdir }) # Registry.interpret (Registry.handle registryEnv) + # Archive.interpret Archive.handle # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) # Pursuit.interpret (Pursuit.handleAff env.token) # Source.interpret (Source.handle Source.Recent) diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index b9aa35b1c..a6e8c3e1f 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -22,6 +22,8 @@ import Registry.API.V1 as V1 import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment (COMMENT) @@ -217,7 +219,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response runServer env router' request = do @@ -313,6 +315,7 @@ runEffects env operation = Aff.attempt do , cacheRef: env.registryCacheRef } ) + # Archive.interpret Archive.handle # Pursuit.interpret (Pursuit.handleAff env.vars.token) # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) # Source.interpret (Source.handle Source.Recent) diff --git a/app/test/App/API.purs b/app/test/App/API.purs index caaf6c215..27ed33cf1 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -48,6 +48,7 @@ type PipelineEnv = , metadata :: Ref (Map PackageName Metadata) , index :: Ref ManifestIndex , storageDir :: FilePath + , archiveDir :: FilePath , githubDir :: FilePath } @@ -61,7 +62,7 @@ spec = do 
copySourceFiles Spec.describe "API pipelines run correctly" $ Spec.around withCleanEnv do - Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, githubDir } -> do + Spec.it "Publish a legacy-converted package with unused deps" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do logs <- liftEffect (Ref.new []) let @@ -80,6 +81,7 @@ spec = do , pursuitExcludes: Set.singleton (Utils.unsafePackageName "type-equality") , username: "jon" , storage: storageDir + , archive: archiveDir , github: githubDir } @@ -212,6 +214,65 @@ spec = do Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err Right (Right _) -> pure unit + + Spec.it "Falls back to archive when GitHub repo is inaccessible during legacy import" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do + logs <- liftEffect (Ref.new []) + + let + toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry + toLegacyIndex = + Solver.exploreAllTransitiveDependencies + <<< Solver.initializeRegistry + <<< map (map (_.dependencies <<< un Manifest)) + <<< ManifestIndex.toMap + + testEnv = + { workdir + , logs + , index + , metadata + , pursuitExcludes: Set.empty + , username: "jon" + , storage: storageDir + , archive: archiveDir + , github: githubDir + } + + -- The prelude@6.0.2 package exists in registry-archive but NOT in + -- github-packages or registry-storage. This simulates an archive-backed + -- package whose original GitHub repo is gone. + result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do + let + name = Utils.unsafePackageName "prelude" + version = Utils.unsafeVersion "6.0.2" + ref = "v6.0.2" + publishArgs = + { compiler: Utils.unsafeVersion "0.15.9" + , location: Just $ GitHub { owner: "purescript", repo: "purescript-prelude", subdir: Nothing } + , name + , ref + , resolutions: Nothing + } + + -- Legacy import with archive fallback + Registry.readAllManifests >>= \idx -> + API.publish (Just (toLegacyIndex idx)) publishArgs + + -- Verify the package was published to storage + Storage.query name >>= \versions -> + unless (Set.member version versions) do + Except.throw $ "Expected " <> formatPackageVersion name version <> " to be published to registry storage." + + case result of + Left exn -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Got an Aff exception! 
" <> Aff.message exn + Right (Left err) -> do + recorded <- liftEffect (Ref.read logs) + Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) + Assert.fail $ "Expected prelude@6.0.2 to be published via archive fallback but got error: " <> err + Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -244,6 +305,7 @@ spec = do copyFixture "registry-index" copyFixture "registry" copyFixture "registry-storage" + copyFixture "registry-archive" copyFixture "github-packages" let @@ -265,6 +327,7 @@ spec = do , metadata: fixtures.metadata , index: fixtures.index , storageDir: Path.concat [ testFixtures, "registry-storage" ] + , archiveDir: Path.concat [ testFixtures, "registry-archive" ] , githubDir: Path.concat [ testFixtures, "github-packages" ] } diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 42cc7d6ab..008d86cca 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -28,6 +28,9 @@ import Registry.API.V1 (LogLevel) import Registry.App.API (COMPILER_CACHE) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment (COMMENT) @@ -53,6 +56,7 @@ import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Prelude as Either import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (GitHubError(..), IssueNumber(..)) +import Registry.Foreign.Tar as Foreign.Tar import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName import Registry.Test.Utils as Utils @@ -82,6 +86,7 @@ type TEST_EFFECTS = + PACKAGE_SETS + STORAGE + SOURCE + + ARCHIVE + GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV @@ -104,6 +109,7 @@ type TestEnv = , index :: Ref ManifestIndex , pursuitExcludes :: Set PackageName , storage :: FilePath + , archive :: FilePath , github :: FilePath , username :: String } @@ -119,6 +125,7 @@ runTestEffects env operation = Aff.attempt do # PackageSets.interpret handlePackageSetsMock # Storage.interpret (handleStorageMock { storage: env.storage }) # Source.interpret (handleSourceMock { github: env.github }) + # Archive.interpret (handleArchiveMock { metadataRef: env.metadata, archive: env.archive }) # GitHub.interpret (handleGitHubMock { github: env.github }) -- Environments # Env.runGitHubEventEnv { username: env.username, issue: IssueNumber 1 } @@ -311,7 +318,7 @@ handleSourceMock env = case _ of case location of Git _ -> pure $ reply $ Left GitHubOnly GitHub { subdir } | isJust subdir -> pure $ reply $ Left NoSubdir - GitHub { repo } -> do + GitHub { owner, repo } -> do let name = stripPureScriptPrefix repo fixedRef = fromMaybe ref $ String.stripPrefix (String.Pattern "v") ref @@ -319,7 +326,7 @@ handleSourceMock env = case _ of localPath = Path.concat [ env.github, dirname ] destinationPath = Path.concat [ destination, dirname <> "-checkout" ] Run.liftAff (Aff.attempt (FS.Aff.stat localPath)) >>= case _ of - Left _ -> pure $ reply $ Left $ Fatal $ "Cannot copy " <> localPath <> " because it does not exist." 
+ Left _ -> pure $ reply $ Left $ InaccessibleRepo { owner, repo } Right _ -> do Run.liftAff $ FS.Extra.copy { from: localPath, to: destinationPath, preserveTimestamps: true } case pursPublishMethod of @@ -391,3 +398,73 @@ handleGitHubMock env = case _ of -- currently used in tests. GetCommitDate _address _ref reply -> pure $ reply $ Left $ UnexpectedError "Unimplemented" + +type ArchiveMockEnv = + { metadataRef :: Ref (Map PackageName Metadata) + , archive :: FilePath + } + +-- | A mock implementation for the ARCHIVE effect that uses the registry-archive +-- | fixtures as the archive source. Archive tarballs are expected to be in the +-- | same format as storage tarballs (name-version.tar.gz). +handleArchiveMock :: forall r a. ArchiveMockEnv -> Archive.Archive a -> Run (AFF + EFFECT + r) a +handleArchiveMock env = case _ of + Archive.Fetch destination name version reply -> map (map reply) Except.runExcept do + -- For testing, we look up publishedTime from metadata if available, but + -- fall back to current time if not (to support tests where metadata has + -- been modified but tarballs still exist). + now <- Run.liftEffect Now.nowDateTime + metadata <- Run.liftEffect (Ref.read env.metadataRef) + let + publishedTime = fromMaybe now do + Metadata m <- Map.lookup name metadata + publishedMeta <- Map.lookup version m.published + pure publishedMeta.publishedTime + + let + tarballName = Version.print version <> ".tar.gz" + sourcePath = Path.concat [ env.archive, PackageName.print name <> "-" <> Version.print version <> ".tar.gz" ] + absoluteTarballPath = Path.concat [ destination, tarballName ] + + Run.liftAff (Aff.attempt (FS.Aff.stat sourcePath)) >>= case _ of + Left _ -> + Except.throw $ Archive.DownloadFailed name version "Tarball not found in mock archive" + Right _ -> + Run.liftAff (Aff.attempt (FS.Aff.copyFile sourcePath absoluteTarballPath)) >>= case _ of + Left error -> + Except.throw $ Archive.DownloadFailed name version (Aff.message error) + Right _ -> + pure unit + + extractedPath <- Run.liftAff $ Foreign.Tar.getToplevelDir absoluteTarballPath + case extractedPath of + Nothing -> + Except.throw $ Archive.ExtractionFailed name version "Tarball has no top-level directory" + Just path -> do + Run.liftAff $ Tar.extract { cwd: destination, archive: tarballName } + -- Rename to avoid conflict with packaging directory (same as source mock's "-checkout" suffix) + -- Strip trailing slash if present + let cleanPath = fromMaybe path $ String.stripSuffix (String.Pattern "/") path + let extractedDir = Path.concat [ destination, cleanPath ] + let finalPath = Path.concat [ destination, cleanPath <> "-archive" ] + Run.liftAff $ FS.Aff.rename extractedDir finalPath + + -- Initialize a git repo for purs publish (same as source mock) + -- We do this inside liftAff to avoid EXCEPT type mismatch with Git.withGit + Run.liftAff $ case pursPublishMethod of + LegacyPursPublish -> do + FS.Aff.writeTextFile UTF8 (Path.concat [ finalPath, ".gitignore" ]) "output" + let exec args = void $ Git.gitCLI args (Just finalPath) + let ref = "v" <> Version.print version + exec [ "init" ] + exec [ "config", "user.name", "test-user" ] + exec [ "config", "user.email", "test-user@aol.com" ] + exec [ "config", "commit.gpgSign", "false" ] + exec [ "config", "tag.gpgSign", "false" ] + exec [ "add", "." 
] + exec [ "commit", "-m", "Initial commit" ] + exec [ "tag", "-m", ref, ref ] + PursPublish -> + pure unit + + pure { path: finalPath, published: publishedTime } diff --git a/flake.nix b/flake.nix index fb9d769cc..5da7ffbd6 100644 --- a/flake.nix +++ b/flake.nix @@ -214,6 +214,9 @@ SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; + # Dhall environment variables needed for manifest typechecking + inherit DHALL_TYPES DHALL_PRELUDE GIT_TERMINAL_PROMPT; + packages = with pkgs; registry-runtime-deps diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index c4632e755..ee9423d68 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -57,7 +57,7 @@ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Variant as CJ.Variant import Data.Compactable (separate) -import Data.DateTime (Date, DateTime, Month(..)) +import Data.DateTime (Date, Month(..)) import Data.DateTime as DateTime import Data.Enum (toEnum) import Data.Exists as Exists @@ -103,6 +103,7 @@ import Registry.App.CLI.Purs (CompilerFailure, compilerFailureCodec) import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment @@ -195,6 +196,7 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure >>> Source.interpret (Source.handle Source.Old) @@ -206,6 +208,7 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle (registryEnv Git.Autostash (Registry.CommitAs (Git.pacchettibottiCommitter token)))) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret Pursuit.handlePure >>> Source.interpret (Source.handle Source.Old) @@ -217,6 +220,7 @@ main = launchAff_ do octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl pure do Registry.interpret (Registry.handle (registryEnv Git.ForceClean (Registry.CommitAs (Git.pacchettibottiCommitter token)))) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleS3 { s3, cache }) >>> Pursuit.interpret (Pursuit.handleAff token) >>> Source.interpret (Source.handle Source.Recent) @@ -457,7 +461,7 @@ runLegacyImport logs = do path <- if isArchiveBacked then do Log.info $ "Using registry archive for " <> formatted <> " instead of GitHub clone." 
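+          -- Archive.fetch goes through the ARCHIVE effect, so this script shares
+          -- the production handler while tests can interpret it with a mock.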
- { path: archivePath } <- fetchFromArchive tmp manifest.name manifest.version + { path: archivePath } <- Archive.fetch tmp manifest.name manifest.version pure archivePath else do { path: sourcePath } <- Source.fetch tmp manifest.location ref @@ -1586,9 +1590,6 @@ instance FsEncodable ImportCache where let codec = publishErrorCodec Exists.mkExists $ AsJson ("PublishFailure__" <> PackageName.print name <> "__" <> Version.print version) codec next -registryArchiveRawUrl :: String -registryArchiveRawUrl = "https://raw.githubusercontent.com/purescript/registry-archive/main" - -- | Fetch a manifest directly from the registry archive tarball. -- | Used for archive-backed packages where the original GitHub repo is unavailable. fetchManifestFromArchive :: forall r. PackageName -> Version -> Run (LOG + EXCEPT String + AFF + EFFECT + r) Manifest @@ -1601,7 +1602,7 @@ fetchManifestFromArchive name version = do versionStr = Version.print version tarballName = versionStr <> ".tar.gz" absoluteTarballPath = Path.concat [ tmp, tarballName ] - archiveUrl = registryArchiveRawUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" + archiveUrl = Archive.registryArchiveUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" Log.debug $ "Fetching archive tarball from: " <> archiveUrl response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} @@ -1659,127 +1660,3 @@ fetchManifestFromArchive name version = do FS.Extra.remove tmp Log.debug $ "Successfully fetched manifest from archive for " <> formatted pure manifest - -type ArchiveFetchedSource = - { path :: FilePath - , published :: DateTime - } - -fetchFromArchive - :: forall r - . FilePath - -> PackageName - -> Version - -> Run (REGISTRY + GITHUB + LOG + EXCEPT String + AFF + EFFECT + r) ArchiveFetchedSource -fetchFromArchive destination name version = do - let - nameStr = PackageName.print name - versionStr = Version.print version - tarballName = versionStr <> ".tar.gz" - absoluteTarballPath = Path.concat [ destination, tarballName ] - archiveUrl = registryArchiveRawUrl <> "/" <> nameStr <> "/" <> versionStr <> ".tar.gz" - - Log.debug $ "Fetching archive tarball from: " <> archiveUrl - - response <- Run.liftAff $ Fetch.withRetryRequest archiveUrl {} - - case response of - Cancelled -> - Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl - Failed (Fetch.FetchError error) -> do - Log.error $ "HTTP error when fetching archive: " <> Exception.message error - Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl - Failed (Fetch.StatusError { status, arrayBuffer: arrayBufferAff }) -> do - arrayBuffer <- Run.liftAff arrayBufferAff - buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer - bodyString <- Run.liftEffect $ Buffer.toString UTF8 (buffer :: Buffer) - Log.error $ "Bad status (" <> show status <> ") when fetching archive with body: " <> bodyString - Run.Except.throw $ "Could not download archive tarball from " <> archiveUrl <> " (status " <> show status <> ")" - Succeeded { arrayBuffer: arrayBufferAff } -> do - arrayBuffer <- Run.liftAff arrayBufferAff - buffer <- Run.liftEffect $ Buffer.fromArrayBuffer arrayBuffer - Run.liftAff (Aff.attempt (FS.Aff.writeFile absoluteTarballPath buffer)) >>= case _ of - Left error -> do - Log.error $ "Downloaded archive but failed to write to " <> absoluteTarballPath <> ": " <> Aff.message error - Run.Except.throw $ "Could not save archive tarball for " <> formatPackageVersion name version - Right _ -> - Log.debug $ "Tarball downloaded to " <> absoluteTarballPath - - 
Foreign.Tar.getToplevelDir absoluteTarballPath >>= case _ of - Nothing -> - Run.Except.throw $ "Downloaded archive tarball for " <> formatPackageVersion name version <> " has no top-level directory." - Just extractedPath -> do - Log.debug "Extracting archive tarball..." - Tar.extract { cwd: destination, archive: tarballName } - -- Archive-backed packages may not have local metadata, so fetch from remote registry - publishedTime <- lookupRemotePublishedTime name version - pure { path: Path.concat [ destination, extractedPath ], published: publishedTime } - -lookupPublishedTime - :: forall r - . PackageName - -> Version - -> Run (REGISTRY + EXCEPT String + r) DateTime -lookupPublishedTime name version = do - Registry.readMetadata name >>= case _ of - Nothing -> - Run.Except.throw $ "No metadata found for " <> PackageName.print name - Just (Metadata m) -> - case Map.lookup version m.published of - Nothing -> - Run.Except.throw $ "No published metadata for " <> formatPackageVersion name version - Just publishedMeta -> - pure publishedMeta.publishedTime - --- | Look up published time, falling back to remote registry if local metadata is missing. --- | Used for archive-backed packages where the local checkout may not have metadata. -lookupRemotePublishedTime - :: forall r - . PackageName - -> Version - -> Run (REGISTRY + GITHUB + LOG + EXCEPT String + r) DateTime -lookupRemotePublishedTime name version = do - Registry.readMetadata name >>= case _ of - Just (Metadata m) -> - case Map.lookup version m.published of - Nothing -> - Run.Except.throw $ "No published metadata for " <> formatPackageVersion name version - Just publishedMeta -> - pure publishedMeta.publishedTime - Nothing -> do - Log.debug $ "No local metadata for " <> PackageName.print name <> ", fetching from remote registry..." - fetchRemotePublishedTime name version >>= case _ of - Nothing -> Run.Except.throw $ "No metadata found for " <> PackageName.print name - Just time -> pure time - --- | Fetch the published time for a specific version from the remote registry repo (main branch). --- | Used as a fallback when the local registry checkout doesn't have metadata for archive-backed packages. -fetchRemotePublishedTime :: forall r. 
PackageName -> Version -> Run (GITHUB + LOG + r) (Maybe DateTime) -fetchRemotePublishedTime name version = do - let - printed = PackageName.print name - path = Path.concat [ Constants.metadataDirectory, printed <> ".json" ] - Log.debug $ "Fetching published time for " <> formatPackageVersion name version <> " from remote registry" - GitHub.getContent Constants.registry (RawVersion "main") path >>= case _ of - Left err -> do - Log.warn $ "Failed to fetch remote metadata for " <> printed <> ": " <> Octokit.printGitHubError err - pure Nothing - Right content -> do - let - parsed = do - json <- hush $ JSON.parse content - obj <- JSON.toJObject json - publishedJson <- JSON.Object.lookup "published" obj - publishedObj <- JSON.toJObject publishedJson - versionJson <- JSON.Object.lookup (Version.print version) publishedObj - versionObj <- JSON.toJObject versionJson - timeJson <- JSON.Object.lookup "publishedTime" versionObj - timeStr <- JSON.toString timeJson - hush $ Formatter.DateTime.unformat Internal.Format.iso8601DateTime timeStr - case parsed of - Nothing -> do - Log.warn $ "Could not extract publishedTime for " <> formatPackageVersion name version <> " from remote metadata" - pure Nothing - Just dt -> do - Log.debug $ "Fetched published time for " <> formatPackageVersion name version <> " from remote registry" - pure $ Just dt diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index f0cb1c63f..950871d0b 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -19,6 +19,7 @@ import Node.Process as Process import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env @@ -152,6 +153,7 @@ main = launchAff_ do let interpret = Registry.interpret (Registry.handle registryEnv) + >>> Archive.interpret Archive.handle >>> Storage.interpret (if arguments.upload then Storage.handleS3 { s3, cache } else Storage.handleReadOnly cache) >>> Source.interpret (Source.handle Source.Old) >>> GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 8fa9a7070..13280a0af 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -31,6 +31,7 @@ import Parsing as Parsing import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env @@ -126,6 +127,7 @@ main = launchAff_ do let runAppEffects = Registry.interpret (Registry.handle (registryEnv Git.Autostash Registry.ReadOnly)) + >>> Archive.interpret Archive.handle >>> Storage.interpret (Storage.handleReadOnly cache) >>> Pursuit.interpret Pursuit.handlePure >>> Source.interpret (Source.handle Source.Old) From 8c8c7923485d81c195646ff4161171873c233f0d Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 00:09:05 -0500 Subject: [PATCH 62/64] add missing env vars --- flake.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/flake.nix b/flake.nix index 5da7ffbd6..edca743d8 100644 --- a/flake.nix +++ b/flake.nix @@ -132,6 +132,7 @@ spago-test = pkgs.runCommand "spago-test" { + inherit DHALL_TYPES DHALL_PRELUDE; nativeBuildInputs = with pkgs; [ From 
2657f8ca9521b6298436738f0b9851a0a268feb1 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Thu, 8 Jan 2026 02:59:10 +0200 Subject: [PATCH 63/64] Job queue, matrix builder, concurrency control (#709) * Update database schemas and add job executor loop * Split Server module into Env, Router, JobExecutor, and Main * Fix up build * Run job executor * Fix integration tests * WIP matrix builds * add missing version to publish fixtures the publishCodec requires a version file but the test fixtures weren't updated to include it * Add missing packageName and packageVersion to InsertMatrixJob The JS insertMatrixJobImpl expects columns [jobId, packageName, packageVersion, compilerVersion, payload] but the PureScript types were missing packageName and packageVersion * Fix finishedAt timestamp to capture time after job execution * Implement matrix jobs, and the recursive enqueuing of new ones * Reset incomplete jobs so they can be picked up again * Run matrix jobs for the whole registry when finding a new compiler version * resolve build issues * fix smoke test * Split package jobs into separate tables, return all data from the job endpoint * implement thin client for github issues replaces the old GitHubIssue which ran registry jobs directly with one that hits the registry api instead. also added integration tests that ensure various jobs can be kicked off as github issue events and we get the resulting comments, issue close events, etc. * clean up test failures * reinstate missing comments * Remove COMMENT effect, add NOTIFY log * Implement endpoint for returning jobs * Check for existing jobs before enqueueing new ones * Add E2E test: publishing a package enqueues matrix jobs * Add E2E test: run a whole-registry upgrade when detecting a new compiler * Don't fail job fetch on unreadable logs * Fix archive seeder build * remove effect-4.0.0 from storage in unit tests * avoid race condition in initial jobs test The "can list jobs" test was asserting that initial matrix jobs have success: true, but the job executor runs asynchronously and jobs may not have completed by the time the test queries the API. Fixed by normalizing the 'success' field to a constant before comparison. * format * second test * Refactor e2e tests with wiremock scenarios (#713) * refactor e2e tests with wiremock scenarios also adds a number of new e2e tests for various scenarios * format, etc. 
* move out fixtures * relax cache deletion * strengthen assertions, fix discovered bugs * drop ref, move to manifest (#714) * review feedback * more feedback * trim tests down a bit to optimize speed to ~60s * Add endpoint for package set jobs + e2e tests for it * tweak unpublish test to verify matrix jobs fail gracefully * tweak agents to refer to scratch logs * remove slow archive seeder test * fix tests by bumping compiler --------- Co-authored-by: Thomas Honeyman Co-authored-by: Fyodor Soikin Co-authored-by: pacchettibotti Co-authored-by: Thomas Honeyman --- .env.example | 54 +- AGENTS.md | 49 +- CONTRIBUTING.md | 23 +- SPEC.md | 2 + app-e2e/spago.yaml | 19 +- app-e2e/src/Test/E2E/Endpoint/Jobs.purs | 63 + .../src/Test/E2E/Endpoint/PackageSets.purs | 52 + app-e2e/src/Test/E2E/Endpoint/Publish.purs | 76 ++ app-e2e/src/Test/E2E/Endpoint/Transfer.purs | 51 + app-e2e/src/Test/E2E/Endpoint/Unpublish.purs | 95 ++ app-e2e/src/Test/E2E/GitHubIssue.purs | 149 +++ app-e2e/src/Test/E2E/Main.purs | 22 - app-e2e/src/Test/E2E/Publish.purs | 84 -- app-e2e/src/Test/E2E/Support/Client.purs | 211 +++ app-e2e/src/Test/E2E/Support/Env.purs | 312 +++++ app-e2e/src/Test/E2E/Support/Fixtures.purs | 286 +++++ app-e2e/src/Test/E2E/Support/Types.purs | 48 + app-e2e/src/Test/E2E/Support/WireMock.purs | 173 +++ app-e2e/src/Test/E2E/Workflow.purs | 88 ++ app-e2e/src/Test/Main.purs | 40 + app/fixtures/addition_issue_created.json | 2 +- .../github-packages/console-6.1.0/LICENSE | 26 + .../github-packages/console-6.1.0/bower.json | 22 + .../console-6.1.0/src/Effect/Console.js | 9 + .../console-6.1.0/src/Effect/Console.purs | 46 + .../package-sets/latest-compatible-sets.json | 2 +- .../registry-archive/prelude-6.0.2.tar.gz | Bin 31025 -> 31321 bytes app/fixtures/registry-index/pr/el/prelude | 2 +- .../registry-index/ty/pe/type-equality | 2 +- .../registry-storage/console-6.1.0.tar.gz | Bin 0 -> 1646 bytes .../registry-storage/effect-4.0.0.tar.gz | Bin 0 -> 6262 bytes .../registry-storage/prelude-6.0.1.tar.gz | Bin 31142 -> 31313 bytes app/fixtures/registry/metadata/prelude.json | 4 +- .../registry/metadata/type-equality.json | 4 +- app/fixtures/registry/package-sets/0.0.1.json | 8 + app/fixtures/update_issue_comment.json | 2 +- app/spago.yaml | 2 +- app/src/App/API.purs | 265 ++-- app/src/App/Auth.purs | 20 +- app/src/App/CLI/Git.purs | 4 +- app/src/App/Effect/Archive.purs | 2 +- app/src/App/Effect/Comment.purs | 68 - app/src/App/Effect/Db.purs | 222 +++- app/src/App/Effect/Env.purs | 23 + app/src/App/Effect/Log.purs | 10 +- app/src/App/Effect/Registry.purs | 33 +- app/src/App/Effect/Storage.purs | 1 + app/src/App/GitHubIssue.purs | 344 +++-- app/src/App/Legacy/Manifest.purs | 6 +- app/src/App/Legacy/PackageSet.purs | 13 +- app/src/App/Main.purs | 90 ++ app/src/App/Manifest/SpagoYaml.purs | 8 +- app/src/App/Prelude.purs | 2 +- app/src/App/SQLite.js | 249 +++- app/src/App/SQLite.purs | 860 +++++++++++-- app/src/App/Server.purs | 346 ----- app/src/App/Server/Env.purs | 191 +++ app/src/App/Server/JobExecutor.purs | 180 +++ app/src/App/Server/MatrixBuilder.purs | 234 ++++ app/src/App/Server/Router.purs | 178 +++ app/test/App/API.purs | 82 +- app/test/App/GitHubIssue.purs | 5 + app/test/App/Legacy/PackageSet.purs | 32 +- app/test/App/Manifest/SpagoYaml.purs | 2 +- app/test/Test/Assert/Run.purs | 4 - .../20240914170550_delete_jobs_logs_table.sql | 22 + ...20240914171030_create_job_queue_tables.sql | 76 ++ db/schema.sql | 64 +- flake.nix | 16 +- lib/fixtures/manifests/aff-5.1.2.json | 1 + 
lib/fixtures/manifests/mysql-4.1.1.json | 1 + lib/fixtures/manifests/prelude-4.1.1.json | 1 + lib/src/API/V1.purs | 232 +++- lib/src/Manifest.purs | 2 + lib/src/ManifestIndex.purs | 26 +- lib/src/Metadata.purs | 7 - lib/src/Operation.purs | 67 +- lib/src/Solver.purs | 5 +- lib/test/Registry/ManifestIndex.purs | 7 +- lib/test/Registry/Metadata.purs | 12 +- lib/test/Registry/Operation.purs | 6 +- lib/test/Registry/Operation/Validation.purs | 5 +- nix/overlay.nix | 7 +- nix/test/config.nix | 775 ++++++++--- nix/test/integration.nix | 23 +- nix/test/smoke.nix | 41 +- nix/test/test-env.nix | 35 +- package-lock.json | 1132 ++++++++--------- scripts/src/ArchiveSeeder.purs | 2 - scripts/src/LegacyImporter.purs | 12 +- scripts/src/PackageDeleter.purs | 18 +- scripts/src/PackageSetUpdater.purs | 2 - scripts/src/PackageTransferrer.purs | 2 - scripts/src/Solver.purs | 3 - spago.lock | 109 +- test-utils/spago.yaml | 6 - test-utils/src/Registry/Test/Assert.purs | 12 + test-utils/src/Registry/Test/E2E/Client.purs | 180 --- test-utils/src/Registry/Test/Fixtures.purs | 18 + test-utils/src/Registry/Test/Utils.purs | 10 +- types/v1/Manifest.dhall | 1 + 101 files changed, 6171 insertions(+), 2269 deletions(-) create mode 100644 app-e2e/src/Test/E2E/Endpoint/Jobs.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/PackageSets.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Publish.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Transfer.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Unpublish.purs create mode 100644 app-e2e/src/Test/E2E/GitHubIssue.purs delete mode 100644 app-e2e/src/Test/E2E/Main.purs delete mode 100644 app-e2e/src/Test/E2E/Publish.purs create mode 100644 app-e2e/src/Test/E2E/Support/Client.purs create mode 100644 app-e2e/src/Test/E2E/Support/Env.purs create mode 100644 app-e2e/src/Test/E2E/Support/Fixtures.purs create mode 100644 app-e2e/src/Test/E2E/Support/Types.purs create mode 100644 app-e2e/src/Test/E2E/Support/WireMock.purs create mode 100644 app-e2e/src/Test/E2E/Workflow.purs create mode 100644 app-e2e/src/Test/Main.purs create mode 100644 app/fixtures/github-packages/console-6.1.0/LICENSE create mode 100644 app/fixtures/github-packages/console-6.1.0/bower.json create mode 100644 app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js create mode 100644 app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs create mode 100644 app/fixtures/registry-storage/console-6.1.0.tar.gz create mode 100644 app/fixtures/registry-storage/effect-4.0.0.tar.gz create mode 100644 app/fixtures/registry/package-sets/0.0.1.json delete mode 100644 app/src/App/Effect/Comment.purs create mode 100644 app/src/App/Main.purs delete mode 100644 app/src/App/Server.purs create mode 100644 app/src/App/Server/Env.purs create mode 100644 app/src/App/Server/JobExecutor.purs create mode 100644 app/src/App/Server/MatrixBuilder.purs create mode 100644 app/src/App/Server/Router.purs create mode 100644 db/migrations/20240914170550_delete_jobs_logs_table.sql create mode 100644 db/migrations/20240914171030_create_job_queue_tables.sql delete mode 100644 test-utils/src/Registry/Test/E2E/Client.purs create mode 100644 test-utils/src/Registry/Test/Fixtures.purs diff --git a/.env.example b/.env.example index febae2d29..78a8fbebb 100644 --- a/.env.example +++ b/.env.example @@ -1,38 +1,44 @@ -# ===== -# Dev Configuration -# The devShell reads this file to set defaults, so changing values here -# affects local development. 
-# ===== +# ----------------------------------------------------------------------------- +# Server Configuration (dev defaults, required in all environments) +# ----------------------------------------------------------------------------- -# Server port - used by both the server and E2E tests +# Port the registry server listens on +# - Dev/Test: 9000 (from this file) +# - Prod: Set in deployment config SERVER_PORT=9000 # SQLite database path (relative to working directory) +# - Dev: Uses local ./db directory +# - Test: Overridden to use temp state directory +# - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" -# ===== -# Dev Secrets -# these must be set in .env when running scripts like legacy-importer -# ===== +# ----------------------------------------------------------------------------- +# Secrets (required for production, use dummy values for local dev) +# ----------------------------------------------------------------------------- +# IMPORTANT: Never commit real secrets. The values below are dummies for testing. -# GitHub personal access token for API requests when running scripts -GITHUB_TOKEN="ghp_your_personal_access_token" - -# ===== -# Prod Secrets -# these must be set in .env to run the production server and some scripts -# ===== - -# DigitalOcean Spaces credentials for S3-compatible storage -SPACES_KEY="digitalocean_spaces_key" -SPACES_SECRET="digitalocean_spaces_secret" - -# Pacchettibotti bot account credentials -# Used for automated registry operations (commits, releases, etc.) +# GitHub personal access token for pacchettibotti bot +# Used for: commits to registry repos, issue management PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token" # Pacchettibotti SSH keys (base64-encoded) +# Used for: signing authenticated operations (unpublish, transfer) # Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org" # Encode with: cat key | base64 | tr -d '\n' PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn" PACCHETTIBOTTI_ED25519="YWJjeHl6" + +# DigitalOcean Spaces credentials for S3-compatible storage +# Used for: uploading/downloading package tarballs +SPACES_KEY="digitalocean_spaces_key" +SPACES_SECRET="digitalocean_spaces_secret" + + +# ----------------------------------------------------------------------------- +# Script-only Secrets (not used by server, used by scripts like legacy-importer) +# ----------------------------------------------------------------------------- + +# Personal GitHub token for API requests when running scripts +# This is YOUR token, not pacchettibotti's +GITHUB_TOKEN="ghp_your_personal_access_token" diff --git a/AGENTS.md b/AGENTS.md index 43e474c2a..5ce5268dc 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -10,26 +10,63 @@ This project uses Nix with direnv. You should already be in the Nix shell automa nix develop ``` -### Build and Test +Watch out for these Nix quirks: +- If Nix tries to fetch from git during a build, it is likely that spago.yaml files were changed but the lock file was not updated; if so, update the lockfile with `spago build` +- If a Nix build appears to be stale, then it is likely files were modified but are untracked by Git; if so, add modified files with `git add` and retry. -The registry is implemented in PureScript. Use spago to build it and run PureScript tests. These are cheap and fast and should be used when working on the registry packages. +### Build + +The registry is implemented in PureScript. Use spago to build it. 
 ```sh
 spago build # Build all PureScript code
-spago test # Run unit tests
 ```
 
-Integration tests require two terminals (or the use of test-env in detached mode). The integration tests are only necessary to run if working on the server (app).
+The registry infrastructure is defined in Nix. Build it with Nix:
+
+```sh
+nix build .#server
+```
+
+### Test
+
+The registry contains a mixture of unit tests, e2e tests, and nix flake checks. When you complete a change, you should generally run the unit tests. When working on the server, you should generally also run the e2e tests. If you are on a Linux system, you can run `nix flake check -L` prior to committing code to ensure the flake checks pass.
+
+#### Unit Tests
+
+Unit tests can be run with `spago`. They are fast and cheap.
+
+```sh
+spago test # Run all unit tests
+spago test -p <package> # Run tests for a specific package
+```
+
+#### End-to-End Tests
+
+The end-to-end (integration) tests are in `app-e2e`. They can be run via Nix on Linux:
+
+```sh
+nix build .#checks.x86_64-linux.integration
+```
+
+Alternatively, they can be run on macOS, or for more iterative test development, using two terminals: one to start the test env, and one to execute the tests.
 
 ```sh
 # Terminal 1: Start test environment (wiremock mocks + registry server on port 9000)
 nix run .#test-env
 
 # Terminal 2: Run E2E tests once server is ready
-spago run -p registry-app-e2e
+spago-test-e2e
 ```
 
-Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode.
+Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode, which needs only a single terminal.
+
+State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. This is useful, for example, to read the logs of the most recent run:
+
+```sh
+# after a test run, see the logs (log name is today's date)
+cat /tmp/registry-test-env/scratch/logs/*.log
+```
 
 #### Smoke Test (Linux only)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 92f5f9dcf..ebe38a0dd 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -72,20 +72,29 @@ nix build .#checks.x86_64-linux.smoke -L
 
 ### Integration Test
 
+You can run the integration tests with the following command on Linux:
+
+```sh
+nix build .#checks.x86_64-linux.integration -L
+```
+
+On macOS, or for iterative development, you can instead start the test environment and run the tests separately.
 
 ```sh
 # Terminal 1: Start the test environment (wiremock mocks + registry server)
 nix run .#test-env
 
-# Terminal 2: Once the server is ready, run the E2E tests
-spago run -p registry-app-e2e
+# Terminal 2: Run E2E tests once server is ready
+spago-test-e2e
 ```
 
 The test environment:
 
 - Starts wiremock services mocking GitHub, S3, Pursuit, etc.
-- Starts the registry server on port 9000 with a temporary SQLite database
+- Starts the registry server with a temporary SQLite database
 - Uses fixture data from `app/fixtures/`
+- State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`
 
-Press `Ctrl+C` in Terminal 1 to stop all services. State is cleaned up automatically.
+Press `Ctrl+C` in Terminal 1 to stop all services.
 
 All arguments after `--` are passed directly to process-compose:
 
@@ -101,7 +110,11 @@ process-compose attach # Attach TUI
 process-compose down # Stop all services
 ```
 
-You can also set `STATE_DIR` to use a persistent state directory instead of a temp dir.
+To examine state after a test run (e.g., for debugging), stop the test-env but don't restart it. The state remains in `/tmp/registry-test-env`: +- `db/registry.sqlite3` — SQLite database +- `scratch/registry/` — Local registry clone with metadata +- `scratch/registry-index/` — Local manifest index clone +- `repo-fixtures/` — Git fixture repositories ## Available Nix Commands diff --git a/SPEC.md b/SPEC.md index 423d0d80d..54c627d05 100644 --- a/SPEC.md +++ b/SPEC.md @@ -197,6 +197,7 @@ All packages in the registry contain a `purs.json` manifest file in their root d - `version`: a valid [`Version`](#version) - `license`: a valid [`License`](#license) - `location`: a valid [`Location`](#location) +- `ref`: a `string` representing the reference (e.g., a Git commit or Git tag) at the `location` that was used to fetch this version's source code - `owners` (optional): a non-empty array of [`Owner`](#owner) - `description` (optional): a description of your library as a plain text string, not markdown, up to 300 characters - `includeFiles` (optional): a non-empty array of globs, where globs are used to match file paths (in addition to the `src` directory and other [always-included files](#always-included-files)) that you want included in your package tarball @@ -221,6 +222,7 @@ For example: "githubOwner": "purescript", "githubRepo": "purescript-control" }, + "ref": "v4.2.0", "include": ["test/**/*.purs"], "exclude": ["test/graphs"], "dependencies": { "newtype": ">=3.0.0 <4.0.0", "prelude": ">=4.0.0 <5.0.0" } diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index 1fa902f14..fb3804b90 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -5,16 +5,27 @@ package: dependencies: - aff - arrays + - codec-json - console - datetime - - effect - - either - - maybe - - prelude + - exceptions + - fetch + - integers + - json + - node-child-process + - node-execa + - node-fs + - node-path + - node-process + - ordered-collections + - registry-app + - registry-foreign - registry-lib - registry-test-utils + - routing-duplex - spec - spec-node - strings + - transformers run: main: Test.E2E.Main diff --git a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs new file mode 100644 index 000000000..e02b623b5 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs @@ -0,0 +1,63 @@ +module Test.E2E.Endpoint.Jobs (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 (JobId(..)) +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Status endpoint" do + Spec.it "can reach the status endpoint" do + Client.getStatus + + Spec.describe "Jobs API" do + Spec.it "query parameters and filtering work correctly" do + -- Publish once and test all Jobs API features + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + let info = V1.jobInfo job + + -- Test: include_completed filtering + recentJobs <- Client.getJobsWith Client.ActiveOnly + allJobs <- Client.getJobsWith Client.IncludeCompleted + let allCount = Array.length allJobs + Assert.shouldSatisfy allCount (_ > 0) + let recentCount = Array.length recentJobs + Assert.shouldSatisfy recentCount (_ <= allCount) + let completedJob = Array.find (\j -> isJust (V1.jobInfo j).finishedAt) allJobs + case completedJob of 
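+        -- A finished job (one with finishedAt set) must be excluded from the
+        -- ActiveOnly (include_completed=false) listing, so finding it in
+        -- recentJobs below is a failure.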
+ Just completed -> do + let + completedId = (V1.jobInfo completed).jobId + inRecent = Array.any (\j -> (V1.jobInfo j).jobId == completedId) recentJobs + when inRecent do + Assert.fail $ "Completed job " <> unwrap completedId <> " should be excluded from include_completed=false results" + Nothing -> pure unit + + -- Test: query parameters (level and since) + baseJob <- Client.getJob jobId Nothing Nothing + Assert.shouldEqual (V1.jobInfo baseJob).jobId info.jobId + debugJob <- Client.getJob jobId (Just V1.Debug) Nothing + Assert.shouldEqual (V1.jobInfo debugJob).jobId info.jobId + let sinceTime = fromMaybe info.createdAt info.finishedAt + sinceJob <- Client.getJob jobId Nothing (Just sinceTime) + Assert.shouldEqual (V1.jobInfo sinceJob).jobId info.jobId + + Spec.it "returns HTTP 404 for non-existent job ID" do + let fakeJobId = JobId "nonexistent-job-id-12345" + result <- Client.tryGetJob fakeJobId Nothing Nothing + case result of + Right _ -> + Assert.fail "Expected HTTP 404 for non-existent job" + Left err -> + case Client.clientErrorStatus err of + Just 404 -> pure unit + _ -> Assert.fail $ "Expected HTTP 404, got: " <> Client.printClientError err diff --git a/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs new file mode 100644 index 000000000..502853fbd --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs @@ -0,0 +1,52 @@ +module Test.E2E.Endpoint.PackageSets (spec) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask) +import Effect.Aff as Aff +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Package Sets endpoint" do + Spec.it "accepts unauthenticated add/upgrade requests" do + { jobId } <- Client.packageSets Fixtures.packageSetAddRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "rejects unauthenticated compiler change requests" do + result <- Client.tryPackageSets Fixtures.packageSetCompilerChangeRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated compiler change" + + Spec.it "rejects unauthenticated package removal requests" do + result <- Client.tryPackageSets Fixtures.packageSetRemoveRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated package removal" + + Spec.it "accepts authenticated compiler change requests" do + { privateKey } <- ask + case Fixtures.signPackageSet privateKey Fixtures.packageSetCompilerChangeRequest of + Left err -> + liftAff $ Aff.throwError $ Aff.error $ "Failed to sign request: " <> err + Right signedRequest -> do + { jobId } <- Client.packageSets signedRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "returns existing job for duplicate requests" do + { jobId: firstJobId } <- Client.packageSets Fixtures.packageSetAddRequest + { jobId: secondJobId } <- Client.packageSets Fixtures.packageSetAddRequest + Assert.shouldEqual firstJobId secondJobId diff --git a/app-e2e/src/Test/E2E/Endpoint/Publish.purs b/app-e2e/src/Test/E2E/Endpoint/Publish.purs new file mode 
100644 index 000000000..47e51c959 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Publish.purs @@ -0,0 +1,76 @@ +module Test.E2E.Endpoint.Publish (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NEA +import Data.Map as Map +import Data.Set as Set +import Data.String as String +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.Manifest (Manifest(..)) +import Registry.Metadata (Metadata(..)) +import Registry.Sha256 as Sha256 +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish workflow" do + Spec.it "can publish effect@4.0.0 and verify all state changes" do + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + uploadOccurred <- Env.hasStorageUpload Fixtures.effect + unless uploadOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 PUT for effect/4.0.0.tar.gz" storageRequests + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.published of + Nothing -> Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in metadata published versions" + Just publishedMeta -> do + Assert.shouldSatisfy (Sha256.print publishedMeta.hash) (not <<< String.null) + + manifestEntries <- Env.readManifestIndexEntry Fixtures.effect.name + let hasVersion = Array.any (\(Manifest m) -> m.version == Fixtures.effect.version) manifestEntries + unless hasVersion do + Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in manifest index" + + Env.waitForAllMatrixJobs Fixtures.effect + + -- Collect the compilers from the matrix jobs that ran for this package + allJobs <- Client.getJobsWith Client.IncludeCompleted + let + matrixCompilers = Array.mapMaybe + ( case _ of + MatrixJob { packageName, packageVersion, compilerVersion } -> + if packageName == Fixtures.effect.name && packageVersion == Fixtures.effect.version then Just compilerVersion + else Nothing + _ -> Nothing + ) + allJobs + -- The expected compilers are: the publish compiler + all matrix job compilers + expectedCompilers = Set.fromFoldable $ Array.cons Fixtures.effectPublishData.compiler matrixCompilers + + Metadata metadataAfter <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadataAfter.published of + Nothing -> Assert.fail "Version missing after matrix jobs" + Just publishedMetaAfter -> do + let actualCompilers = Set.fromFoldable $ NEA.toArray publishedMetaAfter.compilers + Assert.shouldEqual actualCompilers expectedCompilers + + Spec.describe "Publish state machine" do + Spec.it "returns same jobId for duplicate publish requests" do + { jobId: id1 } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail id1 + { jobId: id2 } <- Client.publish Fixtures.effectPublishData + Assert.shouldEqual id1 id2 diff --git a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs new file mode 100644 index 000000000..6e3d49eef --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs @@ -0,0 +1,51 @@ +module Test.E2E.Endpoint.Transfer 
(spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 as V1 +import Registry.Location (Location(..)) +import Registry.Metadata (Metadata(..)) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Transfer workflow" do + Spec.it "can transfer effect to a new location with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + -- Note: we don't wait for matrix jobs - transfer only modifies metadata + + Metadata originalMetadata <- Env.readMetadata Fixtures.effect.name + case originalMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "purescript" + Git _ -> Assert.fail "Expected GitHub location, got Git" + + -- clear the publish PUT so we can verify transfers leave storage unaffected + WireMock.clearStorageRequests + + authData <- Env.signTransferOrFail Fixtures.effectTransferData + { jobId: transferJobId } <- Client.transfer authData + transferJob <- Env.pollJobOrFail transferJobId + Assert.shouldSatisfy (V1.jobInfo transferJob).finishedAt isJust + + Metadata newMetadata <- Env.readMetadata Fixtures.effect.name + case newMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "new-owner" + Git _ -> Assert.fail "Expected GitHub location after transfer, got Git" + + storageRequests <- WireMock.getStorageRequests + let + packagePath = PackageName.print Fixtures.effect.name + putOrDeleteRequests = Array.filter + (\r -> (r.method == "PUT" || r.method == "DELETE") && WireMock.filterByUrlContaining packagePath [ r ] /= []) + storageRequests + unless (Array.null putOrDeleteRequests) do + WireMock.failWithRequests "Transfer should not PUT or DELETE to storage" putOrDeleteRequests diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs new file mode 100644 index 000000000..c58e88ea6 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs @@ -0,0 +1,95 @@ +module Test.E2E.Endpoint.Unpublish (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish-Unpublish workflow" do + Spec.it "can publish then unpublish with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + + existsBefore <- Env.manifestIndexEntryExists Fixtures.effect + unless existsBefore do + Assert.fail "Expected version to exist in manifest index before unpublish" + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + + case 
Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just unpublishedInfo -> + Assert.shouldSatisfy unpublishedInfo.reason (not <<< String.null) + + when (Map.member Fixtures.effect.version metadata.published) do + Assert.fail "Version 4.0.0 should not be in 'published' metadata after unpublish" + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 DELETE for effect/4.0.0.tar.gz" storageRequests + + existsAfter <- Env.manifestIndexEntryExists Fixtures.effect + when existsAfter do + Assert.fail "Expected version to be removed from manifest index after unpublish" + + -- Test race condition: submit unpublish while publish is still running. + -- Job priority (Unpublish > Matrix) ensures unpublish runs before matrix jobs. + Spec.it "unpublishing before matrix jobs complete causes them to fail gracefully" do + -- Submit publish, don't wait for it to complete + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + + -- Immediately submit unpublish - it will be queued and run after publish + -- but BEFORE matrix jobs due to job priority ordering + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + + -- Now wait for publish to complete + _ <- Env.pollJobOrFail publishJobId + + -- Wait for unpublish to complete + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + -- Verify unpublish succeeded + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just _ -> pure unit + + -- Wait for matrix jobs to complete + Env.waitForAllMatrixJobs Fixtures.effect + + -- Verify matrix jobs failed (they tried to download deleted tarball) + jobs <- Client.getJobs + let + matrixJobs = Array.filter (Env.isMatrixJobFor Fixtures.effect) jobs + allFailed = Array.all (\j -> not (V1.jobInfo j).success) matrixJobs + + unless (Array.null matrixJobs || allFailed) do + Assert.fail "Expected matrix jobs to fail after unpublish deleted the tarball" + + -- Critical: verify no bad writes occurred - the version should NOT be + -- back in published metadata (Map.update on missing key is a no-op) + Metadata metadataAfterMatrix <- Env.readMetadata Fixtures.effect.name + when (Map.member Fixtures.effect.version metadataAfterMatrix.published) do + Assert.fail "Matrix job incorrectly wrote to published metadata for unpublished version" diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs new file mode 100644 index 000000000..c4598313a --- /dev/null +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -0,0 +1,149 @@ +-- | End-to-end tests for the GitHubIssue workflow. +-- | Tests the full flow: parsing GitHub event → submitting to registry API → +-- | polling for completion → posting comments. 
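+-- |
+-- | Each test feeds in a minimal GitHub event payload; a sketch of the shape,
+-- | mirroring `githubEventCodec` below (`issueBody` stands for the fenced
+-- | JSON operation block placed in the issue body):
+-- |
+-- | ```purescript
+-- | { sender: { login: "some-user" }, issue: { number: 101, body: issueBody } }
+-- | ```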
+module Test.E2E.GitHubIssue (spec) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import JSON as JSON +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Node.Process as Process +import Registry.App.GitHubIssue as GitHubIssue +import Registry.Foreign.Tmp as Tmp +import Registry.Operation as Operation +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2E, E2ESpec) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "GitHubIssue end-to-end" do + Spec.it "handles publish via GitHub issue, posts comments, and closes issue on success" do + requests <- runWorkflow $ mkPublishEvent Fixtures.effectPublishData + assertComment "Job started" requests + assertComment "Job completed successfully" requests + assertClosed requests + + Spec.it "posts failure comment and leaves issue open when job fails" do + requests <- runWorkflow $ mkAuthenticatedEvent "random-user" Fixtures.failingTransferData + assertComment "Job started" requests + assertComment "Job failed" requests + assertNoComment "Job completed successfully" requests + assertOpen requests + + Spec.it "calls Teams API to verify trustee membership for authenticated operation" do + requests <- runWorkflow $ mkAuthenticatedEvent packagingTeamUser Fixtures.trusteeAuthenticatedData + assertComment "Job started" requests + assertTeamsApiCalled requests + + Spec.it "posts error comment when issue body contains invalid JSON" do + requests <- runWorkflow Fixtures.invalidJsonIssueEvent + assertComment "malformed" requests + assertOpen requests + +-- Constants +testIssueNumber :: Int +testIssueNumber = 101 + +packagingTeamUser :: String +packagingTeamUser = "packaging-team-user" + +-- Event builders +githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } +githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object + { sender: CJ.Record.object { login: CJ.string } + , issue: CJ.Record.object { number: CJ.int, body: CJ.string } + } + +mkPublishEvent :: Operation.PublishData -> String +mkPublishEvent publishData = + let + body = "```json\n" <> JSON.print (CJ.encode Operation.publishCodec publishData) <> "\n```" + in + JSON.print $ CJ.encode githubEventCodec + { sender: { login: packagingTeamUser }, issue: { number: testIssueNumber, body } } + +mkAuthenticatedEvent :: String -> Operation.AuthenticatedData -> String +mkAuthenticatedEvent username authData = + let + body = "```json\n" <> JSON.print (CJ.encode Operation.authenticatedCodec authData) <> "\n```" + in + JSON.print $ CJ.encode githubEventCodec + { sender: { login: username }, issue: { number: testIssueNumber, body } } + +-- Workflow runner +runWorkflow :: String -> E2E (Array WireMock.WireMockRequest) +runWorkflow eventJson = do + { stateDir } <- ask + + Client.getStatus + + tmpDir <- liftAff Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + liftAff $ FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + + originalCwd <- liftEffect Process.cwd + liftEffect $ Process.chdir stateDir + + envResult <- liftAff GitHubIssue.initializeGitHub + for_ envResult \env -> do + let testEnv = env { pollConfig = { maxAttempts: 60, interval: 
Milliseconds 500.0 }, logVerbosity = Quiet } + liftAff $ void $ GitHubIssue.runGitHubIssue testEnv + + liftEffect $ Process.chdir originalCwd + + WireMock.getGithubRequests + +-- Assertions (all operate on captured requests) +assertComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + unless (Array.any (bodyContains text) comments) do + WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests + +assertNoComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertNoComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + when (Array.any (bodyContains text) comments) do + WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests + +assertClosed :: Array WireMock.WireMockRequest -> E2E Unit +assertClosed requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + when (Array.null closes) do + WireMock.failWithRequests "Expected issue to be closed" requests + +assertOpen :: Array WireMock.WireMockRequest -> E2E Unit +assertOpen requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + unless (Array.null closes) do + WireMock.failWithRequests "Expected issue to remain open" requests + +assertTeamsApiCalled :: Array WireMock.WireMockRequest -> E2E Unit +assertTeamsApiCalled requests = do + let + teams = requests # Array.filter \r -> + r.method == "GET" && String.contains (String.Pattern "/orgs/purescript/teams/packaging/members") r.url + when (Array.null teams) do + WireMock.failWithRequests "Expected Teams API to be called" requests + +bodyContains :: String -> WireMock.WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs deleted file mode 100644 index 7bc030d76..000000000 --- a/app-e2e/src/Test/E2E/Main.purs +++ /dev/null @@ -1,22 +0,0 @@ -module Test.E2E.Main (main) where - -import Prelude - -import Data.Maybe (Maybe(..)) -import Data.Time.Duration (Milliseconds(..)) -import Effect (Effect) -import Test.E2E.Publish as Test.E2E.Publish -import Test.Spec as Spec -import Test.Spec.Reporter.Console (consoleReporter) -import Test.Spec.Runner.Node (runSpecAndExitProcess') -import Test.Spec.Runner.Node.Config as Cfg - -main :: Effect Unit -main = runSpecAndExitProcess' config [ consoleReporter ] do - Spec.describe "E2E Tests" do - Spec.describe "Publish" Test.E2E.Publish.spec - where - config = - { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } - , parseCLIOptions: false - } diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs deleted file mode 100644 index f7bd1d63e..000000000 --- a/app-e2e/src/Test/E2E/Publish.purs +++ /dev/null @@ -1,84 +0,0 @@ --- | End-to-end tests for the Publish API endpoint. --- | These tests exercise the actual registry server via HTTP requests. 
-module Test.E2E.Publish (spec) where - -import Prelude - -import Data.Array as Array -import Data.Either (Either(..)) -import Data.Maybe (Maybe(..), isJust) -import Data.String as String -import Effect.Aff (Aff) -import Effect.Class (liftEffect) -import Effect.Class.Console as Console -import Registry.API.V1 as V1 -import Registry.Location as Registry.Location -import Registry.Test.Assert as Assert -import Registry.Test.E2E.Client as Client -import Registry.Test.Utils as Utils -import Test.Spec (Spec) -import Test.Spec as Spec - --- | Get client config from environment -getConfig :: Aff Client.Config -getConfig = liftEffect Client.configFromEnv - -spec :: Spec Unit -spec = do - Spec.describe "Server connectivity" do - Spec.it "can reach the status endpoint" do - config <- getConfig - result <- Client.getStatus config - case result of - Left err -> Assert.fail $ "Failed to reach status endpoint: " <> Client.printClientError err - Right _ -> pure unit - - Spec.it "can list jobs (initially empty)" do - config <- getConfig - result <- Client.getJobs config - case result of - Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err - Right _ -> pure unit -- Jobs list may not be empty if other tests ran - - Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0" do - config <- getConfig - let - -- Location must match what's in the fixture metadata - effectLocation = Registry.Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - publishData = - { name: Utils.unsafePackageName "effect" - , location: Just effectLocation - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - } - - -- Submit publish request - publishResult <- Client.publish config publishData - case publishResult of - Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err - Right { jobId } -> do - -- Poll until job completes - job <- Client.pollJob config jobId - - -- If job failed, print logs for debugging - unless job.success do - Console.log "Job failed! Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs - Console.log $ String.joinWith "\n" logMessages - - -- Verify job completed successfully - when (not job.success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs - let errorMessages = map _.message errorLogs - Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - - Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" diff --git a/app-e2e/src/Test/E2E/Support/Client.purs b/app-e2e/src/Test/E2E/Support/Client.purs new file mode 100644 index 000000000..3c1c02e62 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Client.purs @@ -0,0 +1,211 @@ +-- | HTTP client for making requests to the registry server during E2E tests. +-- | This module provides typed helpers for interacting with the Registry API. +-- | +-- | All client functions operate in the E2E monad (ReaderT TestEnv Aff) and +-- | throw on HTTP or parse errors. Use the `try*` variants (e.g., `tryGetJob`) +-- | when testing error responses - they return `Either ClientError a` with +-- | typed HTTP status codes. +module Test.E2E.Support.Client + ( ClientError(..) + , JobFilter(..) 
+ , getJobs + , getJobsWith + , getJob + , tryGetJob + , getStatus + , publish + , unpublish + , transfer + , packageSets + , tryPackageSets + , pollJob + , printClientError + , clientErrorStatus + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Reader (ask) +import Data.Codec.JSON as CJ +import Data.DateTime (DateTime) +import Data.Int as Int +import Effect.Aff (delay) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Effect.Exception (Error) +import Effect.Exception as Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Registry.API.V1 (Job, JobId, LogLevel, Route(..)) +import Registry.API.V1 as V1 +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest, PublishData) +import Registry.Operation as Operation +import Routing.Duplex as Routing +import Test.E2E.Support.Types (E2E) + +-- | Errors that can occur during client operations +data ClientError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + | Timeout String + +printClientError :: ClientError -> String +printClientError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + Timeout msg -> "Timeout: " <> msg + +-- | Extract the HTTP status code from a ClientError, if it's an HttpError +clientErrorStatus :: ClientError -> Maybe Int +clientErrorStatus = case _ of + HttpError { status } -> Just status + _ -> Nothing + +-- | Convert a ClientError to an Effect Error for throwing +toError :: ClientError -> Error +toError = Exception.error <<< printClientError + +-- | Throw a ClientError as an Aff error +throw :: forall a. ClientError -> Aff a +throw = Aff.throwError <<< toError + +-- | Print a Route to its URL path using the route codec +printRoute :: Route -> String +printRoute = Routing.print V1.routes + +-- | Make a GET request and decode the response, returning Either on error. +tryGet :: forall a. CJ.Codec a -> String -> String -> Aff (Either ClientError a) +tryGet codec baseUrl path = do + response <- Fetch.fetch (baseUrl <> path) { method: GET } + body <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson codec body of + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: body } + Right a -> pure $ Right a + else + pure $ Left $ HttpError { status: response.status, body } + +-- | Make a GET request and decode the response. Throws on error. +get :: forall a. CJ.Codec a -> String -> String -> Aff a +get codec baseUrl path = tryGet codec baseUrl path >>= either throw pure + +-- | Make a POST request with JSON body, returning Either on error. +tryPost :: forall req res. 
CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff (Either ClientError res) +tryPost reqCodec resCodec baseUrl path reqBody = do + let jsonBody = JSON.print $ CJ.encode reqCodec reqBody + response <- Fetch.fetch (baseUrl <> path) + { method: POST + , headers: { "Content-Type": "application/json" } + , body: jsonBody + } + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson resCodec responseBody of + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody } + Right a -> pure $ Right a + else + pure $ Left $ HttpError { status: response.status, body: responseBody } + +-- | Make a POST request with JSON body and decode the response. Throws on error. +post :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res +post reqCodec resCodec baseUrl path reqBody = tryPost reqCodec resCodec baseUrl path reqBody >>= either throw pure + +data JobFilter = ActiveOnly | IncludeCompleted + +-- | Get the list of jobs with a configurable filter +getJobsWith :: JobFilter -> E2E (Array Job) +getJobsWith filter = do + { clientConfig } <- ask + let + includeCompleted = case filter of + ActiveOnly -> Just false + IncludeCompleted -> Just true + route = Jobs { since: Nothing, include_completed: includeCompleted } + liftAff $ get (CJ.array V1.jobCodec) clientConfig.baseUrl (printRoute route) + +-- | Get the list of jobs (includes completed jobs) +getJobs :: E2E (Array Job) +getJobs = getJobsWith IncludeCompleted + +-- | Get a specific job by ID, with optional log filtering +getJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E Job +getJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ get V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Try to get a specific job by ID, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 404). 
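+-- |
+-- | A usage sketch, mirroring the Jobs endpoint tests (`Assert` comes from the
+-- | test utilities, not this module):
+-- |
+-- | ```purescript
+-- | result <- tryGetJob (JobId "missing") Nothing Nothing
+-- | case result of
+-- |   Left err | clientErrorStatus err == Just 404 -> pure unit
+-- |   _ -> Assert.fail "expected HTTP 404"
+-- | ```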
+tryGetJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E (Either ClientError Job) +tryGetJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ tryGet V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Check if the server is healthy +getStatus :: E2E Unit +getStatus = do + { clientConfig } <- ask + liftAff do + response <- Fetch.fetch (clientConfig.baseUrl <> printRoute Status) { method: GET } + if response.status == 200 then + pure unit + else do + body <- response.text + throw $ HttpError { status: response.status, body } + +-- | Publish a package +publish :: PublishData -> E2E V1.JobCreatedResponse +publish reqBody = do + { clientConfig } <- ask + liftAff $ post Operation.publishCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Publish) reqBody + +-- | Unpublish a package (requires authentication) +unpublish :: AuthenticatedData -> E2E V1.JobCreatedResponse +unpublish authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Unpublish) authData + +-- | Transfer a package to a new location (requires authentication) +transfer :: AuthenticatedData -> E2E V1.JobCreatedResponse +transfer authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Transfer) authData + +-- | Submit a package set update request +packageSets :: PackageSetUpdateRequest -> E2E V1.JobCreatedResponse +packageSets request = do + { clientConfig } <- ask + liftAff $ post Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + +-- | Try to submit a package set update, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 400 for unauthorized restricted ops). +tryPackageSets :: PackageSetUpdateRequest -> E2E (Either ClientError V1.JobCreatedResponse) +tryPackageSets request = do + { clientConfig } <- ask + liftAff $ tryPost Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + +-- | Poll a job until it completes or times out. +-- | +-- | This is the recommended way to wait for job completion in E2E tests. +-- | Do not implement custom polling loops; use this function or the higher-level +-- | helpers in Test.E2E.Support.Env (pollJobOrFail, pollJobExpectFailure). +pollJob :: JobId -> E2E Job +pollJob jobId = do + { clientConfig } <- ask + go clientConfig 1 + where + go config attempt + | attempt > config.maxPollAttempts = + liftAff $ throw $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" + | otherwise = do + liftAff $ delay config.pollInterval + job <- getJob jobId (Just V1.Debug) Nothing + case (V1.jobInfo job).finishedAt of + Just _ -> pure job + Nothing -> do + when (attempt `mod` 10 == 0) do + Console.log $ "Polling job " <> unwrap jobId <> " (attempt " <> Int.toStringAs Int.decimal attempt <> ")" + go config (attempt + 1) diff --git a/app-e2e/src/Test/E2E/Support/Env.purs b/app-e2e/src/Test/E2E/Support/Env.purs new file mode 100644 index 000000000..06c8d47b9 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Env.purs @@ -0,0 +1,312 @@ +-- | Shared environment and helper functions for E2E tests. 
+-- | +-- | This module provides: +-- | - TestEnv type and E2E monad for test helpers (re-exported from Types) +-- | - Environment construction from env vars (mkTestEnv) +-- | - WireMock reset helpers for test isolation +-- | - Job polling with automatic failure handling +-- | - Git and metadata state inspection +-- | +-- | All functions operate in the E2E monad (ReaderT TestEnv Aff), so they +-- | have access to the shared test environment without explicit passing. +module Test.E2E.Support.Env + ( module ReExports + , mkTestEnv + , runE2E + , resetTestState + , resetDatabase + , resetGitFixtures + , resetLogs + , resetGitHubRequestCache + , pollJobOrFail + , pollJobExpectFailure + , signUnpublishOrFail + , signTransferOrFail + , gitStatus + , isCleanGitStatus + , waitForAllMatrixJobs + , isMatrixJobFor + , readMetadata + , readManifestIndexEntry + , manifestIndexEntryExists + , assertReposClean + , hasStorageUpload + , hasStorageDelete + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask, runReaderT) +import Data.Array as Array +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Node.ChildProcess.Types (Exit(..)) +import Node.FS.Aff as FS.Aff +import Node.Library.Execa as Execa +import Node.Path as Path +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Env as Env +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata) +import Registry.Metadata as Metadata +import Registry.Operation (AuthenticatedData, TransferData, UnpublishData) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Fixtures (PackageFixture) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.Types (ClientConfig, E2E, E2ESpec, TestEnv, WireMockConfig) as ReExports +import Test.E2E.Support.Types (E2E, TestEnv) +import Test.E2E.Support.WireMock as WireMock + +-- | Build the test environment from environment variables. +-- | Called once at startup in Main, before running any tests. +mkTestEnv :: Effect TestEnv +mkTestEnv = do + port <- Env.lookupRequired Env.serverPort + let + clientConfig = + { baseUrl: "http://localhost:" <> show port + , pollInterval: Milliseconds 2000.0 + , maxPollAttempts: 30 + } + + githubUrl <- Env.lookupRequired Env.githubApiUrl + storageUrl <- Env.lookupRequired Env.s3ApiUrl + let + githubWireMock = { baseUrl: githubUrl } + storageWireMock = { baseUrl: storageUrl } + + stateDir <- Env.lookupRequired Env.stateDir + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + + pure { clientConfig, githubWireMock, storageWireMock, stateDir, privateKey } + +-- | Run an E2E computation with a given environment. +-- | Primarily used by hoistSpec in Main. +runE2E :: forall a. TestEnv -> E2E a -> Aff a +runE2E env = flip runReaderT env + +-- | Reset all test state for isolation between tests. +-- | This is the recommended way to set up test isolation in Spec.before_. +-- | Resets: database, git fixtures, storage mock, and logs. 
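+-- |
+-- | A wiring sketch (`endpointSpec` is a placeholder for a real suite):
+-- |
+-- | ```purescript
+-- | Spec.before_ resetTestState do
+-- |   Spec.describe "Publish workflow" endpointSpec
+-- | ```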
+resetTestState :: E2E Unit +resetTestState = do + resetDatabase + resetGitFixtures + WireMock.clearStorageRequests + WireMock.resetStorageScenarios + WireMock.clearGithubRequests + resetGitHubRequestCache + resetLogs + +-- | Reset the database by clearing all job-related tables. +-- | +-- | This works because all job tables (publish_jobs, unpublish_jobs, transfer_jobs, +-- | matrix_jobs, package_set_jobs, logs) have foreign keys to job_info with +-- | ON DELETE CASCADE. See db/schema.sql for the schema definition. +resetDatabase :: E2E Unit +resetDatabase = do + { stateDir } <- ask + let dbPath = Path.concat [ stateDir, "db", "registry.sqlite3" ] + result <- liftAff $ _.getResult =<< Execa.execa "sqlite3" [ dbPath, "DELETE FROM job_info;" ] identity + case result.exit of + Normally 0 -> pure unit + _ -> liftAff $ Aff.throwError $ Aff.error $ "Failed to reset database: " <> result.stderr + +-- | Reset the git fixtures to restore original state. +-- | This restores metadata files modified by unpublish/transfer operations. +-- | +-- | Strategy: Reset the origin repos to their initial-fixture tag (created during +-- | setup), then delete the server's scratch git clones. The server will +-- | re-clone fresh copies on the next operation, ensuring a clean cache state. +resetGitFixtures :: E2E Unit +resetGitFixtures = do + { stateDir } <- ask + fixturesDir <- liftEffect $ Env.lookupRequired Env.repoFixturesDir + let + registryOrigin = Path.concat [ fixturesDir, "purescript", "registry" ] + registryIndexOrigin = Path.concat [ fixturesDir, "purescript", "registry-index" ] + scratchDir = Path.concat [ stateDir, "scratch" ] + resetOrigin registryOrigin + resetOrigin registryIndexOrigin + deleteGitClones scratchDir + where + resetOrigin dir = do + void $ gitOrFail [ "reset", "--hard", "initial-fixture" ] dir + void $ gitOrFail [ "clean", "-fd" ] dir + + deleteGitClones scratchDir = do + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry" ] + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry-index" ] + +-- | Clear server log files for test isolation. +-- | Deletes *.log files from the scratch/logs directory but preserves the directory itself. +resetLogs :: E2E Unit +resetLogs = do + { stateDir } <- ask + let logsDir = Path.concat [ stateDir, "scratch", "logs" ] + let cmd = "rm -f '" <> logsDir <> "'/*.log 2>/dev/null || true" + result <- liftAff $ _.getResult =<< Execa.execa "sh" [ "-c", cmd ] identity + case result.exit of + Normally _ -> pure unit + _ -> pure unit + +-- | Clear cached GitHub API requests from the scratch cache directory. +-- | This ensures each test makes fresh API calls rather than using cached responses. +resetGitHubRequestCache :: E2E Unit +resetGitHubRequestCache = do + { stateDir } <- ask + let cacheDir = Path.concat [ stateDir, "scratch", ".cache" ] + liftAff $ Aff.attempt (FS.Aff.readdir cacheDir) >>= case _ of + Left _ -> pure unit + Right files -> for_ files \file -> + when (String.Pattern "Request__" `String.contains` file) do + FS.Extra.remove (Path.concat [ cacheDir, file ]) + +-- | Poll a job until completion, failing the test if the job fails. +-- | Prints error logs on failure for debugging. +pollJobOrFail :: V1.JobId -> E2E V1.Job +pollJobOrFail jobId = do + job <- Client.pollJob jobId + unless (V1.jobInfo job).success do + Console.log "Job failed! 
Logs:" + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs + Console.log $ String.joinWith "\n" logMessages + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs + let errorMessages = map _.message errorLogs + Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages + pure job + +-- | Poll a job until completion, expecting it to fail. +-- | Returns the job for further assertions on error messages. +pollJobExpectFailure :: V1.JobId -> E2E V1.Job +pollJobExpectFailure jobId = do + job <- Client.pollJob jobId + when (V1.jobInfo job).success do + Assert.fail "Expected job to fail, but it succeeded" + pure job + +-- | Sign an unpublish operation using the pacchettibotti private key from environment. +signUnpublishOrFail :: UnpublishData -> E2E AuthenticatedData +signUnpublishOrFail unpublishData = do + { privateKey } <- ask + case Fixtures.signUnpublish privateKey unpublishData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign unpublish: " <> err + Right authData -> pure authData + +-- | Sign a transfer operation using the pacchettibotti private key from environment. +signTransferOrFail :: TransferData -> E2E AuthenticatedData +signTransferOrFail transferData = do + { privateKey } <- ask + case Fixtures.signTransfer privateKey transferData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign transfer: " <> err + Right authData -> pure authData + +-- | Run git status --porcelain in a directory and return the output. +gitStatus :: String -> E2E String +gitStatus cwd = gitOrFail [ "status", "--porcelain" ] cwd + +-- | Run a git command, throwing an exception on failure. +gitOrFail :: Array String -> FilePath -> E2E String +gitOrFail args cwd = liftAff $ Git.gitCLI args (Just cwd) >>= case _ of + Left err -> Aff.throwError $ Aff.error err + Right out -> pure out + +-- | Check if git status output indicates a clean working tree (no changes). +isCleanGitStatus :: String -> Boolean +isCleanGitStatus status = String.null status + +-- | Wait for all matrix jobs for a package to complete. +waitForAllMatrixJobs :: PackageFixture -> E2E Unit +waitForAllMatrixJobs pkg = go 120 0 + where + go :: Int -> Int -> E2E Unit + go 0 _ = liftAff $ Aff.throwError $ Aff.error "Timed out waiting for matrix jobs to complete" + go attempts lastCount = do + jobs <- Client.getJobs + let + matrixJobs = Array.filter (isMatrixJobFor pkg) jobs + totalCount = Array.length matrixJobs + finishedCount = Array.length $ Array.filter (\j -> isJust (V1.jobInfo j).finishedAt) matrixJobs + allFinished = finishedCount == totalCount + stillCreating = totalCount > lastCount + if totalCount >= 1 && allFinished && not stillCreating then + pure unit + else do + when (attempts `mod` 10 == 0) do + Console.log $ "Waiting for matrix jobs: " <> show finishedCount <> "/" <> show totalCount <> " finished" + liftAff $ Aff.delay (Milliseconds 1000.0) + go (attempts - 1) totalCount + +-- | Check if a job is a matrix job for the given package. +isMatrixJobFor :: PackageFixture -> Job -> Boolean +isMatrixJobFor pkg = case _ of + MatrixJob { packageName, packageVersion } -> + packageName == pkg.name && packageVersion == pkg.version + _ -> false + +-- | Read and parse the metadata file for a package from the server's scratch clone. 
+readMetadata :: PackageName -> E2E Metadata +readMetadata packageName = do + { stateDir } <- ask + let metadataPath = Path.concat [ stateDir, "scratch", "registry", "metadata", PackageName.print packageName <> ".json" ] + liftAff (readJsonFile Metadata.codec metadataPath) >>= case _ of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to read metadata for " <> PackageName.print packageName <> ": " <> err + Right metadata -> pure metadata + +-- | Read and parse the manifest index entry for a package from the server's scratch clone. +readManifestIndexEntry :: PackageName -> E2E (Array Manifest) +readManifestIndexEntry packageName = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath packageName >>= case _ of + Left err -> Aff.throwError $ Aff.error $ "Failed to read manifest index for " <> PackageName.print packageName <> ": " <> err + Right manifests -> pure $ Array.fromFoldable manifests + +-- | Check if a specific package version exists in the manifest index. +manifestIndexEntryExists :: PackageFixture -> E2E Boolean +manifestIndexEntryExists pkg = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath pkg.name >>= case _ of + Left _ -> pure false + Right manifests -> pure $ Array.any (\(Manifest m) -> m.version == pkg.version) $ Array.fromFoldable manifests + +-- | Assert that both git repos (registry and registry-index) have no uncommitted changes. +assertReposClean :: E2E Unit +assertReposClean = do + { stateDir } <- ask + let scratchRegistry = Path.concat [ stateDir, "scratch", "registry" ] + let scratchRegistryIndex = Path.concat [ stateDir, "scratch", "registry-index" ] + registryStatus <- gitStatus scratchRegistry + registryIndexStatus <- gitStatus scratchRegistryIndex + unless (isCleanGitStatus registryStatus) do + Assert.fail $ "registry repo has uncommitted changes:\n" <> registryStatus + unless (isCleanGitStatus registryIndexStatus) do + Assert.fail $ "registry-index repo has uncommitted changes:\n" <> registryIndexStatus + +-- | Check if a storage upload (PUT) occurred for a specific package. +hasStorageUpload :: PackageFixture -> E2E Boolean +hasStorageUpload pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + putRequests = WireMock.filterByMethod "PUT" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) putRequests + +-- | Check if a storage delete (DELETE) occurred for a specific package. +hasStorageDelete :: PackageFixture -> E2E Boolean +hasStorageDelete pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + deleteRequests = WireMock.filterByMethod "DELETE" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) deleteRequests diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs new file mode 100644 index 000000000..7fe0b556a --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -0,0 +1,286 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. 
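+-- |
+-- | The fixture packages defined here (effect, console, prelude) line up with
+-- | the source trees under app/fixtures/github-packages and the metadata under
+-- | app/fixtures/registry/metadata, so operations built from them match what
+-- | the mocked services serve.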
+module Test.E2E.Support.Fixtures + ( PackageFixture + , effect + , console + , prelude + , effectPublishData + , effectPublishDataDifferentLocation + , consolePublishData + , failingTransferData + , nonexistentTransferData + , trusteeAuthenticatedData + , effectUnpublishData + , effectTransferData + , nonexistentUnpublishData + , preludeUnpublishData + , signUnpublish + , signTransfer + , packageSetAddRequest + , packageSetCompilerChangeRequest + , packageSetRemoveRequest + , signPackageSet + , invalidJsonIssueEvent + ) where + +import Registry.App.Prelude + +import Data.Codec.JSON as CJ +import Data.Map as Map +import JSON as JSON +import Registry.Location (Location(..)) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PackageSetUpdateRequest, TransferData, UnpublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.SSH as SSH +import Registry.Test.Utils as Utils +import Registry.Version (Version) + +type PackageFixture = { name :: PackageName, version :: Version } + +-- | effect@4.0.0 fixture package +effect :: PackageFixture +effect = { name: Utils.unsafePackageName "effect", version: Utils.unsafeVersion "4.0.0" } + +-- | console@6.1.0 fixture package +console :: PackageFixture +console = { name: Utils.unsafePackageName "console", version: Utils.unsafeVersion "6.1.0" } + +-- | prelude@6.0.1 fixture package +prelude :: PackageFixture +prelude = { name: Utils.unsafePackageName "prelude", version: Utils.unsafeVersion "6.0.1" } + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: Operation.PublishData +effectPublishData = + { name: effect.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: effect.version + } + +-- | Publish data for effect@99.0.0 with a DIFFERENT location. +-- | Uses a non-existent version to avoid duplicate job detection, +-- | but still targets an existing package to test location conflicts. +effectPublishDataDifferentLocation :: Operation.PublishData +effectPublishDataDifferentLocation = + effectPublishData + { location = Just $ GitHub + { owner: "someone-else" + , repo: "purescript-effect" + , subdir: Nothing + } + , version = Utils.unsafeVersion "99.0.0" + , ref = "v99.0.0" + } + +-- | Publish data for console@6.1.0, used for concurrency tests. +-- | Console depends on effect ^4.0.0 and prelude ^6.0.0. +-- | This matches the fixtures in app/fixtures/github-packages/console-6.1.0 +consolePublishData :: Operation.PublishData +consolePublishData = + { name: console.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-console" + , subdir: Nothing + } + , ref: "v6.1.0" + , compiler: Utils.unsafeVersion "0.15.10" + , resolutions: Nothing + , version: console.version + } + +-- | Unpublish data for effect@4.0.0, used for publish-then-unpublish tests. +effectUnpublishData :: UnpublishData +effectUnpublishData = + { name: effect.name + , version: effect.version + , reason: "Testing unpublish flow" + } + +-- | Transfer data for effect, used for transfer tests. +-- | Transfers effect to a different GitHub owner. 
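+-- | Pair this with `signTransfer` (or `Env.signTransferOrFail`) to produce the
+-- | `AuthenticatedData` the transfer endpoint expects.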
+effectTransferData :: TransferData +effectTransferData = + { name: effect.name + , newLocation: GitHub + { owner: "new-owner" + , repo: "purescript-effect" + , subdir: Nothing + } + } + +-- | Unpublish data for a nonexistent package. +-- | Used to test error handling when unpublishing an unknown package. +nonexistentUnpublishData :: UnpublishData +nonexistentUnpublishData = + { name: Utils.unsafePackageName "nonexistent-package" + , version: Utils.unsafeVersion "1.0.0" + , reason: "Testing error handling for unknown package" + } + +-- | Unpublish data for prelude@6.0.1. +-- | This package was published long ago (in fixtures), so it should fail +-- | the 48-hour time limit check. +preludeUnpublishData :: UnpublishData +preludeUnpublishData = + { name: prelude.name + , version: prelude.version + , reason: "Testing 48-hour limit enforcement" + } + +-- | Sign an unpublish operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signUnpublish :: String -> UnpublishData -> Either String AuthenticatedData +signUnpublish privateKey unpublishData = do + let rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishData + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private rawPayload + pure + { payload: Unpublish unpublishData + , rawPayload + , signature + } + +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = do + let + transferPayload :: TransferData + transferPayload = + { name: prelude.name + , newLocation: GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } + } + + rawPayload :: String + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + + { payload: Transfer transferPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. +trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = do + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: prelude.name + , version: prelude.version + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + + { payload: Unpublish unpublishPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Transfer data for a nonexistent package. +-- | Used to test error handling when transferring an unknown package. +-- | Job should fail with "has not been published before" error. +nonexistentTransferData :: TransferData +nonexistentTransferData = + { name: Utils.unsafePackageName "nonexistent-package" + , newLocation: GitHub + { owner: "someone" + , repo: "purescript-nonexistent" + , subdir: Nothing + } + } + +-- | Sign a transfer operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. 
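+-- |
+-- | A sketch of the intended flow (the key is sourced from the environment, as
+-- | in Env.signTransferOrFail):
+-- |
+-- | ```purescript
+-- | case signTransfer privateKey effectTransferData of
+-- |   Left err -> Aff.throwError $ Aff.error $ "Failed to sign transfer: " <> err
+-- |   Right authData -> pure authData -- submit via the test client
+-- | ```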
+signTransfer :: String -> TransferData -> Either String AuthenticatedData +signTransfer privateKey transferData = do + let rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferData + private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + let signature = SSH.sign private rawPayload + pure + { payload: Transfer transferData + , rawPayload + , signature + } + +-- | type-equality@4.0.1 fixture package (exists in registry-index but not in initial package set) +typeEquality :: PackageFixture +typeEquality = { name: Utils.unsafePackageName "type-equality", version: Utils.unsafeVersion "4.0.1" } + +-- | Package set request to add type-equality@4.0.1. +-- | This is an unauthenticated request (no signature) since adding packages +-- | doesn't require trustee authentication. +packageSetAddRequest :: PackageSetUpdateRequest +packageSetAddRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton typeEquality.name (Just typeEquality.version) + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to change the compiler version. +-- | This requires authentication (pacchettibotti signature) since changing +-- | the compiler is a restricted operation. +packageSetCompilerChangeRequest :: PackageSetUpdateRequest +packageSetCompilerChangeRequest = + let + payload = PackageSetUpdate + { compiler: Just (Utils.unsafeVersion "0.15.11") + , packages: Map.empty + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to remove a package. +-- | This requires authentication (pacchettibotti signature) since removing +-- | packages is a restricted operation. +packageSetRemoveRequest :: PackageSetUpdateRequest +packageSetRemoveRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton effect.name Nothing + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Sign a package set update request using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signPackageSet :: String -> PackageSetUpdateRequest -> Either String PackageSetUpdateRequest +signPackageSet privateKey request = do + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private request.rawPayload + pure request { signature = Just signature } + +-- | GitHub issue event with invalid JSON in the body. +-- | Used to test that malformed JSON is handled gracefully with an error comment. +-- | Note: The inner JSON has a trailing comma (`"v1.0.0",}`) which is intentionally +-- | malformed to trigger a parse error. +invalidJsonIssueEvent :: String +invalidJsonIssueEvent = + """{"sender":{"login":"packaging-team-user"},"issue":{"number":101,"body":"```json\n{\"name\": \"effect\", \"ref\": \"v1.0.0\",}\n```"}}""" diff --git a/app-e2e/src/Test/E2E/Support/Types.purs b/app-e2e/src/Test/E2E/Support/Types.purs new file mode 100644 index 000000000..2e4429057 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Types.purs @@ -0,0 +1,48 @@ +-- | Core types for E2E tests. +-- | +-- | This module defines the shared environment and monad types used by all +-- | E2E test helpers. 
It's kept separate to avoid circular dependencies +-- | between Env, Client, and WireMock modules. +module Test.E2E.Support.Types + ( TestEnv + , ClientConfig + , WireMockConfig + , E2E + , E2ESpec + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ReaderT) +import Effect.Aff (Milliseconds) +import Test.Spec (SpecT) + +-- | Configuration for the E2E test client +type ClientConfig = + { baseUrl :: String + , pollInterval :: Milliseconds + , maxPollAttempts :: Int + } + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | The shared test environment available to all E2E helpers. +-- | Constructed once at startup from environment variables. +type TestEnv = + { clientConfig :: ClientConfig + , githubWireMock :: WireMockConfig + , storageWireMock :: WireMockConfig + , stateDir :: String + , privateKey :: String + } + +-- | The base monad for E2E test helpers. +-- | All Client, Env, and WireMock functions operate in this monad. +type E2E = ReaderT TestEnv Aff + +-- | The spec type for E2E tests. +-- | Test modules export `spec :: E2ESpec` instead of `spec :: Spec Unit`. +type E2ESpec = SpecT E2E Unit Identity Unit diff --git a/app-e2e/src/Test/E2E/Support/WireMock.purs b/app-e2e/src/Test/E2E/Support/WireMock.purs new file mode 100644 index 000000000..4e3789fca --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/WireMock.purs @@ -0,0 +1,173 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +-- | +-- | Also provides helpers for managing WireMock scenarios (stateful mocking). +-- | Scenarios allow responses to change based on state transitions - e.g., a +-- | package tarball returns 404 until it's been "uploaded" via PUT, after which +-- | it returns 200. +module Test.E2E.Support.WireMock + ( WireMockRequest + , WireMockError(..) 
+ , getGithubRequests + , getStorageRequests + , clearGithubRequests + , clearStorageRequests + , resetStorageScenarios + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Reader (ask) +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Int as Int +import Data.String as String +import Effect.Aff as Aff +import Effect.Exception (Error) +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Test.E2E.Support.Types (E2E) + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. 
CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from a WireMock instance +getRequestsFrom :: String -> Aff (Either WireMockError (Array WireMockRequest)) +getRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from a WireMock instance +clearRequestsFrom :: String -> Aff (Either WireMockError Unit) +clearRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Reset all scenarios to initial state on a WireMock instance +resetScenariosOn :: String -> Aff (Either WireMockError Unit) +resetScenariosOn baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/scenarios/reset") { method: POST } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Helper to run a WireMock operation and throw on error +orFail :: forall a. String -> Either WireMockError a -> Aff a +orFail context = case _ of + Left err -> Aff.throwError $ Aff.error $ context <> ": " <> printWireMockError err + Right a -> pure a + +-- | Get captured requests from the GitHub WireMock. +getGithubRequests :: E2E (Array WireMockRequest) +getGithubRequests = do + { githubWireMock } <- ask + liftAff $ getRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to get GitHub WireMock requests" + +-- | Get captured requests from the storage WireMock (S3, Pursuit). +getStorageRequests :: E2E (Array WireMockRequest) +getStorageRequests = do + { storageWireMock } <- ask + liftAff $ getRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to get storage WireMock requests" + +-- | Clear the GitHub WireMock request journal. +clearGithubRequests :: E2E Unit +clearGithubRequests = do + { githubWireMock } <- ask + liftAff $ clearRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to clear GitHub WireMock requests" + +-- | Clear the storage WireMock request journal. +clearStorageRequests :: E2E Unit +clearStorageRequests = do + { storageWireMock } <- ask + liftAff $ clearRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to clear storage WireMock requests" + +-- | Reset all storage WireMock scenarios to their initial state. 
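+-- |
+-- | Under the hood this POSTs to WireMock's /__admin/scenarios/reset endpoint,
+-- | so any stateful stub (e.g. a tarball mapping that flips from 404 to 200
+-- | after a PUT) returns to its initial "Started" state before the next test.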
+resetStorageScenarios :: E2E Unit +resetStorageScenarios = do + { storageWireMock } <- ask + liftAff $ resetScenariosOn storageWireMock.baseUrl >>= orFail "Failed to reset storage WireMock scenarios" + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests = String.joinWith "\n" <<< map formatRequest + where + formatRequest req = req.method <> " " <> req.url <> case req.body of + Nothing -> "" + Just body -> "\n Body: " <> body + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ String.joinWith "\n" [ msg, "\nCaptured requests:", formatRequests requests ] diff --git a/app-e2e/src/Test/E2E/Workflow.purs b/app-e2e/src/Test/E2E/Workflow.purs new file mode 100644 index 000000000..a03be0562 --- /dev/null +++ b/app-e2e/src/Test/E2E/Workflow.purs @@ -0,0 +1,88 @@ +-- | End-to-end tests for multi-operation workflows. +-- | +-- | These tests verify complex scenarios involving multiple operations, +-- | specifically dependency state validation across publish/unpublish sequences. +module Test.E2E.Workflow (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Dependency and unpublish interactions" do + Spec.it "publishing a package fails when its dependency was unpublished" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + _ <- Env.pollJobOrFail unpublishJobId + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + Assert.fail "Expected tarball delete from S3 for effect@4.0.0" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + when manifestExists do + Assert.fail "Expected effect@4.0.0 to be removed from manifest index after unpublish" + + WireMock.clearStorageRequests + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + consoleJob <- Env.pollJobExpectFailure consoleJobId + + let + logs = (V1.jobInfo consoleJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "Could not produce valid dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected dependency resolution error, got:\n" <> String.joinWith "\n" logMessages + + consoleUploadOccurred <- Env.hasStorageUpload Fixtures.console + when consoleUploadOccurred do + Assert.fail "Expected no tarball upload 
for console@6.1.0 after failed publish" + + Spec.it "unpublishing a package fails when dependents exist in manifest index" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + _ <- Env.pollJobOrFail consoleJobId + + WireMock.clearStorageRequests + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobExpectFailure unpublishJobId + + let + logs = (V1.jobInfo unpublishJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "unsatisfied dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected unsatisfied dependencies error, got:\n" <> + String.joinWith "\n" logMessages + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + when deleteOccurred do + Assert.fail "Expected no tarball delete for effect@4.0.0 after failed unpublish" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + unless manifestExists do + Assert.fail "Expected effect@4.0.0 to still exist in manifest index after failed unpublish" + + Metadata effectMeta <- Env.readMetadata Fixtures.effect.name + unless (isJust $ Map.lookup Fixtures.effect.version effectMeta.published) do + Assert.fail "Expected effect@4.0.0 to still be in published metadata after failed unpublish" diff --git a/app-e2e/src/Test/Main.purs b/app-e2e/src/Test/Main.purs new file mode 100644 index 000000000..a5b18d43c --- /dev/null +++ b/app-e2e/src/Test/Main.purs @@ -0,0 +1,40 @@ +module Test.E2E.Main (main) where + +import Registry.App.Prelude + +import Data.Time.Duration (Milliseconds(..)) +import Test.E2E.Endpoint.Jobs as Jobs +import Test.E2E.Endpoint.PackageSets as PackageSets +import Test.E2E.Endpoint.Publish as Publish +import Test.E2E.Endpoint.Transfer as Transfer +import Test.E2E.Endpoint.Unpublish as Unpublish +import Test.E2E.GitHubIssue as GitHubIssue +import Test.E2E.Support.Env (assertReposClean, mkTestEnv, resetTestState, runE2E) +import Test.E2E.Workflow as Workflow +import Test.Spec (hoistSpec) +import Test.Spec as Spec +import Test.Spec.Reporter.Console (consoleReporter) +import Test.Spec.Runner.Node (runSpecAndExitProcess') +import Test.Spec.Runner.Node.Config as Cfg + +main :: Effect Unit +main = do + env <- mkTestEnv + runSpecAndExitProcess' config [ consoleReporter ] $ hoistE2E env do + Spec.before_ resetTestState $ Spec.after_ assertReposClean $ Spec.describe "E2E Tests" do + Spec.describe "Endpoints" do + Spec.describe "Publish" Publish.spec + Spec.describe "Jobs" Jobs.spec + Spec.describe "Unpublish" Unpublish.spec + Spec.describe "Transfer" Transfer.spec + Spec.describe "PackageSets" PackageSets.spec + + Spec.describe "Workflows" do + Spec.describe "GitHubIssue" GitHubIssue.spec + Spec.describe "Multi-operation" Workflow.spec + where + hoistE2E env = hoistSpec identity (\_ m -> runE2E env m) + config = + { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 60_000.0 } + , parseCLIOptions: false + } diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": 
\"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/github-packages/console-6.1.0/LICENSE b/app/fixtures/github-packages/console-6.1.0/LICENSE new file mode 100644 index 000000000..311379c1e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/LICENSE @@ -0,0 +1,26 @@ +Copyright 2018 PureScript + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/app/fixtures/github-packages/console-6.1.0/bower.json b/app/fixtures/github-packages/console-6.1.0/bower.json new file mode 100644 index 000000000..da93c7f6e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/bower.json @@ -0,0 +1,22 @@ +{ + "name": "purescript-console", + "homepage": "https://github.com/purescript/purescript-console", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-console.git" + }, + "ignore": [ + "**/.*", + "bower_components", + "node_modules", + "output", + "test", + "bower.json", + "package.json" + ], + "dependencies": { + "purescript-effect": "^4.0.0", + "purescript-prelude": "^6.0.0" + } +} diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js new file mode 100644 index 000000000..432a4241b --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js @@ -0,0 +1,9 @@ +export const log = s => () => console.log(s); +export const warn = s => () => console.warn(s); +export const error = s => () => console.error(s); +export const info = s => () => console.info(s); +export const debug = s => () => console.debug(s); +export const time = s => () => console.time(s); +export const timeLog = s => () => console.timeLog(s); +export const timeEnd = s => () => console.timeEnd(s); +export const clear = () => console.clear(); diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs new file mode 100644 index 000000000..364ee2b1c --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs @@ -0,0 +1,46 @@ +-- | This module provides functions for outputting strings to the console. +module Effect.Console + ( log + , logShow + , warn + , warnShow + , error + , errorShow + , info + , infoShow + , debug + , debugShow + , time + , timeLog + , timeEnd + , clear + ) where + +import Prelude + +import Effect (Effect) + +foreign import log :: String -> Effect Unit +foreign import warn :: String -> Effect Unit +foreign import error :: String -> Effect Unit +foreign import info :: String -> Effect Unit +foreign import debug :: String -> Effect Unit +foreign import time :: String -> Effect Unit +foreign import timeLog :: String -> Effect Unit +foreign import timeEnd :: String -> Effect Unit +foreign import clear :: Effect Unit + +logShow :: forall a. Show a => a -> Effect Unit +logShow = log <<< show + +warnShow :: forall a. Show a => a -> Effect Unit +warnShow = warn <<< show + +errorShow :: forall a. Show a => a -> Effect Unit +errorShow = error <<< show + +infoShow :: forall a. Show a => a -> Effect Unit +infoShow = info <<< show + +debugShow :: forall a. 
Show a => a -> Effect Unit
+debugShow = debug <<< show
diff --git a/app/fixtures/package-sets/latest-compatible-sets.json b/app/fixtures/package-sets/latest-compatible-sets.json
index ceba8dd7a..5cdbbb9c2 100644
--- a/app/fixtures/package-sets/latest-compatible-sets.json
+++ b/app/fixtures/package-sets/latest-compatible-sets.json
@@ -1,3 +1,3 @@
 {
-  "0.15.9": "psc-0.15.9-20230105"
+  "0.15.10": "psc-0.15.10-20230105"
 }
diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz
index 2ef880dff4d0b29735276e267a483f25607e3387..c06e9b2767ae864e73f4bc4f1d9ef76583387aa1 100644
GIT binary patch
literal 31321
[base85-encoded binary data for the updated prelude-6.0.2.tar.gz fixture omitted]

literal 31025
[base85-encoded binary data for the previous prelude-6.0.2.tar.gz fixture omitted]
z2_Jq4&_?R|f(OudMMwn?q8ErEwgd7^10J}2<1%|nwmn(zpFYKcxT)_~P1Y4v_g+XL zAQ2y4dLC{9N-2b3Fe+|%h|Fna5ivbVtkuFK-P<0kR5tQ>fH6{_YK)Ddfp}|!TtnPu zK+2ID+R&%m_pw8qfRk#Lv<=aO);4ApH4g8sY^wsjRXFDE*e0A)a*tG0{98aH9@+z< ziSQ;E4}nM1V!-9hpx`OXU0_*LDn<~L#PGBmiP$a>1P(8}_|6zTL|2A{QMz>F0-WP@v(Hfa>5r16_vB;vj827A>_Jzgu`R|eK zORNJ%+IGnRdu#+2)1TU|fPYjUKo*n`HoC$FP)-mB%wjD8+C5v+)bHng`Gbq=muIx0a9)R~LdYx~?D{07gO&oNZW>fEUJ8Q`4k& z1O%0&Vbvq*A~IW-7hx`8SwRl5I5B;ywZGr*Yr#GuP1J7uW_|4NU-J&elZa!_)0FD1g?|-nUx-T4|;XV6k6}=-1=%HX@FuZ6w|l*a?90s6!6Gf69jORouTt#-Fm7XCO1H-M+!|Rur zJe-!_aso;&Kq%ZSNo$yu1h<%N7lu(&j6lpU_vIABw;2RIP(vrxM>yU3nR@;B1rX>F za_Ja@*ovRz5czElo46{Z|0z`}z!wC=gG&Vp0 zN6Eqv#+*)tBZ*Co94Cg2;++XTwq0AzS+ZN+j5*-bIb}k#oR{e){Tqn zOhcD;!II*~qIB`RfVr(0m6>Zf4uT@}F5q4;vQP=!{gE}W9Xni@ z?2UJej%`KSHsQ2c4loX4b6pX32~c&A^bz$IbGVLV;q^vQ-~U_5tOmk*k%k;sN`;@y_;2UwQnGBkjBU zf3<_H|F@BAN&mmXtya%}kmKL=@n7|v|5vL%+4_H*xKgKlmhW4OGi#PQ;TtbG-GjpV z=^n03kYqaCy$L7J;&kaL9W$5TvIa6Ji6da`1ki@;{b%s?oH_9I(@)7so~Zub#&f(e z@`PJi{d4-@7t8%AkMQnwEt&rvK6bz%CGd6cU7$7dzh14@GxPtbesr{*|C_iZ0K`r= zwN8h5RFckXI<*p~+G9IT^F%FUy z1hlHQZxD#?3HE3@{6sLyN}U+eyDRRH2l0MT@CASxFPsFn z8y|Sd$F>`)TP3=B?k=VS&oOcuTY`=8yGahQWdqTCwK)rc?i|a>Eiup}@sg#Ki`1W! zkB+b7qXV@kJX8D$0v@QBZcH-YaStV~fS{k!FVpoayLgp$X_p0eNs_vBz=96QNV&}Y zu+M^p3%7y&FFq19GD;Ums@NgoT$bZTL#&X##=tZ(Ll7IZLaT8rUGYe84_S|Cz&n~JbVjz|w!ZZ<95){&@if%f!BLhsv z;|dGI3NECMjoZMmuKiZQUp<0P*MpqWk3W{-^N&BuX0=(ZTJ z$*8mdkYO-hWRS6A-BSFYH5%z>9&%AktG$n<_0@lmwLT8fn2gxe9*q<f}IOx+3*uz7{4gtMaOd%>dWdOj=2R zwDK}SG7<=v?9QnfhYeDY6aE}nrEu1WUkbxA@V{KdRz(;=_A=J-Zhf|{Iv9vO0o@4@ z(VgO09udZve*8h4wdTgG;v7`yoJnFYkLDzw(!qAoP>%)HSSpxbs19g^&>rbdfq$Hf zMfW+UpvZur>S8Si(Ez`4qj7oBKjK8hYm~Nh;0h||wv+D+$p9LM*+{rlUPA|42ZYF8 zE8B%(b!3P^4JH-9;L5Q#i~=cK&jR{VPAs^pom4hyD?S#~5#-6fAHRUFLW(%(GxBIm z${Lp~p4`N>1pjp>Fm4_w{A(TlJ3Khb{{H7-wYvTNpN(8Pj>AsZP@jCPAY{1JP#va= z+5C!@g1Ag3u@L5U`P9Dgy=cZtrROic2#3Yni(d%KDE(*824jPt6=n^B$RFilo8)sI z4RU#T?3ND4FU8^IU$%Kk01p+n6xL0=KyI{w+U(&(+qju?A49`n`SMf{&Lo;z?w$z9 zaF8_ZJN0uLe_9~W#D;>1z*Ou-VGIo7N)flWz@RiBMzJ~lkNrx51Z4(A0b&*p*p*gXV`t(px8`BHw%5Zj7TRs-l7dZtK=*TZ7U?Fs+ zyOPMT%0<*D4~YrelcpHMSV2dD*MbhFtZYw^Wl**0AtQh=7nxcy?j!>VCWHAfbA;_n z~4VEA<5BIKS^zPI@q+FMyz-H)9)(aQDeTB)m|aiUz8dnq*Mp*)yYE0h=thw2+^7fT{1 zX=7I!lXQ#4#$EQLN?Wt!QHwUQ3d$x66c(2Y*~%JfQNr@HaIGwzEI~AJ;}@}(*GQI` zbYP|iE9kgJE>#+_?+x1@>{{agH31Pu2qNda1MR(^E4ViP<0$+4A5ZH0+xLGqa_RnG zrqeZ7FrSk?0VqsFSI<`&g>+V*2w(~2qv3rVdJ@+x=Uhu)tD~MPxSb86Mc_$#F0ZhE zao?-3(k`9_{PchFC?KP!zHGTIQ5?mz3k(OX3hkLA|HelA($p{^P<#wPf2m^WAwaySP^l|96?Cw*EWfb>Qd6*-??&@gJu1q#Kp3k&4vF zR7o7DY%P(qw4!n$v(?ha1er?WoFH3;4hph~`ICZS(ezk#R8Tmd@@EA(&V`2s8SXJ6 z$kqP($g)iPsYRcjmdC{ZKywc=z`|4i27dfB$bI*XsU1v4h{fdHi?na6cFSU3;>P z|Jlf;19s8rCU$xTk6)@k457o|*6#`?w|=jdR|fNl(42BU1YFdI+p0nnLI`@dHi*xP zaFxe69vQK!L)xOZIH?66ac70@7VP*~+?76J7|KGv09l%f7m#7SrrhgtTyWrpp*Kx} zL0x`J=QfmyAuQs?FvQ<7V=6yh()sUH0!V|JXxAInC2EybEvo)oQGxKkyqKQz znluhZNkm3vI;vRIeg9b2(*y9 zNx#aQL86h782N*8S}d5Wt2oNpoMR_vo+LgVXIg?D?z)BuS3gS_eS3{8LyuoQhaom} z^kuDN=*F%niNS@U~KWh62``h@Bja&umf4bA} zi3IMt%B$IrKlg@zcr?A2v~J5TlTt>yq!C_ha@4&n`q1Rwze2OGP<2I4a+W6^+ z$qCL~#qKy?f$?!?Oxqu-4a`uO4EeK$7-o=aD+tU`pvQqls>zYqC%-B+jFg`_u~fAb zsU<2ffFvj4GFbH^NwQjg3^!;P>J1d8`&`&E(j+HVPnY7!0xR5?-ljIsXL_3XzbAMH zbaT#+o@MeM-~YpD@2I!o|JLyY_&?d{Uzqcg_m9hXK1D8ad+HbRdFq$o@a~5=JQaa> zJM|Nekv?0eJ_AFie>AajDwe~<>C{2?%}&-+g*CElW+IzeYbp)cG$-XM zX4#HvsFBvw4%Mj~7R>wt%$NFo4YtcHSu$L@$~LQI#w8}pnIMY2QsKZ@>0#M62TKDp zY=ZDEN46;5 z<(O&sE{BPzyUXF^e3xVAukUhX7pVQyyT)-kLtW!=@$_<~8$(VS?-EB7gzUL>dE*qK zFK>+L^`#fG&2yutdH;W!jq}gG|MyOMCma2bFZ2ZVf9UkO{lEDLFP6|`nip4*ScCF; zUYwt$<-D}5P?Ibgj0WV22UegejVr_fug;>u?&AT$$pI={UIKcba!bYtXS|Q2?rc0* 
zhUblSnZTNdZ&P3j@J;Lr03tT^*$4bg>;t~0<3tb{Vj%Fd1@PBGgg*uI*S3Dy=|;$U z5H0Wy`)@r1B8x(v3u8+*L}u2IT<{DsJuzk~1N^x{4$MZ#TomV^b5cP@ZWcQs(-IS* z6Vh*$%3&)o{F2xTeAjHnrD7}mjZ+mS17N(4oW0X8Q|_Ml1rDM3f(KCQs2B%L;P@#E z{9ew0k-35TC8;gc9%AdC|IO?F58$aT9erN>uinYgfxG^no^0;_*YX6`f9`a{{D(v6 zJ8FneIRuCCJwK??qLPk^UW@$k2 z2apadhR~HIexTxQ3|gyL3#he7&byLu`svq%17jzfKG?oEE%tfmfAjh;I&v-g-@g3s zbhG}iLGuBiWwH9-Cl-%w_VZ>>x4BObU1#IMBk<7-v|_% zCaBsJUt+ za<2K>-^(gO+TU)*to<#smD=Cb(G#ity+SQn`@1%#))W8I3-~Rcn*ASaoY#f_*V{jK z&wt1Jrmx^pq2wqd z_79}I&6-8CnR=eh2m|4LGMOZ!s(g>mL5Nw2ZMsl^NqLfnf#ZIG$i|~XoJG%`Jy#OU zj_K6a@jD4_AqKF!PQVGnpaXtx391i?m!-tuVR7bpBu7Vk60HCwjogL{*@lR>ms_2t zPCdGR1Fp^~PeeTU(j#^Ae2%9M{}tf|?#BJwQvYwg1NZ*t7)*jT>;D>_y*(SVhx{0A zS#IGhmo$CL1>Dqf4kv!2^R>-d@w<6y=Ks@=cQp6mS<3$%xbmN)-q8mCYb_7CXq-I# zXie;*SL5`tIFPOxrQgL~)bIDB;y}Azv=@)$BSYvS&R(Aid+5fIID&QG4eY?WC<-jF zE`os%sG_)WG$Pm=Z zc&zoW%FodmUv$trIz9TYhbKp;&MHc{C=_W|#rm)ALMk#syw*PBrFP*!3?bj=eO>aug98u$>v;e8a5Mkc@tB9dfX@s|9QY&yi4aMUk}-*$P;L}G&!W6K z1HS|S#E3Af=)WRMK8e6lk;GLZT~`!~tV-vJD>(ZPykQ?EqZou?Sx#O|DN=HomX)PP z$kEdhL}!!541{ROES;yBC<*0S=XqJC!*rHbS0wkNtcD4Q?T8|_Jph-c(wk|=m*_oB zv1Zsf%*qao$^o#YtlE#sq~hdU0!S*gKMb=5uR+a`ow0$ak6e9B5OE)+q)>?u$rZF{ zH26gN2jHCp1cxZG1W{;QfF32^dhSl*c`824;%vHzrvR#njTo2Fzr~;82l`4AU(Of9 zSvq1BpJ#b4SW5QS2Bk&g{8c_rsx#4EEx~R^udd1}nRoL=)jb!hb2ZCa+h@samP4n{ z#(&suKNJim=J1JL=76op!?P2=MFX|1oLxkpyeL1zYg9oT<1U<|TX5-E5NPAduW(0Ro`Z~j zOGDaY04^?ckA}a8|1kUP*ZMU1|GFy)(31WCaR1Pg{~RA|`2V#$7I$oVbTH)~nI-?o zx>ZgLdaE4}_&BZ}0{MVsqpX1cwzE@ zBV8^o+WlzM9r3y1Nrfj15~JYPN74S}!9D;*QsjPL1e`vTB9~I^&SIICnZh%Q#bUqx z_HzGp`+=;(_Pw`n<9pi=?I=S#!qAH_u_H{p2$>xr^CA>>gu;tZ*%2x)!Y4bzCmG>4 zQ`7}twnbk+8|hx`?YE(SD8+BOM@QI$Lag?f&lD%igoMqN4?2@AjshXm2k{%9-GFcE@Q6 zp3Qc}4`5Aq;l8@aXS+o*O)kj;iymv8DAgk)Fj!PGZ*M(_i?Ox8t$scdKmQw!w6|}+#qp&Ia2nWN6Wl?Kh3ScaBpD3VT)v%e2I*mZ-G+Ck0V@@hVu^Dl3~WRc4W(QeYKdIhGWnG9V2hcU<}L(Ab!YD z3`%M}&t_5FYxBzi$X$%jaZiMKDT-)FDa4DLV%QU5q1xazbErB2TeziFYO;SrB2|PQ z(W-tQ_#|T`ff@9w5ui3~7ALJZ3=&r`heJxuc-RqEK)6E!beZOH0csk|;*bjc9Z1l> zu>2S_II!GR?c|b=0UNoeAA|OBg1nV&<3wC(7w64wo!uqa^H|kGA^_|JU#@=lI{E8- z>h}M|XqJve!=v}dTM7b~>3{Y1kKFzLu(y%_t>v)}u4z{NDVY|Dt>t-X9a>++)me8g z4vqK{4@>dAV<-%;NN{g2dX>;koEl3J>bH|q!8C&?XH|7xKG@q!vhKWlk$ysr`rM+4w4Y9PLN{Of{X% z^1NtWMtcCF^dQ=G0(Y<4b&79-I6?lM5eFiGD@13O?S9p^$ZiRO{)>nLCwce_?{&0g zi@DoyS)IyX=BoTKdSxGU9XP`r4U8^MLaRI6ltLi~tvS3*!-0T)#=d;TEzpVst+5P{) ziM#%v935}$f7kL@#$TZHQ7Zw2ucpuse&2HgwzRKzBiWN;TDmbgY>CtmVs`LCtrN8# zJ#LU2E_dSI0i%yuGO8S(?w%|5R<|n(+VZO5iz2ZNlrWO63j+zEuHnS^MW)9)LzRmw z1%l|c;!Gs3S)fU@##(m50pXH>u&{trk=-EXRB_(pO=h?VN76|x+_7q8i3SNwq)y&R zwIM-g+pnY_uOK}%D|lGn7+R2tQ^V_B9N%u}a7#JOc0V;rIcs9|z3+&W;s(~#!WM2e z)Ixb12gzoz1}h0bxuq?Mr^sqc-TeEN$tQe;{pKA^2uQ;LF+>nS+bx8NF*%iO@xRs^ zj2yj#j}W&lA!H!2BrJ-gWS1wI3hLWygYVr)}R%WY|9cIi$mDWlzTAqLEjRQ=*EhDIr0dcXX8 zd+PW8Fu5hsIfnHIqH87Px zbGsinK{nXY+E%o-jP$B}sO6_MaV|?sTq?36VxWkB+|y~z{KspbHQ9fOv)-X+|8=sF z|E%RP&woD?OC-;I{1c3bhWKxmOu>!|UR%3u{MR}@GAx^M281krPDXh#mKmvEL3m(| z?8e7G{w?#9zbTgvz`1n*sSVAEItc$JXxjlLll#tD@p@-Mjfd5Oza4}^YU@L4I`94r zY?-K{IP^NfKc-@(gHJ!oFGR1%5P(#o76GdKkYK*>`YNfG9!E7I7qsetTO=*Xd`ToK z**0l$j`>Lcc^$`xa)+(e0AO=5tcsXcT6M=}+1Wj3hPM=%fM%= zEWRial8Z|OD=y+Vxx3il4C0x9{kz)jR+@cEiZXeC!2Wnhg}xlfc;Dyqb5XQ5=Nkyp z2Uv#zjmiNh9fC%K*n(OJ1t@;E2V3^ofiqJdN2YNk7unf5MR+Wpq?fbk1V4)I+%@=r z)IKMEA9I3tGp{}{wyzdLCkY!K%T)Z+PM9s`enN@vg~<$@HcCV~I50$=WY02{XbAi| zzO}G|#-S6km_$?L+9%@gF1$NAsq+wJK|kYmfDHWPq^9wYr~RLQgemem$4OF%IW!Y5 z9b=`y0Q@5U-_}&~R}8dUOf4(ku?@14Q-=pibV=5m!vrzqB7(F#6Eo%vh7(Mt0gs)_ z0ay`E`DmgI-hNiT<#?!YBspwh1cwefuV|3LSP{weKW zPV#_Qw8vIxQHKszBS7>l{KNCLKEkgmP!zyn#u(9b(y%Zh>pW|JQGUS@{W6`&Il+-d 
zLqH(p&}s*0c+a!sZ@&dIgvOD=86}4KkX+&RDO$0M5k!FKlF_1M+Fr+^NG1~_dX0rK zbc4Z96_c{kavmc!%vPsFBg3}Y1)-`Iaa7P+SSDgzx28!2VPbuO7qJ3WM29J%;kbRh zGh*R50ik@iU$ACgXF*PoY?)j~#A#z>ph%YagD%q^5VD~nB)oCuv6~6n;p;qF)bRf= zV{u4|XRnJmE5+oWllILG4+P8fzm88G{qKYQ+K{ zu`TBDNlheRTH|OQpPwURl2jK-l6f$$=<(@8nvHq;R(_NQ9V8&muJE(?B0^}>gS|`U zs~B2@t7ZB;3|d- zW+51xeZYMR8$QsS<(yHHiGoLhc4&x)basJ+bd&)AUhaRh>mi3E%NNr#90U2HkXRN4 z`U~rBVf%zfa==xW^3vt}L=@eItbkqa8!X|3Siz|}Hx$uP)R10`$p7>BYuFJQ{eRq% z1Zc_ne+tf?9Qn`D!N&e)EsyX2!;k-qn8SCL z1_1^Q>?8;}Ls;VyiAOX_K1#{GhTg{qj~)+f`4QsSK*|TvKzegQ|F&Qxt)xdks{$Yz z`h{VSV>%jdZd^0ke-zS^6chkyVk2M75Ir5XZ9{CvTRLYXNM&>?1b7!&4FNnrY-g$9 z4Zv?MWh9c}ZYq6GO+}T>f+#yK4do6Qo4t?n84MNP(-S(qTb#=`-+U9jBori)5b%7~ zF#zF-H{4dzopz(a^Q;mbHjqG9ZIqR0;Q)&g6OiTY3Wf?uVgZ7Z6ru=)8?r09sJd4Y zz&+%R@)pHWQPN(Q+G`}aiNXDW-75OaKKJT@mu9CnP0!|sa+sz?N@7ewS1h__y2#-) zomLCknSf8l{0$HfJ;I1mj6=m>{lGL0O!rBTxXE9}^F$CxE2AS+r9lojRiVP;ipL8w;GeYJm7aQKFgW*r>8XnXH&!6q0j1<;$WFI=#L8?Rr zBA!^39DhBZm?JxuS~GCOkVRn`>R5J>&SvuVSFRH52yADO&C(Ca zPGjNiE?vLK-Oo7J#4{0(*Fiku$U}-~%!?vL+3)}>J}~KFls|D^6uU@uMQ0$27hQ5| zptO3F%-DB|{2H}(v$j~N({9q$ff?*=W(+kB1_st46oYHb1+Fpj+D05@u0-3xhJ~vW zUADk*z*DB$VyIAwdH4fri8LeC5k+r$or8DLA&AfVef-to|G)F2^^9nN&-?Oo;9G0= zux)-*FoYCIp91Gyz3houH8-U4^DWtm{b-tRX#wRlH$?bz3OGlRpdbPk(tW+)$WTE` z4>%1KFc6!yv#jLrkLR6De$kH-Ed0>d&72B zEI{8O0RTd-H_dguXEH`+3|tGGQelkK;OK54vtQqOl3Cy7hMaQ(uye9lIt8Zy!+wMU*w%r5z}<>CCFGtVj#lGwROrPq*#QPJ z%s&2CQsnF!Ub_rm%<2>TlqGepMz1GJ`@k^{WLAST7^o1D(36IG{q~)ONMJ!DXwRH#dcf z0;javwg^BK=_cuT4Px6Ssb*?2>!Eg9ie4lGT4plD25Q@3h+`CJN|}KbZ2oYY_u3?3 zGKVA#b4lVJa45o1uhUyB&Nz@f9z%!X4fe!H--%QZd<~Jx}#eR7+Iik3_{~G%&_GJG8x8y;@=^zHYU|z`oOJy`bHordlwK zWVOCD5B4RWrS@NUpaQUj|35xH_VGVO^bP;NhNtf0zUAI!qYx&>s{*nBdGxq#T<|+W zFC+F6i;p|J2(NQu^p%)x8299Q`HRvqTrf=11n)(|7vCs~)oy8A(+s$)E)w!C1B+d) zm4;({p~_0DD|It|g_hHQ1$i~RwD3axuWj>|9CkcICC*j>NK5e(FVf)RAFnK6s0`G7 zsc7MnYUW^#pFt+4EN!tBpEN6bcag+?$fv!&yo_g7)k#D+Qf8&^a&_*` zbuMSox2#-4dCA*tos)IJt?CxAxlwuGDrDt>AzmKR0aGXsy6e_Cl=pzsG1QMxNL=1) zyV;tyJOB@28S|*@)fKjpqj!t#G#Y*nWv0ZEYD{{v+$1;(&01G^*f?!J3>RNY{8(o7Qn6KBr9W&SB)h@^aDMEK-ZnJ;> zYqBP@9;@%}%Cg59gnT{bQX0M?vj_^B`8c`Xjjc~jGu9dW=&9jY{kZwu;Ks~pfg0-{ z1gxnw|M{FoJ9mEN*0_HBJ}iK0&VN4@cjWsSo@Mqwr@a#w|MT#0bN*Y)1F*t09ni+Q zNIaiKv#JJ~Bl+lHf9OxsN_!*n(IQP>SoK)Q)_1|kL|4&2M>D~h0dLdq@IJ3g!-+qgZtsuHN958##`%U`pp%r|E z*?Pe_xxs2HuxyDElSh9bbeO4*6Wzb>`Xxg|8mtivM~7A$y;~%T63}~6X&%?5 z*zGwoD}(#zHs*dKgS<{kPBoP5L>hR+?SFQsbBR7UKc zj^4w}>$4U+vW)x_dCY!{FSWUg1U)0gviyhnA{!^FjPK|tl_P(svWyeznK;dJ8jWK~ zh@Tu=13g%w{b-xs5uTGVD;a{*s|;HU%aYhqn5(2>iIFk8l5w8EkH{F85q)P01S3N) zX!?b3tqI~yTG_FMf|)4?*;&Kz;(Ux8hH0i-6bzHCH1JguF(LVzts@(&6Ra+I(46Q9oj7g&3SX7|0 zY82VHxjKHSo+_J`;MeQ2F*!?h$MB5eskF(;I9hES%vxmiRjgMFlY=5yp;Z*aWW8>x z8)nt4p$e|*Wh3ED%9oPhMlXMc1}0ynF?}m5DlwlHRO<1rwY9R6y4YJ?v4`s1ps+-F zD&-!x={<*l8;^js&(i%LFOhF|09<8ZG`Hu+3 z10cQP36MkmBOrZ%Ga$b;9Rewea|+~i!DFD2phpEX9t2f%`y^Of0vrX+G)v3i!=OqN zJ`Jk!?Bk$H<(>y+eCI%@g5g9+Kdx~kRHex?Ay>^l6slNsDl|u{Iu=qOoC|prv{i36 zoD8j;<=usZWP=0K2Ek=lELXbpA6F|HMk4RzgD= zl#&|(B9ms-PS$p(ZJ}uBAsR z*k!RL;=!@XE5vfhp?yH@M}vc-icrk3gbT4g6bREXL$CwfC*tBNtYykzIJL^9K?HEo zMS}qDH&@_6G@xMt2Jr~XEe3_e>!#gMgz4={E^H!}H09&;j-ej!coLJq*z;tNnN=S_ z3Up@5oqeFi3B*Vh1kjQLCI`PH9|7A*RR7?AANE8|dj{P4Koe`n-G#`Vs7xVC1|=$_ zl4UVXN&?C%=b~H14+*$bCDWuZdLA((hgYB!TJFMhU?s1^gWX|TsRS_2%X5H^`jpH> zXS=P&xaP9OOlWg%t^6ENtBv-e7YKB<-Y70t+5M=J$?sG%xY~|K71V74M@CTlO$jqe zOu$h-PNE{trio8Fq%>*A-yZQ0>Qj%Q0rF*@_&rzwD~3mt33(*x@-UBxwjW-bWc~Qs z&rIs3%8V?{RLTTZQjKg#Mb)pD#-e9L@nGG&SRP3_h9t^?wO8%VIe0B!m$Oo+Qaq}s zQmS$yby8UvE0+fLqT6aMs_nRX2imVw$YrNDX4zSt*TKy7I+(#xD?7N2BIG~lQ+NM^ zJMVh%-+R3icm5wA9d7jh*78{QKiKJp_)qfV$gfyD8Hc2kEA`uz;Vl6X*Y<25viu^8 
zO3bI+vP+IyA{^!1B$HFjkIf+})&yc3p4dfMvwRfKOa~ZH7yjJ8y3rCXc_#=<=KVT_`#XD+MI$ zOAq_a!%y@5&z-u8ZD^kr^h-5q0kl3vk2`bx0Soh| zX0(b=;h_o{HH=fta>*bSah8oxD5Yt5axp3fXcL6Wu}3B3I&@2^8;@MX^Q^Fzn@}l% zVTkSBu?lXxB{z*eF5MY=7`lv_@Q9-U;83u_SYtv01C0n+Rri-xkAUuPKoLP{{(h$} zyI0fv<@heV;&)O|tq~U0ELoL)&?>kd@vrGhe>KV>vP<@B7S3A@{-rnjz*EEjam~IU z|KHp2|Lb^`^uJ)aZZ8B}#{Zw39Ju(82dBLa|G$pM1p7q@TAfc%Y%b~I?N`m%@a>k-I7Ef5m|3R}kTqU+` zh@V7VZ&3T#Hok7J*c8gepxq!yDD!)K&WKSzu>e7=MJ6+jEz(%;5S_%r(QxqCMp9kJ zJV|jKUIS|DI%a_xV$8ybIaCVn*Xa^fWdd^J${H-D!A-_qbxjVow#q?%we&whm1Kb1 z0Jo|qxd6>Zu-lM*2Uc?AaIvf{Su)`1?kxF~%os3K&kD=%4w+x$iXe7a-q>j`hsSOy zX*guWO&1$Y*TiU-UJM>UX3)Rrc55|jwm|mnw%ge83XKievC0J=z{R?)s~b-r&CFAn z+e(&sqGS3II^31s7ib3cyS6r{UaJ^*T7t#@lcJ(gl3b=`m7v|rga?Jb2_|M<`{X3G zTu-f-lJSgy1scI`7bGyT8P)epRCH5_On2w?{zNR%`_E?{@#MmH(oe^jhpc_5tk8x&J*r+}QuE<+0_zrfxhhHexNAr7?T; z1wDD?&sifDPZl$nJ>=g{OdAnoDf|aqLeAq$G~oSJ{Cx~yZsO@nQ$6|EP7e(z7afc; zC?tE!jEf5WU%G974^PeffBNx`bU)x(GXGBw4&D2oqvOMk|F5+?=KO#9(Kz0cn7$e8S?9b^A>0Mums z>93}nft{U~d6hia*`ekkE0KYM(FZTf;7LBKQn7|zp$Qs$q?fW@l>es88&a!|$-_0d zU54f*#&Ym7emRg9Snwm-69-`t){UMR?vXpvXTkh@QK9_~dL)pf!Hy_06{k7k!Dl() z|BI4FX^EcQ%Ut{?8V=lOQ)Z0okLG#YPebg~RD3~Jx=5h1k|9Mabjf@I0ICUdBlki= z&X0~Df*7a$J`c13PD{~sX(N#Tolx&P!RSe9)jmNfa%n@vtlPPa}f(> z6UM*XEtFwITb2|uh2X-Pk^GIqRRqY;Gb(ec?&t+tqsrhvjj?SGb1$_-5q_ZDi1RiD=Wt|JEilponWc9H?c+ORM|aS_WF){BMW-9ay=Wdtvz=)hSC-&YY+rj}U5-E;p+53@KGw(^OKo-VGf)gt|$P!4{3G zOv9MbEAoBay7x1Q4y9$6TY+UZh_0EUXY43Og5#4x6|st?VW1+?1A}zEAO?+ez3}a~ z>-N6DCU&)}^zXx8!r$9dcm7|DX6abWBs2uOr6^#@`Tz9v(8d2cIo;o!|JU+Z=YO-) zO;Lao-VvMYv`7+qO+Rfh77U98NGdQL(eK%4F2AdSzR&0LMTOu>DogWwFi^VutP>gV z(yv7Zzy9io44h)6?7#_b*8-f87{vZ=W(4mYuqqZ2cU^IXa!#y)PYaA6kH;Oj0qnpr ztb^XKMfx28uDS0B2+V$vcO_}bmu;#N5#%cmlu}wC4&kN3QN!%!vDo9qpA_t4hQ_j^b!98gjZpfdK<)}CZoau7 zk}eHn5a&QPW5DatQZ*Py6Cgtj)X`a~1p4L2M*WS0lQu%t4S5lXo_jC*>0sxp2*8MIJ`tlt0dsGP_raDg6O3;<6G+ipvjl zYhx!tARWrAgxlqF9#GWqC12q^(8C)iUf1ogN!>ok42XG_rA} z($e(uInU9z>3p7NRM=QbdF{qBhNhS`V=P(*OQ&a*sDRE< zPgKTtGGwxp6EC3&55i0wV?`I^oCdWZ+R1eD7#%?Kb6ad|;CE4wx=lIC^+2wj)+B>P z5Esf6PFSSdUgZ}toTv9`P2B$P*+TSnoN<1FobAah&tccyzH#nOZ9)2(fy_dj>{Ywr zc%^!FqTzy^uf{|11v{25VmpkO$}_W!w$&i9spEMiwI5)1p{HBX$;1bUC1Efywh5IZ zBeiB~gA?o8e?ku%Jb)@lRvGJCE~zf;)TkOn{R@Si=XiyXP~*$KkfACi6Xzw@6vAZ! z)e5$l>I{}dceIc*W?+a?9)A~!TACvmr*Tz56yi>d7BI#q&OJFmEhZ@XloTZv*6Ut& zq67E?u@AeKa;h`RZifXh-Vz}fTwW(gdZUK8=JTQ_c=K=sMXa1v8R$Xt0z~cErC=%q zhr+wF;8BpN;!R}%(5VS>T>;8y%$4A)T8fcez8rUWn_hk6!JC=9^&lWRk+}=^AHm@v z-~(WiUe2OZOaYe)ri8~VQQi=^t>)4JkVe&?HJ3T>HlC>ph+Etd{QeL=13M|I;8k#&-5iT$Rw#T`Lt z4A)a?MW?B>gEQnJ=linmmN;OB1-I_VaY5uqV^XQyLJw|3ymyqR030~zbq%5ofx+bU z&pe6kh=ZmkP-<^_6pEwojVOe428FSr@Yi2ma^X! 
zcXWE{`TsmU*vNm@^7s(GEe*6w+8czcYiM!+=D7KZegJp!TCn&e1$xxB0)Hx`*2p8) z8lh=k=oF8=aGmI}R5if@m2@bjr1SVvQa2|X<6%-=Bnf?uhh&aWiZyBB%c3L{r(Hio zRfQ|rtgm8=iPSxltNTJ)Gq+Y`QShNH69RdeV2UJ5C}cveCHY-x#~_mM>gq8(QcB=e z@bnQ=UhfecnV^y1TN*Q1K%>eG3aDU>*I9t1tx0zVS(#YLL!JoMu~dpPD3*czXXPTV^20f3&JHqh_&F;duUCYGLw%!HWHr+=}NNnI02W+|IA zCsP-NKNTSi;O4~Yj^6gJ;pwaB>c71#qbmdtm0|l)%W|Py<$p?MRUBc&wsVK7KEj>- z7AZ1=v`gg%AM(kTRB{&0D~j4zzeS6S-#zvF2mHO;^B8M;7;i;fNx7Su^Ca*~-jw@4 z^z8Brl)6#ZER0TR=1Wr{h9=cD%TWl^&U;g7h9=cDOJkHV@4cxsrSEmkvKX`6dTT0Y zxgL(_iUu7VJjt2!$b>P68RS8j%i%4?ulNrqq7D>;T>R$*ijn<;7gMBtPvGZPYRvKn zClFIxF@K=cuwD`CkdO9p5@u@N(~M?|Fs((6Rk=; zQOx9%lFzlMU*7oFs)L(u)$tRk1`D^2B0U((=#d{x<%myx{mmK&W%M}sMJ9YK)rr3+ z+Db$00oO$H`23vh?dEtBMQkSV;hDo~cem83&V^ZoW<$&uTDFF`UbTEaux(^9VP!eF zBA~M;_Y!f|4N$eF0gyG&OB`+$dM%dAw$@u158C{leoWb6 zw7dFV^_p!X3o%<~pBn=g8mGl*C30HLjM&=`gmGgUiJ?7Z*(7 zByADNx?7MuTsKz~j8S2b1oF1CHRuKzm!kF|qhpt-5;KsOW99@`muu3VgR-oPE-$+} zsQAJ91b#I7Vhq5^ET>DL^BjP}Gvv%!E(0WsR)i#=j(hJ7E>8s3Uq5~Y3rzmr7%77!0l?qtNhje zyyf}Txhq|As$GMUxh2@Wwx)Tw_Lc)3U3@^_+%Elnjr79zk>l0r)_UjAy^i9!F626z z=Wdl){Tw%VF}`QXqyoh}vG3yuhu`=O2f3Yi1?BKtXs#>y^F2#}-7}PK824orjL+{YT{}K^5 z_kZhn_V#SeYu)dTe+~-{-}KTUc3#U(hxom5!STC!8vOs<4fx|f-3b4&2mXII_>Z6e zY3lzw0e}Ce<^ErM{SOiQWV8OS<8jSyBnYIVQ*Sn4oF;NN5(+1dNR( z=?EaUR6oGvSA(^+%Eb^t7s6x&Fs+H2jzx=wYE9HN!CDh}Dn>6F_S(kpgJ^irHh>CS zW-^p&x9zZD4^o6{5XQl-Y0?#tu!(jHrw@p?0+4zOSX_4_hXR<>q^BPMoCe=# zD+5krwI$-TsoJRXA|ZH{M}MT!FQb7`n*jx0HR=D|+4g^f{r~Xvbc6rAmZzcr?`r#B zw*McTh|YHQ|AUjw{=b&TQH8Ac{UX&tc@^QCzk7kOfQ}@y50Xpx2{a*DKG9KrmVLbRN1mW)|D2J+?fT0cn-?FmgQJ2YYB5~Hc7`U!P?(Z zVpQiZLlsCE$gnly3vP*S9NY=RX$#^B@FkcQ_x!qY-*eLUZK>7~EaH7zMep01X}WC! z9MCH5)5QL0B9^g0TT10C*mPR?i*7*c+6W$0E`Uh*60B;NRZ=p0vwABqO1hRSFiYW) z5hBuLjDw^#u*I4r9jRyX#zc&id@>QM@@YzH>+xzo6=B~X-)oiB)z~$tpvSGLWy#5v z7&ou##Kg7v11pPwh$ z*i!K1Ag#5jf&>MKDC(fnTSdXh@k&9Ad9361kFS--2CU-t4>|yAxc&4m5{CZg1Z;EB za^4(v*f*VMGD}IL&FotQ8&G2e>`!K_$-;UEkU)da(JV5xeyweQKTDNX{wbAa`KQ!^ zy}S8ev;HI6!ChGZE?NJN_7B|qzvF}bjsKsuJm&h3oo<}``(v8ydJAi_+tajzLX9h}OEJVe^S|A=4f?**N)-n@4(hEWisk?`d zVgwTlHi}H=2o(X8Wepgqw(>@!H_I0WW)wi!#1scXAtGLe8CSvtq(Tg)jK9G`%Ww@6 z=O-b?JUTKD&HAHaND zoUZb0ngGn^$3&D$j#ny^&`#j7>NCPlTla*dHf~QDuZ#Nizw?!m58J zib<^dO=*q+#!{@c7$lun`2tceluoLc$ODLr8A0PHvqt=a?XT@49WrgmABC$14V&d` zPFiqs>^mSKzsL~O2_4^*Ap@AbQCLtvGZ*u4 z7N3(|D5`?WScpg^nv~MIRF)c7ogsj<$Ddg`EX45u)#OF=Bp!DpXcWa-d4?O9x`V6lSh5pt*xt1TAM)xwlsbJu}c4? zIg-t@&Zma|xBYM2QU!1c|G$5D;`)C&IXd0of2`%Pbo_>SKKnk;ezElXK*VjR_Zc4! z=!5z+>;bIw@Zg~c{0(&gx4i8Cl$N*19sqC}vwxXacJ5Z@e;95w6P>4K{>$^l?UjH3 zY5Dv=IX-pI|A(jhoB6+v$C^Qc<#;k4TNCKAW_Di5$+@=^{ZgI*?>S5>@V@qd1clMg zo;i6hqx(mvT4hfE5>T7K${mi9;-d=Z z43zQR5Kxf;TUXgU!g-Td3=-%O9ytuD-{vMCl&KZcU;`j~+!+xZsE1^b_8u5;ux_%&=mF&kmA+ z2_T>w(tu2WGysY4$H!Z`!+3P#V4x1f^EyarlnZsyj3}QRH`$@Prq*wIo#>zw9lndi zoQd9|!>A0FdTj}||L2Y!EwNa*B!=age5~E%B1ZpRKs9N`o8H|_POPXeRVa&3NBF1(cUP*b*8?~6cYkk8NYQt^B`79p6TmjrH z^dAW86p6+@Bnd4i0Q4>MmO#A|fEy=qWsw8gumrC6=eZc7$!uaRKvWd~7N1l}Cf)X; zkdVA%561Zbh?FyU?}FrQ!)H{`#ko6 zwrH$e4V)KfCT{<1;RI>x8S;tI#7wtZp_ws3h%$;!t4K?lw86%}i<|CFt}9*3V?B-f zbqrVH3>cq`tt1*&If=*JEwu$MnZ|NW;uXqBcWG07**2Cf9SavPwU05-G|Hq^oZ#{` zq4`gQ!lu01ax;|cj7l~^LYYQdY8*l_nxIpyu}{uH&Z_V&GP|kuE<_kIDpr<bZ=+O=b5oSi8ezatY9!7$1#vH{`mJKL#IaXUUrkqUl=zlw*PvTTK zF-I0ggwg*mk2jK?Ydkmr=WXJ~3@ zsb{~`e}!w0`mZnZ%J}Fj(AyOVo0K?}Y_KkY>e{xfR=sOJsRr;(VVyQ;h(OKI5CL0a zsj019SJSq@>uRlq?+%&T$*F-? 
zOFo9HI6FBuzm?id06IgL0fSw;Wdo)+Zm)D|`ouEGar>*uHitS)#xMf)tJ{T6WC+x+ zp-#QF;j>EW7(c6UHh^~f{@MjmI;Rv0bc^8eaqS`~ol^t_x<$|!vUA zZdL~#*SD^S4}S1mZ>u!`2y#wI5QOX43TD6DtiGsKbN7|~o;iS;n&LFSel>wHxLGw( zyXWq!`6uR7YO0AV&-JUBl{7QFl$Em$DZ++%eZb@A1;JZ06d zV7}8VW%)B?ceJy(s#3V)hpQrK;6cqj;^h-H9R%N2E=Ff;7vcMHKg(xhTMyt#S>~fu z`a_p@d%K|Z@awNpYY2Zwz#FzOKpYz&U|>c3_FIT3{>q4;Z`#HHQ;C4BR6^wp0pJ{7 zVlQXE)27F7#tZ$}-5qYm8+^>@27t#ocyqxQhk6QLbF8C#V> zFJ?%cPAEzEa%g z3iYL`buFFJKbS+Ld@1OD=?5( zlLpmj3qK*M-{XuisGdZKP=`02dJInWz%e6;F+SCU$NVCCWU2-bx`4&E)&vj8HFZoQF>Cuz=D?J0xoo{*u~Z}Nw1bNL%Pl9h34sML7ttswSwC&&*d3~YkC4)jzA z4Vz}cqzMB%1U24PbEdjUQl3YeBdI~(R`W|UE449YU-WI(r#3##Q^oew#B&vas(tT7% z!tkNbvTx)fyTMIlWv2y=4h({Mnm!B4#Ss4a)$~rlVS21Lcrs7{2DCk;$Dw}Hl=fJ> zt_uY>H)9&yHY;Q9-8L&dhgQys(>AYw>?9)C^@hKQmc42_{v!VNo84|!xEc{gnB8Mm zbX`#+u=2v1Yh}8s@H|Zfyi-t9p@Rz5;x#*fIemS4lqsl=?m)#B4UGH< z32)xX0**P{5$U!PZhC)*t<=i*7^7J^$~pt(uvbaB~^US%Hqdm@2{K-L^UI zyneMjr@OvvISa2y z-5T&8PY#cdUHiY&;|>1vS{~2-4@}sAc>@i4DOhKRR57UQlK`I$`7p$?0sO}J@Dg7! zKLzO}*sZF_XWj4PDw*cRRX`r^MiCn!i$0aHAJ_;!dIT6(j~_pFK%?a`vV~wUU=?hC zN4VM6qkcOC#?{(Yvh*L)Pl<&QM56Ba1G~FbCKOcLG_f5KmKG+JwjhEm%L}p}4zDcBk+yhZZe2*xM`gK1eal64h*HqkR{n@LsXESb%q zI?|xQHiURi5J`w`Gz4=Wu%oEZ%BPG*0Pcu@I8LXV)JQUy7&=LM`q5kT&}>jcpB$4k z-H0(qi1EM`8>TWOw$-HNa5W?8=@NVsIQ1;jd0eEkE3#FKXU0d-~n5+#&B^X zlEsC{1~Ar2MkvbXX;l$S5?P4Gh+GCbJf$>p%VN=w@hrNCuaF?N=oy`Xp_gb@p0H!X z%vBrnbhjvegLxjkay+sInJ9;tOB>|@8+4(CCK^MD1!}2XP?AlnGq{Nd9MkWvz}gO! z*VKCSM=@e&fC8G9&$CxmKKdYnI8qrJb%3|^agmBTqr@p+GIp4NR$cimuXtH_yB#Y2 zr$V|Z7kw=fW(2IKN#gYyhJJI$@r5Au-fFc#^WB7bK^7@b%;kJJj;nm3qX4NuXCyBY$ z(IUnE3y3F`woNjs<#8S z-P#Kt*Vl%f9RQD(5ZYOu?IMabeE>^$0#4l_XyC;O*HEOwAtah$ht;)q zA{o+$z8FBuO2sbN0#qXpVv}WijcjWvNfpZAa24c@1&k{oM1@_)yd>9%E`jKFc9kg#iIrhQ2wK9TeVum+KD}tIL zuwF>WV8y7=5PM+8QVoH9VG4f5#`0CH4K;?Le1^p#2FI9FbQBUYAer-IX3a24K7<|` zbSW9aOd*;VW+ijM0Xk00Diw?gjHSxJksc1Ma%JbldbSQXBjYO@8LF$UF*NGC*UEkP4Pyo%zZyG0b~p9)>xN$jSI6k{iE7V&fN-@egCUBU`H8Ne?5B zq9|8#?8+g9$!#k`9T0)vq)SUQc(I&Y#Cj;37 zR4q9?l|0zlp)pKc03U&#XVrj*FR+4W z%&-71NQ|(^@&RNx!Kn;SIjI=J#pvuR8k4Xc#wpoQrQ#Ha-e(HI4Z-1f#FsdtX(w=R z(~)kcAPsusg(D)9N5W|`h8yvh_@(LG<#{F;U;w9O`DqucXZg?(Q(PfHQaL5x1<6C2 z5B)AXx%8JtBWC^bX|=2b81~8A zHU-zMezUdeR|7mX{|6j{P!Y=M} z>Cc+(1W@2yB)hakfTo1<&O8CMDCwkwqL;>Xsa7J~J(oF?qh;2$XeHfg7vsJF7cwS9 zl@j@NIiVeeEP8iN`;*$sYczl<-+>vT>7Rr@0!sM^DyDlFp2BtxmfY2I?4$& ziVV+-{Fh|JuD4N)E|1ZU)+$RbsfZU>FSsmZE)1<^3pYIyVMdOAJiMTI5nl;vTlf#& z5KipJ*iUBLC;q0P*zuFCReP~50{sZLGka=4*fOE3eEW-)53cP zbhyVIr6yU9Vpx>M@+FcC

6og5rqT{SL61U}1kvwvun@@7tNxnMS z(I>g{WOtzKv$NStM+oYOa61XE6C06GLJ1<&&Wp(7`2dxX#F3T zOtc54Op&KxqXeiD)(bn7rYpSCXY8;!%cWZiMMQAG%P>_`wi1!YtxLvHMU!hj}sP!M%eetkd6k_Op zX*T#D_VLeuyer(&=a>25x$j#3zmCLz8~QqGnY6=}ykhaRvzR9)A zVZ*<1X**(%%H-pMb;3s4hd60~p)mmTJ8(Yv#IemrTijxpRv?^0WwNWgvY`q!0cyoj zBl${}sQqWC7s4QoE+a7{q(Q&Z)FVYdBQ8b56n%(C-PJPZgE%qau$;HSYKYE^bK~+H zy*ZPD6(1lRUq&{8sxUaUdnI8G)pSaJETItDn92c+wl zvHb82kC=C@Z?H5m-oYp$UsTeUHK&S8*H^a8AmE#O*UJ_LSW-yrd{T@o&(d>C5Gbny zR@#*u)F5q?2H+QBR3+ge^^j)+l9I>OGP`WgO*^!;Tq&LzLdZyZS?2{@$qnhZVJI>u zbO|09ptofjSkFlraUKn2N`j>xgYq1B2i`?RMI3=(9Dh%)B!5l{ID|#oU3e_**Aw#i zobj|tO9UzJu(}cIZOV_ac83g3pxMR;(Ui0oa`p+0`;%EQt5S0lXn0P_dg_hS&W8|} z>NbR`&EZNO2_C|nX|5O>^?Z)1@rHZD%E{E>>M_~6f`j;c^+~#8Oj5a^t653;sj0aW zo|?2)UJZ8Tez3~y0~(jiJkHkGUamjQ2u}mro%jN0iV?gEz;ao)1E?7t{gff}3$Obl zrJ#nEZb?Eitwkk`P0=uvz&?^_mgnasjXbOr+J4ckMYua~A!()Eq)RK)liq-CNM0pL zbXHa8<%7MwsmNt9?25f|PuFq;j25M z-TbzJR#r*Rpv0vVG*`tAln?pNRxr4fJ|5117cI`ksyZ*FY!>#3c}Jg6;*$IrfqBZC zfk{URd|GJ)v}u(V+hqDdY$~M{S6X0|6&vnns;wmI!%fZ=^~Va6R6I=BX9?qIpy&X& zHQ)r#@l6Gb0C`?ecTaky|Im$EA7CxVKX6`b*Q!~oZbe)Im66i|G3>IE{EYfxX7YwD`tP{rZo5&RXz)PYzGq^?(21bhG}iZAB^x<2-`{@WCCDF57gVCdOK2Y#|=Wnp!x5eNynTU(}sI_?;d`+WiOvV zh(>j!BK%1@4v|31fQ^%gky2Dex!L%p(g>GDYg#~N9g$?=BB60Mg4VX>V1OVeT5=AD zC3@3L{5QZ@xNE85vK?0zzbpdd9jX_C+mQyI+!WqX^No$V8UfbC54X?DB1)|jL%&&ZfH-$?5zysX|EGFt58oE zg#UQ~Ho}uh0=TAPjqi3Hm9|NqM}2v;l|;Rc0q5l96ID1g53pJbI(YsWPpojYUrc*4 zaqR=GT9f!-rNoNrn-%gh)T6iH$|5D*#*ygz9ErU|qo`dn+K*>Zl8tkg8A^LQxr9!DYm@UvnH_}? znkUv_Ur#4xo_wT{dIjkQZ%123rUfZd$m|qon&OK3U$)&mln|P-p`UgheKAu+DD7dc zMWp`ZR*teVD@Y-+ejU2hC~-ICNIz|AObcA+lZMHYYlXlyx2@g3m(}3n2Vm%n;aRs^iFCZf!z&u;aYVV zS}Dy+=p#>kZDg`Cf&q%v`4P5617Vf-L~Aef09fPEC^_eoXF;g;H`pjw+2z1cy7h0c zG64WBz8tgx7Z+a--Ka0~>I}7gjF=pTQPtY#?xDDYI&Htg9@?~h*0=9jktFMBVPrYI zYK|K!3V+7o+R{H`&Xl>~H-4tmO%-*U00)vH%&%7gSJDL6++uP-NNCuwSeXk0E2@9kgF48i{%T`z%Sz zS<1QhSyEtsw8{a!84^ZMUVH|Xqpw}HC?tod5oCbQ_iCEnpGB9*lY-C%sG&}jTvp;d zLV}Pte2#h7`UcygZAQe!oIPu7!^qp#+DGC51%pzGPO1f5eV+$rKp zVHe^1ceqQ)5;HOELgNI46v*m|1eu`QjA2=zK}7{U1ATBeJq{(8a7Y6Y4Bv*2^8x6+ z$E>il>czBH3mWIq72Kp=29Cp%=qhy1)k+9j;vgkTaGO6tp~e&)U}1gZxv13D;O@|+ zsjHVCl^MT`YumcQ0m+i8y$5j8n8ES`>3}SznbLX#!tPwrwtWyJ9tErJf4}!jiI0Q3Dl41fm)l2u@%< zj&-y*QA^YgN)klFel35E>DSQuHdNoDcPe+*r$xWK6Z8p`8`*Ei*ckB`n-JfJVhet! 
zd%_z2N;Zcxz!uHn@Mp;!vX?=@+;@Rh$dJOdOj(>Eq8mE%P>JO*KU*q?g!n=^6aI{l zD=*H#F94q|Vc7=I7DDo7zBbW!8L_>iN5fe@`e4nQOpasOn*-pYh%M^C#2Y1~%m&|4 zat5%2wC3eynSxpau`0~((6tURG2mIrV!w^ccP7@lERidsDPL>J<80fI>M9s;%tJyR z%0eu}Ls?E=Et_^?)hRNyk74L$!NSJbEIuzu+GyO6kPf%fmGqALxTJ|T>~6u)!x~j{ zb3>TJS{%#WfFgsMhu~5xtWG2EmnxSv=V&?R*7Kv;GeYH*P5gT%!MTbbRXi z|2{p~-{AkOgl67c*d@}NFpmiAISOlK$^ zsn;jsUCdJ|(ou=Q)*nF^SKi{Vlx#9Ebaum*RIAga_-R>WM>Z=rV+o4FvEvt&8X7fr ztn4VO_kfx3URT0Y_OS*4(_3m+4$?T0GSXsAX-tz5PBYw&xzf6FT@pD%ow$WZT_tcEu;v6bh0 z)ME~2VyU{weC$&-XVMpE^-FOq4`yTOsK{*WGcji4o`|Ix(XH$?%{jnp3IA-%DY1eL zC*{W0Vs$p0O+{&oGRmsasPoD1!>IF#W7PSi=juatozd)=b{hIyoSqD}Xv*OXiCvq= z%p6(EEIP>~(>7YPER%tvWmcJ(6CKFu_aug6#Cu67xlCAr%6jumXr=PHwv>+_SF!9f$ zXeR!-e_xhQNf0HWe3QO=$bl;1FO={Xk?@1~$4nS8DQ~j-_uoMVqFQf$dDoVC{I;e4 zatlxbi16FPt>1pvD9QEn-!$mDod95&{pYc#|8sh@f3%tZ>v$S$IOS=auSFb-L$PI+ z={%i@#={%KH`*{$utL&=uZ<>?<2*5Zx)$JTwFsI>YGXxl-unekF(F${i&3+vT6Xkl zRx=>G$ks>vr=YU}zJF{}kf|N;Q)mi3{2mr;!Bk{4#dDugDy=qb_ahYuazhVzi#W((*^?`+rk0 zK;Hi7k$df8!TdmLa`a~X0(l8gcdH<(=`>5hn}DyB%Pr_sc(i*beU`0?Yf zV%u1v|9K}aAeQleho1k3;Z2 zyd)MK?26-lmBWH5_>S_di-N}%<=+wfnaD)&cx~p;{$4q=&9iwn&*s@Yn`iTE gp3Sp)HqYkSJez0pY@W@td2avwKWcGkECA2~014F$H~;_u literal 31025 zcmV)AK*YZviwFP!000001MR)*dfP^_C_2CO6j(}jOv)53>gIK9%WEmN>}YRW&R9ya z<~ZIaNP-e#5|9Bxz=TVDZ?)WvjWhM0^6HoB{;?^V@xInSceWDu?I zwl~_F5B~B+o(*^&?(E>-&cmGz`rX;yroZz1rL)u7e7Lo_xv{fMIf};=iC|p^GTMsf6LS4PV2w{-sSPG9RIDIjyL{WJ3HHd z2{!H&i<;+OeEk2u5(KMBIF42y1*`1k-79aOx=TM@de(zdwGyzP0{r z6hha?$Fv_7Tn6Dn%4uAjO?q!WCs9TfOtL8NXYqMK1$2hD(Ru2AUst$5wm%5?K)wi0 zCudRcBV<3O#NbC8^+9wVC4(sG$5D>B{IT-K%Gd7bKk4JH|6ckz%I_Q_51c# zXLAGB|L*SAa{Vvjao2x&`O$hhOUKc9cuINBisC$f^xy$6+;%@5KQP4})Iqq$;+?v~ zWUx`>agkie7oY*> zq5s_Vzq!4&v!wqP@s!8^S(+4CI=aJoaQ*S$-q=}=|3aSn@qe8r;h>Ei!ENi9asKaY zZg#x!-`&{UU7r7oc=)IaWYF6*=y2>bg8nGX^FaM>1*3RayzbEJ=d&nzO>dI~j=7xQ z!*lv00H23N*nTld`fyg;>8hgr^YhVF-G><2h>NJ|^Y?Lr_%soTELvHK;Q-Exs?tG& zn_wkzEA~6B0MCAfJ~yaY1?tKMa)oWAKaI7gP5Af88vpyWSt^WZs zeoNy73+_F^@_SF{&(l4E`Sj@?KvUrCUR2=ZC-h8s-U=?#7{E5ZWrV)rKi~4d7G4DC zrz9?d2L7wlPk?r6|Cpth&{FyXb+4}nzXwR6b!qgw0TNg;AAsP2hD3Q#!1we#%81{~ zgLFtKU2JPN2w{u}O4sUq8%~tBv4$)frZ^6&qBw!YnDhZW(0M?d0bxaj7+4=FDzXVS zK8GQT`)5HuIj3gi(WePyj>AzvBpHy1fL03;3BpmH(zwUTDd5SlhS-S;^DyhhMHXgP z@D>+WZOX#=4mCz&*eHS=T9Oa1sI9|vG)g}s&{6m~2i${jFra#aBE=k4U4RHlCt;~a zR#of4t7upRhXZU#arG$Z;v@|&1DF!QFu?y`c2(B5@#&e9m$OlcJd65K<9?8c>WBq9}$eMnh{VWTwOg8%I=ixb}^UzP3T3y~s zy1{vtUc|r|1Yt0UhT&vX1TZZ}(HL4q^9l#2OB1;Z-JL=2T``DvGGpF1zME0GdFR-uGPVqu^N6H!CMgfyT zHgE-Sg@0o)z&9L?f-#IkudOCVu=g}*gu(jLpx4BI@hko5sgkg80PG-z?Kr#_JXsGy zc%vEEfKT_-PV!@vK?PWx`ZVqbAuT=Hbs-g&wJe($wv}uWwHSL^Wcp(`VrUoPXcA#g zTGqJeG+4-6M3+T-h0Z6tVNRpQ!S`u8ieRv)IkYM2Hz_;rVP0-q51Tgb!5bmm!!q?pS6~yFwpWbnb_*zH?G!`WDTY;h#y#3JzBWPpFZR^)f6u}qI!&{y z+p~Z(_`lB9#zUX~+u2_7e+zj$_DT)9#R6JMz`qE^S;d+iN8|W3ODE^Rl1yRI>}-ms zvC~>Y!bix%2ygThSsB?`6+YdZw$d2LayWs~-fvOA$cez_@?md;v}kq?yXqK@wW#V-+7o+(6Jd!Uc$f7qc`#jG3VaDGE z);mhSI2&*C|7#S*`6w3XdrzAcADwzoZpMev@bwoEIsVy2i)KGmdtjBk5YLOo5W21T z^<;^ScDw1w6PW?B#EHPuM#+GsiSL9>gHxCKqG@L&wkKpzt2pykjr0*XN}QBkkqOGo zAz9`5M3^ExM_EYN$H75Hl!0N*SyW82MEGx{C6dT4jOXqkDxz#0C(ODhfVZ8)KG~_E zfKOKtgw96xlz4jJOruK_Y$7Ko-0&dn21&|rTJtleMl3i;gDd2uE)_3oJ;UJ2;YS6Y z5ke?AD9}Hr6C_fIn;&+OH9duCop5`@F*NF`dsmWC#gQ|lvLJLn&(nU4>xwCuhP1Xx ze@^klr?X-$Ul7G$z!fwk!Xdk$n8%Y-#yg^SA~&xp+zZoZ*- z`ljOTfo2=Vm!qJ=#bEYAzbZoAIv{91Q9_rOa*U+9OgR+ppt4ysXa_ImZv1(M5)?Rd zf((fHbZlB^)mxTWNNe}Xty4%5H zJrNt@;IawhW$hl+PY-w#(F+u(MVCbF(1mICF-M^p&1qa{p~+aJ3@Qp?9t!J$YY^96 zh~@cOWH<^7*nF`t?}7PbRN!KTB^m~}H;tl}OyB4;m$cVEp>i*Gv1P=FKMJ8(UO)#S z;wZ&x2M36e%mUaEl*l__an=YX!3eh>EQe?b&Rn)MG4z%O6{IAWqg#pNBGY?2e%MnB 
z;oWMu+JduV^*yhB?4ASeox_C6c3=XSJ@nmbAM+3D^KaT4F3Yg0fIfK|RFrMn8y>K0 zi3s(e2M2k(-PSG8Ss;#!z38qaI>YrwyH&bOZa7QENczZz0!gahhAdhfvz{N9X&GyU}i5v>6JN-wkIuSD)I3(B*+nXf$)Xl6yne*OZ2X?0=osh`+4Isok5R0_ zq>9ixfKg8oF|dcOIOMm*d<(Kxw+{7=IK&-sXm{>VI(F!Es=y%X58$Xu=(k_bp58Xy z!g^3tX{Hu~uvrU&TK^2litrqUqWGNB&KLL1?X$TnVXD#G74wz`eb`<>*29@qWBWiD z<6a!9(||MMHOZJZ04@1qWMDt~3>fBta47b@vpwANA80mp&9k|27KK2N7hycIw~OK| zrmn&C02n6og88kJ-eIBK)fDvIw!4R6qsPDc^h-YVeSpK@l;rx@`9ywMC#07Z34^_- z(skd5{f|@nu9EYcfxUyT+m;&a6M0UNL%nS#7L+*mkyT)kkW7A~7O>cvLA}!Xyw3yQ1&y!#Tf9~H`^>Y!@ zlFHx3?;lH|F8CW4{0$0zggwh6njf`Af02rBhsNYB|ZPH%ESzf3QR{C*-XFh*W z1yDx5so%Er+qQn&nIh5mCG&SH(Kcdp+6?ECyU#8877ZlEQL9uD_b2L5%*@5P)?H$G zr4j51@t@+=vX<6Pl0lT=1**sNI4V-Aa%L12eC%yk4;J|<=*clEQ5oSJs!^rc$3S5q zjPU0yByyOCyhu~SGoxtTs1VZ}F@mod)G_6DSX$x_ z+9Kf`T562=PhWUi#p3dxz72~ZAV+{yb{N3$01I3pMOPyiqX4s?Ucl4@ej-c9x(&c8 z@GDHgW*1eSLsa@|>7w{CHy05`8gIB;bB)kNAT$B=*!Y7WpsGf~Lj1=`0UY*l2nZ(y z?D0y@<4DxB;7lQP5`K)6)1!S|Id+eA3GyRptU(sY(ee0>@`JrvQ+RW(!W!$FRdQ4{ zqPNh>2()(Z>`RTvW_a;>1JzupJBNhwDgArKa}eGdlF+nq1qN~q6aK2D*SFT0M_o-A z`;?Hq3{X*J9oSRY3&T+m0oN!x!%9@7w$LG72dH(UL@~S#d7`+XqwHsvlfDqxR@+yO zs9Q21J~9%cNLiAPt9zPL5s+VE`!Ab53@u}1Fs_hMt7B?{wpz@%U)4AfzE};kB#&h| zBh{s4S`5*ZG%_}2QR6cZiG>Js&9=dmIrG} zGW80NMC&R=&P3-dv_A8Z9rpXtxoDm-e~{ncpq%HFgDod${7>AOI!$@mYzBLK0ljYd zS&!1<413?sDQT2d?|uFm$`ZEWbwpQv((JNl!oFzgX-4upMd}s zs>5$KA3*huAEuLRac$qOXnQoP_4rRTwF~%9BvlMG=*qsJe+N@~1W(6>^~g?jj|8W4 z^a=iRI#0*&pHr!DXjL?&hv?sE0fL3;{J$tS-BJoLga6;&dFadk9&T^l=c@XfXDa;S`x8IVzHRK|44WZa7~Ivss&y?jv`d6qdrgH&C#42({y3pmRz zwDR3Ehr4CX7-n(T=45dJ$Pei_Rn{B2)*-{-AD7KwPurVbnv#b}5oP_eFgcB&;KV9% zg(dj4@gevYelxs@s}`sEV5RPBxM3=55e;p1_J(gLZZi}Tt`1I-1Kg0Gv%l`8&)W2r? zf8O%#|93VYZZ7x#MLcEwukTqw-)MP3YK{Ka&{qG_3SIBsS7~jvNIwiHl*PPtEurO!^}r#lqw*P)@f!1!mFzoeiJ<-^Gvs%l&@=kF);+arits#S2+`|G#v0 z|JPx0)*i=6gI>a34&PhBW@r21_P=lKZa-|&zPs|^0XdvP5?IeACTxY{PB4q|M-?UQ zar<-pF$U@=4%=yV`T&1Dc#Ygi`a#P?+bbn}7BrUIlx5a3nQVHc>N{ zUsu7?U?cbsZs~B8rdi`Mc!0vgN5Q(6xPH|%hfN7NK>yB=!-Mn+by{@4Uo{=H8-URN zfGo}6KK+aDb+{#yvEOk?rv|xXQ(-5ck`6RH*<2p7{|ryf{*SV(yCMGnGMoP2DAE7h zJInYFi+P-F0fmyVDWHljNg$R>`=9=)S^vjZ<6b(tqa9$@`tNM|_8&Vy6E4^PA|4>Z zY-J1{caK__Bxv#eERC{$=ugXPj%byWdy)mFJQT|2*q#a}9} zRaRATS6vm0J7$$tHlDAi&xR`7PDA`&kS>qrcrEjMjt**8nIcWmE8`S3^%gF>QP?L3 zhlS|Zqg4N)7rYGn+W zex>ZZi!_Y5Q*Hf+W;lI!zz)kG&V!kvK9Bn7EOFozF{->#IB2ivBXGtt_RVumxl0#o ztt5rpmdQn%mp;cNcsD6z>pYd)*O1pgBT5m-=Gtzw39c^24ow9|-fBEVDI4Ia_t<@L zaUqe961S_(Da0CvIv4SerQ_*kl)w=@ivJr9s4li@rcrI0oE{|U40Dk4=pOpvHoG7_Dbbl1P(TnH@J&i4JALZ`K;6 zT4qfsdpp)gGnE_;&Fuem$e%8okZjE?%80Z7KmQyWb^lmT4gUv}*qD~#MMN4PUsC@Y z*uRqf=hpV}{%;|V&Hs6WZpQ#BeQ|9#D-%(ZHd7xe;k=FdX;u|0NCQ=doA|DPn368f zr|PqV3T0F-91H-p3G9+-614&})kD)!wDi31*?G!-FCY$Odz~5k;KwW;x8J6paU+61 ze)6lB9cR3KHDBOWub-6TsF>XnW217BT|%ph*ff-YfFXCP={?BRs2 z_knUlpAbFx8!Eo-Ez3_aP>|bN$Cj?|emhL)to2Jka&3_#HlM60#b0U*Dv7FJjHt=F zLK3scE7HS*&QDawUM(!12J>tbqwJ${J<&JH1pjYV)+`zEdu{ES z_bQcE?wTt+W@iLgO-?%U<7fm46cnpGueGIIix?{{o6vTJJoReJ*_F0e{QUtNs5>K{ z^zcu4zSZabT_?goaa#Sn@ z_WQGvW{VsfXvSz1kK+VIWk@q}tQ7VFLqc2z)r*jzVT=ZCZB(l6ZCYiGj)Rz5Mx7&; z|6QwbNZ6{Tk${tnOAW|K{W!^@W0JhNapKT|`YfG{2BhlIBLV)<1o_5B04YV{LlNFn z>@Xx2>3RU~8^R2|xiG|M>%(vy!-r9roKC`1B&s>KZ65s3@FMi%CY?`uBNX2AEn1qw zo+|=(5yu~=WL-$`h4!x#;9TE0ly%-^;)ll^YFHH$|=&&KnxQpIPotfAAj_ zxkF|PiXk!7&zYWstO`S!e&Wf68FD0eN$?-)3)2D2rwk}~^4pAzy@cmcv-m2V+*7k- zHSCkw`(U*^#kk%EUoJx-oL={e<_!6DhBAGMEpJ8B4cdV{VTp4ocP?>r5nNIu8DYS0^$RWzRLdBs??)mYccKrbo$sU27mcm5*$h%V-A zufW{eSb3gP$y%1>xB;O{&ipg$3c%4GWixhpia&|jlR>cBSq)Yd-@))&qeT7TLlVJ+7)lv zb1Pj&581D@tEhxCx((CB*m7{x+;&!^_msv(+&mEW{NUKXKC>`%#bD` zVp9AVsxShA2gMQ)hq~mNEI1D_9KczWQ}9RZopc-*Lc=n_6Xr-$qGPvQDY*azHGx(H 
zpTjFVXhr`F6+P0STCf94&COiqi|g-c>N)(n;s)=%oVQaRi9!cyXt8&?Z<;gp0!V@p zd>fD(nD4Jp+h#E77kWSX7G~_I01_+C50m2}?SF(M$&^>kIv5o6c?4}HiFP_GrUfI= zsH?wGs$NyzY*tnOCeuMyLoAr^HH|^3Wdg}gA2`M6(CE$A3R2&zX$XFst1HEk&W4F9 z@U1@ute_~4*XUX6f@Xy1G|3WaeZhsnls{oOrK^=&|0*{{?XtdK&&_@*>VBnVQrAZn z%jDac%S4S*@J)FPzo{MaZ)9HlgD;cm11X2B8qJxJtm``x$eY@(#BY9QYRyh(Lcpy2 zMya(jWd^e&={j_a-a#??JKn%qXI;>R%qX;R{mlXl95eQAcm}fzVu2o~F=- zu-_+9GAvrW9^AlVS0`yIl8T}h;yQ(27-Amh(mqY9HAO8o%0W7osS;vj+Ym-kZ8|l< zL0c+Tyts5Oxx>df7iL0}s_WNYh;^qNK-+Tjd=|x7(7hyAu263QgFlmwA@KiYb#&Ga z{tkN#B*@kJ??D#)f&E;Q{07Nf?ci8L+*7aIbsyT7W^~h}g3#=Fgl36O5TX-(CzD$_ zR5HiEIHga6tEI4^{M7LOKSWnZ3Ga_iqh1z%QUCv)&7Iwjum9EQEcL$@^4R>pJLs1B zSLp-n?JN=)4(ShtjgN9AFZhjK264_W==qa`1K)ke(t*bBzc($dFc-X53Pb4DkD{=C zNQG7kR%`vl%B`P(!gOV`YP~Pi)*p_delcAQ%(F4NwlWx_1_05{K4`GL{mY2p;_GpA zop=LocRC>ltPo*rC`9ZnZo$2DLdK}0uqfibSnA4QoN#yh@#yGofrTPv7xNhKdYZRY zb=9zr+F`%{j+;CD+b5^1lmOd0CUl*Q-+hM};a`;zULhkU*hWO6ol;c~tD(V5Sc1%- z;3d?s_Ir7nog2Fb8(~%XUIi%Kggpz`;UIz`EVS&v0v1+63H{=1N97kD4l7F+g&>}bXSJu^cK1h$PL`tibA~!0QzZBk^${BEaHpDrb$c-YpjFLy@Km2Y^;&+RI*Mp2T#q!u`|FNKQ)ub{sJR%swWV81BQjGCxu$-;cT15|(m0JBqS~{)X zDi|saMcqOCf*_lvR8opnYo@qKQCUZZBB?HJ)|VkxYICZw*mFsr+WfpuHQ7^>&o`J< zOCR%DQ4Y$lRRy2R4tVW?N4!Z^WET0={0hcC?K7u-ab2-0_2SCe3Fz}=rm2iH8a4xI zz^eC>FZN29^C=`-bKT&gr=!ZN69^T~r?rIjEWdV1cP>^HH*Kl?rJ-5N;I^QwwRc-k zW(~~^siZ1iwF8Q#c;(PLisH_$Eg3$@ru_Fm+(yfb)nrITL61bRmo;ePZ=6Q3X5p~qmtM0O6pW)0I|fwa zp37a9_Grk{q|69tFVJiu6~-OMnR}|ifkJ1`aF414x6z-d)T0$MKOn@XpJZ--VSlXn zGt%!RI`sYtduhrkUfoOnTJ$?go%#QSeKnnksqQXoJ3|C@KLL?<+wQpY_SMpy{{FJ! zZM;s)8btHcpaW(3k3Z`5 zjYFlMp@rm;6$ut2zFKPON}pcYa-;lSf~wJl^9{qFH7N~o5d0G{5PWb`+*-G-Le{Kd{4Bny42+*m zL`4gRGQg;U;f2r(@_Q)uG`|}}r{wpVK3Ee|kCyPM6IWP+*RI(th~f;b1W|PweXjzh zsU8bhN$TwMoG&EK-2-HreAqi{%A#Qli7KhEZj^mM%_$+ZCy(p6$$zUZse9dMEtNdb z*{|0Zf!ceqEB(|8oSyWvqC}P8)9?0M$~=8=*A#a;V|P0lU+p9m5uJ}|wRFxw=tGsf z%!7V6qOzK9tdo+NrQr$4d^k6djd?xbi!)e?K|2zQG5+EC*{SssNoI>%fLuyTN6OPLbJB`7o24$JfcSbc2ukb52r#8MQ`O_$ z!=$jtB#emB6?Eu})_>pp-&-A26>ljex`A$NDepRFZed`JKCxE0aG}IxWv5ZjMsfKV z!))ZDnGKXwbP9Wt8F`A5jvj$Or}=tc>qrG#o9n%}&;@Xwqw87}GK^rb{azCvYpTca zr`KEQIXYak`ioyM(WiPjuQa)Q`7g!PcW&9UvoH($1-5O7YtO+UwxXe;&NLm4jDih|i;w-u? 
z*P@RfhhH(f)XkU~eHLC{1;aj9uUop*;||;7wy2uvF^F3_0}e_Re2`PwR4zyjyrSVN z`65NBD~@Px)~RyHsu?F?lH3wEV3F+pf^;4mbF&4L_s@5=rBR-Bw)&$cvQp>O{E?>LDF^Ey^r8gG<+zgG6Ez}K(^kLdh;j$NO2r7F*nF$i;SFH@*$N+T> zo)C~LVM5EQq;0BULdjGEpUM>(otR9=)i7~id}B?l)K-SMPr5L=cJ|CBrx+q(=UtP$ zj16bSbFGY%tuK_R03i+zj+v@dF2Ar^=QGz=p;wgIF*!_|%UU)ASH;GzL#1#f>X%?t zrIBOK?O6&rR?bR&oXihA;!O=r64aVB@-)%JLTx^7oF*f#Cl|3cx$77AMd-p$@#Ti) zl&In=MA)BsvI@6i9PL0h|E;2ofS*$4Myf_xRUDUTnnrGcIz=OQ;@wuy$bxoVHKT!b zad0a;LNjr13&~9FYu^@F^ZLej1@69J=}1|=fEyk*&QO`A?1f@Ag4It!IY( ze|u+Vs}%obbLszmAx}jV&xRFelg>1laP&8cPBD%PN&41V(l;aCrxWCp0L_r))rRn_ zqUG)&u;)Ldn6M%wXqAXi#7iwt{ADSm!FUv?Y2`=@^Bai^E4BYLxi2PKC9&c@mAF7V zDDpi_!mf3aI2kdMIQ)XDO*o$}pe`~do|RoG+EXG%+VSCcvs;$MJlRvxM|E_PNw3=Y z2qi7}5r%W!A~Y?TiV8sLz#^&1F{~T6h>??60Z6@qz^auCDj9d zi970mutgv1p2?6Sd2mC3R-oaF5#|#iObcIHidpLQnBG?EY^in~=&>7h zTQl@vJfOwYmAP}-#VA6j5~AH0?8w~N(^Ew9nkW6Joxn3&jY96)bD)gP0fPZ_fs>lS+1MK&qG$X zl4W)X+{@hS4NFHUH!*DEA!e1xxWT_ccU`GpO1SQ)_zVS}q|SkFI=|uk+~mDrRer4Eu;g4E z-d3BdE2Tye<&F6(53;5(_&<=g;oE;a>@4}e#XQyfKMwlW-u^l4>zw;PAxzspz0vN^Z{^LO z(sA$SmuXu+*elo9rC|K6TqyncZE#4mxi7R*6~gWy@D3s&{-EkI zw{YE~xScy?c!O6+)a>u7IB)MBe7(EtAuyY}-}~B^+GYLTKF>3A|9=@JFnayl904=- z|IMxK(*BPwVVC>=0-oyqUk2S$29Olj9qcJ5?$cbnS5B(9=9go{zThW#%3G{^h^Wc; zHab_{9DGEqOI#P7iu+tyXQv*Bqm=KYCb$bxz^76rUY42~b;#~1RqdDaG zM-CKA*RAF5YL#p4(KTx+xuF1s_HD6;j>*&sdYz{Ado$>50$>s_s>Fuhm=5G?mXdoN zEIXx%)@W4rV_4lFyI2yABYQ{n+hyksv0+XEwk)tys#`Xt32LC-jlKqPop}nvCJ$n3 zei5qrXG6JC75wBHxhr*c%}uaak+Gv$O{cM;PB6^Se?zTXwhYeYaZ_3qRtdOCrK@R-mq^*>!9QWi_YN$ArSZnZrkV} zvgr|hp3G~obQ~t}`DBEeyI3wxV0kEGf4dcl9qfdh6T~c`?&2bPk&XuF zc(rSkf6lY?A|8-a0KwgetJhI0FxLp@>6kR>2}Hn6N7*CvLlMRawnO56q(ea*yQv#e zpcTk<5oHC{1Puyxs1zH2G#Q{^DY4q0!rF030t}yWnr5WpkAGlw6k=H*Z5fK*J?vxR z61b-&x_K+;O+7Rist<(RU^hYp-!{jau!Dj6L4ab{;XzKU z40&McX=^moRDI-kc|~+BUrtA9LUuvogy{v6EO1z;I=dLqOo~DOJ>3g7EJsd%ouow! 
zJjH`7!d4BU%s{Z3qR0qr6`idFyQSa+!6*XkKaow`gFso#bu&un!s_ZmV)rq10Jd^3 z__T(OrZVX`9B2b9iYX)-6{p?}&{#^EkOfaN~5;D*DK@)b!Pp zJynzV!s>eDj`7k2B!rs!=o!DuXp`?IO_or=HSykr^Q)yiQiTb}UJKEB3&9M~U%v_2 zhVy8MXuw46pQS*51bFWazX*YdokGPD5)K_hwS(h$9FM|mbVVMhB-|Vjr$8STz?=~c z9T3$mIK!1$3YCMJB{+>UI9z`BiIiVEhAK9GEEkcf}7+8+X^5^hM z+E@pOnjV?{YvCdZPoe8tDilFQ?~DPi+bd{bH&Qzc_J%cVFNC7^H#rWS7t5X#Vo zj>l=jl_{Rc z>nv+(!m1g-gw8JTM*w_JF+w;n*#>dmpX78HK&3)HhZ&p$zzb#ev*Br56My8IyON=2Q62?u%tz>*4jNz}TJF>m-y<7Ck}WinNtg_B+;nJ7 z1G~Xa*{trd27uS+DjaMWcMVWoWr%7&uz5zk$Z!*+jWR|{H`bFW($68Jat!Z4BcyOEZ*nZz(nhQqxs71WIhXnF9PFu#RCiTVn}2JX-vt= z%!5@uNnGq?U8y*lxVk9PB@8mt0k9=>yVfzGA&RP2q7OLO*M7kfwrBudk!muvH=og5 zJgKqAWPGtQVC%?4oI78Qg-X>W1-VjjgCTk;gybq4R=$lAmm26?22iD@L#_M*3-j4j1Lj&Vi@^3Pnz5pizjZl0#*JT2&EIT@{$| zoXas*tuiI{>2f;ZU3`tuUk)=R@8>`?0KDuo5$0{3P5(nG0rUNVC4P?V8pXK3)N4K- zWa$4~u|o)zQ-~`<4A_5SI;gA2xCC(P@_YkGbsxw9qpB#Gj7>#+MS&g_D@@Oy>t37c zg6=u@<6PeA(i+Eh71rzmcBVBuZsu!%D2klSj?PPIjaJC#R7>c9!F0{?wUVYcb9^Cr zAN3%6q?iIzw1=vqnhT(b6QK)n%+^B!Q}#cb6I%mD(;z4mZdAe$Sl=6Ah~+E{Lyh6@ zzx(Kl^Gd>)XkplFrGfKFmulocDnQW!{C{tBc6UqnzZ*;a@5MY`2tSTW)p+AX)+6UE zMWxWi7LSw#9-_SDsrq^ONR3#soRJqzLn13Yy1@YQvOv`ibcf<%I>><@VA3?56tFBU zc{zs#Bt}jzOVxnHVe-LJNEZo8_hoG{-oUcjk_UgSE2#p) zxuvM-dTcN*7(jW6{~Y|)K2<)wu_BGZz`XH;j76WYEc~Yf-a!fgdg-Pd{1yMRQ}}3+ zpm=3!IFFEO@l>`{z1?H8fobFVGGpAGwsh(3CS6`j1ssw7{ z>PwSAEnGeL>#9t*w-GrYLh8+)b~HAvU^t5TQpp}$Vm9?0L5g}LBoIx|>;enWkRlQ;xBhGP|K~!j-H9K#8Ty(@tt3gCma z{A2W4Ts_^j(7i(5eq=Y-leU-gRJZUEvWzf=h+hqz#M2pT~4 z%E!V*4%Mp#kE*cke~UR~yK0M3=FZRzu*_zOFrlO~O8SFgY-KSs>mkK4Kt$MWb1_7Y zR=F!kqX~*JC*q`ED4Dlf?7gTTPKY`2y2w@qNFwNz9H_L>m3@)r^|?|GP}&blBJ2u5 zxo;T}DGWuzSD z(whc8Pgys(ivE$vAypGY?d($6tk&LgX8g%!h90VGOpW^CwTN#uq^=iB2e2oxVi=bk z$|HDi8zaT#^TnQe`On=sfSalR-|^)?+glHp_MZ!R(90NmJtQBySyXaE_69xx1^#UCRDN9NQTebymV99HMBGuKm?n z->&%EmH&-oCqaY!++mjQz9XM^nBATZ1fF2Oi{C$Xl1W%pPR3Y1Lzrn3s|t zzKzPsmRj@46uN)kccVvs{isDa9X(tHmhPHASV*tSJgqCS-tmf*a&%loZsokMX8)J9 zaW^3VX6*kPTU);TXJ>a~>HljXk1dmtL3cI&fruZ)A&WY?C=%`}0N1z#!JH9kUYNOsI$f;T-Hh}w@DdS#b&v&$OUp5z!XBc=2T?^KN^e%b09(0MO5 z?I~2MQEBJng!|=vOS^jSCEN%hjRNDRKgJsm0N6bp@D>yo7OHE1R4x(lQph zI~tdy9p{Lf(QL~Kn<5&20tiqgHDivkfS`JD64@eX1|(r>RbK_C@kL};Bszv%8;Ng{ zfZN$mM>ti)rb})X9EG!QzWF9NV$WcFe#?3`IgZi%jK4Rc_Gz0V_O^nqEWK`%PI(g? zLg~8LDxj_&nNSMSktiQ5?omHTG~ke~SjD1_i+0-FUwvgw*1g|#d&PWFO7fefGHbPE zI*hE+%R)@m@oOcdXs3y{w7EP)SIE@abCHXZwkC`B6BI z072dveMLZ;WH0c`pUE5h$ovAZM2ki#cu!o+Om;SY0iZC3e~>+Tet+RR3P^?tQ8B>~J{td07sK zqbjWmiKFYaPuQt}am1aD+O}c0iiZcPvO@@!QZOmit13~Bkx;nrQY8DKJ+njQ8o|=}^+f%jr=oGKxP&Yf~$4vbMQC{VcoIaD<_um3}nZ42Y2`itMwPVgq%l;awXaI1E|o z9Z!)$%rH2gVucclR@9G1Ipc;4&$aXdyLV$PHXdhutl8S9{p;`|KFFhb*yX^ zpEuO!if@het)~60E2`@&oXOmbjN4}0gH<)Q6j{eBsu%ZyQ`E53{v=XM>vhZt=PS&t zp5sQkVKy9<3b=HLxm5|QQo4yBFgqG)UEZUbg2s99AlPi;sRBtz%iLQjv%)h<(7$SR z<4r#qDTp$5vKx$;p zzAR+r+>xv_`8i)KD?fWxG^^L%zI@wwRu=hR6VS@3aaKsHia1No_C6)DRVjLveFRDT zDhHS92v$yYr2tko%&8%)WX7mKO4o>KRn=eBQLQA5l~dpbfvid`mdI3cz}_~P?A^t= zQt^eSo%eQQ9l2O-{HHtf{&o}pZ(F-NTTB0Mi+JkC|E|2h&EfxdbH~^J>@591Eaq|i zKYXfp{Cn`pcJF)e>7M&@wx?g^J^}vy?z`r^ZhpyP#`EO`mAiIb>H?tTN$lWLt!rNZ zJl&@+*1~%4beB*GyULv$0j_l>Hyi5lWNiCw!wzU!upOUmC;-L$Ro>dLrp941Vn=86 zLJrQX&*ajqR3c-)ZuH`pAeU`bt?_LGbcPC7bTgYY%?TYLInxK7-$=YrE;XY9+iJIS z%52L*aSoSrgy|d}=Sl-oftT%9umYPYJ_QMjsh)vDjNnK0D)H_t*LKmT3eR>2pDO*@ z0eW-0wS&ARr*>*>8*uE|&TgLX?ss|dH~&-T|DUMW#oYgOI=jCA|DElJOZ&ftJkG@( z4Z3yqEA~ySJ}kaQQsUv2{_UUtGcJ(~w~vaiNstqqe8sIfXh1p{d-08!rCn-sS(?wm z+^}n~B$w(MrD)D?`Q^YWQLK(~d#eRq-)>oHRYh6fiT@q++PpROR#vR?R+g--M9Jt_ zP7(pLwrLjP3HzLbbVi=%G6!f*fkgU@#4z}~+wMofuOBGlb?}FTT{r9~mmvlb&t8K? z$3>&#;L%}3s2IHcy{?~ES4}sI8q)ex{Z0MOG#-|g8lm;Ir~UGgr&j-qyp!CG5a9a! 
z|2CS0FZurkJTu1sPJ{q6&i}2Q9iRVy*x6m`e=p>5_5Wzl>xBT{r$v#DZO)%l?v;}) z-5)7@k~%?y;O9N>cby)PBOY)Sgd~y~S0q1MsAf{I2l;c~g*;7O~o0tPiZvyYFza>^4H*H6uCg-_K}6#-3| zrv!4wB!xV)W+>J&ZF)*IItQ%M%6i;ciBNlfEp_uzsz4c*z1>vRQZWswzgtelO~2-8 zvrlUGrXHov!lru0@dWTNm}9O9A{7&j5tvut!@Ea?&_{;9Qt~5fVs7b=tyY*QD=~sOvZlQNvEdr zWYaTGOF95P*}RP*)W7IcxBgk|gHb7NF9e>&|8+Vg|Nq;Y%lKalc^pSQX3#C(&rNOV z)OZ-{G?5Zg)C?^dmS2@D9ob(f&EI+L4)B~_x3$DHVywen?fgcoOKYDu7*s^L)fruA z_uu3yE*?dhcqCubmrwE&y<5twcH1StMG9W(F*oLjjxkcHH)a>R2nv9Ef%%MTwKyuH zT*?DY_oAz`_=+eSuaBaOXk>ikiPmgjkC2h#GJH3-kKMmA#M63Lg~YMS_R|b1KTpYB zuMl9}Aiqjb-9G+r<-FSKbZlip1Hw+%(FKv|5SK{vrXV(GO0VixSQNKORbyQ73CFu4 z*>c%d`gX002a0HOlWPZ=2x!8fw6<{^Xg*+9Px@!trMp;Z;voy{7R_}rz|rSv*%N+! z-=hMtcuZjvjN|)FJrdra6~91ikKx z2R~l_f@cl{N|!tCrtpC`9q&@-!}3n&pYo}p|9*(B&}+8@)43%lIFtV?+5hhDY%TTw z7VfOg1NR!vj8I6tBR7W9JleT9k2)cGT<*%8tn`MZzr z(Vfuh#;X4L1b+TsG}C_l^*fqhx&p6*-95pd)Kr;S2sF=sU9Fp-H}$thdt**!<6IX? z4I0|fT8rkj@vTN~(ngQbJDD}<&<`3rKEt( z>9p6AQGJ0oNmQp_rBJ9=veF1n(X=8|*>cX-w(_v8`ABF_DNLBpP~%ca+PYj;&l=qp z6Fg&zMi({5WVp1A%*e@>{?!Nv1GM03mfNcHNK7-kwe!IqPevYW=?a|%y|mQ!MTI7q z>5^8dLmZtj(@oP9!?>SjS}&@EhFy$o1DkFd*)djhrKfqu$@UMwd#{ZW74uM{G#@1y z0S_ZewchIorCK3w##Eg{HI9d34)aVqrD8zJonN7CSJ~$`tlR4OQZH9j&AmIXq8x`# zDQU{7o~=}s!^X|8E2js^SC}I(w#FQH+T5s9)R^mG4t;Y6h$9pm@9XtA?1$Gy8)n}v z(6%FC8hC6}3yt`evA7Jn)?AlR7+3Q*3%26xHVY>9oi^(=p9VF}!eykI^|fiRuldx+ zf4DmbFgLLO+SuA$@_&nXri}kxuzvK+;QuyvANuz{yIV{Bzr{RW{MUNhuQ%ejS@utf zHgT^atwQI|^9Az;og#B!)~KcRsmiXFE#}hW0#Sdk!~iOsT%Jy=Yn5nB_)_a11}m8E zAV$~5teE4h)UKe4M`Y-=?BRT%Jx?*zB`v!KheIKa%Q|}*UEvguF*=XuHpVVn=Q~E{ z@f^sw^?s0|ADSvYY-JT6nmRiLL#^tY3SMrW{+Y(S86;+LODpY0DWZQJW%NFepv+VCLSkM@ zRf<(mtAJM}p(^;yPpo1COLVHf|w^vML9q+hc~83#~drUF201D{xi86$A9lQ;>62&BTk%D z?8e5#A(sFK9AqM&oXbr-&6FH{zH(FwhV`LHi*SSy(W2~wc zZw}k3B)y5}r$`Jpnw5dH=g0ngb%KPLLr8M4KLrV2E1@V|xoQX|Oe&H4Zgf-!j-@VF z4a$TnKu}(s0tb|(7*!1l7335kdaned)#$2$r~nWM#EJf(fK)IpP>i+`DMs(}71O?q z-x$xv7MVD`u#VKA$xfj+y?70#`M+M~!4)}RR%shS!wI2Sq|c*K5eAfz?J0~GX#Eih zIa{%=Z&7%f1yD@DhW-sb4!=A4_e=V{-dXW2h1_Yp5=bM}X=bmJieJj^sr@U-*v}qDh2F4*z+HFtcBlazfb;iv71r6Udid zB9(S>zU0!lUoq(r<+M2w8;v1-*jf$_ybr+3#u%xaXjC+pOFqB}NPIR%SZBJ`?LEDP zl>L+H0?gv?@sfuhhQU;vKQD0d{~NHim3Ysc8)Ko7eWWeYd2YPzGP6;Cc~jIe0?b?; zjx_(ZJDm#6i#=m3=(88*YhgdT^RLwgzudg@r=l5gGK?=r!FFIKV;(#yCv!dUY0rFf z=0P4jt^A@2KHZZ(6Zr&;=I50AD@O~`ad>_%R=i^*^u$(~>F~rOwc9J2tMlz$Nl1s? 
zrlUKpKee|oV?b^|3`K&Xc7(=|#U>~-k%10Bay&2KALH?4OoXk*zb%1s`;)z-*?ABhM9=a9KA)vytC zHpCV>Xsqv0)F7R(gLpy0j!Nk4aDpm>M#2ndX5VlJab^t1a~4x%A)Es6+Q6Q}N@_NnA*x9H^lg_jXV}Gi; z=ju_lg{nfTXIBCWEBu5K_Vi9kp@uPookZb@_MCHof?wt7Xi}&%Kq>0CfwAu8!6k)u zRPp~{KkUXs4r7cW7UsYud=~}qW@!r)Yp-DO>bAYr>P#|Qs3x>}D8qKaT6m+3OJn<3 zl>Kv2j)_^~Z7=Zilq3pKOGZFI)@4_L)mjv5kd=ihvhf#d0uD!Mh~~slA36%+=xQRD zL6o5vu}H)*G!xi8Mk7d*Xrd&y5-WZupqpx*TYtim89aALCXrq6=7<$pIEBeo(7o(Z z95j*jaC9k_6GA7i1u_JCG<1?a=jdrDxEsHrLgMY*gy%(j-bbYKm#um#^d8opE0$|1LII86%Eu^H};uoNZu`g~URx?1Ks zNA#_dj}IkYb>e7o>nc8Y$jlC*g@sDLs$1`b)iTvvF}12||4<{nD2LQ4wMhlRTB!-A zrD8{^ETx!SIJ_z#m&DF8mCO@3tFB2|7-T(>fx!HxfWHs0jXcBgH+`c??iQXFzlz~{ zQkQGR-qP~td)kKu{2(1@?YG?>ZPh;kw_?u*fmH)ahEVsv^{U2Hjb~*5P&F=I08nRu zs>6R8G1l}B+|1RRO zuRk!mvp^G`bYn zUH`g5x=sITTn6`10zf4iCbqTC@S*ACCmR&al}O-k^BLornjb4GND5r&0O2OmX!J?v zvLZK8$R=I7sY+X=I&QI!uB^T)l=)hPyxWc`wGqwcmYk=)9(&KVOFGV=wQqL2ZgUQj zjkHx39gc88db3OEvn?-u*$Y<9?^PWCoZ-ORYIAj^)L7yj(#)>;1t(ce*15y;^U;-^ zcOMf1Z-E4y&%$ijw0>MR?a4yoD6a~Mo9$O5$>r*B1z0dGXuUXbM+APC%VRDVL1g=f z$YPawNBk~wiIQ?T zCQT~Wt|m~tuSll&E2deVs#8doYBbc(8f1igscj&VClw=(L`w}sCTPk{sUd61z?rzI z#Yk^a6%(kA(5atEirgvZ@RNE~qY=VhRY3sFLQRoD^>}GwsA7lv+Q-!L`9jZh`tNQ` z0B6vD=h7>A!_M74%=N2_S!8u`K>x)0T0soK&YLuXZ7Z%fDK(^UQ1er)Wpt zPDqfcaoWPAs?G5P;uMQK1$%~D6$Vsce5P=2c~v?lXsUCjS)mz3JeL~kgEulYC{`Pd z>7Fg)G>7S!L9N0M#SDCG`>-ikOtBI(wd+hgt*+s;fbv&Zf^qfLUJEp{FrO`$w8VEJ z@mig|#*c*Ru`j#+r|HKob z^+_tgxZpc7tMw8ARTW8ED7TUZC}2~H10$}=vp7Zrpr3O<5I$T%F>zNlB%NeIb9ir6 zmicT`kbmn^9=}cLm&|3;yWy=bHE&(!({yDj0q1Mqtg4zHays z*ldTbpeSx98SVpGN)L0!u1_tX{ej*{$V4HQ74^eCk%NGP~l~I_M~bwMxu_FB`ooQ7vXwb-5e7D$`(@sL>;!sK(Hs$5AsfKonz|#h*5)Fq{fd)sT8$R-_*>=a zr`DR=67l;uwISNkqXYQA*@>V68;~83{2rnO0=gfar8){v ziI?TGoWAl|POl;cDxK%_5zljenRcF24(~kYQkCbq$=)I0Q_pji**(wepyPRNm6!!z z&U00w`aD;4x#zhm<6n_VZtpZ#sd$>x`!!B;Rg*f+xk>J6uJZ6{ZcR~rno}B{<~(DX zYG9|F<~C4unmeH7H1`m~^IG>Pp50CvKFt*pHK(~M&ZoHtU!UfB2=vMAALRaEsDoTp z&o5V|Gt{E-k8ukjEqqnSw_k}qzU}4pwKuZmbE9X*`442yBKZGO{O_gwcR^3p`H!^i zpO61Xl6y}6pIX2)1HLiMfN#M6|NeUw|4(0qX2928S`_KHf@Wa4Uvf>S88C-WGf=5& znn7pp5b)_VgJkw-h8pNZGnf*y;47LzO4QH{QkO?FNSPANz`1>zK~jlk!1wFW3{sP( z8K_Af%^-P%W-!y*&hT2TA9bp6L<8@s>!3DK%;m!mG}Z5HteA3JJihhOh*{2P_f* z1y)G_=r>6ONCGfND@6iOo72b&zV;S=t7oqG&$qM#yuSV4Zs+0B{%;}AeB*yRJ+K+{ z-|p_VcmKb+vHfr<|5?am(|=yjccsGe=ZFF}i@Zdgo)|qyAf4$djFnChPj%?U3qLa0 z$%&tG$l&Yag`r%y{wtmt^KWyExDq^9!gQn?{aS!PXL`OJ1q(2c!;rGyd(aL1PdJ)UG3+IE- z=W9;8ICt=T#RtqK*~O{*ozivz^bzZj#g)TRq5bHON~>l}9BoK{yryYI@_>07k(g*+ z{UP)+W1@Mj8q{mYNYh2VIB7cFkCi54f9;C6oL37c`c=sMxONp>&Z~kG{VI44IippE zN14K=`@1)DZdI$TUSrYEp~lK{sX~RdViHy4s+m1EjP~Zuq0(HrAy|9HlqSawVcJu~ zpA8O#Ish^!=GWI94l8tZ-4UW6 z2YsBobE7Zk*Uj1>^ZM2eDTjSKG{G758RuzSE)1Habrc%sjj88%&*@} zBn)oWOeM3q<7SFHx2~DA^IX4~K+fE(m9SCWO(Si9m#T5j#wlT6z8wEo7ytV#{d{Ku zU}o_DJKMXS{eNetv$NFyT*%|Z|2`uZW6J7Fezr&=zhc6pU^;shdl5&YL9X+7iIoOm zmI%7>{zSriV!-UvY#99LCCEV)bg&&2KW7rwWhoN=2xkf=-AEDvKh9)d?i8dgZJj zB2?6mB0{CbY3wTgT~&$XQx;R{#fK~5LiZRul!#$&+#iTJrGZN8qrandk+y_qY(XCqsr) ziU(o_m_GRSf5l*!3-MP&x)uD}X}} z_5|5=G#MqqoSn=zcze-0DBr-fYY9e`C)ZNn%y#BlxoOxi#D{CC{t8#FrRrJ0xw^W7 zo=xkkz{YYlG^t9CS!sfJo>@!yR5)m56q~bfG&77Z-Vp%KoJZC@JOjkkVD&a0R~e4& zY?VBtYS+8m%GupDwXdq?iR1eArX5=A@%Gle+M>h)0Dt-3rQk%^9ecAk6Su`2`p%Cs zW1U%85KlF;uqyIeQLZ4n9Ar~V=qOZZp@Rq4)ZA9+E60;>eTk_-a=4fuB1eFIZL^Y|i0_WZ35!)twfmX61hg0vzlzaLEcqj&%eb>-K} zM5XIjCsDHTQ6e2X4aG?k<5?kkIW9;m$SskS(+MiO`rSf(#uln7N@QOS^4nO0N*niG@e>VaSH?DGhL8nL zB=wogm2)1176u|3Kw%Gi@KBIQnLAFjASlYj8Y@W? 
z($|cHuRIG8&e(N77o+?uj~*k}NpT_)_6v?xWLcD-LxMzyVZl<@8$}6_YtISkg$SLH zSn?_cgD`>UhSH;}cJM>`88PNKS#BR9x?N`iP*tKe?$!rc^T~OXNxdx1DfG7pAfzd(7PaPl1QSJkH?^`gv= z1GN!u*t}8rd0mA8F4Zo=$a3eJD9V7?I4*JwK}EsyP#f2RDP86Dp^gj}{1TxE0-ES& zv6@Vxj5uLXI`mJJFKt9g_+cau@RwAnY6+)|kFf z|C{*89-8*jIU%<;dfp_dgp2>H6+~`=RIV2G2XrZy^|92!yn6yp zh3!Y2{`Zce|J*44&nDXc zFZKVw{4;I*Z>RNdo*De#*5+=>{$r=J)c;?|dtc5n+lg1U?QQZvey1Jnnli|ESsR~M!)syvICOE*(i z_M_BNoV2z7`EJ<#T8S&#uDS`;H6dcvh6Wn#DJBLw58i{ldtY6}F-OyEI6d5QTI48% z^L;6ns!{>x)-(%t(}%eX>@6R`>8#1&*Xoas?T1NYp?X3 z>}37KnpU{V(^FcZFV|2*y0QgxDN;*M`?|k$4@L2z%Z-sJ(E-dETo71B5<<)XNvjRI zF$3Zl_`h(3QB?7wt?`aB2k-GC=4~niV)t52X^zVx(@C?g+OUZg+UW{)+XThvIh2Nc zipfcJCRjYm9{T@#5`I^2|g3-S!f2*8N{+*O&kAc9#A>7V|i_WAWC+ z_&aP_9CzFLewp(J{pm7q|CYvyRq!!<{^Rk=AOAWxYyTKe&H9(6e>Wn48}R?Tuq7<{ z{{=iV#veGVFYfv3|D<{pq*o8i3Ml=K(h}DL6BV{zjN^YqJAmUNE2j|GuZ0Z(Y996+n`T*q{5a ztsPEzJpG=ZfGA9>qwh!rq^eMjOM7Jit#LT{I@5d7;By56B6KKA0gMK=^>ua5u8`~B z$mA@u*_5ho$D)2KTj1u9P<94t=h?Y0ZAWE8@BYIM)o~9g4`*i zFaYPo`Y5`HMq+T%wq3Cs^I;M{;B@45s$8@ip+nhGxot$=aKs(bb)Nh3T2V({%3(a3 z>!klQ{nnU6%+T~ZJDj7l!$hxYyw(g$y=ywp2pDYd4t_6Hi!_w!Mp=%Lm%dPNZuPsi zHKdxNB^ta1ql!dUD0`qdFCubdHCLwPX43v7bN@(+7zTDQT4 zxdCsxVq6h}{LUf|d{qrEDDVfSh*K9rO38FH3|~gaQ-mhb=b;l|FA|6j;s(|_)uTO?rVgR7zEnmAimKyEoUIH;rI zd`}$=!A(_g-BgAPR4F+bwLC$gxqgfMhPF`OsTDN7`wsQue*b+gyVIG5rrMAc4Mo$> zvu#LAB%-6A=k`{up{UW=BZZ!bwa0o~*fOYe6!#8&RhS$$#(a21TjDSsjndD!L6S-m z9v=hNv6Br|^izD(c*t$X-&zk2F6nq51mk!x7)AUs2;fUMM8~VBbcTmcQFK?TMiS44 zJ#W@H2UE}aWZ7zZW#ngFL1i53mBQq@A}^|{>9I>xCyk9qP<^u+kTxVFLnF&>5atn@ z+_WlJO`vVw;Q!64YK?hYZPl9C@XIO_%IjMU`g7K^9n`0?XFFI|tjdb8vO8v55>B-$ ztlB^v%ffup!#}0jSfBNG+t!fNY9}s35evi03M?zbMso(7sdjPD4ZbxSBS(gDf+uF2 z)=p(aXBDj|<6G8MLY&9CPO$pgN8`W5Q$zn9e7ZC5AG7^`Zf@=P^530@+e`Xy5szb> z@yVk3=tnDv_)lYRj|FmYgv*!414+_8;H5yvWb8N@Gy#aYu15_b<+cD{AvY)O?!Z0B z!KdjopI8h`&I#Ti>c{N-Bl6w`4?rg)k->~oHpw_g3nWyxCO!2$g^r8I zN@l^tI#cvoPaOv}hA$*a$;fYuqK7D+fD%4LpIoQ1itq)BVHjTq+n537>E519v@R`6 z=)@>zc`Yf-qg4hwVS#@AfE`P7>I3#vn2=I^8$~}EAPFM}M4EvXG@uIfBHC@Q5OCL) zT17u7qsW=rt5<_32E|_>_bH|UoDJIWV9%n1qE8m_58r(^Er8RW$>g~ui9U%p7AR=@ zVv#~+KP=HFwEIG5vBH*pLZBO%3>9WfN`BPh>&T)cyt1VjP zK|%ekEi~Pcg0sz(%1(8uV6T7Qv4Zugq1$I;XWHRdai>ysK59xO7+Qnfsot@MjLfjK zv6`4I)=KC|18lH+H0PNRvg#OeErZ$B)A z%y$&PJ2jf$`KG7&op*x!LB46!y;mw~U{ZOnLW8_i!If4mkB6A8J&KZ3An*5pmf2{k z{8#y7o#(11&!yxsD(Z|3R`IW^onBu5Vu86h`Tx%5hVTD-`{B+~{=bli<#;mY%AW}n z1SFBnqIF)tc`y#Oca9;t2Mk#%D%o?((#*tTT|ZZl+0lkL{%J(*(>CShV3rnVe-=lO zuYY#cQr}T3WilYodhn@*flOOLRP@_T$A!t&x$;e9>2k3jj$R~U3fA=+r?4M!bF>o> z5=@_G>2E*-vFnv2NY5iB-|*tj@{3gQ`s%gRMckCh=ovOcngy|(k0bGhSAcDU{?I)E z7VD2v}wDyw!J33PVq>*R9@Q)*;E&1a=kUb*RpCiX)ojmtNuWCtuwW#%E6gS$wwia zX5omgT83#h4lCLSNWw9j`jy&Cz6hhpZGJk%)be&n7``2&q``h z#N8Gw;$gAhX#t}Gn-MwKijrm(s4E)?Ix*uvjkTw!9QI_5|FsMW?De3^;(LqY_Nx2M z=W*T-aZ?(omw5QP6`&m)0^ECoaQB|jpQn3N*VCtaveM6%IPpYllsoY1cO&sd}1aUf|oXXTz4G>C?1{2w1pHx50|^)REkPVJ%r<>iX6{Fw%9j4OK_UeXRQ zhED25#plS{b)0=RXs9>7`JdYpB-Fg_D?b>MCvUm7B$9WPyObL{k-V-!l~J_7{b!!g zEMJ=p{+E5q{2$)f+?neydaiH(yS25m^#8Sxr)K==N@h|1pF1W0Upw2KttJ1zh{s&9 z(~!$0JF>x|mxS6!c=emd#YBv$_+pv$uo82?Dn%<5UrO1f#mV^uowWmjQ=lu%Br=9r zXwwBO_N0I;aDGliV}&DADJQ{20K{h=<6Sf(gv}1<+7I7`Z&q zC6f@W^pPvE2#jRLR*KvIExde7o|MViu90AnxRlfBB46p6y|ofmMil>OAdvBesD_YL zoFg(ZdL1x&Fsftm;hP6tySA>HNhp(I^)&1d#G@N_WwvMsvB^E%B_^+O+6z=j5s&Q7 zBqzK*Al8VW8%5{}7g18)bw?AK(`pDqcZJ04EcWjE*Rlb;i(dXEBCW&(h&GgqxjkIE zgFl98^o(a9es6YVi47*o6EAebTW5@LxplpAT~fh;RgEJ;hL@IkJIz&@^+>x1=-yl0 zSCs(`<18=8g|)Q->FI5EW#MmP;tM%1l)rMb0aeGogQiYKBXJ}>VN(?{4z+Eekn0TC zDj9dlaY(Ma2N8^P#%mj~n>8;!cnz%`f|~5g7_@R-!KD8jUWtw?!sG>E`-Tp^wN|P@ 
zgqt|JK)b^x3xzn`U`)DHwqNu(*ZR04s5sZ#b<5Xv)%(6aBa~#;3{B9xQg*%AatwM*}xAihj7U0~|1ze);_SAr9wT*&$#^etP@LUx=iuHbjG5S$gGy zdqKNxA3VdT=$|3$~>1}UrYXPF^?w2mC8l#`N1Ft@C-N(pr2R=Od|Ed z!2kq7q1BD?jJ3l9kFM4~MptMI9-i~XDxS%GRv;rv9B5LY`9@(c8mSmGDrg~`@NQ%Z z4E>_DLPCo8Q83w7&73NZ0tpbg-8_6RG(Fb8q(R)mzV(1Xfd=WzHRFCkC ze3`;dMdU2iHVoZ%u(o!B-6O_436e>g@Iol$85KBC0X+_r7zj^}*u1u;&Scl3%s!T% zokjhRIr6P`f58vv@0Pea30070#SX)_wy&s5cxv&gbIT+I@%WyzOc|=`P(>Bygw%Ued zSGjS_JbA6k?N(8+nCrC(*A&Y&q~JZ$*(RYvf&qr&VGQ#DSb81k8&}4FZLUZix-A`} zITDhii1}NbQ1~O@r_jvj<~?-m_E0CtHr-wdOY=be*!24>i^jkx{7ZD(^9W@Ie$Uk| z9n)oBd&BphS?B*<1OT0J{&%)Fef#g7?X9K$*Fv5O9dA4jY~4&cplj4KVI_GApnw0P z4TsU2&k68*=TTN%wNdth8C!hRH|umm&5nKZ+9Bl~m3TkBmg8&GLVKB4PbCfk${?Vc zbJT79`yY=b5qG))m_X=ra_=MSiccaZ#^Y<2qd`dEMeW5xYTxw%gu zKQ%^Wmd0QXPWoedugRu&gW;*@n5o@@or3BAwztQ#1`U?3(@WF(h8SAEpK66EAcb3^T3G?c#6^jGD+n(AbO8A#Xh#|qA+fL#vUL{`t!-{1@D(uz3nZry zWKIeI?7Ap!pN$CW@{K|u3$ymh%4Qh>sL1L8g@D|cOv7P?IGbzY5Jl?l>%>EDnL8*8MBt5~ILI9h$S%4vwOjO5s;>Z7NK!LH8^jVMNU&HJNmzO1!Hi zcsYfSo55NEEGc4+3b`%JakH zXRr1TUmrXNw&nv9m#a(19mb8-t^Rm#+?A9vnS8z^`u*$}fk<2Td5O z!(+?OKS+1C z@18Po&;RqV|1ms8t}jjQu8x`Ze-F34^M4!uw>@B&ANJ3p)fSiOP2$l2%S8DIqZ8*@9}8f{ zfWDzw%ldjhO7b*YAH_ZRfk#xoP!P~h5dxqh^aF`>!f+5>JdaS-HA?z%WMRscVX6Q9 zu-(~dZzy24SyCQShK=^Ww>QoA(K)7ea4~GG>vG&c%D;ungm75-)-!%Hkb7OLY{Aeva#OE%DaDi_bSBY2E;6^`#gHzI4g?tM-Lvv z7VV5!yKsRo8TDo-PwP9nnc;vo~q@4Z0$cg zywAYlUBF{AfX~V&abapY=Y+O_-A=Ppbs*<8uZa03xF1XZb9je~Rb-2QuJ5+5pf300;6!s{jB1 diff --git a/app/fixtures/registry-index/pr/el/prelude b/app/fixtures/registry-index/pr/el/prelude index e6c7d0759..8a01e7d6c 100644 --- a/app/fixtures/registry-index/pr/el/prelude +++ b/app/fixtures/registry-index/pr/el/prelude @@ -1 +1 @@ -{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"description":"The PureScript Prelude","dependencies":{}} +{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"ref":"v6.0.1","description":"The PureScript Prelude","dependencies":{}} diff --git a/app/fixtures/registry-index/ty/pe/type-equality b/app/fixtures/registry-index/ty/pe/type-equality index 8fbce8f14..8d5fc1d6e 100644 --- a/app/fixtures/registry-index/ty/pe/type-equality +++ b/app/fixtures/registry-index/ty/pe/type-equality @@ -1 +1 @@ -{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"dependencies":{}} \ No newline at end of file +{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"ref":"v4.0.1","dependencies":{}} \ No newline at end of file diff --git a/app/fixtures/registry-storage/console-6.1.0.tar.gz b/app/fixtures/registry-storage/console-6.1.0.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..52c94f426bf4508c1de99bd0c3d2cbf94d04bd89 GIT binary patch literal 1646 zcmV-!29fz6iwFP!000001ML`HPvb_AXa9;3PjCv(7bF#TImt;DV#(O4?ZBa`cQW>d zxJ|s#dUF)D_uo6acAOA$CG>zk)boHh^Zn%=4|;X))uD#aJ~@%kdi$iNe%FtW)iZ~^ z`boXfZnTc^iTHNA+1!JZ*LQ7iZ;^_C1MCStd4iAf^er5EThOaF?2dB%x0EHXWkVLY zmDPJ|{oBor`q%4iBtz}B99oC}dHw&?0QQq$M)yx)f4<-}4S75lMsyXClNrJ*`%6V=-CW=pma`S7O!gX_&?#O;V+x>-CSCN4;r)8O?~C&RH4@#_y3X z9YUPApDS)OqnHr2;M0HOfG$;_hHuJNe48*XHUE++efO@azgt>RufiA9IA;k>M7qQ$ zETUg#ELu!yfny6XUlhIxnu-Tn)C;x5%!BZEOkajW)UV2wh|Xye(IkvfYZ3Tb0CYU2 zp^!0uKGth`tx&gOnR7Z_L@HCCTZ%_Us(sUTt-oGPdHBjR;92*76Y=%hNu#{~-vqMy 
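Aside (not part of the patch): the two registry-index fixtures above gain a "ref" field recording the git ref, for example "v6.0.1", that the version was published from. A minimal sketch of decoding such an entry, assuming a simplified, hypothetical IndexEntry record and module name (the real type in the library is the registry's Manifest, with its own codec):

module Sketch.IndexEntry where

import Data.Codec.Argonaut (JsonCodec)
import Data.Codec.Argonaut as CA
import Data.Codec.Argonaut.Record as CA.Record

-- Hypothetical, trimmed-down view of a registry-index entry, keeping only
-- the fields relevant to this change; "ref" is the newly added field.
type IndexEntry =
  { name :: String
  , version :: String
  , ref :: String
  }

indexEntryCodec :: JsonCodec IndexEntry
indexEntryCodec = CA.Record.object "IndexEntry"
  { name: CA.string
  , version: CA.string
  , ref: CA.string
  }

A fixture line could then be decoded with CA.decode indexEntryCodec, which yields Either a decode error or the record.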
diff --git a/app/fixtures/registry-storage/console-6.1.0.tar.gz b/app/fixtures/registry-storage/console-6.1.0.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..52c94f426bf4508c1de99bd0c3d2cbf94d04bd89
GIT binary patch
literal 1646
[binary patch data omitted]

literal 0
HcmV?d00001

diff --git a/app/fixtures/registry-storage/effect-4.0.0.tar.gz b/app/fixtures/registry-storage/effect-4.0.0.tar.gz
new file mode 100644
index 0000000000000000000000000000000000000000..e86537b25c2dff63cd072c7f6af387eaec1afb85
GIT binary patch
literal 6262
[binary patch data omitted]

literal 0
HcmV?d00001

diff --git a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz
index 1df21a5803b31594923bd710f97edb55cda75cc7..87d64d2c1de45193569f4d37a2a9fd11439b8400
GIT binary patch
literal 31313
[binary patch data omitted]
zR(vvj$bBox4{Or4CWos5i+_$lTqo+VvF*+Xa)SzPJ!yQ%f^P)@T$=9>Qp*YFIurs3pe#%J zQy4H7Y-#?42u8o8T6r`m^%qtsRJ3?Hh>?^%M>y$A3@Svhju7nL?=|+NQQs&5 z6S;|31k1Pu#?~`V;S#5C7x9c2#58_uh4>G4Ew}${gZ}Hxz}x(PTXXq;uWPmEzyI(g z*9|^AV`AyBXu;!zf3?R8JfFuZOaCl$%z=JmTsReCY{Lk6ZWw8v8iTU##S2|znX?LH z#~0MN7y0sX{6B})ZEE_>X-3K~R{n*)V)d>dy8or3itOy5<4~l5T#7%=(0NKe&U$a` z%sRd^=iVg81W8arOad|h>M=>qZ@=~GCmJAS$#iyxn2AL+x~r?-Rn_(QpE?KLw>^2{ zU%NVN9`>eWZ`UtUmw-*TiqK52TZT@1tsa>Aux6x~rm#MRGp^Pa>k9Gd)2DxkqprUH zqp09+oxtLLctiiMkB+zTA2;$ejK6RZo6!G9`=6Tr|H0P(&t@K5T&q||=iEotm3kI}>sUzKXG0#8;@7%9_kW_yNz**J1vRIi1V2M5&gg#A=dJ zw~NuQZ z)Q;fGD2{$3I|z~lRh7D+@J5~V2HbvdJgbhKPwKofXMifBj)aDSN`2KUMKaQcA}>PK zj4f}Fdg6){tO*d$Rs`00^Bqo;w>eFe0_G8h1MUS^4Mz&bJn&yPRITL0&rMYuZD_Bo z(!UL;Ko z_ByV`(C-@W8`?my>vS)d%Q60`&Bo@ezv)D%kkc9`E<<^p&g+9A6+q+gGXm~(SHQv6 z0U+A1)!r4oPxy8nx}i@QAOU)JvMdETQ~_0ZV#c<1QeM+md<~Rg@U*`VpTeIYM4a>< zK2{}NtClVKwuz^a{zE@))4P!a+?@YELYEL*{(l3{it+z8$&wIycS_A)gD;{@4I}XlH7wL}g&o zfZIo6Xt%nK&S#lT)#}rkxJR1m{9(DMHz-soW{mNQx3R{T+1#HS4VAJtMrvortGVaZ zJMIQ>QRUn5QMtSi>!J9KbzQSo|JVh{XufT5?94ts{>rIOSX@61vlTvtrz!$!nWqGD z#UzD1t7a(H(l$M%8l3}HX|Nu5RwC4~UrW<`lqz0kg|pp+Y6(n3>hGRYao4Z8ZT3m+ z-qfSiRoH}Q98UlbgEi)wAQG5pjDWh?6|kd#W)HDzynBqIMM9Bi$j7blaWwq4tfIf; z%w^|_tw_%Mstx>FUqg}@dtiphMf$5`UHoB&-4J=~;k7Zx%s$$fX5&m9{<#nPz|&6u zZN~rG(P54LKY6r`|GJUK@&5+v_%lALLeEagR&)>a# z{o>{8mv8^yU@d;B*U!Iw=9T)~CqniEC!SR;=+sG5(|D1~-m7Ghk{kI^t5Yt{6;WQHXZR?` z{y{}(b7ds9siKd4evZK-vS~EWFX7i1!Nx^{0nDn4qoAKJ7^0A+i!|VCMOvbuo$h{+ zQrpo3t?q*qkD?t*XzJf{LgAH(U)NoR}l#Q28b8-9jt&P@qZ|lI!%_ReJ9Pcm?iY#Y=;kbD9 zb_o##!1!0VZM9rAluQnVl%&-9T_DtYz6r8*Unz3Fnxh~Mi4DwDWnyj?IE!w3bEuCW z=MK>9Qa8FFw7bZl$Bncy>jc7%^XZ#A`&_+g=~|C_7n632oEymi;_ON5*WZ1Vbb3Q& zt+$tI;ML7y@IF&rW~!Ua8(et?hAe#7sjoEc66@b(2Kx)rd2EJwo+P{~$(a**v{pqT z0zY(#6SWldfJ2@xWfoAl?uD?R@+8ba4E;$!?G4PUh}}vJEtLH_{l?0wt4)A5sUl*cpbo)$IK5_%zUee@-vb5;pJK6Mz4*lKwjy9Q*cvCx`pn``?W` zHo;~Ox<&kX$tXUaqfa}?~_Y8X7}zcLJPOFPUyY1n+voZ2FrOkq`n)~@`PIm zrDzI8NEf0#5dKvuLmUh8B&2=o02QG-c@*1D7~VQDWd$msf}?}8lj4n&JB7zk{S@pd4m!e? 
zYaDU|6!QS`H4k63@l{!M6#93>oQq(belXiQr_NB}IYy$l_V+I$uWbGn`*|6$@{w^)LX}7z&oI zw$N=A)W&`+VBT9D!W%+=M~#%Fq1N}O!BAY^%jh=_uFMN|C>Wj?YG;?iX0?vDGvg07 zGxSj1U}`iEuM@k2K@<*0?Ev(^p zgQKne*Cw9k@xK$(FM3w+e+P#LzWo3AWN^IY|2Fa1JlXRP&8A<^KiI}v&p+IAUjuC1 zUwCFIuIu>+^mR>2d0&0yF>{WAmsf4w@&zh4WG()qto>^S70*As&cCs}MjMj~0PKAW z)WW(8q#PE69kQ_@z>Ow0tD%b3OULSfo(0>nM1l846Bw+igVtE_pesKP(50RAIUYb- ztwa%xxdi<1ONp4 zX2Pmn&p$l&^SsQe*6++)wq`-?+tv(hRODvy^H3}U$zobPYv_%u((4WL=~{n^F#H{4 z?a!0!6d3nEL3drNx2`UU2k2udfMVqij!X(@>_3J-Pa+KfUhm&Gz#06XW>CYA>CauL z82^Of{w{yz831`FpL;@XpN&4v`~RJoe$lgH|35i6^!dM|gQG3~w~@!?|17>w z7`}x6Am>Iuvb>TnorjX7@(-lIz+@aa87$+k)RVyb-5ID-RGYh@@Se&(kWT|kQtE|w}^n5ORkkp|J*yiH`s?)$}Rd8@&{MKvg$iN z=-@?C6*>bF<=2!`XL(!18M^3ZIB-l!PPreW+#dLyx#}9ZBI^=6Oj0rp{3o&i5*1ZtikzXN@fRmvI^= z_I(6iqCjKS#h)V*Nj@}+LT2ezbc7jTo(_j9qzf%e=)|OC;RHnz!8=?XedZl@EX}ER z$Xzqhwso5n5?a(LV6Hq+zT;kWNHIgAIwh5H7?g9Y z(WI23TAeaXsY)q%@yZ!u)+woU7NMMD8V03;VyEQ8UnP0~l=!O&`p{NFhyPMz%OVI& zwB!$hry;1}B(b5uhS{dss7?#5%-B^mg-ju&vNkIQr*fS^#7$Q|>UPsTyEvDjF%BH9 zxouDLT4}}Vu*oB4l|PLoEapes;b8&utFfqNio`sTY}({+VQ@W)(DXe$NQGy-eY?L?E!F4Euo(`3zWqqQ9d*)>gY1-;V zy4t9+W=N^a+IqL9_gV}ZO}YA8XqW&E(aP0zT8g-}D&IBjdjQAQ$@Lqs$_CLlQ>^Lq z!c6cKZFLpv=+*`r0+e%sSNfgPq{5dW%1zT`R`!ArvJNtu+{(F$m7UAVR!y>a#|#bBmKN`q@Sa4#=vkv`;$0dC4Sq`JRECYV{kOB_kF{0-ZE0sw=%nY-rc!WvTU-U22_vv{uP2+xE zoIb?2hp$jNk-yV{hWk4;eA9~-WG#8Dl9-~j^qOSW`&vl4|JWAdUPn)({per0rL%dS z7jW=C#6$P%=z*8`;JVu+_y*(v{X0Vr#lhFOJL`Vusx`O{AoPDhmTq*P{>9%-xMjOGoWs zJSF(2vG@?NFiV9`C;UF2ENaFdw>2jYij4iB7WFVMHABXrQLK z4}5X;9T=g6w2+k*N=+A71d2*pEQ-2)18xE5NCN+)lXxC6^SmsS#e)Jpj*&g&0M@Pz zikP2>-gC5Po$~nt_`JI|MQJiTeu-YKfUlNnJ&TkciJDRL9E;R>JR*TAS#4x_Mn)yZ z8Y4mhEr;$*3kkwpi2<{gYn!I(Q*y*BVq}H9d7Wouk|g5q7pU;VmMqGUrKgHP|2>5x ze`E^(*D|kCV8$O73ASpQ6b6FT6ctY#TSX#qg55Jw)aJmKjXsc8_rqu`Q^I>lc^g|&XXN-;TNm7y)Q*k!3!I*H;gd%z(+k(|oKP<G~!?6c{N*z4?4*j1k7y(Ma0{LFGy!q2rI23C%C*z6BtO(~{Ov z&~ymr(G1amiRxMpNn|Fmcu?kAXxsGz$IL|ofr{lbB;ChrgQ`fWLrPq4mso3Mn>UHb zU0SXh@)!+>FurbfuYx809;2{kC7~3T?>TCpTO>0Pol4YPyNs`;jZJ{4>51t-y2Xf3 zq3e3e6IODdFI41sZX95q<*(HV$tN3W>UTg98D?^!e^vv`d z%~x(`kK$@$gyVP&Uua-C5GSmRb8Nn>%re^5lf=bN)|E2!!qtVYLn9burUPJ07>%rB zLPLz_uxHR|qwwuVf5Q>BXaHT2YBIIAT+v)SsjfS1WEX z#LHxE6&^9w8E%RGY~kqRND!qm|1J}~HAgs45*uPRsU9{$V0lPEWBGiT}`vgnx^%OUBUi6G^o{zAKwLz7E5wtJB^_g7) zQ2`W+oX$X_5K$$E3Xq)hG$E=hfEmxZGA$?^cZdMz6W+zw+<41jrf$925$TjI-fXM3 z2$=^zk#Cjf(IYB>m&&$;->y;J5c|F6<1sWsGr?mhVkyC3D1xA-imaPur!Mc3S*pW} ziGi+&#W+$C-&*YV_iMLuMy%)DIfk-vKf3pl;jFK?ck5aGydrCAxVcomKlQP=2Z+X~ zo}hPhUP^nkLN0S1+!7ef@Wq`!b3AiLEJv1(J0x!#lirEq0%+nSa}3}P*~Um<%Km3_ zVtc@78U%&HO==hd>-$j{VmS-LSY!CxZ$7%@T0k&nFG~+2Y2bQdxd!R586gX++*XhW z3sF-Ny3!U%kqpN1iz4)&@cHYwMR_b7I&gARq1YyQfu77EL315pZ(&5G1YEczByai) zkh?!vq3Ip(=+S@Vmx!@YQBEI{^LLL0ixo1yM-X(-b)FPbuOh(9Bw3Uk!8t}DT%MgK zsAlnD2?UbS#^{J*V%?<(F|Hag@fa>fU5n*SD^0^AU#sd{L}Bw31_v4~tq7y_7_&J(*;sDU~bw%`&81X`aDI7GN%wsAmk zjw223p(tA~iCdUAEKNeGh7v16S2`e>Xjz-j;}Qa9C!!iABzC5Lr5kIz|} zK;jmaizHIerCip>vNU?NA_5Se7Z(3K9T#ztV&y2LPU2}#35__{+AG}BsUl7l@w-fc z@Z-6~emtn3NI)oKgiVPBUTu;p2C3r_j6KhzM;95U7wkaD;xl+}MkULY<$(fKmvcJ<=_N}&G|P*GKbHy8{42UiH|8vv&#^huUP9cG!7_f0Ylx55*sZc)rxzEF;_^*kC& zz8RUJ!N2Qv%GYxLLL4L!=&(VJR!k+a)BZ9#E%N30zh5gg0KFf52{U$HA(2|X%zmiy z$$LnWOl4@+A-!$;ms>G+1c6Rd{heC%s&d%80@Z(CC@U@)Ybcn>RlG(?_G_H_?E|N@ zJ!tgSYX$7njU9>K*6K_ZDq7h^!)i2NS zX2x!INsR(#7c2lB({R7%5k6wE%a?;I=9VDYc^BP}2Hj||4@)0On0?by$r7?M>smKb za;2hMJ&aEA!bE#X%ApPMWJ3DQuxOEy-p6BCCuu5@ibkCbgF2$GAP z_pX0sMmNs_gyz^IG;4H%5S>tX7M&_bQScY1^l5ax6>Zd?HvS(bkkC6{a?+nI~uO32O8+;eZl z#eYnHMx3xt^ll_hiyS{xly7U3a?@wU`rkzS_v6FCi7)><96Z{t|4lqK`Cnex8UQ+_ z7cF9z2X>x(`LwH29NFKKFXY2To=)`-@+_}Pn(yd0USx?p7GR+B<5As**_0 
z;8hZQ)ae31I|vw~;3h8mw(1{DBhEa!Kt&L6}%_Y7>j59~7#RtLmeS zI6MOl5#D}HeGn;GE-J>!Pi-#n4Gm=~QR6T-kl7?aKu4wc&ae3ERK<~#5wl!6;O7>R zlIcjT4|qgnfK7tAy<-MUW;aa+9zeRUvst53vCY%IO}4L~s(&2xqJv&^_;UnnCi;mS zhGfxRlo<-xRPVZuz}A>18^;lD3&(c(Su6qpXdS%l7%tJugpL2=seOCMm2f~iRb!{~ z;AuB1x=vpY)f%jq)*dnx0M|^ zI^3zkTpi1V%__SrSf)R|RuZ5e^=9&O=L>027qyYj=kbJ8=PU6ah5Hm=jlWM4-cC@_ zU%^(2x-&F4V0DEeM{Q$exKdx|Fh!I3%-MjrDQH+QtCCE##aT&US(1LoG#??49xFkr z#kg9#i^$4w+AUzKn5GkZ>sT@IiTxeoU_6KE2YfxxeZ&ozW6l$yqz-na%zf?90Lv88eR9NZEN8;iE$i2HD)qa7tTQz(eh{+SilL!i-`tl1!Z>)=tgpJ(wqFaY z;WulIQZ1_{R85Y9^NLE&W@h&PG~-WKT}ZZO7G=cQ|8GyXT;GkK2L4YxvfoV*803d; z7XRhw@bGXO|78=;it&GwoZnssu!8?TJUQ|8|4$%w%l~iUvH5>H3`;El3~}aF&YXIY zvr`A4Wxt=%FE^NqBQa9PNQ>ABSTnTWERGDj&5L+LeH#jPgL#Q9+qJ&#uGtmJoXYDELR5m?0AvbiLqdtd ziBpGFs}MSbU)*CnIm!%Ahg!@WLYRZ(prm1!e(pw3p7NI%f9-a6t(*k`q@0D7bF6aS zXh?LEmBj@$)(V5e9IkrvnrnyHX>4*ZrcPP^giTTjeh1vDoopA9P4Kkg8)s^y&9q`5 z9l8Hi!nX%=AT7x~3Hl+@tHO?=)1mk~I30PNlB$^#X2vj+$<@c`XJQM#1Ff}h;FWDi7xJyirgyIdA zwJ+EW`n|C=BxcbOhjYsxkel$@kET}#3v?{=E6qE zb+_k#1O4av-@hdraC7?q7|F#g{l9@{z45Q8US1Re-n>Q|GR^( z*nx0Z$DO7Cj*5(yB$tG*5&1PP!C?z};y$LzyRFedX0smop-CyUI?8k%9K7tcfRZm> za6n0u^7gHt@{+-OkteMT(6C>czy3KbTfV-|E9+xD0HE7|aNJ3+ku5POvAHgl@3gz& zlkNb%a0%w&K4{6n-O!SOdcyy@qCad(r8d+ix}ZO4T7o8W59tiEncqYDQ&k0e$bi1G z!!4rMc2lw=6P99U_+7Xj1fXJm@rrW?uUC8udA;J){Z47S0P1gEbyp2XAmXP#D!qm= zaUy~GGq>rEgP zpVk63L=MoJB5R{xRUe1`+qZXyK7g{($GN*U`f`4KTpMIw-?|~TUndtbYkx!AzGTd< zqLH_zM*QJ={U+mq`*H2X1@{RW`;AZl4K1Z*e)DD`VeoOy#692>H1p3wsWdc`cAlFz zGs{160m4RgH;uFbUa7`88>fV}{&f7`TKxYl%>Y*L|NDn0wfG+g`&<9-8+jc4|8btr zX-l#7|Hn;%A;x}S2%rSN`U;4_rl=DLo|f`QgGUE=t>AGbInW*8e;RLX1H5Ow^?y4R zfcEu2I5z92uhIA8^pJUjJ8L z@vT0FX4o;2fnD%cl;GLSdZ{YwMJlvXuv+UCE4N+&h4z5Mw+bih*%;he@Zg?ASNq{c zafF*Gn0icM?fGvk?x^Q+RizWTiw0eMTy)(vtC&(LiC;@hEOc*&C;dp_J@UO~<=M4q3o7hGK;-xNB4_>8H zRdEe6e}W&ehV^V*=Eb=er9co?l@9};bQ6Xau$R*WhOpAULo8r%5K8P9XFCeN@N5<= zT~%&zGlVt-$ZI$`tPr@;k^7z7V{&n`sP%5vaX9PP2WY;#gXxs>K~1_N$4-PP`ef=f zhAhlExP>k_n~jvINaIDoY^KEhRi%EVhDCLS_7mad+|;SCK5lBptb*fyx`+-L2J8V} zeWlV47y_jI_M0UJJ9ALBf*yNq@dF^!q~Hjl5l;n*XK9+nS|!|JI9FA}hMEdgjT&mg zO&T;*q*f!IDh#oQq!W^uFvNXRAt=8Uf_zILiG-SyLZ~?@h?;(F!c3cw)v+5RW9ADC zm1?quHP%7rUd8oQHr8l(u6!MtIe2O&j-3JK_^FvZ_7@nDQ$2ym1vLFauLIr2wrORO zfau(faBOuaD-}5Xsgm50P^mO->P7XrdZ&RDW1I#oO=Q=m*v}z)K*xdBv7r4%hT=l= zDwws!g$~6b?dmmCeN0hVM}{J4h)Av1mmyXfbE>fzx};BIeqN`V>}iUL?HNpI2Jog~ ztu<@+F$G1Q63DD%IconYrE)Gg&8CA(Od0P4$m0>CVNQ25a`AS5c9Wf`nX{eQ0)zaEqWk<-aMYs+`#pbS}z}ULS8%_W( z&bC!FP{t3X9fY_}SZ!~WIb)#WYeSDDg}RWJVB1~!G46IZ9cf$885-_UmEbn|6P0>2 zF!OcH?v|4*5VhN%k$x}Hq4y8iOD(5(xR?C3=y#Ml^Zx<+s-1`lcbB!DA%eP}fXKUT zciegV8fi{{f7$UiUZ-WGal?ZSl;wZ_QEw(kL)}s}^h#Cz@y5H6QK&WSa|?XEr$PSz zD$nwCdN<~uA7lS{u)o#+-pJEF{v7)4BDviK(2D!tgQElA|NF_w@z(xxBaeOm!-L*e zgT9X9w;djP5b(3k-{9v!T~Tl)hMUM{mlDzlWAkDV7ZnED^WqTsjtn%qS7=zI z->%U^fqv^;MfXwDl}dCL$>O}CO>8>hml^VFk2D-B5s^IXk3t{h5Wi$+JwL!swS8|>D?ojL#pGyT#dy{=Yz*|pw zmDxSvo1o+AZk1RCUru#%2NXKdRb4f+`d=&`1g-)7FF~XEUo30+Uv6@KtGd+L&5d%; zZIy@5Y->{M^P19TUHNgKLbqf0T+oSRmSJ$CAUWbs8pl?==TjM z09BKs0Jupm1)%aU1z=5UO#x6EQUE+-x@ursC;%I%q5vGwLIHRP;Tf-c6wiC73{wCK zi3SQl6=w>-gRj?$%C&+5@JB>Z04kk-j7^W26VImr+GTE6Q&4Vc@M|xR-NUTpU-mq#b-JG>Re;uRp2bguX2{-PyP}(%kd+f<@BTV zEXN%FSx%+GXSvQkAmHt1xn%av@&@R1mYWi*;H$G-N;I71QkQp@OPShP&bj@wTvF*Q z$KN+O%cUlLmQ$0wvt07XS#GANImiNgD=?t}KYKOQ%h{$i}@b)S(hqtx7 zKKDkpeLm`GKmVU+)BKa4|AUiS{HHDdzo{p1{^OuGo&PID^eQwd>_Pb=FV4@>a^b`b z{TqfG8BHh-3GYBv8e=G34utaH>4<%BNDpsFw+3&bqxqYMkpwnaxjo|k_F8nCsbK+$gK3bq1)tA)N|*lNmS2qn;^sXyER90Tq^84Qy{kZ%v7*fIHqJk0S|}@=bl%k zYSS$AiPI|Idv)b^6cZ z`z?llq1gIfbe>-(1+)CiSp4hRVDu)qV_f_(B+DTm!)y1+ryRm`a^S*>%d||oHL|la 
z9C{J|o-Zf!bPDC(kb($c){8n1y8*Ja^TaO;$qDO~s9)?|uOZx^gcVc?sjSllH;&x8*IZhQ3U?eQia4uWE_~MJ`HIvlrJW1BG$?-$JNW>Fl zC+VN|qtVN(f&m-J(i?T7iMW4cZ%H~*;XJ;45RcQT3TbUTEm2LXBl|E!)jjG5hE1XU zpb1wo!sMcZKKIuoUqX+LA?N*(+bibFzKrTYt;}9ynSm1t{B4fQv`A;y*v)`f3`29e z%+VQ5S}oOJqTU$L@CXBu#}N_sL}JZZVdF+N`}{<@{B^uY072dvopE`Y@gyJT7s1+L zCMAMC^_wzIlcuXhGD{0s*}7Cj-R_WXBb~Q|B@dkgjZ$u`%r>@&o(vUvxbUe#pd%MG zT2s0AlR}PLKTSn?Zuq#Q-e65>om9UY`L9~}7nxOZY}2*AqdtLxPK%Tr(1Rs0*9|;L zsNu1@TPw>^+D*|8Cxoc!AAhcuUhM+UJ}>L8?^LBBmv_2e`-E+B|Hd0S^CN28M(=1< znaoi|YmyXI3==$Cl0*~*rRf4DbM%7emD)AES%=e~_caPl8D)9)AWKepKPU2`$Je|m zZ*?UJlx1CId9i@)QL01_-&sPp#Gd@;<;8hv>?F{@fozImgaJ5T_$Y*bv z&C~bEUTfvuKKs{Ir{6?-af{-)76W1Zc@-sDzC1m{&b%zr3PnI8YWT>;r#ho1oELbC zRM+fGm-7MF%Dpkd9e9$=B~+~X8g(9IUD&D9e$qFI8J=x+4m}TK3zUJ2E81f&X^&Bc z_VFB!x-7v1i*~1Uo+3^#9kL7bmZFXru2g&jN8jQe;>qQFu^eOc&bsV*Ru_*Y=O`F>T}>*=fPpu{x%ZFu{)f}i0T)8v+cpETHp=>XB7Tf?5JKGMyD9` zTHiL%HLuP^3FqVq29Uswbl+?^Div@UAf=%bMsO&Mq67SZ+2=_WUEQOaqRx5rFgobs zsRBtz%iNP{XD06e;~1%y9HYqM+7d4e zMC>>Bp5?u9!F|I8(CsYy=Uywt4`R8KZnm-M}Sa zsK3Il0L9~!{nc75PNU?2{lmGSYCZ4m#yY~h2KmqH7>>z!{&n=-9viJ2>3x|8L~61vkM#SArSYq&faJDFIMs*x&8k4xjU^Ars3D7V-HxlH22? zx=fNR5SSat&G*1C%PFq)=z!|si09Yz8NQ5!_on?`!8LM&vlwYIMbT&7Fb&zEyJ z5vg37WpLor3Fx*$bE~WobfV2q${K^(hj6q9rTav^0#H9>=b#$;iS%kQGm1@fw?k`=^_ZcX`Z^0v;_*_BUAf3|=wv328Bmfz#S_3CJL0hkB*Eec zl)uRM+e42KgtW#vkS`0N|CR;mi}rV@f5J04psK00G`&~S*K7fWzS@Uc<)cq7+M(E~ z<>db;YAFvh^8Y;jBiyPq?Ek=&-;wjbfB6{ye}`xizV-jNfv08s??(CaUsl`yojeNo z|JmyQZ{)Ghe;D*;<9`!k5?iH#Ws#R3Whf?_*PK>>1q=R@xPW6iyj>qqgaKpnq5{Y!y@L zlAc|Iqg+ZiURWfgRjMoOyv*ht^p}26uwOq9h4>AZEjXMs{Roz&V9`zWzLTP;XBB<- zwyBDDU7dXxefu`pHvtvgb^51G^VT4-Dtb0?Ws|r}!d-`?Ttv_F|01HHp^xw(p$}%F zAHk-ERe80tmfgHHlz2@m*9Ze0o*5w|ZT;RLYAu?*1|^eeV8^v*WMn^4NUW>$E5!m@ zzkpXmEMKl@ZJl3iU@f?<-j+P59OkyxbO{72Kd$f0>p=7Qf6Jh+*0YNLe^j&oJK8_m z>i=%!xrhsqK*?OgIB}63LUmp>1ZYh@cmZ0?r_)M(3VggW-vVE+?XUX#RrCbb#lJ># z*g;SzKOE9)@&R5?tH-*)@9L-g_`NAuY+g)7Vl54>b;9FXftOjee&*M7g|vQtzIWb_3ygBb_Y%6v#?Q74c7gtsrj z$(CF5$rQSOU(c-_1BY9S)6v6KVCk;;JxcK2Lzj8lRN}$FD^klba1pWQ_|4aU%l>~i z%76c|GXC$;qer#<|7ffKx0%P||17@mdmUI;adkcOWnh2emw8f_Z_nb4UOG>@PU82) z^AGZ2$X`y2B(9Relz3W8H-(-`A+8XqCek5Hx#%LkQrIn6=}=_K^gAB&O;eekX3IrQ zTT#0bCBt%gE(agYloId!SSyiXy{qyH9S~u^y>rwS zr~;$KCbI1kxP_8c#W=7r@(l(AS}M6JiEnV&t~mHGx+!>K2jP8|0krGIBB=`a3TFXl z7>h`K%(=0$3_lI=lPXIR2vh-$Qwzu~>uad9#_P9HlJdqw1ofXu}0|enj zxW=|dqTi=DBf0%xa6AZ8))!(W795QUN$p3kwn_Yz@de_P85W#1|BuT`2r ze(NPtX(#8mTpIT)CLNNJHYXCJqDnVwxWR$F2Ew*AuoA&F#<*<`Q-&J;n?3%$UTbFzCOl6Bl}2Or1RQ%+htax{_V%2 zjvBygbvV-eH|}&Qv@Z6Hv7k>)xf%AWJO4&)@XM_`e*(=Q#e;ct6q(6bMo;R=Tn~I2 zns3fLD5IyrFRI|v;f@NlLAb$aUgkPOeE-PMQF_|k!YuOkvQS|YszRz~cTim(Zw%%XHdgJFnD!Ji*h!?GK6K6jGJYuY`La@HfRcXqfw3N!(G{to zDf=(j4@W8PI!}k}eVjOOG2b8oE=yaWSbqnLhuii>Tk6Yfp_)+bxDML|YvGMDE{*MD zkrxAgaGZ@05E*t@3;aAMi9*to5fG3~*_B~Uk8I5lpF48f3wPSV#LJq-nSLmI*-)(Dz@hOg>g0B5JkVtu>vtu{aq$d5odF7)e;P4Xdxx?C zXxsfC&F(w71YD*6adP6{{~aG4A8qgdHt~e6!rW`6p4fk`y!TqyO@^0`{Q+|p`0Zc% z5FVpw3VHcFv(Gy;7dR^tG%7C36iq1dWd#%8vX?NNp<#&}O8C3CBfMSF2K9#Rx8xMY z(pX4{(YKAoTmeD7!|a+WzNo zFsQ}<9Bl1>HuE$sexnEGruKAdI!kq$@Lgmz$?xgxTHb`dG5vQIU*0M^6}d?4nUATbP#l{34m+l?CQNQW4JZ;%m{E(E>(!#XC{?sgG@W zXO45H)?=>CskSi% z_6Qj!km0+w-FE-V5V!TNGKsdz0>25BpQBeE(G40^qo};j(A`%0_e2c3y-sJz9}bAz z97Q9mLtH})Zwg|AT6$Ht!lETjsv6_KCmioUvPH7hzKI$Dws4+LfS9)|udz?jpBFj| z(+gze-`zXstal&Aw_uuLMHgo3azLWva91xUXKYWMR^-2J%u>MRg6F!Z!s#Pi8)%!Q z(Af7VI~CD(@Kym23z5}crjz7FkuTJola^fcy_t^cRl}$Dl++8&g)xje?_jMRbSVmp zz2_JdsKocPx3Ct(QBOU6X!fV*VBl6_t|CkxgL@SfeS1bB-XtmPG&Rm|T0wW1g$2Fj z3n?D(S@f&ju&<+^=zh11erF&%ktOND5+svCE4TF#pd#?=*YKvm7s{er{y>#|r(O$P 
zwPkrgn0)S|Z2Nq~(?tKlE_P=Q0FWNKq5c2K@yXWye-lsJ_}`TSfED!rpl<(hGC0}N z{~LK6`X2@U%7}H1upgFrzLMZdQcnlrVF|KjI?M@{@jEAo4K=@@jcX5x$F67fFSo3o zeee&*md!?ySYw8a>oz_SFjHTDz2vCW*#m9Sk=o1rt+WHMaYHqPX$Sig13 z&%wp9e`oC44B#2PRmSf+E>*`aMp|uMH;wGv8(I)HYH#S$HO$%> z_cct~?UX7FL z{r}Dkz*p@5`-cZ!{HMXu!8ZQWW*$ez3tThKonhTryV*?p;a>Dhd4_5|)2WXz)TZ4=RwjxjzVOAkr>KWJ;3`AfZro#fTWJk*Fw$z+N+zzty2PjwQlJtC2P_!L+(E5hDdgzG)|7oE zlbW_TgKIW2&EPmo_xyO9*>O`9-zE?023b+`wU7&YPd7LyMPJ8#x&hegJbi6n>GjSEdrIe= zzVfeC`Xk(?+f5Y(VCCx^_~{DMV1L0Mumc60sNbDm^c$%JBSy9^u+-V41uQ<-n|Gdo z!t&=P^@2|W{jb&d?&1;v>EWBn|BoIW4z}|DO*|{d{{`x(-i{Sq!T${&)%d@YM@L)z zzl}V>z>R$x^iLD{+b9S#oibvDUm)h^m+>|5F!6f{fXCX=`T075zZqc)?Z-bU%(tYP zJAD;knm-`DCcoOEadQ?g2)C3A#@@CH47h^UNZUHQwzHzHYpel{tn4x?>yRkgx=c8D zwyw|BxYnioP}wTK7a?0GV_f2;7uj%uHaRTF|D-rszO)Lc0G;v(Cb$cxc;@et5>hp zxd^s)l(ySoh0c23(EUqc-5B>?SS2@I4Qt%O_rvz22sUVqn_}IrkGw2)Th`J9TUTHu z;w@SO8sS#&=tgxwPKBG^XDc$TcBO5Fe-7MhOK{P(ueME_oA0-usdG)Pxc#oLIZMAY zZ|VPd&&u=vyZE;3-)jDE-#`CPj<@!|n|Z?LKMwlmX8%;k>b3YkLYUY;`bFFy|Ee>8 zl#aX~|7c_VD2K=Saj78VXR;3ncq`kdGP_(~6Le(ytP-o>3(KcUggHJ{m&@>}GCsd2 zxjlAIr6RXSzi+_oshSk8$4zosJ(Y(!J!@WSMvu~v&*M4JRRi0?<=H?LljndI9?wGv zSvcLJ$j&)sn88y>H1Kz-IJ0*ie9hhIAut4h-}`ckzhi&+AAi2B+W+rD32epwzkhV# z%YTm#P7b&G|0W(s2Mhj%)gV4lY)!1PNfWE|HL=P;bU!K&nzX#Q26hTG3`3c0Y~Rr#;W$2!k-L!K+iqhI@s3|8^at2nlD{ojES*oyUke0+TDuYa=t*slLi z{{-njOYY+ed}1Zm!9WfYupt=q+{(Q|q$==7 z=bB!-b|``Gv&0UG96L=z|9^afpl?0&;5WjlGP3<8mX)!c=1n-s*it!q#{P6Qd;}*! zjhn!Ua#^a67X{2h`HL^Uh+Z?1%*rxmUOSPJQE>+2NMGg>dXJfs+|rMUGgmLhGk<1K zMRFP!Q*sWI&m=U33blZGH+`(EOsselwocMmxR%SpIMP+_byIQ;sKhdeXr|gVPGG3T zczw{1Msy*7zJ~svGO=#dw5vpPc>}W~U69@#6O^2V-2G5#aawc$;E)8hq0Qq}z}|75 zA^jQkcf75_65YmHOjK9FE23ld9i0|G4N=S{K!Snpg-H+)Lx;)UnWtj3_jVZ8o`JO!Wl=(^{e-uBBj+oJl zir{q`eGdWC+Aps3_$mY}tR`5x2S}T)pm)~P6$ULNLRy2mQT74Vq&w*8<~by?<1o5r z0ocO-6?f$W0QsRC>i-=d9&PpiHu0<)|GV%3u!8?TI@&+-#{bd&!8ZQmMxKxX*B{yk zfMv$xUxZKVN+0^;|Cbawe>G{ZmhAH>!;^~wf7u4FS^PmZh*V^@9FP3u;wseKgf==}t z`=UW@rf@wc=%kv`OtI2~0{7F6Jt$ByUsGmk)MRR1sQ@_1_KV-WH%5tyc_>kukCKdl zhY_V(zncf8S<$Cus;;BvGrf(Dj~S49=i3F^`urp7wtBwS%Ymx7ch^<&;gB9d6`!KX za|Jf;X8Jva?Y_bRL+U(Er>%`T#VolQ<|xO!J>m$(#`|VH4*TIv(T3S~3$z33RMTmT zyAYh~%3a9%&4R7?rpF?6R=a76r+A zgW}d36t>>5j?HNh`~frsiuIhkSl(9ROgw74eqN0Jz(+ZBadid0$rp=d#d-;gJj1(8 zR3a@j7IN(zh4c#09}6(U_oIn&&N9Xq(J7!ZFG&l6vXcjm`=p?YdH>FQL2PBVkRh)x z_sQ!E!i=@Ym{&HEZpXBvx`(Ll395UD>RuzNcoDxRnC{WF{Ln_h9gb;=Y7gUOg?r1{c#A6;^@=b{YD>798*MF9)|b>FHSjfEodAmK5W0umC`JWLbKa&bgOlBux8c zB#dJoaelw|Ie(o6cg#gw4cHSZN!x@p=DLlH`OqqWU7Hr^@@y6M4_D;8M%+|*a$1Nl_Bnv zE`+X~eXGkUhDg|X_arZ4!&zPNp~A`57b$A~nGgp{S43AXZ&q>AnAlqQEOh`ux!)5VCl$Mz1*N8GY#m+*gfMUn`T;xRs$L| zZ0_2bR(Srj>DLqir&=DAkI}A~u2ikL|G6}4ls-)39^MeY_ii+DyeZVsIeVYEbD9sX zPu)5GLK47uPTsaFq;1WIt<$!KGxrXgTH783GRIZFo5r@#2Agc{-N=8MtE#OP*#{!GB&0%RlzB&ienv@t+P4PW=79i=3Pz`*)^uKGb9P7KHL{+sAn0!LxVbZob|@{4+UU~!_EX130N{UW z549D}HLQIXa~i+aU@Kfz%9d7WSqbV+23xRR%q{L|N3Bu$gQNSHdSmh#eT`tw;DW$1 zk`QACNZRd?gBhDr*@gdgex%IN&-fAZc9ruPm)}!*q_W6#(kdPhn;3HDCa61V)Y@-O zZOGeVzXdbF;!(bIe!fBIF(?tzzN6$&>@loiebqb6ZL8f$U)sQ~{Dq}lvV}zCdLSKh zN_#W3YWdvfS2?f7 zPB){r%l;^siLl>&T=v)cnm^T(5Ux z>Ye0Md;c_s>_^*C%7-p`nHqfOR zXbR6Eo}jF$wfoa9LNEJpg3aSe(xEmz{PCy#2R~g6Ui<_D4T$~x@U#~_d>AF=91f@l z)3n5B>JQ)rl~^7C9#xUg9~8-Hazy~Fxz>wPGcy8{MR)V=?$>_OPgatJGI!8pKes#I zV7l$rvW%S!<9ddflVMiRkU1H$dWOQuP}DP2PKK(U;ljyqp)>q0e{ne9qbwhKVer^W zOcv)A%J1L9nw>{qeHE2IzW+IT3Uh0JSHC`i*Z)Z~?WdoqOKpD6RtnH)Z!C#x;=1sDyo0GJ>dW+KUTZgKk0BMB4u9_7w}jL_HWdH!_~@h-|Mz6;|9>-&FaNXTep;Sa zx`I7S=c~vUqzt6KE*XpU7mLuY-7M7`T%z>$k44b%#? 
zyK{7_MXIvwE=(MP!I5&26cwqMDy^zGI$JK{4Cs$|iiC;wxMm>M6uKen6|2lLEcV~Vf(DGII7WYP$z)j(GJjbX z$q(e4nF(laPJ(K*F)BZc8gGfhB?3nZw%FU4>{hB(!d&rdDH01u?CN+i)=HU}(+!x3 zDJYlRMJ3B!uiU@q`80Cmogc15mYpC=Zz&erGieI%$Z3kLSFSYEdLMZzDefJ~uIcbAhSC`Yt9Mt3VQC_Xd zS=Zk5Vc7bcc8B>DtGK`wdKQ|0b*+8i_&kTI>m5&rMW_DqH-%&cUNm)h31LU z)4WxO)>Jd^tXF%Kcw5D^^?Da`s<@@AHWRnSY=7sA?rmwI(OVBYKlFrxjvjh*2`$Wn z{B&@&=C)))0mqD?66Tn395Qw+vZCv`W&B6GhG^pdXvW>q4ZuqMFJSgO{_pr?8~<}7 z&%=k2!vfN{d-5#aR=&xTV&dwB#l@*3(1Ldshkfo=ZJ%|Y_Vs@^5}+09e=w-Ye~t%7 z+xSl#c>;^yNPrZTMZ}Xsi%3d3yVmc%xTjSfVlyT0v-~oPN`h1FxHU%|NT=hRWj~ri zaw#KCw_t9Ix`(>v`6QmRw|F*}$10cPdOaFhEg1O(g0t({+7aVElLF(N9jm;RrRZDK zM@JF?v%YB-;yHZkB3%%zQ8++L3M4dPBxcdCZo9u0yVNrJT^n<%4&pLI^r4cK!D8T| zF>vr0Fd|e8e*N0i&%3*p%j@4&cJ)oY7a9*uoN|9|4~Rd?)3E=e#B5XgAN%`#1v2Lv}Rc@`$ zVpP&C;mpU#JlR=1POCVpdJ&LV^JE&0a03|ueNkCgTh<*<$q{_A9QK>V`I0ZGfI35c z8p!Gd`Q&O-^aumrND|%SfH>@JCFX#N=|nEnjFxdvu98$F9%4-?69p@>sunqTp3T_< z(z|3f17fFqN8W0f4vbyA{&oh)FiiIA!k1U5dW$s(+NA9oXVHrnFEzU?*KG&`7F zdUT|>-BB+>t!lceW4>BlkyR+O@B5VM6b3I+7hXtq%kc4VU?0+bTkZ^+))qG7TLZil zn3!dI6#PRz4f{Xdi*0KEKR5`)e>mCt|J%r8@Baav3FkO?mf?=TI|%GswC&Ko5xvY* zxHP=~BOiXGzmQ>6uxL^wag`|5g<_dulrVqe#A;EKU=rhI2uDP@oSaFfv(g$`i}lQrIu0W zh&I5;Y8>92s-}_qHZCqT;7MilA?y~sMK5RNS-zZ4X{%>dj~U&#(kY3Jc%NKjk4B>l zYA$JK;K4yB79NFhff3;t;x`Xw@ggPXYT4;BKE(@M>O?pW|1-Xbzc!(;>BsqUJcr|x z1uif0Jo;5RlEd1fY5qgLNUAeZ&W?VKez-2HWYN!;Ko|nsU(K`5?pZRQ=Q!x|>7RDH zkE7oi!tZWP%JZPr6FmuHCr5j!kD)w@Cub@2ERY${7*z7)Gg$}vqeF}P1IDP{XqdYB z1)OCnCN>S+kpJL6GMS&tY=N=oD1Lelnm~&f>JBFtW+;(lpiq41h7o#(m0QL|oIw}E zSmK$9Q*NHU^*%9b&w12t9`g0#~W$paPO1XYIs=|+ z&p-5{?+c((l4{a-wn^ZAQe;M{G&Dw-mThdlg>;wjKII*CtqOGS87XunH0q zQooAMgts@6^bMP-^q-xJ7upMU%N+Q89s2O^slLak zYOl~$P3h0(rwNQu;4|{fb#Ru0;5W*7wf+6i=|zefrooTTKh%?!lX*G?M1loKg{Gb= zY=8Iz*)_ZA|B+lHfzi}fUI?}1*KxIkgZM3wAtfBii#S;4_eH%Vq$NeDJ;6r0aljkA zhI=2B+PE)cswMuFTjt*&2IjBQ*|w5;5pFj$!pgcKP}eq)YYJ12 ze>!_lyXa(MkN>rF)9v-3%Hr={G^T$!<RWCvV8w5Cff^dgV=+Dz3 z)%Eo0uwIK=U8*Ma-jh(_)pe~1Z=#Z)Ubs76>F#*N{qYK0TPtoaP3?HYO6!r#3^pHZ zhcw_R_K%}+uQa7Qw5MV;hHmJ)%x~R~PO4vmmbb}Duh8pxRdH~*bGLxE?sX5k$!-~Y zFHi;SG`!}Xq+UhOKd7vD6eaKmC{SG1UCPHGUKaLrJiXli%k+O{S!pGMRDL+z)pWAEs;Rdyku-2<5h=cMla(+#I$(5G@ZNZO5 z87bjtdxJo~BW<0ARvvc#h-&x|l7A)(kqP}YP8F2b1ofA4bDuzdYK-Uq0CR|sfc9Sd zb23jn*Pv?-PoQI3y9GN1)BiLa@~lC34matgwm$m3?hiqA!9ShvgH`+gr;q=(e}Kjw z-u{1dvep0F$fNaq)Uqu(l#!90qAJW;dU`g$mR&&9Yp$aT?>Mzh={$Y!n1|1=AHbpm z_8s^~7Odi>si*^^XcrCX@)<9032zoFqcvFP?qw;4R?<5QY3HEI5K0H=axSfwF=Njv zJr(_nH>`M5CwJ2^-UQ3tXOUmX^n28KZz-=2Z0&VrG)sEnN=hqtqf{7X#c6`Fa=FuF z%_l*t9U1Ftc&1XLX`YmHA5M4vJP}EH&o`<@eIO|c9FTdLP}elH4XFvMZFpT-ZyaA< zdaWw$R^gnxVViJG!K6B0sugmN>`48%VP$R{J>be9buheWfxLa`7zu2~z(|UL*UcFK z1;`bumEEF;esFte5ETb*FU7Tapk5CAJ}Z(%ev$lBbeoVuUe7fy9n)o>d&Bph`uUFv zgm=mPe-!_}f3S`Ju#u->{K=B%u2cY4(0@mRn*aCX{Vn~snaAjP(~ygzH&Sb&v5mA= zn8czTH8TQsqub1iPU1%h^3VpHM^o4_iv{{n#EoBTWM{HR_oDGq^v`WoX(R@;W>sFW zL!>MvKO{L5a#P&D-x0#^^ua>a+DFT?=1a8F9wC(otyNww=ar!U>_4CVFKTsy7cB^D0i@<-%oyV7LS&5HSC9?_hJgpnHk?UN zm#D()-eD3{K*Om=&`CAR!M9YZdJGO}ajNudS7Raab;#r*^q2VRO;Vo2;3nG3lIQ{! 
zl1?64l*^?zR-(#?l%ECy8DGe;NyJq^AMh;&V9jAv7m=dYM|N!^HIvG^##}s-*&@|n z=Gi^nB_=P9A{D^1eQtLqQOx5c$>>4{&+;incohyNpD$%jt09ct6@p$$l6ZRe{VVQ_ z?y8snCW^Q~LZPt4?cti+57hYlhG!st?~Y`NO_vj`X{45x>VFohC5S9BrGhTQx7Veu<2&=vk%Aw8e9%xYXNq zkM|{@sx94J8XJ8^D9tQ0G|?ERKjpfJixgeT^`mE{?N;tg=>nIN9L<58%&7Xojh*I- zzh8Sl#Nm7+I|LfOy!p;wh~y7uHjCb=rB^RFjQV~1;F%@WeP){I<0Fw?sjX+W0tMHEok5r>Rmd^asPOqX46sG-y!XP4O&|D zz8}3u{AuP|IbuCwKR3OzeYVf`**@E6`)r@>vwgPD_Srt$XZviQ?X!Kh&p*@i{{ug- JkC6b-0s!Lprzijb diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index d25e9a0f6..8c14057ad 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -7,8 +7,8 @@ "6.0.1": { "bytes": 31129, "compilers": [ - "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11" ], "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index b57b9fd09..e51b52614 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -7,8 +7,8 @@ "4.0.1": { "bytes": 2179, "compilers": [ - "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11" ], "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", diff --git a/app/fixtures/registry/package-sets/0.0.1.json b/app/fixtures/registry/package-sets/0.0.1.json new file mode 100644 index 000000000..cc82ad7d4 --- /dev/null +++ b/app/fixtures/registry/package-sets/0.0.1.json @@ -0,0 +1,8 @@ +{ + "version": "0.0.1", + "compiler": "0.15.10", + "published": "2024-01-01", + "packages": { + "prelude": "6.0.1" + } +} diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": "MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8d78d7ca7..8ebc66ba5 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan + , getPacchettiBotti , packageSetUpdate , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -31,7 +30,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet @@ -52,6 +51,7 @@ import Parsing as 
Parsing import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String +import Registry.API.V1 (PackageSetJobData) import Registry.App.Auth as Auth import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs @@ -61,12 +61,9 @@ import Registry.App.Effect.Archive (ARCHIVE) import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.PackageSets (Change(..), PACKAGE_SETS) @@ -85,18 +82,18 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob -import Registry.Foreign.Octokit (IssueNumber(..), Team) -import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Octokit (Team) import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData) import Registry.Operation as Operation import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation @@ -116,18 +113,17 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +-- | Effect row for package set updates. Authentication is done at the API +-- | boundary, so we don't need GITHUB or GITHUB_EVENT_ENV effects here. +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + LOG + EXCEPT String + r) --- | Process a package set update. Package set updates are only processed via --- | GitHub and not the HTTP API, so they require access to the GitHub env. -packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate payload = do - { issue, username } <- Env.askGitHubEvent +-- | Process a package set update from a queued job. Authentication has already +-- | been verified at the API boundary, so we don't need to check team membership. +packageSetUpdate :: forall r. 
PackageSetJobData -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate details = do + let Operation.PackageSetUpdate payload = details.payload - Log.debug $ Array.fold - [ "Package set update created from issue " <> show (un IssueNumber issue) <> " by user " <> username - , " with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload - ] + Log.debug $ "Package set update job starting with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload latestPackageSet <- Registry.readLatestPackageSet >>= case _ of Nothing -> do @@ -139,34 +135,8 @@ packageSetUpdate payload = do let prevCompiler = (un PackageSet latestPackageSet).compiler let prevPackages = (un PackageSet latestPackageSet).packages - Log.debug "Determining whether authentication is required (the compiler changed or packages were removed)..." - let didChangeCompiler = maybe false (not <<< eq prevCompiler) payload.compiler - let didRemovePackages = any isNothing payload.packages - - -- Changing the compiler version or removing packages are both restricted - -- to only the packaging team. We throw here if this is an authenticated - -- operation and we can't verify they are a member of the packaging team. - when (didChangeCompiler || didRemovePackages) do - Log.debug "Authentication is required. Verifying the user can take authenticated actions..." - GitHub.listTeamMembers packagingTeam >>= case _ of - Left githubError -> do - Log.error $ "Failed to retrieve the members of the packaging team from GitHub: " <> Octokit.printGitHubError githubError - Except.throw $ Array.fold - [ "This package set update changes the compiler version or removes a " - , "package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but we were " - , "unable to authenticate your account." - ] - Right members -> do - unless (Array.elem username members) do - Log.error $ "Username " <> username <> " is not a member of the packaging team, aborting..." - Except.throw $ Array.fold - [ "This package set update changes the compiler version or " - , "removes a package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but your " - , "username is not a member of the packaging team." - ] - Log.debug $ "Authentication verified for package set update by user " <> username + -- Note: authentication for restricted operations (compiler change, package removal) + -- is handled at the API boundary in the Router, not here. -- The compiler version cannot be downgraded. for_ payload.compiler \version -> when (version < prevCompiler) do @@ -224,18 +194,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." 
+ Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." -type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -292,10 +262,14 @@ authenticated auth = case auth.payload of { published = Map.delete payload.version prev.published , unpublished = Map.insert payload.version unpublished prev.unpublished } + -- Delete the manifest entry first so ManifestIndex.delete can fail if other + -- packages still depend on this version. This way, we detect dependency + -- violations before performing any irreversible side effects like deleting + -- the tarball from storage. + Registry.deleteManifest payload.name payload.version Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated - Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" + Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -326,11 +300,11 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + ARCHIVE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be @@ -340,7 +314,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. 
Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -472,13 +446,13 @@ publish maybeLegacyIndex payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest payload.ref config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -487,7 +461,7 @@ publish maybeLegacyIndex payload = do pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." + Log.notice $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -502,8 +476,8 @@ publish maybeLegacyIndex payload = do ] Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." - let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest - Comment.comment $ Array.fold + let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location payload.ref legacyManifest + Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -578,22 +552,23 @@ publish maybeLegacyIndex payload = do ] Nothing | payload.compiler < Purs.minPursuitPublish -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -601,7 +576,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -617,22 +592,23 @@ publish maybeLegacyIndex payload = do Left publishErr -> Except.throw publishErr Right _ -> do FS.Extra.remove tmp - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. Nothing -> do Log.info "Verifying the package build plan..." - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions - Comment.comment "Verifying unused and/or missing dependencies..." + Log.notice "Verifying unused and/or missing dependencies..." -- First we install the resolutions and call 'purs graph' to adjust the -- manifest as needed, but we defer compilation until after this check -- in case the package manifest and resolutions are adjusted. let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan validatedResolutions installedResolutions + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] @@ -715,7 +691,7 @@ publish maybeLegacyIndex payload = do -- Now that we have the package source contents we can verify we can compile -- the package with exactly what is going to be uploaded. - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Verifying package compiles using compiler " , Version.print payload.compiler , " and resolutions:\n" @@ -727,7 +703,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. 
Run.liftAff $ FS.Extra.remove installedResolutions
- installBuildPlan resolutions installedResolutions
+ MatrixBuilder.installBuildPlan resolutions installedResolutions
 compilationResult <- Run.liftAff $ Purs.callCompiler
 { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] }
 , version: Just payload.compiler
@@ -736,11 +712,11 @@ publish maybeLegacyIndex payload = do
 case compilationResult of
 Left compileFailure -> do
- let error = printCompilerFailure payload.compiler compileFailure
+ let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure
 Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error
 Right _ -> pure unit
- Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..."
+ Log.notice "Package source is verified! Packaging tarball and uploading to the storage backend..."
 let tarballName = packageDirname <> ".tar.gz"
 let tarballPath = Path.concat [ tmp, tarballName ]
 Tar.create { cwd: tmp, folderName: packageDirname }
@@ -751,7 +727,7 @@ publish maybeLegacyIndex payload = do
 Operation.Validation.ExceedsMaximum maxPackageBytes ->
 Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes."
 Operation.Validation.WarnPackageSize maxWarnBytes ->
- Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes."
+ Log.notice $ "WARNING: Package tarball is " <> show bytes <> " bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes."
 -- If a package has under ~30 bytes it's about guaranteed that packaging the
 -- tarball failed. This can happen if the system running the API has a non-
@@ -766,11 +742,11 @@ publish maybeLegacyIndex payload = do
 Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath
 Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file."
- let newPublishedVersion = { hash, ref: payload.ref, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes }
+ let newPublishedVersion = { hash, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes }
 let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published }
 Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata)
- Comment.comment "Successfully uploaded package to the registry! 🎉 🚀"
+ Log.notice "Successfully uploaded package to the registry! 🎉 🚀"
 -- We write to the registry index if possible. If this fails, the packaging
 -- team should manually insert the entry.
@@ -778,7 +754,7 @@ publish maybeLegacyIndex payload = do
 Registry.writeManifest manifest
 Registry.mirrorLegacyRegistry payload.name newMetadata.location
- Comment.comment "Mirrored registry operation to the legacy registry!"
+ Log.notice "Mirrored registry operation to the legacy registry!"
Log.debug "Uploading package documentation to Pursuit"
 if payload.compiler >= Purs.minPursuitPublish then
@@ -788,38 +764,45 @@ publish maybeLegacyIndex payload = do
 publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of
 Left publishErr -> do
 Log.error publishErr
- Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr
+ Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr
 Right _ ->
- Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀"
+ Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀"
 else do
- Comment.comment $ Array.fold
+ Log.notice $ Array.fold
 [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler ("
 , Version.print payload.compiler
 , "). If you want to publish documentation, please try again with a later compiler."
 ]
- Comment.comment "Determining all valid compiler versions for this package..."
- allCompilers <- PursVersions.pursVersions
- { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of
- Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler }
- Just try -> do
- found <- findAllCompilers
- { source: packageSource
- , manifest
- , compilers: try
- }
- pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded }
+ -- Note: this only runs for the Legacy Importer. In normal operation (i.e.
+ -- when running the server) this is handled by follow-up jobs that invoke
+ -- the MatrixBuilder for each compiler version.
+ for_ maybeLegacyIndex \_idx -> do
+ Log.notice "Determining all valid compiler versions for this package..."
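+ -- We already know payload.compiler succeeds (the package was just compiled
+ -- with it), so only the remaining compilers are tested below, and
+ -- payload.compiler is added back into the succeeded set afterwards.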
+ allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } + + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.notice "Wrote completed metadata to the registry!" - Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -904,32 +887,30 @@ findAllCompilers . { source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" 
- Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." ] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -938,7 +919,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -949,49 +930,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." - ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. 
package-name-major.minor.patch
- filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
- filepath = Path.concat [ dependenciesDir, filename ]
- Storage.download name version filepath
- Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
- Left error -> do
- Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
- Except.throw "Failed to unpack dependency tarball, cannot continue."
- Right _ ->
- Log.debug $ "Unpacked " <> filename
- Run.liftAff $ FS.Aff.unlink filepath
- Log.debug $ "Installed " <> formatPackageVersion name version
-
-- | Parse the name and version from a path to a module installed in the standard
-- | form: '<package-name>-<version>...'
parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version }
@@ -1027,7 +965,7 @@ type PublishToPursuit =
 publishToPursuit
 :: forall r
 . PublishToPursuit
- -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit)
+ -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit)
 publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do
 Log.debug "Generating a resolutions file"
 tmp <- Tmp.mkTmpDir
@@ -1062,7 +1000,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep
 publishJson <- case compilerOutput of
 Left error ->
- Except.throw $ printCompilerFailure compiler error
+ Except.throw $ MatrixBuilder.printCompilerFailure compiler error
 Right publishResult -> do
 -- The output contains plenty of diagnostic lines, ie. "Compiling ..."
 -- but we only want the final JSON payload.
@@ -1209,13 +1147,6 @@ getPacchettiBotti = do
 packagingTeam :: Team
 packagingTeam = { org: "purescript", team: "packaging" }
-readCompilerIndex :: forall r.
Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version @@ -1235,7 +1166,7 @@ conformLegacyManifest -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let manifestRequired :: SemigroupMap PackageName Intersection @@ -1332,7 +1263,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p UnusedDependencies names -> do Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1341,7 +1272,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p MissingDependencies names -> do Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1352,7 +1283,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let trimmed = Map.difference manifest.dependencies unused' Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" diff --git a/app/src/App/Auth.purs b/app/src/App/Auth.purs index c8647304f..f9303fea8 100644 --- a/app/src/App/Auth.purs +++ b/app/src/App/Auth.purs @@ -1,6 +1,7 @@ module Registry.App.Auth ( SignAuthenticated , signPayload + , verifyPackageSetPayload , verifyPayload ) where @@ -8,7 +9,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.String as String -import Registry.Operation (AuthenticatedData) +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest) import Registry.SSH as SSH -- We take pacchettibotti as an extra owner because pacchettibotti can always @@ -35,3 +36,20 @@ signPayload :: SignAuthenticated -> Either String SSH.Signature signPayload { privateKey, rawPayload } = do private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } pure $ SSH.sign private rawPayload + +-- | Verify a package set update request using pacchettibotti's key. +-- | Returns an error if the signature is invalid or missing. 
+verifyPackageSetPayload :: Owner -> PackageSetUpdateRequest -> Aff (Either String Unit) +verifyPackageSetPayload pacchettiBotti request = do + case request.signature of + Nothing -> + pure $ Left "Package set update requires a signature for restricted operations." + Just signature -> do + let eitherKey = SSH.parsePublicKey (formatOwner pacchettiBotti) + pure do + key <- eitherKey + unless (SSH.verify key request.rawPayload signature) do + Left "The pacchettibotti signature is not valid for this payload." + where + formatOwner (Owner owner) = + String.joinWith " " [ owner.keytype, owner.public, fromMaybe "id" owner.id ] diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac9ffc398..baf513748 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -214,8 +214,8 @@ gitCommit { address: { owner, repo }, committer, commit, message } cwd = Except. -- Git will error if we try to commit without any changes actually staged, -- so the below command lists file paths (--name-only) that have changed -- between the index and current HEAD (--cached), only including files that - -- have been added or modified (--diff-filter=AM). - staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AM" ] \error -> + -- have been added, modified, or deleted (--diff-filter=AMD). + staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AMD" ] \error -> "Failed to check whether any changes are staged " <> inRepoErr error -- If there are no staged files, then we have nothing to commit. diff --git a/app/src/App/Effect/Archive.purs b/app/src/App/Effect/Archive.purs index 8c26092ad..17ca0675e 100644 --- a/app/src/App/Effect/Archive.purs +++ b/app/src/App/Effect/Archive.purs @@ -35,13 +35,13 @@ import Node.Buffer as Buffer import Node.FS.Aff as FS.Aff import Node.Path as Path import Registry.App.CLI.Tar as Tar -import Registry.Foreign.FSExtra as FS.Extra import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) import Registry.Constants as Constants +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Internal.Format as Internal.Format diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. 
-module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. (Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..96b75ca94 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,13 +5,16 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageSetJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PackageSetJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite +import Registry.Operation (PackageSetOperation) import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. 
Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +24,29 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. + data Db a - = InsertLog LogLine a - | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + = InsertPublishJob InsertPublishJob (JobId -> a) + | InsertUnpublishJob InsertUnpublishJob (JobId -> a) + | InsertTransferJob InsertTransferJob (JobId -> a) + | InsertMatrixJob InsertMatrixJob (JobId -> a) + | InsertPackageSetJob InsertPackageSetJob (JobId -> a) + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectJobs SelectJobsRequest (Array Job -> a) + | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) + | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) + | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a) + | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a) + | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a) + | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a + | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) + | ResetIncompleteJobs a derive instance Functor Db @@ -39,28 +58,87 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. -selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) +-- | Select all logs for a given job, filtered by loglevel. +selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) +selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow + +-- | Select a list of the latest jobs from the database +selectJobs :: forall r. 
SelectJobsRequest -> Run (DB + EXCEPT String + r) (Array Job)
+selectJobs request = Run.lift _db (SelectJobs request identity)
+
+-- | Insert a new publish job into the database.
+insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId
+insertPublishJob job = Run.lift _db (InsertPublishJob job identity)
+
+-- | Insert a new unpublish job into the database.
+insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId
+insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity)
+
+-- | Insert a new transfer job into the database.
+insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId
+insertTransferJob job = Run.lift _db (InsertTransferJob job identity)
+
+-- | Insert a new matrix job into the database.
+insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId
+insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity)
+
+-- | Insert a new package set job into the database.
+insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId
+insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity)
+
+-- | Start a job in the database.
+startJob :: forall r. StartJob -> Run (DB + r) Unit
+startJob job = Run.lift _db (StartJob job unit)
+
+-- | Select the next publish job from the database.
+selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails)
+selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow
 
--- | Select a job by package name from the database, failing if there is no
--- | current job available for that package name.
-runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job)
-runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity)
+-- | Select the next unpublish job from the database.
+selectNextUnpublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails)
+selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow
+
+-- | Select the next transfer job from the database.
+selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails)
+selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow
+
+-- | Select the next matrix job from the database.
+selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails)
+selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow
+
+-- | Select the next package set job from the database.
+selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails)
+selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow
+
+-- | Lookup a publish job from the database by name and version.
+selectPublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe PublishJobDetails)
+selectPublishJob packageName packageVersion = Run.lift _db (SelectPublishJob packageName packageVersion identity) >>= Except.rethrow
+
+-- | Lookup an unpublish job from the database by name and version.
+selectUnpublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails)
+selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob packageName packageVersion identity) >>= Except.rethrow
+
+-- | Lookup a transfer job from the database by name.
+selectTransferJob :: forall r. 
PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow + +-- | Lookup a pending package set job from the database by payload (for duplicate detection). +selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectPackageSetJobByPayload payload = Run.lift _db (SelectPackageSetJobByPayload payload identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +resetIncompleteJobs :: forall r. Run (DB + r) Unit +resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +148,92 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log - pure next + InsertPublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPublishJob env.db job + pure $ reply result - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertUnpublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job + pure $ reply result - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertTransferJob job reply -> do + result <- Run.liftEffect $ SQLite.insertTransferJob env.db job + pure $ reply result + + InsertMatrixJob job reply -> do + result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure $ reply result + + InsertPackageSetJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job + pure $ reply result + + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId + SelectJob request reply -> do + { unreadableLogs, job } <- Run.liftEffect $ SQLite.selectJob env.db request + unless (Array.null unreadableLogs) do + Log.warn $ "Some logs were not readable: " <> String.joinWith "\n" unreadableLogs pure $ reply job - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + SelectJobs request reply -> do + { failed, jobs } <- Run.liftEffect $ SQLite.selectJobs env.db request + unless (Array.null failed) do + Log.warn $ "Some jobs were not readable: " <> String.joinWith "\n" failed + pure $ reply jobs + + SelectNextPublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db + pure $ reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db + pure $ reply result + + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextTransferJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + 
SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result + + SelectPublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectPublishJob env.db packageName packageVersion + pure $ reply result + + SelectUnpublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectUnpublishJob env.db packageName packageVersion + pure $ reply result + + SelectTransferJob packageName reply -> do + result <- Run.liftEffect $ SQLite.selectTransferJob env.db packageName + pure $ reply result + + SelectPackageSetJobByPayload payload reply -> do + result <- Run.liftEffect $ SQLite.selectPackageSetJobByPayload env.db payload + pure $ reply result + + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..873162264 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: githubApiUrlEnv , pursuitApiUrl: pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. +registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL @@ -272,6 +285,16 @@ pacchettibottiED25519Pub = EnvKey githubEventPath :: EnvKey FilePath githubEventPath = EnvKey { key: "GITHUB_EVENT_PATH", decode: pure } +-- Test environment variables (used by E2E tests) + +-- | Root directory for test state (database, scratch repos, etc). 
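+-- | For example, a test run might set STATE_DIR=/tmp/registry-test-state
+-- | (an illustrative value; any writable directory works).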
+stateDir :: EnvKey FilePath +stateDir = EnvKey { key: "STATE_DIR", decode: pure } + +-- | Directory containing git repository fixtures for tests. +repoFixturesDir :: EnvKey FilePath +repoFixturesDir = EnvKey { key: "REPO_FIXTURES_DIR", decode: pure } + decodeDatabaseUrl :: String -> Either String DatabaseUrl decodeDatabaseUrl input = do let prefix = "sqlite:" diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of @@ -134,5 +138,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index bd406ff25..48fbdf4a8 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -388,6 +388,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Right Git.Changed -> do Log.info "Registry repo has changed, clearing metadata cache..." + Cache.delete _registryCache AllMetadata resetFromDisk WriteMetadata name metadata reply -> map (map reply) Except.runExcept do @@ -501,10 +502,9 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Log.info $ "Mirroring legacy package set " <> name <> " to the legacy package sets repo" manifests <- Except.rethrow =<< handle env (ReadAllManifests identity) - metadata <- Except.rethrow =<< handle env (ReadAllMetadata identity) Log.debug $ "Converting package set..." - converted <- case Legacy.PackageSet.convertPackageSet manifests metadata set of + converted <- case Legacy.PackageSet.convertPackageSet manifests set of Left error -> Except.throw $ "Failed to convert package set " <> name <> " to a legacy package set: " <> error Right converted -> pure converted @@ -733,17 +733,30 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << result <- Git.gitPull { address, pullMode: env.pull } path pure result - now <- nowUTC - debouncers <- Run.liftEffect $ Ref.read env.debouncer - case Map.lookup path debouncers of - -- We will be behind the upstream by at most this amount of time. 
- Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> - pure $ Right Git.NoChange - -- If we didn't debounce, then we should fetch the upstream. - _ -> do + -- Check if the repo directory exists before consulting the debouncer. + -- This ensures that if the scratch directory is deleted (e.g., for test + -- isolation), we always re-clone rather than returning a stale NoChange. + repoExists <- Run.liftAff $ Aff.attempt (FS.Aff.stat path) + case repoExists of + Left _ -> do + -- Repo doesn't exist, bypass debouncer entirely and clone fresh result <- fetchLatest + now <- nowUTC Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer pure result + Right _ -> do + -- Repo exists, check debouncer + now <- nowUTC + debouncers <- Run.liftEffect $ Ref.read env.debouncer + case Map.lookup path debouncers of + -- We will be behind the upstream by at most this amount of time. + Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> + pure $ Right Git.NoChange + -- If we didn't debounce, then we should fetch the upstream. + _ -> do + result <- fetchLatest + Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer + pure result -- | Commit the file(s) indicated by the commit key with a commit message. commit :: CommitKey -> String -> Run _ (Either String GitResult) diff --git a/app/src/App/Effect/Storage.purs b/app/src/App/Effect/Storage.purs index c9a52a7bb..b6d6a0ad4 100644 --- a/app/src/App/Effect/Storage.purs +++ b/app/src/App/Effect/Storage.purs @@ -199,6 +199,7 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case Except.throw $ "Could not delete package " <> package <> " due to an error connecting to the storage backend." Succeeded _ -> do Log.debug $ "Deleted release of " <> package <> " from S3 at the path " <> packagePath + Cache.delete _storageCache (Package name version) pure unit else do Log.error $ packagePath <> " does not exist on S3 (available: " <> String.joinWith ", " published <> ")" diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index c0bb2750b..b0ab0f02c 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,123 +14,249 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - API.publish Nothing payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. 
- signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err + } + liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Archive.interpret Archive.handle - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret (Source.handle Source.Recent) + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Cache.interpret API._compilerCache (Cache.handleFs cache) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left packageSetOp@(PackageSetUpdate payload) -> do + -- Sign with pacchettibotti if submitter is a trustee + request <- signPackageSetIfTrustee packageSetOp payload + pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateRequestCodec request + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . 
Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Notice && case lastTimestamp of + Nothing -> true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 
minutes at 5 second intervals
+defaultPollConfig :: PollConfig
+defaultPollConfig =
+  { maxAttempts: 360
+  , interval: Aff.Milliseconds 5000.0
+  }
 
 type GitHubEventEnv =
   { octokit :: Octokit
@@ -129,10 +264,11 @@ type GitHubEventEnv =
   , issue :: IssueNumber
   , username :: String
   , operation :: Either PackageSetOperation PackageOperation
-  , spacesConfig :: SpaceKey
   , publicKey :: String
   , privateKey :: String
   , resourceEnv :: Env.ResourceEnv
+  , pollConfig :: PollConfig
+  , logVerbosity :: LogVerbosity
   }
 
 initializeGitHub :: Aff (Maybe GitHubEventEnv)
@@ -140,17 +276,12 @@ initializeGitHub = do
   token <- Env.lookupRequired Env.pacchettibottiToken
   publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub
   privateKey <- Env.lookupRequired Env.pacchettibottiED25519
-  spacesKey <- Env.lookupRequired Env.spacesKey
-  spacesSecret <- Env.lookupRequired Env.spacesSecret
   resourceEnv <- Env.lookupResourceEnv
   eventPath <- Env.lookupRequired Env.githubEventPath
   octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl
 
   readOperation eventPath >>= case _ of
-    -- If the issue body is not just a JSON string, then we don't consider it
-    -- to be an attempted operation and it is presumably just an issue on the
-    -- registry repository.
     NotJson ->
       pure Nothing
 
@@ -175,10 +306,11 @@ initializeGitHub = do
       , issue
       , username
       , operation
-      , spacesConfig: { key: spacesKey, secret: spacesSecret }
       , publicKey
       , privateKey
       , resourceEnv
+      , pollConfig: defaultPollConfig
+      , logVerbosity: Verbose
       }
 
 data OperationDecoding
@@ -201,9 +333,6 @@ readOperation eventPath = do
     pure event
 
   let
-    -- TODO: Right now we parse all operations from GitHub issues, but we should
-    -- in the future only parse out package set operations. The others should be
-    -- handled via a HTTP API.
     decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation)
     decodeOperation json = do
       object <- CJ.decode CJ.jobject json
@@ -243,7 +372,7 @@ firstObject input = fromMaybe input do
   after <- String.lastIndexOf (String.Pattern "}") start
   pure (String.take (after + 1) start)
 
--- | An event triggered by a GitHub workflow, specifically via an issue comment
 -- | or issue creation.
 -- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment
 newtype IssueEvent = IssueEvent
@@ -302,3 +431,32 @@ signPacchettiBottiIfTrustee auth = do
   else do
     Log.info "Authenticated payload not submitted by a registry trustee, continuing with original signature."
     pure auth
+
+-- | Sign a package set update with pacchettibotti's key if the submitter is a trustee.
+-- | Non-trustees get an unsigned request (signature = Nothing).
+signPackageSetIfTrustee
+  :: forall r
+  . 
PackageSetOperation + -> Operation.PackageSetUpdateData + -> Run (GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) Operation.PackageSetUpdateRequest +signPackageSetIfTrustee packageSetOp payload = do + let rawPayload = JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + GitHub.listTeamMembers API.packagingTeam >>= case _ of + Left githubError -> do + Log.warn $ Array.fold + [ "Unable to fetch members of packaging team, not signing package set request: " + , Octokit.printGitHubError githubError + ] + pure { payload: packageSetOp, rawPayload, signature: Nothing } + Right members -> do + { username } <- Env.askGitHubEvent + if Array.elem username members then do + Log.info "Package set update submitted by a registry trustee, signing with pacchettibotti keys." + { privateKey } <- Env.askPacchettiBotti + signature <- case Auth.signPayload { privateKey, rawPayload } of + Left _ -> Except.throw "Error signing package set update. cc: @purescript/packaging" + Right sig -> pure sig + pure { payload: packageSetOp, rawPayload, signature: Just signature } + else do + Log.info "Package set update not submitted by a registry trustee, sending unsigned request." + pure { payload: packageSetOp, rawPayload, signature: Nothing } diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 65aad78ec..8d997342f 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -59,13 +59,13 @@ type LegacyManifest = , dependencies :: Map PackageName Range } -toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location legacy = do +toManifest :: PackageName -> Version -> Location -> String -> LegacyManifest -> Manifest +toManifest name version location ref legacy = do let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing - Manifest { name, version, location, license, description, dependencies, includeFiles, excludeFiles, owners } + Manifest { name, version, location, ref, license, description, dependencies, includeFiles, excludeFiles, owners } -- | Attempt to retrieve a license, description, and set of dependencies from a -- | PureScript repo that does not have a Registry-supported manifest, but does diff --git a/app/src/App/Legacy/PackageSet.purs b/app/src/App/Legacy/PackageSet.purs index eb1ce8021..62b718d7c 100644 --- a/app/src/App/Legacy/PackageSet.purs +++ b/app/src/App/Legacy/PackageSet.purs @@ -102,8 +102,8 @@ printPscTag (PscTag { compiler, date }) = , Format.DateTime.format pscDateFormat (DateTime date bottom) ] -convertPackageSet :: ManifestIndex -> Map PackageName Metadata -> PackageSet -> Either String ConvertedLegacyPackageSet -convertPackageSet index metadataMap (PackageSet { compiler, packages, published, version }) = do +convertPackageSet :: ManifestIndex -> PackageSet -> Either String ConvertedLegacyPackageSet +convertPackageSet index (PackageSet { compiler, packages, published, version }) = do converted <- case separate $ mapWithIndex convertPackage packages of { left, right } | Map.isEmpty left -> Right right { left } -> do @@ -130,17 +130,14 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versions <- note noIndexPackageError $ Map.lookup packageName $ ManifestIndex.toMap index Manifest manifest <- note noIndexVersionError $ Map.lookup packageVersion versions - Metadata metadata <- note noMetadataPackageError $ 
Map.lookup packageName metadataMap - { ref } <- note noMetadataVersionError $ Map.lookup packageVersion metadata.published - - repo <- case metadata.location of + repo <- case manifest.location of GitHub { owner, repo, subdir: Nothing } -> Right $ "https://github.com/" <> owner <> "/" <> repo <> ".git" Git { url, subdir: Nothing } -> Right url GitHub _ -> Left usesSubdirError Git _ -> Left usesSubdirError pure - { version: RawVersion ref + { version: RawVersion manifest.ref , dependencies: Array.fromFoldable $ Map.keys $ manifest.dependencies , repo } @@ -149,8 +146,6 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versionStr = Version.print packageVersion noIndexPackageError = "No registry index entry found for " <> nameStr noIndexVersionError = "Found registry index entry for " <> nameStr <> " but none for version " <> versionStr - noMetadataPackageError = "No metadata entry found for " <> nameStr - noMetadataVersionError = "Found metadata entry for " <> nameStr <> " but no published version for " <> versionStr usesSubdirError = "Package " <> nameStr <> " uses the 'subdir' key, which is not supported for legacy package sets." printDhall :: LegacyPackageSet -> String diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..e638cc684 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,90 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import Node.Process as Process +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = do + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = do + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> do + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." 
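+
+    -- Note that the failure budget is cumulative over the lifetime of the
+    -- process: a successful ping leaves the counter unchanged, while each
+    -- failed, cancelled, or non-200 ping decrements it, so after roughly
+    -- 'limit' total failures the loop gives up rather than retrying forever.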
+
+  jobExecutor :: ServerEnv -> Aff Unit
+  jobExecutor env = do
+    loop initialRestartDelay
+    where
+    initialRestartDelay = Milliseconds 100.0
+
+    loop restartDelay = do
+      start <- nowUTC
+      result <- JobExecutor.runJobExecutor env
+      end <- nowUTC
+
+      Console.error case result of
+        Left error -> "Job executor failed: " <> Aff.message error
+        Right _ -> "Job executor exited unexpectedly."
+
+      -- This is a heuristic: if the executor keeps crashing immediately, we
+      -- restart with an exponentially increasing delay, but once the executor
+      -- has had a run longer than a minute, we start over with a small delay.
+      let
+        nextRestartDelay
+          | end `diff` start > Seconds 60.0 = initialRestartDelay
+          | otherwise = restartDelay <> restartDelay
+
+      Aff.delay nextRestartDelay
+      loop nextRestartDelay
diff --git a/app/src/App/Manifest/SpagoYaml.purs b/app/src/App/Manifest/SpagoYaml.purs
index 1d701e57c..66ffa1c48 100644
--- a/app/src/App/Manifest/SpagoYaml.purs
+++ b/app/src/App/Manifest/SpagoYaml.purs
@@ -27,9 +27,10 @@ import Registry.Range (Range)
 import Registry.Range as Range
 import Registry.Version as Version
 
--- | Attempt to convert a spago.yaml file to a Manifest
-spagoYamlToManifest :: SpagoYaml -> Either String Manifest
-spagoYamlToManifest config = do
+-- | Attempt to convert a spago.yaml file to a Manifest. The ref parameter is
+-- | the Git reference (tag or commit) used to fetch this version's source.
+spagoYamlToManifest :: String -> SpagoYaml -> Either String Manifest
+spagoYamlToManifest ref config = do
   package@{ name, description, dependencies: spagoDependencies } <- note "No 'package' key found in config." config.package
   publish@{ version, license, owners } <- note "No 'publish' key found under the 'package' key in config." package.publish
   location <- note "No 'location' key found under the 'publish' key in config." 
publish.location
@@ -43,6 +44,7 @@ spagoYamlToManifest config = do
   , description
   , license
   , location
+  , ref
   , owners
   , includeFiles
   , excludeFiles
diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs
index 7a046414d..5e586ebae 100644
--- a/app/src/App/Prelude.purs
+++ b/app/src/App/Prelude.purs
@@ -60,7 +60,7 @@ import Data.List (List) as Extra
 import Data.Map (Map) as Extra
 import Data.Map as Map
 import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe
-import Data.Newtype (class Newtype, un) as Extra
+import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra
 import Data.Newtype as Newtype
 import Data.Nullable (Nullable, toMaybe, toNullable) as Extra
 import Data.Set (Set) as Extra
diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js
index 8158695fc..0ff5bd696 100644
--- a/app/src/App/SQLite.js
+++ b/app/src/App/SQLite.js
@@ -1,5 +1,13 @@
 import Database from "better-sqlite3";
 
+const JOB_INFO_TABLE = 'job_info';
+const LOGS_TABLE = 'logs';
+const PUBLISH_JOBS_TABLE = 'publish_jobs';
+const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs';
+const TRANSFER_JOBS_TABLE = 'transfer_jobs';
+const MATRIX_JOBS_TABLE = 'matrix_jobs';
+const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs';
+
 export const connectImpl = (path, logger) => {
   logger("Connecting to database at " + path);
   let db = new Database(path, {
@@ -11,49 +19,224 @@ export const connectImpl = (path, logger) => {
   return db;
 };
 
-export const insertLogImpl = (db, logLine) => {
-  db.prepare(
-    "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)"
-  ).run(logLine);
+export const selectJobInfoImpl = (db, jobId) => {
+  const stmt = db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+};
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job. 
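+//
+// For example, a publish job insert receives an object shaped roughly like
+// this (all field values below are illustrative):
+//   { jobId: '...', createdAt: '2023-11-08T17:29:11.000Z',
+//     packageName: 'prelude', packageVersion: '6.0.1', payload: '{...}' }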
+const _insertJob = (db, table, columns, job) => { + const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns])); + const missingFields = requiredFields.filter(field => !(field in job)); + const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field)); + + if (missingFields.length > 0) { + throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`); + } + + if (extraFields.length > 0) { + throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`); + } + + const insertInfo = db.prepare(` + INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) + `); + + const insertJob = db.prepare(` + INSERT INTO ${table} (${columns.join(', ')}) + VALUES (${columns.map(col => `@${col}`).join(', ')}) + `); + + const insert = db.transaction((job) => { + insertInfo.run({ + jobId: job.jobId, + createdAt: job.createdAt, + startedAt: null, + finishedAt: null, + success: 0 + }); + insertJob.run(job); + }); + + return insert(job); +}; + +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertTransferJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'payload'] + return _insertJob(db, TRANSFER_JOBS_TABLE, columns, job); +}; + +export const insertMatrixJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] + return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); +}; + +export const insertPackageSetJobImpl = (db, job) => { + const columns = ['jobId', 'payload', 'rawPayload', 'signature'] + return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); +}; + +const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { + const params = []; + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + `; + + if (jobId != null) { + query += ` WHERE info.jobId = ?`; + params.push(jobId); + } else if (packageName != null) { + query += ` WHERE job.packageName = ?`; + params.push(packageName); + if (packageVersion != null) { + query += ` AND job.packageVersion = ?`; + params.push(packageVersion); + } + } else { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 1`; + const stmt = db.prepare(query); + + return stmt.get(...params); +} + +export const selectPublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectUnpublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); +}; + +export const selectTransferJobImpl = (db, { jobId, packageName }) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId, packageName }); }; -export const selectLogsByJobImpl = (db, jobId, logLevel) => { - const row = db - .prepare( - "SELECT * FROM logs WHERE jobId = ? AND level >= ? 
ORDER BY timestamp ASC" - ) - .all(jobId, logLevel); - return row; +export const selectMatrixJobImpl = (db, jobId) => { + return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); }; -export const createJobImpl = (db, job) => { - db.prepare( - "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)" - ).run(job); +export const selectPackageSetJobImpl = (db, jobId) => { + return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; -export const finishJobImpl = (db, result) => { - db.prepare( - "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId" - ).run(result); +// Find a pending package set job by payload (for duplicate detection) +export const selectPackageSetJobByPayloadImpl = (db, payload) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE job.payload = ? AND info.finishedAt IS NULL + ORDER BY info.createdAt ASC LIMIT 1 + `); + return stmt.get(payload); +}; + +const _selectJobs = (db, { table, since, includeCompleted }) => { + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.createdAt >= ? + `; + let params = [since]; + + if (includeCompleted === false) { + query += ` AND info.finishedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 100`; + const stmt = db.prepare(query); + + return stmt.all(...params); +} + +export const selectPublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PUBLISH_JOBS_TABLE, since, includeCompleted }); }; -export const selectJobImpl = (db, jobId) => { - const row = db - .prepare("SELECT * FROM jobs WHERE jobId = ? LIMIT 1") - .get(jobId); - return row; +export const selectUnpublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: UNPUBLISH_JOBS_TABLE, since, includeCompleted }); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectTransferJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: TRANSFER_JOBS_TABLE, since, includeCompleted }); }; -export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); +export const selectMatrixJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: MATRIX_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectPackageSetJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PACKAGE_SET_JOBS_TABLE, since, includeCompleted }); +}; + +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +// TODO I think we should keep track of this somehow. 
So either we save +// how many times this is being retried and give up at some point, notifying +// the trustees, or we notify right away for any retry so we can look at them +export const resetIncompleteJobsImpl = (db) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = NULL + WHERE finishedAt IS NULL + AND startedAt IS NOT NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? AND timestamp >= ? + ORDER BY timestamp ASC LIMIT 100 + `; + + const stmt = db.prepare(query); + return stmt.all(jobId, logLevel, since); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..e51196d47 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,828 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob + ( ConnectOptions + , FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails , SQLite + , SelectJobRequest + , SelectJobsRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails , connect - , createJob - , deleteIncompleteJobs , finishJob - , insertLog - , runningJobForPackage + , insertLogLine + , insertMatrixJob + , insertPackageSetJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs , selectJob + , selectJobs , selectLogsByJob + , selectNextMatrixJob + , selectNextPackageSetJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , selectPackageSetJobByPayload + , selectPublishJob + , selectTransferJob + , selectUnpublishJob + , startJob ) where import Registry.App.Prelude +import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) +import Data.Array (sortBy, take) import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Function (on) +import Data.Nullable (notNull, null) +import Data.Nullable as Nullable +import Data.UUID.Random as UUID +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Record as Record +import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 +import Registry.API.V1 as V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature(..)) +import Registry.Version as Version +-- | An active database connection acquired with 
`connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-------------------------------------------------------------------------------- +-- job_info table -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. +type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo + +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSStartJob = + { jobId :: String + , startedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat 
Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: fromSuccess success + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -type NewJob = +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect { unreadableLogs :: Array String, job :: Either String (Maybe Job) } +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail: unreadableLogs, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + -- Failing to decode a log should not prevent us from returning a job, so we pass + -- failures through to be handled by application code + job <- runExceptT $ firstJust + [ selectPublishJobById logs + , selectMatrixJobById logs + , selectTransferJobById logs + , selectPackageSetJobById logs + , selectUnpublishJobById logs + ] + pure { job, unreadableLogs } + where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + + selectPublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 
selectUnpublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: notNull jobId, packageName: null } + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectPackageSetJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (notNull jobId) + pure $ traverse + ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) + <<< packageSetJobDetailsFromJSRep + ) + maybeJobDetails + +type SelectJobsRequest = + { since :: DateTime + , includeCompleted :: Boolean + } + +selectJobs :: SQLite -> SelectJobsRequest -> Effect { failed :: Array String, jobs :: Array Job } +selectJobs db { since, includeCompleted } = do + publishJobs <- selectPublishJobs + unpublishJobs <- selectUnpublishJobs + transferJobs <- selectTransferJobs + matrixJobs <- selectMatrixJobs + packageSetJobs <- selectPackageSetJobs + let + { fail: failedJobs, success: allJobs } = partitionEithers + (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> packageSetJobs) + pure { failed: failedJobs, jobs: take 100 $ sortBy (compare `on` (V1.jobInfo >>> _.createdAt)) allJobs } + + where + selectPublishJobs = do + jobs <- Uncurried.runEffectFn3 selectPublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" }) <<< publishJobDetailsFromJSRep) jobs + + selectUnpublishJobs = do + jobs <- Uncurried.runEffectFn3 selectUnpublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" }) <<< unpublishJobDetailsFromJSRep) jobs + + selectTransferJobs = do + jobs <- Uncurried.runEffectFn3 selectTransferJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" }) <<< transferJobDetailsFromJSRep) jobs + + selectMatrixJobs = do + jobs <- Uncurried.runEffectFn3 selectMatrixJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" }) <<< matrixJobDetailsFromJSRep) jobs + + selectPackageSetJobs = do + jobs <- Uncurried.runEffectFn3 selectPackageSetJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" }) <<< packageSetJobDetailsFromJSRep) jobs + +-------------------------------------------------------------------------------- +-- publish_jobs table + +type PublishJobDetails = { jobId :: JobId - , jobType :: JobType , createdAt :: DateTime + , startedAt :: Maybe DateTime 
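+  -- startedAt is Nothing until the job executor picks the job up (see
+  -- startJob), and finishedAt stays Nothing (with success false) until
+  -- finishJob records the outcome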
+ , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PublishData } -type JSNewJob = +type JSPublishJobDetails = { jobId :: String - , jobType :: String , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId - , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt - , packageName: PackageName.print packageName - , ref +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +type SelectPublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite SelectPublishParams (Nullable JSPublishJobDetails) + +foreign import selectPublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +selectPublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe PublishJobDetails)) +selectPublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now } -type JobResult = +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
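+-- A usage sketch (hypothetical caller; `db` and `publishData` are assumed
+-- to be in scope): the HTTP layer decodes a PublishData payload, enqueues
+-- it, and reports the fresh JobId back to the client for polling:
+--
+--   jobId <- liftEffect $ SQLite.insertPublishJob db { payload: publishData }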
+insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId +insertPublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- unpublish_jobs table + +type UnpublishJobDetails = { jobId :: JobId - , finishedAt :: DateTime + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData } -type JSJobResult = +type JSUnpublishJobDetails = { jobId :: String - , finishedAt :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails +unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +type SelectUnpublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String } -type Job = +foreign import selectUnpublishJobImpl :: EffectFn2 SQLite SelectUnpublishParams (Nullable JSUnpublishJobDetails) + +foreign import selectUnpublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSUnpublishJobDetails) + +selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) +selectNextUnpublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +selectUnpublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe UnpublishJobDetails)) +selectUnpublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +type InsertUnpublishJob = + { payload :: UnpublishData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertUnpublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob +insertUnpublishJobToJSRep 
jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Unpublish payload + , rawPayload + , signature + } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId +insertUnpublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- transfer_jobs table + +type TransferJobDetails = { jobId :: JobId - , jobType :: JobType + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName - , ref :: String + , payload :: AuthenticatedData + } + +type JSTransferJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , payload :: String + } + +transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails +transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , payload: parsed + } + +type SelectTransferParams = { jobId :: Nullable String, packageName :: Nullable String } + +foreign import selectTransferJobImpl :: EffectFn2 SQLite SelectTransferParams (Nullable JSTransferJobDetails) + +foreign import selectTransferJobsImpl :: EffectFn3 SQLite String Boolean (Array JSTransferJobDetails) + +selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) +selectNextTransferJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db { jobId: null, packageName: null } + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + +selectTransferJob :: SQLite -> PackageName -> Effect (Either String (Maybe TransferJobDetails)) +selectTransferJob db packageName = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + } + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + +type InsertTransferJob = + { payload :: TransferData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertTransferJob = + { jobId :: String + , packageName :: String + , payload :: String + , createdAt :: String + } + +insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob +insertTransferJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , 
packageName: PackageName.print payload.name + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Transfer payload, rawPayload, signature } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- matrix_jobs table + +type InsertMatrixJob = + { packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , createdAt :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload + } + +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId +insertMatrixJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId + +type MatrixJobDetails = + { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version } -type JSJob = +type JSMatrixJobDetails = { jobId :: String - , jobType :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String - , ref :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + } + +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable 
JSMatrixJobDetails) + +foreign import selectMatrixJobsImpl :: EffectFn3 SQLite String Boolean (Array JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +-------------------------------------------------------------------------------- +-- package_set_jobs table + +type PackageSetJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , payload :: PackageSetOperation + } + +type JSPackageSetJobDetails = + { jobId :: String , createdAt :: String + , startedAt :: Nullable String , finishedAt :: Nullable String , success :: Int + , payload :: String + } + +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , payload: parsed + } + +foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) + +foreign import selectPackageSetJobByPayloadImpl :: EffectFn2 SQLite String (Nullable JSPackageSetJobDetails) + +foreign import selectPackageSetJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db null + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +-- | Find a pending package set job by payload (for duplicate detection) +selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe PackageSetJobDetails)) +selectPackageSetJobByPayload db payload = do + let payloadStr = stringifyJson Operation.packageSetOperationCodec payload + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobByPayloadImpl db payloadStr + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +type InsertPackageSetJob = + { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature + } + +type JSInsertPackageSetJob = + { jobId :: String + , createdAt :: String + , payload :: String + , rawPayload :: String + , signature :: Nullable String + } + +insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + , payload: stringifyJson Operation.packageSetOperationCodec payload + , rawPayload + , signature: Nullable.toNullable $ map (\(Signature s) -> s) signature + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> 
InsertPackageSetJob -> Effect JobId +insertPackageSetJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- logs table + +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } + +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } - -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob - -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult - -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob - -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob - -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
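+-- A usage sketch (assuming a job id that exists in the database): fetch the
+-- job's log lines from the beginning of time at DEBUG level and above:
+--
+--   { fail, success } <- SQLite.selectLogsByJob db (JobId "...") Debug bottom
+--
+-- where `success` holds the decoded log lines and `fail` holds any rows that
+-- could not be decoded.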
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = DateTime.format Internal.Format.iso8601DateTime since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + timestamp + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index a6e8c3e1f..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,346 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Data.Codec.JSON as CJ -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Newtype (unwrap) -import Data.String as String -import Data.UUID.Random as UUID -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Archive (ARCHIVE) -import Registry.App.Effect.Archive as Archive -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except - -newJobId :: forall m. 
MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish - - Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." - - Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , 
resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Archive.interpret Archive.handle - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..70e5698f5 --- /dev/null +++ 
b/app/src/App/Server/Env.purs @@ -0,0 +1,191 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.PackageSets (PACKAGE_SETS) +import Registry.App.Effect.PackageSets as PackageSets +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: 
vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.resetIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + PACKAGE_SETS + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # PackageSets.interpret (PackageSets.handle { workdir: scratchDir }) + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Archive.interpret Archive.handle + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # 
Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs new file mode 100644 index 000000000..4970fa935 --- /dev/null +++ b/app/src/App/Server/JobExecutor.purs @@ -0,0 +1,180 @@ +module Registry.App.Server.JobExecutor + ( runJobExecutor + ) where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) +import Control.Parallel as Parallel +import Data.Array as Array +import Data.DateTime (DateTime) +import Data.Map as Map +import Data.Set as Set +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex +import Registry.PackageName as PackageName +import Registry.Version as Version +import Run (Run) +import Run.Except (EXCEPT) + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: whenever a new compiler is published we need to see which + -- packages are compatible with it; this is a responsibility of the MatrixBuilder, + -- but it needs to be triggered to know there's a new version out. + -- To do that, we ask PursVersions what the compilers are, then we look for + -- the compatibility list of the latest `prelude` version. If the new compiler + -- is missing, then we know that we have not attempted to check compatibility + -- with it (since the latest `prelude` has to be compatible by definition), + -- and we can enqueue a "compile everything" here, which will be the first + -- thing that the JobExecutor picks up + void $ MatrixBuilder.checkIfNewCompiler + >>= traverse upgradeRegistryToNewCompiler + Db.resetIncompleteJobs + loop + where + loop = do + maybeJob <- findNextAvailableJob + case maybeJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 1000.0) + loop + + Just job -> do + now <- nowUTC + let + jobId = (V1.jobInfo job).jobId + + Db.startJob { jobId, startedAt: now } + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + -- Note: we set env.jobId so that logs are written to the database. + jobResult <- liftAff do + let envWithJobId = env { jobId = Just jobId } + let execute = Just <$> (runEffects envWithJobId $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> unwrap jobId <> " timed out." + pure false + + Just (Left err) -> do + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err + pure false + + Just (Right _) -> do + Log.info $ "Job " <> unwrap jobId <> " succeeded." 
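+          -- the success flag computed here is persisted just below via
+          -- Db.finishJob, which stamps finishedAt so the job no longer
+          -- matches the "next available job" queries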
+ pure true
+
+ finishedAt <- nowUTC
+ Db.finishJob { jobId, finishedAt, success }
+ loop
+
+-- TODO: here we pick up a single job at a time, but really we should fetch all
+-- pending jobs and process them in topological order; ManifestIndex.topologicalSort
+-- may be what we need here.
+findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Job)
+findNextAvailableJob = runMaybeT
+ $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob)
+ <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob)
+ <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob)
+ <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob)
+ <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob)
+
+executeJob :: DateTime -> Job -> Run ServerEffects Unit
+executeJob _ = case _ of
+ PublishJob { payload: payload@{ name } } -> do
+ maybeResult <- API.publish Nothing payload
+ -- The above operation throws if unsuccessful, and returns the package's
+ -- dependency map only if the package has not been published before.
+ for_ maybeResult \{ dependencies, version } -> do
+ -- At this point the package has been verified with one compiler only, so we
+ -- need to enqueue compilation jobs for (1) the same package with all the
+ -- other compilers, and (2) the same compiler with all packages that depend
+ -- on this one.
+ -- TODO here we are building the compiler index, but we should really cache it
+ compilerIndex <- MatrixBuilder.readCompilerIndex
+ let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex }
+ samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData
+ sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+ for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants)
+ \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+ Log.info $ "Enqueuing matrix job: compiler "
+ <> Version.print solvedCompiler
+ <> ", package "
+ <> PackageName.print solvedPackage
+ <> "@"
+ <> Version.print solvedVersion
+ Db.insertMatrixJob
+ { payload: resolutions
+ , compilerVersion: solvedCompiler
+ , packageName: solvedPackage
+ , packageVersion: solvedVersion
+ }
+ UnpublishJob { payload } -> API.authenticated payload
+ TransferJob { payload } -> API.authenticated payload
+ MatrixJob details@{ packageName, packageVersion } -> do
+ maybeDependencies <- MatrixBuilder.runMatrixJob details
+ -- Unlike the publishing case, after verifying a compilation here we only need
+ -- to follow up by trying to compile the packages that depend on this one.
+ for_ maybeDependencies \dependencies -> do
+ -- TODO here we are building the compiler index, but we should really cache it
+ compilerIndex <- MatrixBuilder.readCompilerIndex
+ let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex }
+ sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData
+ for (Array.fromFoldable sameCompilerAllDependants)
+ \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do
+ Log.info $ "Enqueuing matrix job: compiler "
+ <> Version.print solvedCompiler
+ <> ", package "
+ <> PackageName.print solvedPackage
+ <> "@"
+ <> Version.print solvedVersion
+ Db.insertMatrixJob
+ { payload: resolutions
+ , compilerVersion: solvedCompiler
+ , packageName: solvedPackage
+ , packageVersion: solvedVersion
+ }
+ PackageSetJob payload -> API.packageSetUpdate payload
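+
+-- An illustrative example of the fan-out above (package names hypothetical):
+-- if bar@2.0.0 and baz@0.3.0 both depend on foo, then successfully publishing
+-- foo@1.0.0 with compiler 0.15.10 enqueues matrix jobs for foo@1.0.0 with every
+-- other supported compiler, and for bar@2.0.0 and baz@0.3.0 with 0.15.10, each
+-- with a build plan solved before the job is queued.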
+
+upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit
+upgradeRegistryToNewCompiler newCompilerVersion = do
+ Log.info $ "New compiler found: " <> Version.print newCompilerVersion
+ Log.info "Starting upgrade of the whole registry to the new compiler..."
+ allManifests <- Registry.readAllManifests
+ for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do
+ -- Note: we enqueue compilation jobs only for packages with no dependencies,
+ -- because from them we can reach the whole registry: as each job completes,
+ -- new jobs are queued up for its dependants.
+ when (Map.isEmpty manifest.dependencies) do
+ Log.info $ "Enqueuing matrix job for _new_ compiler "
+ <> Version.print newCompilerVersion
+ <> ", package "
+ <> PackageName.print manifest.name
+ <> "@"
+ <> Version.print manifest.version
+ void $ Db.insertMatrixJob
+ { payload: Map.empty
+ , compilerVersion: newCompilerVersion
+ , packageName: manifest.name
+ , packageVersion: manifest.version
+ }
diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs
new file mode 100644
index 000000000..34aba9ba0
--- /dev/null
+++ b/app/src/App/Server/MatrixBuilder.purs
@@ -0,0 +1,234 @@
+module Registry.App.Server.MatrixBuilder
+ ( checkIfNewCompiler
+ , installBuildPlan
+ , printCompilerFailure
+ , readCompilerIndex
+ , runMatrixJob
+ , solveForAllCompilers
+ , solveDependantsForCompiler
+ ) where
+
+import Registry.App.Prelude
+
+import Data.Array as Array
+import Data.Array.NonEmpty as NonEmptyArray
+import Data.Map as Map
+import Data.Set as Set
+import Data.Set.NonEmpty as NonEmptySet
+import Data.String as String
+import Effect.Aff as Aff
+import Node.FS.Aff as FS.Aff
+import Node.Path as Path
+import Registry.API.V1 (MatrixJobData)
+import Registry.App.CLI.Purs (CompilerFailure(..))
+import Registry.App.CLI.Purs as Purs
+import Registry.App.CLI.PursVersions as PursVersions
+import Registry.App.CLI.Tar as Tar
+import Registry.App.Effect.Log (LOG)
+import Registry.App.Effect.Log as Log
+import Registry.App.Effect.Registry (REGISTRY)
+import Registry.App.Effect.Registry as Registry
+import Registry.App.Effect.Storage (STORAGE)
+import Registry.App.Effect.Storage as Storage
+import Registry.Foreign.FSExtra as FS.Extra
+import Registry.Foreign.Tmp as Tmp
+import Registry.ManifestIndex as ManifestIndex
+import Registry.Metadata as Metadata
+import Registry.PackageName as PackageName
+import Registry.Range as Range
+import Registry.Solver as Solver
+import Registry.Version as Version
+import Run (AFF, EFFECT, Run)
+import Run as Run
+import Run.Except (EXCEPT)
+import Run.Except as Except
+
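+-- | Run one cell of the compiler matrix: compile a single package version with
+-- | a single compiler, using the build plan pre-solved into the job payload,
+-- | and on success merge the compiler into the version's `compilers` metadata.
+-- | A hypothetical job might carry packageName "prelude", packageVersion 6.0.1,
+-- | compilerVersion 0.15.10, and an empty build plan.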
+runMatrixJob :: forall r. MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range))
+runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do
+ workdir <- Tmp.mkTmpDir
+ let installed = Path.concat [ workdir, ".registry" ]
+ FS.Extra.ensureDirectory installed
+ installBuildPlan (Map.insert packageName packageVersion buildPlan) installed
+ result <- Run.liftAff $ Purs.callCompiler
+ { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] }
+ , version: Just compilerVersion
+ , cwd: Just workdir
+ }
+ FS.Extra.remove workdir
+ case result of
+ Left err -> do
+ Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion
+ <> ":\n"
+ <> printCompilerFailure compilerVersion err
+ pure Nothing
+ Right _ -> do
+ Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion
+
+ Registry.readMetadata packageName >>= case _ of
+ Nothing -> do
+ Log.error $ "No existing metadata for " <> PackageName.print packageName
+ pure Nothing
+ Just (Metadata metadata) -> do
+ let
+ metadataWithCompilers = metadata
+ { published = Map.update
+ ( \publishedMetadata@{ compilers } ->
+ Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers }
+ )
+ packageVersion
+ metadata.published
+ }
+ Registry.writeMetadata packageName (Metadata metadataWithCompilers)
+ Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers)
+
+ Log.info "Wrote completed metadata to the registry!"
+ Registry.readManifest packageName packageVersion >>= case _ of
+ Just (Manifest manifest) -> pure (Just manifest.dependencies)
+ Nothing -> do
+ Log.error $ "No existing manifest for " <> PackageName.print packageName <> "@" <> Version.print packageVersion
+ pure Nothing
+
+-- TODO: we should build this index once at startup and use the cache, instead
+-- of reading all the files again on every job
+readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex
+readCompilerIndex = do
+ metadata <- Registry.readAllMetadata
+ manifests <- Registry.readAllManifests
+ allCompilers <- PursVersions.pursVersions
+ pure $ Solver.buildCompilerIndex allCompilers manifests metadata
+
+-- | Install all dependencies indicated by the build plan to the specified
+-- | directory. Packages will be installed at 'dir/package-name-x.y.z'.
+installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit
+installBuildPlan resolutions dependenciesDir = do
+ Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir
+ -- We fetch every dependency at its resolved version, unpack the tarball, and
+ -- store the resulting source code in a specified directory for dependencies.
+ forWithIndex_ resolutions \name version -> do
+ let
+ -- This filename uses the format the directory name will have once
+ -- unpacked, i.e. package-name-major.minor.patch
+ filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz"
+ filepath = Path.concat [ dependenciesDir, filename ]
+ Storage.download name version filepath
+ Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of
+ Left error -> do
+ Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error
+ Except.throw "Failed to unpack dependency tarball, cannot continue."
+ Right _ ->
+ Log.debug $ "Unpacked " <> filename
+ Run.liftAff $ FS.Aff.unlink filepath
+ Log.debug $ "Installed " <> formatPackageVersion name version
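+
+-- For example (illustrative values): a build plan entry mapping prelude to
+-- 6.0.1 downloads prelude-6.0.1.tar.gz into the dependencies directory,
+-- unpacks it to prelude-6.0.1, and then deletes the tarball afterwards.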
+
+printCompilerFailure :: Version -> CompilerFailure -> String
+printCompilerFailure compiler = case _ of
+ MissingCompiler -> Array.fold
+ [ "Compilation failed because the build plan compiler version "
+ , Version.print compiler
+ , " is not supported. Please try again with a different compiler."
+ ]
+ CompilationError errs -> String.joinWith "\n"
+ [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:"
+ , "```"
+ , Purs.printCompilerErrors errs
+ , "```"
+ ]
+ UnknownError err -> String.joinWith "\n"
+ [ "Compilation failed with version " <> Version.print compiler <> " because of an error:"
+ , "```"
+ , err
+ , "```"
+ ]
+
+type MatrixSolverData =
+ { compilerIndex :: Solver.CompilerIndex
+ , compiler :: Version
+ , name :: PackageName
+ , version :: Version
+ , dependencies :: Map PackageName Range
+ }
+
+type MatrixSolverResult =
+ { name :: PackageName
+ , version :: Version
+ , compiler :: Version
+ , resolutions :: Map PackageName Version
+ }
+
+solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult)
+solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do
+ -- Remove the compiler we already tested with from the set of all compilers
+ compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions
+ newJobs <- for compilers \target -> do
+ Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name
+ case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of
+ Left _solverErrors -> do
+ Log.info $ "Failed to solve with compiler " <> Version.print target
+ -- Log.debug $ Solver.printSolverError solverErrors
+ pure Nothing
+ Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of
+ true -> do
+ Log.debug $ "Solved with compiler " <> Version.print solvedCompiler
+ pure $ Just { compiler: target, resolutions, name, version }
+ false -> do
+ Log.debug $ Array.fold
+ [ "Produced a compiler-derived build plan that selects a compiler ("
+ , Version.print solvedCompiler
+ , ") that differs from the target compiler ("
+ , Version.print target
+ , ")."
+ ]
+ pure Nothing
+ pure $ Set.fromFoldable $ Array.catMaybes newJobs
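+
+-- The two solvers here fan out along different axes: `solveForAllCompilers`
+-- fixes the package version and tries every other supported compiler, while
+-- `solveDependantsForCompiler` fixes the compiler and tries every package
+-- version whose dependency ranges admit the version we just verified.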
+
+solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult)
+solveDependantsForCompiler { compilerIndex, name, version, compiler } = do
+ manifestIndex <- Registry.readAllManifests
+ let dependentManifests = ManifestIndex.dependants manifestIndex name version
+ newJobs <- for dependentManifests \(Manifest manifest) -> do
+ -- We first check whether we have already attempted this package with this
+ -- compiler, either because the compiler is recorded in the metadata already
+ -- or because a previous compilation failed: if the metadata lists compilers
+ -- for this version, we only attempt a compiler that is newer than all of
+ -- them, because all the previous ones have been tried.
+ shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of
+ Nothing -> pure false
+ Just metadata -> pure $ case Map.lookup manifest.version (un Metadata metadata).published of
+ Nothing -> false
+ Just { compilers } -> all (_ < compiler) compilers
+ case shouldAttemptToCompile of
+ false -> pure Nothing
+ true -> do
+ -- If we haven't attempted it yet, run the solver
+ Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name
+ case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of
+ Left _solverErrors -> do
+ Log.info $ "Failed to solve with compiler " <> Version.print compiler
+ -- Log.debug $ Solver.printSolverError solverErrors
+ pure Nothing
+ Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of
+ true -> do
+ Log.debug $ "Solved with compiler " <> Version.print solvedCompiler
+ pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version }
+ false -> do
+ Log.debug $ Array.fold
+ [ "Produced a compiler-derived build plan that selects a compiler ("
+ , Version.print solvedCompiler
+ , ") that differs from the target compiler ("
+ , Version.print compiler
+ , ")."
+ ]
+ pure Nothing
+ pure $ Set.fromFoldable $ Array.catMaybes newJobs
+
+checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version)
+checkIfNewCompiler = do
+ Log.info "Checking if there's a new compiler in town..."
+ latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions
+ maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude"
+ pure $ maybeMetadata >>= \(Metadata metadata) ->
+ Map.findMax metadata.published
+ >>= \{ key: _version, value: { compilers } } -> do
+ case all (_ < latestCompiler) compilers of
+ -- All compilers compatible with the latest prelude are older than this
+ -- one, so we have not attempted it yet
+ true -> Just latestCompiler
+ false -> Nothing
diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
new file mode 100644
index 000000000..2553ea4a6
--- /dev/null
+++ b/app/src/App/Server/Router.purs
@@ -0,0 +1,178 @@
+module Registry.App.Server.Router where
+
+import Registry.App.Prelude hiding ((/))
+
+import Data.Codec.JSON as CJ
+import Data.DateTime as DateTime
+import Data.Time.Duration (Hours(..), negateDuration)
+import Effect.Aff as Aff
+import Effect.Class.Console as Console
+import HTTPurple (Method(..), Request, Response)
+import HTTPurple as HTTPurple
+import HTTPurple.Status as Status
+import Registry.API.V1 (Route(..))
+import Registry.API.V1 as V1
+import Registry.App.API as API
+import Registry.App.Auth as Auth
+import Registry.App.Effect.Db as Db
+import Registry.App.Effect.Env as Env
+import Registry.App.Effect.Log as Log
+import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects)
+import Registry.Operation (PackageSetOperation(..))
+import Registry.Operation as Operation
+import Run (Run)
+import Run as Run
+import Run.Except as Run.Except
+
+runRouter :: ServerEnv -> Effect Unit
+runRouter env = do
+ -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080)
+ port <- liftEffect $ Env.lookupOptional Env.serverPort
+ void $ HTTPurple.serve
+ { hostname: "0.0.0.0"
+ , port
+ }
+ { route: V1.routes
+ , router: runServer
+ }
+ where
+ runServer :: Request Route -> Aff Response
+ runServer request 
= do + result <- runEffects env (router request) + case result of + Left error -> do + Console.log $ "Bad request: " <> Aff.message error + HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + + jobId <- lift (Db.selectPublishJob publish.name publish.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate publish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPublishJob { payload: publish } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + + jobId <- lift (Db.selectUnpublishJob payload.name payload.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate unpublish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + + jobId <- lift (Db.selectTransferJob payload.name) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate transfer job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + _ -> + HTTPurple.badRequest "Expected transfer operation." 
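+
+ -- Illustrative queries for the job-polling routes below (the exact
+ -- timestamp format is whatever the `timestampP` route parser accepts):
+ --   GET /api/v1/jobs?include_completed=true
+ --   GET /api/v1/jobs/<jobId>?level=DEBUG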
+ + Jobs { since, include_completed }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift + ( Run.Except.runExcept $ Db.selectJobs + { includeCompleted: fromMaybe false include_completed + , since: fromMaybe oneHourAgo since + } + ) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching jobs: " <> err + HTTPurple.internalServerError $ "Error while fetching jobs: " <> err + Right jobs -> jsonOk (CJ.array V1.jobCodec) jobs + + Job jobId { level: maybeLogLevel, since }, Get -> do + now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe oneHourAgo since }) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.internalServerError $ "Error while fetching job: " <> err + Right Nothing -> do + HTTPurple.notFound + Right (Just job) -> jsonOk V1.jobCodec job + + PackageSets, Post -> do + request <- HTTPurple.fromJson (jsonDecoder Operation.packageSetUpdateRequestCodec) body + lift $ Log.info $ "Received PackageSet request: " <> request.rawPayload + + -- Check if the operation requires authentication (compiler change or package removal) + let + PackageSetUpdate payload = request.payload + didChangeCompiler = isJust payload.compiler + didRemovePackages = any isNothing payload.packages + requiresAuth = didChangeCompiler || didRemovePackages + + -- If restricted operation, verify pacchettibotti signature + authResult <- + if requiresAuth then do + pacchettiBotti <- lift API.getPacchettiBotti + lift $ Run.liftAff $ Auth.verifyPackageSetPayload pacchettiBotti request + else + pure (Right unit) + + case authResult of + Left err -> do + lift $ Log.error $ "Package set authentication failed: " <> err + HTTPurple.badRequest err + Right _ -> do + when requiresAuth do + lift $ Log.info "Package set authentication successful." + + -- Check for duplicate pending job with the same payload + jobId <- lift (Db.selectPackageSetJobByPayload request.payload) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate package set job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPackageSetJob + { payload: request.payload + , rawPayload: request.rawPayload + , signature: request.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 27ed33cf1..28f17f90e 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -94,16 +94,17 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version: version , resolutions: Nothing } -- First, we publish the package. 
Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -141,7 +142,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -156,28 +157,30 @@ spec = do -- but did not have documentation make it to Pursuit. let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. let transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } transitivePublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs -- We should verify the resulting metadata file is correct Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of @@ -188,7 +191,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." 
Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -214,65 +217,6 @@ spec = do Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err Right (Right _) -> pure unit - - Spec.it "Falls back to archive when GitHub repo is inaccessible during legacy import" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do - logs <- liftEffect (Ref.new []) - - let - toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry - toLegacyIndex = - Solver.exploreAllTransitiveDependencies - <<< Solver.initializeRegistry - <<< map (map (_.dependencies <<< un Manifest)) - <<< ManifestIndex.toMap - - testEnv = - { workdir - , logs - , index - , metadata - , pursuitExcludes: Set.empty - , username: "jon" - , storage: storageDir - , archive: archiveDir - , github: githubDir - } - - -- The prelude@6.0.2 package exists in registry-archive but NOT in - -- github-packages or registry-storage. This simulates an archive-backed - -- package whose original GitHub repo is gone. - result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do - let - name = Utils.unsafePackageName "prelude" - version = Utils.unsafeVersion "6.0.2" - ref = "v6.0.2" - publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" - , location: Just $ GitHub { owner: "purescript", repo: "purescript-prelude", subdir: Nothing } - , name - , ref - , resolutions: Nothing - } - - -- Legacy import with archive fallback - Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs - - -- Verify the package was published to storage - Storage.query name >>= \versions -> - unless (Set.member version versions) do - Except.throw $ "Expected " <> formatPackageVersion name version <> " to be published to registry storage." - - case result of - Left exn -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Got an Aff exception! " <> Aff.message exn - Right (Left err) -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Expected prelude@6.0.2 to be published via archive fallback but got error: " <> err - Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do @@ -307,6 +251,10 @@ spec = do copyFixture "registry-storage" copyFixture "registry-archive" copyFixture "github-packages" + -- FIXME: This is a bit hacky, but we remove effect-4.0.0.tar.gz since the unit test publishes + -- it from scratch and will fail if effect-4.0.0 is already in storage. We have it in storage + -- for the separate integration tests. 
+ FS.Extra.remove $ Path.concat [ testFixtures, "registry-storage", "effect-4.0.0.tar.gz" ] let readFixtures = do diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing @@ -103,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -121,6 +125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 414b09a57..2d4a7a2dc 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,8 +2,6 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude -import Data.Array.NonEmpty as NonEmptyArray -import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map import Data.Set as Set @@ -14,7 +12,6 @@ import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (legacyPackageSetCodec) import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName -import Registry.Sha256 as Sha256 import Registry.Test.Assert as Assert import Registry.Test.Utils as Utils import Registry.Version as Version @@ -93,7 +90,7 @@ packageSet = PackageSet convertedPackageSet :: ConvertedLegacyPackageSet convertedPackageSet = - case Legacy.PackageSet.convertPackageSet index metadata packageSet of + case Legacy.PackageSet.convertPackageSet index packageSet of Left err -> unsafeCrashWith err Right value -> value where @@ -104,13 +101,6 @@ convertedPackageSet = , mkManifest prelude [] ] - metadata = Map.fromFoldable - [ unsafeMetadataEntry assert - , unsafeMetadataEntry console - , unsafeMetadataEntry effect - , unsafeMetadataEntry prelude - ] - legacyPackageSetJson :: String legacyPackageSetJson = """{ @@ -201,23 +191,3 @@ mkManifest (Tuple name version) deps = do (PackageName.print name) (LenientVersion.print version) (map (bimap PackageName.print (LenientVersion.version >>> toRange)) deps) - -unsafeMetadataEntry :: Tuple PackageName LenientVersion -> 
Tuple PackageName Metadata -unsafeMetadataEntry (Tuple name version) = do - let - published = - { ref: LenientVersion.raw version - , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" - , bytes: 0.0 - , compilers: NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2") - , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom - } - - metadata = Metadata - { location: GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print name, subdir: Nothing } - , owners: Nothing - , published: Map.singleton (LenientVersion.version version) published - , unpublished: Map.empty - } - - Tuple name metadata diff --git a/app/test/App/Manifest/SpagoYaml.purs b/app/test/App/Manifest/SpagoYaml.purs index 973af0a99..52174063c 100644 --- a/app/test/App/Manifest/SpagoYaml.purs +++ b/app/test/App/Manifest/SpagoYaml.purs @@ -19,6 +19,6 @@ spec = do config <- SpagoYaml.readSpagoYaml (Path.concat [ fixturesPath, path ]) >>= case _ of Left err -> Aff.throwError $ Aff.error err Right config -> pure config - case SpagoYaml.spagoYamlToManifest config of + case SpagoYaml.spagoYamlToManifest "v1.0.0" config of Left err -> Assert.fail $ path <> " failed: " <> err Right _ -> pure unit diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 008d86cca..a858dc675 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -33,8 +33,6 @@ import Registry.App.Effect.Archive (ARCHIVE) import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -94,7 +92,6 @@ type TEST_EFFECTS = + GITHUB_CACHE + LEGACY_CACHE + COMPILER_CACHE - + COMMENT + LOG + EXCEPT String + AFF @@ -136,7 +133,6 @@ runTestEffects env operation = Aff.attempt do # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..cdb137ad4 --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,76 @@ +-- 
migrate:up
+
+-- Common job information table
+CREATE TABLE job_info (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ createdAt TEXT NOT NULL,
+ startedAt TEXT,
+ finishedAt TEXT,
+ success INTEGER NOT NULL DEFAULT 0
+);
+
+-- Publishing jobs
+CREATE TABLE publish_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+-- Unpublishing jobs
+CREATE TABLE unpublish_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+-- Package transfer jobs
+CREATE TABLE transfer_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+-- Compiler matrix jobs
+CREATE TABLE matrix_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ compilerVersion TEXT NOT NULL,
+ -- The build plan, computed before the job is stored in the queue so that
+ -- the plan cannot change if multiple jobs targeting one package are
+ -- interrupted by a higher-priority job.
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+-- Package set jobs
+CREATE TABLE package_set_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ payload JSON NOT NULL,
+ rawPayload TEXT NOT NULL,
+ signature TEXT,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS logs (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE,
+ level INTEGER NOT NULL,
+ message TEXT NOT NULL,
+ timestamp TEXT NOT NULL
+);
+
+-- migrate:down
+
+-- Drop the child tables first: they all reference job_info, so this order
+-- also works when foreign key enforcement is on.
+DROP TABLE logs;
+DROP TABLE publish_jobs;
+DROP TABLE unpublish_jobs;
+DROP TABLE transfer_jobs;
+DROP TABLE matrix_jobs;
+DROP TABLE package_set_jobs;
+DROP TABLE job_info;
diff --git a/db/schema.sql b/db/schema.sql
index 116de1dda..65319293a 100644
--- a/db/schema.sql
+++ b/db/schema.sql
@@ -1,21 +1,57 @@
 CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key);
-CREATE TABLE jobs (
- jobId text primary key not null,
- jobType text not null,
- packageName text not null,
- ref text not null,
- createdAt text not null,
- finishedAt text,
- success integer not null default 0
+CREATE TABLE job_info (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ createdAt TEXT NOT NULL,
+ startedAt TEXT,
+ finishedAt TEXT,
+ success INTEGER NOT NULL DEFAULT 0
+);
+CREATE TABLE publish_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+CREATE TABLE unpublish_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+CREATE TABLE transfer_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+CREATE TABLE matrix_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ packageName TEXT NOT NULL,
+ packageVersion TEXT NOT NULL,
+ compilerVersion TEXT NOT NULL,
+ -- The build plan, computed before the job is stored in the queue so that
+ -- the plan cannot change if multiple jobs targeting one package are
+ -- interrupted by a higher-priority job.
+ payload JSON NOT NULL,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+CREATE TABLE package_set_jobs (
+ jobId TEXT PRIMARY KEY NOT NULL,
+ payload JSON NOT NULL,
+ rawPayload TEXT NOT NULL,
+ signature TEXT,
+ FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
 );
 CREATE TABLE logs (
- id integer primary key autoincrement,
- jobId text not null references jobs on delete cascade,
- level integer not null,
- message text not null,
- timestamp text not null
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE,
+ level INTEGER NOT NULL,
+ message TEXT NOT NULL,
+ timestamp TEXT NOT NULL
 );
 -- Dbmate schema migrations
 INSERT INTO "schema_migrations" (version) VALUES
   ('20230711143615'),
-  ('20230711143803');
+  ('20230711143803'),
+  ('20240914170550'),
+  ('20240914171030');
diff --git a/flake.nix b/flake.nix
index edca743d8..bbec41157 100644
--- a/flake.nix
+++ b/flake.nix
@@ -198,7 +198,8 @@
 
       # Integration test - exercises the server API
       integration = import ./nix/test/integration.nix {
-        inherit pkgs spagoSrc testEnv;
+        inherit pkgs spagoSrc;
+        testSupport = testEnv;
       };
 
       # VM smoke test - verifies deployment without full API testing
@@ -211,13 +212,16 @@
       devShells.default = pkgs.mkShell {
         name = "registry-dev";
 
-        # Development defaults from .env.example
         SERVER_PORT = envDefaults.SERVER_PORT;
         DATABASE_URL = envDefaults.DATABASE_URL;
 
         # Dhall environment variables needed for manifest typechecking
         inherit DHALL_TYPES DHALL_PRELUDE GIT_TERMINAL_PROMPT;
 
+        # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*)
+        # are NOT set here to avoid conflicting with .env files used by production scripts
+        # like legacy-importer. Use `nix run .#test-env` to run E2E tests with mocked services.
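+        # For example (illustrative), inside this shell the E2E suite runs against
+        # the mocked services via the `spago-test-e2e` wrapper defined below.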
+ packages = with pkgs; registry-runtime-deps @@ -229,11 +233,19 @@ nodejs jq dbmate + sqlite purs spago purs-tidy-unstable purs-backend-es-unstable process-compose + + # E2E test runner script - uses same fixed test environment as test-env + (writeShellScriptBin "spago-test-e2e" '' + set -euo pipefail + ${testEnv.envToExports testEnv.testEnv} + exec spago run -p registry-app-e2e + '') ]; }; } diff --git a/lib/fixtures/manifests/aff-5.1.2.json b/lib/fixtures/manifests/aff-5.1.2.json index 22684f05c..77bb331dd 100644 --- a/lib/fixtures/manifests/aff-5.1.2.json +++ b/lib/fixtures/manifests/aff-5.1.2.json @@ -6,6 +6,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-aff" }, + "ref": "v5.1.2", "dependencies": { "datetime": ">=4.0.0 <5.0.0", "effect": ">=2.0.0 <3.0.0", diff --git a/lib/fixtures/manifests/mysql-4.1.1.json b/lib/fixtures/manifests/mysql-4.1.1.json index 6f9703b61..e0e8c70fe 100644 --- a/lib/fixtures/manifests/mysql-4.1.1.json +++ b/lib/fixtures/manifests/mysql-4.1.1.json @@ -6,6 +6,7 @@ "githubOwner": "oreshinya", "githubRepo": "purescript-mysql" }, + "ref": "v4.1.1", "dependencies": { "aff": ">=5.0.2 <6.0.0", "js-date": ">=6.0.0 <7.0.0", diff --git a/lib/fixtures/manifests/prelude-4.1.1.json b/lib/fixtures/manifests/prelude-4.1.1.json index 3dd47411c..56ac6db20 100644 --- a/lib/fixtures/manifests/prelude-4.1.1.json +++ b/lib/fixtures/manifests/prelude-4.1.1.json @@ -7,6 +7,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-prelude" }, + "ref": "v4.1.1", "owners": [ { "keytype": "ed-25519", diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..4c399342e 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,33 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) + , Job(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetJobData + , PublishJobData + , Route(..) 
+ , TransferJobData + , UnpublishJobData + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLogLevel + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,23 +36,33 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) +import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation import Registry.PackageName (PackageName) import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy (Proxy(..)) data Route = Publish | Unpublish | Transfer - | Jobs + | PackageSets + | Jobs { since :: Maybe DateTime, include_completed :: Maybe Boolean } | Job JobId { level :: Maybe LogLevel, since :: Maybe DateTime } | Status @@ -37,7 +73,11 @@ routes = Routing.root $ Routing.prefix "api" $ Routing.prefix "v1" $ RoutingG.su { "Publish": "publish" / RoutingG.noArgs , "Unpublish": "unpublish" / RoutingG.noArgs , "Transfer": "transfer" / RoutingG.noArgs - , "Jobs": "jobs" / RoutingG.noArgs + , "PackageSets": "package-sets" / RoutingG.noArgs + , "Jobs": "jobs" ? + { since: Routing.optional <<< timestampP <<< Routing.string + , include_completed: Routing.optional <<< Routing.boolean + } , "Job": "jobs" / ( jobIdS ? 
{ level: Routing.optional <<< logLevelP <<< Routing.string @@ -64,55 +104,193 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | PackageSetJob PackageSetJobData + +type JobInfo r = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , ref :: String , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +type PackageSetJobData = JobInfo + ( payload :: PackageSetOperation + , jobType :: Proxy "packageset" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + PackageSetJob j -> CJ.encode packageSetJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec , packageName: PackageName.codec - , ref: CJ.string + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. 
IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +packageSetJobDataCodec :: CJ.Codec PackageSetJobData +packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "packageset") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , payload: Operation.packageSetOperationCodec + } + +jobInfo :: Job -> JobInfo () +jobInfo = case _ of + PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + newtype JobId = JobId String derive instance Newtype JobId _ +derive newtype instance Eq JobId jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob +data JobType + = PublishJobType + | 
UnpublishJobType + | TransferJobType + | MatrixJobType + | PackageSetJobType derive instance Eq JobType -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - printJobType :: JobType -> String printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) + PublishJobType -> "publish" + UnpublishJobType -> "unpublish" + TransferJobType -> "transfer" + MatrixJobType -> "matrix" + PackageSetJobType -> "packageset" type LogLine = { level :: LogLevel @@ -129,7 +307,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object , timestamp: Internal.Codec.iso8601DateTime } -data LogLevel = Debug | Info | Warn | Error +data LogLevel = Debug | Info | Warn | Notice | Error derive instance Eq LogLevel derive instance Ord LogLevel @@ -139,6 +317,7 @@ printLogLevel = case _ of Debug -> "DEBUG" Info -> "INFO" Warn -> "WARN" + Notice -> "NOTICE" Error -> "ERROR" -- These numbers are not consecutive so that we can insert new log levels if need be @@ -147,6 +326,7 @@ logLevelToPriority = case _ of Debug -> 0 Info -> 10 Warn -> 20 + Notice -> 25 Error -> 30 logLevelFromPriority :: Int -> Either String LogLevel @@ -154,6 +334,7 @@ logLevelFromPriority = case _ of 0 -> Right Debug 10 -> Right Info 20 -> Right Warn + 25 -> Right Notice 30 -> Right Error other -> Left $ "Invalid log level priority: " <> show other @@ -162,5 +343,6 @@ parseLogLevel = case _ of "DEBUG" -> Right Debug "INFO" -> Right Info "WARN" -> Right Warn + "NOTICE" -> Right Notice "ERROR" -> Right Error other -> Left $ "Invalid log level: " <> other diff --git a/lib/src/Manifest.purs b/lib/src/Manifest.purs index d660b459b..49bb62f2c 100644 --- a/lib/src/Manifest.purs +++ b/lib/src/Manifest.purs @@ -48,6 +48,7 @@ newtype Manifest = Manifest , version :: Version , license :: License , location :: Location + , ref :: String , owners :: Maybe (NonEmptyArray Owner) , description :: Maybe String , includeFiles :: Maybe (NonEmptyArray NonEmptyString) @@ -77,6 +78,7 @@ codec = Profunctor.wrapIso Manifest $ CJ.named "Manifest" $ CJ.object $ CJ.recordProp @"license" License.codec $ CJ.recordPropOptional @"description" (Internal.Codec.limitedString 300) $ CJ.recordProp @"location" Location.codec + $ CJ.recordProp @"ref" CJ.string $ CJ.recordPropOptional @"owners" (CJ.Common.nonEmptyArray Owner.codec) $ CJ.recordPropOptional @"includeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) $ CJ.recordPropOptional @"excludeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 4837b49ed..eb3b08480 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -7,11 +7,13 @@ -- | https://github.com/purescript/registry-index module Registry.ManifestIndex ( ManifestIndex + , IncludeRanges(..) + , delete + , dependants , empty , fromSet , insert , insertIntoEntryFile - , delete , lookup , maximalIndex , packageEntryDirectory @@ -20,10 +22,10 @@ module Registry.ManifestIndex , printEntry , readEntryFile , removeFromEntryFile + , toArray , toMap - , toSortedArray , topologicalSort - , IncludeRanges(..) 
+ , toSortedArray , writeEntryFile ) where @@ -87,13 +89,18 @@ empty = ManifestIndex Map.empty toMap :: ManifestIndex -> Map PackageName (Map Version Manifest) toMap (ManifestIndex index) = index --- | Produce an array of manifests topologically sorted by dependencies. -toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest -toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do +-- | Produce an array of all the manifests +toArray :: ManifestIndex -> Array Manifest +toArray (ManifestIndex index) = do Tuple _ versions <- Map.toUnfoldableUnordered index Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] +-- | Produce an array of all the manifests, topologically sorted by dependencies. +toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest +toSortedArray includeRanges index = + topologicalSort includeRanges $ Set.fromFoldable $ toArray index + -- | Look up a package version's manifest in the manifest index. lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest lookup name version (ManifestIndex index) = @@ -199,6 +206,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index c54bed31e..3235661de 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -63,17 +63,11 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object $ CJ.record -- | Metadata about a published package version. --- | --- | NOTE: The `ref` field is UNSPECIFIED and WILL BE REMOVED in the future. Do --- | not rely on its presence! type PublishedMetadata = { bytes :: Number , compilers :: NonEmptyArray Version , hash :: Sha256 , publishedTime :: DateTime - - -- UNSPECIFIED: Will be removed in the future. - , ref :: String } publishedMetadataCodec :: CJ.Codec PublishedMetadata @@ -82,7 +76,6 @@ publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object , compilers: CJ.Common.nonEmptyArray Version.codec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime - , ref: CJ.string } -- | Metadata about an unpublished package version. diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..7327001e6 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,16 +14,21 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) 
, PackageSetUpdateData + , PackageSetUpdateRequest , PublishData , TransferData , UnpublishData , authenticatedCodec + , packageName + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec + , packageSetUpdateRequestCodec , publishCodec , transferCodec , unpublishCodec @@ -58,6 +63,25 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -74,6 +98,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -84,6 +109,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } @@ -178,6 +204,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. -- | -- | For full details, see the registry spec: @@ -197,3 +230,33 @@ packageSetUpdateCodec = CJ.named "PackageSetUpdate" $ CJ.Record.object -- `Compat` version of the `maybe` codec. , packages: Internal.Codec.packageMap (CJ.Common.nullable Version.codec) } + +-- | A package set update request that can be optionally authenticated. +-- | +-- | Non-trustees can submit add/upgrade operations without authentication. +-- | Trustees must sign requests for restricted operations (compiler changes, +-- | package removals) with pacchettibotti's key. +type PackageSetUpdateRequest = + { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature + } + +-- | A codec for encoding and decoding a `PackageSetUpdateRequest` as JSON. 
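+-- |
+-- | On the wire, `payload` holds the raw JSON of the operation as a string and
+-- | `signature` is an optional detached signature over it. A minimal sketch,
+-- | with hypothetical values:
+-- |
+-- |   { "payload": "{ \"packages\": { \"console\": \"6.1.0\" } }",
+-- |     "signature": "<base64-encoded ED25519 signature>"
+-- |   }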
+packageSetUpdateRequestCodec :: CJ.Codec PackageSetUpdateRequest +packageSetUpdateRequestCodec = CJ.named "PackageSetUpdateRequest" $ Codec.codec' decode encode + where + decode json = do + rep <- Codec.decode repCodec json + payloadJson <- except $ lmap JSON.DecodeError.basic $ JSON.parse rep.payload + operation <- Codec.decode packageSetOperationCodec payloadJson + pure { payload: operation, rawPayload: rep.payload, signature: map Signature rep.signature } + + encode { rawPayload, signature } = + CJ.encode repCodec { payload: rawPayload, signature: map (\(Signature s) -> s) signature } + + repCodec :: CJ.Codec { payload :: String, signature :: Maybe String } + repCodec = CJ.named "PackageSetUpdateRequestRep" $ CJ.Record.object + { payload: CJ.string + , signature: CJ.Record.optional CJ.string + } diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. -solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version)) solveWithCompiler pursRange (CompilerIndex index) required = do let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } - let pursVersion = Map.lookup purs results + let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results pure $ Tuple pursVersion $ Map.delete purs results -- | Data from the registry index, listing dependencies for each version of diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index 18e0863ef..1fb7e13a6 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -151,9 +151,9 @@ spec = do contextEntry :: String contextEntry = - """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} + """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.1","dependencies":{}} +{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.2","dependencies":{}} 
+{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.3","dependencies":{}} """ testIndex @@ -242,6 +242,7 @@ manifestCodec' = Profunctor.dimap to from $ CJ.named "ManifestRep" $ CJ.Record.o { url: "https://github.com/purescript/purescript-" <> PackageName.print name <> ".git" , subdir: Nothing } + , ref: "v" <> Version.print version , description: Nothing , owners: Nothing , includeFiles: Nothing diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index 02e12c053..8daffc02c 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -29,8 +29,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", - "publishedTime": "2021-03-27T10:03:46.000Z", - "ref": "v0.1.0" + "publishedTime": "2021-03-27T10:03:46.000Z" }, "0.2.1": { "bytes": 3365, @@ -38,8 +37,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", - "publishedTime": "2022-05-15T10:51:57.000Z", - "ref": "v0.2.1" + "publishedTime": "2022-05-15T10:51:57.000Z" }, "1.0.0": { "bytes": 5155, @@ -47,8 +45,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", - "publishedTime": "2022-11-03T17:30:28.000Z", - "ref": "v1.0.0" + "publishedTime": "2022-11-03T17:30:28.000Z" }, "1.0.1": { "bytes": 5635, @@ -57,8 +54,7 @@ recordStudio = "0.13.1" ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", - "publishedTime": "2022-11-04T12:21:09.000Z", - "ref": "v1.0.1" + "publishedTime": "2022-11-04T12:21:09.000Z" } }, "unpublished": { diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs index 2ccb4075a..1400e70ee 100644 --- a/lib/test/Registry/Operation.purs +++ b/lib/test/Registry/Operation.purs @@ -54,7 +54,8 @@ minimalPublish = { "compiler": "0.15.6", "name": "my-package", - "ref": "v1.0.0" + "ref": "v1.0.0", + "version": "1.0.0" }""" fullPublish :: String @@ -67,7 +68,8 @@ fullPublish = "subdir": "core" }, "name": "my-package", - "ref": "c23snabhsrib39" + "ref": "c23snabhsrib39", + "version": "1.0.0" }""" unpublish :: String diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index cf474f103..955b08164 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -15,7 +15,8 @@ import Registry.Manifest (Manifest(..)) import Registry.Metadata (Metadata(..)) import Registry.Operation.Validation (UnpublishError(..), forbiddenModules, getUnresolvedDependencies, validatePursModule, validateUnpublish) import Registry.Test.Assert as Assert -import Registry.Test.Utils (defaultHash, defaultLocation, fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) +import Registry.Test.Fixtures (defaultHash, defaultLocation) +import Registry.Test.Utils (fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) import Test.Spec (Spec) import Test.Spec as Spec @@ -66,7 +67,7 @@ spec = do inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0") - publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers } metadata = Metadata { location: defaultLocation diff --git a/nix/overlay.nix b/nix/overlay.nix index 24b36afa1..8ec743a39 100644 --- a/nix/overlay.nix +++ 
b/nix/overlay.nix @@ -185,8 +185,9 @@ in ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; - # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs="; + # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the + # hash from the error + npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI="; installPhase = '' mkdir -p $out @@ -239,7 +240,7 @@ in registry-server = prev.callPackage (buildRegistryPackage { name = "registry-server"; - module = "Registry.App.Server"; + module = "Registry.App.Main"; description = "PureScript Registry API server"; src = ../app; spagoLock = app; diff --git a/nix/test/config.nix b/nix/test/config.nix index 66813fe5b..07917444f 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -19,35 +19,51 @@ let ports = { server = serverPort; github = serverPort + 1; - bucket = serverPort + 2; - s3 = serverPort + 3; - pursuit = serverPort + 4; - healthchecks = serverPort + 5; + # Single storage WireMock instance for bucket + s3 + pursuit (merged for stateful scenarios) + storage = serverPort + 2; + healthchecks = serverPort + 3; }; - # Default state directory for tests - defaultStateDir = "/var/lib/registry-server"; + # Fixed state directory for tests - not configurable to avoid mismatch between + # test-env and spago-test-e2e shells. The test-env script cleans this up on start. + stateDir = "/tmp/registry-test-env"; # Mock service URLs for test environment + # All storage-related APIs (s3, bucket, pursuit) now share a single WireMock instance mockUrls = { + registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; - s3 = "http://localhost:${toString ports.s3}"; - bucket = "http://localhost:${toString ports.bucket}"; - pursuit = "http://localhost:${toString ports.pursuit}"; + storage = "http://localhost:${toString ports.storage}"; healthchecks = "http://localhost:${toString ports.healthchecks}"; }; + # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations). + # These are test-only keys, not used in production. + testKeys = { + # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org + public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK"; + # OpenSSH format private key + private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo="; + }; + # Complete test environment - starts with .env.example defaults which include - # mock secrets, then overrides external services with mock URLs. The DATABASE_URL - # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are - # implemented in the script directly. + # mock secrets, then overrides external services with mock URLs. + # All storage-related APIs share a single WireMock instance for stateful scenarios. 
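+  # Note that Nix's `//` merge is right-biased, so the overrides below win over
+  # the .env.example defaults; e.g. { a = 1; b = 2; } // { b = 3; } yields
+  # { a = 1; b = 3; }.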
testEnv = envDefaults // { + # State directory and derived paths + STATE_DIR = stateDir; + REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; + DATABASE_URL = "sqlite:${stateDir}/db/registry.sqlite3"; # Mock service URLs (override production endpoints) + REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; - S3_API_URL = mockUrls.s3; - S3_BUCKET_URL = mockUrls.bucket; - PURSUIT_API_URL = mockUrls.pursuit; + # All storage-related APIs share a single base URL for stateful scenarios + S3_API_URL = mockUrls.storage; + S3_BUCKET_URL = mockUrls.storage; + PURSUIT_API_URL = mockUrls.storage; HEALTHCHECKS_URL = mockUrls.healthchecks; + PACCHETTIBOTTI_ED25519_PUB = testKeys.public; + PACCHETTIBOTTI_ED25519 = testKeys.private; }; envToExports = @@ -61,17 +77,22 @@ let exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@" ''; - # Apply git mock overlay to get registry packages with mocked git. + # Test overlay: mocks git and limits compilers for faster tests. # Using pkgs.extend avoids a second nixpkgs instantiation (more efficient). - # This substitutes gitMock for git in registry-runtime-deps, which causes - # registry-server to be rebuilt with the mock baked into its PATH wrapper. - gitMockOverlay = _: prev: { + testOverlay = _: prev: { + # Substitute gitMock for git in registry-runtime-deps registry-runtime-deps = map ( pkg: if pkg == prev.git then gitMock else pkg ) prev.registry-runtime-deps; + + # Limit to 2 compilers for faster matrix job tests. + # These versions match the compilers referenced in app/fixtures. + registry-supported-compilers = lib.filterAttrs ( + name: _: name == "purs-0_15_10" || name == "purs-0_15_11" + ) prev.registry-supported-compilers; }; - registryPkgs = pkgs.extend gitMockOverlay; + registryPkgs = pkgs.extend testOverlay; # Helper to create GitHub contents API response, as it returns base64-encoded content base64Response = @@ -127,6 +148,30 @@ let }; }; + # Console package helpers (console@6.1.0) + consoleBase64Response = + fileName: + base64Response { + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + inherit fileName; + filePath = rootPath + "/app/fixtures/github-packages/console-6.1.0/${fileName}"; + }; + + console404Response = fileName: { + request = { + method = "GET"; + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + }; + response = { + status = 404; + headers."Content-Type" = "application/json"; + jsonBody = { + message = "Not Found"; + documentation_url = "https://docs.github.com/rest/repos/contents#get-repository-content"; + }; + }; + }; + # GitHub API wiremock mappings githubMappings = [ (effectBase64Response "bower.json") @@ -136,6 +181,13 @@ let (effect404Response "spago.dhall") (effect404Response "purs.json") (effect404Response "package.json") + # Console package (console@6.1.0) + (consoleBase64Response "bower.json") + (consoleBase64Response "LICENSE") + (console404Response "spago.yaml") + (console404Response "spago.dhall") + (console404Response "purs.json") + (console404Response "package.json") { request = { method = "GET"; @@ -153,85 +205,503 @@ let }; }; } - ]; - - # S3 API wiremock mappings (serves package tarballs) - s3Mappings = [ + # Accept issue comment creation (used by GitHubIssue workflow) { request = { - method = "GET"; - url = "/prelude/6.0.1.tar.gz"; + method = "POST"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments"; }; response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "prelude-6.0.1.tar.gz"; + 
status = 201; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + body = "ok"; + }; }; } + # Accept issue closing (used by GitHubIssue workflow) { request = { - method = "GET"; - url = "/type-equality/4.0.1.tar.gz"; + method = "PATCH"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+"; }; response = { status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "type-equality-4.0.1.tar.gz"; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + state = "closed"; + }; }; } - ]; - - s3Files = [ - { - name = "prelude-6.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; - } - { - name = "type-equality-4.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz"; - } - ]; - - # S3 Bucket API wiremock mappings (handles upload/list operations) - # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name - # is in the hostname (purescript-registry.localhost:9002) and the path contains - # only the key. For example: GET /?prefix=effect/ instead of GET /purescript-registry?prefix=effect/ - bucketMappings = [ - # List objects - virtual-hosted style (bucket in hostname, path is just /?prefix=...) + # GitHub Teams API for trustee verification (used by GitHubIssue workflow) { request = { method = "GET"; - urlPattern = "/\\?.*prefix=.*"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; }; response = { status = 200; - headers."Content-Type" = "application/xml"; - body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; - }; - } - # Upload effect@4.0.0 - virtual-hosted style (path is /effect/4.0.0.tar.gz) - { - request = { - method = "PUT"; - urlPattern = "/effect/4\\.0\\.0\\.tar\\.gz.*"; - }; - response = { - status = 200; - headers."ETag" = ''"abc123"''; - headers."Content-Type" = "application/xml"; - body = ""; - }; - } - # Fail upload for prelude (to test error handling) - { - request = { - method = "PUT"; - urlPattern = "/prelude/6\\.0\\.1\\.tar\\.gz.*"; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { + login = "packaging-team-user"; + id = 1; + } + ]; }; - response.status = 500; } ]; + # Fixture directory for storage (tarballs) + storageFixturesDir = rootPath + "/app/fixtures/registry-storage"; + + # Parse tarball filename into package name and version + # e.g. "effect-4.0.0.tar.gz" -> { name = "effect"; version = "4.0.0"; fileName = "effect-4.0.0.tar.gz"; } + # e.g. "type-equality-4.0.1.tar.gz" -> { name = "type-equality"; version = "4.0.1"; ... 
} + parseTarball = + fileName: + let + base = lib.removeSuffix ".tar.gz" fileName; + parts = lib.splitString "-" base; + # Version is the last part; name is everything before + version = lib.last parts; + name = lib.concatStringsSep "-" (lib.init parts); + in + { + inherit name version fileName; + }; + + # List all .tar.gz files in storage fixtures + storageTarballs = map parseTarball ( + builtins.filter (f: lib.hasSuffix ".tar.gz" f) ( + builtins.attrNames (builtins.readDir storageFixturesDir) + ) + ); + + # Metadata fixtures directory (to determine which packages are "published") + metadataFixturesDir = rootPath + "/app/fixtures/registry/metadata"; + metadataFiles = builtins.attrNames (builtins.readDir metadataFixturesDir); + publishedPackageNames = map (f: lib.removeSuffix ".json" f) metadataFiles; + + # ============================================================================ + # UNIFIED STORAGE MAPPINGS WITH WIREMOCK SCENARIOS + # ============================================================================ + # + # All storage-related APIs (S3 downloads, bucket uploads, Pursuit) are now served + # by a single WireMock instance with stateful scenarios. This enables proper + # read-after-write semantics - when a test publishes a package, subsequent + # downloads will succeed. + # + # Scenario design: + # - One scenario per package-version (e.g., "effect-4.0.0") + # - WireMock scenarios always start at state "Started" + # - Published packages (has metadata): "Started" means Present (tarball available) + # - After DELETE, transitions to "Deleted" state (404 on GET) + # - Unpublished packages (no metadata): "Started" means Absent (tarball 404) + # - After PUT upload, transitions to "Present" state + # - After DELETE, transitions to "Deleted" state (404 on GET) + # + # State machine: + # Published: Started(Present) --DELETE--> Deleted(404) + # Unpublished: Started(404) --PUT--> Present(200) --DELETE--> Deleted(404) + # + # Reset between tests via POST /__admin/scenarios/reset + # ============================================================================ + + # Generate S3 GET mappings with scenario support + s3Mappings = lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + tarPath = "/${pkg.name}/${pkg.version}.tar.gz"; + in + if isPublished then + # Published package: tarball available in Started state, 404 in Deleted state + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: 404 in Started, 200 in Present, 404 in Deleted + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + 
requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs; + + # Generate s3Files list from fixtures (tarballs for bodyFileName references) + s3Files = map (pkg: { + name = pkg.fileName; + path = storageFixturesDir + "/${pkg.fileName}"; + }) storageTarballs; + + # Generate bucket PUT/DELETE/listObjects mappings with scenario support + # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name + # is in the hostname (purescript-registry.localhost:9002) and the path contains + # only the key. + bucketMappings = + # Generate per-package listObjects mappings with scenario support + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedName = lib.replaceStrings [ "-" ] [ "\\-" ] pkg.name; + listUrlPattern = "/\\?.*prefix=${escapedName}.*"; + presentContents = ''${pkg.name}/${pkg.version}.tar.gz1000"abc123"''; + in + if isPublished then + # Published package: listObjects returns contents in Started, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: listObjects returns empty in Started, contents in Present, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ ( + # Generate PUT/DELETE mappings for all packages with scenario support + lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedVersion = lib.replaceStrings [ "." ] [ "\\." 
] pkg.version; + urlPattern = "/${pkg.name}/${escapedVersion}\\.tar\\.gz.*"; + in + if isPublished then + # Published package: PUT fails (already exists), DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already published"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Started state (package exists) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: PUT succeeds and transitions to Present, DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 200; + headers."ETag" = ''"abc123"''; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Present"; + } + # PUT in Present state fails (already uploaded) + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already uploaded"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + # DELETE in Started state fails (doesn't exist yet) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Present state (after publish) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs + ); + + # Pursuit API mappings with scenario support + pursuitMappings = + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + versionsUrl = "/packages/purescript-${pkg.name}/available-versions"; + publishedVersionsBody = ''[["${pkg.version}","https://pursuit.purescript.org/packages/purescript-${pkg.name}/${pkg.version}"]]''; + in + if isPublished then + # Published package: versions available in Started, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: empty in Started, has version in Present, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = 
"Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ [ + # Accept documentation uploads (POST /packages) + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; + # Healthchecks API wiremock mappings (simple ping endpoint) healthchecksMappings = [ { @@ -246,46 +716,9 @@ let } ]; - # Pursuit API wiremock mappings - pursuitMappings = [ - { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-effect/available-versions"; - }; - response = { - status = 200; - body = ''[]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-type-equality/available-versions"; - }; - response = { - status = 200; - body = ''[["4.0.1","https://pursuit.purescript.org/packages/purescript-type-equality/4.0.1"]]''; - }; - } - { - request = { - method = "POST"; - url = "/packages"; - }; - response.status = 201; - } - ]; + # Combined storage mappings (S3 + bucket + Pursuit) + storageMappings = s3Mappings ++ bucketMappings ++ pursuitMappings; + storageFiles = s3Files; # Wiremock root directory builder mkWiremockRoot = @@ -304,7 +737,9 @@ let ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") files} ''; - # All wiremock configurations + # All WireMock configurations. + # Add new WireMock services here; both test-env.nix and integration.nix + # derive their processes from this attribute set automatically. 
wiremockConfigs = { github = { port = ports.github; @@ -313,26 +748,13 @@ let mappings = githubMappings; }; }; - s3 = { - port = ports.s3; - rootDir = mkWiremockRoot { - name = "s3"; - mappings = s3Mappings; - files = s3Files; - }; - }; - bucket = { - port = ports.bucket; + # Single storage WireMock instance with stateful scenarios + storage = { + port = ports.storage; rootDir = mkWiremockRoot { - name = "bucket"; - mappings = bucketMappings; - }; - }; - pursuit = { - port = ports.pursuit; - rootDir = mkWiremockRoot { - name = "pursuit"; - mappings = pursuitMappings; + name = "storage"; + mappings = storageMappings; + files = storageFiles; }; }; healthchecks = { @@ -357,45 +779,50 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" - export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . - GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ - GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ - git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${stateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + cp -r ${rootPath}/app/fixtures/github-packages/console-6.1.0 "$FIXTURES_DIR/purescript/purescript-console" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do + cd "$repo" + git init -b master && git add . 
+ gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + # Tag the initial commit so we can reset to it for test isolation + gitbot tag -m "initial-fixture" initial-fixture + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + gitbot -C "$FIXTURES_DIR/purescript/purescript-console" tag -m "v6.1.0" v6.1.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( builtins.toJSON { name = "effect"; ref = "v4.0.0"; - compiler = "0.15.9"; + compiler = "0.15.10"; location = { githubOwner = "purescript"; githubRepo = "purescript-effect"; @@ -473,12 +900,12 @@ in { inherit ports - defaultStateDir + stateDir mockUrls testEnv envToExports gitMock - gitMockOverlay + testOverlay wiremockConfigs combinedWiremockRoot setupGitFixtures @@ -487,10 +914,8 @@ in serverStartScript # For custom wiremock setups githubMappings - s3Mappings - s3Files - bucketMappings - pursuitMappings + storageMappings + storageFiles mkWiremockRoot ; } diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..75b6e6487 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -1,7 +1,9 @@ { pkgs, spagoSrc, - testEnv, + # Test support module from test-env.nix. Named 'testSupport' to avoid confusion + # with testSupport.testEnv (the environment variables attribute set). + testSupport, }: if pkgs.stdenv.isDarwin then pkgs.runCommand "integration-skip" { } '' @@ -29,7 +31,7 @@ else ''; }; - ports = testEnv.ports; + ports = testSupport.ports; in pkgs.runCommand "e2e-integration" { @@ -38,10 +40,11 @@ else pkgs.curl pkgs.jq pkgs.git + pkgs.sqlite pkgs.nss_wrapper - testEnv.wiremockStartScript - testEnv.serverStartScript - testEnv.setupGitFixtures + testSupport.wiremockStartScript + testSupport.serverStartScript + testSupport.setupGitFixtures ]; NODE_PATH = "${pkgs.registry-package-lock}/node_modules"; # Use nss_wrapper to resolve S3 bucket subdomain in the Nix sandbox. @@ -57,7 +60,11 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + export REPO_FIXTURES_DIR="$STATE_DIR/repo-fixtures" + + # Export test environment variables for E2E test runners + ${testSupport.envToExports testSupport.testEnv} + mkdir -p $STATE_DIR # Start wiremock services @@ -65,8 +72,8 @@ else start-wiremock & WIREMOCK_PID=$! 
- # Wait for wiremock (github, bucket, s3, pursuit) - for port in ${toString ports.github} ${toString ports.bucket} ${toString ports.s3} ${toString ports.pursuit}; do + # Wait for wiremock (github, storage, healthchecks) + for port in ${toString ports.github} ${toString ports.storage} ${toString ports.healthchecks}; do until curl -s "http://localhost:$port/__admin" > /dev/null 2>&1; do sleep 0.5 done diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..d754f36b6 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -42,11 +46,14 @@ else timeout=30 ) - # Verify we get a valid JSON response (empty array for jobs) + # Verify we get a valid JSON response (the jobs endpoint responds) result = registry.succeed( "curl -s http://localhost:${envVars.SERVER_PORT}/api/v1/jobs" ) - assert result.strip() == "[]", f"Expected empty jobs array, got: {result}" + + # The server may create matrix jobs on startup for new compilers, so we just verify + # the response is valid JSON (starts with '[') + assert result.strip().startswith("["), f"Expected JSON array, got: {result}" # Verify the database was created and migrations ran registry.succeed("test -f ${stateDir}/db/registry.sqlite3") @@ -54,6 +61,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. 
Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +77,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.testOverlay ]; virtualisation = { graphics = false; @@ -70,12 +86,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..764d01c47 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -59,18 +59,15 @@ let version = "0.5"; processes = { wiremock-github = mkWiremockProcess "github" ports.github; - wiremock-s3 = mkWiremockProcess "s3" ports.s3; - wiremock-bucket = mkWiremockProcess "bucket" ports.bucket; - wiremock-pursuit = mkWiremockProcess "pursuit" ports.pursuit; + # Unified storage WireMock instance for S3 + bucket + Pursuit with stateful scenarios + wiremock-storage = mkWiremockProcess "storage" ports.storage; wiremock-healthchecks = mkWiremockProcess "healthchecks" ports.healthchecks; registry-server = { command = "${serverStartScript}/bin/start-server"; depends_on = { wiremock-github.condition = "process_healthy"; - wiremock-s3.condition = "process_healthy"; - wiremock-bucket.condition = "process_healthy"; - wiremock-pursuit.condition = "process_healthy"; + wiremock-storage.condition = "process_healthy"; wiremock-healthchecks.condition = "process_healthy"; }; readiness_probe = { @@ -92,21 +89,21 @@ let processComposeYaml = pkgs.writeText "process-compose.yaml" (builtins.toJSON processComposeConfig); + testEnvExports = testConfig.envToExports testConfig.testEnv; + + # The state directory is fixed (not configurable) to avoid mismatch between + # the test-env and spago-test-e2e shells. 
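+  # With the defaults in config.nix this resolves to "/tmp/registry-test-env",
+  # so DATABASE_URL, for example, becomes
+  # "sqlite:/tmp/registry-test-env/db/registry.sqlite3".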
+ stateDir = testConfig.testEnv.STATE_DIR; + testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" - - if [ -z "''${STATE_DIR:-}" ]; then - STATE_DIR="$(mktemp -d)" - export STATE_DIR - echo "Using temporary directory: $STATE_DIR" - trap 'echo "Cleaning up $STATE_DIR..."; rm -rf "$STATE_DIR"' EXIT - else - export STATE_DIR - fi + # Clean up previous test state and create fresh directory + rm -rf ${stateDir} + mkdir -p ${stateDir} - mkdir -p "$STATE_DIR" + # Export all test environment variables + ${testEnvExports} exec ${pkgs.process-compose}/bin/process-compose up \ -f ${processComposeYaml} \ @@ -130,8 +127,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/package-lock.json b/package-lock.json index 3e868b0c6..5c5c89ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -253,65 +253,65 @@ } }, "node_modules/@aws-sdk/client-s3": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz", - "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz", + "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.948.0", - "@aws-sdk/middleware-bucket-endpoint": "3.936.0", - "@aws-sdk/middleware-expect-continue": "3.936.0", - "@aws-sdk/middleware-flexible-checksums": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-location-constraint": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/middleware-ssec": "3.936.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", 
+ "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -319,47 +319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - 
"@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -368,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -392,15 +392,15 @@ } }, 
"node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -408,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -429,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + 
"@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -454,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -473,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -496,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -513,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - "integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -532,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": "sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -550,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": 
"sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -568,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -583,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -607,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": 
"sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -622,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -636,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -650,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": "sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -666,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + 
"version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -691,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -705,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -723,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": 
"3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -772,15 +772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + "integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -788,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -805,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": "sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -823,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -836,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -848,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -864,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -876,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -912,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ 
-1169,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1207,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1224,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1245,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": 
"^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1261,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1276,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1290,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1303,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": "sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1317,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": "sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + 
"version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1331,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1347,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": "Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1362,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1377,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": "sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1391,12 +1391,12 @@ } }, 
"node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1416,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1430,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1444,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + "integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1463,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": 
"sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1483,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1497,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1510,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1525,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": 
"sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1541,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1554,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1567,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1581,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1594,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": 
"4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1619,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1638,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1656,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": 
"sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1668,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1745,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1760,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1778,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + 
"integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1804,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1817,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1831,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + "@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1875,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -2193,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - 
"integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", diff --git a/scripts/src/ArchiveSeeder.purs b/scripts/src/ArchiveSeeder.purs index a474876d5..fe0ae805f 100644 --- a/scripts/src/ArchiveSeeder.purs +++ b/scripts/src/ArchiveSeeder.purs @@ -32,7 +32,6 @@ import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -119,7 +118,6 @@ main = launchAff_ do hasErrors <- runArchiveSeeder parsedArgs logPath # runAppEffects # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index ee9423d68..05e73ae84 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -106,7 +106,6 @@ import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -124,6 +123,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants as Constants import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) @@ -241,7 +241,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' @@ -358,7 +357,7 @@ runLegacyImport logs = do Just ref -> pure ref Log.debug "Building dependency index with compiler versions..." - compilerIndex <- API.readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted eitherResolutions <- do @@ -470,7 +469,7 @@ runLegacyImport logs = do Log.debug "Downloading dependencies..." 
let installDir = Path.concat [ tmp, ".registry" ] FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir + MatrixBuilder.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Trying compilers one-by-one..." selected <- findFirstCompiler @@ -536,6 +535,7 @@ runLegacyImport logs = do { name: manifest.name , location: Just manifest.location , ref + , version: manifest.version , compiler , resolutions: Just resolutions } @@ -761,7 +761,7 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location tag.name legacyManifest case manifest of Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val @@ -1463,7 +1463,7 @@ fetchSpagoYaml address ref = do | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..." pure Nothing - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest (un RawVersion ref) config of Left err -> do Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err pure Nothing diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 950871d0b..257a7b1a2 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -21,7 +21,6 @@ import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -160,7 +159,6 @@ main = launchAff_ do >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) >>> Cache.interpret _compilerCache (Cache.handleFs cache) - >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv >>> Run.runBaseAff' @@ -230,21 +228,25 @@ deleteVersion arguments name version = do Just published, Nothing -> pure (Just (Right published)) Nothing, Just unpublished -> pure (Just (Left unpublished)) Nothing, Nothing -> pure Nothing + -- Read manifest before deleting it (needed for reimport) + maybeManifest <- Registry.readManifest name version let newMetadata = Metadata $ oldMetadata { published = Map.delete version oldMetadata.published, unpublished = Map.delete version oldMetadata.unpublished } Registry.writeMetadata name newMetadata Registry.deleteManifest name version -- --reimport when arguments.reimport do - case publishment of - Nothing -> Log.error "Cannot reimport a version that was not published" - Just 
(Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" - Just (Right specificPackageMetadata) -> do + case publishment, maybeManifest of + Nothing, _ -> Log.error "Cannot reimport a version that was not published" + Just (Left _), _ -> Log.error "Cannot reimport a version that was specifically unpublished" + Just (Right _), Nothing -> Log.error $ "Cannot reimport: manifest not found for " <> formatted + Just (Right _), Just (Manifest manifest) -> do -- Obtains `newMetadata` via cache - API.publish Nothing + void $ API.publish Nothing { location: Just oldMetadata.location , name: name - , ref: specificPackageMetadata.ref + , ref: manifest.ref + , version: version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs index 95053eed1..29423cf7b 100644 --- a/scripts/src/PackageSetUpdater.purs +++ b/scripts/src/PackageSetUpdater.purs @@ -19,7 +19,6 @@ import Node.Path as Path import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) @@ -114,7 +113,6 @@ main = Aff.launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs index d203c66de..31e859197 100644 --- a/scripts/src/PackageTransferrer.purs +++ b/scripts/src/PackageTransferrer.purs @@ -16,7 +16,6 @@ import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -87,7 +86,6 @@ main = launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runPacchettiBottiEnv { privateKey, publicKey } # Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 13280a0af..ce615b5a9 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant import Data.Foldable (foldMap) import Data.Formatter.DateTime as Formatter.DateTime import Data.Map as Map -import Data.Newtype (unwrap) import Data.String as String import Data.Time.Duration (Milliseconds(..)) import Effect.Class.Console as Aff @@ -33,7 +32,6 @@ import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -153,7 +151,6 @@ main = 
launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Run.runBaseAff' diff --git a/spago.lock b/spago.lock index 83d2afb8d..ea939ddeb 100644 --- a/spago.lock +++ b/spago.lock @@ -227,7 +227,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -239,14 +238,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -254,7 +249,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -299,11 +293,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -313,20 +303,32 @@ "dependencies": [ "aff", "arrays", + "codec-json", "console", "datetime", - "effect", - "either", - "maybe", - "prelude", + "exceptions", + "fetch", + "integers", + "json", + "node-child-process", + "node-execa", + "node-fs", + "node-path", + "node-process", + "ordered-collections", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", + "routing-duplex", "spec", "spec-node", - "strings" + "strings", + "transformers" ], "build_plan": [ "aff", + "aff-promise", "ansi", "argonaut-codecs", "argonaut-core", @@ -334,6 +336,7 @@ "arrays", "assert", "avar", + "b64", "bifunctors", "catenable-lists", "codec", @@ -342,15 +345,21 @@ "const", "contravariant", "control", + "convertable-options", "datetime", + "debug", "distributive", + "dodo-printer", + "dotenv", "effect", "either", + "encoding", "enums", "exceptions", "exists", "exitcodes", "fetch", + "filterable", "fixed-points", "foldable-traversable", "foreign", @@ -362,7 +371,9 @@ "functors", "gen", "graphs", + "heterogeneous", "http-methods", + "httpurple", "identity", "integers", "invariant", @@ -370,27 +381,39 @@ "js-fetch", "js-promise", "js-promise-aff", + "js-timers", "js-uri", "json", + "justifill", "language-cst-parser", "lazy", "lcg", "lists", + "literals", "maybe", "media-types", "mmorph", "newtype", "node-buffer", + "node-child-process", "node-event-emitter", + "node-execa", "node-fs", + "node-http", + "node-human-signals", + "node-net", + "node-os", "node-path", "node-process", "node-streams", + "node-tls", + "node-url", "nonempty", "now", "nullable", "numbers", "open-memoize", + "options", "optparse", "ordered-collections", "orders", @@ -402,19 +425,26 @@ "prelude", "profunctor", "profunctor-lenses", + "psci-support", "quickcheck", + "quickcheck-laws", "random", "record", + "record-studio", "refs", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "routing-duplex", + "run", "safe-coerce", "spec", "spec-node", "st", "strings", "tailrec", + "these", "transformers", "tuples", "type-equality", @@ -422,6 +452,9 @@ "unfoldable", "unicode", "unsafe-coerce", + "unsafe-reference", + "untagged-union", + "uuidv4", "variant", "web-dom", "web-events", @@ -604,7 +637,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -616,14 +648,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - 
"js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -632,7 +660,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -682,11 +709,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -846,7 +869,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -858,14 +880,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -874,7 +892,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -924,11 +941,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -1104,22 +1117,16 @@ "path": "test-utils", "core": { "dependencies": [ - "aff", "arrays", "bifunctors", "codec-json", "datetime", - "effect", "either", "exceptions", - "fetch", "foldable-traversable", "formatters", - "integers", "json", "maybe", - "newtype", - "node-process", "ordered-collections", "partial", "prelude", @@ -1152,7 +1159,6 @@ "enums", "exceptions", "exists", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -1164,14 +1170,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -1179,14 +1181,12 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", "node-event-emitter", "node-fs", "node-path", - "node-process", "node-streams", "nonempty", "now", @@ -1198,7 +1198,6 @@ "parsing", "partial", "pipes", - "posix-types", "prelude", "profunctor", "profunctor-lenses", @@ -1220,11 +1219,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] }, "test": { diff --git a/test-utils/spago.yaml b/test-utils/spago.yaml index d85190964..4362f8e77 100644 --- a/test-utils/spago.yaml +++ b/test-utils/spago.yaml @@ -3,22 +3,16 @@ package: build: pedanticPackages: true dependencies: - - aff - arrays - bifunctors - codec-json - datetime - - effect - either - exceptions - - fetch - foldable-traversable - formatters - - integers - json - maybe - - newtype - - node-process - ordered-collections - partial - prelude diff --git a/test-utils/src/Registry/Test/Assert.purs b/test-utils/src/Registry/Test/Assert.purs index 55c0f2277..2d15e7a74 100644 --- a/test-utils/src/Registry/Test/Assert.purs +++ b/test-utils/src/Registry/Test/Assert.purs @@ -38,6 +38,18 @@ shouldNotContain container elem = when (elem `Foldable.elem` container) do fail (Utils.unsafeStringify elem <> "\n\nshould not be a member of\n\n" <> Utils.unsafeStringify container) +-- | Assert that all elements in `expected` are present in `actual`. +-- | This is a subset check, not an equality check - `actual` may contain +-- | additional elements. +-- | +-- | Useful for E2E tests where a shared database means we can't predict +-- | exact contents, only that certain expected items are present. +shouldContainAll :: forall m a. 
MonadThrow Error m => Eq a => Array a -> Array a -> m Unit +shouldContainAll actual expected = + Foldable.for_ expected \elem -> + when (elem `Foldable.notElem` actual) do + fail ("Expected element not found:\n" <> Utils.unsafeStringify elem <> "\n\nin array:\n" <> Utils.unsafeStringify actual) + shouldSatisfy :: forall m a. MonadThrow Error m => a -> (a -> Boolean) -> m Unit shouldSatisfy a predicate = unless (predicate a) do diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs deleted file mode 100644 index 960484609..000000000 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ /dev/null @@ -1,180 +0,0 @@ --- | HTTP client for making requests to the registry server during E2E tests. --- | This module provides typed helpers for interacting with the Registry API. -module Registry.Test.E2E.Client - ( Config - , ClientError(..) - , defaultConfig - , configFromEnv - , getJobs - , getJob - , getStatus - , publish - , pollJob - , printClientError - ) where - -import Prelude - -import Codec.JSON.DecodeError as CJ.DecodeError -import Control.Monad.Error.Class (class MonadThrow, throwError) -import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Class (lift) -import Data.Array as Array -import Data.Bifunctor (lmap) -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime) -import Data.Either (Either(..)) -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Int as Int -import Data.Maybe (Maybe(..)) -import Data.Newtype (unwrap) -import Effect (Effect) -import Effect.Aff (Aff, Milliseconds(..), delay) -import Effect.Aff.Class (class MonadAff, liftAff) -import Effect.Exception (Error, error) -import Effect.Exception as Effect.Exception -import Fetch (Method(..)) -import Fetch as Fetch -import JSON as JSON -import Node.Process as Process -import Registry.API.V1 (Job, JobId(..), LogLevel) -import Registry.API.V1 as V1 -import Registry.Internal.Format as Internal.Format -import Registry.Operation (PublishData) -import Registry.Operation as Operation - --- | Configuration for the E2E test client -type Config = - { baseUrl :: String - , timeout :: Milliseconds - , pollInterval :: Milliseconds - , maxPollAttempts :: Int - } - --- | Default configuration for production use (port 8080 matches HTTPurple default) -defaultConfig :: Config -defaultConfig = - { baseUrl: "http://localhost:8080" - , timeout: Milliseconds 30000.0 - , pollInterval: Milliseconds 2000.0 - , maxPollAttempts: 30 - } - --- | Create config from environment, reading SERVER_PORT. --- | --- | SERVER_PORT is required and must be set by the test environment. --- | See `nix/lib/env.nix` for the centralized environment configuration. -configFromEnv :: Effect Config -configFromEnv = do - maybePort <- Process.lookupEnv "SERVER_PORT" - case maybePort of - Nothing -> Effect.Exception.throw "SERVER_PORT environment variable is not set. Run tests via 'nix run .#test-env' or 'nix build .#checks.x86_64-linux.integration'." 
- Just port -> pure $ defaultConfig { baseUrl = "http://localhost:" <> port } - --- | Errors that can occur during client operations -data ClientError - = HttpError { status :: Int, body :: String } - | ParseError String - | Timeout String - | NetworkError String - -printClientError :: ClientError -> String -printClientError = case _ of - HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError msg -> "Parse Error: " <> msg - Timeout msg -> "Timeout: " <> msg - NetworkError msg -> "Network Error: " <> msg - --- | Convert a ClientError to an Effect Error for throwing -toError :: ClientError -> Error -toError = error <<< printClientError - --- | Parse JSON response body using a codec -parseResponse :: forall a. CJ.Codec a -> String -> Either String a -parseResponse codec body = do - json <- lmap (append "JSON parse error: ") $ JSON.parse body - lmap CJ.DecodeError.print $ CJ.decode codec json - --- | Make a GET request and decode the response -get :: forall a. CJ.Codec a -> Config -> String -> Aff (Either ClientError a) -get codec config path = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> path) { method: GET } - body <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse codec body of - Left err -> throwError $ ParseError err - Right a -> pure a - else - throwError $ HttpError { status: response.status, body } - --- | Make a POST request with JSON body and decode the response -post :: forall req res. CJ.Codec req -> CJ.Codec res -> Config -> String -> req -> Aff (Either ClientError res) -post reqCodec resCodec config path reqBody = runExceptT do - let jsonBody = JSON.print $ CJ.encode reqCodec reqBody - response <- lift $ Fetch.fetch (config.baseUrl <> path) - { method: POST - , headers: { "Content-Type": "application/json" } - , body: jsonBody - } - responseBody <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError err - Right a -> pure a - else - throwError $ HttpError { status: response.status, body: responseBody } - --- | Get the list of jobs -getJobs :: Config -> Aff (Either ClientError (Array Job)) -getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs" - --- | Get a specific job by ID, with optional log filtering -getJob :: Config -> JobId -> Maybe LogLevel -> Maybe DateTime -> Aff (Either ClientError Job) -getJob config (JobId jobId) level since = do - let - params = Array.catMaybes - [ level <#> \l -> "level=" <> V1.printLogLevel l - , since <#> \s -> "since=" <> Formatter.DateTime.format Internal.Format.iso8601DateTime s - ] - query = case params of - [] -> "" - ps -> "?" <> Array.intercalate "&" ps - get V1.jobCodec config ("/api/v1/jobs/" <> jobId <> query) - --- | Check if the server is healthy -getStatus :: Config -> Aff (Either ClientError Unit) -getStatus config = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> "/api/v1/status") { method: GET } - if response.status == 200 then - pure unit - else do - body <- lift response.text - throwError $ HttpError { status: response.status, body } - --- | Publish a package -publish :: Config -> PublishData -> Aff (Either ClientError V1.JobCreatedResponse) -publish config publishData = - post Operation.publishCodec V1.jobCreatedResponseCodec config "/api/v1/publish" publishData - --- | Poll a job until it completes or times out -pollJob - :: forall m - . 
MonadAff m - => MonadThrow Error m - => Config - -> JobId - -> m Job -pollJob config jobId = go 1 - where - go attempt - | attempt > config.maxPollAttempts = - throwError $ toError $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" - | otherwise = do - liftAff $ delay config.pollInterval - result <- liftAff $ getJob config jobId (Just V1.Debug) Nothing - case result of - Left err -> throwError $ toError err - Right job -> - case job.finishedAt of - Just _ -> pure job - Nothing -> go (attempt + 1) diff --git a/test-utils/src/Registry/Test/Fixtures.purs b/test-utils/src/Registry/Test/Fixtures.purs new file mode 100644 index 000000000..28692c13c --- /dev/null +++ b/test-utils/src/Registry/Test/Fixtures.purs @@ -0,0 +1,18 @@ +module Registry.Test.Fixtures where + +import Prelude + +import Data.Either as Either +import Data.Maybe (Maybe(..)) +import Partial.Unsafe as Partial +import Registry.Location (Location(..)) +import Registry.Sha256 (Sha256) +import Registry.Sha256 as Sha256 + +-- | A Location for use within tests. +defaultLocation :: Location +defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } + +-- | A Sha256 for use within tests. +defaultHash :: Sha256 +defaultHash = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Failed to parse Sha256") $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/test-utils/src/Registry/Test/Utils.purs b/test-utils/src/Registry/Test/Utils.purs index 2db7280e5..57f177890 100644 --- a/test-utils/src/Registry/Test/Utils.purs +++ b/test-utils/src/Registry/Test/Utils.purs @@ -25,7 +25,6 @@ import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range as Range import Registry.SSH as SSH -import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) import Registry.Version as Version @@ -149,6 +148,7 @@ unsafeManifest name version dependencies = Manifest { url: "https://github.com/purescript/purescript-" <> name <> ".git" , subdir: Nothing } + , ref: "v" <> version , description: Nothing , owners: Nothing , includeFiles: Nothing @@ -158,11 +158,3 @@ unsafeManifest name version dependencies = Manifest -- | Format a package version as a string in the form 'name@X.Y.Z' formatPackageVersion :: PackageName -> Version -> String formatPackageVersion name version = PackageName.print name <> "@" <> Version.print version - --- | A Location for use within tests. -defaultLocation :: Location -defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } - --- | A Sha256 for use within tests. 
-defaultHash :: Sha256 -defaultHash = fromRight "Failed to parse Sha256" $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/types/v1/Manifest.dhall b/types/v1/Manifest.dhall index e9fe88850..2f1a6fa5b 100644 --- a/types/v1/Manifest.dhall +++ b/types/v1/Manifest.dhall @@ -13,6 +13,7 @@ let Manifest = , license : License , version : Version , location : ./Location.dhall + , ref : Text , owners : Optional (List ./Owner.dhall) , description : Optional Text , includeFiles : Optional (List Text) From b81f6522784f72addf31ad132812857b195b1ac3 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Thu, 8 Jan 2026 23:04:32 +0200 Subject: [PATCH 64/64] Add scaffolding to move cronjobs into the Job queue framework --- app/src/App/API.purs | 9 +- app/src/App/Effect/Db.purs | 42 +++--- app/src/App/Main.purs | 30 +++-- app/src/App/SQLite.js | 37 ++++-- app/src/App/SQLite.purs | 110 ++++++++------- app/src/App/Server/AdminJobs.purs | 36 +++++ app/src/App/Server/JobExecutor.purs | 18 ++- app/src/App/Server/Router.purs | 8 +- app/src/App/Server/Scheduler.purs | 78 +++++++++++ ...20240914171030_create_job_queue_tables.sql | 10 +- db/schema.sql | 6 +- lib/src/API/V1.purs | 125 +++++++++++++++--- 12 files changed, 385 insertions(+), 124 deletions(-) create mode 100644 app/src/App/Server/AdminJobs.purs create mode 100644 app/src/App/Server/Scheduler.purs diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8ebc66ba5..9ee16a93b 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -51,7 +51,6 @@ import Parsing as Parsing import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String -import Registry.API.V1 (PackageSetJobData) import Registry.App.Auth as Auth import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs @@ -93,7 +92,7 @@ import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PublishData) import Registry.Operation as Operation import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as Operation.Validation @@ -119,9 +118,9 @@ type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + LOG + EXCEPT String -- | Process a package set update from a queued job. Authentication has already -- | been verified at the API boundary, so we don't need to check team membership. -packageSetUpdate :: forall r. PackageSetJobData -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate details = do - let Operation.PackageSetUpdate payload = details.payload +packageSetUpdate :: forall r. 
PackageSetOperation -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate operation = do + let PackageSetUpdate payload = operation Log.debug $ "Package set update job starting with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 96b75ca94..b37103531 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,7 +8,7 @@ import Data.String as String import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageSetJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PackageSetJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) +import Registry.App.SQLite (AdminJobDetails, FinishJob, InsertAdminJob, InsertMatrixJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite import Registry.Operation (PackageSetOperation) import Run (EFFECT, Run) @@ -30,7 +30,7 @@ data Db a | InsertUnpublishJob InsertUnpublishJob (JobId -> a) | InsertTransferJob InsertTransferJob (JobId -> a) | InsertMatrixJob InsertMatrixJob (JobId -> a) - | InsertPackageSetJob InsertPackageSetJob (JobId -> a) + | InsertAdminJob InsertAdminJob (JobId -> a) | FinishJob FinishJob a | StartJob StartJob a | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) @@ -39,11 +39,12 @@ data Db a | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) - | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | SelectNextAdminJob (Either String (Maybe AdminJobDetails) -> a) + | SelectRecentAdminJobs DateTime (Either String (Array AdminJobDetails) -> a) | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a) | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a) | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a) - | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe PackageSetJobDetails) -> a) + | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe AdminJobDetails) -> a) | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) | ResetIncompleteJobs a @@ -92,9 +93,9 @@ insertTransferJob job = Run.lift _db (InsertTransferJob job identity) insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity) --- | Insert a new package set job into the database. -insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId -insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity) +-- | Insert a new admin job into the database. +insertAdminJob :: forall r. InsertAdminJob -> Run (DB + r) JobId +insertAdminJob job = Run.lift _db (InsertAdminJob job identity) -- | Start a job in the database. startJob :: forall r. StartJob -> Run (DB + r) Unit @@ -116,9 +117,13 @@ selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except selectNextMatrixJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow --- | Select the next package set job from the database. -selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) -selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow +-- | Select the next admin job from the database. +selectNextAdminJob :: forall r. Run (DB + EXCEPT String + r) (Maybe AdminJobDetails) +selectNextAdminJob = Run.lift _db (SelectNextAdminJob identity) >>= Except.rethrow + +-- | Returns recent admin jobs since a given timestamp (for scheduler). +selectRecentAdminJobs :: forall r. DateTime -> Run (DB + EXCEPT String + r) (Array AdminJobDetails) +selectRecentAdminJobs since = Run.lift _db (SelectRecentAdminJobs since identity) >>= Except.rethrow -- | Lookup a publish job from the database by name and version. selectPublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) @@ -132,8 +137,9 @@ selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob selectTransferJob :: forall r. PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow --- | Lookup a pending package set job from the database by payload (for duplicate detection). -selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +-- | Lookup a pending package set job from the database by payload (for duplicate detection at API boundary). +-- | This is only used when a manual package set operation is submitted via the API. +selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe AdminJobDetails) selectPackageSetJobByPayload payload = Run.lift _db (SelectPackageSetJobByPayload payload identity) >>= Except.rethrow -- | Delete all incomplete jobs from the database. 
@@ -164,8 +170,8 @@ handleSQLite env = case _ of result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job pure $ reply result - InsertPackageSetJob job reply -> do - result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job + InsertAdminJob job reply -> do + result <- Run.liftEffect $ SQLite.insertAdminJob env.db job pure $ reply result FinishJob job next -> do @@ -204,8 +210,12 @@ handleSQLite env = case _ of result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db pure $ reply result - SelectNextPackageSetJob reply -> do - result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + SelectNextAdminJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextAdminJob env.db + pure $ reply result + + SelectRecentAdminJobs since reply -> do + result <- Run.liftEffect $ SQLite.selectRecentAdminJobs env.db since pure $ reply result SelectPublishJob packageName packageVersion reply -> do diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index e638cc684..5d5169c5e 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -3,14 +3,15 @@ module Registry.App.Main where import Registry.App.Prelude hiding ((/)) import Data.DateTime (diff) -import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Data.Time.Duration (Minutes(..), Seconds(..), fromDuration) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry import Node.Process as Process -import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.Env (createServerEnv) import Registry.App.Server.JobExecutor as JobExecutor import Registry.App.Server.Router as Router +import Registry.App.Server.Scheduler as CronJobs main :: Effect Unit main = do @@ -22,15 +23,16 @@ main = do case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl - Aff.launchAff_ $ jobExecutor env + Aff.launchAff_ $ withRestartLoop "Scheduler" (CronJobs.runScheduler env) + Aff.launchAff_ $ withRestartLoop "Job executor" (JobExecutor.runJobExecutor env) Router.runRouter env where healthcheck :: String -> Aff Unit healthcheck healthchecksUrl = loop limit where limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + oneMinute = fromDuration (Minutes 1.0) + fiveMinutes = fromDuration (Minutes 5.0) loop n = do Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of @@ -63,22 +65,24 @@ main = do Succeeded _ -> do Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." - jobExecutor :: ServerEnv -> Aff Unit - jobExecutor env = do - loop initialRestartDelay + -- | Run an Aff action in an infinite loop with exponential backoff on failure. + -- | If the action keeps crashing immediately, restart delay doubles each time. + -- | Once the action runs for more than a minute, the delay resets. + withRestartLoop :: String -> Aff (Either Aff.Error Unit) -> Aff Unit + withRestartLoop name action = loop initialRestartDelay where - initialRestartDelay = Milliseconds 100.0 + initialRestartDelay = fromDuration (Seconds 0.1) loop restartDelay = do start <- nowUTC - result <- JobExecutor.runJobExecutor env + result <- action end <- nowUTC Console.error case result of - Left error -> "Job executor failed: " <> Aff.message error - Right _ -> "Job executor exited for no reason." 
+ Left error -> name <> " failed: " <> Aff.message error + Right _ -> name <> " exited for no reason." - -- This is a heuristic: if the executor keeps crashing immediately, we + -- This is a heuristic: if the fiber keeps crashing immediately, we -- restart with an exponentially increasing delay, but once the executor -- had a run longer than a minute, we start over with a small delay. let diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 0ff5bd696..ccc0debb2 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -6,7 +6,7 @@ const PUBLISH_JOBS_TABLE = 'publish_jobs'; const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs'; const TRANSFER_JOBS_TABLE = 'transfer_jobs'; const MATRIX_JOBS_TABLE = 'matrix_jobs'; -const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; +const ADMIN_JOBS_TABLE = 'admin_jobs'; export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); @@ -88,9 +88,9 @@ export const insertMatrixJobImpl = (db, job) => { return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; -export const insertPackageSetJobImpl = (db, job) => { - const columns = ['jobId', 'payload', 'rawPayload', 'signature'] - return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); +export const insertAdminJobImpl = (db, job) => { + const columns = ['jobId', 'adminJobType', 'payload', 'rawPayload', 'signature'] + return _insertJob(db, ADMIN_JOBS_TABLE, columns, job); }; const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { @@ -137,17 +137,21 @@ export const selectMatrixJobImpl = (db, jobId) => { return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); }; -export const selectPackageSetJobImpl = (db, jobId) => { - return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); +export const selectAdminJobImpl = (db, jobId) => { + return _selectJob(db, { table: ADMIN_JOBS_TABLE, jobId }); }; -// Find a pending package set job by payload (for duplicate detection) +// Find a pending package set job by payload (for duplicate detection at API boundary) +// Note: This function is kept for checking duplicates when a manual package set +// operation is submitted via the API. It only looks for package_set_operation type jobs. export const selectPackageSetJobByPayloadImpl = (db, payload) => { const stmt = db.prepare(` SELECT job.*, info.* - FROM ${PACKAGE_SET_JOBS_TABLE} job + FROM ${ADMIN_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId - WHERE job.payload = ? AND info.finishedAt IS NULL + WHERE job.adminJobType = 'package_set_operation' + AND job.payload = ? 
+ AND info.finishedAt IS NULL ORDER BY info.createdAt ASC LIMIT 1 `); return stmt.get(payload); @@ -188,8 +192,8 @@ export const selectMatrixJobsImpl = (db, since, includeCompleted) => { return _selectJobs(db, { table: MATRIX_JOBS_TABLE, since, includeCompleted }); }; -export const selectPackageSetJobsImpl = (db, since, includeCompleted) => { - return _selectJobs(db, { table: PACKAGE_SET_JOBS_TABLE, since, includeCompleted }); +export const selectAdminJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: ADMIN_JOBS_TABLE, since, includeCompleted }); }; export const startJobImpl = (db, args) => { @@ -240,3 +244,14 @@ export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { const stmt = db.prepare(query); return stmt.all(jobId, logLevel, since); }; + +// Returns recent admin jobs since a given timestamp (for scheduler) +export const selectRecentAdminJobsImpl = (db, since) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${ADMIN_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.createdAt >= ? + `); + return stmt.all(since); +}; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index e51196d47..4d0de6c7f 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -4,16 +4,16 @@ -- | nicer interface with PureScript types for higher-level modules to use. module Registry.App.SQLite - ( ConnectOptions + ( AdminJobDetails + , ConnectOptions , FinishJob + , InsertAdminJob , InsertMatrixJob - , InsertPackageSetJob , InsertPublishJob , InsertTransferJob , InsertUnpublishJob , JobInfo , MatrixJobDetails - , PackageSetJobDetails , PublishJobDetails , SQLite , SelectJobRequest @@ -23,18 +23,19 @@ module Registry.App.SQLite , UnpublishJobDetails , connect , finishJob + , insertAdminJob , insertLogLine , insertMatrixJob - , insertPackageSetJob , insertPublishJob , insertTransferJob , insertUnpublishJob , resetIncompleteJobs , selectJob , selectJobs + , selectRecentAdminJobs , selectLogsByJob + , selectNextAdminJob , selectNextMatrixJob - , selectNextPackageSetJob , selectNextPublishJob , selectNextTransferJob , selectNextUnpublishJob @@ -60,7 +61,7 @@ import Data.UUID.Random as UUID import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried import Record as Record -import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) +import Registry.API.V1 (AdminJobType, Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 import Registry.API.V1 as V1 import Registry.Internal.Codec as Internal.Codec @@ -206,7 +207,7 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do [ selectPublishJobById logs , selectMatrixJobById logs , selectTransferJobById logs - , selectPackageSetJobById logs + , selectAdminJobById logs , selectUnpublishJobById logs ] pure { job, unreadableLogs } @@ -253,11 +254,11 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do ) maybeJobDetails - selectPackageSetJobById logs = ExceptT do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (notNull jobId) + selectAdminJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectAdminJobImpl db (notNull jobId) pure $ traverse - ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) - <<< packageSetJobDetailsFromJSRep + ( map (AdminJob <<< Record.merge { logs, jobType: Proxy :: _ "admin" }) + <<< adminJobDetailsFromJSRep ) maybeJobDetails @@ 
-272,10 +273,10 @@ selectJobs db { since, includeCompleted } = do unpublishJobs <- selectUnpublishJobs transferJobs <- selectTransferJobs matrixJobs <- selectMatrixJobs - packageSetJobs <- selectPackageSetJobs + adminJobs <- selectAdminJobs let { fail: failedJobs, success: allJobs } = partitionEithers - (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> packageSetJobs) + (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> adminJobs) pure { failed: failedJobs, jobs: take 100 $ sortBy (compare `on` (V1.jobInfo >>> _.createdAt)) allJobs } where @@ -295,9 +296,9 @@ selectJobs db { since, includeCompleted } = do jobs <- Uncurried.runEffectFn3 selectMatrixJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted pure $ map (map (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" }) <<< matrixJobDetailsFromJSRep) jobs - selectPackageSetJobs = do - jobs <- Uncurried.runEffectFn3 selectPackageSetJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted - pure $ map (map (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" }) <<< packageSetJobDetailsFromJSRep) jobs + selectAdminJobs = do + jobs <- Uncurried.runEffectFn3 selectAdminJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (AdminJob <<< Record.merge { logs: [], jobType: Proxy :: _ "admin" }) <<< adminJobDetailsFromJSRep) jobs -------------------------------------------------------------------------------- -- publish_jobs table @@ -691,90 +692,103 @@ selectNextMatrixJob db = do pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails -------------------------------------------------------------------------------- --- package_set_jobs table +-- admin_jobs table -type PackageSetJobDetails = +type AdminJobDetails = { jobId :: JobId , createdAt :: DateTime , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean - , payload :: PackageSetOperation + , adminJobType :: AdminJobType } -type JSPackageSetJobDetails = +type JSAdminJobDetails = { jobId :: String , createdAt :: String , startedAt :: Nullable String , finishedAt :: Nullable String , success :: Int + , adminJobType :: String , payload :: String } -packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails -packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do +adminJobDetailsFromJSRep :: JSAdminJobDetails -> Either String AdminJobDetails +adminJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) s <- toSuccess success - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + parsedAdminJobType <- lmap JSON.DecodeError.print $ parseJson API.V1.adminJobTypeCodec payload pure { jobId: JobId jobId , createdAt: created , startedAt: started , finishedAt: finished , success: s - , payload: parsed + , adminJobType: parsedAdminJobType } -foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) +foreign import selectAdminJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSAdminJobDetails) + +foreign import selectPackageSetJobByPayloadImpl :: 
EffectFn2 SQLite String (Nullable JSAdminJobDetails) -foreign import selectPackageSetJobByPayloadImpl :: EffectFn2 SQLite String (Nullable JSPackageSetJobDetails) +foreign import selectAdminJobsImpl :: EffectFn3 SQLite String Boolean (Array JSAdminJobDetails) -foreign import selectPackageSetJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPackageSetJobDetails) +foreign import selectRecentAdminJobsImpl :: EffectFn2 SQLite String (Array JSAdminJobDetails) -selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) -selectNextPackageSetJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db null - pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails +selectNextAdminJob :: SQLite -> Effect (Either String (Maybe AdminJobDetails)) +selectNextAdminJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectAdminJobImpl db null + pure $ traverse adminJobDetailsFromJSRep maybeJobDetails --- | Find a pending package set job by payload (for duplicate detection) -selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe PackageSetJobDetails)) +-- | Find a pending package set job by payload (for duplicate detection at API boundary) +-- | This is only used when a manual package set operation is submitted via the API. +selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe AdminJobDetails)) selectPackageSetJobByPayload db payload = do let payloadStr = stringifyJson Operation.packageSetOperationCodec payload maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobByPayloadImpl db payloadStr - pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails - -type InsertPackageSetJob = - { payload :: PackageSetOperation - , rawPayload :: String + pure $ traverse adminJobDetailsFromJSRep maybeJobDetails + +-- | Returns recent admin jobs since a given timestamp (for scheduler) +selectRecentAdminJobs :: SQLite -> DateTime -> Effect (Either String (Array AdminJobDetails)) +selectRecentAdminJobs db since = do + let sinceStr = DateTime.format Internal.Format.iso8601DateTime since + jobs <- Uncurried.runEffectFn2 selectRecentAdminJobsImpl db sinceStr + pure $ traverse adminJobDetailsFromJSRep jobs + +type InsertAdminJob = + { adminJobType :: AdminJobType + , rawPayload :: Maybe String , signature :: Maybe Signature } -type JSInsertPackageSetJob = +type JSInsertAdminJob = { jobId :: String , createdAt :: String + , adminJobType :: String , payload :: String - , rawPayload :: String + , rawPayload :: Nullable String , signature :: Nullable String } -insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep jobId now { payload, rawPayload, signature } = +insertAdminJobToJSRep :: JobId -> DateTime -> InsertAdminJob -> JSInsertAdminJob +insertAdminJobToJSRep jobId now { adminJobType, rawPayload, signature } = { jobId: un JobId jobId , createdAt: DateTime.format Internal.Format.iso8601DateTime now - , payload: stringifyJson Operation.packageSetOperationCodec payload - , rawPayload + , adminJobType: API.V1.adminJobTypeKey adminJobType + , payload: stringifyJson API.V1.adminJobTypeCodec adminJobType + , rawPayload: Nullable.toNullable rawPayload , signature: Nullable.toNullable $ map (\(Signature s) -> s) signature } -foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit +foreign import insertAdminJobImpl :: EffectFn2 SQLite 
JSInsertAdminJob Unit -insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect JobId -insertPackageSetJob db job = do +insertAdminJob :: SQLite -> InsertAdminJob -> Effect JobId +insertAdminJob db job = do jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job + Uncurried.runEffectFn2 insertAdminJobImpl db $ insertAdminJobToJSRep jobId now job pure jobId -------------------------------------------------------------------------------- diff --git a/app/src/App/Server/AdminJobs.purs b/app/src/App/Server/AdminJobs.purs new file mode 100644 index 000000000..2992d57a2 --- /dev/null +++ b/app/src/App/Server/AdminJobs.purs @@ -0,0 +1,36 @@ +-- | Execution of admin jobs (scheduled tasks and manual package set operations). +-- | The scheduled job implementations are stubbed for now - actual scripts will +-- | be plugged in later. +module Registry.App.Server.AdminJobs + ( executeAdminJob + ) where + +import Registry.App.Prelude + +import Registry.API.V1 (AdminJobType(..)) +import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects) +import Run (Run) + +-- | Execute an admin job based on its type. The scheduled job implementations +-- | (PackageTransfer, LegacyImport, PackageSetUpdate) are currently stubbed. +-- | Only AdminPackageSetOperation (manual API requests) is fully implemented. +executeAdminJob :: AdminJobType -> Run ServerEffects Unit +executeAdminJob = case _ of + AdminPackageTransfer -> do + Log.info "Running scheduled PackageTransfer job..." + Log.warn "TODO: PackageTransfer execution not yet implemented" + + AdminLegacyImport mode -> do + Log.info $ "Running scheduled LegacyImport job with mode: " <> V1.printLegacyImportMode mode + Log.warn "TODO: LegacyImport execution not yet implemented" + + AdminPackageSetUpdate mode -> do + Log.info $ "Running scheduled PackageSetUpdate job with mode: " <> V1.printPackageSetUpdateMode mode + Log.warn "TODO: PackageSetUpdate execution not yet implemented" + + AdminPackageSetOperation operation -> do + Log.info "Running manual package set operation from API..." 
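+    -- Authentication and duplicate detection have already happened at the
+    -- API boundary (see the router and API.packageSetUpdate), so the
+    -- operation can be handed straight to the existing pipeline.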
+ API.packageSetUpdate operation diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 4970fa935..4120fb65f 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -10,7 +10,7 @@ import Data.Array as Array import Data.DateTime (DateTime) import Data.Map as Map import Data.Set as Set -import Effect.Aff (Milliseconds(..)) +import Data.Time.Duration (Hours(..), Minutes(..), Seconds(..), fromDuration) import Effect.Aff as Aff import Record as Record import Registry.API.V1 (Job(..)) @@ -22,6 +22,7 @@ import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry +import Registry.App.Server.AdminJobs as AdminJobs import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.ManifestIndex as ManifestIndex @@ -52,7 +53,7 @@ runJobExecutor env = runEffects env do maybeJob <- findNextAvailableJob case maybeJob of Nothing -> do - liftAff $ Aff.delay (Milliseconds 1000.0) + liftAff $ Aff.delay $ fromDuration (Seconds 1.0) loop Just job -> do @@ -68,8 +69,13 @@ runJobExecutor env = runEffects env do jobResult <- liftAff do let envWithJobId = env { jobId = Just jobId } let execute = Just <$> (runEffects envWithJobId $ executeJob now job) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing + -- Admin jobs get a long timeout (they can run for a long time, + -- e.g. LegacyImporter), while other jobs a much shorter one + let + delay = case job of + AdminJob _ -> fromDuration (Hours 4.0) + _ -> fromDuration (Minutes 5.0) + let timeout = Aff.delay delay $> Nothing Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout success <- case jobResult of @@ -98,7 +104,7 @@ findNextAvailableJob = runMaybeT <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob) <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob) <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob) - <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob) + <|> (AdminJob <<< Record.merge { logs: [], jobType: Proxy :: _ "admin" } <$> MaybeT Db.selectNextAdminJob) executeJob :: DateTime -> Job -> Run ServerEffects Unit executeJob _ = case _ of @@ -154,7 +160,7 @@ executeJob _ = case _ of , packageName: solvedPackage , packageVersion: solvedVersion } - PackageSetJob payload -> API.packageSetUpdate payload + AdminJob { adminJobType } -> AdminJobs.executeAdminJob adminJobType upgradeRegistryToNewCompiler :: forall r. 
Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit upgradeRegistryToNewCompiler newCompilerVersion = do diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 2553ea4a6..2035d542c 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -10,7 +10,7 @@ import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status -import Registry.API.V1 (Route(..)) +import Registry.API.V1 (AdminJobType(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth @@ -160,9 +160,9 @@ router { route, method, body } = HTTPurple.usingCont case route, method of lift $ Log.warn $ "Duplicate package set job insertion, returning existing one: " <> unwrap job.jobId pure job.jobId Nothing -> do - lift $ Db.insertPackageSetJob - { payload: request.payload - , rawPayload: request.rawPayload + lift $ Db.insertAdminJob + { adminJobType: AdminPackageSetOperation request.payload + , rawPayload: Just request.rawPayload , signature: request.signature } diff --git a/app/src/App/Server/Scheduler.purs b/app/src/App/Server/Scheduler.purs new file mode 100644 index 000000000..e0f66150f --- /dev/null +++ b/app/src/App/Server/Scheduler.purs @@ -0,0 +1,78 @@ +-- | Scheduler for admin jobs (PackageTransfer, LegacyImport, PackageSetUpdate). +module Registry.App.Server.Scheduler + ( runScheduler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.DateTime (DateTime, Time(..)) +import Data.DateTime as DateTime +import Data.Enum (fromEnum) +import Data.Time.Duration (Hours(..), fromDuration, negateDuration) +import Effect.Aff as Aff +import Registry.API.V1 (AdminJobType(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (AdminJobDetails) +import Registry.App.Server.Env (ServerEnv, runEffects) + +-- | The three admin job types that run on schedule. +-- | They are enqueued in this order: PackageTransfer -> LegacyImport -> PackageSetUpdate +scheduledAdminJobs :: Array AdminJobType +scheduledAdminJobs = + [ AdminPackageTransfer + , AdminLegacyImport V1.UpdateRegistry + , AdminPackageSetUpdate V1.CommitPackageSet + ] + +-- | Run the scheduler loop. Checks every hour if jobs should be enqueued. +-- | We run things in a window instead of a precise time, so that restarts and/or +-- | delays don't prevent jobs from happening. +runScheduler :: ServerEnv -> Aff (Either Aff.Error Unit) +runScheduler env = runEffects env do + Log.info "Starting Admin Job Scheduler" + loop + where + loop = do + liftAff $ Aff.delay $ fromDuration (Hours 1.0) + now <- nowUTC + + when (inScheduleWindow now) do + Log.info "In schedule window (00:00-04:00 UTC) - checking if admin jobs should be scheduled..." 
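+      -- Worked example (illustrative times, not from the source): a check at
+      -- 02:30 UTC falls inside the 00:00-04:00 window, so we consider admin
+      -- jobs created since 14:30 UTC the previous day; a check at 05:00 UTC
+      -- is outside the window and the loop simply sleeps for another hour.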
+      -- Get jobs from last 12h
+      let twelveHoursAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 12.0)) now
+      recentJobs <- Db.selectRecentAdminJobs twelveHoursAgo
+      for_ scheduledAdminJobs \jobType -> do
+        when (shouldEnqueue jobType recentJobs) do
+          Log.info $ "Scheduling admin job: " <> V1.adminJobTypeKey jobType
+          void $ Db.insertAdminJob
+            { adminJobType: jobType
+            , rawPayload: Nothing
+            , signature: Nothing
+            }
+
+    loop
+
+-- | Check if current time is in the schedule window
+inScheduleWindow :: DateTime -> Boolean
+inScheduleWindow dt =
+  let
+    Time hour _ _ _ = DateTime.time dt
+  in
+    fromEnum hour >= 0 && fromEnum hour < 4
+
+-- | Determine if we should enqueue a job of the given type.
+-- | Returns true if:
+-- | 1. No incomplete job of that type exists (prevents duplicates)
+-- | 2. Either never run, or last completed job was >12 hours ago
+shouldEnqueue :: AdminJobType -> Array AdminJobDetails -> Boolean
+shouldEnqueue jobType recentJobs =
+  let
+    jobsOfType = Array.filter (\j -> V1.adminJobTypeKey j.adminJobType == V1.adminJobTypeKey jobType) recentJobs
+    hasIncomplete = Array.any (\j -> isNothing j.finishedAt) jobsOfType
+    lastCompleted = Array.last $ Array.sortBy (comparing _.createdAt) $
+      Array.filter (\j -> isJust j.finishedAt) jobsOfType
+  in
+    not hasIncomplete && isNothing lastCompleted
diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql
index cdb137ad4..35b43b5c0 100644
--- a/db/migrations/20240914171030_create_job_queue_tables.sql
+++ b/db/migrations/20240914171030_create_job_queue_tables.sql
@@ -48,11 +48,13 @@ CREATE TABLE matrix_jobs (
   FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
 );
 
--- Package set jobs
-CREATE TABLE package_set_jobs (
+-- Admin jobs (scheduled tasks and manual package set operations)
+CREATE TABLE admin_jobs (
   jobId TEXT PRIMARY KEY NOT NULL,
+  adminJobType TEXT NOT NULL, -- 'package_transfer', 'legacy_import', 'package_set_update', 'package_set_operation'
   payload JSON NOT NULL,
-  rawPayload TEXT NOT NULL,
+  -- Keep these for manual package set operations (authenticated API requests)
+  rawPayload TEXT,
   signature TEXT,
   FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
 );
@@ -72,5 +74,5 @@ DROP TABLE publish_jobs;
 DROP TABLE unpublish_jobs;
 DROP TABLE transfer_jobs;
 DROP TABLE matrix_jobs;
-DROP TABLE package_set_jobs;
+DROP TABLE admin_jobs;
 DROP TABLE logs;
diff --git a/db/schema.sql b/db/schema.sql
index 65319293a..803b621c7 100644
--- a/db/schema.sql
+++ b/db/schema.sql
@@ -37,9 +37,13 @@ CREATE TABLE matrix_jobs (
   payload JSON NOT NULL,
   FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
 );
-CREATE TABLE package_set_jobs (
+CREATE TABLE admin_jobs (
   jobId TEXT PRIMARY KEY NOT NULL,
+  adminJobType TEXT NOT NULL, -- 'package_transfer', 'legacy_import', 'package_set_update', 'package_set_operation'
   payload JSON NOT NULL,
+  -- Keep these for manual package set operations (authenticated API requests)
+  rawPayload TEXT,
+  signature TEXT,
   FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
 );
 CREATE TABLE logs (
diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs
index 4c399342e..978ccdac3 100644
--- a/lib/src/API/V1.purs
+++ b/lib/src/API/V1.purs
@@ -1,24 +1,31 @@
 module Registry.API.V1
-  ( JobCreatedResponse
+  ( AdminJobData
+  , AdminJobType(..)
+  , JobCreatedResponse
   , JobId(..)
   , JobInfo
   , JobType(..)
   , Job(..)
+  , LegacyImportMode(..)
   , LogLevel(..)
diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs
index 4c399342e..978ccdac3 100644
--- a/lib/src/API/V1.purs
+++ b/lib/src/API/V1.purs
@@ -1,24 +1,31 @@
 module Registry.API.V1
-  ( JobCreatedResponse
+  ( AdminJobData
+  , AdminJobType(..)
+  , JobCreatedResponse
   , JobId(..)
   , JobInfo
   , JobType(..)
   , Job(..)
+  , LegacyImportMode(..)
   , LogLevel(..)
   , LogLine
   , MatrixJobData
-  , PackageSetJobData
+  , PackageSetUpdateMode(..)
   , PublishJobData
   , Route(..)
   , TransferJobData
   , UnpublishJobData
+  , adminJobTypeCodec
+  , adminJobTypeKey
   , jobInfo
   , jobCodec
   , jobCreatedResponseCodec
   , logLevelFromPriority
   , logLevelToPriority
   , printJobType
+  , printLegacyImportMode
   , printLogLevel
+  , printPackageSetUpdateMode
   , routes
   ) where
 
@@ -37,7 +44,7 @@ import Data.Formatter.DateTime as DateTime
 import Data.Generic.Rep (class Generic)
 import Data.Lens.Iso.Newtype (_Newtype)
 import Data.Map (Map)
-import Data.Maybe (Maybe)
+import Data.Maybe (Maybe(..))
 import Data.Newtype (class Newtype)
 import Data.Profunctor as Profunctor
 import Data.Symbol (class IsSymbol)
@@ -109,7 +116,7 @@ data Job
   | UnpublishJob UnpublishJobData
   | TransferJob TransferJobData
   | MatrixJob MatrixJobData
-  | PackageSetJob PackageSetJobData
+  | AdminJob AdminJobData
 
 type JobInfo r =
   { jobId :: JobId
@@ -149,9 +156,34 @@ type MatrixJobData = JobInfo
   , jobType :: Proxy "matrix"
   )
 
-type PackageSetJobData = JobInfo
-  ( payload :: PackageSetOperation
-  , jobType :: Proxy "packageset"
+-- | Admin job types for scheduled operations and manual package set updates
+data AdminJobType
+  = AdminPackageTransfer
+  | AdminLegacyImport LegacyImportMode
+  | AdminPackageSetUpdate PackageSetUpdateMode
+  | AdminPackageSetOperation PackageSetOperation -- For manual API requests
+
+derive instance Eq AdminJobType
+
+data LegacyImportMode = DryRun | GenerateRegistry | UpdateRegistry
+
+derive instance Eq LegacyImportMode
+
+data PackageSetUpdateMode = GeneratePackageSet | CommitPackageSet
+
+derive instance Eq PackageSetUpdateMode
+
+-- | Returns the key used in the database for an admin job type
+adminJobTypeKey :: AdminJobType -> String
+adminJobTypeKey = case _ of
+  AdminPackageTransfer -> "package_transfer"
+  AdminLegacyImport _ -> "legacy_import"
+  AdminPackageSetUpdate _ -> "package_set_update"
+  AdminPackageSetOperation _ -> "package_set_operation"
+
+type AdminJobData = JobInfo
+  ( adminJobType :: AdminJobType
+  , jobType :: Proxy "admin"
   )
 
 jobCodec :: CJ.Codec Job
@@ -164,7 +196,7 @@ jobCodec = Codec.codec' decode encode
       <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json)
       <|> map TransferJob (Codec.decode transferJobDataCodec json)
       <|> map MatrixJob (Codec.decode matrixJobDataCodec json)
-      <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json)
+      <|> map AdminJob (Codec.decode adminJobDataCodec json)
 
     encode :: Job -> JSON
     encode = case _ of
@@ -172,7 +204,7 @@ jobCodec = Codec.codec' decode encode
       UnpublishJob j -> CJ.encode unpublishJobDataCodec j
       TransferJob j -> CJ.encode transferJobDataCodec j
       MatrixJob j -> CJ.encode matrixJobDataCodec j
-      PackageSetJob j -> CJ.encode packageSetJobDataCodec j
+      AdminJob j -> CJ.encode adminJobDataCodec j
 
 publishJobDataCodec :: CJ.Codec PublishJobData
 publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object
@@ -242,18 +274,79 @@ matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object
   , payload: Internal.Codec.packageMap Version.codec
   }
 
-packageSetJobDataCodec :: CJ.Codec PackageSetJobData
-packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object
+adminJobDataCodec :: CJ.Codec AdminJobData
+adminJobDataCodec = CJ.named "AdminJob" $ CJ.Record.object
   { jobId: jobIdCodec
-  , jobType: symbolCodec (Proxy :: _ "packageset")
+  , jobType: symbolCodec (Proxy :: _ "admin")
   , createdAt: Internal.Codec.iso8601DateTime
   , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime
   , finishedAt: CJ.Record.optional
Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec - , payload: Operation.packageSetOperationCodec + , adminJobType: adminJobTypeCodec } +adminJobTypeCodec :: CJ.Codec AdminJobType +adminJobTypeCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError AdminJobType + decode json = do + obj <- Codec.decode (CJ.Record.object { type: CJ.string }) json + case obj.type of + "package_transfer" -> pure AdminPackageTransfer + "legacy_import" -> + map (\{ mode } -> AdminLegacyImport mode) + (Codec.decode (CJ.Record.object { mode: legacyImportModeCodec }) json) + "package_set_update" -> + map (\{ mode } -> AdminPackageSetUpdate mode) + (Codec.decode (CJ.Record.object { mode: packageSetUpdateModeCodec }) json) + "package_set_operation" -> + map (\{ payload } -> AdminPackageSetOperation payload) + (Codec.decode (CJ.Record.object { payload: Operation.packageSetOperationCodec }) json) + other -> except $ Left $ CJ.DecodeError.basic $ "Unknown admin job type: " <> other + + encode :: AdminJobType -> JSON + encode = case _ of + AdminPackageTransfer -> + CJ.encode (CJ.Record.object { type: CJ.string }) { type: "package_transfer" } + AdminLegacyImport mode -> + CJ.encode (CJ.Record.object { type: CJ.string, mode: legacyImportModeCodec }) + { type: "legacy_import", mode } + AdminPackageSetUpdate mode -> + CJ.encode (CJ.Record.object { type: CJ.string, mode: packageSetUpdateModeCodec }) + { type: "package_set_update", mode } + AdminPackageSetOperation payload -> + CJ.encode (CJ.Record.object { type: CJ.string, payload: Operation.packageSetOperationCodec }) + { type: "package_set_operation", payload } + +legacyImportModeCodec :: CJ.Codec LegacyImportMode +legacyImportModeCodec = CJ.Sum.enumSum printLegacyImportMode parseLegacyImportMode + where + parseLegacyImportMode = case _ of + "dry_run" -> Just DryRun + "generate_registry" -> Just GenerateRegistry + "update_registry" -> Just UpdateRegistry + _ -> Nothing + +printLegacyImportMode :: LegacyImportMode -> String +printLegacyImportMode = case _ of + DryRun -> "dry_run" + GenerateRegistry -> "generate_registry" + UpdateRegistry -> "update_registry" + +packageSetUpdateModeCodec :: CJ.Codec PackageSetUpdateMode +packageSetUpdateModeCodec = CJ.Sum.enumSum printPackageSetUpdateMode parsePackageSetUpdateMode + where + parsePackageSetUpdateMode = case _ of + "generate" -> Just GeneratePackageSet + "commit" -> Just CommitPackageSet + _ -> Nothing + +printPackageSetUpdateMode :: PackageSetUpdateMode -> String +printPackageSetUpdateMode = case _ of + GeneratePackageSet -> "generate" + CommitPackageSet -> "commit" + jobInfo :: Job -> JobInfo () jobInfo = case _ of PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> @@ -264,7 +357,7 @@ jobInfo = case _ of { jobId, createdAt, startedAt, finishedAt, success, logs } MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> { jobId, createdAt, startedAt, finishedAt, success, logs } - PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + AdminJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> { jobId, createdAt, startedAt, finishedAt, success, logs } newtype JobId = JobId String @@ -280,7 +373,7 @@ data JobType | UnpublishJobType | TransferJobType | MatrixJobType - | PackageSetJobType + | AdminJobType derive instance Eq JobType @@ -290,7 +383,7 @@ printJobType = case _ of UnpublishJobType -> "unpublish" TransferJobType -> "transfer" MatrixJobType -> "matrix" - PackageSetJobType -> "packageset" + 
AdminJobType -> "admin" type LogLine = { level :: LogLevel