From 20d6b216f11cfaa5e2d4f98da38c4736e4bcdd0e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 14 Sep 2024 14:07:21 -0400 Subject: [PATCH 01/36] Update database schemas and add job executor loop --- app/src/App/Effect/Db.purs | 130 +++-- app/src/App/Effect/Log.purs | 2 +- app/src/App/SQLite.js | 173 +++++-- app/src/App/SQLite.purs | 458 +++++++++++++----- app/src/App/Server.purs | 208 +++++--- .../20240914170550_delete_jobs_logs_table.sql | 22 + ...20240914171030_create_job_queue_tables.sql | 56 +++ db/schema.sql | 52 +- lib/src/API/V1.purs | 2 - lib/src/Operation.purs | 21 + 10 files changed, 875 insertions(+), 249 deletions(-) create mode 100644 db/migrations/20240914170550_delete_jobs_logs_table.sql create mode 100644 db/migrations/20240914171030_create_job_queue_tables.sql diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..142149bc0 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,10 +8,12 @@ import Data.String as String import Registry.API.V1 (JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +23,20 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. 
+ data Db a - = InsertLog LogLine a + = InsertPackageJob InsertPackageJob a + | InsertMatrixJob InsertMatrixJob a + | InsertPackageSetJob InsertPackageSetJob a + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) + | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + | DeleteIncompleteJobs a derive instance Functor Db @@ -39,28 +48,51 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. +-- | Select all logs for a given job, filtered by loglevel. selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. 
JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) +selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow + +-- | Insert a new package job into the database. +insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit +insertPackageJob job = Run.lift _db (InsertPackageJob job unit) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit +insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit) + +-- | Insert a new package set job into the database. +insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit) + +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Select the next package job from the database. +selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) +selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next package set job from the database. +selectNextPackageSetJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +deleteIncompleteJobs :: forall r. Run (DB + r) Unit +deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +102,52 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log + InsertPackageJob job next -> do + Run.liftEffect $ SQLite.insertPackageJob env.db job pure next - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertMatrixJob job next -> do + Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure next - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertPackageSetJob job next -> do + Run.liftEffect $ SQLite.insertPackageSetJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId - pure $ reply job + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job + pure next + + SelectJobInfo jobId reply -> do + result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + pure $ reply result + + SelectNextPackageJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + pure $ 
reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + DeleteIncompleteJobs next -> do + Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..a1cb72c0a 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -134,5 +134,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8158695fc..fa9a8b539 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -1,5 +1,11 @@ import Database from "better-sqlite3"; +const JOB_INFO_TABLE = 'job_info' +const LOGS_TABLE = 'logs' +const PACKAGE_JOBS_TABLE = 'package_jobs'; +const MATRIX_JOBS_TABLE = 'matrix_jobs'; +const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; + export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); let db = new Database(path, { @@ -11,49 +17,152 @@ export const connectImpl = (path, logger) => { return db; }; -export const insertLogImpl = (db, logLine) => { - db.prepare( - "INSERT INTO logs 
(jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)"
-  ).run(logLine);
+export const selectJobInfoImpl = (db, jobId) => {
+  const stmt = db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+}
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job.
+const _insertJob = (db, table, columns, job) => {
+  const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns]));
+  const missingFields = requiredFields.filter(field => !(field in job));
+  const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field));
+
+  if (missingFields.length > 0) {
+    throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`);
+  }
+
+  if (extraFields.length > 0) {
+    throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`);
+  }
+
+  const insertInfo = db.prepare(`
+    INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success)
+    VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success)
+  `);
+
+  const insertJob = db.prepare(`
+    INSERT INTO ${table} (${columns.join(', ')})
+    VALUES (${columns.map(col => `@${col}`).join(', ')})
+  `);
+
+  const insert = db.transaction((job) => {
+    insertInfo.run({
+      jobId: job.jobId,
+      createdAt: job.createdAt,
+      startedAt: null,
+      finishedAt: null,
+      success: 0
+    });
+    insertJob.run(job);
+  });
+
+  return insert(job);
+};
+
+export const insertPackageJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ]
+  return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job);
 };
 
-export const selectLogsByJobImpl = (db, jobId, logLevel) => {
-  const row = db
-    .prepare(
-      
"SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC"
-    )
-    .all(jobId, logLevel);
-  return row;
+export const insertMatrixJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ]
+  return _insertJob(db, MATRIX_JOBS_TABLE, columns, job);
 };
 
-export const createJobImpl = (db, job) => {
-  db.prepare(
-    "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)"
-  ).run(job);
+export const insertPackageSetJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'payload' ]
+  return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const finishJobImpl = (db, result) => {
-  db.prepare(
-    "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId"
-  ).run(result);
+export const selectNextPackageJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const selectJobImpl = (db, jobId) => {
-  const row = db
-    .prepare("SELECT * FROM jobs WHERE jobId = ? LIMIT 1")
-    .get(jobId);
-  return row;
+export const selectNextMatrixJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${MATRIX_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const runningJobForPackageImpl = (db, packageName) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? 
ORDER BY createdAt ASC LIMIT 1"
-    )
-    .get(packageName);
-  return row;
+export const selectNextPackageSetJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_SET_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt ASC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
+export const startJobImpl = (db, args) => {
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET startedAt = @startedAt
+    WHERE jobId = @jobId
+  `);
+  return stmt.run(args);
+}
+
+export const finishJobImpl = (db, args) => {
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET success = @success, finishedAt = @finishedAt
+    WHERE jobId = @jobId
+  `);
+  return stmt.run(args);
+}
+
 export const deleteIncompleteJobsImpl = (db) => {
-  db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run();
+  const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`);
+  return stmt.run();
+};
+
+export const insertLogLineImpl = (db, logLine) => {
+  const stmt = db.prepare(`
+    INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp)
+    VALUES (@jobId, @level, @message, @timestamp)
+  `);
+  return stmt.run(logLine);
+};
+
+export const selectLogsByJobImpl = (db, jobId, logLevel, since) => {
+  let query = `
+    SELECT * FROM ${LOGS_TABLE}
+    WHERE jobId = ? AND level >= ?
+  `;
+
+  const params = [jobId, logLevel];
+
+  if (since !== null) {
+    query += ' AND timestamp >= ?';
+    params.push(since);
+  }
+
+  query += ' ORDER BY timestamp ASC';
+
+  const stmt = db.prepare(query);
+  return stmt.all(...params);
 };
diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs
index b3683e84e..8c117fda7 100644
--- a/app/src/App/SQLite.purs
+++ b/app/src/App/SQLite.purs
@@ -1,184 +1,426 @@
+-- | Bindings for the specific SQL queries we emit to the SQLite database. 
Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + +-- TOMORROW: +-- +-- * Add the job executor to server startup +-- * Move the various job details to the API.V1 module since it'll be returned by the UI +-- * Update the router to just create a job when received, and on lookup to return relevant details from the db +-- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the +-- same authentication requirements as for GitHub today. +-- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to +-- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to +-- do the whole toposort thing. +-- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn +-- +-- LATER +-- * Update tests that refer to the DB effect +-- * Adjust the integration test(s) to verify we're getting enforced concurrency control +-- * Update the GitHub issue module so it only submits a request to the registry and returns +-- a job id, rather than actually running the fns directly. Poll for a result still and +-- comment when the job completes. +-- +-- FOLLOWUP +-- * Punt on the squash commit until later. 
module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob - , SQLite + ( SQLite + , ConnectOptions , connect - , createJob - , deleteIncompleteJobs + , JobInfo + , selectJobInfo + , InsertPackageJob + , insertPackageJob + , InsertMatrixJob + , insertMatrixJob + , InsertPackageSetJob + , insertPackageSetJob + , FinishJob , finishJob - , insertLog - , runningJobForPackage - , selectJob + , StartJob + , startJob + , deleteIncompleteJobs + , insertLogLine , selectLogsByJob + , PackageJobDetails + , selectNextPackageJob + , MatrixJobDetails + , selectNextMatrixJob + , PackageSetJobDetails + , selectNextPackageSetJob ) where import Registry.App.Prelude -import Data.Array as Array +import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit - -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 
connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. +type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + isSuccess <- case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: isSuccess + } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +selectJobInfo db (JobId jobId) = do + maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId + pure $ traverse jobInfoFromJSRep maybeJobInfo + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +finishJobToJSRep :: FinishJob -> 
JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: if success then 1 else 0 + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSStartJob = + { jobId :: String + , startedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = 
Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit + +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl -type NewJob = +type InsertPackageJob = { jobId :: JobId , jobType :: JobType - , createdAt :: DateTime , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation } -type JSNewJob = +type JSInsertPackageJob = { jobId :: String , jobType :: String - , createdAt :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId +insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = + { jobId: un JobId jobId , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt , packageName: PackageName.print packageName - , ref + , packageVersion: Version.print packageVersion + , payload: stringifyJson Operation.packageOperationCodec payload + } + +foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
+insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit +insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep + +type InsertMatrixJob = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } -type JobResult = +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit +insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep + +type InsertPackageSetJob = { jobId :: JobId - , finishedAt :: DateTime - , success :: Boolean + , payload :: PackageSetOperation } -type JSJobResult = +type JSInsertPackageSetJob = { jobId :: String - , finishedAt :: String - , success :: Int + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload } -type Job = +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob 
Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +type PackageJobDetails = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation , createdAt :: DateTime - , finishedAt :: Maybe DateTime - , success :: Boolean + , startedAt :: Maybe DateTime } -type JSJob = +type JSPackageJobDetails = { jobId :: String , jobType :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String , createdAt :: String - , finishedAt :: Nullable String - , success :: Int + , startedAt :: Nullable String } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do + ty <- API.V1.parseJobType jobType + name <- PackageName.parse packageName + version <- Version.parse packageVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { 
jobId: JobId jobId + , jobType: ty + , packageName: name + , packageVersion: version + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) + +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails + +type MatrixJobDetails = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSMatrixJobDetails = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> 
Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +type PackageSetJobDetails = + { jobId :: JobId + , payload :: PackageSetOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSPackageSetJobDetails = + { jobId :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + pure + { jobId: JobId jobId + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db 
packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp + } -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + (Nullable.toNullable timestamp) + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index b9aa35b1c..c9a8aac8a 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -3,13 +3,22 @@ module Registry.App.Server where import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) +import Control.Parallel as Parallel import Data.Codec.JSON as CJ +import Data.DateTime (DateTime(..)) +import Data.DateTime as DateTime import Data.Formatter.DateTime as Formatter.DateTime +import Data.Lens (Lens') +import Data.Lens as Lens +import Data.Lens.Record as Lens.Record import Data.Newtype (unwrap) import Data.String as String +import Data.Time.Duration (Minutes(..)) import Data.UUID.Random as UUID +import Effect.Aff (Fiber, Milliseconds(..)) import Effect.Aff as Aff import Effect.Class.Console as Console +import Effect.Ref as Ref import Fetch.Retry as Fetch.Retry import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) import HTTPurple as HTTPurple @@ -43,7 +52,7 @@ import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) import Registry.App.SQLite as SQLite import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (GitHubToken, Octokit) @@ -56,40 +65,121 @@ import Run (AFF, EFFECT, Run) import Run as Run 
import Run.Except (EXCEPT) import Run.Except as Except +import Run.Except as Run.Except newJobId :: forall m. MonadEffect m => m JobId newJobId = liftEffect do id <- UUID.make pure $ JobId $ UUID.toString id +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + jobResult <- do + let execute = map Just (runEffects env (executeJob now job)) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + finishResult <- runEffects env $ case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." 
+ Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + Db.finishJob { jobId, finishedAt: now, success: true } + + case finishResult of + Left err -> pure $ Left err + Right _ -> loop + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob now = case _ of + PackageJob { jobId } -> do + Db.startJob { jobId, startedAt: now } + pure unit -- UNIMPLEMENTED + MatrixJob _details -> + pure unit -- UNIMPLEMENTED + PackageSetJob _details -> + pure unit -- UNIMPLEMENTED + +squashCommitRegistry :: Run ServerEffects Unit +squashCommitRegistry = do + pure unit + router :: ServerEnv -> Request Route -> Run ServerEffects Response router env { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish + -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do + -- Log.info $ "Received Publish request, job id: " <> unwrap jobId + -- API.publish Nothing publish + HTTPurple.emptyResponse Status.ok Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap 
jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." + -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + -- case auth.payload of + -- Operation.Unpublish { name, version } -> do + -- forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do + -- Log.info $ "Received Unpublish request, job id: " <> unwrap jobId + -- API.authenticated auth + -- _ -> + -- HTTPurple.badRequest "Expected unpublish operation." + HTTPurple.emptyResponse Status.ok Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." + HTTPurple.emptyResponse Status.ok + -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + -- case auth.payload of + -- Operation.Transfer { name } -> do + -- forkPipelineJob name "" TransferJob \jobId -> do + -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId + -- API.authenticated auth + -- _ -> + -- HTTPurple.badRequest "Expected transfer operation." 
Jobs, Get -> do jsonOk (CJ.array V1.jobCodec) [] @@ -97,12 +187,17 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) Status, Get -> HTTPurple.emptyResponse Status.ok @@ -112,35 +207,34 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a 
new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } + -- where + -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response + -- forkPipelineJob packageName ref jobType action = do + -- -- First thing we check if the package already has a pipeline in progress + -- lift (Db.runningJobForPackage packageName) >>= case _ of + -- -- If yes, we error out if it's the wrong kind, return it if it's the same type + -- Right { jobId, jobType: runningJobType } -> do + -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId + -- case runningJobType == jobType of + -- true -> jsonOk V1.jobCreatedResponseCodec { jobId } + -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName + -- -- otherwise spin up a new thread + -- Left _err -> do + -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" + -- jobId <- newJobId + -- now <- nowUTC + -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } + -- lift $ Db.createJob newJob + -- let newEnv = env { jobId = Just jobId } + + -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do + -- result <- runEffects newEnv (action jobId) + -- case result of + -- Left _ -> pure unit + -- Right _ -> do + -- finishedAt <- nowUTC + -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) + -- jsonOk 
V1.jobCreatedResponseCodec { jobId } type ServerEnvVars = { token :: GitHubToken @@ -219,7 +313,11 @@ createServerEnv = do type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response runServer env router' request = do result <- runEffects env (router' env request) case result of diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..2b01deb0b --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,56 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Package-oriented jobs 
(publish/unpublish/transfer) +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs (one compiler, all packages) +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package set jobs +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); + +-- migrate:down + +DROP TABLE job_info; +DROP TABLE package_jobs; +DROP TABLE matrix_jobs; +DROP TABLE package_set_jobs; +DROP TABLE logs; diff --git a/db/schema.sql b/db/schema.sql index 116de1dda..2ad866068 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -1,21 +1,45 @@ CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key); -CREATE TABLE jobs ( - jobId text primary key not null, - jobType text not null, - packageName text not null, - ref text not null, - createdAt text not null, - finishedAt text, - success integer not null default 0 +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); +CREATE TABLE package_jobs ( + jobId 
TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..67216ca35 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -68,7 +68,6 @@ type Job = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -80,7 +79,6 @@ jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec , jobType: jobTypeCodec , packageName: PackageName.codec - , ref: CJ.string , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional 
Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..521bc2883 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -23,6 +23,8 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +60,18 @@ data PackageOperation derive instance Eq PackageOperation +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -178,6 +192,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. 
-- | -- | For full details, see the registry spec: From 4b9743ce99f2f28ce94c66252cfdf194c4c46b6b Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 21 Jun 2025 23:13:52 -0400 Subject: [PATCH 02/36] Split Server module into Env, Router, JobExecutor, and Main --- app/spago.yaml | 2 +- app/src/App/JobExecutor.purs | 89 +++++++ app/src/App/Main.purs | 85 +++++++ app/src/App/SQLite.js | 4 +- app/src/App/SQLite.purs | 29 +-- app/src/App/Server.purs | 441 --------------------------------- app/src/App/Server/Env.purs | 188 ++++++++++++++ app/src/App/Server/Router.purs | 84 +++++++ lib/src/API/V1.purs | 25 +- lib/src/JobType.purs | 26 ++ lib/src/Operation.purs | 17 +- 11 files changed, 502 insertions(+), 488 deletions(-) create mode 100644 app/src/App/JobExecutor.purs create mode 100644 app/src/App/Main.purs delete mode 100644 app/src/App/Server.purs create mode 100644 app/src/App/Server/Env.purs create mode 100644 app/src/App/Server/Router.purs create mode 100644 lib/src/JobType.purs diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs new file mode 100644 index 000000000..0bd6fa44f --- /dev/null +++ b/app/src/App/JobExecutor.purs @@ -0,0 +1,89 @@ +module Registry.App.JobExecutor where + +import Registry.App.Prelude hiding ((/)) + +import Control.Parallel as Parallel +import Data.DateTime (DateTime) +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Registry.API.V1 (JobId(..)) +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) +import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Run (Run) 
+import Run.Except (EXCEPT) + +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + jobResult <- do + let execute = Just <$> runEffects env (executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + finishResult <- runEffects env case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." 
+ Db.finishJob { jobId, finishedAt: now, success: true } + + case finishResult of + Left err -> pure $ Left err + Right _ -> loop + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob now = case _ of + PackageJob { jobId } -> do + Db.startJob { jobId, startedAt: now } + pure unit -- UNIMPLEMENTED + MatrixJob _details -> + pure unit -- UNIMPLEMENTED + PackageSetJob _details -> + pure unit -- UNIMPLEMENTED diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..c734d90f7 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,85 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.String as String +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import HTTPurple (Request, Response) +import HTTPurple as HTTPurple +import Node.Process as Process +import Registry.API.V1 (Route) +import Registry.API.V1 as V1 +import Registry.App.Effect.Env as Env +import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + -- Start healthcheck ping loop if URL is configured + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> do + _healthcheck <- Aff.launchAff do + let + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." 
+ Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> + Console.error "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." + + loop limit + pure unit + + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + + _close <- HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer env + } + pure unit + where + runServer :: ServerEnv -> Request Route -> Aff Response + runServer env request = do + result <- runEffects env (Router.router env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index fa9a8b539..97521d202 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -67,12 +67,12 @@ const _insertJob = (db, table, columns, job) => { }; export const insertPackageJobImpl = (db, job) => { - const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ] + const columns = [ 'jobId', 'jobType', 'payload' ] return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { - const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ] + const columns = [ 'jobId', 'compilerVersion', 'payload' ] return 
_insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 8c117fda7..b56575757 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -59,10 +59,11 @@ import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Registry.API.V1 (JobId(..), LogLevel, LogLine) import Registry.API.V1 as API.V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.Operation (PackageOperation, PackageSetOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName @@ -175,26 +176,22 @@ deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl type InsertPackageJob = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , packageVersion :: Version , payload :: PackageOperation } type JSInsertPackageJob = { jobId :: String , jobType :: String - , packageName :: String - , packageVersion :: String , payload :: String } insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = +insertPackageJobToJSRep { jobId, payload } = { jobId: un JobId jobId - , jobType: API.V1.printJobType jobType - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion + , jobType: JobType.print case payload of + Operation.Publish _ -> JobType.PublishJob + Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob + Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob , payload: stringifyJson Operation.packageOperationCodec payload } @@ -206,25 +203,19 @@ insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db 
<<< insertP type InsertMatrixJob = { jobId :: JobId - , packageName :: PackageName - , packageVersion :: Version , compilerVersion :: Version , payload :: Map PackageName Version } type JSInsertMatrixJob = { jobId :: String - , packageName :: String - , packageVersion :: String , compilerVersion :: String , payload :: String } insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = +insertMatrixJobToJSRep { jobId, compilerVersion, payload } = { jobId: un JobId jobId - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } @@ -257,7 +248,7 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -277,7 +268,7 @@ type JSPackageJobDetails = packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- API.V1.parseJobType jobType + ty <- JobType.parse jobType name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index c9a8aac8a..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,441 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Control.Parallel as Parallel -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime(..)) -import Data.DateTime as DateTime -import Data.Formatter.DateTime as 
Formatter.DateTime -import Data.Lens (Lens') -import Data.Lens as Lens -import Data.Lens.Record as Lens.Record -import Data.Newtype (unwrap) -import Data.String as String -import Data.Time.Duration (Minutes(..)) -import Data.UUID.Random as UUID -import Effect.Aff (Fiber, Milliseconds(..)) -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Effect.Ref as Ref -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) -import Registry.App.SQLite as 
SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except -import Run.Except as Run.Except - -newJobId :: forall m. MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -data JobDetails - = PackageJob PackageJobDetails - | MatrixJob MatrixJobDetails - | PackageSetJob PackageSetJobDetails - -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do - Db.selectNextPackageJob >>= case _ of - Just job -> pure $ Just $ PackageJob job - Nothing -> Db.selectNextMatrixJob >>= case _ of - Just job -> pure $ Just $ MatrixJob job - Nothing -> Db.selectNextPackageSetJob >>= case _ of - Just job -> pure $ Just $ PackageSetJob job - Nothing -> pure Nothing - -runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop - where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err - - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop - - Right (Just job) -> do - now <- nowUTC - - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId - - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. 
- jobResult <- do - let execute = map Just (runEffects env (executeJob now job)) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - - finishResult <- runEffects env $ case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." - Db.finishJob { jobId, finishedAt: now, success: true } - - case finishResult of - Left err -> pure $ Left err - Right _ -> loop - -executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } - pure unit -- UNIMPLEMENTED - MatrixJob _details -> - pure unit -- UNIMPLEMENTED - PackageSetJob _details -> - pure unit -- UNIMPLEMENTED - -squashCommitRegistry :: Run ServerEffects Unit -squashCommitRegistry = do - pure unit - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - -- Log.info $ "Received Publish request, job id: " <> unwrap jobId - -- API.publish Nothing publish - HTTPurple.emptyResponse Status.ok - - Unpublish, Post -> do - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Unpublish { name, version } -> do - -- forkPipelineJob name (Version.print version) 
UnpublishJob \jobId -> do - -- Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected unpublish operation." - HTTPurple.emptyResponse Status.ok - - Transfer, Post -> do - HTTPurple.emptyResponse Status.ok - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Transfer { name } -> do - -- forkPipelineJob name "" TransferJob \jobId -> do - -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right Nothing -> - HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). 
- -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - -- where - -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - -- forkPipelineJob packageName ref jobType action = do - -- -- First thing we check if the package already has a pipeline in progress - -- lift (Db.runningJobForPackage packageName) >>= case _ of - -- -- If yes, we error out if it's the wrong kind, return it if it's the same type - -- Right { jobId, jobType: runningJobType } -> do - -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - -- case runningJobType == jobType of - -- true -> jsonOk V1.jobCreatedResponseCodec { jobId } - -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- -- otherwise spin up a new thread - -- Left _err -> do - -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - -- jobId <- newJobId - -- now <- nowUTC - -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } - -- lift $ Db.createJob newJob - -- let newEnv = env { jobId = Just jobId } - - -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - -- result <- runEffects newEnv (action jobId) - -- case result of - -- Left _ -> pure unit - -- Right _ -> do - -- finishedAt <- nowUTC - -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - -- jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - 
token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. - -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. 
- liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer - :: ServerEnv - -> (ServerEnv -> Request Route -> Run ServerEffects Response) - -> Request Route - -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." 
- Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 
000000000..1f6fdc489 --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,188 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment (COMMENT) +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: 
String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. 
+ -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.deleteIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Comment.interpret Comment.handleLog + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 
000000000..27af29a24 --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,84 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Cont (ContT) +import Data.Codec.JSON as CJ +import Data.UUID.Random as UUID +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.Operation (PackageOperation) +import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Run (Run) +import Run.Except as Run.Except + +router :: ServerEnv -> Request Route -> Run ServerEffects Response +router env { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + forkPackageJob $ Operation.Publish publish + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected transfer operation." 
+ + Jobs, Get -> do + jsonOk (CJ.array V1.jobCodec) [] + + Job jobId { level: maybeLogLevel, since }, Get -> do + let logLevel = fromMaybe Error maybeLogLevel + logs <- lift $ Db.selectLogsByJob jobId logLevel since + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.notFound + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound + where + forkPackageJob :: PackageOperation -> ContT Response (Run _) Response + forkPackageJob operation = do + lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) + jobId <- newJobId + lift $ Db.insertPackageJob { jobId, payload: operation } + jsonOk V1.jobCreatedResponseCodec { jobId } + + newJobId :: forall m. 
MonadEffect m => m JobId + newJobId = liftEffect do + id <- UUID.make + pure $ JobId $ UUID.toString id diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 67216ca35..31c15866c 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,6 +15,7 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') @@ -66,7 +67,7 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime @@ -77,7 +78,7 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: JobType.codec , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime @@ -92,26 +93,6 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - -printJobType :: JobType -> String -printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum printJobType (hush <<< parseJobType) - type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs new file mode 100644 
index 000000000..b8dceaf38 --- /dev/null +++ b/lib/src/JobType.purs @@ -0,0 +1,26 @@ +module Registry.JobType where + +import Prelude +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Sum as CJ.Sum +import Data.Either (Either(..), hush) + +data JobType = PublishJob | UnpublishJob | TransferJob + +derive instance Eq JobType + +parse :: String -> Either String JobType +parse = case _ of + "publish" -> Right PublishJob + "unpublish" -> Right UnpublishJob + "transfer" -> Right TransferJob + j -> Left $ "Invalid job type " <> show j + +print :: JobType -> String +print = case _ of + PublishJob -> "publish" + UnpublishJob -> "unpublish" + TransferJob -> "transfer" + +codec :: CJ.Codec JobType +codec = CJ.Sum.enumSum print (hush <<< parse) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 521bc2883..518c1a6de 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData @@ -23,13 +23,15 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName , packageOperationCodec , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec , unpublishCodec - ) where + ) + where import Prelude @@ -60,6 +62,13 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + -- | A codec for encoding and decoding a `PackageOperation` as JSON. 
packageOperationCodec :: CJ.Codec PackageOperation packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode @@ -88,6 +97,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -98,6 +108,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } From 2fe96357cd553051ae70088891279dc5c6b22b67 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Wed, 25 Jun 2025 20:04:48 -0400 Subject: [PATCH 03/36] Fix up build --- app/src/App/JobExecutor.purs | 76 ++-- app/test/App/API.purs | 3 + app/test/App/GitHubIssue.purs | 3 + package-lock.json | 642 +++++++++++++++++++++++++++++++- package.json | 5 +- scripts/src/LegacyImporter.purs | 1 + scripts/src/PackageDeleter.purs | 1 + 7 files changed, 690 insertions(+), 41 deletions(-) diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs index 0bd6fa44f..e5d29bd95 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/JobExecutor.purs @@ -11,7 +11,7 @@ import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) -import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Run (Run) import Run.Except (EXCEPT) @@ -31,57 +31,55 @@ findNextAvailableJob = do Nothing -> pure Nothing runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop +runJobExecutor env = runEffects env do + 
Db.deleteIncompleteJobs + loop where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err + loop = do + mJob <- findNextAvailableJob + case mJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 100.0) + loop - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop + Just job -> do + now <- nowUTC - Right (Just job) -> do - now <- nowUTC + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId + Db.startJob { jobId, startedAt: now } - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. - jobResult <- do - let execute = Just <$> runEffects env (executeJob now job) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. + jobResult <- liftAff do + let execute = Just <$> (runEffects env $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - finishResult <- runEffects env case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." 
+ pure false - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + pure false - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." - Db.finishJob { jobId, finishedAt: now, success: true } + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + pure true - case finishResult of - Left err -> pure $ Left err - Right _ -> loop + Db.finishJob { jobId, finishedAt: now, success } + loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob now = case _ of PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } pure unit -- UNIMPLEMENTED MatrixJob _details -> pure unit -- UNIMPLEMENTED diff --git a/app/test/App/API.purs b/app/test/App/API.purs index caaf6c215..36a2e61a2 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,6 +96,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version , resolutions: Nothing } @@ -158,6 +159,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> @@ -172,6 +174,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..8276bf708 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = 
do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Nothing diff --git a/package-lock.json b/package-lock.json index 93959c062..f4b4a86cc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,10 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } }, "app": { "name": "registry-app", @@ -1598,6 +1601,12 @@ "node": ">=14.0.0" } }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1648,6 +1657,15 @@ "prebuild-install": "^7.1.1" } }, + "node_modules/big-integer": { + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": 
"sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", + "license": "Unlicense", + "engines": { + "node": ">=0.6" + } + }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1676,6 +1694,27 @@ "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, + "node_modules/bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "license": "MIT", + "dependencies": { + "big-integer": "^1.6.44" + }, + "engines": { + "node": ">= 5.10.0" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -1719,6 +1758,21 @@ "node": ">=10.0.0" } }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "license": "MIT", + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -1741,6 +1795,20 @@ "node": ">=10.0.0" } }, + "node_modules/cross-spawn": { + 
"version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -1763,6 +1831,52 @@ "node": ">=4.0.0" } }, + "node_modules/default-browser": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "license": "MIT", + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": "^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "license": "MIT", + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, 
"node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", @@ -1784,6 +1898,50 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "license": "BSD-2-Clause", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -1900,6 +2058,18 @@ "node": ">=10" } }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": 
"sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -1921,6 +2091,15 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, + "node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14.18.0" + } + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -1950,6 +2129,21 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -1969,6 +2163,24 @@ "node": ">=0.10.0" } }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -1985,6 +2197,51 @@ "node": ">=0.10.0" } }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": 
"sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -2004,6 +2261,15 @@ "jsonrepair": "bin/cli.js" } }, + "node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^1.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -2015,6 +2281,34 @@ "node": ">=10" } }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "license": "MIT" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "license": "MIT" + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -2035,6 +2329,18 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2142,6 +2448,33 @@ } } }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -2150,6 +2483,56 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "9.1.0", + "resolved": 
"https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "license": "MIT", + "dependencies": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2195,6 +2578,15 @@ "once": "^1.3.1" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2262,6 +2654,124 @@ "node": ">=0.10.0" } }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + 
"rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "license": "MIT", + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/run-applescript/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/run-applescript/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/run-applescript/node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/run-applescript/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/run-applescript/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -2322,6 +2832,33 @@ "node": ">=10" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + 
"shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -2365,6 +2902,34 @@ "simple-concat": "^1.0.0" } }, + "node_modules/spago": { + "version": "0.93.19", + "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", + "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", + "license": "BSD-3-Clause", + "dependencies": { + "better-sqlite3": "^8.6.0", + "env-paths": "^3.0.0", + "fast-glob": "^3.2.11", + "fs-extra": "^10.0.0", + "fuse.js": "^6.5.3", + "glob": "^7.1.6", + "markdown-it": "^12.0.4", + "open": "^9.1.0", + "punycode": "^2.3.0", + "semver": "^7.3.5", + "spdx-expression-parse": "^3.0.1", + "ssh2": "^1.14.0", + "supports-color": "^9.2.3", + "tar": "^6.1.11", + "tmp": "^0.2.1", + "xhr2": "^0.2.1", + "yaml": "^2.1.1" + }, + "bin": { + "spago": "bin/bundle.js" + } + }, "node_modules/spdx-exceptions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", @@ -2409,6 +2974,18 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": 
"sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -2422,6 +2999,18 @@ "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" }, + "node_modules/supports-color": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", + "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -2469,6 +3058,18 @@ "node": ">=6" } }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tmp": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", @@ -2515,6 +3116,12 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": 
"sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "license": "MIT" + }, "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", @@ -2528,6 +3135,15 @@ "node": ">= 10.0.0" } }, + "node_modules/untildify": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -2555,11 +3171,35 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/xhr2": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", + "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 76bc4e96e..5066e42c0 100644 --- a/package.json +++ b/package.json @@ -6,5 +6,8 @@ "app", "foreign", "lib" - ] + 
], + "dependencies": { + "spago": "^0.93.19" + } } diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index d642d41dc..0fdc94a06 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -471,6 +471,7 @@ runLegacyImport logs = do { name: manifest.name , location: Just manifest.location , ref + , version: manifest.version , compiler , resolutions: Just resolutions } diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index f0cb1c63f..db9b54d23 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -243,6 +243,7 @@ deleteVersion arguments name version = do { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref + , version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } From a4f1047e56c4d11666ac4aa622620ae16aa5d9f2 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 5 Jul 2025 23:27:56 -0400 Subject: [PATCH 04/36] Run job executor --- app/src/App/Main.purs | 133 ++++++++++++---------- app/src/App/Prelude.purs | 2 +- app/src/App/SQLite.purs | 6 +- app/src/App/{ => Server}/JobExecutor.purs | 22 ++-- app/src/App/Server/Router.purs | 46 ++++++-- lib/src/API/V1.purs | 7 -- lib/src/JobType.purs | 1 + lib/src/Operation.purs | 3 +- 8 files changed, 129 insertions(+), 91 deletions(-) rename app/src/App/{ => Server}/JobExecutor.purs (82%) diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index c734d90f7..df94b6e17 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -2,84 +2,103 @@ module Registry.App.Main where import Registry.App.Prelude hiding ((/)) -import Data.String as String +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Debug (traceM) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry -import HTTPurple (Request, Response) -import HTTPurple as HTTPurple import Node.Process as 
Process -import Registry.API.V1 (Route) -import Registry.API.V1 as V1 -import Registry.App.Effect.Env as Env -import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor import Registry.App.Server.Router as Router main :: Effect Unit -main = +main = do + traceM 1 createServerEnv # Aff.runAff_ case _ of Left error -> do + traceM 2 Console.log $ "Failed to start server: " <> Aff.message error Process.exit' 1 Right env -> do - -- Start healthcheck ping loop if URL is configured case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n + loop n = do + traceM 4 + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + traceM 5 + Aff.delay fiveMinutes + loop n - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) + Cancelled | n >= 0 -> do + traceM 6 + Console.warn $ "Healthchecks cancelled, will retry..." 
+ Aff.delay oneMinute + loop (n - 1) - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) + Failed error | n >= 0 -> do + traceM 7 + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) + Succeeded { status } | status /= 200, n >= 0 -> do + traceM 8 + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." + Cancelled -> do + traceM 9 + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + Failed error -> do + traceM 10 + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." + Succeeded _ -> do + traceM 11 + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." 
- loop limit - pure unit + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + traceM 12 + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional Env.serverPort + loop restartDelay = do + traceM 13 + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env - } - pure unit - where - runServer :: ServerEnv -> Request Route -> Aff Response - runServer env request = do - result <- runEffects env (Router.router env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response + traceM 14 + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." + + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. 
+ let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay + + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 7a046414d..5e586ebae 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -60,7 +60,7 @@ import Data.List (List) as Extra import Data.Map (Map) as Extra import Data.Map as Map import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe -import Data.Newtype (class Newtype, un) as Extra +import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra import Data.Newtype as Newtype import Data.Nullable (Nullable, toMaybe, toNullable) as Extra import Data.Set (Set) as Extra diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b56575757..208befb9a 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -248,7 +248,6 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -258,7 +257,6 @@ type PackageJobDetails = type JSPackageJobDetails = { jobId :: String - , jobType :: String , packageName :: String , packageVersion :: String , payload :: String @@ -267,8 +265,7 @@ type JSPackageJobDetails = } packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- JobType.parse jobType +packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt @@ -276,7 +273,6 @@ packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, 
payloa parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload pure { jobId: JobId jobId - , jobType: ty , packageName: name , packageVersion: version , payload: parsed diff --git a/app/src/App/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs similarity index 82% rename from app/src/App/JobExecutor.purs rename to app/src/App/Server/JobExecutor.purs index e5d29bd95..125a9a7a3 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,4 +1,4 @@ -module Registry.App.JobExecutor where +module Registry.App.Server.JobExecutor where import Registry.App.Prelude hiding ((/)) @@ -6,12 +6,13 @@ import Control.Parallel as Parallel import Data.DateTime (DateTime) import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff -import Registry.API.V1 (JobId(..)) +import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.Operation as Operation import Run (Run) import Run.Except (EXCEPT) @@ -21,7 +22,7 @@ data JobDetails | PackageSetJob PackageSetJobDetails findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do +findNextAvailableJob = Db.selectNextPackageJob >>= case _ of Just job -> pure $ Just $ PackageJob job Nothing -> Db.selectNextMatrixJob >>= case _ of @@ -63,24 +64,27 @@ runJobExecutor env = runEffects env do success <- case jobResult of Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." + Log.error $ "Job " <> unwrap jobId <> " timed out." 
pure false Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err pure false Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." + Log.info $ "Job " <> unwrap jobId <> " succeeded." pure true Db.finishJob { jobId, finishedAt: now, success } loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - pure unit -- UNIMPLEMENTED +executeJob _ = case _ of + PackageJob { payload: Operation.Publish p } -> + API.publish Nothing p + PackageJob { payload: Operation.Authenticated auth } -> + API.authenticated auth + MatrixJob _details -> pure unit -- UNIMPLEMENTED PackageSetJob _details -> diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 27af29a24..840dab5a0 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,23 +4,46 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ +import Data.String as String import Data.UUID.Random as UUID +import Effect.Aff as Aff +import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log -import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) import Registry.Operation (PackageOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName import Run (Run) import Run.Except as Run.Except -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = 
HTTPurple.usingCont case route, method of +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer + } + where + runServer :: Request Route -> Aff Response + runServer request = do + result <- runEffects env (router request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish @@ -45,22 +68,25 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.badRequest "Expected transfer operation." Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] + jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: bottom, finishedAt: Nothing, success: true, logs: [] } ] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound Right Nothing -> HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). 
- -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + Right (Just job) -> + jsonOk V1.jobCodec + { jobId + , createdAt: job.createdAt + , finishedAt: job.finishedAt + , success: job.success + , logs + } Status, Get -> HTTPurple.emptyResponse Status.ok diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 31c15866c..4bae692f5 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,9 +15,6 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.JobType as JobType -import Registry.PackageName (PackageName) -import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG @@ -67,8 +64,6 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType.JobType - , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -78,8 +73,6 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: JobType.codec - , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs index b8dceaf38..dbc4eaf01 100644 --- a/lib/src/JobType.purs +++ b/lib/src/JobType.purs @@ -1,6 +1,7 @@ module Registry.JobType where import Prelude + import Data.Codec.JSON as CJ import Data.Codec.JSON.Sum as CJ.Sum import Data.Either (Either(..), hush) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 518c1a6de..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -30,8 +30,7 @@ module Registry.Operation , publishCodec , transferCodec , unpublishCodec - ) - where + ) where import Prelude From 
dfd7e78bc04e9d1ad9a9213a71c61dc922309fc8 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Tue, 9 Dec 2025 12:27:40 +0200 Subject: [PATCH 05/36] Fix integration tests --- app-e2e/src/Test/E2E/Publish.purs | 7 +- app/src/App/API.purs | 2 +- app/src/App/Main.purs | 14 -- app/src/App/SQLite.js | 11 +- app/src/App/SQLite.purs | 197 ++++++++++-------- app/src/App/Server/JobExecutor.purs | 27 ++- app/src/App/Server/Router.purs | 18 +- ...20240914171030_create_job_queue_tables.sql | 1 - db/schema.sql | 1 - lib/src/Metadata.purs | 5 - nix/overlay.nix | 4 +- package-lock.json | 76 +++++-- scripts/src/Solver.purs | 1 - test-utils/src/Registry/Test/E2E/Client.purs | 8 +- 14 files changed, 210 insertions(+), 162 deletions(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index f7bd1d63e..051d1931b 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -56,6 +56,7 @@ spec = do , ref: "v4.0.0" , compiler: Utils.unsafeVersion "0.15.9" , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" } -- Submit publish request @@ -79,6 +80,6 @@ spec = do Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" +-- Assert.shouldEqual job.jobType JobType.PublishJob +-- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") +-- Assert.shouldEqual job.ref "v4.0.0" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 89322d52b..8972a8230 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -31,7 +31,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format 
import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index df94b6e17..e638cc684 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -4,7 +4,6 @@ import Registry.App.Prelude hiding ((/)) import Data.DateTime (diff) import Data.Time.Duration (Milliseconds(..), Seconds(..)) -import Debug (traceM) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry @@ -15,10 +14,8 @@ import Registry.App.Server.Router as Router main :: Effect Unit main = do - traceM 1 createServerEnv # Aff.runAff_ case _ of Left error -> do - traceM 2 Console.log $ "Failed to start server: " <> Aff.message error Process.exit' 1 Right env -> do @@ -36,58 +33,47 @@ main = do fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) loop n = do - traceM 4 Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of Succeeded { status } | status == 200 -> do - traceM 5 Aff.delay fiveMinutes loop n Cancelled | n >= 0 -> do - traceM 6 Console.warn $ "Healthchecks cancelled, will retry..." Aff.delay oneMinute loop (n - 1) Failed error | n >= 0 -> do - traceM 7 Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error Aff.delay oneMinute loop (n - 1) Succeeded { status } | status /= 200, n >= 0 -> do - traceM 8 Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status Aff.delay oneMinute loop (n - 1) Cancelled -> do - traceM 9 Console.error "Healthchecks cancelled and failure limit reached, will not retry." Failed error -> do - traceM 10 Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error Succeeded _ -> do - traceM 11 Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." 
jobExecutor :: ServerEnv -> Aff Unit jobExecutor env = do - traceM 12 loop initialRestartDelay where initialRestartDelay = Milliseconds 100.0 loop restartDelay = do - traceM 13 start <- nowUTC result <- JobExecutor.runJobExecutor env end <- nowUTC - traceM 14 Console.error case result of Left error -> "Job executor failed: " <> Aff.message error Right _ -> "Job executor exited for no reason." diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 97521d202..1e8042cca 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -44,7 +44,7 @@ const _insertJob = (db, table, columns, job) => { const insertInfo = db.prepare(` INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) - VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) `); const insertJob = db.prepare(` @@ -67,17 +67,17 @@ const _insertJob = (db, table, columns, job) => { }; export const insertPackageJobImpl = (db, job) => { - const columns = [ 'jobId', 'jobType', 'payload' ] + const columns = ['jobId', 'jobType', 'packageName', 'payload'] return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { - const columns = [ 'jobId', 'compilerVersion', 'payload' ] + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; export const insertPackageSetJobImpl = (db, job) => { - const columns = [ 'jobId', 'payload' ] + const columns = ['jobId', 'payload'] return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; @@ -87,6 +87,7 @@ export const selectNextPackageJobImpl = (db) => { FROM ${PACKAGE_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); @@ -99,6 +100,7 @@ export const selectNextMatrixJobImpl = (db) => { FROM ${MATRIX_JOBS_TABLE} job 
JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); @@ -111,6 +113,7 @@ export const selectNextPackageSetJobImpl = (db) => { FROM ${PACKAGE_SET_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 208befb9a..dd4268451 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -83,6 +83,9 @@ type ConnectOptions = connect :: ConnectOptions -> Effect SQLite connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +-------------------------------------------------------------------------------- +-- job_info table + -- | Metadata about a particular package, package set, or matrix job. type JobInfo = { jobId :: JobId @@ -124,6 +127,30 @@ selectJobInfo db (JobId jobId) = do maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId pure $ traverse jobInfoFromJSRep maybeJobInfo +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } + +type JSStartJob = + { jobId :: String + , startedAt :: String + } + +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt + } + +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit + +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + type FinishJob = { jobId :: JobId , success :: Boolean @@ -145,34 +172,52 @@ finishJobToJSRep { jobId, success, finishedAt } = foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -finishJob :: 
SQLite -> FinishJob -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit -type StartJob = +-- TODO: we shouldn't delete them I think? just remove the startedAt so they +-- can be retried +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl + +-------------------------------------------------------------------------------- +-- package_jobs table + +type PackageJobDetails = { jobId :: JobId - , startedAt :: DateTime + , packageName :: PackageName + , payload :: PackageOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime } -type JSStartJob = +type JSPackageJobDetails = { jobId :: String - , startedAt :: String - } - -startJobToJSRep :: StartJob -> JSStartJob -startJobToJSRep { jobId, startedAt } = - { jobId: un JobId jobId - , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt + , packageName :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String } -foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit - -startJob :: SQLite -> StartJob -> Effect Unit -startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , packageName: name + , payload: parsed + , createdAt: created + , startedAt: started + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +foreign import selectNextPackageJobImpl :: EffectFn1 
SQLite (Nullable JSPackageJobDetails) -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails type InsertPackageJob = { jobId :: JobId @@ -182,24 +227,35 @@ type InsertPackageJob = type JSInsertPackageJob = { jobId :: String , jobType :: String + , packageName :: String , payload :: String + , createdAt :: String } -insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep { jobId, payload } = +insertPackageJobToJSRep :: DateTime -> InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep now { jobId, payload } = { jobId: un JobId jobId - , jobType: JobType.print case payload of - Operation.Publish _ -> JobType.PublishJob - Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob - Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob + , jobType: JobType.print jobType + , packageName: PackageName.print name , payload: stringifyJson Operation.packageOperationCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now } + where + { jobType, name } = case payload of + Operation.Publish { name } -> { jobType: JobType.PublishJob, name } + Operation.Authenticated { payload: Operation.Unpublish { name } } -> { jobType: JobType.UnpublishJob, name } + Operation.Authenticated { payload: Operation.Transfer { name } } -> { jobType: JobType.TransferJob, name } foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit -- | Insert a new package job, ie. a publish, unpublish, or transfer. 
insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit -insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep +insertPackageJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep now job + +-------------------------------------------------------------------------------- +-- matrix_jobs table type InsertMatrixJob = { jobId :: JobId @@ -225,68 +281,6 @@ foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep -type InsertPackageSetJob = - { jobId :: JobId - , payload :: PackageSetOperation - } - -type JSInsertPackageSetJob = - { jobId :: String - , payload :: String - } - -insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep { jobId, payload } = - { jobId: un JobId jobId - , payload: stringifyJson Operation.packageSetOperationCodec payload - } - -foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit - -insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit -insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep - -type PackageJobDetails = - { jobId :: JobId - , packageName :: PackageName - , packageVersion :: Version - , payload :: PackageOperation - , createdAt :: DateTime - , startedAt :: Maybe DateTime - } - -type JSPackageJobDetails = - { jobId :: String - , packageName :: String - , packageVersion :: String - , payload :: String - , createdAt :: String - , startedAt :: Nullable String - } - -packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do - name <- PackageName.parse packageName - version <- Version.parse 
packageVersion - created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload - pure - { jobId: JobId jobId - , packageName: name - , packageVersion: version - , payload: parsed - , createdAt: created - , startedAt: started - } - -foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) - -selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) -selectNextPackageJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db - pure $ traverse packageJobDetailsFromJSRep maybeJobDetails - type MatrixJobDetails = { jobId :: JobId , packageName :: PackageName @@ -332,6 +326,9 @@ selectNextMatrixJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails +-------------------------------------------------------------------------------- +-- package_set_jobs table + type PackageSetJobDetails = { jobId :: JobId , payload :: PackageSetOperation @@ -365,6 +362,30 @@ selectNextPackageSetJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails +type InsertPackageSetJob = + { jobId :: JobId + , payload :: PackageSetOperation + } + +type JSInsertPackageSetJob = + { jobId :: String + , payload :: String + } + +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit 
+insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +-------------------------------------------------------------------------------- +-- logs table + type JSLogLine = { level :: Int , message :: String diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 125a9a7a3..fa2f70f24 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,7 +1,8 @@ -module Registry.App.Server.JobExecutor where +module Registry.App.Server.JobExecutor (runJobExecutor) where import Registry.App.Prelude hiding ((/)) +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel import Data.DateTime (DateTime) import Effect.Aff (Milliseconds(..)) @@ -21,31 +22,21 @@ data JobDetails | MatrixJob MatrixJobDetails | PackageSetJob PackageSetJobDetails -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = - Db.selectNextPackageJob >>= case _ of - Just job -> pure $ Just $ PackageJob job - Nothing -> Db.selectNextMatrixJob >>= case _ of - Just job -> pure $ Just $ MatrixJob job - Nothing -> Db.selectNextPackageSetJob >>= case _ of - Just job -> pure $ Just $ PackageSetJob job - Nothing -> pure Nothing - runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" Db.deleteIncompleteJobs loop where loop = do - mJob <- findNextAvailableJob - case mJob of + maybeJob <- findNextAvailableJob + case maybeJob of Nothing -> do - liftAff $ Aff.delay (Milliseconds 100.0) + liftAff $ Aff.delay (Milliseconds 1000.0) loop Just job -> do now <- nowUTC - let jobId = case job of PackageJob details -> details.jobId @@ -78,6 +69,12 @@ runJobExecutor env = runEffects env do Db.finishJob { jobId, finishedAt: now, success } loop +findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = runMaybeT + $ (PackageJob <$> MaybeT Db.selectNextPackageJob) + <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) + executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of PackageJob { payload: Operation.Publish p } -> diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 840dab5a0..5ebfd4823 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,10 +4,8 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ -import Data.String as String import Data.UUID.Random as UUID import Effect.Aff as Aff -import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status @@ -47,14 +45,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPackageJob $ Operation.Publish publish + insertPackageJob $ Operation.Publish publish Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body case auth.payload of Operation.Unpublish payload -> do lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload - forkPackageJob $ Operation.Authenticated auth + insertPackageJob $ Operation.Authenticated auth _ -> HTTPurple.badRequest "Expected unpublish operation." 
@@ -63,12 +61,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Transfer payload -> do lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload - forkPackageJob $ Operation.Authenticated auth + insertPackageJob $ Operation.Authenticated auth _ -> HTTPurple.badRequest "Expected transfer operation." + -- TODO return jobs Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: bottom, finishedAt: Nothing, success: true, logs: [] } ] + now <- liftEffect nowUTC + jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: now, finishedAt: Nothing, success: true, logs: [] } ] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel @@ -77,7 +77,7 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound - Right Nothing -> + Right Nothing -> do HTTPurple.notFound Right (Just job) -> jsonOk V1.jobCodec @@ -97,8 +97,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound where - forkPackageJob :: PackageOperation -> ContT Response (Run _) Response - forkPackageJob operation = do + insertPackageJob :: PackageOperation -> ContT Response (Run _) Response + insertPackageJob operation = do lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) jobId <- newJobId lift $ Db.insertPackageJob { jobId, payload: operation } diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql index 2b01deb0b..f4f1e68f3 100644 --- a/db/migrations/20240914171030_create_job_queue_tables.sql +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -14,7 +14,6 @@ CREATE TABLE package_jobs ( jobId TEXT PRIMARY KEY NOT NULL, jobType TEXT NOT NULL, 
packageName TEXT NOT NULL, - packageVersion TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); diff --git a/db/schema.sql b/db/schema.sql index 2ad866068..1baf6403f 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -10,7 +10,6 @@ CREATE TABLE package_jobs ( jobId TEXT PRIMARY KEY NOT NULL, jobType TEXT NOT NULL, packageName TEXT NOT NULL, - packageVersion TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index ddc39b48b..c54bed31e 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,20 +20,15 @@ module Registry.Metadata import Prelude -import Control.Alt ((<|>)) -import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) -import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) -import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor -import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location diff --git a/nix/overlay.nix b/nix/overlay.nix index 2ac1705fb..6c8d5b848 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -183,7 +183,7 @@ in ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-vm6k4DUDWUgPcPeym3YhA1hIg1LbHCDRBSH+7Zs52Uw="; + npmDepsHash = "sha256-Ju7R6Sa+NIHD8fkxLxicqToPLxLD4RM4wvl6bktE/7Y="; installPhase = '' mkdir -p $out @@ -236,7 +236,7 @@ in registry-server = prev.callPackage (buildRegistryPackage { name = "registry-server"; - module = "Registry.App.Server"; + module = "Registry.App.Main"; description = "PureScript Registry API 
server"; src = ../app; spagoLock = app; diff --git a/package-lock.json b/package-lock.json index f4b4a86cc..e22731749 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1615,6 +1615,12 @@ "safer-buffer": "~2.1.0" } }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1781,6 +1787,12 @@ "node": ">=10" } }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, "node_modules/cpu-features": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", @@ -2050,6 +2062,12 @@ "node": ">=8" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/fuse.js": { "version": "6.6.2", "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-6.6.2.tgz", @@ -2075,6 +2093,27 @@ "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to 
v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -2119,6 +2158,17 @@ } ] }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2352,6 +2402,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -2654,20 +2716,6 @@ "node": ">=0.10.0" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - 
"dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/run-applescript": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 8fa9a7070..aa2820e16 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant import Data.Foldable (foldMap) import Data.Formatter.DateTime as Formatter.DateTime import Data.Map as Map -import Data.Newtype (unwrap) import Data.String as String import Data.Time.Duration (Milliseconds(..)) import Effect.Class.Console as Aff diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs index 960484609..ff34107df 100644 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ b/test-utils/src/Registry/Test/E2E/Client.purs @@ -74,14 +74,14 @@ configFromEnv = do -- | Errors that can occur during client operations data ClientError = HttpError { status :: Int, body :: String } - | ParseError String + | ParseError { msg :: String, raw :: String } | Timeout String | NetworkError String printClientError :: ClientError -> String printClientError = case _ of HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError msg -> "Parse Error: " <> msg + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw Timeout msg -> "Timeout: " <> msg NetworkError msg -> "Network Error: " <> msg @@ -102,7 +102,7 @@ get codec config path = runExceptT do body <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse codec body of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: body } Right a -> pure a else throwError $ HttpError { status: response.status, body } @@ -119,7 +119,7 @@ 
post reqCodec resCodec config path reqBody = runExceptT do responseBody <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: responseBody } Right a -> pure a else throwError $ HttpError { status: response.status, body: responseBody } From cdbac72b0a2b89732c8b40aff2ffb1567c835d08 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 10:48:21 +0200 Subject: [PATCH 06/36] WIP matrix builds --- app/src/App/API.purs | 139 +++++++----------- app/src/App/Effect/Db.purs | 10 +- app/src/App/Main.purs | 6 + app/src/App/SQLite.js | 3 +- app/src/App/SQLite.purs | 30 +--- app/src/App/Server/Env.purs | 2 +- app/src/App/Server/JobExecutor.purs | 49 ++++++- app/src/App/Server/MatrixBuilder.purs | 202 ++++++++++++++++++++++++++ app/src/App/Server/Router.purs | 7 +- lib/src/ManifestIndex.purs | 17 ++- lib/src/Solver.purs | 5 +- scripts/src/LegacyImporter.purs | 5 +- 12 files changed, 334 insertions(+), 141 deletions(-) create mode 100644 app/src/App/Server/MatrixBuilder.purs diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8972a8230..1e69a129e 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan , packageSetUpdate + , packageSetUpdate2 , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -83,6 +82,8 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.SQLite (PackageSetJobDetails) +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, 
ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob @@ -116,6 +117,11 @@ import Safe.Coerce as Safe.Coerce type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate2 {} = do + -- TODO: have github call into this + pure unit + -- | Process a package set update. Package set updates are only processed via -- | GitHub and not the HTTP API, so they require access to the GitHub env. packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit @@ -338,7 +344,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe (Map PackageName Range)) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -556,16 +562,17 @@ publish maybeLegacyIndex payload = do , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -573,7 +580,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -590,12 +597,13 @@ publish maybeLegacyIndex payload = do Right _ -> do FS.Extra.remove tmp Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. Nothing -> do Log.info "Verifying the package build plan..." - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions Comment.comment "Verifying unused and/or missing dependencies..." @@ -604,7 +612,7 @@ publish maybeLegacyIndex payload = do -- manifest as needed, but we defer compilation until after this check -- in case the package manifest and resolutions are adjusted. 
let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan validatedResolutions installedResolutions + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] @@ -699,7 +707,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. Run.liftAff $ FS.Extra.remove installedResolutions - installBuildPlan resolutions installedResolutions + MatrixBuilder.installBuildPlan resolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -708,7 +716,7 @@ publish maybeLegacyIndex payload = do case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error Right _ -> pure unit @@ -770,28 +778,35 @@ publish maybeLegacyIndex payload = do , "). If you want to publish documentation, please try again with a later compiler." ] - Comment.comment "Determining all valid compiler versions for this package..." 
- allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } - Just try -> do - found <- findAllCompilers - { source: packageSource - , manifest - , compilers: try - } - pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + -- Note: this only runs for the Legacy Importer. In daily circumstances (i.e. + -- when running the server) this will be taken care of by followup jobs invoking + -- the MatrixBuilder for each compiler version + for_ maybeLegacyIndex \_idx -> do + Comment.comment "Determining all valid compiler versions for this package..." + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version 
newMetadata.published } - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Comment.comment "Wrote completed metadata to the registry!" - Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just (un Manifest manifest).dependencies -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -876,32 +891,30 @@ findAllCompilers . 
{ source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" - Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." 
] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -910,7 +923,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -921,49 +934,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." 
- ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. package-name-major.minor.patch - filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" - filepath = Path.concat [ dependenciesDir, filename ] - Storage.download name version filepath - Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of - Left error -> do - Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error - Except.throw "Failed to unpack dependency tarball, cannot continue." - Right _ -> - Log.debug $ "Unpacked " <> filename - Run.liftAff $ FS.Aff.unlink filepath - Log.debug $ "Installed " <> formatPackageVersion name version - -- | Parse the name and version from a path to a module installed in the standard -- | form: '-...' 
parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version } @@ -1034,7 +1004,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep publishJson <- case compilerOutput of Left error -> - Except.throw $ printCompilerFailure compiler error + Except.throw $ MatrixBuilder.printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1181,13 +1151,6 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } -readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 142149bc0..e30f76f1a 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -36,7 +36,7 @@ data Db a | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | DeleteIncompleteJobs a + | ResetIncompleteJobs a derive instance Functor Db @@ -91,8 +91,8 @@ selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Package selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow -- | Delete all incomplete jobs from the database. -deleteIncompleteJobs :: forall r. Run (DB + r) Unit -deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) +resetIncompleteJobs :: forall r. Run (DB + r) Unit +resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) interpret :: forall r a. 
(Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -148,6 +148,6 @@ handleSQLite env = case _ of Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail pure $ reply success - DeleteIncompleteJobs next -> do - Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db pure next diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index e638cc684..90bef72d0 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -22,6 +22,12 @@ main = do case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + -- TODO: here before starting the executor we should check if we need to run + -- a whole-registry-compiler update. + -- To do that, we ask PursVersions what the compilers are, then we ask in the + -- metadata what the compilers for the latest prelude are, and if the latest + -- compiler is missing we enqueue a "compile everything", so that the executor + -- can pick it up first thing Aff.launchAff_ $ jobExecutor env Router.runRouter env where diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 1e8042cca..50bc82905 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -138,7 +138,8 @@ export const finishJobImpl = (db, args) => { return stmt.run(args); } -export const deleteIncompleteJobsImpl = (db) => { +// TODO this needs to be an update, no deletes +export const resetIncompleteJobsImpl = (db) => { const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); return stmt.run(); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index dd4268451..44a7f27d7 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -3,27 +3,6 @@ -- | the bindings here are still quite low-level and simply exist to provide a 
-- | nicer interface with PureScript types for higher-level modules to use. --- TOMORROW: --- --- * Add the job executor to server startup --- * Move the various job details to the API.V1 module since it'll be returned by the UI --- * Update the router to just create a job when received, and on lookup to return relevant details from the db --- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the --- same authentication requirements as for GitHub today. --- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to --- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to --- do the whole toposort thing. --- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn --- --- LATER --- * Update tests that refer to the DB effect --- * Adjust the integration test(s) to verify we're getting enforced concurrency control --- * Update the GitHub issue module so it only submits a request to the registry and returns --- a job id, rather than actually running the fns directly. Poll for a result still and --- comment when the job completes. --- --- FOLLOWUP --- * Punt on the squash commit until later. module Registry.App.SQLite ( SQLite , ConnectOptions @@ -40,7 +19,7 @@ module Registry.App.SQLite , finishJob , StartJob , startJob - , deleteIncompleteJobs + , resetIncompleteJobs , insertLogLine , selectLogsByJob , PackageJobDetails @@ -172,12 +151,12 @@ finishJobToJSRep { jobId, success, finishedAt } = foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -- TODO: we shouldn't delete them I think? 
just remove the startedAt so they -- can be retried -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -------------------------------------------------------------------------------- -- package_jobs table @@ -260,6 +239,7 @@ insertPackageJob db job = do type InsertMatrixJob = { jobId :: JobId , compilerVersion :: Version + -- TODO this is missing a buncha stuff , payload :: Map PackageName Version } diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 1f6fdc489..07baa935c 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -105,7 +105,7 @@ createServerEnv = do -- because they are stale runs from previous startups of the server. -- We can just remove the jobs, and all the logs belonging to them will be -- removed automatically by the foreign key constraint. 
- liftEffect $ SQLite.deleteIncompleteJobs db + liftEffect $ SQLite.resetIncompleteJobs db pure { debouncer diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index fa2f70f24..5c9e1f883 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,18 +1,25 @@ -module Registry.App.Server.JobExecutor (runJobExecutor) where +module Registry.App.Server.JobExecutor + ( runJobExecutor + , newJobId + ) where import Registry.App.Prelude hiding ((/)) import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel import Data.DateTime (DateTime) +import Data.UUID.Random as UUID import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff +import Registry.API.V1 (JobId(..)) import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex import Registry.Operation as Operation import Run (Run) import Run.Except (EXCEPT) @@ -25,7 +32,7 @@ data JobDetails runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do Log.info "Starting Job Executor" - Db.deleteIncompleteJobs + Db.resetIncompleteJobs loop where loop = do @@ -69,20 +76,46 @@ runJobExecutor env = runEffects env do Db.finishJob { jobId, finishedAt: now, success } loop +-- TODO: here we only get a single package for each operation, but really we should +-- have all of them and toposort them. There is something in ManifestIndex but not +-- sure that's what we need findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe JobDetails) findNextAvailableJob = runMaybeT $ (PackageJob <$> MaybeT Db.selectNextPackageJob) <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish p } -> - API.publish Nothing p + PackageJob { payload: Operation.Publish payload@{ compiler, name, version } } -> do + maybeDependencies <- API.publish Nothing payload + -- The above operation will throw if not successful, and return a map of + -- dependencies of the package only if it has not been published before. + for_ maybeDependencies \dependencies -> do + -- At this point this package has been verified with one compiler only. + -- So we need to enqueue compilation jobs for (1) same package, all the other + -- compilers, and (2) same compiler, all packages that depend on this one + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler, name, version, dependencies, compilerIndex } + samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (samePackageAllCompilers <> sameCompilerAllDependants) \matrixJob -> do + Log.info $ "Enqueuing matrix job" -- TODO print details + jobId <- newJobId + Db.insertMatrixJob { jobId, payload: matrixJob } PackageJob { payload: Operation.Authenticated auth } -> API.authenticated auth - - MatrixJob _details -> - pure unit -- UNIMPLEMENTED + MatrixJob details -> + -- TODO this job should return the success result, because if successful we need + -- to enqueue more matrix jobs: all its dependents for this same compiler version + 
MatrixBuilder.runMatrixJob details PackageSetJob _details -> - pure unit -- UNIMPLEMENTED + -- TODO: need to pass in the package_sets effect + -- API.packageSetUpdate2 details + pure unit diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs new file mode 100644 index 000000000..6a194ae3d --- /dev/null +++ b/app/src/App/Server/MatrixBuilder.purs @@ -0,0 +1,202 @@ +module Registry.App.Server.MatrixBuilder + ( installBuildPlan + , printCompilerFailure + , readCompilerIndex + , runMatrixJob + , solveForAllCompilers + , solveDependantsForCompiler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray +import Data.Map as Map +import Data.Set.NonEmpty as NonEmptySet +import Data.String as String +import Effect.Aff as Aff +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.SQLite (MatrixJobDetails) +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Tmp as Tmp +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata as Metadata +import Registry.PackageName as PackageName +import Registry.Range as Range +import Registry.Solver as Solver +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +runMatrixJob :: forall r. 
MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) Unit +runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan (Map.insert packageName packageVersion buildPlan) installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just compilerVersion + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion + <> ":\n" + <> printCompilerFailure compilerVersion err + Right _ -> do + Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion + + Registry.readMetadata packageName >>= case _ of + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName + Except.throw $ "Cannot run Matrix Job for " <> PackageName.print packageName + Just (Metadata metadata) -> do + let + metadataWithCompilers = metadata + { published = Map.update + ( \publishedMetadata@{ compilers } -> + Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers } + ) + packageVersion + metadata.published + } + Registry.writeMetadata packageName (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.info "Wrote completed metadata to the registry!" + +-- TODO feels like we should be doing this at startup and use the cache instead +-- of reading files all over again +readCompilerIndex :: forall r. 
Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +-- | Install all dependencies indicated by the build plan to the specified +-- | directory. Packages will be installed at 'dir/package-name-x.y.z'. +installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit +installBuildPlan resolutions dependenciesDir = do + Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir + -- We fetch every dependency at its resolved version, unpack the tarball, and + -- store the resulting source code in a specified directory for dependencies. + forWithIndex_ resolutions \name version -> do + let + -- This filename uses the format the directory name will have once + -- unpacked, ie. package-name-major.minor.patch + filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" + filepath = Path.concat [ dependenciesDir, filename ] + Storage.download name version filepath + Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of + Left error -> do + Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error + Except.throw "Failed to unpack dependency tarball, cannot continue." + Right _ -> + Log.debug $ "Unpacked " <> filename + Run.liftAff $ FS.Aff.unlink filepath + Log.debug $ "Installed " <> formatPackageVersion name version + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." 
+ ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" + , "```" + , err + , "```" + ] + +type MatrixSolverData = + { compilerIndex :: Solver.CompilerIndex + , compiler :: Version + , name :: PackageName + , version :: Version + , dependencies :: Map PackageName Range + } + +solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Map Version (Map PackageName Version)) +solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do + -- remove the compiler we tested with from the set of all of them + compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions + newJobs <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name + case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right res@(Tuple solvedCompiler _resolutions) -> case solvedCompiler == target of + true -> pure $ Just res + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print target + , ")." + ] + pure Nothing + pure $ Map.fromFoldable $ Array.catMaybes newJobs + +solveDependantsForCompiler :: forall r. 
MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Map Version (Map PackageName Version)) +solveDependantsForCompiler { compilerIndex, name, version, compiler } = do + manifestIndex <- Registry.readAllManifests + let dependentManifests = ManifestIndex.dependants manifestIndex name version + newJobs <- for dependentManifests \(Manifest manifest) -> do + -- we first verify if we have already attempted this package with this compiler, + -- either in the form of having it in the metadata already, or as a failed compilation + -- (i.e. if we find compilers in the metadata for this version we only check this one + -- if it's newer, because all the previous ones have been tried) + shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of + Nothing -> pure false + Just metadata -> pure $ case Map.lookup version (un Metadata metadata).published of + Nothing -> false + Just { compilers } -> any (_ > compiler) compilers + case shouldAttemptToCompile of + false -> pure Nothing + true -> do + -- if all good then run the solver + Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name + case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print compiler + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right res@(Tuple solvedCompiler _resolutions) -> case compiler == solvedCompiler of + true -> pure $ Just res + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print compiler + , ")." 
+ ] + pure Nothing + pure $ Map.fromFoldable $ Array.catMaybes newJobs diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 5ebfd4823..bdbb1eff5 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -15,6 +15,7 @@ import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) +import Registry.App.Server.JobExecutor as JobExecutor import Registry.Operation (PackageOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName @@ -100,11 +101,7 @@ router { route, method, body } = HTTPurple.usingCont case route, method of insertPackageJob :: PackageOperation -> ContT Response (Run _) Response insertPackageJob operation = do lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) - jobId <- newJobId + jobId <- JobExecutor.newJobId lift $ Db.insertPackageJob { jobId, payload: operation } jsonOk V1.jobCreatedResponseCodec { jobId } - newJobId :: forall m. MonadEffect m => m JobId - newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 4837b49ed..8602d4982 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -7,11 +7,13 @@ -- | https://github.com/purescript/registry-index module Registry.ManifestIndex ( ManifestIndex + , IncludeRanges(..) + , delete + , dependants , empty , fromSet , insert , insertIntoEntryFile - , delete , lookup , maximalIndex , packageEntryDirectory @@ -21,9 +23,8 @@ module Registry.ManifestIndex , readEntryFile , removeFromEntryFile , toMap - , toSortedArray , topologicalSort - , IncludeRanges(..) 
+ , toSortedArray , writeEntryFile ) where @@ -45,7 +46,7 @@ import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..)) import Data.Maybe as Maybe -import Data.Newtype (un) +import Data.Newtype (un, unwrap) import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -66,6 +67,7 @@ import Node.Path as Path import Partial.Unsafe (unsafeCrashWith) import Registry.Manifest (Manifest(..)) import Registry.Manifest as Manifest +import Registry.Operation (packageName) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) @@ -199,6 +201,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. 
-solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version)) solveWithCompiler pursRange (CompilerIndex index) required = do let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } - let pursVersion = Map.lookup purs results + let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results pure $ Tuple pursVersion $ Map.delete purs results -- | Data from the registry index, listing dependencies for each version of diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 0fdc94a06..783ee353c 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -76,6 +76,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit @@ -300,7 +301,7 @@ runLegacyImport logs = do Just ref -> pure ref Log.debug "Building dependency index with compiler versions..." - compilerIndex <- API.readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted eitherResolutions <- do @@ -405,7 +406,7 @@ runLegacyImport logs = do Log.debug "Downloading dependencies..." 
let installDir = Path.concat [ tmp, ".registry" ] FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir + MatrixBuilder.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Trying compilers one-by-one..." selected <- findFirstCompiler From 253f85c704a93cb03c5ce57d19f6931e0ffa1272 Mon Sep 17 00:00:00 2001 From: pacchettibotti Date: Fri, 12 Dec 2025 12:16:21 +0100 Subject: [PATCH 07/36] add missing version to publish fixtures the publishCodec requires a version file but the test fixtures weren't updated to include it --- app/fixtures/addition_issue_created.json | 2 +- app/fixtures/update_issue_comment.json | 2 +- app/test/App/GitHubIssue.purs | 2 ++ lib/test/Registry/Operation.purs | 6 ++++-- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json index d0b205555..b0aa93e6c 100644 --- a/app/fixtures/addition_issue_created.json +++ b/app/fixtures/addition_issue_created.json @@ -5,7 +5,7 @@ "assignee": null, "assignees": [], "author_association": "CONTRIBUTOR", - "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", + "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }", "closed_at": null, "comments": 0, "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments", diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json index 5400a7c2e..c5673c4da 100644 --- a/app/fixtures/update_issue_comment.json +++ b/app/fixtures/update_issue_comment.json @@ -2,7 +2,7 @@ "action": "created", "comment": { "author_association": 
"MEMBER", - "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", + "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```", "created_at": "2021-03-09T02:03:56Z", "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839", "id": 793265839, diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 8276bf708..d2c6baf18 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -106,6 +106,7 @@ preludeAdditionString = { "name": "prelude", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" @@ -124,6 +125,7 @@ packageNameTooLongString = { "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong", "ref": "v5.0.0", + "version": "5.0.0", "location": { "githubOwner": "purescript", "githubRepo": "purescript-prelude" diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs index 2ccb4075a..1400e70ee 100644 --- a/lib/test/Registry/Operation.purs +++ b/lib/test/Registry/Operation.purs @@ -54,7 +54,8 @@ minimalPublish = { "compiler": "0.15.6", "name": "my-package", - "ref": "v1.0.0" + "ref": "v1.0.0", + "version": "1.0.0" }""" fullPublish :: String @@ -67,7 +68,8 @@ fullPublish = "subdir": "core" }, "name": "my-package", - "ref": "c23snabhsrib39" + "ref": "c23snabhsrib39", + "version": "1.0.0" }""" unpublish :: String From 13eaf3a2e5225b50c3ad5236ba5e28ca18284768 Mon Sep 17 00:00:00 2001 From: pacchettibotti Date: Fri, 12 Dec 2025 12:21:58 +0100 Subject: [PATCH 08/36] Add missing packageName and packageVersion to InsertMatrixJob The JS insertMatrixJobImpl expects columns [jobId, packageName, packageVersion, 
compilerVersion, payload] but the PureScript types were missing packageName and packageVersion --- app/src/App/SQLite.purs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 44a7f27d7..1485697c5 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -238,6 +238,8 @@ insertPackageJob db job = do type InsertMatrixJob = { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version , compilerVersion :: Version -- TODO this is missing a buncha stuff , payload :: Map PackageName Version @@ -245,6 +247,8 @@ type InsertMatrixJob = type JSInsertMatrixJob = { jobId :: String + , packageName :: String + , packageVersion :: String , compilerVersion :: String , payload :: String } @@ -252,6 +256,8 @@ type JSInsertMatrixJob = insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob insertMatrixJobToJSRep { jobId, compilerVersion, payload } = { jobId: un JobId jobId + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } From 301d3488dcfb081d60bbe7d1ec4b5d04f432faa5 Mon Sep 17 00:00:00 2001 From: pacchettibotti Date: Fri, 12 Dec 2025 12:22:29 +0100 Subject: [PATCH 09/36] Fix finishedAt timestamp to capture time after job execution --- app/src/App/Server/JobExecutor.purs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 5c9e1f883..cd9152ea2 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -73,7 +73,8 @@ runJobExecutor env = runEffects env do Log.info $ "Job " <> unwrap jobId <> " succeeded." 
pure true - Db.finishJob { jobId, finishedAt: now, success } + finishedAt <- nowUTC + Db.finishJob { jobId, finishedAt, success } loop -- TODO: here we only get a single package for each operation, but really we should From 0a1399568c83cc7525a7b36570847b5252fa4c2e Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 17:03:57 +0200 Subject: [PATCH 10/36] Implement matrix jobs, and the recursive enqueuing of new ones --- app/src/App/GitHubIssue.purs | 2 +- app/src/App/SQLite.purs | 21 ++++++---- app/src/App/Server/JobExecutor.purs | 58 ++++++++++++++++++++++----- app/src/App/Server/MatrixBuilder.purs | 36 ++++++++++++----- app/src/App/Server/Router.purs | 3 +- app/test/App/API.purs | 6 +-- lib/src/ManifestIndex.purs | 3 +- scripts/src/PackageDeleter.purs | 2 +- 8 files changed, 93 insertions(+), 38 deletions(-) diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 56422ab64..3764398cf 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -58,7 +58,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish Nothing payload + void $ API.publish Nothing payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 1485697c5..153993a44 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -241,21 +241,22 @@ type InsertMatrixJob = , packageName :: PackageName , packageVersion :: Version , compilerVersion :: Version - -- TODO this is missing a buncha stuff , payload :: Map PackageName Version } type JSInsertMatrixJob = { jobId :: String + , createdAt :: String , packageName :: String , packageVersion :: String , compilerVersion :: String , payload :: String } -insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep { jobId, compilerVersion, 
payload } = +insertMatrixJobToJSRep :: DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep now { jobId, packageName, packageVersion, compilerVersion, payload } = { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now , packageName: PackageName.print packageName , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion @@ -265,7 +266,9 @@ insertMatrixJobToJSRep { jobId, compilerVersion, payload } = foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit -insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep +insertMatrixJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep now job type MatrixJobDetails = { jobId :: JobId @@ -355,19 +358,23 @@ type InsertPackageSetJob = type JSInsertPackageSetJob = { jobId :: String + , createdAt :: String , payload :: String } -insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep { jobId, payload } = +insertPackageSetJobToJSRep :: DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep now { jobId, payload } = { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now , payload: stringifyJson Operation.packageSetOperationCodec payload } foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit -insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep +insertPackageSetJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep now job -------------------------------------------------------------------------------- -- logs table diff --git 
a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index cd9152ea2..30975d7f2 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -7,7 +7,9 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel +import Data.Array as Array import Data.DateTime (DateTime) +import Data.Set as Set import Data.UUID.Random as UUID import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff @@ -19,8 +21,9 @@ import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder -import Registry.ManifestIndex as ManifestIndex import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Registry.Version as Version import Run (Run) import Run.Except (EXCEPT) @@ -93,7 +96,7 @@ newJobId = do executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish payload@{ compiler, name, version } } -> do + PackageJob { payload: Operation.Publish payload@{ name, version } } -> do maybeDependencies <- API.publish Nothing payload -- The above operation will throw if not successful, and return a map of -- dependencies of the package only if it has not been published before. 
@@ -103,19 +106,52 @@ executeJob _ = case _ of -- compilers, and (2) same compiler, all packages that depend on this one -- TODO here we are building the compiler index, but we should really cache it compilerIndex <- MatrixBuilder.readCompilerIndex - let solverData = { compiler, name, version, dependencies, compilerIndex } + let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex } samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData - for (samePackageAllCompilers <> sameCompilerAllDependants) \matrixJob -> do - Log.info $ "Enqueuing matrix job" -- TODO print details - jobId <- newJobId - Db.insertMatrixJob { jobId, payload: matrixJob } + for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + jobId <- newJobId + Db.insertMatrixJob + { jobId + , payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } PackageJob { payload: Operation.Authenticated auth } -> API.authenticated auth - MatrixJob details -> - -- TODO this job should return the success result, because if successful we need - -- to enqueue more matrix jobs: all its dependents for this same compiler version - MatrixBuilder.runMatrixJob details + MatrixJob details@{ packageName, packageVersion } -> do + maybeDependencies <- MatrixBuilder.runMatrixJob details + -- Unlike the publishing case, after verifying a compilation here we only need + -- to followup with trying to compile the packages that depend on this one + for_ maybeDependencies \dependencies -> do + -- TODO here we are building the compiler index, 
but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex } + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + jobId <- newJobId + Db.insertMatrixJob + { jobId + , payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } PackageSetJob _details -> -- TODO: need to pass in the package_sets effect -- API.packageSetUpdate2 details diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 6a194ae3d..13097c2a6 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -12,6 +12,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.Array.NonEmpty as NonEmptyArray import Data.Map as Map +import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Effect.Aff as Aff @@ -41,7 +42,7 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except -runMatrixJob :: forall r. MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) Unit +runMatrixJob :: forall r. 
MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] @@ -58,13 +59,14 @@ runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion <> ":\n" <> printCompilerFailure compilerVersion err + pure Nothing Right _ -> do Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion Registry.readMetadata packageName >>= case _ of Nothing -> do Log.error $ "No existing metadata for " <> PackageName.print packageName - Except.throw $ "Cannot run Matrix Job for " <> PackageName.print packageName + pure Nothing Just (Metadata metadata) -> do let metadataWithCompilers = metadata @@ -79,6 +81,11 @@ runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) Log.info "Wrote completed metadata to the registry!" + Registry.readManifest packageName packageVersion >>= case _ of + Just (Manifest manifest) -> pure (Just manifest.dependencies) + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName <> "@" <> Version.print packageVersion + pure Nothing -- TODO feels like we should be doing this at startup and use the cache instead -- of reading files all over again @@ -140,8 +147,15 @@ type MatrixSolverData = , dependencies :: Map PackageName Range } -solveForAllCompilers :: forall r. 
MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Map Version (Map PackageName Version)) -solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do +type MatrixSolverResult = + { name :: PackageName + , version :: Version + , compiler :: Version + , resolutions :: Map PackageName Version + } + +solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult) +solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do -- remove the compiler we tested with from the set of all of them compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions newJobs <- for compilers \target -> do @@ -151,8 +165,8 @@ solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do Log.info $ "Failed to solve with compiler " <> Version.print target -- Log.debug $ Solver.printSolverError solverErrors pure Nothing - Right res@(Tuple solvedCompiler _resolutions) -> case solvedCompiler == target of - true -> pure $ Just res + Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of + true -> pure $ Just { compiler: target, resolutions, name, version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" @@ -162,9 +176,9 @@ solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do , ")." ] pure Nothing - pure $ Map.fromFoldable $ Array.catMaybes newJobs + pure $ Set.fromFoldable $ Array.catMaybes newJobs -solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Map Version (Map PackageName Version)) +solveDependantsForCompiler :: forall r. 
MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult) solveDependantsForCompiler { compilerIndex, name, version, compiler } = do manifestIndex <- Registry.readAllManifests let dependentManifests = ManifestIndex.dependants manifestIndex name version @@ -188,8 +202,8 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do Log.info $ "Failed to solve with compiler " <> Version.print compiler -- Log.debug $ Solver.printSolverError solverErrors pure Nothing - Right res@(Tuple solvedCompiler _resolutions) -> case compiler == solvedCompiler of - true -> pure $ Just res + Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of + true -> pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" @@ -199,4 +213,4 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do , ")." 
] pure Nothing - pure $ Map.fromFoldable $ Array.catMaybes newJobs + pure $ Set.fromFoldable $ Array.catMaybes newJobs diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index bdbb1eff5..c95fbcf8c 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,12 +4,11 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ -import Data.UUID.Random as UUID import Effect.Aff as Aff import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status -import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) +import Registry.API.V1 (LogLevel(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 36a2e61a2..63dcccc3d 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -102,7 +102,7 @@ spec = do -- First, we publish the package. Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -163,7 +163,7 @@ spec = do , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. 
@@ -178,7 +178,7 @@ spec = do , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) transitivePublishArgs + void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs -- We should verify the resulting metadata file is correct Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index 8602d4982..b5ecd390f 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -46,7 +46,7 @@ import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..)) import Data.Maybe as Maybe -import Data.Newtype (un, unwrap) +import Data.Newtype (un) import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -67,7 +67,6 @@ import Node.Path as Path import Partial.Unsafe (unsafeCrashWith) import Registry.Manifest (Manifest(..)) import Registry.Manifest as Manifest -import Registry.Operation (packageName) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index db9b54d23..e0de363ca 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -239,7 +239,7 @@ deleteVersion arguments name version = do Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" Just (Right specificPackageMetadata) -> do -- Obtains `newMetadata` via cache - API.publish Nothing + void $ API.publish Nothing { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref From 50cd04be517bdf64524c8379dc60a7ca80cc6e31 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 17:25:00 +0100 Subject: [PATCH 11/36] Reset incomplete jobs so they can be picked up again --- app/src/App/SQLite.js | 10 ++++++++-- app/src/App/SQLite.purs | 2 -- 2 files changed, 8 insertions(+), 4 
deletions(-) diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 50bc82905..9fbbeeec9 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -138,9 +138,15 @@ export const finishJobImpl = (db, args) => { return stmt.run(args); } -// TODO this needs to be an update, no deletes +// TODO I think we should keep track of this somehow. So either we save +// how many times this is being retried and give up at some point, notifying +// the trustees, or we notify right away for any retry so we can look at them export const resetIncompleteJobsImpl = (db) => { - const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = NULL + WHERE finishedAt IS NULL + AND startedAt IS NOT NULL`); return stmt.run(); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 153993a44..5e4d98293 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -153,8 +153,6 @@ foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit --- TODO: we shouldn't delete them I think? 
just remove the startedAt so they --- can be retried resetIncompleteJobs :: SQLite -> Effect Unit resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl From 6a57d75ea0e29b31ac76b4c22a169d01b5b06c16 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 23:47:12 +0100 Subject: [PATCH 12/36] Run matrix jobs for the whole registry when finding a new compiler version --- app/src/App/Effect/Db.purs | 36 ++++++++--------- app/src/App/Main.purs | 6 --- app/src/App/SQLite.purs | 45 ++++++++++++--------- app/src/App/Server/JobExecutor.purs | 56 ++++++++++++++++++++------- app/src/App/Server/MatrixBuilder.purs | 16 +++++++- app/src/App/Server/Router.purs | 4 +- lib/src/ManifestIndex.purs | 12 ++++-- 7 files changed, 111 insertions(+), 64 deletions(-) diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index e30f76f1a..1e90a8163 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -25,9 +25,9 @@ import Run.Except as Except -- be part of app code we want to test. data Db a - = InsertPackageJob InsertPackageJob a - | InsertMatrixJob InsertMatrixJob a - | InsertPackageSetJob InsertPackageSetJob a + = InsertPackageJob InsertPackageJob (JobId -> a) + | InsertMatrixJob InsertMatrixJob (JobId -> a) + | InsertPackageSetJob InsertPackageSetJob (JobId -> a) | FinishJob FinishJob a | StartJob StartJob a | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) @@ -63,16 +63,16 @@ selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow -- | Insert a new package job into the database. -insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit -insertPackageJob job = Run.lift _db (InsertPackageJob job unit) +insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) JobId +insertPackageJob job = Run.lift _db (InsertPackageJob job identity) -- | Insert a new matrix job into the database. 
-insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit -insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit) +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId +insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity) -- | Insert a new package set job into the database. -insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit -insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit) +insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity) -- | Start a job in the database. startJob :: forall r. StartJob -> Run (DB + r) Unit @@ -102,17 +102,17 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertPackageJob job next -> do - Run.liftEffect $ SQLite.insertPackageJob env.db job - pure next + InsertPackageJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPackageJob env.db job + pure $ reply result - InsertMatrixJob job next -> do - Run.liftEffect $ SQLite.insertMatrixJob env.db job - pure next + InsertMatrixJob job reply -> do + result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure $ reply result - InsertPackageSetJob job next -> do - Run.liftEffect $ SQLite.insertPackageSetJob env.db job - pure next + InsertPackageSetJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job + pure $ reply result FinishJob job next -> do Run.liftEffect $ SQLite.finishJob env.db job diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index 90bef72d0..e638cc684 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -22,12 +22,6 @@ main = do case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" Just healthchecksUrl -> 
Aff.launchAff_ $ healthcheck healthchecksUrl - -- TODO: here before starting the executor we should check if we need to run - -- a whole-registry-compiler update. - -- To do that, we ask PursVersions what the compilers are, then we ask in the - -- metadata what the compilers for the latest prelude are, and if the latest - -- compiler is missing we enqueue a "compile everything", so that the executor - -- can pick it up first thing Aff.launchAff_ $ jobExecutor env Router.runRouter env where diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 5e4d98293..09f91f612 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -36,6 +36,7 @@ import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable +import Data.UUID.Random as UUID import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried import Registry.API.V1 (JobId(..), LogLevel, LogLine) @@ -156,6 +157,11 @@ foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit resetIncompleteJobs :: SQLite -> Effect Unit resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl +newJobId :: forall m. 
MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + -------------------------------------------------------------------------------- -- package_jobs table @@ -197,8 +203,7 @@ selectNextPackageJob db = do pure $ traverse packageJobDetailsFromJSRep maybeJobDetails type InsertPackageJob = - { jobId :: JobId - , payload :: PackageOperation + { payload :: PackageOperation } type JSInsertPackageJob = @@ -209,8 +214,8 @@ type JSInsertPackageJob = , createdAt :: String } -insertPackageJobToJSRep :: DateTime -> InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep now { jobId, payload } = +insertPackageJobToJSRep :: JobId -> DateTime -> InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep jobId now { payload } = { jobId: un JobId jobId , jobType: JobType.print jobType , packageName: PackageName.print name @@ -226,17 +231,18 @@ insertPackageJobToJSRep now { jobId, payload } = foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit -- | Insert a new package job, ie. a publish, unpublish, or transfer. 
-insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit +insertPackageJob :: SQLite -> InsertPackageJob -> Effect JobId insertPackageJob db job = do + jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep now job + Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep jobId now job + pure jobId -------------------------------------------------------------------------------- -- matrix_jobs table type InsertMatrixJob = - { jobId :: JobId - , packageName :: PackageName + { packageName :: PackageName , packageVersion :: Version , compilerVersion :: Version , payload :: Map PackageName Version @@ -251,8 +257,8 @@ type JSInsertMatrixJob = , payload :: String } -insertMatrixJobToJSRep :: DateTime -> InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep now { jobId, packageName, packageVersion, compilerVersion, payload } = +insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } = { jobId: un JobId jobId , createdAt: DateTime.format Internal.Format.iso8601DateTime now , packageName: PackageName.print packageName @@ -263,10 +269,12 @@ insertMatrixJobToJSRep now { jobId, packageName, packageVersion, compilerVersion foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit -insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId insertMatrixJob db job = do + jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep now job + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job + pure jobId type MatrixJobDetails = { jobId :: JobId @@ -350,8 +358,7 @@ selectNextPackageSetJob db = do pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails type InsertPackageSetJob = - { jobId :: JobId - , payload :: 
PackageSetOperation + { payload :: PackageSetOperation } type JSInsertPackageSetJob = @@ -360,8 +367,8 @@ type JSInsertPackageSetJob = , payload :: String } -insertPackageSetJobToJSRep :: DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep now { jobId, payload } = +insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep jobId now { payload } = { jobId: un JobId jobId , createdAt: DateTime.format Internal.Format.iso8601DateTime now , payload: stringifyJson Operation.packageSetOperationCodec payload @@ -369,10 +376,12 @@ insertPackageSetJobToJSRep now { jobId, payload } = foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit -insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect JobId insertPackageSetJob db job = do + jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep now job + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job + pure jobId -------------------------------------------------------------------------------- -- logs table diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 30975d7f2..63a5cbddd 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,6 +1,5 @@ module Registry.App.Server.JobExecutor ( runJobExecutor - , newJobId ) where import Registry.App.Prelude hiding ((/)) @@ -9,18 +8,21 @@ import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel import Data.Array as Array import Data.DateTime (DateTime) +import Data.Map as Map import Data.Set as Set -import Data.UUID.Random as UUID import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff -import Registry.API.V1 (JobId(..)) import Registry.App.API as API import 
Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex import Registry.Operation as Operation import Registry.PackageName as PackageName import Registry.Version as Version @@ -35,6 +37,18 @@ data JobDetails runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do Log.info "Starting Job Executor" + -- Before starting the executor we check if we need to run a whole-registry + -- compiler update: whenever a new compiler is published we need to see which + -- packages are compatible with it; this is a responsibility of the MatrixBuilder, + -- but it needs to be triggered to know there's a new version out. + -- To do that, we ask PursVersions what the compilers are, then we look for + -- the compatibility list of the latest `prelude` version. If the new compiler + -- is missing, then we know that we have not attempted to check compatibility + -- with it (since the latest `prelude` has to be compatible by definition), + -- and we can enqueue a "compile everything" here, which will be the first + -- thing that the JobExecutor picks up + void $ MatrixBuilder.checkIfNewCompiler + >>= traverse upgradeRegistryToNewCompiler Db.resetIncompleteJobs loop where @@ -82,18 +96,13 @@ runJobExecutor env = runEffects env do -- TODO: here we only get a single package for each operation, but really we should -- have all of them and toposort them. There is something in ManifestIndex but not --- sure that's what we need +-- sure that's what we need findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe JobDetails) findNextAvailableJob = runMaybeT $ (PackageJob <$> MaybeT Db.selectNextPackageJob) <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) -newJobId :: forall m. MonadEffect m => m JobId -newJobId = do - id <- UUID.make - pure $ JobId $ UUID.toString id - executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of PackageJob { payload: Operation.Publish payload@{ name, version } } -> do @@ -117,10 +126,8 @@ executeJob _ = case _ of <> PackageName.print solvedPackage <> "@" <> Version.print solvedVersion - jobId <- newJobId Db.insertMatrixJob - { jobId - , payload: resolutions + { payload: resolutions , compilerVersion: solvedCompiler , packageName: solvedPackage , packageVersion: solvedVersion @@ -144,10 +151,8 @@ executeJob _ = case _ of <> PackageName.print solvedPackage <> "@" <> Version.print solvedVersion - jobId <- newJobId Db.insertMatrixJob - { jobId - , payload: resolutions + { payload: resolutions , compilerVersion: solvedCompiler , packageName: solvedPackage , packageVersion: solvedVersion @@ -156,3 +161,24 @@ executeJob _ = case _ of -- TODO: need to pass in the package_sets effect -- API.packageSetUpdate2 details pure unit + +upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit +upgradeRegistryToNewCompiler newCompilerVersion = do + allManifests <- Registry.readAllManifests + for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do + -- Note: we enqueue compilation jobs only for packages with no dependencies, + -- because from them we should be able to reach the whole of the registry, + -- as they complete new jobs for their dependants will be queued up. 
+ when (not (Map.isEmpty manifest.dependencies)) do + Log.info $ "Enqueuing matrix job for _new_ compiler " + <> Version.print newCompilerVersion + <> ", package " + <> PackageName.print manifest.name + <> "@" + <> Version.print manifest.version + void $ Db.insertMatrixJob + { payload: Map.empty + , compilerVersion: newCompilerVersion + , packageName: manifest.name + , packageVersion: manifest.version + } diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 13097c2a6..7ae98d972 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -1,5 +1,6 @@ module Registry.App.Server.MatrixBuilder - ( installBuildPlan + ( checkIfNewCompiler + , installBuildPlan , printCompilerFailure , readCompilerIndex , runMatrixJob @@ -214,3 +215,16 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do ] pure Nothing pure $ Set.fromFoldable $ Array.catMaybes newJobs + +checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version) +checkIfNewCompiler = do + Log.info "Checking if there's a new compiler in town..." 
+ latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions + maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude" + pure $ maybeMetadata >>= \(Metadata metadata) -> + Map.findMax metadata.published + >>= \{ key: _version, value: { compilers } } -> do + case all (_ < latestCompiler) compilers of + -- all compilers compatible with the latest prelude are older than this one + true -> Just latestCompiler + false -> Nothing diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index c95fbcf8c..9a3f08074 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -14,7 +14,6 @@ import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) -import Registry.App.Server.JobExecutor as JobExecutor import Registry.Operation (PackageOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName @@ -100,7 +99,6 @@ router { route, method, body } = HTTPurple.usingCont case route, method of insertPackageJob :: PackageOperation -> ContT Response (Run _) Response insertPackageJob operation = do lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) - jobId <- JobExecutor.newJobId - lift $ Db.insertPackageJob { jobId, payload: operation } + jobId <- lift $ Db.insertPackageJob { payload: operation } jsonOk V1.jobCreatedResponseCodec { jobId } diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs index b5ecd390f..eb3b08480 100644 --- a/lib/src/ManifestIndex.purs +++ b/lib/src/ManifestIndex.purs @@ -22,6 +22,7 @@ module Registry.ManifestIndex , printEntry , readEntryFile , removeFromEntryFile + , toArray , toMap , topologicalSort , toSortedArray @@ -88,13 +89,18 @@ empty = ManifestIndex Map.empty toMap :: ManifestIndex -> Map PackageName (Map Version Manifest) 
toMap (ManifestIndex index) = index --- | Produce an array of manifests topologically sorted by dependencies. -toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest -toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do +-- | Produce an array of all the manifests +toArray :: ManifestIndex -> Array Manifest +toArray (ManifestIndex index) = do Tuple _ versions <- Map.toUnfoldableUnordered index Tuple _ manifest <- Map.toUnfoldableUnordered versions [ manifest ] +-- | Produce an array of all the manifests, topologically sorted by dependencies. +toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest +toSortedArray includeRanges index = + topologicalSort includeRanges $ Set.fromFoldable $ toArray index + -- | Look up a package version's manifest in the manifest index. lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest lookup name version (ManifestIndex index) = From f1a602b1a9323b3fce4e20dcb0e290986ba78c50 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 19 Dec 2025 10:45:23 -0500 Subject: [PATCH 13/36] resolve build issues --- app/src/App/Server/Router.purs | 5 +- nix/overlay.nix | 5 +- package-lock.json | 1667 +++++++++++--------------------- package.json | 5 +- 4 files changed, 573 insertions(+), 1109 deletions(-) diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 9a3f08074..f371d1e71 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -66,8 +66,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of -- TODO return jobs Jobs, Get -> do - now <- liftEffect nowUTC - jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: now, finishedAt: Nothing, success: true, logs: [] } ] + _now <- liftEffect nowUTC + jsonOk (CJ.array V1.jobCodec) [] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel @@ -101,4 +101,3 @@ router { route, method, 
body } = HTTPurple.usingCont case route, method of lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) jobId <- lift $ Db.insertPackageJob { payload: operation } jsonOk V1.jobCreatedResponseCodec { jobId } - diff --git a/nix/overlay.nix b/nix/overlay.nix index 8f2d68973..7eed2916d 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -181,8 +181,9 @@ in ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; - # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs="; + # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the + # hash from the error + npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI="; installPhase = '' mkdir -p $out diff --git a/package-lock.json b/package-lock.json index fc22de8a0..5c5c89ccd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,10 +10,7 @@ "app", "foreign", "lib" - ], - "dependencies": { - "spago": "^0.93.19" - } + ] }, "app": { "name": "registry-app", @@ -256,65 +253,65 @@ } }, "node_modules/@aws-sdk/client-s3": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz", - "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz", + "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha1-browser": "5.2.0", "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-node": "3.948.0", - "@aws-sdk/middleware-bucket-endpoint": "3.936.0", - "@aws-sdk/middleware-expect-continue": "3.936.0", - "@aws-sdk/middleware-flexible-checksums": 
"3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-location-constraint": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/middleware-ssec": "3.936.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", + "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + 
"@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" }, "engines": { @@ -322,47 
+319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + 
"@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -371,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": "sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + 
"version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -395,15 +392,15 @@ } }, "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -411,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": 
"sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -432,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - 
"@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -457,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + 
"@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -476,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -499,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -516,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - "integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + 
"@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -535,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": "sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -553,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": "sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": 
"^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -571,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -586,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": 
"3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -610,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": "sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -625,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", 
"license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -639,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -653,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": "sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -669,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -694,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - 
"@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -708,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -726,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - 
"@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", 
"@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -775,15 +772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + "integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -791,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -808,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": "sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -826,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": 
"3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -839,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -851,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -867,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -879,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": 
"^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -915,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ -1172,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1210,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + 
"integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1227,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1248,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": 
"https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1264,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1279,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": 
"^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1293,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1306,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": "sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1320,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": 
"sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1334,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1350,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": 
"Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1365,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": "sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1380,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": "sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1394,12 +1391,12 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": 
"https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1419,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1433,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1447,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": 
"sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + "integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1466,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": "sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + 
"@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1486,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1500,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1513,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1528,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": "sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1544,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": 
"Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1557,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1570,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": "sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1584,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": 
"sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1597,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1622,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": 
"https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1641,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": "sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1659,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": "sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": 
"https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1671,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1748,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1763,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": 
"https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": "sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1781,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + "integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1807,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": 
"sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1820,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1834,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + "@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", 
"@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1878,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1903,12 +1900,6 @@ "node": ">=18.0.0" } }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "license": "Python-2.0" - }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1918,12 +1909,6 @@ "safer-buffer": "~2.1.0" } }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "license": "MIT" - }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1973,15 +1958,6 @@ "node": "20.x || 22.x || 23.x || 24.x || 25.x" } }, - "node_modules/big-integer": { - "version": "1.6.52", - "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", - "integrity": 
"sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", - "license": "Unlicense", - "engines": { - "node": ">=0.6" - } - }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -2014,27 +1990,6 @@ "integrity": "sha512-OHawaAbjwx6rqICCKgSG0SAnT05bzd7ppyKLVUITZpANBaaMFBAsaNkto3LoQ31tyFP5kNujE8Cdx85G9VzOkw==", "license": "MIT" }, - "node_modules/bplist-parser": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", - "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", - "license": "MIT", - "dependencies": { - "big-integer": "^1.6.44" - }, - "engines": { - "node": ">= 5.10.0" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -2080,21 +2035,6 @@ "node": ">=10.0.0" } }, - "node_modules/bundle-name": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", - "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", - "license": "MIT", - "dependencies": { - "run-applescript": "^5.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", @@ -2104,12 +2044,6 @@ "node": ">=18" } }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "license": "MIT" - }, "node_modules/cpu-features": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", @@ -2124,20 +2058,6 @@ "node": ">=10.0.0" } }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -2180,50 +2100,6 @@ "once": "^1.4.0" } }, - "node_modules/entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", - "license": "BSD-2-Clause", - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/env-paths": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", - "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/execa": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", - "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", - "license": "MIT", - 
"dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.1", - "human-signals": "^4.3.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^3.0.7", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": "^14.18.0 || ^16.14.0 || >=18.0.0" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -2317,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", @@ -2330,12 +2206,6 @@ "node": ">=14.14" } }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "license": "ISC" - }, "node_modules/fuse.js": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-7.1.0.tgz", @@ -2345,45 +2215,12 @@ "node": ">=10" } }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, 
"node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", "license": "MIT" }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -2402,15 +2239,6 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "license": "ISC" }, - "node_modules/human-signals": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", - "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=14.18.0" - } - }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -2431,17 +2259,6 @@ ], "license": "BSD-3-Clause" }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2454,21 +2271,6 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "license": "ISC" }, - "node_modules/is-docker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", - "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", - "license": "MIT", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -2490,24 +2292,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-inside-container": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", - "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", - "license": "MIT", - "dependencies": { - "is-docker": "^3.0.0" - }, - "bin": { - "is-inside-container": "cli.js" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -2560,18 +2344,6 @@ "node": ">=8.6" } }, - "node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": 
"sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2584,18 +2356,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -2666,56 +2426,6 @@ "wrappy": "1" } }, - "node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/open": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", - "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", - "license": "MIT", - "dependencies": { - "default-browser": "^4.0.0", - "define-lazy-prop": "^3.0.0", - "is-inside-container": "^1.0.0", - "is-wsl": "^2.2.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2764,15 +2474,6 @@ "once": "^1.3.1" } }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2844,110 +2545,6 @@ "node": ">=0.10.0" } }, - "node_modules/run-applescript": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", - "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", - "license": "MIT", - "dependencies": { - "execa": "^5.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": 
"^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/run-applescript/node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/run-applescript/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/run-applescript/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/run-applescript/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": 
"sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -3009,33 +2606,6 @@ "node": ">=10" } }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "license": "ISC" - }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -3081,34 +2651,6 @@ "simple-concat": "^1.0.0" } }, - "node_modules/spago": { - "version": 
"0.93.19", - "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", - "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", - "license": "BSD-3-Clause", - "dependencies": { - "better-sqlite3": "^8.6.0", - "env-paths": "^3.0.0", - "fast-glob": "^3.2.11", - "fs-extra": "^10.0.0", - "fuse.js": "^6.5.3", - "glob": "^7.1.6", - "markdown-it": "^12.0.4", - "open": "^9.1.0", - "punycode": "^2.3.0", - "semver": "^7.3.5", - "spdx-expression-parse": "^3.0.1", - "ssh2": "^1.14.0", - "supports-color": "^9.2.3", - "tar": "^6.1.11", - "tmp": "^0.2.1", - "xhr2": "^0.2.1", - "yaml": "^2.1.1" - }, - "bin": { - "spago": "bin/bundle.js" - } - }, "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", @@ -3157,18 +2699,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -3190,18 +2720,6 @@ ], "license": "MIT" }, - "node_modules/supports-color": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", - "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, "node_modules/tar": { "version": "7.5.2", "resolved": 
"https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", @@ -3252,18 +2770,6 @@ "node": ">=6" } }, - "node_modules/titleize": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", - "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/tmp": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", @@ -3309,12 +2815,6 @@ "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", "license": "Unlicense" }, - "node_modules/uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", - "license": "MIT" - }, "node_modules/universal-user-agent": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", @@ -3330,51 +2830,18 @@ "node": ">= 10.0.0" } }, - "node_modules/untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - 
"license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, - "node_modules/xhr2": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", - "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, "node_modules/yallist": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", diff --git a/package.json b/package.json index 5066e42c0..76bc4e96e 100644 --- a/package.json +++ b/package.json @@ -6,8 +6,5 @@ "app", "foreign", "lib" - ], - "dependencies": { - "spago": "^0.93.19" - } + ] } From f94399117f30304d6608df4bf8c65fe9adb06460 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 19 Dec 2025 10:58:11 -0500 Subject: [PATCH 14/36] fix smoke test --- nix/test/config.nix | 65 +++++++++++++++++++++++---------------------- nix/test/smoke.nix | 34 ++++++++++++++++++++++-- 2 files changed, 65 insertions(+), 34 deletions(-) diff --git a/nix/test/config.nix b/nix/test/config.nix index 66813fe5b..454747b30 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -357,38 +357,39 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" 
- export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . - GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ - GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ - git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do 
+ cd "$repo" + git init -b master && git add . + gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..1365d8283 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -54,6 +58,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. 
Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +74,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.gitMockOverlay ]; virtualisation = { graphics = false; @@ -70,12 +83,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } From ea420fa07285a2a4b9a772af5769ceb712a47762 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Mon, 22 Dec 2025 16:38:51 +0100 Subject: [PATCH 15/36] Split package jobs into separate tables, return all data from the job endpoint --- app-e2e/src/Test/E2E/Publish.purs | 10 +- app/src/App/API.purs | 4 +- app/src/App/Effect/Db.purs | 93 +++- app/src/App/SQLite.js | 99 ++-- app/src/App/SQLite.purs | 460 ++++++++++++++---- app/src/App/Server/JobExecutor.purs | 39 +- app/src/App/Server/MatrixBuilder.purs | 4 +- app/src/App/Server/Router.purs | 42 +- app/test/App/API.purs | 2 +- ...20240914171030_create_job_queue_tables.sql | 29 +- lib/src/API/V1.purs | 206 +++++++- lib/src/JobType.purs | 27 - scripts/src/PackageDeleter.purs | 2 +- test-utils/src/Registry/Test/E2E/Client.purs | 2 +- 14 files changed, 763 insertions(+), 256 deletions(-) delete mode 100644 lib/src/JobType.purs diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 
051d1931b..d06289340 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -68,18 +68,18 @@ spec = do job <- Client.pollJob config jobId -- If job failed, print logs for debugging - unless job.success do + unless (V1.jobInfo job).success do Console.log "Job failed! Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs Console.log $ String.joinWith "\n" logMessages -- Verify job completed successfully - when (not job.success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs + when (not (V1.jobInfo job).success) do + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs let errorMessages = map _.message errorLogs Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - Assert.shouldSatisfy job.finishedAt isJust + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust -- Assert.shouldEqual job.jobType JobType.PublishJob -- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") -- Assert.shouldEqual job.ref "v4.0.0" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 1e69a129e..06d1ed943 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -344,7 +344,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe (Map PackageName Range)) +publish :: forall r. 
Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -806,7 +806,7 @@ publish maybeLegacyIndex payload = do Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp - pure $ Just (un Manifest manifest).dependencies + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 1e90a8163..031c91a62 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,10 +5,25 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) +import Registry.App.SQLite + ( FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails + , SQLite + , SelectJobRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails + ) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run @@ -25,17 +40,21 @@ import Run.Except as Except -- be part of app code we want to test. 
data Db a - = InsertPackageJob InsertPackageJob (JobId -> a) + = InsertPublishJob InsertPublishJob (JobId -> a) + | InsertUnpublishJob InsertUnpublishJob (JobId -> a) + | InsertTransferJob InsertTransferJob (JobId -> a) | InsertMatrixJob InsertMatrixJob (JobId -> a) | InsertPackageSetJob InsertPackageSetJob (JobId -> a) | FinishJob FinishJob a | StartJob StartJob a - | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) - | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) + | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) + | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) | InsertLogLine LogLine a - | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) + | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) | ResetIncompleteJobs a derive instance Functor Db @@ -51,7 +70,7 @@ insertLog :: forall r. LogLine -> Run (DB + r) Unit insertLog log = Run.lift _db (InsertLogLine log unit) -- | Select all logs for a given job, filtered by loglevel. -selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) +selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) -- | Set a job in the database to the 'finished' state. @@ -59,12 +78,20 @@ finishJob :: forall r. FinishJob -> Run (DB + r) Unit finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJobInfo :: forall r. 
JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) -selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow +selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) +selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow --- | Insert a new package job into the database. -insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) JobId -insertPackageJob job = Run.lift _db (InsertPackageJob job identity) +-- | Insert a new publish job into the database. +insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId +insertPublishJob job = Run.lift _db (InsertPublishJob job identity) + +-- | Insert a new unpublish job into the database. +insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId +insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity) + +-- | Insert a new transfer job into the database. +insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId +insertTransferJob job = Run.lift _db (InsertTransferJob job identity) -- | Insert a new matrix job into the database. insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId @@ -78,9 +105,17 @@ insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity) startJob :: forall r. StartJob -> Run (DB + r) Unit startJob job = Run.lift _db (StartJob job unit) --- | Select the next package job from the database. -selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) -selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow +-- | Select the next publish job from the database. +selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow + +-- | Select the next unpublish job from the database. +selectNextUnpublishJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails) +selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow + +-- | Select the next transfer job from the database. +selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow -- | Select the next matrix job from the database. selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) @@ -102,8 +137,16 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertPackageJob job reply -> do - result <- Run.liftEffect $ SQLite.insertPackageJob env.db job + InsertPublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPublishJob env.db job + pure $ reply result + + InsertUnpublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job + pure $ reply result + + InsertTransferJob job reply -> do + result <- Run.liftEffect $ SQLite.insertTransferJob env.db job pure $ reply result InsertMatrixJob job reply -> do @@ -122,12 +165,20 @@ handleSQLite env = case _ of Run.liftEffect $ SQLite.startJob env.db job pure next - SelectJobInfo jobId reply -> do - result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + SelectJob request reply -> do + result <- Run.liftEffect $ SQLite.selectJob env.db request + pure $ reply result + + SelectNextPublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db + pure $ reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db pure $ reply result - SelectNextPackageJob reply -> do - result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ 
SQLite.selectNextTransferJob env.db pure $ reply result SelectNextMatrixJob reply -> do diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 9fbbeeec9..bbad2ae78 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -2,7 +2,9 @@ import Database from "better-sqlite3"; const JOB_INFO_TABLE = 'job_info' const LOGS_TABLE = 'logs' -const PACKAGE_JOBS_TABLE = 'package_jobs'; +const PUBLISH_JOBS_TABLE = 'publish_jobs'; +const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs'; +const TRANSFER_JOBS_TABLE = 'transfer_jobs'; const MATRIX_JOBS_TABLE = 'matrix_jobs'; const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; @@ -66,9 +68,19 @@ const _insertJob = (db, table, columns, job) => { return insert(job); }; -export const insertPackageJobImpl = (db, job) => { - const columns = ['jobId', 'jobType', 'packageName', 'payload'] - return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertTransferJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'payload'] + return _insertJob(db, TRANSFER_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { @@ -81,43 +93,44 @@ export const insertPackageSetJobImpl = (db, job) => { return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; -export const selectNextPackageJobImpl = (db) => { - const stmt = db.prepare(` - SELECT job.*, info.createdAt, info.startedAt - FROM ${PACKAGE_JOBS_TABLE} job +const _selectJob = (db, { table, jobId }) => { + let query = ` + SELECT job.*, info.* + FROM ${table} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId - WHERE info.finishedAt IS NULL - AND 
info.startedAt IS NULL - ORDER BY info.createdAt DESC - LIMIT 1 - `); - return stmt.get(); + `; + + const params = []; + + if (jobId === null) { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; + } else { + query += ` WHERE info.jobId = ?`; + params.push(jobId); + } + + query += ` ORDER BY info.createdAt ASC LIMIT 1`; + const stmt = db.prepare(query); + + return stmt.get(...params); +} + +export const selectPublishJobImpl = (db, jobId) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId }); };  -export const selectNextMatrixJobImpl = (db) => { - const stmt = db.prepare(` - SELECT job.*, info.createdAt, info.startedAt - FROM ${MATRIX_JOBS_TABLE} job - JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId - WHERE info.finishedAt IS NULL - AND info.startedAt IS NULL - ORDER BY info.createdAt DESC - LIMIT 1 - `); - return stmt.get(); +export const selectUnpublishJobImpl = (db, jobId) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId }); };  -export const selectNextPackageSetJobImpl = (db) => { - const stmt = db.prepare(` - SELECT job.*, info.createdAt, info.startedAt - FROM ${PACKAGE_SET_JOBS_TABLE} job - JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId - WHERE info.finishedAt IS NULL - AND info.startedAt IS NULL - ORDER BY info.createdAt DESC - LIMIT 1 - `); - return stmt.get(); +export const selectTransferJobImpl = (db, jobId) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId }); +}; + +export const selectMatrixJobImpl = (db, jobId) => { + return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId }); +}; + +export const selectPackageSetJobImpl = (db, jobId) => { + return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; export const startJobImpl = (db, args) => { @@ -161,18 +174,10 @@ export const insertLogLineImpl = (db, logLine) => { export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { let query = ` SELECT * FROM ${LOGS_TABLE} - WHERE jobId = ? AND level >= ? + WHERE jobId = ? AND level >= ? 
AND timestamp >= ? + ORDER BY timestamp ASC LIMIT 100 `; - const params = [jobId, logLevel]; - - if (since !== null) { - query += ' AND timestamp >= ?'; - params.push(since); - } - - query += ' ORDER BY timestamp ASC'; - const stmt = db.prepare(query); - return stmt.all(...params); + return stmt.all(jobId, logLevel, since); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 09f91f612..814b2b82c 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -4,49 +4,61 @@ -- | nicer interface with PureScript types for higher-level modules to use. module Registry.App.SQLite - ( SQLite - , ConnectOptions - , connect - , JobInfo - , selectJobInfo - , InsertPackageJob - , insertPackageJob + ( ConnectOptions + , FinishJob , InsertMatrixJob - , insertMatrixJob , InsertPackageSetJob - , insertPackageSetJob - , FinishJob - , finishJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails + , SQLite + , SelectJobRequest , StartJob - , startJob - , resetIncompleteJobs + , TransferJobDetails + , UnpublishJobDetails + , connect + , finishJob , insertLogLine + , insertMatrixJob + , insertPackageSetJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs + , selectJob , selectLogsByJob - , PackageJobDetails - , selectNextPackageJob - , MatrixJobDetails , selectNextMatrixJob - , PackageSetJobDetails , selectNextPackageSetJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , startJob ) where import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable +import Data.String as String import Data.UUID.Random as UUID import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 
(JobId(..), LogLevel, LogLine) +import Record as Record +import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.JobType as JobType -import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature) import Registry.Version as Version -- | An active database connection acquired with `connect` @@ -83,29 +95,26 @@ type JSJobInfo = , success :: Int } -jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo -jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do - created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) - finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) - isSuccess <- case success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show success - pure - { jobId: JobId jobId - , createdAt: created - , startedAt: started - , finishedAt: finished - , success: isSuccess - } +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } foreign import selectJobInfoImpl 
:: EffectFn2 SQLite String (Nullable JSJobInfo) -selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) -selectJobInfo db (JobId jobId) = do - maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId - pure $ traverse jobInfoFromJSRep maybeJobInfo +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo finishJob :: SQLite -> FinishJob -> Effect Unit finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep @@ -146,7 +155,7 @@ type JSFinishJob = finishJobToJSRep :: FinishJob -> JSFinishJob finishJobToJSRep { jobId, success, finishedAt } = { jobId: un JobId jobId - , success: if success then 1 else 0 + , success: fromSuccess success , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } @@ -162,80 +171,325 @@ newJobId = do id <- UUID.make pure $ JobId $ UUID.toString id +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect (Either String (Maybe Job)) +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + case fail of + [] -> runExceptT + ( selectPublishJob logs + <|> selectMatrixJob logs + <|> selectTransferJob logs + <|> selectPackageSetJob logs + <|> selectUnpublishJob logs + ) + _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail + where + selectPublishJob logs = ExceptT do + 
maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectPackageSetJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) + <<< packageSetJobDetailsFromJSRep + ) + maybeJobDetails + -------------------------------------------------------------------------------- --- package_jobs table +-- publish_jobs table -type PackageJobDetails = +type PublishJobDetails = { jobId :: JobId - , packageName :: PackageName - , payload :: PackageOperation , createdAt :: DateTime , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + } + +type JSPublishJobDetails = + { jobId :: String + , createdAt :: 
String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -type JSPackageJobDetails = +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db Nullable.null + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = { jobId :: String , packageName :: String + , packageVersion :: String , payload :: String , createdAt :: String + } + +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format 
Internal.Format.iso8601DateTime now + } + +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. +insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId +insertPublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- unpublish_jobs table + +type UnpublishJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + } + +type JSUnpublishJobDetails = + { jobId :: String + , createdAt :: String , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt } = do - name <- PackageName.parse packageName +unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails +unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson 
Operation.authenticatedCodec payload pure { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s , packageName: name + , packageVersion: version , payload: parsed + } + +foreign import selectUnpublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSUnpublishJobDetails) + +selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) +selectNextUnpublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db Nullable.null + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +type InsertUnpublishJob = + { payload :: UnpublishData + , rawPayload :: String + , signature :: Signature + } + +type JSInsertUnpublishJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , payload :: String + , createdAt :: String + } + +insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob +insertUnpublishJobToJSRep jobId now { payload, rawPayload, signature } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Unpublish payload + , rawPayload + , signature + } + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
+insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId +insertUnpublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- transfer_jobs table + +type TransferJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , packageName :: PackageName + , payload :: AuthenticatedData + } + +type JSTransferJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , payload :: String + } + +transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails +transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId , createdAt: created , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , payload: parsed } -foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) +foreign import selectTransferJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSTransferJobDetails) -selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) -selectNextPackageJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db - pure $ traverse 
packageJobDetailsFromJSRep maybeJobDetails +selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) +selectNextTransferJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db Nullable.null + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails -type InsertPackageJob = - { payload :: PackageOperation +type InsertTransferJob = + { payload :: TransferData + , rawPayload :: String + , signature :: Signature } -type JSInsertPackageJob = +type JSInsertTransferJob = { jobId :: String - , jobType :: String , packageName :: String , payload :: String , createdAt :: String } -insertPackageJobToJSRep :: JobId -> DateTime -> InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep jobId now { payload } = +insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob +insertTransferJobToJSRep jobId now { payload, rawPayload, signature } = { jobId: un JobId jobId - , jobType: JobType.print jobType - , packageName: PackageName.print name - , payload: stringifyJson Operation.packageOperationCodec payload + , packageName: PackageName.print payload.name + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Transfer payload, rawPayload, signature } , createdAt: DateTime.format Internal.Format.iso8601DateTime now } - where - { jobType, name } = case payload of - Operation.Publish { name } -> { jobType: JobType.PublishJob, name } - Operation.Authenticated { payload: Operation.Unpublish { name } } -> { jobType: JobType.UnpublishJob, name } - Operation.Authenticated { payload: Operation.Transfer { name } } -> { jobType: JobType.TransferJob, name } -foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit +foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit -- | Insert a new package job, ie. a publish, unpublish, or transfer. 
-insertPackageJob :: SQLite -> InsertPackageJob -> Effect JobId -insertPackageJob db job = do +insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep jobId now job + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job pure jobId -------------------------------------------------------------------------------- @@ -278,47 +532,55 @@ insertMatrixJob db job = do type MatrixJobDetails = { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName , packageVersion :: Version , compilerVersion :: Version , payload :: Map PackageName Version - , createdAt :: DateTime - , startedAt :: Maybe DateTime } type JSMatrixJobDetails = { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String , packageVersion :: String , compilerVersion :: String , payload :: String - , createdAt :: String - , startedAt :: Nullable String } matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails -matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success name <- PackageName.parse packageName version <- Version.parse packageVersion compiler <- Version.parse compilerVersion - created <- DateTime.unformat 
Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload pure { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s , packageName: name , packageVersion: version , compilerVersion: compiler , payload: parsed - , createdAt: created - , startedAt: started } -foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSMatrixJobDetails) selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) selectNextMatrixJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails -------------------------------------------------------------------------------- @@ -326,35 +588,43 @@ selectNextMatrixJob db = do type PackageSetJobDetails = { jobId :: JobId - , payload :: PackageSetOperation , createdAt :: DateTime , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , payload :: PackageSetOperation } type JSPackageSetJobDetails = { jobId :: String - , payload :: String , createdAt :: String , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , payload :: String } packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails -packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do created <- DateTime.unformat 
Internal.Format.iso8601DateTime createdAt started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload pure { jobId: JobId jobId - , payload: parsed , createdAt: created , startedAt: started + , finishedAt: finished + , success: s + , payload: parsed } -foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) +foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) selectNextPackageSetJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db Nullable.null pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails type InsertPackageSetJob = @@ -417,18 +687,18 @@ foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit insertLogLine :: SQLite -> LogLine -> Effect Unit insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep -foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine) -- | Select all logs for a given job at or above the indicated log level. To get all -- | logs, pass the DEBUG log level. 
-selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } selectLogsByJob db jobId level since = do - let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + let timestamp = DateTime.format Internal.Format.iso8601DateTime since jsLogLines <- Uncurried.runEffectFn4 selectLogsByJobImpl db (un JobId jobId) (API.V1.logLevelToPriority level) - (Nullable.toNullable timestamp) + timestamp pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 63a5cbddd..35e6a3991 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -12,6 +12,9 @@ import Data.Map as Map import Data.Set as Set import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db @@ -19,21 +22,14 @@ import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry -import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.ManifestIndex as ManifestIndex -import Registry.Operation as Operation import Registry.PackageName as PackageName import Registry.Version as Version import Run (Run) import Run.Except (EXCEPT) -data JobDetails - = PackageJob PackageJobDetails - | MatrixJob MatrixJobDetails - | PackageSetJob PackageSetJobDetails - runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = 
runEffects env do Log.info "Starting Job Executor" @@ -62,10 +58,7 @@ runJobExecutor env = runEffects env do Just job -> do now <- nowUTC let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId + jobId = (V1.jobInfo job).jobId Db.startJob { jobId, startedAt: now } @@ -96,20 +89,22 @@ runJobExecutor env = runEffects env do -- TODO: here we only get a single package for each operation, but really we should -- have all of them and toposort them. There is something in ManifestIndex but not --- sure that's what we need -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +-- sure that's what we need +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Job) findNextAvailableJob = runMaybeT - $ (PackageJob <$> MaybeT Db.selectNextPackageJob) - <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) - <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) + $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob) + <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob) + <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob) + <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob) -executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob :: DateTime -> Job -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish payload@{ name, version } } -> do - maybeDependencies <- API.publish Nothing payload + PublishJob { payload: payload@{ name } } -> do + maybeResult <- API.publish Nothing payload -- The above operation will throw if not successful, 
and return a map of -- dependencies of the package only if it has not been published before. - for_ maybeDependencies \dependencies -> do + for_ maybeResult \{ dependencies, version } -> do -- At this point this package has been verified with one compiler only. -- So we need to enqueue compilation jobs for (1) same package, all the other -- compilers, and (2) same compiler, all packages that depend on this one @@ -132,8 +127,8 @@ executeJob _ = case _ of , packageName: solvedPackage , packageVersion: solvedVersion } - PackageJob { payload: Operation.Authenticated auth } -> - API.authenticated auth + UnpublishJob { payload } -> API.authenticated payload + TransferJob { payload } -> API.authenticated payload MatrixJob details@{ packageName, packageVersion } -> do maybeDependencies <- MatrixBuilder.runMatrixJob details -- Unlike the publishing case, after verifying a compilation here we only need diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 7ae98d972..8db8e883b 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -19,6 +19,7 @@ import Data.String as String import Effect.Aff as Aff import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (MatrixJobData) import Registry.App.CLI.Purs (CompilerFailure(..)) import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions @@ -29,7 +30,6 @@ import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage -import Registry.App.SQLite (MatrixJobDetails) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Tmp as Tmp import Registry.ManifestIndex as ManifestIndex @@ -43,7 +43,7 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except -runMatrixJob :: forall r. 
MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) +runMatrixJob :: forall r. MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index f371d1e71..9143508de 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -2,21 +2,18 @@ module Registry.App.Server.Router where import Registry.App.Prelude hiding ((/)) -import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ import Effect.Aff as Aff import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status -import Registry.API.V1 (LogLevel(..), Route(..)) +import Registry.API.V1 (Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) -import Registry.Operation (PackageOperation) import Registry.Operation as Operation -import Registry.PackageName as PackageName import Run (Run) import Run.Except as Run.Except @@ -44,14 +41,20 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - insertPackageJob $ Operation.Publish publish + jobId <- lift $ Db.insertPublishJob { payload: publish } + jsonOk V1.jobCreatedResponseCodec { jobId } Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body case auth.payload of Operation.Unpublish payload -> do lift $ 
Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload - insertPackageJob $ Operation.Authenticated auth + jobId <- lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected unpublish operation." @@ -60,7 +63,12 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Transfer payload -> do lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload - insertPackageJob $ Operation.Authenticated auth + jobId <- lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected transfer operation." @@ -70,22 +78,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of jsonOk (CJ.array V1.jobCodec) [] Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of + now <- liftEffect nowUTC + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe now since }) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound Right Nothing -> do HTTPurple.notFound - Right (Just job) -> - jsonOk V1.jobCodec - { jobId - , createdAt: job.createdAt - , finishedAt: job.finishedAt - , success: job.success - , logs - } + Right (Just job) -> jsonOk V1.jobCodec job Status, Get -> HTTPurple.emptyResponse Status.ok @@ -95,9 +95,3 @@ router { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound - where - insertPackageJob :: PackageOperation -> ContT Response (Run _) Response - insertPackageJob operation = do - 
lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) - jobId <- lift $ Db.insertPackageJob { payload: operation } - jsonOk V1.jobCreatedResponseCodec { jobId } diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 63dcccc3d..122879e49 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,7 +96,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref - , version + , version: version , resolutions: Nothing } diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql index f4f1e68f3..71727f473 100644 --- a/db/migrations/20240914171030_create_job_queue_tables.sql +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -9,16 +9,33 @@ CREATE TABLE job_info ( success INTEGER NOT NULL DEFAULT 0 ); --- Package-oriented jobs (publish/unpublish/transfer) -CREATE TABLE package_jobs ( +-- Publishing jobs +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Unpublishing jobs +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package transfer jobs +CREATE TABLE transfer_jobs ( jobId TEXT PRIMARY KEY NOT NULL, - jobType TEXT NOT NULL, packageName TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); --- Compiler matrix jobs (one compiler, all packages) +-- Compiler matrix jobs CREATE TABLE matrix_jobs ( jobId TEXT PRIMARY KEY NOT NULL, packageName TEXT NOT NULL, @@ -49,7 +66,9 @@ CREATE TABLE IF NOT EXISTS logs ( -- migrate:down DROP TABLE job_info; -DROP TABLE 
package_jobs; +DROP TABLE publish_jobs; +DROP TABLE unpublish_jobs; +DROP TABLE transfer_jobs; DROP TABLE matrix_jobs; DROP TABLE package_set_jobs; DROP TABLE logs; diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 4bae692f5..8c08d181d 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,33 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) + , Job(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetJobData + , PublishJobData + , Route(..) + , TransferJobData + , UnpublishJobData + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLogLevel + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,15 +36,26 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) +import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy 
(Proxy(..)) data Route = Publish @@ -62,23 +99,169 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | PackageSetJob PackageSetJobData + +type JobInfo r = { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +type PackageSetJobData = JobInfo + ( payload :: PackageSetOperation + , jobType :: Proxy "packageset" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode 
unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + PackageSetJob j -> CJ.encode packageSetJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional 
Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +packageSetJobDataCodec :: CJ.Codec PackageSetJobData +packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "packageset") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , payload: Operation.packageSetOperationCodec + } + +jobInfo :: Job -> JobInfo () +jobInfo = case _ of + PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, 
startedAt, finishedAt, success, logs } + newtype JobId = JobId String derive instance Newtype JobId _ @@ -86,6 +269,23 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string +data JobType + = PublishJobType + | UnpublishJobType + | TransferJobType + | MatrixJobType + | PackageSetJobType + +derive instance Eq JobType + +printJobType :: JobType -> String +printJobType = case _ of + PublishJobType -> "publish" + UnpublishJobType -> "unpublish" + TransferJobType -> "transfer" + MatrixJobType -> "matrix" + PackageSetJobType -> "packageset" + type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs deleted file mode 100644 index dbc4eaf01..000000000 --- a/lib/src/JobType.purs +++ /dev/null @@ -1,27 +0,0 @@ -module Registry.JobType where - -import Prelude - -import Data.Codec.JSON as CJ -import Data.Codec.JSON.Sum as CJ.Sum -import Data.Either (Either(..), hush) - -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parse :: String -> Either String JobType -parse = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - -print :: JobType -> String -print = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -codec :: CJ.Codec JobType -codec = CJ.Sum.enumSum print (hush <<< parse) diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index e0de363ca..925361fb2 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -243,7 +243,7 @@ deleteVersion arguments name version = do { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref - , version + , version: version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } diff --git a/test-utils/src/Registry/Test/E2E/Client.purs 
b/test-utils/src/Registry/Test/E2E/Client.purs index ff34107df..9d8b6b0b4 100644 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ b/test-utils/src/Registry/Test/E2E/Client.purs @@ -175,6 +175,6 @@ pollJob config jobId = go 1 case result of Left err -> throwError $ toError err Right job -> - case job.finishedAt of + case (V1.jobInfo job).finishedAt of Just _ -> pure job Nothing -> go (attempt + 1) From 9a8d1ba152b2116bc6c0ec8f95ae674720e1dc47 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 22 Dec 2025 15:05:59 -0500 Subject: [PATCH 16/36] implement thin client for github issues replaces the old GitHubIssue which ran registry jobs directly with one that hits the registry api instead. also added integration tests that ensure various jobs can be kicked off as github issue events and we get the resulting comments, issue close events, etc. --- .env.example | 64 ++-- app-e2e/spago.yaml | 8 + app-e2e/src/Test/E2E/GitHubIssue.purs | 215 ++++++++++++ app-e2e/src/Test/E2E/Main.purs | 2 + app-e2e/src/Test/E2E/Publish.purs | 63 ++-- app/src/App/Effect/Env.purs | 13 + app/src/App/GitHubIssue.purs | 329 ++++++++++++------ app/src/App/SQLite.js | 1 + app/src/App/SQLite.purs | 22 +- app/src/App/Server/JobExecutor.purs | 4 +- flake.nix | 5 +- nix/test/config.nix | 58 +++ nix/test/integration.nix | 5 +- nix/test/test-env.nix | 3 +- spago.lock | 40 +++ .../src/Registry/Test/E2E/Fixtures.purs | 70 ++++ .../src/Registry/Test/E2E/WireMock.purs | 164 +++++++++ 17 files changed, 900 insertions(+), 166 deletions(-) create mode 100644 app-e2e/src/Test/E2E/GitHubIssue.purs create mode 100644 test-utils/src/Registry/Test/E2E/Fixtures.purs create mode 100644 test-utils/src/Registry/Test/E2E/WireMock.purs diff --git a/.env.example b/.env.example index febae2d29..4873fe0a0 100644 --- a/.env.example +++ b/.env.example @@ -1,38 +1,60 @@ -# ===== -# Dev Configuration -# The devShell reads this file to set defaults, so changing values here -# affects local development. 
-# ===== +# ----------------------------------------------------------------------------- +# Server Configuration (dev defaults, required in all environments) +# ----------------------------------------------------------------------------- -# Server port - used by both the server and E2E tests +# Port the registry server listens on +# - Dev/Test: 9000 (from this file) +# - Prod: Set in deployment config SERVER_PORT=9000 # SQLite database path (relative to working directory) +# - Dev: Uses local ./db directory +# - Test: Overridden to use temp state directory +# - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" -# ===== -# Dev Secrets -# these must be set in .env when running scripts like legacy-importer -# ===== -# GitHub personal access token for API requests when running scripts -GITHUB_TOKEN="ghp_your_personal_access_token" +# ----------------------------------------------------------------------------- +# External Service URLs (optional overrides, have production defaults) +# ----------------------------------------------------------------------------- +# These default to production URLs in the app. Set these only when: +# - Running tests (test-env sets these automatically) +# - Using custom/staging infrastructure -# ===== -# Prod Secrets -# these must be set in .env to run the production server and some scripts -# ===== +# GITHUB_API_URL="https://api.github.com" +# S3_API_URL="https://packages.registry.purescript.org" +# S3_BUCKET_URL="https://ams3.digitaloceanspaces.com" +# PURSUIT_API_URL="https://pursuit.purescript.org" +# REGISTRY_API_URL="https://registry.purescript.org/api" +# HEALTHCHECKS_URL="https://hc-ping.com/your-uuid" -# DigitalOcean Spaces credentials for S3-compatible storage -SPACES_KEY="digitalocean_spaces_key" -SPACES_SECRET="digitalocean_spaces_secret" -# Pacchettibotti bot account credentials -# Used for automated registry operations (commits, releases, etc.) 
+# ----------------------------------------------------------------------------- +# Secrets (required for production, use dummy values for local dev) +# ----------------------------------------------------------------------------- +# IMPORTANT: Never commit real secrets. The values below are dummies for testing. + +# GitHub personal access token for pacchettibotti bot +# Used for: commits to registry repos, issue management PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token" # Pacchettibotti SSH keys (base64-encoded) +# Used for: signing authenticated operations (unpublish, transfer) # Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org" # Encode with: cat key | base64 | tr -d '\n' PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn" PACCHETTIBOTTI_ED25519="YWJjeHl6" + +# DigitalOcean Spaces credentials for S3-compatible storage +# Used for: uploading/downloading package tarballs +SPACES_KEY="digitalocean_spaces_key" +SPACES_SECRET="digitalocean_spaces_secret" + + +# ----------------------------------------------------------------------------- +# Script-only Secrets (not used by server, used by scripts like legacy-importer) +# ----------------------------------------------------------------------------- + +# Personal GitHub token for API requests when running scripts +# This is YOUR token, not pacchettibotti's +GITHUB_TOKEN="ghp_your_personal_access_token" diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index 1fa902f14..c19e78c42 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -5,12 +5,20 @@ package: dependencies: - aff - arrays + - codec-json - console - datetime - effect - either + - foldable-traversable + - json - maybe + - node-fs + - node-path + - node-process - prelude + - registry-app + - registry-foreign - registry-lib - registry-test-utils - spec diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs new file mode 100644 index 000000000..b1931aaab 
--- /dev/null +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -0,0 +1,215 @@ +-- | End-to-end tests for the GitHubIssue workflow. +-- | These tests exercise the full flow: parsing a GitHub event, submitting to +-- | the registry API, polling for completion, and posting comments. +module Test.E2E.GitHubIssue (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import JSON as JSON +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Node.Process as Process +import Registry.App.GitHubIssue as GitHubIssue +import Registry.Foreign.Tmp as Tmp +import Registry.Operation (AuthenticatedData) +import Registry.Operation as Operation +import Registry.Test.E2E.Client as Client +import Registry.Test.E2E.Fixtures as Fixtures +import Registry.Test.E2E.WireMock (WireMockRequest) +import Registry.Test.E2E.WireMock as WireMock +import Test.Spec (Spec) +import Test.Spec as Spec + +spec :: Spec Unit +spec = do + Spec.describe "GitHubIssue end-to-end" do + Spec.before clearWireMockJournal do + + Spec.it "handles a publish via GitHub issue, posts comments, and closes issue on success" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.effectPublishData + + assertJobSucceeded result + assertHasComment jobStartedText result + assertHasComment jobCompletedText result + assertIssueClosed result + + Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.failingPublishData + + assertJobFailed result + assertHasComment jobStartedText result + assertHasComment jobFailedText result + assertNoComment jobCompletedText result + assertIssueOpen result + + Spec.it "re-signs authenticated operation for trustee (job fails due to unpublish time limit)" \_ -> do + result <- runWorkflowWithEvent $ 
mkGitHubAuthenticatedEvent Fixtures.trusteeAuthenticatedData + + assertHasComment jobStartedText result + assertTeamsApiCalled result + + where + clearWireMockJournal :: Aff Unit + clearWireMockJournal = do + wmConfig <- liftEffect WireMock.configFromEnv + WireMock.clearRequestsOrFail wmConfig + +testIssueNumber :: Int +testIssueNumber = 101 + +-- | Username configured as a packaging team member in test WireMock fixtures. +-- | See nix/test/config.nix for the GitHub Teams API stub. +packagingTeamUsername :: String +packagingTeamUsername = "packaging-team-user" + +jobStartedText :: String +jobStartedText = "Job started" + +jobCompletedText :: String +jobCompletedText = "Job completed successfully" + +jobFailedText :: String +jobFailedText = "Job failed" + +packagingTeamMembersPath :: String +packagingTeamMembersPath = "/orgs/purescript/teams/packaging/members" + +testPollConfig :: GitHubIssue.PollConfig +testPollConfig = + { maxAttempts: 60 + , interval: Milliseconds 500.0 + } + +githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } +githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object + { sender: CJ.Record.object { login: CJ.string } + , issue: CJ.Record.object { number: CJ.int, body: CJ.string } + } + +mkGitHubPublishEvent :: Operation.PublishData -> String +mkGitHubPublishEvent publishData = + let + publishJson = JSON.print $ CJ.encode Operation.publishCodec publishData + body = "```json\n" <> publishJson <> "\n```" + event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + in + JSON.print $ CJ.encode githubEventCodec event + +mkGitHubAuthenticatedEvent :: AuthenticatedData -> String +mkGitHubAuthenticatedEvent authData = + let + authJson = JSON.print $ CJ.encode Operation.authenticatedCodec authData + body = "```json\n" <> authJson <> "\n```" + event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + in + JSON.print $ 
CJ.encode githubEventCodec event + +issuePath :: Int -> String +issuePath n = "/issues/" <> show n + +issueCommentsPath :: Int -> String +issueCommentsPath n = issuePath n <> "/comments" + +commentRequests :: Array WireMockRequest -> Array WireMockRequest +commentRequests = + WireMock.filterByMethod "POST" + >>> WireMock.filterByUrlContaining (issueCommentsPath testIssueNumber) + +closeRequests :: Array WireMockRequest -> Array WireMockRequest +closeRequests = + WireMock.filterByMethod "PATCH" + >>> WireMock.filterByUrlContaining (issuePath testIssueNumber) + +teamsRequests :: Array WireMockRequest -> Array WireMockRequest +teamsRequests = + WireMock.filterByMethod "GET" + >>> WireMock.filterByUrlContaining packagingTeamMembersPath + +bodyContains :: String -> WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) + +hasComment :: String -> Array WireMockRequest -> Boolean +hasComment text = Array.any (bodyContains text) + +-- | Result of running the GitHubIssue workflow. +type RunResult = + { success :: Boolean + , requests :: Array WireMockRequest + } + +-- | Run the GitHub issue workflow with a given event JSON. +-- | Handles server check, temp file creation, env setup, and request capture. 
+runWorkflowWithEvent :: String -> Aff RunResult +runWorkflowWithEvent eventJson = do + -- Verify server is reachable + config <- liftEffect Client.configFromEnv + statusResult <- Client.getStatus config + case statusResult of + Left err -> Aff.throwError $ Aff.error $ "Server not reachable: " <> Client.printClientError err + Right _ -> pure unit + + -- Write event to temp file + tmpDir <- Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + + -- Initialize and run workflow + envResult <- GitHubIssue.initializeGitHub + case envResult of + Nothing -> + Aff.throwError $ Aff.error "initializeGitHub returned Nothing" + Just env -> do + let testEnv = env { pollConfig = testPollConfig, logVerbosity = Quiet } + result <- GitHubIssue.runGitHubIssue testEnv + + -- Capture WireMock requests + wmConfig <- liftEffect WireMock.configFromEnv + requests <- WireMock.getRequestsOrFail wmConfig + + case result of + Left err -> + WireMock.failWithRequests ("runGitHubIssue failed: " <> err) requests + Right success -> + pure { success, requests } + +assertJobSucceeded :: RunResult -> Aff Unit +assertJobSucceeded { success, requests } = + unless success do + WireMock.failWithRequests "Job did not succeed" requests + +assertJobFailed :: RunResult -> Aff Unit +assertJobFailed { success, requests } = + when success do + WireMock.failWithRequests "Expected job to fail but it succeeded" requests + +assertHasComment :: String -> RunResult -> Aff Unit +assertHasComment text { requests } = + unless (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests + +assertNoComment :: String -> RunResult -> Aff Unit +assertNoComment text { requests } = + when (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests + 
+assertIssueClosed :: RunResult -> Aff Unit +assertIssueClosed { requests } = + when (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to be closed, but no close request was made" requests + +assertIssueOpen :: RunResult -> Aff Unit +assertIssueOpen { requests } = + unless (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to remain open, but a close request was made" requests + +assertTeamsApiCalled :: RunResult -> Aff Unit +assertTeamsApiCalled { requests } = + when (Array.null (teamsRequests requests)) do + WireMock.failWithRequests "Expected GitHub Teams API to be called, but no such request was seen" requests diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs index 7bc030d76..bbd7f3212 100644 --- a/app-e2e/src/Test/E2E/Main.purs +++ b/app-e2e/src/Test/E2E/Main.purs @@ -5,6 +5,7 @@ import Prelude import Data.Maybe (Maybe(..)) import Data.Time.Duration (Milliseconds(..)) import Effect (Effect) +import Test.E2E.GitHubIssue as Test.E2E.GitHubIssue import Test.E2E.Publish as Test.E2E.Publish import Test.Spec as Spec import Test.Spec.Reporter.Console (consoleReporter) @@ -15,6 +16,7 @@ main :: Effect Unit main = runSpecAndExitProcess' config [ consoleReporter ] do Spec.describe "E2E Tests" do Spec.describe "Publish" Test.E2E.Publish.spec + Spec.describe "GitHubIssue" Test.E2E.GitHubIssue.spec where config = { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index d06289340..4168e1610 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -6,16 +6,16 @@ import Prelude import Data.Array as Array import Data.Either (Either(..)) +import Data.Foldable (for_) import Data.Maybe (Maybe(..), isJust) import Data.String as String import Effect.Aff (Aff) import Effect.Class (liftEffect) import Effect.Class.Console as Console import 
Registry.API.V1 as V1 -import Registry.Location as Registry.Location import Registry.Test.Assert as Assert import Registry.Test.E2E.Client as Client -import Registry.Test.Utils as Utils +import Registry.Test.E2E.Fixtures as Fixtures import Test.Spec (Spec) import Test.Spec as Spec @@ -41,26 +41,11 @@ spec = do Right _ -> pure unit -- Jobs list may not be empty if other tests ran Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0" do + Spec.it "can publish effect@4.0.0 and filter logs" do config <- getConfig - let - -- Location must match what's in the fixture metadata - effectLocation = Registry.Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - publishData = - { name: Utils.unsafePackageName "effect" - , location: Just effectLocation - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - , version: Utils.unsafeVersion "4.0.0" - } -- Submit publish request - publishResult <- Client.publish config publishData + publishResult <- Client.publish config Fixtures.effectPublishData case publishResult of Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err Right { jobId } -> do @@ -80,6 +65,40 @@ spec = do Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust --- Assert.shouldEqual job.jobType JobType.PublishJob --- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") --- Assert.shouldEqual job.ref "v4.0.0" + + -- Test log level filtering + allLogsResult <- Client.getJob config jobId (Just V1.Debug) Nothing + case allLogsResult of + Left err -> Assert.fail $ "Failed to get job with DEBUG level: " <> Client.printClientError err + Right allLogsJob -> do + let allLogs = (V1.jobInfo allLogsJob).logs + + infoLogsResult <- Client.getJob config jobId (Just V1.Info) Nothing + case infoLogsResult of + Left err -> Assert.fail $ "Failed to get job 
with INFO level: " <> Client.printClientError err + Right infoLogsJob -> do + let infoLogs = (V1.jobInfo infoLogsJob).logs + let debugOnlyLogs = Array.filter (\l -> l.level == V1.Debug) allLogs + + -- INFO logs should not contain any DEBUG logs + let infoContainsDebug = Array.any (\l -> l.level == V1.Debug) infoLogs + when infoContainsDebug do + Assert.fail "INFO level filter returned DEBUG logs" + + -- If there were DEBUG logs, INFO result should be smaller + when (Array.length debugOnlyLogs > 0) do + Assert.shouldSatisfy (Array.length infoLogs) (_ < Array.length allLogs) + + -- Test timestamp filtering + let logs = (V1.jobInfo job).logs + when (Array.length logs >= 2) do + case Array.index logs 0 of + Nothing -> pure unit + Just firstLog -> do + sinceResult <- Client.getJob config jobId (Just V1.Debug) (Just firstLog.timestamp) + case sinceResult of + Left err -> Assert.fail $ "Failed to get job with since filter: " <> Client.printClientError err + Right sinceJob -> do + let sinceLogs = (V1.jobInfo sinceJob).logs + for_ sinceLogs \l -> + Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..cd7880602 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: 
githubApiUrlEnv , pursuitApiUrl: pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. +registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 3764398cf..ced1add6a 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,121 +14,245 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation 
(AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - void $ API.publish Nothing payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. - signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err 
+ } + liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret (Source.handle Source.Recent) + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Cache.interpret API._compilerCache (Cache.handleFs cache) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left (PackageSetUpdate payload) -> pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network 
error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Info level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Info && case lastTimestamp of + Nothing -> 
true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=INFO" + Just ts -> baseUrl <> "?level=INFO&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | 
Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 minutes at 5 second intervals +defaultPollConfig :: PollConfig +defaultPollConfig = + { maxAttempts: 360 + , interval: Aff.Milliseconds 5000.0 + } type GitHubEventEnv = { octokit :: Octokit @@ -127,10 +260,11 @@ type GitHubEventEnv = , issue :: IssueNumber , username :: String , operation :: Either PackageSetOperation PackageOperation - , spacesConfig :: SpaceKey , publicKey :: String , privateKey :: String , resourceEnv :: Env.ResourceEnv + , pollConfig :: PollConfig + , logVerbosity :: LogVerbosity } initializeGitHub :: Aff (Maybe GitHubEventEnv) @@ -138,17 +272,12 @@ initializeGitHub = do token <- Env.lookupRequired Env.pacchettibottiToken publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret resourceEnv <- Env.lookupResourceEnv eventPath <- Env.lookupRequired Env.githubEventPath octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl readOperation eventPath >>= case _ of - -- If the issue body is not just a JSON string, then we don't consider it - -- to be an attempted operation and it is presumably just an issue on the - -- registry repository. NotJson -> pure Nothing @@ -173,10 +302,11 @@ initializeGitHub = do , issue , username , operation - , spacesConfig: { key: spacesKey, secret: spacesSecret } , publicKey , privateKey , resourceEnv + , pollConfig: defaultPollConfig + , logVerbosity: Verbose } data OperationDecoding @@ -192,16 +322,11 @@ readOperation eventPath = do IssueEvent { issueNumber, body, username } <- case JSON.parse fileContents >>= decodeIssueEvent of Left err -> - -- If we don't receive a valid event path or the contents can't be decoded - -- then this is a catastrophic error and we exit the workflow. 
Aff.throwError $ Aff.error $ "Error while parsing json from " <> eventPath <> " : " <> err Right event -> pure event let - -- TODO: Right now we parse all operations from GitHub issues, but we should - -- in the future only parse out package set operations. The others should be - -- handled via a HTTP API. decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation) decodeOperation json = do object <- CJ.decode CJ.jobject json @@ -230,10 +355,6 @@ readOperation eventPath = do Right operation -> pure $ DecodedOperation issueNumber username operation --- | Users may submit issues with contents wrapped in code fences, perhaps with --- | a language specifier, trailing lines, and other issues. This rudimentary --- | cleanup pass retrieves all contents within an opening { and closing } --- | delimiter. firstObject :: String -> String firstObject input = fromMaybe input do before <- String.indexOf (String.Pattern "{") input @@ -241,9 +362,6 @@ firstObject input = fromMaybe input do after <- String.lastIndexOf (String.Pattern "}") start pure (String.take (after + 1) start) --- | An event triggered by a GitHub workflow, specifically via an issue comment --- | or issue creation. --- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment newtype IssueEvent = IssueEvent { issueNumber :: IssueNumber , body :: String @@ -260,22 +378,9 @@ decodeIssueEvent json = lmap CJ.DecodeError.print do issueObject <- Octokit.atKey "issue" CJ.jobject object issueNumber <- Octokit.atKey "number" CJ.int issueObject - -- We accept issue creation and issue comment events, but both contain an - -- 'issue' field. However, only comments contain a 'comment' field. For that - -- reason we first try to parse the comment and fall back to the issue if - -- that fails. 
body <- Octokit.atKey "body" CJ.string =<< Octokit.atKey "comment" CJ.jobject object <|> pure issueObject pure $ IssueEvent { body, username, issueNumber: IssueNumber issueNumber } --- | Re-sign a payload as pacchettibotti if the authenticated operation was --- | submitted by a registry trustee. --- --- @pacchettibotti is considered an 'owner' of all packages for authenticated --- operations. Registry trustees can ask pacchettibotti to perform an action on --- behalf of a package by submitting a payload with an empty signature. If the --- payload was submitted by a trustee (ie. a member of the packaging team) then --- pacchettibotti will re-sign it and add itself as an owner before continuing --- with the authenticated operation. signPacchettiBottiIfTrustee :: forall r . AuthenticatedData diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index bbad2ae78..8b0a1765e 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -94,6 +94,7 @@ export const insertPackageSetJobImpl = (db, job) => { }; const _selectJob = (db, { table, jobId }) => { + const params = []; let query = ` SELECT job.*, info.* FROM ${table} job diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 814b2b82c..bf7bd3f69 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -42,6 +42,7 @@ module Registry.App.SQLite import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError +import Data.Array as Array import Control.Monad.Except (runExceptT) import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime @@ -191,15 +192,22 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do let logLevel = fromMaybe Error maybeLogLevel { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since case fail of - [] -> runExceptT - ( selectPublishJob logs - <|> selectMatrixJob logs - <|> selectTransferJob logs - <|> selectPackageSetJob logs - <|> selectUnpublishJob logs - ) + [] -> runExceptT $ firstJust + [ 
selectPublishJob logs + , selectMatrixJob logs + , selectTransferJob logs + , selectPackageSetJob logs + , selectUnpublishJob logs + ] _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + selectPublishJob logs = ExceptT do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) pure $ traverse diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 35e6a3991..2ede4307a 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -64,8 +64,10 @@ runJobExecutor env = runEffects env do -- We race the job execution against a timeout; if the timeout happens first, -- we kill the job and move on to the next one. + -- Note: we set env.jobId so that logs are written to the database. jobResult <- liftAff do - let execute = Just <$> (runEffects env $ executeJob now job) + let envWithJobId = env { jobId = Just jobId } + let execute = Just <$> (runEffects envWithJobId $ executeJob now job) let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes let timeout = Aff.delay (Milliseconds delay) $> Nothing Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout diff --git a/flake.nix b/flake.nix index 56a98f696..9aacbaa80 100644 --- a/flake.nix +++ b/flake.nix @@ -216,10 +216,13 @@ name = "registry-dev"; inherit GIT_LFS_SKIP_SMUDGE; - # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; + # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*) + # are NOT set here to avoid conflicting with .env files used by production scripts + # like legacy-importer. 
Use `nix run .#test-env` to run E2E tests with mocked services. + packages = with pkgs; registry-runtime-deps diff --git a/nix/test/config.nix b/nix/test/config.nix index 454747b30..441d67765 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -30,6 +30,7 @@ let # Mock service URLs for test environment mockUrls = { + registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; s3 = "http://localhost:${toString ports.s3}"; bucket = "http://localhost:${toString ports.bucket}"; @@ -43,6 +44,7 @@ let # implemented in the script directly. testEnv = envDefaults // { # Mock service URLs (override production endpoints) + REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; S3_API_URL = mockUrls.s3; S3_BUCKET_URL = mockUrls.bucket; @@ -54,6 +56,16 @@ let env: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: value: ''export ${name}="${value}"'') env); + # Pre-built shell exports for E2E test runners (used by test-env.nix and integration.nix) + testRunnerExports = '' + export SERVER_PORT="${toString ports.server}" + export REGISTRY_API_URL="${testEnv.REGISTRY_API_URL}" + export GITHUB_API_URL="${testEnv.GITHUB_API_URL}" + export PACCHETTIBOTTI_TOKEN="${testEnv.PACCHETTIBOTTI_TOKEN}" + export PACCHETTIBOTTI_ED25519_PUB="${testEnv.PACCHETTIBOTTI_ED25519_PUB}" + export PACCHETTIBOTTI_ED25519="${testEnv.PACCHETTIBOTTI_ED25519}" + ''; + # Git mock that redirects URLs to local fixtures; this is necessary because otherwise # commands would reach out to GitHub or the other package origins. 
gitMock = pkgs.writeShellScriptBin "git" '' @@ -153,6 +165,51 @@ let }; }; } + # Accept issue comment creation (used by GitHubIssue workflow) + { + request = { + method = "POST"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments"; + }; + response = { + status = 201; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + body = "ok"; + }; + }; + } + # Accept issue closing (used by GitHubIssue workflow) + { + request = { + method = "PATCH"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + state = "closed"; + }; + }; + } + # GitHub Teams API for trustee verification (used by GitHubIssue workflow) + { + request = { + method = "GET"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { login = "packaging-team-user"; id = 1; } + ]; + }; + } ]; # S3 API wiremock mappings (serves package tarballs) @@ -477,6 +534,7 @@ in defaultStateDir mockUrls testEnv + testRunnerExports envToExports gitMock gitMockOverlay diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..b178bfdf4 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -57,7 +57,10 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + + # Export test environment variables for E2E test runners + ${testEnv.testConfig.testRunnerExports} + mkdir -p $STATE_DIR # Start wiremock services diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..e27ef6376 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -95,7 +95,8 @@ let testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" + # Export 
test environment variables for E2E test runners + ${testConfig.testRunnerExports} if [ -z "''${STATE_DIR:-}" ]; then STATE_DIR="$(mktemp -d)" diff --git a/spago.lock b/spago.lock index 83d2afb8d..a6dbae907 100644 --- a/spago.lock +++ b/spago.lock @@ -313,12 +313,20 @@ "dependencies": [ "aff", "arrays", + "codec-json", "console", "datetime", "effect", "either", + "foldable-traversable", + "json", "maybe", + "node-fs", + "node-path", + "node-process", "prelude", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "spec", @@ -327,6 +335,7 @@ ], "build_plan": [ "aff", + "aff-promise", "ansi", "argonaut-codecs", "argonaut-core", @@ -334,6 +343,7 @@ "arrays", "assert", "avar", + "b64", "bifunctors", "catenable-lists", "codec", @@ -342,15 +352,21 @@ "const", "contravariant", "control", + "convertable-options", "datetime", + "debug", "distributive", + "dodo-printer", + "dotenv", "effect", "either", + "encoding", "enums", "exceptions", "exists", "exitcodes", "fetch", + "filterable", "fixed-points", "foldable-traversable", "foreign", @@ -362,7 +378,9 @@ "functors", "gen", "graphs", + "heterogeneous", "http-methods", + "httpurple", "identity", "integers", "invariant", @@ -370,27 +388,39 @@ "js-fetch", "js-promise", "js-promise-aff", + "js-timers", "js-uri", "json", + "justifill", "language-cst-parser", "lazy", "lcg", "lists", + "literals", "maybe", "media-types", "mmorph", "newtype", "node-buffer", + "node-child-process", "node-event-emitter", + "node-execa", "node-fs", + "node-http", + "node-human-signals", + "node-net", + "node-os", "node-path", "node-process", "node-streams", + "node-tls", + "node-url", "nonempty", "now", "nullable", "numbers", "open-memoize", + "options", "optparse", "ordered-collections", "orders", @@ -402,19 +432,26 @@ "prelude", "profunctor", "profunctor-lenses", + "psci-support", "quickcheck", + "quickcheck-laws", "random", "record", + "record-studio", "refs", + "registry-app", + "registry-foreign", "registry-lib", 
"registry-test-utils", "routing-duplex", + "run", "safe-coerce", "spec", "spec-node", "st", "strings", "tailrec", + "these", "transformers", "tuples", "type-equality", @@ -422,6 +459,9 @@ "unfoldable", "unicode", "unsafe-coerce", + "unsafe-reference", + "untagged-union", + "uuidv4", "variant", "web-dom", "web-events", diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs new file mode 100644 index 000000000..c69af3645 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/Fixtures.purs @@ -0,0 +1,70 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. +module Registry.Test.E2E.Fixtures + ( effectPublishData + , failingPublishData + , trusteeAuthenticatedData + ) where + +import Prelude + +import Data.Codec.JSON as CJ +import Data.Maybe (Maybe(..)) +import JSON as JSON +import Registry.Location as Location +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData, UnpublishData) +import Registry.Operation as Operation +import Registry.SSH (Signature(..)) +import Registry.Test.Utils as Utils + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: PublishData +effectPublishData = + { name: Utils.unsafePackageName "effect" + , location: Just $ Location.GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" + } + +-- | Publish data for prelude@6.0.1, which already exists in metadata fixtures. +-- | Used to test failure scenarios (duplicate publish) in E2E tests. 
+failingPublishData :: PublishData +failingPublishData = + { name: Utils.unsafePackageName "prelude" + , location: Just $ Location.GitHub + { owner: "purescript" + , repo: "purescript-prelude" + , subdir: Nothing + } + , ref: "v6.0.1" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: Utils.unsafeVersion "6.0.1" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. +trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: Utils.unsafePackageName "prelude" + , version: Utils.unsafeVersion "6.0.1" + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + in + { payload: Unpublish unpublishPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } diff --git a/test-utils/src/Registry/Test/E2E/WireMock.purs b/test-utils/src/Registry/Test/E2E/WireMock.purs new file mode 100644 index 000000000..6895d9e44 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/WireMock.purs @@ -0,0 +1,164 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +module Registry.Test.E2E.WireMock + ( WireMockConfig + , WireMockRequest + , WireMockError(..) 
+ , configFromEnv + , getRequests + , getRequestsOrFail + , clearRequests + , clearRequestsOrFail + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Prelude + +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Trans.Class (lift) +import Data.Array as Array +import Data.Bifunctor (lmap) +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Either (Either(..)) +import Data.Int as Int +import Data.Maybe (Maybe(..)) +import Data.String as String +import Effect (Effect) +import Effect.Aff (Aff) +import Effect.Aff as Aff +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import Effect.Exception (Error) +import JSON as JSON +import Node.Process as Process +import Codec.JSON.DecodeError as CJ.DecodeError + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Create config from GITHUB_API_URL environment variable. +-- | Convenience for tests that need to inspect GitHub mock requests. +-- | Each WireMock instance has its own admin API on the same port. 
+configFromEnv :: Effect WireMockConfig +configFromEnv = do + maybeUrl <- Process.lookupEnv "GITHUB_API_URL" + case maybeUrl of + Nothing -> Effect.Exception.throw "GITHUB_API_URL environment variable is not set." + Just baseUrl -> pure { baseUrl } + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. 
CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from WireMock's journal +getRequests :: WireMockConfig -> Aff (Either WireMockError (Array WireMockRequest)) +getRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from WireMock's journal +clearRequests :: WireMockConfig -> Aff (Either WireMockError Unit) +clearRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Get requests, throwing on error. Useful in tests where failure should abort. +getRequestsOrFail :: WireMockConfig -> Aff (Array WireMockRequest) +getRequestsOrFail config = do + result <- getRequests config + case result of + Left err -> + throwError $ Aff.error $ "Failed to get WireMock requests: " <> printWireMockError err + Right rs -> + pure rs + +-- | Clear requests, throwing on error. Useful in test setup. 
+clearRequestsOrFail :: WireMockConfig -> Aff Unit +clearRequestsOrFail config = do + result <- clearRequests config + case result of + Left err -> + Aff.throwError $ Aff.error $ "Failed to clear WireMock journal: " <> printWireMockError err + Right _ -> + pure unit + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests requests = String.joinWith "\n" $ map formatRequest requests + where + formatRequest r = r.method <> " " <> r.url <> case r.body of + Nothing -> "" + Just b -> "\n Body: " <> b + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. 
MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ + msg <> "\n\nCaptured requests:\n" <> formatRequests requests From 5ae9449a9d5facd554590298f9e9dc897d87276e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 22 Dec 2025 18:18:01 -0500 Subject: [PATCH 17/36] clean up test failures --- .env.example | 16 ---------------- app/src/App/GitHubIssue.purs | 7 ++++--- nix/test/config.nix | 27 +++++++++++++++------------ nix/test/integration.nix | 2 +- nix/test/test-env.nix | 2 +- 5 files changed, 21 insertions(+), 33 deletions(-) diff --git a/.env.example b/.env.example index 4873fe0a0..78a8fbebb 100644 --- a/.env.example +++ b/.env.example @@ -13,22 +13,6 @@ SERVER_PORT=9000 # - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" - -# ----------------------------------------------------------------------------- -# External Service URLs (optional overrides, have production defaults) -# ----------------------------------------------------------------------------- -# These default to production URLs in the app. 
Set these only when: -# - Running tests (test-env sets these automatically) -# - Using custom/staging infrastructure - -# GITHUB_API_URL="https://api.github.com" -# S3_API_URL="https://packages.registry.purescript.org" -# S3_BUCKET_URL="https://ams3.digitaloceanspaces.com" -# PURSUIT_API_URL="https://pursuit.purescript.org" -# REGISTRY_API_URL="https://registry.purescript.org/api" -# HEALTHCHECKS_URL="https://hc-ping.com/your-uuid" - - # ----------------------------------------------------------------------------- # Secrets (required for production, use dummy values for local dev) # ----------------------------------------------------------------------------- diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index ced1add6a..812cc8131 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -99,9 +99,10 @@ runGitHubIssue env = do Right (Authenticated auth) -> do -- Re-sign with pacchettibotti if submitter is a trustee signed <- signPacchettiBottiIfTrustee auth - let endpoint = case signed.payload of - Unpublish _ -> "/v1/unpublish" - Transfer _ -> "/v1/transfer" + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } -- Submit to the registry API diff --git a/nix/test/config.nix b/nix/test/config.nix index 441d67765..3c06276e5 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -38,6 +38,15 @@ let healthchecks = "http://localhost:${toString ports.healthchecks}"; }; + # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations). + # These are test-only keys, not used in production. 
+ testKeys = { + # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org + public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK"; + # OpenSSH format private key + private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo="; + }; + # Complete test environment - starts with .env.example defaults which include # mock secrets, then overrides external services with mock URLs. 
The DATABASE_URL # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are @@ -50,22 +59,14 @@ let S3_BUCKET_URL = mockUrls.bucket; PURSUIT_API_URL = mockUrls.pursuit; HEALTHCHECKS_URL = mockUrls.healthchecks; + PACCHETTIBOTTI_ED25519_PUB = testKeys.public; + PACCHETTIBOTTI_ED25519 = testKeys.private; }; envToExports = env: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: value: ''export ${name}="${value}"'') env); - # Pre-built shell exports for E2E test runners (used by test-env.nix and integration.nix) - testRunnerExports = '' - export SERVER_PORT="${toString ports.server}" - export REGISTRY_API_URL="${testEnv.REGISTRY_API_URL}" - export GITHUB_API_URL="${testEnv.GITHUB_API_URL}" - export PACCHETTIBOTTI_TOKEN="${testEnv.PACCHETTIBOTTI_TOKEN}" - export PACCHETTIBOTTI_ED25519_PUB="${testEnv.PACCHETTIBOTTI_ED25519_PUB}" - export PACCHETTIBOTTI_ED25519="${testEnv.PACCHETTIBOTTI_ED25519}" - ''; - # Git mock that redirects URLs to local fixtures; this is necessary because otherwise # commands would reach out to GitHub or the other package origins. 
gitMock = pkgs.writeShellScriptBin "git" '' @@ -206,7 +207,10 @@ let headers."Content-Type" = "application/json"; # Return packaging-team-user as a packaging team member for trustee re-signing tests jsonBody = [ - { login = "packaging-team-user"; id = 1; } + { + login = "packaging-team-user"; + id = 1; + } ]; }; } @@ -534,7 +538,6 @@ in defaultStateDir mockUrls testEnv - testRunnerExports envToExports gitMock gitMockOverlay diff --git a/nix/test/integration.nix b/nix/test/integration.nix index b178bfdf4..bc4f333e0 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -59,7 +59,7 @@ else export STATE_DIR=$TMPDIR/state # Export test environment variables for E2E test runners - ${testEnv.testConfig.testRunnerExports} + ${testEnv.testConfig.envToExports testEnv.testConfig.testEnv} mkdir -p $STATE_DIR diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index e27ef6376..ff5c4d57b 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -96,7 +96,7 @@ let set -e # Export test environment variables for E2E test runners - ${testConfig.testRunnerExports} + ${testConfig.envToExports testConfig.testEnv} if [ -z "''${STATE_DIR:-}" ]; then STATE_DIR="$(mktemp -d)" From ad6c3284686c593531f0ce890c48830b47a069d0 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 22 Dec 2025 18:41:08 -0500 Subject: [PATCH 18/36] reinstate missing comments --- app/src/App/GitHubIssue.purs | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 812cc8131..527027607 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -323,6 +323,8 @@ readOperation eventPath = do IssueEvent { issueNumber, body, username } <- case JSON.parse fileContents >>= decodeIssueEvent of Left err -> + -- If we don't receive a valid event path or the contents can't be decoded + -- then this is a catastrophic error and we exit the workflow. 
Aff.throwError $ Aff.error $ "Error while parsing json from " <> eventPath <> " : " <> err Right event -> pure event @@ -356,6 +358,10 @@ readOperation eventPath = do Right operation -> pure $ DecodedOperation issueNumber username operation +-- | Users may submit issues with contents wrapped in code fences, perhaps with +-- | a language specifier, trailing lines, and other issues. This rudimentary +-- | cleanup pass retrieves all contents within an opening { and closing } +-- | delimiter. firstObject :: String -> String firstObject input = fromMaybe input do before <- String.indexOf (String.Pattern "{") input @@ -363,6 +369,9 @@ firstObject input = fromMaybe input do after <- String.lastIndexOf (String.Pattern "}") start pure (String.take (after + 1) start) +-- | An event triggered by a GitHub workflow, specifically via an issue comment +-- | or issue creation. +-- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment newtype IssueEvent = IssueEvent { issueNumber :: IssueNumber , body :: String @@ -379,9 +388,22 @@ decodeIssueEvent json = lmap CJ.DecodeError.print do issueObject <- Octokit.atKey "issue" CJ.jobject object issueNumber <- Octokit.atKey "number" CJ.int issueObject + -- We accept issue creation and issue comment events, but both contain an + -- 'issue' field. However, only comments contain a 'comment' field. For that + -- reason we first try to parse the comment and fall back to the issue if + -- that fails. 
body <- Octokit.atKey "body" CJ.string =<< Octokit.atKey "comment" CJ.jobject object <|> pure issueObject pure $ IssueEvent { body, username, issueNumber: IssueNumber issueNumber } +-- | Re-sign a payload as pacchettibotti if the authenticated operation was +-- | submitted by a registry trustee. +-- +-- @pacchettibotti is considered an 'owner' of all packages for authenticated +-- operations. Registry trustees can ask pacchettibotti to perform an action on +-- behalf of a package by submitting a payload with an empty signature. If the +-- payload was submitted by a trustee (ie. a member of the packaging team) then +-- pacchettibotti will re-sign it and add itself as an owner before continuing +-- with the authenticated operation. signPacchettiBottiIfTrustee :: forall r . AuthenticatedData From 6c023cfd86e383b4b1de7e3825d552821142aa92 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 22 Dec 2025 20:22:31 -0500 Subject: [PATCH 19/36] Remove COMMENT effect, add NOTIFY log --- app-e2e/src/Test/E2E/GitHubIssue.purs | 9 ++- app/src/App/API.purs | 68 +++++++++---------- app/src/App/Effect/Comment.purs | 68 ------------------- app/src/App/Effect/Log.purs | 8 ++- app/src/App/GitHubIssue.purs | 8 +-- app/src/App/SQLite.purs | 2 +- app/src/App/Server/Env.purs | 5 +- app/test/Test/Assert/Run.purs | 4 -- lib/src/API/V1.purs | 6 +- nix/test/test-env.nix | 4 +- scripts/src/LegacyImporter.purs | 2 - scripts/src/PackageDeleter.purs | 2 - scripts/src/PackageSetUpdater.purs | 2 - scripts/src/PackageTransferrer.purs | 2 - scripts/src/Solver.purs | 2 - .../src/Registry/Test/E2E/Fixtures.purs | 40 ++++++----- 16 files changed, 81 insertions(+), 151 deletions(-) delete mode 100644 app/src/App/Effect/Comment.purs diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs index b1931aaab..be9f3ba8f 100644 --- a/app-e2e/src/Test/E2E/GitHubIssue.purs +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -40,7 +40,7 @@ spec = do assertIssueClosed result 
Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do - result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.failingPublishData + result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEventFrom "random-user" Fixtures.failingTransferData assertJobFailed result assertHasComment jobStartedText result @@ -102,11 +102,14 @@ mkGitHubPublishEvent publishData = JSON.print $ CJ.encode githubEventCodec event mkGitHubAuthenticatedEvent :: AuthenticatedData -> String -mkGitHubAuthenticatedEvent authData = +mkGitHubAuthenticatedEvent = mkGitHubAuthenticatedEventFrom packagingTeamUsername + +mkGitHubAuthenticatedEventFrom :: String -> AuthenticatedData -> String +mkGitHubAuthenticatedEventFrom username authData = let authJson = JSON.print $ CJ.encode Operation.authenticatedCodec authData body = "```json\n" <> authJson <> "\n```" - event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + event = { sender: { login: username }, issue: { number: testIssueNumber, body } } in JSON.print $ CJ.encode githubEventCodec event diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 06d1ed943..c5d174e94 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -58,8 +58,6 @@ import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) @@ -115,7 +113,7 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + 
GITHUB + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit packageSetUpdate2 {} = do @@ -228,18 +226,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." + Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." -type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -299,7 +297,7 @@ authenticated auth = case auth.payload of Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" 
+ Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -330,11 +328,11 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be @@ -450,13 +448,13 @@ publish maybeLegacyIndex payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." 
SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr Right config -> case SpagoYaml.spagoYamlToManifest config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -465,7 +463,7 @@ publish maybeLegacyIndex payload = do pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." + Log.notice $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -481,7 +479,7 @@ publish maybeLegacyIndex payload = do Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -556,7 +554,7 @@ publish maybeLegacyIndex payload = do ] Nothing | payload.compiler < Purs.minPursuitPublish -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. 
Unfortunately, it is not possible to publish to Pursuit via the " , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish @@ -565,7 +563,7 @@ publish maybeLegacyIndex payload = do pure Nothing Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." ] @@ -596,7 +594,7 @@ publish maybeLegacyIndex payload = do Left publishErr -> Except.throw publishErr Right _ -> do FS.Extra.remove tmp - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" pure Nothing -- In this case the package version has not been published, so we proceed @@ -606,7 +604,7 @@ publish maybeLegacyIndex payload = do compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions - Comment.comment "Verifying unused and/or missing dependencies..." + Log.notice "Verifying unused and/or missing dependencies..." -- First we install the resolutions and call 'purs graph' to adjust the -- manifest as needed, but we defer compilation until after this check @@ -695,7 +693,7 @@ publish maybeLegacyIndex payload = do -- Now that we have the package source contents we can verify we can compile -- the package with exactly what is going to be uploaded. - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Verifying package compiles using compiler " , Version.print payload.compiler , " and resolutions:\n" @@ -720,7 +718,7 @@ publish maybeLegacyIndex payload = do Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error Right _ -> pure unit - Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..." + Log.notice "Package source is verified! 
Packaging tarball and uploading to the storage backend..." let tarballName = packageDirname <> ".tar.gz" let tarballPath = Path.concat [ tmp, tarballName ] Tar.create { cwd: tmp, folderName: packageDirname } @@ -731,7 +729,7 @@ publish maybeLegacyIndex payload = do Operation.Validation.ExceedsMaximum maxPackageBytes -> Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes." Operation.Validation.WarnPackageSize maxWarnBytes -> - Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." + Log.notice $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes." -- If a package has under ~30 bytes it's about guaranteed that packaging the -- tarball failed. This can happen if the system running the API has a non- @@ -750,7 +748,7 @@ publish maybeLegacyIndex payload = do let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) - Comment.comment "Successfully uploaded package to the registry! 🎉 🚀" + Log.notice "Successfully uploaded package to the registry! 🎉 🚀" -- We write to the registry index if possible. If this fails, the packaging -- team should manually insert the entry. @@ -758,7 +756,7 @@ publish maybeLegacyIndex payload = do Registry.writeManifest manifest Registry.mirrorLegacyRegistry payload.name newMetadata.location - Comment.comment "Mirrored registry operation to the legacy registry!" + Log.notice "Mirrored registry operation to the legacy registry!" 
Log.debug "Uploading package documentation to Pursuit" if payload.compiler >= Purs.minPursuitPublish then @@ -768,11 +766,11 @@ publish maybeLegacyIndex payload = do publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of Left publishErr -> do Log.error publishErr - Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" , Version.print payload.compiler , "). If you want to publish documentation, please try again with a later compiler." @@ -782,7 +780,7 @@ publish maybeLegacyIndex payload = do -- when running the server) this will be taken care of by followup jobs invoking -- the MatrixBuilder for each compiler version for_ maybeLegacyIndex \_idx -> do - Comment.comment "Determining all valid compiler versions for this package..." + Log.notice "Determining all valid compiler versions for this package..." 
allCompilers <- PursVersions.pursVersions { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } @@ -797,13 +795,13 @@ publish maybeLegacyIndex payload = do unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) - Comment.comment "Wrote completed metadata to the registry!" + Log.notice "Wrote completed metadata to the registry!" FS.Extra.remove tmp pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } @@ -969,7 +967,7 @@ type PublishToPursuit = publishToPursuit :: forall r . 
PublishToPursuit - -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit) + -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit) publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir @@ -1170,7 +1168,7 @@ conformLegacyManifest -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let manifestRequired :: SemigroupMap PackageName Intersection @@ -1267,7 +1265,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p UnusedDependencies names -> do Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1276,7 +1274,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p MissingDependencies names -> do Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1287,7 +1285,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let trimmed = Map.difference manifest.dependencies unused' Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies 
= trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. -module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. 
(Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index a1cb72c0a..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). 
module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 527027607..e3eb353aa 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -185,10 +185,10 @@ pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 Right job -> do let info = V1.jobInfo job - -- Post any new logs (filtered to Info level and above, and after lastTimestamp) + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) let newLogs = Array.filter isNewLog info.logs - isNewLog l = l.level >= V1.Info && case lastTimestamp of + isNewLog l = l.level >= V1.Notice && case lastTimestamp of Nothing -> true Just ts -> l.timestamp > ts unless (Array.null newLogs) do @@ -228,8 +228,8 @@ fetchJob registryApiUrl (V1.JobId jobId) since = do let baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId url = case since of - Nothing -> baseUrl <> "?level=INFO" - Just ts -> baseUrl <> "?level=INFO&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts result <- Aff.attempt $ Fetch.fetch url { method: GET } case result of Left err -> pure $ Left $ "Network 
error: " <> Aff.message err diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index bf7bd3f69..783b5f756 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -42,8 +42,8 @@ module Registry.App.SQLite import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError -import Data.Array as Array import Control.Monad.Except (runExceptT) +import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 07baa935c..335764eef 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -14,8 +14,6 @@ import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) @@ -120,7 +118,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv @@ -174,7 +172,6 @@ runEffects env operation = Aff.attempt do Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) ) # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog # Log.interpret ( \log -> case env.jobId of Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs 
index 42cc7d6ab..8c3e24195 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -30,8 +30,6 @@ import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -89,7 +87,6 @@ type TEST_EFFECTS = + GITHUB_CACHE + LEGACY_CACHE + COMPILER_CACHE - + COMMENT + LOG + EXCEPT String + AFF @@ -129,7 +126,6 @@ runTestEffects env operation = Aff.attempt do # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 8c08d181d..fb4bd3b54 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -301,7 +301,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object , timestamp: Internal.Codec.iso8601DateTime } -data LogLevel = Debug | Info | Warn | Error +data LogLevel = Debug | Info | Warn | Notice | Error derive instance Eq LogLevel derive instance Ord LogLevel @@ -311,6 +311,7 @@ printLogLevel = case _ of Debug -> "DEBUG" Info -> "INFO" Warn -> "WARN" + Notice -> "NOTICE" Error -> "ERROR" -- These numbers are not consecutive so that we can insert new log levels if need be @@ -319,6 +320,7 @@ logLevelToPriority = case _ of Debug -> 0 Info -> 10 Warn -> 20 + Notice -> 25 Error -> 30 logLevelFromPriority :: Int -> Either String LogLevel @@ -326,6 +328,7 @@ logLevelFromPriority = case _ of 0 -> Right Debug 10 
-> Right Info 20 -> Right Warn + 25 -> Right Notice 30 -> Right Error other -> Left $ "Invalid log level priority: " <> show other @@ -334,5 +337,6 @@ parseLogLevel = case _ of "DEBUG" -> Right Debug "INFO" -> Right Info "WARN" -> Right Warn + "NOTICE" -> Right Notice "ERROR" -> Right Error other -> Left $ "Invalid log level: " <> other diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index ff5c4d57b..f7d7fb058 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -131,8 +131,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 783ee353c..910233047 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -59,7 +59,6 @@ import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -189,7 +188,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 925361fb2..399af2e93 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -20,7 +20,6 @@ import Registry.App.API 
(_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -158,7 +157,6 @@ main = launchAff_ do >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) >>> Cache.interpret _compilerCache (Cache.handleFs cache) - >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv >>> Run.runBaseAff' diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs index 95053eed1..29423cf7b 100644 --- a/scripts/src/PackageSetUpdater.purs +++ b/scripts/src/PackageSetUpdater.purs @@ -19,7 +19,6 @@ import Node.Path as Path import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) @@ -114,7 +113,6 @@ main = Aff.launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs index d203c66de..31e859197 100644 --- a/scripts/src/PackageTransferrer.purs +++ b/scripts/src/PackageTransferrer.purs @@ -16,7 +16,6 @@ import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git import 
Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -87,7 +86,6 @@ main = launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runPacchettiBottiEnv { privateKey, publicKey } # Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index aa2820e16..51f2ef993 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -31,7 +31,6 @@ import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -150,7 +149,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Run.runBaseAff' diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs index c69af3645..70f1242b0 100644 --- a/test-utils/src/Registry/Test/E2E/Fixtures.purs +++ b/test-utils/src/Registry/Test/E2E/Fixtures.purs @@ -2,7 +2,7 @@ -- | Contains package operation data used across multiple test suites. 
module Registry.Test.E2E.Fixtures ( effectPublishData - , failingPublishData + , failingTransferData , trusteeAuthenticatedData ) where @@ -12,14 +12,14 @@ import Data.Codec.JSON as CJ import Data.Maybe (Maybe(..)) import JSON as JSON import Registry.Location as Location -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData, UnpublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) import Registry.Operation as Operation import Registry.SSH (Signature(..)) import Registry.Test.Utils as Utils -- | Standard publish data for effect@4.0.0, used by E2E tests. -- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 -effectPublishData :: PublishData +effectPublishData :: Operation.PublishData effectPublishData = { name: Utils.unsafePackageName "effect" , location: Just $ Location.GitHub @@ -33,21 +33,27 @@ effectPublishData = , version: Utils.unsafeVersion "4.0.0" } --- | Publish data for prelude@6.0.1, which already exists in metadata fixtures. --- | Used to test failure scenarios (duplicate publish) in E2E tests. -failingPublishData :: PublishData -failingPublishData = - { name: Utils.unsafePackageName "prelude" - , location: Just $ Location.GitHub - { owner: "purescript" - , repo: "purescript-prelude" - , subdir: Nothing +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. 
+failingTransferData :: AuthenticatedData +failingTransferData = + let + transferPayload :: TransferData + transferPayload = + { name: Utils.unsafePackageName "prelude" + , newLocation: Location.GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } } - , ref: "v6.0.1" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - , version: Utils.unsafeVersion "6.0.1" - } + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + in + { payload: Transfer transferPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } -- | Authenticated data with an intentionally invalid signature. -- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. From e69b875acd77cd1e4c2ba926d488b8ab85a3da69 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Thu, 25 Dec 2025 22:43:25 +0100 Subject: [PATCH 20/36] Implement endpoint for returning jobs --- app/src/App/Effect/Db.purs | 28 ++++++++--------- app/src/App/SQLite.js | 39 ++++++++++++++++++++++++ app/src/App/SQLite.purs | 55 +++++++++++++++++++++++++++++++++- app/src/App/Server/Router.purs | 19 ++++++++---- lib/src/API/V1.purs | 7 +++-- 5 files changed, 124 insertions(+), 24 deletions(-) diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 031c91a62..2be13c39b 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,22 +8,7 @@ import Data.String as String import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite - ( FinishJob - , InsertMatrixJob - , InsertPackageSetJob - , InsertPublishJob - , InsertTransferJob - , InsertUnpublishJob - , MatrixJobDetails - , PackageSetJobDetails - , PublishJobDetails - , SQLite - , SelectJobRequest - , StartJob - , TransferJobDetails - , UnpublishJobDetails - ) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageSetJob, 
InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PackageSetJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run @@ -48,6 +33,7 @@ data Db a | FinishJob FinishJob a | StartJob StartJob a | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectJobs SelectJobsRequest (Array Job -> a) | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) @@ -81,6 +67,10 @@ finishJob job = Run.lift _db (FinishJob job unit) selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow +-- | Select a list of the latest jobs from the database +selectJobs :: forall r. SelectJobsRequest -> Run (DB + EXCEPT String + r) (Array Job) +selectJobs request = Run.lift _db (SelectJobs request identity) + -- | Insert a new publish job into the database. insertPublishJob :: forall r. 
InsertPublishJob -> Run (DB + r) JobId insertPublishJob job = Run.lift _db (InsertPublishJob job identity) @@ -169,6 +159,12 @@ handleSQLite env = case _ of result <- Run.liftEffect $ SQLite.selectJob env.db request pure $ reply result + SelectJobs request reply -> do + { failed, jobs } <- Run.liftEffect $ SQLite.selectJobs env.db request + unless (Array.null failed) do + Log.warn $ "Some jobs were not readable: " <> String.joinWith "\n" failed + pure $ reply jobs + SelectNextPublishJob reply -> do result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db pure $ reply result diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8b0a1765e..9a229d16d 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -134,6 +134,45 @@ export const selectPackageSetJobImpl = (db, jobId) => { return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; +const _selectJobs = (db, { table, since, includeCompleted }) => { + let query = ` + SELECT job.*, info.* + FROM ${table} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE timestamp >= ? 
+ `; + let params = [since]; + + if (includeCompleted === false) { + query += ` AND info.finishedAt IS NULL`; + } + + query += ` ORDER BY info.createdAt ASC LIMIT 100`; + const stmt = db.prepare(query); + + return stmt.all(...params); +} + +export const selectPublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PUBLISH_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectUnpublishJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: UNPUBLISH_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectTransferJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: TRANSFER_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectMatrixJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: MATRIX_JOBS_TABLE, since, includeCompleted }); +}; + +export const selectPackageSetJobsImpl = (db, since, includeCompleted) => { + return _selectJobs(db, { table: PACKAGE_SET_JOBS_TABLE, since, includeCompleted }); +}; + export const startJobImpl = (db, args) => { const stmt = db.prepare(` UPDATE ${JOB_INFO_TABLE} diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 783b5f756..048ba27a4 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -17,6 +17,7 @@ module Registry.App.SQLite , PublishJobDetails , SQLite , SelectJobRequest + , SelectJobsRequest , StartJob , TransferJobDetails , UnpublishJobDetails @@ -30,6 +31,7 @@ module Registry.App.SQLite , insertUnpublishJob , resetIncompleteJobs , selectJob + , selectJobs , selectLogsByJob , selectNextMatrixJob , selectNextPackageSetJob @@ -43,17 +45,20 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError import Control.Monad.Except (runExceptT) +import Data.Array (sortBy, take) import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime +import Data.Function (on) import Data.Nullable as Nullable import 
Data.String as String import Data.UUID.Random as UUID -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried import Record as Record import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 +import Registry.API.V1 as V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) @@ -248,6 +253,44 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do ) maybeJobDetails +type SelectJobsRequest = + { since :: DateTime + , includeCompleted :: Boolean + } + +selectJobs :: SQLite -> SelectJobsRequest -> Effect { failed :: Array String, jobs :: Array Job } +selectJobs db { since, includeCompleted } = do + publishJobs <- selectPublishJobs + unpublishJobs <- selectUnpublishJobs + transferJobs <- selectTransferJobs + matrixJobs <- selectMatrixJobs + packageSetJobs <- selectPackageSetJobs + let + { fail: failedJobs, success: allJobs } = partitionEithers + (publishJobs <> unpublishJobs <> transferJobs <> matrixJobs <> packageSetJobs) + pure { failed: failedJobs, jobs: take 100 $ sortBy (compare `on` (V1.jobInfo >>> _.createdAt)) allJobs } + + where + selectPublishJobs = do + jobs <- Uncurried.runEffectFn3 selectPublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" }) <<< publishJobDetailsFromJSRep) jobs + + selectUnpublishJobs = do + jobs <- Uncurried.runEffectFn3 selectUnpublishJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" }) <<< unpublishJobDetailsFromJSRep) jobs + + selectTransferJobs = do + jobs <- 
Uncurried.runEffectFn3 selectTransferJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" }) <<< transferJobDetailsFromJSRep) jobs + + selectMatrixJobs = do + jobs <- Uncurried.runEffectFn3 selectMatrixJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" }) <<< matrixJobDetailsFromJSRep) jobs + + selectPackageSetJobs = do + jobs <- Uncurried.runEffectFn3 selectPackageSetJobsImpl db (DateTime.format Internal.Format.iso8601DateTime since) includeCompleted + pure $ map (map (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" }) <<< packageSetJobDetailsFromJSRep) jobs + -------------------------------------------------------------------------------- -- publish_jobs table @@ -295,6 +338,8 @@ publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, create foreign import selectPublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPublishJobDetails) +foreign import selectPublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPublishJobDetails) + selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) selectNextPublishJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db Nullable.null @@ -378,6 +423,8 @@ unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, crea foreign import selectUnpublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSUnpublishJobDetails) +foreign import selectUnpublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSUnpublishJobDetails) + selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) selectNextUnpublishJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db Nullable.null @@ -463,6 
+510,8 @@ transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, foreign import selectTransferJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSTransferJobDetails) +foreign import selectTransferJobsImpl :: EffectFn3 SQLite String Boolean (Array JSTransferJobDetails) + selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) selectNextTransferJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db Nullable.null @@ -586,6 +635,8 @@ matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSMatrixJobDetails) +foreign import selectMatrixJobsImpl :: EffectFn3 SQLite String Boolean (Array JSMatrixJobDetails) + selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) selectNextMatrixJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null @@ -630,6 +681,8 @@ packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) +foreign import selectPackageSetJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPackageSetJobDetails) + selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) selectNextPackageSetJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db Nullable.null diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 9143508de..020ee6457 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -72,17 +72,26 @@ router { route, method, body } = HTTPurple.usingCont case route, method of _ -> HTTPurple.badRequest "Expected transfer operation." 
- -- TODO return jobs - Jobs, Get -> do - _now <- liftEffect nowUTC - jsonOk (CJ.array V1.jobCodec) [] + Jobs { since, include_completed }, Get -> do + -- TODO should probably be 1h ago instead of now + now <- liftEffect nowUTC + lift + ( Run.Except.runExcept $ Db.selectJobs + { includeCompleted: fromMaybe false include_completed + , since: fromMaybe now since + } + ) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching jobs: " <> err + HTTPurple.internalServerError $ "Error while fetching jobs: " <> err + Right jobs -> jsonOk (CJ.array V1.jobCodec) jobs Job jobId { level: maybeLogLevel, since }, Get -> do now <- liftEffect nowUTC lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe now since }) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound + HTTPurple.internalServerError $ "Error while fetching job: " <> err Right Nothing -> do HTTPurple.notFound Right (Just job) -> jsonOk V1.jobCodec job diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index fb4bd3b54..fee64ef3c 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -61,7 +61,7 @@ data Route = Publish | Unpublish | Transfer - | Jobs + | Jobs { since :: Maybe DateTime, include_completed :: Maybe Boolean } | Job JobId { level :: Maybe LogLevel, since :: Maybe DateTime } | Status @@ -72,7 +72,10 @@ routes = Routing.root $ Routing.prefix "api" $ Routing.prefix "v1" $ RoutingG.su { "Publish": "publish" / RoutingG.noArgs , "Unpublish": "unpublish" / RoutingG.noArgs , "Transfer": "transfer" / RoutingG.noArgs - , "Jobs": "jobs" / RoutingG.noArgs + , "Jobs": "jobs" ? + { since: Routing.optional <<< timestampP <<< Routing.string + , include_completed: Routing.optional <<< Routing.boolean + } , "Job": "jobs" / ( jobIdS ? 
{ level: Routing.optional <<< logLevelP <<< Routing.string From c33a3ad81bb9c6823a9cd1e4ea67338a05c23fc9 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Thu, 25 Dec 2025 22:46:42 +0100 Subject: [PATCH 21/36] Check for existing jobs before enqueueing new ones --- app/src/App/Effect/Db.purs | 27 +++++++++++ app/src/App/SQLite.js | 27 +++++++---- app/src/App/SQLite.purs | 85 ++++++++++++++++++++++++++-------- app/src/App/Server/Router.purs | 45 +++++++++++++----- 4 files changed, 144 insertions(+), 40 deletions(-) diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 2be13c39b..2e3c934f4 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -39,6 +39,9 @@ data Db a | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a) + | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a) + | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a) | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) | ResetIncompleteJobs a @@ -115,6 +118,18 @@ selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.ret selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow +-- | Lookup a publish job from the database by name and version. +selectPublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectPublishJob packageName packageVersion = Run.lift _db (SelectPublishJob packageName packageVersion identity) >>= Except.rethrow + +-- | Lookup an unpublish job from the database by name and version. 
+selectUnpublishJob :: forall r. PackageName -> Version -> Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails) +selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob packageName packageVersion identity) >>= Except.rethrow + +-- | Lookup a transfer job from the database by name. +selectTransferJob :: forall r. PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow + -- | Delete all incomplete jobs from the database. resetIncompleteJobs :: forall r. Run (DB + r) Unit resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) @@ -185,6 +200,18 @@ handleSQLite env = case _ of result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db pure $ reply result + SelectPublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectPublishJob env.db packageName packageVersion + pure $ reply result + + SelectUnpublishJob packageName packageVersion reply -> do + result <- Run.liftEffect $ SQLite.selectUnpublishJob env.db packageName packageVersion + pure $ reply result + + SelectTransferJob packageName reply -> do + result <- Run.liftEffect $ SQLite.selectTransferJob env.db packageName + pure $ reply result + InsertLogLine log next -> do Run.liftEffect $ SQLite.insertLogLine env.db log pure next diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 9a229d16d..5526b6ad3 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -93,7 +93,7 @@ export const insertPackageSetJobImpl = (db, job) => { return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; -const _selectJob = (db, { table, jobId }) => { +const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { const params = []; let query = ` SELECT job.*, info.* @@ -101,11 +101,18 @@ const _selectJob = (db, { table, jobId }) => { JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId `; - if (jobId === 
null) { - query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; - } else { + if (jobId !== null) { query += ` WHERE info.jobId = ?`; params.push(jobId); + } else if (packageName !== null) { + query += ` WHERE job.packageName = ?`; + params.push(packageName); + if (packageVersion !== null) { + query += ` AND job.packageVersion = ?`; + params.push(packageVersion); + } + } else { + query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`; } query += ` ORDER BY info.createdAt ASC LIMIT 1`; @@ -114,16 +121,16 @@ const _selectJob = (db, { table, jobId }) => { return stmt.get(...params); } -export const selectPublishJobImpl = (db, jobId) => { - return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId }); +export const selectPublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); }; -export const selectUnpublishJobImpl = (db, jobId) => { - return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId }); +export const selectUnpublishJobImpl = (db, { jobId, packageName, packageVersion }) => { + return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId, packageName, packageVersion }); }; -export const selectTransferJobImpl = (db, jobId) => { - return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId }); +export const selectTransferJobImpl = (db, { jobId, packageName }) => { + return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId, packageName }); }; export const selectMatrixJobImpl = (db, jobId) => { diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 048ba27a4..b9bef05d6 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -38,6 +38,9 @@ module Registry.App.SQLite , selectNextPublishJob , selectNextTransferJob , selectNextUnpublishJob + , selectPublishJob + , selectTransferJob + , selectUnpublishJob , startJob ) where @@ -50,6 +53,7 @@ import Data.Array as Array import Data.DateTime (DateTime) import 
Data.Formatter.DateTime as DateTime import Data.Function (on) +import Data.Nullable (notNull, null) import Data.Nullable as Nullable import Data.String as String import Data.UUID.Random as UUID @@ -198,11 +202,11 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since case fail of [] -> runExceptT $ firstJust - [ selectPublishJob logs - , selectMatrixJob logs - , selectTransferJob logs - , selectPackageSetJob logs - , selectUnpublishJob logs + [ selectPublishJobById logs + , selectMatrixJobById logs + , selectTransferJobById logs + , selectPackageSetJobById logs + , selectUnpublishJobById logs ] _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail where @@ -213,31 +217,34 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do Just job -> pure (Just job) Nothing -> next - selectPublishJob logs = ExceptT do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) + selectPublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } pure $ traverse ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) <<< publishJobDetailsFromJSRep ) maybeJobDetails - selectUnpublishJob logs = ExceptT do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db (Nullable.notNull jobId) + selectUnpublishJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db + { jobId: notNull jobId, packageName: null, packageVersion: null } pure $ traverse ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) <<< unpublishJobDetailsFromJSRep ) maybeJobDetails - selectTransferJob logs = ExceptT do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl 
db (Nullable.notNull jobId) + selectTransferJobById logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: notNull jobId, packageName: null } pure $ traverse ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) <<< transferJobDetailsFromJSRep ) maybeJobDetails - selectMatrixJob logs = ExceptT do + selectMatrixJobById logs = ExceptT do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) pure $ traverse ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) @@ -245,7 +252,7 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do ) maybeJobDetails - selectPackageSetJob logs = ExceptT do + selectPackageSetJobById logs = ExceptT do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (Nullable.notNull jobId) pure $ traverse ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) @@ -336,13 +343,28 @@ publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, create , payload: parsed } -foreign import selectPublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPublishJobDetails) +type SelectPublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite SelectPublishParams (Nullable JSPublishJobDetails) foreign import selectPublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPublishJobDetails) selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) selectNextPublishJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db Nullable.null + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + 
+selectPublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe PublishJobDetails)) +selectPublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } pure $ traverse publishJobDetailsFromJSRep maybeJobDetails type InsertPublishJob = @@ -421,13 +443,28 @@ unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, crea , payload: parsed } -foreign import selectUnpublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSUnpublishJobDetails) +type SelectUnpublishParams = + { jobId :: Nullable String + , packageName :: Nullable String + , packageVersion :: Nullable String + } + +foreign import selectUnpublishJobImpl :: EffectFn2 SQLite SelectUnpublishParams (Nullable JSUnpublishJobDetails) foreign import selectUnpublishJobsImpl :: EffectFn3 SQLite String Boolean (Array JSUnpublishJobDetails) selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails)) selectNextUnpublishJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db Nullable.null + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db { jobId: null, packageName: null, packageVersion: null } + pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails + +selectUnpublishJob :: SQLite -> PackageName -> Version -> Effect (Either String (Maybe UnpublishJobDetails)) +selectUnpublishJob db packageName packageVersion = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + , packageVersion: notNull $ Version.print packageVersion + } pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails type InsertUnpublishJob = @@ -508,13 +545,23 @@ transferJobDetailsFromJSRep { 
jobId, packageName, payload, createdAt, startedAt, , payload: parsed } -foreign import selectTransferJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSTransferJobDetails) +type SelectTransferParams = { jobId :: Nullable String, packageName :: Nullable String } + +foreign import selectTransferJobImpl :: EffectFn2 SQLite SelectTransferParams (Nullable JSTransferJobDetails) foreign import selectTransferJobsImpl :: EffectFn3 SQLite String Boolean (Array JSTransferJobDetails) selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) selectNextTransferJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db Nullable.null + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db { jobId: null, packageName: null } + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails + +selectTransferJob :: SQLite -> PackageName -> Effect (Either String (Maybe TransferJobDetails)) +selectTransferJob db packageName = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db + { jobId: null + , packageName: notNull $ PackageName.print packageName + } pure $ traverse transferJobDetailsFromJSRep maybeJobDetails type InsertTransferJob = diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 020ee6457..cdb255e88 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -41,7 +41,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - jobId <- lift $ Db.insertPublishJob { payload: publish } + + jobId <- lift (Db.selectPublishJob publish.name publish.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate publish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + 
Nothing -> do + lift $ Db.insertPublishJob { payload: publish } + jsonOk V1.jobCreatedResponseCodec { jobId } Unpublish, Post -> do @@ -49,11 +56,18 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Unpublish payload -> do lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload - jobId <- lift $ Db.insertUnpublishJob - { payload: payload - , rawPayload: auth.rawPayload - , signature: auth.signature - } + + jobId <- lift (Db.selectUnpublishJob payload.name payload.version) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate unpublish job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected unpublish operation." @@ -63,11 +77,18 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Transfer payload -> do lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload - jobId <- lift $ Db.insertTransferJob - { payload: payload - , rawPayload: auth.rawPayload - , signature: auth.signature - } + + jobId <- lift (Db.selectTransferJob payload.name) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate transfer job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected transfer operation." @@ -96,6 +117,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.notFound Right (Just job) -> jsonOk V1.jobCodec job + -- TODO packageset jobs? 
+ Status, Get -> HTTPurple.emptyResponse Status.ok From e524f004c731e7df60f247bf8b3f607ac07e6a7e Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 4 Jan 2026 20:28:35 +0100 Subject: [PATCH 22/36] Add E2E test: publishing a package enqueues matrix jobs --- app-e2e/src/Test/E2E/Publish.purs | 86 ++++++++++++++++++ .../registry-storage/effect-4.0.0.tar.gz | Bin 0 -> 6262 bytes app/src/App/SQLite.js | 8 +- app/src/App/Server/MatrixBuilder.purs | 8 +- app/src/App/Server/Router.purs | 14 ++- db/schema.sql | 17 +++- nix/test/config.nix | 15 +++ test-utils/src/Registry/Test/E2E/Client.purs | 2 +- 8 files changed, 137 insertions(+), 13 deletions(-) create mode 100644 app/fixtures/registry-storage/effect-4.0.0.tar.gz diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 4168e1610..28893b187 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -5,6 +5,7 @@ module Test.E2E.Publish (spec) where import Prelude import Data.Array as Array +import Data.Codec.JSON as CJ import Data.Either (Either(..)) import Data.Foldable (for_) import Data.Maybe (Maybe(..), isJust) @@ -12,10 +13,18 @@ import Data.String as String import Effect.Aff (Aff) import Effect.Class (liftEffect) import Effect.Class.Console as Console +import JSON as JSON +import Registry.API.V1 (Job(..)) import Registry.API.V1 as V1 +import Registry.Internal.Codec as Internal.Codec +import Registry.Operation as Operation +import Registry.PackageName (PackageName) import Registry.Test.Assert as Assert import Registry.Test.E2E.Client as Client import Registry.Test.E2E.Fixtures as Fixtures +import Registry.Test.Utils (unsafePackageName, unsafeVersion) +import Registry.Version (Version) +import Registry.Version as Version import Test.Spec (Spec) import Test.Spec as Spec @@ -102,3 +111,80 @@ spec = do let sinceLogs = (V1.jobInfo sinceJob).logs for_ sinceLogs \l -> Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) + + Spec.it "kicks off a 
matrix job for 0.15.10 once the package is published" do + config <- getConfig + maybeJobs <- Client.getJobs config + case maybeJobs of + Left err -> Assert.fail $ "Failed to get jobs: " <> Client.printClientError err + Right jobs -> do + let + expectedJobs = + [ { jobType: "publish" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Nothing + , payload: """{"compiler":"0.15.9","location":{"githubOwner":"purescript","githubRepo":"purescript-effect"},"name":"effect","ref":"v4.0.0","version":"4.0.0"}""" + , success: true + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ unsafeVersion "0.15.10" + , payload: """{"prelude":"6.0.1"}""" + , success: true + } + ] + Assert.shouldEqual expectedJobs (map deterministicJob jobs) + +type DeterministicJob = + { jobType :: String + , packageName :: Maybe PackageName + , packageVersion :: Maybe Version + , compilerVersion :: Maybe Version + , payload :: String + , success :: Boolean + } + +deterministicJob :: Job -> DeterministicJob +deterministicJob = case _ of + PublishJob { success, packageName, packageVersion, payload } -> + { jobType: "publish" + , packageName: Just packageName + , packageVersion: Just packageVersion + , compilerVersion: Nothing + , success + , payload: JSON.print $ CJ.encode Operation.publishCodec payload + } + UnpublishJob { success, packageName, packageVersion, payload } -> + { jobType: "unpublish" + , packageName: Just packageName + , packageVersion: Just packageVersion + , compilerVersion: Nothing + , success + , payload: JSON.print $ CJ.encode Operation.authenticatedCodec payload + } + TransferJob { success, packageName, payload } -> + { jobType: "transfer" + , packageName: Just packageName + , packageVersion: Nothing + , compilerVersion: Nothing + , success + , payload: JSON.print $ CJ.encode Operation.authenticatedCodec payload + 
} + MatrixJob { success, packageName, packageVersion, compilerVersion, payload } -> + { jobType: "matrix" + , packageName: Just packageName + , packageVersion: Just packageVersion + , compilerVersion: Just compilerVersion + , success + , payload: JSON.print $ CJ.encode (Internal.Codec.packageMap Version.codec) payload + } + PackageSetJob { success, payload } -> + { jobType: "packageset" + , packageName: Nothing + , packageVersion: Nothing + , compilerVersion: Nothing + , success + , payload: JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + } diff --git a/app/fixtures/registry-storage/effect-4.0.0.tar.gz b/app/fixtures/registry-storage/effect-4.0.0.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..e86537b25c2dff63cd072c7f6af387eaec1afb85 GIT binary patch literal 6262 zcmV-+7>VZ}iwFP!000001MFS@bJ|Lhe?RkA%%!esaULdo$Bti8MQj|i<=8&JFPqxi zdkATONrXg6!j5k*|NC{%jHHo}gs?GryJjjCgEZ>?bk9t8OX_LLZ*H{j7gRH-7ZXV#SH zwq-E9$R&8uoO*8yg=(2ZA-Tk!_*it?fPOHT#G#%B7?Gehw(sziFQR38Os6F{$J~Mm zl#qbR%Q9j>fSqn?U>}`wp-@`|0E`q3_y9mOV+o)#Fcc62*$@Mfav8S1oKwrw{DFC6 zI|UogIApGQ)M1+GhWh1$!Y@;xgE;jvxuT3;g=Z~oP7w=!4&lyh6XF3t9d_bsXP%3w z1wGqw3v=y(j493#z;6?38E};1WB?#@+oMEi1+D{v><+q3-~=BF7{rNpk0UK7PQhJJ z9cK<)#Be@2IA<&|dt6uGA71nZWN>{x{M_kx34FfkU;o}a>zJ6dW@VdmA%F7DjW#syteCqa3FW_tE zL+`RT{KA2p_l8%<>l~PM$W5m|?490TcKYPzwtsUy=n@dCaMl}~UUqt)x@TqJ4jf7M z_wLn@3@$pCmx)p$_K1uhx&X5C;j)XIxRhtTe)n{U;)I`0L34obvP1?q-P0a^=>Dw> z!gTsyN|NTF`(L-v2~Lu;&Zo}D?x0XyqY@aC)7yUc6JiEE3~oORhP~nKuuDE(U!QSB z54!!|!SM`E3YXUduC3caw*(x99nKg?fT{*3`1!-_pvRTgyBc=;{o9*i@A~QhB>xO* z0gN5E^o(ow`ihGR8oKU(L87PwuKW`De9?vDeN-sdR0mZs0F9jv)o#EK6gX65Bv;*! 
zm%WeOtJ5x?y+$gZdxP!)j8$)d9e^aV{oDbbw_Ic#BS0lS_67xi#+LX{h%l330K+ag;aNW`|(*?{6Dtu!RCE+ZR_bgHjn=saBLs{@1pJF|Cik&>wlYz zUTN#rf2&oCt^aDPS>MP1yJ-I`5b_G6x>s+>s|7^=E|2~X#k_b`!qYQ*P8ZsgcbR$K z!hQSZ%@hK)<+!Zd^EcsDZ&C=D5b;_rzy8DE?67`#YHAQDijxp7+b)9`@&V2x9607Z zEO>9=!aKpCrDHyVe;FwNfghX|n?f9mg#RvRzJC3t{Mu*0C;uNnaA8{zTf6>o3+^ob zfE$^mNrPh$qAr#JG>^JogbH6m{*i^Ie*@)-V{qsn9Ew2~5F%4cXFxTOTfx6@sJS$_ z7XGJ&tCNdS__1XZo#y`miMuUJu(|)Qx0;Q;|KCO1`~R2SBI|#fi(YB#)_=VoU;ou+ zZU6jlCoSUtRiAwaUVwqeg2Q}rwAkj(fGo5j-o>!LV)D>BXHd* z{xWou@85ARNq%cTpR@coY-#@Ab@Z+A*xdiunw8q#|L>yh{r}5uk@XLY{yB0$a8YlR z|52?p(EsoAKc2s>jKAUoy8PVJc|7L+uie_)|6R1b{eRhQW&Mjqzg0J|x&5!UYSq2{ z-$hFsf5i>JEuQO^@!7y5d2DX~o9%jiZ~u4E_V)i}x3u;DH+~tB( zv^0<1Q6eU=Tjm4>)t&`oYUxxY4+eQ~6WbvppAYj~YEDW)x*c9&XpSLx2|AvmSuW=F zffrQik4!f40>GLTsub#=TMD#S4 zdM>VwWUh^MTua2rU~J^#-Av})%)z@vXf}JA3qdOxK`S4E5}B+8G0BCaos6TM8%K#u z^FW#ALUWXi=4cZ%B{ItiXqF4paWbak4KbAn%MXj?LRPJ$n54?)*h=K^`4cpN{^&&M^5bb5X{Kl-!#C|%BQ#AV zX(~%r8amRGNWULjRAVbejTsL1&!)ynx*pRtovLdL8BSuIe>|W5+bR0b2rT{u^*_xT zfUH5v8YYaW;v=;(R?j+kM=67s5r_S|4&E$#^s>iz%_w7}x;oGaL%rM4KTa9_jEMJz zk3L&B{OhL4`iX+K4@q++akl*QDHOdkiJaI^qLgmAC(+mSvuOQ%%QfThrZ9QZrA)h@ zIvcn4{BM)@KaWfYc*OgkR&AgExs$ek|MRlj`u9Ii&jHF_|Mg0{T8+Q|X<+{M{{7GM zH<^aRTp}}p3_4*JX5Kt3;D5}S5mRKcG|AGR6oJ!qOGIW){LUw$?~j0CBu$AdG*FGcG&QG$g~6z(g*FPAkFewwFRtb% zIuSOXQv-90%m<=PJn9H)4%I2ULp@`TkF3xI^tCyKJ}^!$S0Hv^qTHHDG&<8ft$Z%U zE}vU9L2lqV*j4_ywU{?5VFH#(#gJNzS{@v+m!=`1!RVO02AD=nM%5v`wkWyR9&n^) zFadBhaX7@qV?Kg%o`}+C{vZUYPZnrEGX*ha_<7B>BJN%_A7CPq`0SKnA6JSMBt7v{ z2PK(z1@lp=7`yR2E6s6dphT`JOPO0Gzy8|!c~35v5{UXuh!dMx;a*m82>$Ddt5pF& zpKs;NvmMH&AhXDq3#R}kV98-Pe6M_x;^5FE&8~#W{pO7i0R>Qai#r>)tS}AlXvEZ3 zrhPo_UR@Z;v3YQkWUQ`}u-lK|v;WEe88n288*GGvU3Rw3`59b)n zw6Tq~@E8n@Jr@NGspHtr;lcsiP*&*)YiNU#9W2TRs|uEp%sGTrFN)}q4-YcG5C`SK zUh)EV>P#T7WfeeXGVdXpKNT5{oY|3ot1ZC|bN==16`2Ho_?&Q&M4=EvADe%pi{NpciJhs3ARjC)a-{8%TF`wn@)cG#3zvAKMp+W&6oAHZc$Aa#e+ zA8TZaS?MtM8~QB17+o@cfb}PQ8LUL~;Jmyix`P!T06qK|f%`ZP7F!0sbDPz}q}HNG$_PaC#auU0Lr_(32(@5pbdDcW&`axWoZHid<># 
z0g&s+4Vy8r_PsETN#?g0-D3K~Bbyl^>tZLSZ;N`Vd^ z6_^piCPemmPcH50C4MY_?)0xBE(^|drc;MbHP3bipbuELWzfD?eDwynxUWhv2ZvF<5o&_6721BEq6O1e>6j5{qJfqclp7_}w!gXYz$MAzWSU>e zIe1GSKos^U?Q1d2mMC6<2pR_W>P$J{_UjdPu>-`|8EcMH*DnbU z!w^4^&8?QkEsJYyJ=YCk)Yrktf>VD4oDK0c^5Vjj zCCZYlwHwJc6t(?SQL{vD|0Iz&)WOlVbdaTsqd$u-HdM~>ww05mq~kw}PM)f@YUR1q zmg%Z1e-`a*L!nBrULfigW8z@KLYWjb<&MHRfP=wT3|p{YI?Y;)<~T}qoI$!KyQdSc zuMpQ0$mcLg%Tp9^nj5Iz*2D658aXPzZB0HMNkY;dgMrfq-ZKd#?tmiz*VBd<7 zN(azR1u)GCVVZ%Xy+Sb^$x#N9*(O+K8Gw!w2&Y3j&V;%^e-(|bCZFdZ{t4lp@}T0BvJLl+nOnE12i z#lmFb1s@I^;}3b|(^aJakLy20j64Ypo-GoGynC;FRbHon2RaDq@}w%TO8%QW(#XEm z310nM{9r9o8E6GqEgvxH!*W2YA+&J}Tup!*$KcgCJeal_0PFbx1}-oM#JYf}$3WJ7 zka`SeUBZNgl>xMo52)fkb3kqQP>mSaMhMo3!EFR^!31OgZsr3V@w+)7H$%uY2D+(& zrZLz}1va5sz+3r%$Gmk8=q(lcBnG|}1wVr9M_ciXPD#-$Vjk~T&qKpu$MC#nx%!v3{ z`EHMk=fCGm@vh?iUfi$#fq2EKR8F;ZoL1ar0kwob z@B|D;aSVA-9Ir#M#_A^EI9|n(2Z=D2nWki|nM|@Sl_WSSphxcipq#;){eP}Dp--p- zxB33RX0yKk{hyt*{r;UU2oU+`~P;)B1H|Be9&)1o}zrD zr<6AEiv;+ad$1_l7Pq4B+fQiu{r}_l0Y75@f3?!y`~O|Ez5jpNZH@om$`637TmSV& zE$;u@&3*mnowRuFJC1rR@m&T2v7hefFZtUpF@G?_9^b4jMW&d#tLycZ?a|k7`Q{kD z+YEQ4`Vj@baud78_-|)8W9B)UgFAK%Dt`jlEyrJmCPgW!ziy%JOKK zfX)4Xqh8(r{=-h%-v7Vsmg4`nbK@`Dn)P38wB!E2R&CYy{(l!Oa{rH8OS_Rfel4+l zAt}bn7iwu`635Ik@=;u=@xQ`LdGMeW4`fO9j1rAY%VTCY>jB0J3sK;v_#YFXpGKDV z$wxM=%A+ciWQiP=`h{9Cte~DPC4@auFs7Y?X0}q3e^vRJP7XaVKpb|Gn&%7XNMMmtRF&v;Lcv@SGq2HS6tt z{I`p?Qt?dIrV=$4f}cy}HOOS$2I$Lw?bNfw4T9oX&mW>b+8@#477-#LeT{!NNav;( zEi)-gBZZHXT)*<5<|!4Zz-Y<(AH3vQkczDo>?=o0JAmshh965ZK;$1qP{%)FK;?hn zp$L3Ah&>X!aj3a8R(~V6vsjo95Q4FZSMwi~b)vsiD|8}M;a6uQ2E}4qD+N37ftjdm*2H=^Ong8mMpDwd1Enpohz*Vn*5tzL?=*8M zAK3YoA~|uM<<783o5kE2U41&I?6HmSuU*{_o3O()$gAP89E?Rk>RcR%{{rn?TVOB> zD(_U4%Q4;lA-q_OOzcwL2gAWd>_VpVA&Xuv55|e$O%u@I_f9JtH^d!PK+&Ix+}C*E;|lep80Yz&D$35{&U9UrJ)l7Ig<_~l=J&4hJ#MnU@Lh=cZD$zbH-;s&Z;#Yc#1)JUdw z1(geVA6FLLHG+CXyvUQx>>8DmbIp`IGi^1`Ohq2#lZn~rXRvWjm$tnSh7b$&*p|V# zeURlF$;fk-bQG!5CU2|9!qB9=0SHR`EU4yD16CQ#0b$?L?K~NcN75?u?J?343RA>F zFe~o^IG|9cA8fENfGnKhyl9mouyiMiz${T#d9Obvb1rSBs4McRog}?6mL{4=;lyhX 
zpKl4RYBmv=KBKUplnH{koE-R$Qn0zn?~7&rR#YYBNqSqdsx&P_fi~9giw8%BrFkLk=rp{17b#Jer8IY3Pv!Iq9321sA(P z#nX{#k4TYOx(TM@|J%8P6aYaW3d7gQV|0^R2)Rv&>GsX2MKM7TL7V^E2yPl*g`(gR zL%n}(O4=D^?{vA8$}|<7;%v~Z=~{crAUtMb$5`{YT6yKebrIC<4awhUFi7@)$MM9= gOYcjKeqOj8Ql$Gyuo|0C9|1^8f$< literal 0 HcmV?d00001 diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 5526b6ad3..7814c9219 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -101,13 +101,13 @@ const _selectJob = (db, { table, jobId, packageName, packageVersion }) => { JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId `; - if (jobId !== null) { + if (jobId != null) { query += ` WHERE info.jobId = ?`; params.push(jobId); - } else if (packageName !== null) { + } else if (packageName != null) { query += ` WHERE job.packageName = ?`; params.push(packageName); - if (packageVersion !== null) { + if (packageVersion != null) { query += ` AND job.packageVersion = ?`; params.push(packageVersion); } @@ -146,7 +146,7 @@ const _selectJobs = (db, { table, since, includeCompleted }) => { SELECT job.*, info.* FROM ${table} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId - WHERE timestamp >= ? + WHERE info.createdAt >= ? 
`; let params = [since]; diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 8db8e883b..34aba9ba0 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -167,7 +167,9 @@ solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = -- Log.debug $ Solver.printSolverError solverErrors pure Nothing Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of - true -> pure $ Just { compiler: target, resolutions, name, version } + true -> do + Log.debug $ "Solved with compiler " <> Version.print solvedCompiler + pure $ Just { compiler: target, resolutions, name, version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" @@ -204,7 +206,9 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do -- Log.debug $ Solver.printSolverError solverErrors pure Nothing Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of - true -> pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } + true -> do + Log.debug $ "Solved with compiler " <> Version.print solvedCompiler + pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index cdb255e88..095232b4b 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -3,7 +3,10 @@ module Registry.App.Server.Router where import Registry.App.Prelude hiding ((/)) import Data.Codec.JSON as CJ +import Data.DateTime as DateTime +import Data.Time.Duration (Hours(..), negateDuration) import Effect.Aff as Aff +import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status @@ -33,7 
+36,9 @@ runRouter env = do runServer request = do result <- runEffects env (router request) case result of - Left error -> HTTPurple.badRequest (Aff.message error) + Left error -> do + Console.log $ "Bad request: " <> Aff.message error + HTTPurple.badRequest (Aff.message error) Right response -> pure response router :: Request Route -> Run ServerEffects Response @@ -94,12 +99,12 @@ router { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.badRequest "Expected transfer operation." Jobs { since, include_completed }, Get -> do - -- TODO should probably be 1h ago instead of now now <- liftEffect nowUTC + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now lift ( Run.Except.runExcept $ Db.selectJobs { includeCompleted: fromMaybe false include_completed - , since: fromMaybe now since + , since: fromMaybe oneHourAgo since } ) >>= case _ of Left err -> do @@ -109,7 +114,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Job jobId { level: maybeLogLevel, since }, Get -> do now <- liftEffect nowUTC - lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe now since }) >>= case _ of + let oneHourAgo = fromMaybe now $ DateTime.adjust (negateDuration (Hours 1.0)) now + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe oneHourAgo since }) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.internalServerError $ "Error while fetching job: " <> err diff --git a/db/schema.sql b/db/schema.sql index 1baf6403f..65319293a 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -6,9 +6,22 @@ CREATE TABLE job_info ( finishedAt TEXT, success INTEGER NOT NULL DEFAULT 0 ); -CREATE TABLE package_jobs ( +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) 
ON DELETE CASCADE +); +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE transfer_jobs ( jobId TEXT PRIMARY KEY NOT NULL, - jobType TEXT NOT NULL, packageName TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE diff --git a/nix/test/config.nix b/nix/test/config.nix index 3c06276e5..fdd24a537 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -218,6 +218,17 @@ let # S3 API wiremock mappings (serves package tarballs) s3Mappings = [ + { + request = { + method = "GET"; + url = "/effect/4.0.0.tar.gz"; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = "effect-4.0.0.tar.gz"; + }; + } { request = { method = "GET"; @@ -243,6 +254,10 @@ let ]; s3Files = [ + { + name = "effect-4.0.0.tar.gz"; + path = rootPath + "/app/fixtures/registry-storage/effect-4.0.0.tar.gz"; + } { name = "prelude-6.0.1.tar.gz"; path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs index 9d8b6b0b4..8d31b9850 100644 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ b/test-utils/src/Registry/Test/E2E/Client.purs @@ -126,7 +126,7 @@ post reqCodec resCodec config path reqBody = runExceptT do -- | Get the list of jobs getJobs :: Config -> Aff (Either ClientError (Array Job)) -getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs" +getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs?include_completed=true" -- | Get a specific job by ID, with optional log filtering getJob :: Config -> JobId -> Maybe LogLevel -> Maybe DateTime -> Aff (Either ClientError Job) From 6dc01f0175bb0cb87e75e7633405742dd69f3ad6 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 
4 Jan 2026 20:57:35 +0100 Subject: [PATCH 23/36] Add E2E test: run a whole-registry upgrade when detecting a new compiler --- app-e2e/src/Test/E2E/Publish.purs | 61 +++++++++++++++++++++++++++-- app/src/App/Server/JobExecutor.purs | 4 +- 2 files changed, 60 insertions(+), 5 deletions(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 28893b187..d47000f05 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -42,12 +42,12 @@ spec = do Left err -> Assert.fail $ "Failed to reach status endpoint: " <> Client.printClientError err Right _ -> pure unit - Spec.it "can list jobs (initially empty)" do + Spec.it "can list jobs (initially only compiler-upgrade matrix jobs)" do config <- getConfig result <- Client.getJobs config case result of Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err - Right _ -> pure unit -- Jobs list may not be empty if other tests ran + Right jobs -> Assert.shouldEqual initialJobs (map deterministicJob jobs) Spec.describe "Publish workflow" do Spec.it "can publish effect@4.0.0 and filter logs" do @@ -112,14 +112,14 @@ spec = do for_ sinceLogs \l -> Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) - Spec.it "kicks off a matrix job for 0.15.10 once the package is published" do + Spec.it "kicks off matrix jobs for effect@4.0.0 once the package is published" do config <- getConfig maybeJobs <- Client.getJobs config case maybeJobs of Left err -> Assert.fail $ "Failed to get jobs: " <> Client.printClientError err Right jobs -> do let - expectedJobs = + expectedJobs = initialJobs <> [ { jobType: "publish" , packageName: Just $ unsafePackageName "effect" , packageVersion: Just $ unsafeVersion "4.0.0" @@ -134,6 +134,41 @@ spec = do , payload: """{"prelude":"6.0.1"}""" , success: true } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ 
unsafeVersion "0.15.11" + , payload: """{"prelude":"6.0.1"}""" + , success: false + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ unsafeVersion "0.15.12" + , payload: """{"prelude":"6.0.1"}""" + , success: false + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ unsafeVersion "0.15.13" + , payload: """{"prelude":"6.0.1"}""" + , success: false + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ unsafeVersion "0.15.14" + , payload: """{"prelude":"6.0.1"}""" + , success: false + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "effect" + , packageVersion: Just $ unsafeVersion "4.0.0" + , compilerVersion: Just $ unsafeVersion "0.15.15" + , payload: """{"prelude":"6.0.1"}""" + , success: false + } ] Assert.shouldEqual expectedJobs (map deterministicJob jobs) @@ -188,3 +223,21 @@ deterministicJob = case _ of , success , payload: JSON.print $ CJ.encode Operation.packageSetOperationCodec payload } + +initialJobs :: Array DeterministicJob +initialJobs = + [ { jobType: "matrix" + , packageName: Just $ unsafePackageName "prelude" + , packageVersion: Just $ unsafeVersion "6.0.1" + , compilerVersion: Just $ unsafeVersion "0.15.15" + , payload: """{}""" + , success: true + } + , { jobType: "matrix" + , packageName: Just $ unsafePackageName "type-equality" + , packageVersion: Just $ unsafeVersion "4.0.1" + , compilerVersion: Just $ unsafeVersion "0.15.15" + , payload: """{}""" + , success: true + } + ] diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 2ede4307a..b329b8253 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -161,12 +161,14 @@ executeJob _ = case _ of 
upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit upgradeRegistryToNewCompiler newCompilerVersion = do + Log.info $ "New compiler found: " <> Version.print newCompilerVersion + Log.info "Starting upgrade of the whole registry to the new compiler..." allManifests <- Registry.readAllManifests for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do -- Note: we enqueue compilation jobs only for packages with no dependencies, -- because from them we should be able to reach the whole of the registry, -- as they complete new jobs for their dependants will be queued up. - when (not (Map.isEmpty manifest.dependencies)) do + when (Map.isEmpty manifest.dependencies) do Log.info $ "Enqueuing matrix job for _new_ compiler " <> Version.print newCompilerVersion <> ", package " From bf90252a6d57646af722669a8e286f041ff33829 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 4 Jan 2026 21:25:46 +0100 Subject: [PATCH 24/36] Don't fail job fetch on unreadable logs --- app/src/App/Effect/Db.purs | 6 ++++-- app/src/App/SQLite.purs | 24 ++++++++++++------------ 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 2e3c934f4..c78188ac1 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -171,8 +171,10 @@ handleSQLite env = case _ of pure next SelectJob request reply -> do - result <- Run.liftEffect $ SQLite.selectJob env.db request - pure $ reply result + { unreadableLogs, job } <- Run.liftEffect $ SQLite.selectJob env.db request + unless (Array.null unreadableLogs) do + Log.warn $ "Some logs were not readable: " <> String.joinWith "\n" unreadableLogs + pure $ reply job SelectJobs request reply -> do { failed, jobs } <- Run.liftEffect $ SQLite.selectJobs env.db request diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b9bef05d6..249ac4d09 100644 --- a/app/src/App/SQLite.purs +++ 
b/app/src/App/SQLite.purs @@ -55,7 +55,6 @@ import Data.Formatter.DateTime as DateTime import Data.Function (on) import Data.Nullable (notNull, null) import Data.Nullable as Nullable -import Data.String as String import Data.UUID.Random as UUID import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3, EffectFn4) import Effect.Uncurried as Uncurried @@ -196,19 +195,20 @@ type SelectJobRequest = , jobId :: JobId } -selectJob :: SQLite -> SelectJobRequest -> Effect (Either String (Maybe Job)) +selectJob :: SQLite -> SelectJobRequest -> Effect { unreadableLogs :: Array String, job :: Either String (Maybe Job) } selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do let logLevel = fromMaybe Error maybeLogLevel - { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since - case fail of - [] -> runExceptT $ firstJust - [ selectPublishJobById logs - , selectMatrixJobById logs - , selectTransferJobById logs - , selectPackageSetJobById logs - , selectUnpublishJobById logs - ] - _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail + { fail: unreadableLogs, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + -- Failing to decode a log should not prevent us from returning a job, so we pass + -- failures through to be handled by application code + job <- runExceptT $ firstJust + [ selectPublishJobById logs + , selectMatrixJobById logs + , selectTransferJobById logs + , selectPackageSetJobById logs + , selectUnpublishJobById logs + ] + pure { job, unreadableLogs } where firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) firstJust = Array.foldl go (pure Nothing) From 96bee58253c4feba4a8955d5b32daef6aa95aa2f Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 5 Jan 2026 12:22:58 -0500 Subject: [PATCH 25/36] Fix archive seeder build --- scripts/src/ArchiveSeeder.purs | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/src/ArchiveSeeder.purs 
b/scripts/src/ArchiveSeeder.purs index a474876d5..fe0ae805f 100644 --- a/scripts/src/ArchiveSeeder.purs +++ b/scripts/src/ArchiveSeeder.purs @@ -32,7 +32,6 @@ import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -119,7 +118,6 @@ main = launchAff_ do hasErrors <- runArchiveSeeder parsedArgs logPath # runAppEffects # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' From c9bade00f0afd6e23af7099987742e679bb2cb5d Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 5 Jan 2026 12:44:36 -0500 Subject: [PATCH 26/36] remove effect-4.0.0 from storage in unit tests --- app/test/App/API.purs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 122879e49..dab673b56 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -248,6 +248,10 @@ spec = do copyFixture "registry" copyFixture "registry-storage" copyFixture "github-packages" + -- FIXME: This is a bit hacky, but we remove effect-4.0.0.tar.gz since the unit test publishes + -- it from scratch and will fail if effect-4.0.0 is already in storage. We have it in storage + -- for the separate integration tests. 
+ FS.Extra.remove $ Path.concat [ testFixtures, "registry-storage", "effect-4.0.0.tar.gz" ] let readFixtures = do From 9ac3531e8cc5a3d0f324cf6f5471037cdfc9fa33 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 5 Jan 2026 12:50:00 -0500 Subject: [PATCH 27/36] avoid race condition in initial jobs test The "can list jobs" test was asserting that initial matrix jobs have success: true, but the job executor runs asynchronously and jobs may not have completed by the time the test queries the API. Fixed by normalizing the 'success' field to a constant before comparison. --- app-e2e/src/Test/E2E/Publish.purs | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index d47000f05..3cd43f578 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -47,7 +47,11 @@ spec = do result <- Client.getJobs config case result of Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err - Right jobs -> Assert.shouldEqual initialJobs (map deterministicJob jobs) + -- We ignore success status because the job executor runs asynchronously + -- and jobs may not have completed by the time we query. 
+ Right jobs -> + let ignoreSuccess j = j { success = true } + in Assert.shouldEqual (map ignoreSuccess initialJobs) (map (ignoreSuccess <<< deterministicJob) jobs) Spec.describe "Publish workflow" do Spec.it "can publish effect@4.0.0 and filter logs" do From 4fe219b328c37ca077475f426188dfa351ffbb70 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 5 Jan 2026 12:55:04 -0500 Subject: [PATCH 28/36] format --- app-e2e/src/Test/E2E/Publish.purs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 3cd43f578..78f9afe63 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -50,8 +50,10 @@ spec = do -- We ignore success status because the job executor runs asynchronously -- and jobs may not have completed by the time we query. Right jobs -> - let ignoreSuccess j = j { success = true } - in Assert.shouldEqual (map ignoreSuccess initialJobs) (map (ignoreSuccess <<< deterministicJob) jobs) + let + ignoreSuccess j = j { success = true } + in + Assert.shouldEqual (map ignoreSuccess initialJobs) (map (ignoreSuccess <<< deterministicJob) jobs) Spec.describe "Publish workflow" do Spec.it "can publish effect@4.0.0 and filter logs" do From 82c6b5a18fb88fca37427e024a511c854b038123 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 5 Jan 2026 13:15:50 -0500 Subject: [PATCH 29/36] second test --- app-e2e/src/Test/E2E/Publish.purs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 78f9afe63..d094a768e 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -176,7 +176,9 @@ spec = do , success: false } ] - Assert.shouldEqual expectedJobs (map deterministicJob jobs) + let + ignoreSuccess j = j { success = true } + Assert.shouldEqual (map ignoreSuccess expectedJobs) (map (ignoreSuccess <<< deterministicJob) jobs) type 
DeterministicJob = { jobType :: String From ab3119941dbb8fb31f162b7f4aaf852dea008b56 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 13:01:55 -0800 Subject: [PATCH 30/36] Refactor e2e tests with wiremock scenarios (#713) * refactor e2e tests with wiremock scenarios also adds a number of new e2e tests for various scenarios * format, etc. * move out fixtures * relax cache deletion * strengthen assertions, fix discovered bugs * drop ref, move to manifest (#714) * review feedback * more feedback --- AGENTS.md | 20 +- CONTRIBUTING.md | 23 +- SPEC.md | 2 + app-e2e/spago.yaml | 13 +- app-e2e/src/Test/E2E/Endpoint/Jobs.purs | 76 ++ app-e2e/src/Test/E2E/Endpoint/Publish.purs | 76 ++ app-e2e/src/Test/E2E/Endpoint/Transfer.purs | 51 ++ app-e2e/src/Test/E2E/Endpoint/Unpublish.purs | 52 ++ app-e2e/src/Test/E2E/GitHubIssue.purs | 267 +++---- app-e2e/src/Test/E2E/Main.purs | 24 - app-e2e/src/Test/E2E/Publish.purs | 251 ------- app-e2e/src/Test/E2E/Support/Client.purs | 192 +++++ app-e2e/src/Test/E2E/Support/Env.purs | 312 ++++++++ app-e2e/src/Test/E2E/Support/Fixtures.purs | 226 ++++++ app-e2e/src/Test/E2E/Support/Types.purs | 48 ++ .../src/Test/E2E/Support}/WireMock.purs | 139 ++-- app-e2e/src/Test/E2E/Workflow.purs | 107 +++ app-e2e/src/Test/Main.purs | 38 + .../github-packages/console-6.1.0/LICENSE | 26 + .../github-packages/console-6.1.0/bower.json | 22 + .../console-6.1.0/src/Effect/Console.js | 9 + .../console-6.1.0/src/Effect/Console.purs | 46 ++ .../registry-archive/prelude-6.0.2.tar.gz | Bin 31025 -> 31321 bytes app/fixtures/registry-index/pr/el/prelude | 2 +- .../registry-index/ty/pe/type-equality | 2 +- .../registry-storage/console-6.1.0.tar.gz | Bin 0 -> 1646 bytes .../registry-storage/prelude-6.0.1.tar.gz | Bin 31142 -> 31313 bytes app/fixtures/registry/metadata/prelude.json | 4 +- .../registry/metadata/type-equality.json | 4 +- app/src/App/API.purs | 12 +- app/src/App/CLI/Git.purs | 4 +- app/src/App/Effect/Archive.purs | 2 +- 
app/src/App/Effect/Env.purs | 10 + app/src/App/Effect/Registry.purs | 33 +- app/src/App/GitHubIssue.purs | 1 - app/src/App/Legacy/Manifest.purs | 6 +- app/src/App/Legacy/PackageSet.purs | 13 +- app/src/App/Manifest/SpagoYaml.purs | 8 +- app/src/App/Server/Env.purs | 4 +- app/test/App/API.purs | 4 +- app/test/App/Legacy/PackageSet.purs | 32 +- app/test/App/Manifest/SpagoYaml.purs | 2 +- flake.nix | 11 +- lib/fixtures/manifests/aff-5.1.2.json | 1 + lib/fixtures/manifests/mysql-4.1.1.json | 1 + lib/fixtures/manifests/prelude-4.1.1.json | 1 + lib/src/API/V1.purs | 1 + lib/src/Manifest.purs | 2 + lib/src/Metadata.purs | 7 - lib/test/Registry/ManifestIndex.purs | 7 +- lib/test/Registry/Metadata.purs | 12 +- lib/test/Registry/Operation/Validation.purs | 5 +- nix/test/config.nix | 677 +++++++++++++----- nix/test/integration.nix | 20 +- nix/test/smoke.nix | 7 +- nix/test/test-env.nix | 35 +- scripts/src/LegacyImporter.purs | 4 +- scripts/src/PackageDeleter.purs | 13 +- spago.lock | 71 +- test-utils/spago.yaml | 6 - test-utils/src/Registry/Test/Assert.purs | 12 + test-utils/src/Registry/Test/E2E/Client.purs | 180 ----- .../src/Registry/Test/E2E/Fixtures.purs | 76 -- test-utils/src/Registry/Test/Fixtures.purs | 18 + test-utils/src/Registry/Test/Utils.purs | 10 +- types/v1/Manifest.dhall | 1 + 66 files changed, 2196 insertions(+), 1145 deletions(-) create mode 100644 app-e2e/src/Test/E2E/Endpoint/Jobs.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Publish.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Transfer.purs create mode 100644 app-e2e/src/Test/E2E/Endpoint/Unpublish.purs delete mode 100644 app-e2e/src/Test/E2E/Main.purs delete mode 100644 app-e2e/src/Test/E2E/Publish.purs create mode 100644 app-e2e/src/Test/E2E/Support/Client.purs create mode 100644 app-e2e/src/Test/E2E/Support/Env.purs create mode 100644 app-e2e/src/Test/E2E/Support/Fixtures.purs create mode 100644 app-e2e/src/Test/E2E/Support/Types.purs rename {test-utils/src/Registry/Test/E2E => 
app-e2e/src/Test/E2E/Support}/WireMock.purs (52%) create mode 100644 app-e2e/src/Test/E2E/Workflow.purs create mode 100644 app-e2e/src/Test/Main.purs create mode 100644 app/fixtures/github-packages/console-6.1.0/LICENSE create mode 100644 app/fixtures/github-packages/console-6.1.0/bower.json create mode 100644 app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js create mode 100644 app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs create mode 100644 app/fixtures/registry-storage/console-6.1.0.tar.gz delete mode 100644 test-utils/src/Registry/Test/E2E/Client.purs delete mode 100644 test-utils/src/Registry/Test/E2E/Fixtures.purs create mode 100644 test-utils/src/Registry/Test/Fixtures.purs diff --git a/AGENTS.md b/AGENTS.md index 43e474c2a..d97738fe2 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -10,6 +10,10 @@ This project uses Nix with direnv. You should already be in the Nix shell automa nix develop ``` +Watch out for these Nix quirks: +- If Nix tries to fetch from git during a build, it is likely that spago.yaml files were changed but the lock file was not updated; if so, update the lockfile with `spago build` +- If a Nix build appears to be stale, then it is likely files were modified but are untracked by Git; if so, add modified files with `git add` and retry. + ### Build and Test The registry is implemented in PureScript. Use spago to build it and run PureScript tests. These are cheap and fast and should be used when working on the registry packages. @@ -19,17 +23,27 @@ spago build # Build all PureScript code spago test # Run unit tests ``` -Integration tests require two terminals (or the use of test-env in detached mode). The integration tests are only necessary to run if working on the server (app). +#### End-to-End Tests + +The end-to-end (integration) tests are in `app-e2e`. 
They can be run via Nix on Linux: + +``` +nix build .#checks.x86_64-linux.integration +``` + +Alternately, they can be run on macOS or for more iterative development of tests using two terminals: one to start the test env, and one to execute the tests. ```sh # Terminal 1: Start test environment (wiremock mocks + registry server on port 9000) nix run .#test-env # Terminal 2: Run E2E tests once server is ready -spago run -p registry-app-e2e +spago-test-e2e ``` -Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode. +Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode to use a single terminal. + +State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. #### Smoke Test (Linux only) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 92f5f9dcf..ebe38a0dd 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -72,20 +72,29 @@ nix build .#checks.x86_64-linux.smoke -L ### Integration Test +You can run the integration tests with the following on Linux: + +```sh +nix build .#checks.x86_64-linux.integration -L +``` + +On macOS or for iterative development, you can instead start the test environment and run the tests separately. + ```sh # Terminal 1: Start the test environment (wiremock mocks + registry server) nix run .#test-env -# Terminal 2: Once the server is ready, run the E2E tests -spago run -p registry-app-e2e +# Terminal 2: Run E2E tests once server is ready +spago-test-e2e ``` The test environment: - Starts wiremock services mocking GitHub, S3, Pursuit, etc. 
-- Starts the registry server on port 9000 with a temporary SQLite database +- Starts the registry server with a temporary SQLite database - Uses fixture data from `app/fixtures/` +- State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env` -Press `Ctrl+C` in Terminal 1 to stop all services. State is cleaned up automatically. +Press `Ctrl+C` in Terminal 1 to stop all services. All arguments after `--` are passed directly to process-compose: @@ -101,7 +110,11 @@ process-compose attach # Attach TUI process-compose down # Stop all services ``` -You can also set `STATE_DIR` to use a persistent state directory instead of a temp dir. +To examine state after a test run (e.g., for debugging), stop the test-env but don't restart it. The state remains in `/tmp/registry-test-env`: +- `db/registry.sqlite3` — SQLite database +- `scratch/registry/` — Local registry clone with metadata +- `scratch/registry-index/` — Local manifest index clone +- `repo-fixtures/` — Git fixture repositories ## Available Nix Commands diff --git a/SPEC.md b/SPEC.md index 423d0d80d..54c627d05 100644 --- a/SPEC.md +++ b/SPEC.md @@ -197,6 +197,7 @@ All packages in the registry contain a `purs.json` manifest file in their root d - `version`: a valid [`Version`](#version) - `license`: a valid [`License`](#license) - `location`: a valid [`Location`](#location) +- `ref`: a `string` representing the reference (e.g., a Git commit or Git tag) at the `location` that was used to fetch this version's source code - `owners` (optional): a non-empty array of [`Owner`](#owner) - `description` (optional): a description of your library as a plain text string, not markdown, up to 300 characters - `includeFiles` (optional): a non-empty array of globs, where globs are used to match file paths (in addition to the `src` directory and other [always-included files](#always-included-files)) that you want included in your package tarball @@ -221,6 +222,7 @@ For example: "githubOwner": "purescript", 
"githubRepo": "purescript-control" }, + "ref": "v4.2.0", "include": ["test/**/*.purs"], "exclude": ["test/graphs"], "dependencies": { "newtype": ">=3.0.0 <4.0.0", "prelude": ">=4.0.0 <5.0.0" } diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index c19e78c42..fb3804b90 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -8,21 +8,24 @@ package: - codec-json - console - datetime - - effect - - either - - foldable-traversable + - exceptions + - fetch + - integers - json - - maybe + - node-child-process + - node-execa - node-fs - node-path - node-process - - prelude + - ordered-collections - registry-app - registry-foreign - registry-lib - registry-test-utils + - routing-duplex - spec - spec-node - strings + - transformers run: main: Test.E2E.Main diff --git a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs new file mode 100644 index 000000000..2aa157673 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs @@ -0,0 +1,76 @@ +module Test.E2E.Endpoint.Jobs (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 (JobId(..)) +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Status endpoint" do + Spec.it "can reach the status endpoint" do + Client.getStatus + + Spec.describe "Jobs list" do + Spec.it "excludes completed jobs when include_completed is false" do + -- Create a job and wait for it to complete + { jobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail jobId + + -- Now we have at least one completed job + recentJobs <- Client.getJobsWith Client.ActiveOnly + allJobs <- Client.getJobsWith Client.IncludeCompleted + + -- All jobs should include the completed publish job + let allCount = Array.length 
allJobs + Assert.shouldSatisfy allCount (_ > 0) + + -- Active-only should return fewer or equal jobs + let recentCount = Array.length recentJobs + Assert.shouldSatisfy recentCount (_ <= allCount) + + -- Verify completed jobs are excluded from active-only results + let completedJob = Array.find (\job -> isJust (V1.jobInfo job).finishedAt) allJobs + case completedJob of + Just completed -> do + let + completedId = (V1.jobInfo completed).jobId + inRecent = Array.any (\job -> (V1.jobInfo job).jobId == completedId) recentJobs + when inRecent do + Assert.fail $ "Completed job " <> unwrap completedId <> " should be excluded from include_completed=false results" + Nothing -> pure unit + + Spec.describe "Job query parameters" do + Spec.it "accepts level and since parameters" do + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + let info = V1.jobInfo job + + baseJob <- Client.getJob jobId Nothing Nothing + Assert.shouldEqual (V1.jobInfo baseJob).jobId info.jobId + + debugJob <- Client.getJob jobId (Just V1.Debug) Nothing + Assert.shouldEqual (V1.jobInfo debugJob).jobId info.jobId + + let sinceTime = fromMaybe info.createdAt info.finishedAt + sinceJob <- Client.getJob jobId Nothing (Just sinceTime) + Assert.shouldEqual (V1.jobInfo sinceJob).jobId info.jobId + + Spec.describe "Jobs API error handling" do + Spec.it "returns HTTP 404 for non-existent job ID" do + let fakeJobId = JobId "nonexistent-job-id-12345" + result <- Client.tryGetJob fakeJobId Nothing Nothing + case result of + Right _ -> + Assert.fail "Expected HTTP 404 for non-existent job" + Left err -> + case Client.clientErrorStatus err of + Just 404 -> pure unit + _ -> Assert.fail $ "Expected HTTP 404, got: " <> Client.printClientError err diff --git a/app-e2e/src/Test/E2E/Endpoint/Publish.purs b/app-e2e/src/Test/E2E/Endpoint/Publish.purs new file mode 100644 index 000000000..47e51c959 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Publish.purs @@ -0,0 +1,76 @@ +module 
Test.E2E.Endpoint.Publish (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NEA +import Data.Map as Map +import Data.Set as Set +import Data.String as String +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.Manifest (Manifest(..)) +import Registry.Metadata (Metadata(..)) +import Registry.Sha256 as Sha256 +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish workflow" do + Spec.it "can publish effect@4.0.0 and verify all state changes" do + { jobId } <- Client.publish Fixtures.effectPublishData + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + uploadOccurred <- Env.hasStorageUpload Fixtures.effect + unless uploadOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 PUT for effect/4.0.0.tar.gz" storageRequests + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.published of + Nothing -> Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in metadata published versions" + Just publishedMeta -> do + Assert.shouldSatisfy (Sha256.print publishedMeta.hash) (not <<< String.null) + + manifestEntries <- Env.readManifestIndexEntry Fixtures.effect.name + let hasVersion = Array.any (\(Manifest m) -> m.version == Fixtures.effect.version) manifestEntries + unless hasVersion do + Assert.fail $ "Expected version " <> Version.print Fixtures.effect.version <> " in manifest index" + + Env.waitForAllMatrixJobs Fixtures.effect + + -- Collect the compilers from the matrix jobs that ran for this package + allJobs <- 
Client.getJobsWith Client.IncludeCompleted + let + matrixCompilers = Array.mapMaybe + ( case _ of + MatrixJob { packageName, packageVersion, compilerVersion } -> + if packageName == Fixtures.effect.name && packageVersion == Fixtures.effect.version then Just compilerVersion + else Nothing + _ -> Nothing + ) + allJobs + -- The expected compilers are: the publish compiler + all matrix job compilers + expectedCompilers = Set.fromFoldable $ Array.cons Fixtures.effectPublishData.compiler matrixCompilers + + Metadata metadataAfter <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadataAfter.published of + Nothing -> Assert.fail "Version missing after matrix jobs" + Just publishedMetaAfter -> do + let actualCompilers = Set.fromFoldable $ NEA.toArray publishedMetaAfter.compilers + Assert.shouldEqual actualCompilers expectedCompilers + + Spec.describe "Publish state machine" do + Spec.it "returns same jobId for duplicate publish requests" do + { jobId: id1 } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail id1 + { jobId: id2 } <- Client.publish Fixtures.effectPublishData + Assert.shouldEqual id1 id2 diff --git a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs new file mode 100644 index 000000000..e06b466a2 --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs @@ -0,0 +1,51 @@ +module Test.E2E.Endpoint.Transfer (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Registry.API.V1 as V1 +import Registry.Location (Location(..)) +import Registry.Metadata (Metadata(..)) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Transfer 
workflow" do + Spec.it "can transfer effect to a new location with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + Env.waitForAllMatrixJobs Fixtures.effect + + Metadata originalMetadata <- Env.readMetadata Fixtures.effect.name + case originalMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "purescript" + Git _ -> Assert.fail "Expected GitHub location, got Git" + + -- clear the publish PUT so we can verify transfers leave storage unaffected + WireMock.clearStorageRequests + + authData <- Env.signTransferOrFail Fixtures.effectTransferData + { jobId: transferJobId } <- Client.transfer authData + transferJob <- Env.pollJobOrFail transferJobId + Assert.shouldSatisfy (V1.jobInfo transferJob).finishedAt isJust + + Metadata newMetadata <- Env.readMetadata Fixtures.effect.name + case newMetadata.location of + GitHub { owner } -> Assert.shouldEqual owner "new-owner" + Git _ -> Assert.fail "Expected GitHub location after transfer, got Git" + + storageRequests <- WireMock.getStorageRequests + let + packagePath = PackageName.print Fixtures.effect.name + putOrDeleteRequests = Array.filter + (\r -> (r.method == "PUT" || r.method == "DELETE") && WireMock.filterByUrlContaining packagePath [ r ] /= []) + storageRequests + unless (Array.null putOrDeleteRequests) do + WireMock.failWithRequests "Transfer should not PUT or DELETE to storage" putOrDeleteRequests diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs new file mode 100644 index 000000000..9a82e943b --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs @@ -0,0 +1,52 @@ +module Test.E2E.Endpoint.Unpublish (spec) where + +import Registry.App.Prelude + +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client 
+import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Publish-Unpublish workflow" do + Spec.it "can publish effect@4.0.0 then unpublish it with full state verification" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + Env.waitForAllMatrixJobs Fixtures.effect + + existsBefore <- Env.manifestIndexEntryExists Fixtures.effect + unless existsBefore do + Assert.fail "Expected version to exist in manifest index before unpublish" + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + + case Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just unpublishedInfo -> + Assert.shouldSatisfy unpublishedInfo.reason (not <<< String.null) + + when (Map.member Fixtures.effect.version metadata.published) do + Assert.fail "Version 4.0.0 should not be in 'published' metadata after unpublish" + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + storageRequests <- WireMock.getStorageRequests + WireMock.failWithRequests "Expected S3 DELETE for effect/4.0.0.tar.gz" storageRequests + + existsAfter <- Env.manifestIndexEntryExists Fixtures.effect + when existsAfter do + Assert.fail "Expected version to be removed from manifest index after unpublish" diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs index be9f3ba8f..c4598313a 100644 --- a/app-e2e/src/Test/E2E/GitHubIssue.purs +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -1,218 
+1,149 @@ -- | End-to-end tests for the GitHubIssue workflow. --- | These tests exercise the full flow: parsing a GitHub event, submitting to --- | the registry API, polling for completion, and posting comments. +-- | Tests the full flow: parsing GitHub event → submitting to registry API → +-- | polling for completion → posting comments. module Test.E2E.GitHubIssue (spec) where import Registry.App.Prelude +import Control.Monad.Reader (ask) import Data.Array as Array import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.String as String import Effect.Aff (Milliseconds(..)) -import Effect.Aff as Aff import JSON as JSON import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process import Registry.App.GitHubIssue as GitHubIssue import Registry.Foreign.Tmp as Tmp -import Registry.Operation (AuthenticatedData) import Registry.Operation as Operation -import Registry.Test.E2E.Client as Client -import Registry.Test.E2E.Fixtures as Fixtures -import Registry.Test.E2E.WireMock (WireMockRequest) -import Registry.Test.E2E.WireMock as WireMock -import Test.Spec (Spec) +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2E, E2ESpec) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock import Test.Spec as Spec -spec :: Spec Unit +spec :: E2ESpec spec = do Spec.describe "GitHubIssue end-to-end" do - Spec.before clearWireMockJournal do - - Spec.it "handles a publish via GitHub issue, posts comments, and closes issue on success" \_ -> do - result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.effectPublishData - - assertJobSucceeded result - assertHasComment jobStartedText result - assertHasComment jobCompletedText result - assertIssueClosed result - - Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do - result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEventFrom "random-user" Fixtures.failingTransferData - - assertJobFailed result - 
assertHasComment jobStartedText result - assertHasComment jobFailedText result - assertNoComment jobCompletedText result - assertIssueOpen result - - Spec.it "re-signs authenticated operation for trustee (job fails due to unpublish time limit)" \_ -> do - result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEvent Fixtures.trusteeAuthenticatedData - - assertHasComment jobStartedText result - assertTeamsApiCalled result - - where - clearWireMockJournal :: Aff Unit - clearWireMockJournal = do - wmConfig <- liftEffect WireMock.configFromEnv - WireMock.clearRequestsOrFail wmConfig - + Spec.it "handles publish via GitHub issue, posts comments, and closes issue on success" do + requests <- runWorkflow $ mkPublishEvent Fixtures.effectPublishData + assertComment "Job started" requests + assertComment "Job completed successfully" requests + assertClosed requests + + Spec.it "posts failure comment and leaves issue open when job fails" do + requests <- runWorkflow $ mkAuthenticatedEvent "random-user" Fixtures.failingTransferData + assertComment "Job started" requests + assertComment "Job failed" requests + assertNoComment "Job completed successfully" requests + assertOpen requests + + Spec.it "calls Teams API to verify trustee membership for authenticated operation" do + requests <- runWorkflow $ mkAuthenticatedEvent packagingTeamUser Fixtures.trusteeAuthenticatedData + assertComment "Job started" requests + assertTeamsApiCalled requests + + Spec.it "posts error comment when issue body contains invalid JSON" do + requests <- runWorkflow Fixtures.invalidJsonIssueEvent + assertComment "malformed" requests + assertOpen requests + +-- Constants testIssueNumber :: Int testIssueNumber = 101 --- | Username configured as a packaging team member in test WireMock fixtures. --- | See nix/test/config.nix for the GitHub Teams API stub. 
-packagingTeamUsername :: String -packagingTeamUsername = "packaging-team-user" - -jobStartedText :: String -jobStartedText = "Job started" - -jobCompletedText :: String -jobCompletedText = "Job completed successfully" - -jobFailedText :: String -jobFailedText = "Job failed" - -packagingTeamMembersPath :: String -packagingTeamMembersPath = "/orgs/purescript/teams/packaging/members" - -testPollConfig :: GitHubIssue.PollConfig -testPollConfig = - { maxAttempts: 60 - , interval: Milliseconds 500.0 - } +packagingTeamUser :: String +packagingTeamUser = "packaging-team-user" +-- Event builders githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object { sender: CJ.Record.object { login: CJ.string } , issue: CJ.Record.object { number: CJ.int, body: CJ.string } } -mkGitHubPublishEvent :: Operation.PublishData -> String -mkGitHubPublishEvent publishData = +mkPublishEvent :: Operation.PublishData -> String +mkPublishEvent publishData = let - publishJson = JSON.print $ CJ.encode Operation.publishCodec publishData - body = "```json\n" <> publishJson <> "\n```" - event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + body = "```json\n" <> JSON.print (CJ.encode Operation.publishCodec publishData) <> "\n```" in - JSON.print $ CJ.encode githubEventCodec event - -mkGitHubAuthenticatedEvent :: AuthenticatedData -> String -mkGitHubAuthenticatedEvent = mkGitHubAuthenticatedEventFrom packagingTeamUsername + JSON.print $ CJ.encode githubEventCodec + { sender: { login: packagingTeamUser }, issue: { number: testIssueNumber, body } } -mkGitHubAuthenticatedEventFrom :: String -> AuthenticatedData -> String -mkGitHubAuthenticatedEventFrom username authData = +mkAuthenticatedEvent :: String -> Operation.AuthenticatedData -> String +mkAuthenticatedEvent username authData = let - authJson = JSON.print $ CJ.encode Operation.authenticatedCodec 
authData - body = "```json\n" <> authJson <> "\n```" - event = { sender: { login: username }, issue: { number: testIssueNumber, body } } + body = "```json\n" <> JSON.print (CJ.encode Operation.authenticatedCodec authData) <> "\n```" in - JSON.print $ CJ.encode githubEventCodec event + JSON.print $ CJ.encode githubEventCodec + { sender: { login: username }, issue: { number: testIssueNumber, body } } -issuePath :: Int -> String -issuePath n = "/issues/" <> show n +-- Workflow runner +runWorkflow :: String -> E2E (Array WireMock.WireMockRequest) +runWorkflow eventJson = do + { stateDir } <- ask -issueCommentsPath :: Int -> String -issueCommentsPath n = issuePath n <> "/comments" + Client.getStatus -commentRequests :: Array WireMockRequest -> Array WireMockRequest -commentRequests = - WireMock.filterByMethod "POST" - >>> WireMock.filterByUrlContaining (issueCommentsPath testIssueNumber) - -closeRequests :: Array WireMockRequest -> Array WireMockRequest -closeRequests = - WireMock.filterByMethod "PATCH" - >>> WireMock.filterByUrlContaining (issuePath testIssueNumber) - -teamsRequests :: Array WireMockRequest -> Array WireMockRequest -teamsRequests = - WireMock.filterByMethod "GET" - >>> WireMock.filterByUrlContaining packagingTeamMembersPath + tmpDir <- liftAff Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + liftAff $ FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath -bodyContains :: String -> WireMockRequest -> Boolean -bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) + originalCwd <- liftEffect Process.cwd + liftEffect $ Process.chdir stateDir -hasComment :: String -> Array WireMockRequest -> Boolean -hasComment text = Array.any (bodyContains text) + envResult <- liftAff GitHubIssue.initializeGitHub + for_ envResult \env -> do + let testEnv = env { pollConfig = { maxAttempts: 60, interval: Milliseconds 500.0 }, logVerbosity = Quiet } + liftAff $ 
void $ GitHubIssue.runGitHubIssue testEnv --- | Result of running the GitHubIssue workflow. -type RunResult = - { success :: Boolean - , requests :: Array WireMockRequest - } + liftEffect $ Process.chdir originalCwd --- | Run the GitHub issue workflow with a given event JSON. --- | Handles server check, temp file creation, env setup, and request capture. -runWorkflowWithEvent :: String -> Aff RunResult -runWorkflowWithEvent eventJson = do - -- Verify server is reachable - config <- liftEffect Client.configFromEnv - statusResult <- Client.getStatus config - case statusResult of - Left err -> Aff.throwError $ Aff.error $ "Server not reachable: " <> Client.printClientError err - Right _ -> pure unit - - -- Write event to temp file - tmpDir <- Tmp.mkTmpDir - let eventPath = Path.concat [ tmpDir, "github-event.json" ] - FS.Aff.writeTextFile UTF8 eventPath eventJson - liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + WireMock.getGithubRequests - -- Initialize and run workflow - envResult <- GitHubIssue.initializeGitHub - case envResult of - Nothing -> - Aff.throwError $ Aff.error "initializeGitHub returned Nothing" - Just env -> do - let testEnv = env { pollConfig = testPollConfig, logVerbosity = Quiet } - result <- GitHubIssue.runGitHubIssue testEnv - - -- Capture WireMock requests - wmConfig <- liftEffect WireMock.configFromEnv - requests <- WireMock.getRequestsOrFail wmConfig - - case result of - Left err -> - WireMock.failWithRequests ("runGitHubIssue failed: " <> err) requests - Right success -> - pure { success, requests } - -assertJobSucceeded :: RunResult -> Aff Unit -assertJobSucceeded { success, requests } = - unless success do - WireMock.failWithRequests "Job did not succeed" requests - -assertJobFailed :: RunResult -> Aff Unit -assertJobFailed { success, requests } = - when success do - WireMock.failWithRequests "Expected job to fail but it succeeded" requests - -assertHasComment :: String -> RunResult -> Aff Unit -assertHasComment text { requests } 
= - unless (hasComment text (commentRequests requests)) do +-- Assertions (all operate on captured requests) +assertComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + unless (Array.any (bodyContains text) comments) do WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests -assertNoComment :: String -> RunResult -> Aff Unit -assertNoComment text { requests } = - when (hasComment text (commentRequests requests)) do +assertNoComment :: String -> Array WireMock.WireMockRequest -> E2E Unit +assertNoComment text requests = do + let + comments = requests # Array.filter \r -> + r.method == "POST" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber <> "/comments") r.url + when (Array.any (bodyContains text) comments) do WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests -assertIssueClosed :: RunResult -> Aff Unit -assertIssueClosed { requests } = - when (Array.null (closeRequests requests)) do - WireMock.failWithRequests "Expected issue to be closed, but no close request was made" requests +assertClosed :: Array WireMock.WireMockRequest -> E2E Unit +assertClosed requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" && String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + when (Array.null closes) do + WireMock.failWithRequests "Expected issue to be closed" requests -assertIssueOpen :: RunResult -> Aff Unit -assertIssueOpen { requests } = - unless (Array.null (closeRequests requests)) do - WireMock.failWithRequests "Expected issue to remain open, but a close request was made" requests +assertOpen :: Array WireMock.WireMockRequest -> E2E Unit +assertOpen requests = do + let + closes = requests # Array.filter \r -> + r.method == "PATCH" 
&& String.contains (String.Pattern $ "/issues/" <> show testIssueNumber) r.url + unless (Array.null closes) do + WireMock.failWithRequests "Expected issue to remain open" requests -assertTeamsApiCalled :: RunResult -> Aff Unit -assertTeamsApiCalled { requests } = - when (Array.null (teamsRequests requests)) do - WireMock.failWithRequests "Expected GitHub Teams API to be called, but no such request was seen" requests +assertTeamsApiCalled :: Array WireMock.WireMockRequest -> E2E Unit +assertTeamsApiCalled requests = do + let + teams = requests # Array.filter \r -> + r.method == "GET" && String.contains (String.Pattern "/orgs/purescript/teams/packaging/members") r.url + when (Array.null teams) do + WireMock.failWithRequests "Expected Teams API to be called" requests + +bodyContains :: String -> WireMock.WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs deleted file mode 100644 index bbd7f3212..000000000 --- a/app-e2e/src/Test/E2E/Main.purs +++ /dev/null @@ -1,24 +0,0 @@ -module Test.E2E.Main (main) where - -import Prelude - -import Data.Maybe (Maybe(..)) -import Data.Time.Duration (Milliseconds(..)) -import Effect (Effect) -import Test.E2E.GitHubIssue as Test.E2E.GitHubIssue -import Test.E2E.Publish as Test.E2E.Publish -import Test.Spec as Spec -import Test.Spec.Reporter.Console (consoleReporter) -import Test.Spec.Runner.Node (runSpecAndExitProcess') -import Test.Spec.Runner.Node.Config as Cfg - -main :: Effect Unit -main = runSpecAndExitProcess' config [ consoleReporter ] do - Spec.describe "E2E Tests" do - Spec.describe "Publish" Test.E2E.Publish.spec - Spec.describe "GitHubIssue" Test.E2E.GitHubIssue.spec - where - config = - { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } - , parseCLIOptions: false - } diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs deleted 
file mode 100644 index d094a768e..000000000 --- a/app-e2e/src/Test/E2E/Publish.purs +++ /dev/null @@ -1,251 +0,0 @@ --- | End-to-end tests for the Publish API endpoint. --- | These tests exercise the actual registry server via HTTP requests. -module Test.E2E.Publish (spec) where - -import Prelude - -import Data.Array as Array -import Data.Codec.JSON as CJ -import Data.Either (Either(..)) -import Data.Foldable (for_) -import Data.Maybe (Maybe(..), isJust) -import Data.String as String -import Effect.Aff (Aff) -import Effect.Class (liftEffect) -import Effect.Class.Console as Console -import JSON as JSON -import Registry.API.V1 (Job(..)) -import Registry.API.V1 as V1 -import Registry.Internal.Codec as Internal.Codec -import Registry.Operation as Operation -import Registry.PackageName (PackageName) -import Registry.Test.Assert as Assert -import Registry.Test.E2E.Client as Client -import Registry.Test.E2E.Fixtures as Fixtures -import Registry.Test.Utils (unsafePackageName, unsafeVersion) -import Registry.Version (Version) -import Registry.Version as Version -import Test.Spec (Spec) -import Test.Spec as Spec - --- | Get client config from environment -getConfig :: Aff Client.Config -getConfig = liftEffect Client.configFromEnv - -spec :: Spec Unit -spec = do - Spec.describe "Server connectivity" do - Spec.it "can reach the status endpoint" do - config <- getConfig - result <- Client.getStatus config - case result of - Left err -> Assert.fail $ "Failed to reach status endpoint: " <> Client.printClientError err - Right _ -> pure unit - - Spec.it "can list jobs (initially only compiler-upgrade matrix jobs)" do - config <- getConfig - result <- Client.getJobs config - case result of - Left err -> Assert.fail $ "Failed to list jobs: " <> Client.printClientError err - -- We ignore success status because the job executor runs asynchronously - -- and jobs may not have completed by the time we query. 
- Right jobs -> - let - ignoreSuccess j = j { success = true } - in - Assert.shouldEqual (map ignoreSuccess initialJobs) (map (ignoreSuccess <<< deterministicJob) jobs) - - Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0 and filter logs" do - config <- getConfig - - -- Submit publish request - publishResult <- Client.publish config Fixtures.effectPublishData - case publishResult of - Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err - Right { jobId } -> do - -- Poll until job completes - job <- Client.pollJob config jobId - - -- If job failed, print logs for debugging - unless (V1.jobInfo job).success do - Console.log "Job failed! Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs - Console.log $ String.joinWith "\n" logMessages - - -- Verify job completed successfully - when (not (V1.jobInfo job).success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs - let errorMessages = map _.message errorLogs - Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - - Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust - - -- Test log level filtering - allLogsResult <- Client.getJob config jobId (Just V1.Debug) Nothing - case allLogsResult of - Left err -> Assert.fail $ "Failed to get job with DEBUG level: " <> Client.printClientError err - Right allLogsJob -> do - let allLogs = (V1.jobInfo allLogsJob).logs - - infoLogsResult <- Client.getJob config jobId (Just V1.Info) Nothing - case infoLogsResult of - Left err -> Assert.fail $ "Failed to get job with INFO level: " <> Client.printClientError err - Right infoLogsJob -> do - let infoLogs = (V1.jobInfo infoLogsJob).logs - let debugOnlyLogs = Array.filter (\l -> l.level == V1.Debug) allLogs - - -- INFO logs should not contain any DEBUG logs - let infoContainsDebug = Array.any (\l -> l.level == V1.Debug) infoLogs - when 
infoContainsDebug do - Assert.fail "INFO level filter returned DEBUG logs" - - -- If there were DEBUG logs, INFO result should be smaller - when (Array.length debugOnlyLogs > 0) do - Assert.shouldSatisfy (Array.length infoLogs) (_ < Array.length allLogs) - - -- Test timestamp filtering - let logs = (V1.jobInfo job).logs - when (Array.length logs >= 2) do - case Array.index logs 0 of - Nothing -> pure unit - Just firstLog -> do - sinceResult <- Client.getJob config jobId (Just V1.Debug) (Just firstLog.timestamp) - case sinceResult of - Left err -> Assert.fail $ "Failed to get job with since filter: " <> Client.printClientError err - Right sinceJob -> do - let sinceLogs = (V1.jobInfo sinceJob).logs - for_ sinceLogs \l -> - Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) - - Spec.it "kicks off matrix jobs for effect@4.0.0 once the package is published" do - config <- getConfig - maybeJobs <- Client.getJobs config - case maybeJobs of - Left err -> Assert.fail $ "Failed to get jobs: " <> Client.printClientError err - Right jobs -> do - let - expectedJobs = initialJobs <> - [ { jobType: "publish" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Nothing - , payload: """{"compiler":"0.15.9","location":{"githubOwner":"purescript","githubRepo":"purescript-effect"},"name":"effect","ref":"v4.0.0","version":"4.0.0"}""" - , success: true - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.10" - , payload: """{"prelude":"6.0.1"}""" - , success: true - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.11" - , payload: """{"prelude":"6.0.1"}""" - , success: false - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , 
packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.12" - , payload: """{"prelude":"6.0.1"}""" - , success: false - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.13" - , payload: """{"prelude":"6.0.1"}""" - , success: false - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.14" - , payload: """{"prelude":"6.0.1"}""" - , success: false - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "effect" - , packageVersion: Just $ unsafeVersion "4.0.0" - , compilerVersion: Just $ unsafeVersion "0.15.15" - , payload: """{"prelude":"6.0.1"}""" - , success: false - } - ] - let - ignoreSuccess j = j { success = true } - Assert.shouldEqual (map ignoreSuccess expectedJobs) (map (ignoreSuccess <<< deterministicJob) jobs) - -type DeterministicJob = - { jobType :: String - , packageName :: Maybe PackageName - , packageVersion :: Maybe Version - , compilerVersion :: Maybe Version - , payload :: String - , success :: Boolean - } - -deterministicJob :: Job -> DeterministicJob -deterministicJob = case _ of - PublishJob { success, packageName, packageVersion, payload } -> - { jobType: "publish" - , packageName: Just packageName - , packageVersion: Just packageVersion - , compilerVersion: Nothing - , success - , payload: JSON.print $ CJ.encode Operation.publishCodec payload - } - UnpublishJob { success, packageName, packageVersion, payload } -> - { jobType: "unpublish" - , packageName: Just packageName - , packageVersion: Just packageVersion - , compilerVersion: Nothing - , success - , payload: JSON.print $ CJ.encode Operation.authenticatedCodec payload - } - TransferJob { success, packageName, payload } -> - { jobType: "transfer" - , packageName: Just packageName - , packageVersion: 
Nothing - , compilerVersion: Nothing - , success - , payload: JSON.print $ CJ.encode Operation.authenticatedCodec payload - } - MatrixJob { success, packageName, packageVersion, compilerVersion, payload } -> - { jobType: "matrix" - , packageName: Just packageName - , packageVersion: Just packageVersion - , compilerVersion: Just compilerVersion - , success - , payload: JSON.print $ CJ.encode (Internal.Codec.packageMap Version.codec) payload - } - PackageSetJob { success, payload } -> - { jobType: "packageset" - , packageName: Nothing - , packageVersion: Nothing - , compilerVersion: Nothing - , success - , payload: JSON.print $ CJ.encode Operation.packageSetOperationCodec payload - } - -initialJobs :: Array DeterministicJob -initialJobs = - [ { jobType: "matrix" - , packageName: Just $ unsafePackageName "prelude" - , packageVersion: Just $ unsafeVersion "6.0.1" - , compilerVersion: Just $ unsafeVersion "0.15.15" - , payload: """{}""" - , success: true - } - , { jobType: "matrix" - , packageName: Just $ unsafePackageName "type-equality" - , packageVersion: Just $ unsafeVersion "4.0.1" - , compilerVersion: Just $ unsafeVersion "0.15.15" - , payload: """{}""" - , success: true - } - ] diff --git a/app-e2e/src/Test/E2E/Support/Client.purs b/app-e2e/src/Test/E2E/Support/Client.purs new file mode 100644 index 000000000..6985b9611 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Client.purs @@ -0,0 +1,192 @@ +-- | HTTP client for making requests to the registry server during E2E tests. +-- | This module provides typed helpers for interacting with the Registry API. +-- | +-- | All client functions operate in the E2E monad (ReaderT TestEnv Aff) and +-- | throw on HTTP or parse errors. Use the `try*` variants (e.g., `tryGetJob`) +-- | when testing error responses - they return `Either ClientError a` with +-- | typed HTTP status codes. +module Test.E2E.Support.Client + ( ClientError(..) + , JobFilter(..) 
+ , getJobs + , getJobsWith + , getJob + , tryGetJob + , getStatus + , publish + , unpublish + , transfer + , pollJob + , printClientError + , clientErrorStatus + ) where + +import Registry.App.Prelude + +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Monad.Reader (ask) +import Data.Codec.JSON as CJ +import Data.DateTime (DateTime) +import Data.Int as Int +import Effect.Aff (delay) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Effect.Exception (Error) +import Effect.Exception as Exception +import Fetch (Method(..)) +import Fetch as Fetch +import JSON as JSON +import Registry.API.V1 (Job, JobId, LogLevel, Route(..)) +import Registry.API.V1 as V1 +import Registry.Operation (AuthenticatedData, PublishData) +import Registry.Operation as Operation +import Routing.Duplex as Routing +import Test.E2E.Support.Types (E2E) + +-- | Errors that can occur during client operations +data ClientError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + | Timeout String + +printClientError :: ClientError -> String +printClientError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + Timeout msg -> "Timeout: " <> msg + +-- | Extract the HTTP status code from a ClientError, if it's an HttpError +clientErrorStatus :: ClientError -> Maybe Int +clientErrorStatus = case _ of + HttpError { status } -> Just status + _ -> Nothing + +-- | Convert a ClientError to an Effect Error for throwing +toError :: ClientError -> Error +toError = Exception.error <<< printClientError + +-- | Throw a ClientError as an Aff error +throw :: forall a. 
ClientError -> Aff a +throw = Aff.throwError <<< toError + +-- | Print a Route to its URL path using the route codec +printRoute :: Route -> String +printRoute = Routing.print V1.routes + +-- | Make a GET request and decode the response, returning Either on error. +tryGet :: forall a. CJ.Codec a -> String -> String -> Aff (Either ClientError a) +tryGet codec baseUrl path = do + response <- Fetch.fetch (baseUrl <> path) { method: GET } + body <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson codec body of + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: body } + Right a -> pure $ Right a + else + pure $ Left $ HttpError { status: response.status, body } + +-- | Make a GET request and decode the response. Throws on error. +get :: forall a. CJ.Codec a -> String -> String -> Aff a +get codec baseUrl path = tryGet codec baseUrl path >>= either throw pure + +-- | Make a POST request with JSON body and decode the response. Throws on error. +post :: forall req res. 
CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res +post reqCodec resCodec baseUrl path reqBody = do + let jsonBody = JSON.print $ CJ.encode reqCodec reqBody + response <- Fetch.fetch (baseUrl <> path) + { method: POST + , headers: { "Content-Type": "application/json" } + , body: jsonBody + } + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case parseJson resCodec responseBody of + Left err -> throw $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody } + Right a -> pure a + else + throw $ HttpError { status: response.status, body: responseBody } + +data JobFilter = ActiveOnly | IncludeCompleted + +-- | Get the list of jobs with a configurable filter +getJobsWith :: JobFilter -> E2E (Array Job) +getJobsWith filter = do + { clientConfig } <- ask + let + includeCompleted = case filter of + ActiveOnly -> Just false + IncludeCompleted -> Just true + route = Jobs { since: Nothing, include_completed: includeCompleted } + liftAff $ get (CJ.array V1.jobCodec) clientConfig.baseUrl (printRoute route) + +-- | Get the list of jobs (includes completed jobs) +getJobs :: E2E (Array Job) +getJobs = getJobsWith IncludeCompleted + +-- | Get a specific job by ID, with optional log filtering +getJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E Job +getJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ get V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Try to get a specific job by ID, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 404). 
+tryGetJob :: JobId -> Maybe LogLevel -> Maybe DateTime -> E2E (Either ClientError Job) +tryGetJob jobId level since = do + { clientConfig } <- ask + let route = Job jobId { level, since } + liftAff $ tryGet V1.jobCodec clientConfig.baseUrl (printRoute route) + +-- | Check if the server is healthy +getStatus :: E2E Unit +getStatus = do + { clientConfig } <- ask + liftAff do + response <- Fetch.fetch (clientConfig.baseUrl <> printRoute Status) { method: GET } + if response.status == 200 then + pure unit + else do + body <- response.text + throw $ HttpError { status: response.status, body } + +-- | Publish a package +publish :: PublishData -> E2E V1.JobCreatedResponse +publish reqBody = do + { clientConfig } <- ask + liftAff $ post Operation.publishCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Publish) reqBody + +-- | Unpublish a package (requires authentication) +unpublish :: AuthenticatedData -> E2E V1.JobCreatedResponse +unpublish authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Unpublish) authData + +-- | Transfer a package to a new location (requires authentication) +transfer :: AuthenticatedData -> E2E V1.JobCreatedResponse +transfer authData = do + { clientConfig } <- ask + liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Transfer) authData + +-- | Poll a job until it completes or times out. +-- | +-- | This is the recommended way to wait for job completion in E2E tests. +-- | Do not implement custom polling loops; use this function or the higher-level +-- | helpers in Test.E2E.Support.Env (pollJobOrFail, pollJobExpectFailure). 
+pollJob :: JobId -> E2E Job +pollJob jobId = do + { clientConfig } <- ask + go clientConfig 1 + where + go config attempt + | attempt > config.maxPollAttempts = + liftAff $ throw $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" + | otherwise = do + liftAff $ delay config.pollInterval + job <- getJob jobId (Just V1.Debug) Nothing + case (V1.jobInfo job).finishedAt of + Just _ -> pure job + Nothing -> do + when (attempt `mod` 10 == 0) do + Console.log $ "Polling job " <> unwrap jobId <> " (attempt " <> Int.toStringAs Int.decimal attempt <> ")" + go config (attempt + 1) diff --git a/app-e2e/src/Test/E2E/Support/Env.purs b/app-e2e/src/Test/E2E/Support/Env.purs new file mode 100644 index 000000000..06c8d47b9 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Env.purs @@ -0,0 +1,312 @@ +-- | Shared environment and helper functions for E2E tests. +-- | +-- | This module provides: +-- | - TestEnv type and E2E monad for test helpers (re-exported from Types) +-- | - Environment construction from env vars (mkTestEnv) +-- | - WireMock reset helpers for test isolation +-- | - Job polling with automatic failure handling +-- | - Git and metadata state inspection +-- | +-- | All functions operate in the E2E monad (ReaderT TestEnv Aff), so they +-- | have access to the shared test environment without explicit passing. 
+module Test.E2E.Support.Env + ( module ReExports + , mkTestEnv + , runE2E + , resetTestState + , resetDatabase + , resetGitFixtures + , resetLogs + , resetGitHubRequestCache + , pollJobOrFail + , pollJobExpectFailure + , signUnpublishOrFail + , signTransferOrFail + , gitStatus + , isCleanGitStatus + , waitForAllMatrixJobs + , isMatrixJobFor + , readMetadata + , readManifestIndexEntry + , manifestIndexEntryExists + , assertReposClean + , hasStorageUpload + , hasStorageDelete + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask, runReaderT) +import Data.Array as Array +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Node.ChildProcess.Types (Exit(..)) +import Node.FS.Aff as FS.Aff +import Node.Library.Execa as Execa +import Node.Path as Path +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Env as Env +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Manifest (Manifest(..)) +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata (Metadata) +import Registry.Metadata as Metadata +import Registry.Operation (AuthenticatedData, TransferData, UnpublishData) +import Registry.PackageName as PackageName +import Registry.Test.Assert as Assert +import Registry.Version as Version +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Fixtures (PackageFixture) +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.Types (ClientConfig, E2E, E2ESpec, TestEnv, WireMockConfig) as ReExports +import Test.E2E.Support.Types (E2E, TestEnv) +import Test.E2E.Support.WireMock as WireMock + +-- | Build the test environment from environment variables. +-- | Called once at startup in Main, before running any tests. 
+mkTestEnv :: Effect TestEnv +mkTestEnv = do + port <- Env.lookupRequired Env.serverPort + let + clientConfig = + { baseUrl: "http://localhost:" <> show port + , pollInterval: Milliseconds 2000.0 + , maxPollAttempts: 30 + } + + githubUrl <- Env.lookupRequired Env.githubApiUrl + storageUrl <- Env.lookupRequired Env.s3ApiUrl + let + githubWireMock = { baseUrl: githubUrl } + storageWireMock = { baseUrl: storageUrl } + + stateDir <- Env.lookupRequired Env.stateDir + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + + pure { clientConfig, githubWireMock, storageWireMock, stateDir, privateKey } + +-- | Run an E2E computation with a given environment. +-- | Primarily used by hoistSpec in Main. +runE2E :: forall a. TestEnv -> E2E a -> Aff a +runE2E env = flip runReaderT env + +-- | Reset all test state for isolation between tests. +-- | This is the recommended way to set up test isolation in Spec.before_. +-- | Resets: database, git fixtures, storage mock, and logs. +resetTestState :: E2E Unit +resetTestState = do + resetDatabase + resetGitFixtures + WireMock.clearStorageRequests + WireMock.resetStorageScenarios + WireMock.clearGithubRequests + resetGitHubRequestCache + resetLogs + +-- | Reset the database by clearing all job-related tables. +-- | +-- | This works because all job tables (publish_jobs, unpublish_jobs, transfer_jobs, +-- | matrix_jobs, package_set_jobs, logs) have foreign keys to job_info with +-- | ON DELETE CASCADE. See db/schema.sql for the schema definition. +resetDatabase :: E2E Unit +resetDatabase = do + { stateDir } <- ask + let dbPath = Path.concat [ stateDir, "db", "registry.sqlite3" ] + result <- liftAff $ _.getResult =<< Execa.execa "sqlite3" [ dbPath, "DELETE FROM job_info;" ] identity + case result.exit of + Normally 0 -> pure unit + _ -> liftAff $ Aff.throwError $ Aff.error $ "Failed to reset database: " <> result.stderr + +-- | Reset the git fixtures to restore original state. 
+-- | This restores metadata files modified by unpublish/transfer operations. +-- | +-- | Strategy: Reset the origin repos to their initial-fixture tag (created during +-- | setup), then delete the server's scratch git clones. The server will +-- | re-clone fresh copies on the next operation, ensuring a clean cache state. +resetGitFixtures :: E2E Unit +resetGitFixtures = do + { stateDir } <- ask + fixturesDir <- liftEffect $ Env.lookupRequired Env.repoFixturesDir + let + registryOrigin = Path.concat [ fixturesDir, "purescript", "registry" ] + registryIndexOrigin = Path.concat [ fixturesDir, "purescript", "registry-index" ] + scratchDir = Path.concat [ stateDir, "scratch" ] + resetOrigin registryOrigin + resetOrigin registryIndexOrigin + deleteGitClones scratchDir + where + resetOrigin dir = do + void $ gitOrFail [ "reset", "--hard", "initial-fixture" ] dir + void $ gitOrFail [ "clean", "-fd" ] dir + + deleteGitClones scratchDir = do + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry" ] + liftAff $ FS.Extra.remove $ Path.concat [ scratchDir, "registry-index" ] + +-- | Clear server log files for test isolation. +-- | Deletes *.log files from the scratch/logs directory but preserves the directory itself. +resetLogs :: E2E Unit +resetLogs = do + { stateDir } <- ask + let logsDir = Path.concat [ stateDir, "scratch", "logs" ] + let cmd = "rm -f '" <> logsDir <> "'/*.log 2>/dev/null || true" + result <- liftAff $ _.getResult =<< Execa.execa "sh" [ "-c", cmd ] identity + case result.exit of + Normally _ -> pure unit + _ -> pure unit + +-- | Clear cached GitHub API requests from the scratch cache directory. +-- | This ensures each test makes fresh API calls rather than using cached responses. 
+resetGitHubRequestCache :: E2E Unit +resetGitHubRequestCache = do + { stateDir } <- ask + let cacheDir = Path.concat [ stateDir, "scratch", ".cache" ] + liftAff $ Aff.attempt (FS.Aff.readdir cacheDir) >>= case _ of + Left _ -> pure unit + Right files -> for_ files \file -> + when (String.Pattern "Request__" `String.contains` file) do + FS.Extra.remove (Path.concat [ cacheDir, file ]) + +-- | Poll a job until completion, failing the test if the job fails. +-- | Prints error logs on failure for debugging. +pollJobOrFail :: V1.JobId -> E2E V1.Job +pollJobOrFail jobId = do + job <- Client.pollJob jobId + unless (V1.jobInfo job).success do + Console.log "Job failed! Logs:" + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs + Console.log $ String.joinWith "\n" logMessages + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs + let errorMessages = map _.message errorLogs + Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages + pure job + +-- | Poll a job until completion, expecting it to fail. +-- | Returns the job for further assertions on error messages. +pollJobExpectFailure :: V1.JobId -> E2E V1.Job +pollJobExpectFailure jobId = do + job <- Client.pollJob jobId + when (V1.jobInfo job).success do + Assert.fail "Expected job to fail, but it succeeded" + pure job + +-- | Sign an unpublish operation using the pacchettibotti private key from environment. +signUnpublishOrFail :: UnpublishData -> E2E AuthenticatedData +signUnpublishOrFail unpublishData = do + { privateKey } <- ask + case Fixtures.signUnpublish privateKey unpublishData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign unpublish: " <> err + Right authData -> pure authData + +-- | Sign a transfer operation using the pacchettibotti private key from environment. 
+signTransferOrFail :: TransferData -> E2E AuthenticatedData +signTransferOrFail transferData = do + { privateKey } <- ask + case Fixtures.signTransfer privateKey transferData of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to sign transfer: " <> err + Right authData -> pure authData + +-- | Run git status --porcelain in a directory and return the output. +gitStatus :: String -> E2E String +gitStatus cwd = gitOrFail [ "status", "--porcelain" ] cwd + +-- | Run a git command, throwing an exception on failure. +gitOrFail :: Array String -> FilePath -> E2E String +gitOrFail args cwd = liftAff $ Git.gitCLI args (Just cwd) >>= case _ of + Left err -> Aff.throwError $ Aff.error err + Right out -> pure out + +-- | Check if git status output indicates a clean working tree (no changes). +isCleanGitStatus :: String -> Boolean +isCleanGitStatus status = String.null status + +-- | Wait for all matrix jobs for a package to complete. +waitForAllMatrixJobs :: PackageFixture -> E2E Unit +waitForAllMatrixJobs pkg = go 120 0 + where + go :: Int -> Int -> E2E Unit + go 0 _ = liftAff $ Aff.throwError $ Aff.error "Timed out waiting for matrix jobs to complete" + go attempts lastCount = do + jobs <- Client.getJobs + let + matrixJobs = Array.filter (isMatrixJobFor pkg) jobs + totalCount = Array.length matrixJobs + finishedCount = Array.length $ Array.filter (\j -> isJust (V1.jobInfo j).finishedAt) matrixJobs + allFinished = finishedCount == totalCount + stillCreating = totalCount > lastCount + if totalCount >= 1 && allFinished && not stillCreating then + pure unit + else do + when (attempts `mod` 10 == 0) do + Console.log $ "Waiting for matrix jobs: " <> show finishedCount <> "/" <> show totalCount <> " finished" + liftAff $ Aff.delay (Milliseconds 1000.0) + go (attempts - 1) totalCount + +-- | Check if a job is a matrix job for the given package. 
+isMatrixJobFor :: PackageFixture -> Job -> Boolean +isMatrixJobFor pkg = case _ of + MatrixJob { packageName, packageVersion } -> + packageName == pkg.name && packageVersion == pkg.version + _ -> false + +-- | Read and parse the metadata file for a package from the server's scratch clone. +readMetadata :: PackageName -> E2E Metadata +readMetadata packageName = do + { stateDir } <- ask + let metadataPath = Path.concat [ stateDir, "scratch", "registry", "metadata", PackageName.print packageName <> ".json" ] + liftAff (readJsonFile Metadata.codec metadataPath) >>= case _ of + Left err -> liftAff $ Aff.throwError $ Aff.error $ "Failed to read metadata for " <> PackageName.print packageName <> ": " <> err + Right metadata -> pure metadata + +-- | Read and parse the manifest index entry for a package from the server's scratch clone. +readManifestIndexEntry :: PackageName -> E2E (Array Manifest) +readManifestIndexEntry packageName = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath packageName >>= case _ of + Left err -> Aff.throwError $ Aff.error $ "Failed to read manifest index for " <> PackageName.print packageName <> ": " <> err + Right manifests -> pure $ Array.fromFoldable manifests + +-- | Check if a specific package version exists in the manifest index. +manifestIndexEntryExists :: PackageFixture -> E2E Boolean +manifestIndexEntryExists pkg = do + { stateDir } <- ask + let indexPath = Path.concat [ stateDir, "scratch", "registry-index" ] + liftAff $ ManifestIndex.readEntryFile indexPath pkg.name >>= case _ of + Left _ -> pure false + Right manifests -> pure $ Array.any (\(Manifest m) -> m.version == pkg.version) $ Array.fromFoldable manifests + +-- | Assert that both git repos (registry and registry-index) have no uncommitted changes. 
+assertReposClean :: E2E Unit +assertReposClean = do + { stateDir } <- ask + let scratchRegistry = Path.concat [ stateDir, "scratch", "registry" ] + let scratchRegistryIndex = Path.concat [ stateDir, "scratch", "registry-index" ] + registryStatus <- gitStatus scratchRegistry + registryIndexStatus <- gitStatus scratchRegistryIndex + unless (isCleanGitStatus registryStatus) do + Assert.fail $ "registry repo has uncommitted changes:\n" <> registryStatus + unless (isCleanGitStatus registryIndexStatus) do + Assert.fail $ "registry-index repo has uncommitted changes:\n" <> registryIndexStatus + +-- | Check if a storage upload (PUT) occurred for a specific package. +hasStorageUpload :: PackageFixture -> E2E Boolean +hasStorageUpload pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + putRequests = WireMock.filterByMethod "PUT" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) putRequests + +-- | Check if a storage delete (DELETE) occurred for a specific package. +hasStorageDelete :: PackageFixture -> E2E Boolean +hasStorageDelete pkg = do + requests <- WireMock.getStorageRequests + let + expectedPath = PackageName.print pkg.name <> "/" <> Version.print pkg.version <> ".tar.gz" + deleteRequests = WireMock.filterByMethod "DELETE" requests + pure $ Array.any (\r -> String.contains (String.Pattern expectedPath) r.url) deleteRequests diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs new file mode 100644 index 000000000..f23b494e3 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -0,0 +1,226 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. 
+module Test.E2E.Support.Fixtures + ( PackageFixture + , effect + , console + , prelude + , effectPublishData + , effectPublishDataDifferentLocation + , consolePublishData + , failingTransferData + , nonexistentTransferData + , trusteeAuthenticatedData + , effectUnpublishData + , effectTransferData + , nonexistentUnpublishData + , preludeUnpublishData + , signUnpublish + , signTransfer + , invalidJsonIssueEvent + ) where + +import Registry.App.Prelude + +import Data.Codec.JSON as CJ +import JSON as JSON +import Registry.Location (Location(..)) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.SSH as SSH +import Registry.Test.Utils as Utils +import Registry.Version (Version) + +type PackageFixture = { name :: PackageName, version :: Version } + +-- | effect@4.0.0 fixture package +effect :: PackageFixture +effect = { name: Utils.unsafePackageName "effect", version: Utils.unsafeVersion "4.0.0" } + +-- | console@6.1.0 fixture package +console :: PackageFixture +console = { name: Utils.unsafePackageName "console", version: Utils.unsafeVersion "6.1.0" } + +-- | prelude@6.0.1 fixture package +prelude :: PackageFixture +prelude = { name: Utils.unsafePackageName "prelude", version: Utils.unsafeVersion "6.0.1" } + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: Operation.PublishData +effectPublishData = + { name: effect.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: effect.version + } + +-- | Publish data for effect@99.0.0 with a DIFFERENT location. 
+-- | Uses a non-existent version to avoid duplicate job detection, +-- | but still targets an existing package to test location conflicts. +effectPublishDataDifferentLocation :: Operation.PublishData +effectPublishDataDifferentLocation = + effectPublishData + { location = Just $ GitHub + { owner: "someone-else" + , repo: "purescript-effect" + , subdir: Nothing + } + , version = Utils.unsafeVersion "99.0.0" + , ref = "v99.0.0" + } + +-- | Publish data for console@6.1.0, used for concurrency tests. +-- | Console depends on effect ^4.0.0 and prelude ^6.0.0. +-- | This matches the fixtures in app/fixtures/github-packages/console-6.1.0 +consolePublishData :: Operation.PublishData +consolePublishData = + { name: console.name + , location: Just $ GitHub + { owner: "purescript" + , repo: "purescript-console" + , subdir: Nothing + } + , ref: "v6.1.0" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: console.version + } + +-- | Unpublish data for effect@4.0.0, used for publish-then-unpublish tests. +effectUnpublishData :: UnpublishData +effectUnpublishData = + { name: effect.name + , version: effect.version + , reason: "Testing unpublish flow" + } + +-- | Transfer data for effect, used for transfer tests. +-- | Transfers effect to a different GitHub owner. +effectTransferData :: TransferData +effectTransferData = + { name: effect.name + , newLocation: GitHub + { owner: "new-owner" + , repo: "purescript-effect" + , subdir: Nothing + } + } + +-- | Unpublish data for a nonexistent package. +-- | Used to test error handling when unpublishing an unknown package. +nonexistentUnpublishData :: UnpublishData +nonexistentUnpublishData = + { name: Utils.unsafePackageName "nonexistent-package" + , version: Utils.unsafeVersion "1.0.0" + , reason: "Testing error handling for unknown package" + } + +-- | Unpublish data for prelude@6.0.1. +-- | This package was published long ago (in fixtures), so it should fail +-- | the 48-hour time limit check. 
+preludeUnpublishData :: UnpublishData +preludeUnpublishData = + { name: prelude.name + , version: prelude.version + , reason: "Testing 48-hour limit enforcement" + } + +-- | Sign an unpublish operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signUnpublish :: String -> UnpublishData -> Either String AuthenticatedData +signUnpublish privateKey unpublishData = do + let rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishData + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private rawPayload + pure + { payload: Unpublish unpublishData + , rawPayload + , signature + } + +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = do + let + transferPayload :: TransferData + transferPayload = + { name: prelude.name + , newLocation: GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } + } + + rawPayload :: String + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + + { payload: Transfer transferPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. 
+trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = do + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: prelude.name + , version: prelude.version + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + + { payload: Unpublish unpublishPayload + , rawPayload + , signature: SSH.Signature "invalid-signature-for-testing" + } + +-- | Transfer data for a nonexistent package. +-- | Used to test error handling when transferring an unknown package. +-- | Job should fail with "has not been published before" error. +nonexistentTransferData :: TransferData +nonexistentTransferData = + { name: Utils.unsafePackageName "nonexistent-package" + , newLocation: GitHub + { owner: "someone" + , repo: "purescript-nonexistent" + , subdir: Nothing + } + } + +-- | Sign a transfer operation using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signTransfer :: String -> TransferData -> Either String AuthenticatedData +signTransfer privateKey transferData = do + let rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferData + private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + let signature = SSH.sign private rawPayload + pure + { payload: Transfer transferData + , rawPayload + , signature + } + +-- | GitHub issue event with invalid JSON in the body. +-- | Used to test that malformed JSON is handled gracefully with an error comment. +-- | Note: The inner JSON has a trailing comma (`"v1.0.0",}`) which is intentionally +-- | malformed to trigger a parse error. 
+invalidJsonIssueEvent :: String +invalidJsonIssueEvent = + """{"sender":{"login":"packaging-team-user"},"issue":{"number":101,"body":"```json\n{\"name\": \"effect\", \"ref\": \"v1.0.0\",}\n```"}}""" diff --git a/app-e2e/src/Test/E2E/Support/Types.purs b/app-e2e/src/Test/E2E/Support/Types.purs new file mode 100644 index 000000000..2e4429057 --- /dev/null +++ b/app-e2e/src/Test/E2E/Support/Types.purs @@ -0,0 +1,48 @@ +-- | Core types for E2E tests. +-- | +-- | This module defines the shared environment and monad types used by all +-- | E2E test helpers. It's kept separate to avoid circular dependencies +-- | between Env, Client, and WireMock modules. +module Test.E2E.Support.Types + ( TestEnv + , ClientConfig + , WireMockConfig + , E2E + , E2ESpec + ) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ReaderT) +import Effect.Aff (Milliseconds) +import Test.Spec (SpecT) + +-- | Configuration for the E2E test client +type ClientConfig = + { baseUrl :: String + , pollInterval :: Milliseconds + , maxPollAttempts :: Int + } + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | The shared test environment available to all E2E helpers. +-- | Constructed once at startup from environment variables. +type TestEnv = + { clientConfig :: ClientConfig + , githubWireMock :: WireMockConfig + , storageWireMock :: WireMockConfig + , stateDir :: String + , privateKey :: String + } + +-- | The base monad for E2E test helpers. +-- | All Client, Env, and WireMock functions operate in this monad. +type E2E = ReaderT TestEnv Aff + +-- | The spec type for E2E tests. +-- | Test modules export `spec :: E2ESpec` instead of `spec :: Spec Unit`. 
+type E2ESpec = SpecT E2E Unit Identity Unit diff --git a/test-utils/src/Registry/Test/E2E/WireMock.purs b/app-e2e/src/Test/E2E/Support/WireMock.purs similarity index 52% rename from test-utils/src/Registry/Test/E2E/WireMock.purs rename to app-e2e/src/Test/E2E/Support/WireMock.purs index 6895d9e44..4e3789fca 100644 --- a/test-utils/src/Registry/Test/E2E/WireMock.purs +++ b/app-e2e/src/Test/E2E/Support/WireMock.purs @@ -2,15 +2,19 @@ -- | -- | This module provides helpers to query WireMock's request journal, allowing -- | tests to assert on what HTTP requests were made to mock services. -module Registry.Test.E2E.WireMock - ( WireMockConfig - , WireMockRequest +-- | +-- | Also provides helpers for managing WireMock scenarios (stateful mocking). +-- | Scenarios allow responses to change based on state transitions - e.g., a +-- | package tarball returns 404 until it's been "uploaded" via PUT, after which +-- | it returns 200. +module Test.E2E.Support.WireMock + ( WireMockRequest , WireMockError(..) 
- , configFromEnv - , getRequests - , getRequestsOrFail - , clearRequests - , clearRequestsOrFail + , getGithubRequests + , getStorageRequests + , clearGithubRequests + , clearStorageRequests + , resetStorageScenarios , filterByMethod , filterByUrlContaining , printWireMockError @@ -18,34 +22,24 @@ module Registry.Test.E2E.WireMock , failWithRequests ) where -import Prelude +import Registry.App.Prelude +import Codec.JSON.DecodeError as CJ.DecodeError import Control.Monad.Error.Class (class MonadThrow, throwError) import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Class (lift) +import Control.Monad.Reader (ask) import Data.Array as Array -import Data.Bifunctor (lmap) import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record -import Data.Either (Either(..)) import Data.Int as Int -import Data.Maybe (Maybe(..)) import Data.String as String -import Effect (Effect) -import Effect.Aff (Aff) import Effect.Aff as Aff +import Effect.Exception (Error) import Effect.Exception as Effect.Exception import Fetch (Method(..)) import Fetch as Fetch -import Effect.Exception (Error) import JSON as JSON -import Node.Process as Process -import Codec.JSON.DecodeError as CJ.DecodeError - --- | Configuration for connecting to WireMock admin API -type WireMockConfig = - { baseUrl :: String - } +import Test.E2E.Support.Types (E2E) -- | A recorded request from WireMock's journal type WireMockRequest = @@ -64,16 +58,6 @@ printWireMockError = case _ of HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw --- | Create config from GITHUB_API_URL environment variable. --- | Convenience for tests that need to inspect GitHub mock requests. --- | Each WireMock instance has its own admin API on the same port. 
-configFromEnv :: Effect WireMockConfig -configFromEnv = do - maybeUrl <- Process.lookupEnv "GITHUB_API_URL" - case maybeUrl of - Nothing -> Effect.Exception.throw "GITHUB_API_URL environment variable is not set." - Just baseUrl -> pure { baseUrl } - -- | Codec for a single request entry in WireMock's response requestCodec :: CJ.Codec WireMockRequest requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object @@ -100,10 +84,10 @@ parseResponse codec body = do json <- lmap (append "JSON parse error: ") $ JSON.parse body lmap CJ.DecodeError.print $ CJ.decode codec json --- | Get all recorded requests from WireMock's journal -getRequests :: WireMockConfig -> Aff (Either WireMockError (Array WireMockRequest)) -getRequests config = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: GET } +-- | Get all recorded requests from a WireMock instance +getRequestsFrom :: String -> Aff (Either WireMockError (Array WireMockRequest)) +getRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: GET } body <- lift response.text if response.status == 200 then case parseResponse journalCodec body of @@ -112,35 +96,61 @@ getRequests config = runExceptT do else throwError $ HttpError { status: response.status, body } --- | Clear all recorded requests from WireMock's journal -clearRequests :: WireMockConfig -> Aff (Either WireMockError Unit) -clearRequests config = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: DELETE } +-- | Clear all recorded requests from a WireMock instance +clearRequestsFrom :: String -> Aff (Either WireMockError Unit) +clearRequestsFrom baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Reset all scenarios to 
initial state on a WireMock instance +resetScenariosOn :: String -> Aff (Either WireMockError Unit) +resetScenariosOn baseUrl = runExceptT do + response <- lift $ Fetch.fetch (baseUrl <> "/__admin/scenarios/reset") { method: POST } if response.status == 200 then pure unit else do body <- lift response.text throwError $ HttpError { status: response.status, body } --- | Get requests, throwing on error. Useful in tests where failure should abort. -getRequestsOrFail :: WireMockConfig -> Aff (Array WireMockRequest) -getRequestsOrFail config = do - result <- getRequests config - case result of - Left err -> - throwError $ Aff.error $ "Failed to get WireMock requests: " <> printWireMockError err - Right rs -> - pure rs - --- | Clear requests, throwing on error. Useful in test setup. -clearRequestsOrFail :: WireMockConfig -> Aff Unit -clearRequestsOrFail config = do - result <- clearRequests config - case result of - Left err -> - Aff.throwError $ Aff.error $ "Failed to clear WireMock journal: " <> printWireMockError err - Right _ -> - pure unit +-- | Helper to run a WireMock operation and throw on error +orFail :: forall a. String -> Either WireMockError a -> Aff a +orFail context = case _ of + Left err -> Aff.throwError $ Aff.error $ context <> ": " <> printWireMockError err + Right a -> pure a + +-- | Get captured requests from the GitHub WireMock. +getGithubRequests :: E2E (Array WireMockRequest) +getGithubRequests = do + { githubWireMock } <- ask + liftAff $ getRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to get GitHub WireMock requests" + +-- | Get captured requests from the storage WireMock (S3, Pursuit). +getStorageRequests :: E2E (Array WireMockRequest) +getStorageRequests = do + { storageWireMock } <- ask + liftAff $ getRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to get storage WireMock requests" + +-- | Clear the GitHub WireMock request journal. 
+clearGithubRequests :: E2E Unit +clearGithubRequests = do + { githubWireMock } <- ask + liftAff $ clearRequestsFrom githubWireMock.baseUrl >>= orFail "Failed to clear GitHub WireMock requests" + +-- | Clear the storage WireMock request journal. +clearStorageRequests :: E2E Unit +clearStorageRequests = do + { storageWireMock } <- ask + liftAff $ clearRequestsFrom storageWireMock.baseUrl >>= orFail "Failed to clear storage WireMock requests" + +-- | Reset all storage WireMock scenarios to their initial state. +resetStorageScenarios :: E2E Unit +resetStorageScenarios = do + { storageWireMock } <- ask + liftAff $ resetScenariosOn storageWireMock.baseUrl >>= orFail "Failed to reset storage WireMock scenarios" -- | Filter requests by HTTP method filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest @@ -152,13 +162,12 @@ filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pa -- | Format an array of requests for debugging output formatRequests :: Array WireMockRequest -> String -formatRequests requests = String.joinWith "\n" $ map formatRequest requests +formatRequests = String.joinWith "\n" <<< map formatRequest where - formatRequest r = r.method <> " " <> r.url <> case r.body of + formatRequest req = req.method <> " " <> req.url <> case req.body of Nothing -> "" - Just b -> "\n Body: " <> b + Just body -> "\n Body: " <> body -- | Fail a test with a message and debug info about captured requests. failWithRequests :: forall m a. 
MonadThrow Error m => String -> Array WireMockRequest -> m a -failWithRequests msg requests = throwError $ Effect.Exception.error $ - msg <> "\n\nCaptured requests:\n" <> formatRequests requests +failWithRequests msg requests = throwError $ Effect.Exception.error $ String.joinWith "\n" [ msg, "\nCaptured requests:", formatRequests requests ] diff --git a/app-e2e/src/Test/E2E/Workflow.purs b/app-e2e/src/Test/E2E/Workflow.purs new file mode 100644 index 000000000..3e65b5840 --- /dev/null +++ b/app-e2e/src/Test/E2E/Workflow.purs @@ -0,0 +1,107 @@ +-- | End-to-end tests for multi-operation workflows. +-- | +-- | These tests verify complex scenarios involving multiple operations: +-- | 1. Git state remains clean after multiple matrix jobs complete +-- | 2. Dependency state is validated correctly across publish/unpublish sequences +module Test.E2E.Workflow (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Map as Map +import Data.String as String +import Registry.API.V1 as V1 +import Registry.Metadata (Metadata(..)) +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.E2E.Support.WireMock as WireMock +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Concurrent git operations" do + Spec.it "multiple matrix jobs complete without conflict" do + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail publishJobId + Env.waitForAllMatrixJobs Fixtures.effect + + uploadOccurred <- Env.hasStorageUpload Fixtures.effect + unless uploadOccurred do + Assert.fail "Expected tarball upload to S3 for effect@4.0.0" + + Metadata metadata <- Env.readMetadata Fixtures.effect.name + unless (isJust $ Map.lookup Fixtures.effect.version metadata.published) do + Assert.fail "Expected effect@4.0.0 to be in published metadata" + + 
manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + unless manifestExists do + Assert.fail "Expected effect@4.0.0 to exist in manifest index" + + Spec.describe "Dependency and unpublish interactions" do + Spec.it "publishing a package fails when its dependency was unpublished" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + _ <- Env.pollJobOrFail unpublishJobId + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + unless deleteOccurred do + Assert.fail "Expected tarball delete from S3 for effect@4.0.0" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + when manifestExists do + Assert.fail "Expected effect@4.0.0 to be removed from manifest index after unpublish" + + WireMock.clearStorageRequests + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + consoleJob <- Env.pollJobExpectFailure consoleJobId + + let + logs = (V1.jobInfo consoleJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "Could not produce valid dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected dependency resolution error, got:\n" <> String.joinWith "\n" logMessages + + consoleUploadOccurred <- Env.hasStorageUpload Fixtures.console + when consoleUploadOccurred do + Assert.fail "Expected no tarball upload for console@6.1.0 after failed publish" + + Spec.it "unpublishing a package fails when dependents exist in manifest index" do + { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData + _ <- Env.pollJobOrFail effectJobId + + { jobId: consoleJobId } <- Client.publish Fixtures.consolePublishData + _ <- Env.pollJobOrFail consoleJobId + + WireMock.clearStorageRequests + + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: 
unpublishJobId } <- Client.unpublish authData + unpublishJob <- Env.pollJobExpectFailure unpublishJobId + + let + logs = (V1.jobInfo unpublishJob).logs + logMessages = map _.message logs + hasDependencyError = Array.any (String.contains (String.Pattern "unsatisfied dependencies")) logMessages + unless hasDependencyError do + Assert.fail $ "Expected unsatisfied dependencies error, got:\n" <> + String.joinWith "\n" logMessages + + deleteOccurred <- Env.hasStorageDelete Fixtures.effect + when deleteOccurred do + Assert.fail "Expected no tarball delete for effect@4.0.0 after failed unpublish" + + manifestExists <- Env.manifestIndexEntryExists Fixtures.effect + unless manifestExists do + Assert.fail "Expected effect@4.0.0 to still exist in manifest index after failed unpublish" + + Metadata effectMeta <- Env.readMetadata Fixtures.effect.name + unless (isJust $ Map.lookup Fixtures.effect.version effectMeta.published) do + Assert.fail "Expected effect@4.0.0 to still be in published metadata after failed unpublish" diff --git a/app-e2e/src/Test/Main.purs b/app-e2e/src/Test/Main.purs new file mode 100644 index 000000000..bf3a108fb --- /dev/null +++ b/app-e2e/src/Test/Main.purs @@ -0,0 +1,38 @@ +module Test.E2E.Main (main) where + +import Registry.App.Prelude + +import Data.Time.Duration (Milliseconds(..)) +import Test.E2E.Endpoint.Jobs as Jobs +import Test.E2E.Endpoint.Publish as Publish +import Test.E2E.Endpoint.Transfer as Transfer +import Test.E2E.Endpoint.Unpublish as Unpublish +import Test.E2E.GitHubIssue as GitHubIssue +import Test.E2E.Support.Env (assertReposClean, mkTestEnv, resetTestState, runE2E) +import Test.E2E.Workflow as Workflow +import Test.Spec (hoistSpec) +import Test.Spec as Spec +import Test.Spec.Reporter.Console (consoleReporter) +import Test.Spec.Runner.Node (runSpecAndExitProcess') +import Test.Spec.Runner.Node.Config as Cfg + +main :: Effect Unit +main = do + env <- mkTestEnv + runSpecAndExitProcess' config [ consoleReporter ] $ hoistE2E env do + 
Spec.before_ resetTestState $ Spec.after_ assertReposClean $ Spec.describe "E2E Tests" do + Spec.describe "Endpoints" do + Spec.describe "Publish" Publish.spec + Spec.describe "Jobs" Jobs.spec + Spec.describe "Unpublish" Unpublish.spec + Spec.describe "Transfer" Transfer.spec + + Spec.describe "Workflows" do + Spec.describe "GitHubIssue" GitHubIssue.spec + Spec.describe "Multi-operation" Workflow.spec + where + hoistE2E env = hoistSpec identity (\_ m -> runE2E env m) + config = + { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 60_000.0 } + , parseCLIOptions: false + } diff --git a/app/fixtures/github-packages/console-6.1.0/LICENSE b/app/fixtures/github-packages/console-6.1.0/LICENSE new file mode 100644 index 000000000..311379c1e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/LICENSE @@ -0,0 +1,26 @@ +Copyright 2018 PureScript + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/app/fixtures/github-packages/console-6.1.0/bower.json b/app/fixtures/github-packages/console-6.1.0/bower.json new file mode 100644 index 000000000..da93c7f6e --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/bower.json @@ -0,0 +1,22 @@ +{ + "name": "purescript-console", + "homepage": "https://github.com/purescript/purescript-console", + "license": "BSD-3-Clause", + "repository": { + "type": "git", + "url": "https://github.com/purescript/purescript-console.git" + }, + "ignore": [ + "**/.*", + "bower_components", + "node_modules", + "output", + "test", + "bower.json", + "package.json" + ], + "dependencies": { + "purescript-effect": "^4.0.0", + "purescript-prelude": "^6.0.0" + } +} diff --git a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js new file mode 100644 index 000000000..432a4241b --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.js @@ -0,0 +1,9 @@ +export const log = s => () => console.log(s); +export const warn = s => () => console.warn(s); +export const error = s => () => console.error(s); +export const info = s => () => console.info(s); +export const debug = s => () => console.debug(s); +export const time = s => () => console.time(s); +export const timeLog = s => () => console.timeLog(s); +export const timeEnd = s => () => console.timeEnd(s); +export const clear = () => console.clear(); diff --git 
a/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs new file mode 100644 index 000000000..364ee2b1c --- /dev/null +++ b/app/fixtures/github-packages/console-6.1.0/src/Effect/Console.purs @@ -0,0 +1,46 @@ +-- | This module provides functions for outputting strings to the console. +module Effect.Console + ( log + , logShow + , warn + , warnShow + , error + , errorShow + , info + , infoShow + , debug + , debugShow + , time + , timeLog + , timeEnd + , clear + ) where + +import Prelude + +import Effect (Effect) + +foreign import log :: String -> Effect Unit +foreign import warn :: String -> Effect Unit +foreign import error :: String -> Effect Unit +foreign import info :: String -> Effect Unit +foreign import debug :: String -> Effect Unit +foreign import time :: String -> Effect Unit +foreign import timeLog :: String -> Effect Unit +foreign import timeEnd :: String -> Effect Unit +foreign import clear :: Effect Unit + +logShow :: forall a. Show a => a -> Effect Unit +logShow = log <<< show + +warnShow :: forall a. Show a => a -> Effect Unit +warnShow = warn <<< show + +errorShow :: forall a. Show a => a -> Effect Unit +errorShow = error <<< show + +infoShow :: forall a. Show a => a -> Effect Unit +infoShow = info <<< show + +debugShow :: forall a. 
Show a => a -> Effect Unit +debugShow = debug <<< show diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz index 2ef880dff4d0b29735276e267a483f25607e3387..c06e9b2767ae864e73f4bc4f1d9ef76583387aa1 100644 GIT binary patch literal 31321 zcmV)8K*qlxiwFP!000001MFRUciKp@&)@nKy`Hh9`ab(iqW=T0=@dq?KJS$q8G zn_T;FJvlrS-$&vbF7-|K)x&D-NxfFvKd3$0uU3!h`;XY+w-G0OML}r!j6Dkd$;xuK z6V9DY2(;O2mg@i0cC&TfZ9Om#?BJt=gJu0+J34$K?+@zL8l+ciN40}TZ2y7fD7^mU z{onLv3*Ww(gsiq-{X4sieBK@U_AE4v4jCT-Ta%lm+h-aVMTLf W z6e6|TVSrK#dY&H`Q)|Hn97hPq?=kLU+7Tipt4pEpmhPTJ?MD+~y*^NSukZJ)J!P^@=RA*QmLkyK+Buh?0u z(|iMujhF4ycJCbt@~YiCM_#XhS%Y0RI=y!D>a@{emsg$3i*AcSvy78=w|UxVpS4a* z;0_#F>*v;ak9FTPPEQM4iL$5K__75;HeQ~#kQ24?q}^#Xd)S=ht_hk0`A#dWd)aEX z@uu}l3mVhtysJo>-PV6!K_SRwCylel>sHq&tx*Z|N%N}HIzyR358bPm-CnzQ)oZcW z7Z)d_=x(d?Gpsw^XU6G8m$Y@&ZB>9nuR)A~1gNU}4DVlFb=#!0_Ia<>>0Dj*+85_# zX!%=E3&_}jswbq~i*sr!Xy~Hz4vC@;Nck1^_Du`YJE&07R0CDe1&uX(dNJS!3hZfZ zWaq8dr|s9R^JWWkFObUHcDGfAzG`=|0FXqsZyUh#ikggl1X79HcGplNwn9D0+OJsS z!~{U3O@VCjAfJoh1atXco#ykz~aAJuBx^?wuB zCxfvawCX!g+0G1B|A1EiJvlCRDwsd>&Vr|pAK!qbjt1t?n?6peJzhp( zBvZk1Bat2~B?;7EmwHhLmPa=rUN5^Rd-c7hW5KE-GGT4?0vi^b1>~dbkQOdxR2n3r zLXZ^s&ezClA_<`InRwYZu<9Y@_k!r|?qhRTaiE!U4HRdd3oCJ;(p|tI*8qDWhsSBK zpv|IKwvY#5iVDp)l`^x2A3)P04U&E}K0o}V*RJ&Z@3a~xXDxF&dQcu~=6|)mf0Uj7 zP<}iAH*x*I^3%XD-v8zOsf7a-z^ucqfd5*;xdc9+Xk6myzv^81z{f1z$ny*7HA%0api)o9)K~Yji_W{bS%;?r?HEwE{rT zUn8S6B=r7|-y`lXjun@`)*H?CPl7nyOY+zZFn&Q~hY6>dNH7!Hrsv;0mgXQR;z{C{ z??a*tAUbV{RaY6t4?nQB3wm}O@-hsg-|qt~w{AS}LMUAorW`DPqPr!AaYj}!;XLB7R4|PmI#?b6?%x_*@(|i_>8+F?hb9PNh+9Ot^a*u zRu9en7+YzkBe1BzzWH~vmgGA#Os(Q%*x!rmi3U>nI!+Uk;Tz+!O=6(-v9NZYHi#9o4A(t|CJk9WAp^>&tu*G&q3`l)BlI{C-v?6zln=~ zoOynTM|nZWBI@%uR=|$gIC5!EgOwa>z@0L!=spi4-_`PbJ}k?%UgmxJY)-5IR{Trf zn{hu}m_y5PuwaD=O3!dfe`dfxqk#Ks9^QjHq`apZzpm!ey#trI=bB!`ut2Ljy)fV`0N8;;0bQ324nV^l~ySYYL)f!j5xf96D(b6g%3C(;q3REE4 z{=FyV2CsGYA3ehR@>pa4YllbGto;X9aBKfJaqaD~KNvbrYT5=O$ixnqIK`p>oQZui 
zaTd}FFjybL2~5t==N6t2;CR7UmJi$@a=3-#-Z+arm~?!^rq)b^wG1tsXxx{PF}`<; zkt8&|0}%yeNoow{xk_}jTktL*ue)5SvmchspjB24Ems@KA z`~*P?j&X0Wq3%t9K{}C$K?2#MA((_X6h;cPM|}84YdUjy1-ZB^3ZSx}*Y9|@J@3>G z!oJCNcYCOwVB$s2h`Ag*={Ph7soZdIhb>2uo?5Owi^Mj&xw~sfy^DEhig_rSWe8@? z2_Jq4&_?R|f(OudMMwn?q8ErEwgd7^10J}2<1%|nwmn(zpFYKcxT)_~P1Y4v_g+XL zAQ2y4dLC{9N-2b3Fe+|%h|Fna5ivbVtkuFK-P<0kR5tQ>fH6{_YK)Ddfp}|!TtnPu zK+2ID+R&%m_pw8qfRk#Lv<=aO);4ApH4g8sY^wsjRXFDE*e0A)a*tG0{98aH9@+z< ziSQ;E4}nM1V!-9hpx`OXU0_*LDn<~L#PGBmiP$a>1P(8}_|6zTL|2A{QMz>F0-WP@v(Hfa>5r16_vB;vj827A>_Jzgu`R|eK zORNJ%+IGnRdu#+2)1TU|fPYjUKo*n`HoC$FP)-mB%wjD8+C5v+)bHng`Gbq=muIx0a9)R~LdYx~?D{07gO&oNZW>fEUJ8Q`4k& z1O%0&Vbvq*A~IW-7hx`8SwRl5I5B;ywZGr*Yr#GuP1J7uW_|4NU-J&elZa!_)0FD1g?|-nUx-T4|;XV6k6}=-1=%HX@FuZ6w|l*a?90s6!6Gf69jORouTt#-Fm7XCO1H-M+!|Rur zJe-!_aso;&Kq%ZSNo$yu1h<%N7lu(&j6lpU_vIABw;2RIP(vrxM>yU3nR@;B1rX>F za_Ja@*ovRz5czElo46{Z|0z`}z!wC=gG&Vp0 zN6Eqv#+*)tBZ*Co94Cg2;++XTwq0AzS+ZN+j5*-bIb}k#oR{e){Tqn zOhcD;!II*~qIB`RfVr(0m6>Zf4uT@}F5q4;vQP=!{gE}W9Xni@ z?2UJej%`KSHsQ2c4loX4b6pX32~c&A^bz$IbGVLV;q^vQ-~U_5tOmk*k%k;sN`;@y_;2UwQnGBkjBU zf3<_H|F@BAN&mmXtya%}kmKL=@n7|v|5vL%+4_H*xKgKlmhW4OGi#PQ;TtbG-GjpV z=^n03kYqaCy$L7J;&kaL9W$5TvIa6Ji6da`1ki@;{b%s?oH_9I(@)7so~Zub#&f(e z@`PJi{d4-@7t8%AkMQnwEt&rvK6bz%CGd6cU7$7dzh14@GxPtbesr{*|C_iZ0K`r= zwN8h5RFckXI<*p~+G9IT^F%FUy z1hlHQZxD#?3HE3@{6sLyN}U+eyDRRH2l0MT@CASxFPsFn z8y|Sd$F>`)TP3=B?k=VS&oOcuTY`=8yGahQWdqTCwK)rc?i|a>Eiup}@sg#Ki`1W! 
zkB+b7qXV@kJX8D$0v@QBZcH-YaStV~fS{k!FVpoayLgp$X_p0eNs_vBz=96QNV&}Y zu+M^p3%7y&FFq19GD;Ums@NgoT$bZTL#&X##=tZ(Ll7IZLaT8rUGYe84_S|Cz&n~JbVjz|w!ZZ<95){&@if%f!BLhsv z;|dGI3NECMjoZMmuKiZQUp<0P*MpqWk3W{-^N&BuX0=(ZTJ z$*8mdkYO-hWRS6A-BSFYH5%z>9&%AktG$n<_0@lmwLT8fn2gxe9*q<f}IOx+3*uz7{4gtMaOd%>dWdOj=2R zwDK}SG7<=v?9QnfhYeDY6aE}nrEu1WUkbxA@V{KdRz(;=_A=J-Zhf|{Iv9vO0o@4@ z(VgO09udZve*8h4wdTgG;v7`yoJnFYkLDzw(!qAoP>%)HSSpxbs19g^&>rbdfq$Hf zMfW+UpvZur>S8Si(Ez`4qj7oBKjK8hYm~Nh;0h||wv+D+$p9LM*+{rlUPA|42ZYF8 zE8B%(b!3P^4JH-9;L5Q#i~=cK&jR{VPAs^pom4hyD?S#~5#-6fAHRUFLW(%(GxBIm z${Lp~p4`N>1pjp>Fm4_w{A(TlJ3Khb{{H7-wYvTNpN(8Pj>AsZP@jCPAY{1JP#va= z+5C!@g1Ag3u@L5U`P9Dgy=cZtrROic2#3Yni(d%KDE(*824jPt6=n^B$RFilo8)sI z4RU#T?3ND4FU8^IU$%Kk01p+n6xL0=KyI{w+U(&(+qju?A49`n`SMf{&Lo;z?w$z9 zaF8_ZJN0uLe_9~W#D;>1z*Ou-VGIo7N)flWz@RiBMzJ~lkNrx51Z4(A0b&*p*p*gXV`t(px8`BHw%5Zj7TRs-l7dZtK=*TZ7U?Fs+ zyOPMT%0<*D4~YrelcpHMSV2dD*MbhFtZYw^Wl**0AtQh=7nxcy?j!>VCWHAfbA;_n z~4VEA<5BIKS^zPI@q+FMyz-H)9)(aQDeTB)m|aiUz8dnq*Mp*)yYE0h=thw2+^7fT{1 zX=7I!lXQ#4#$EQLN?Wt!QHwUQ3d$x66c(2Y*~%JfQNr@HaIGwzEI~AJ;}@}(*GQI` zbYP|iE9kgJE>#+_?+x1@>{{agH31Pu2qNda1MR(^E4ViP<0$+4A5ZH0+xLGqa_RnG zrqeZ7FrSk?0VqsFSI<`&g>+V*2w(~2qv3rVdJ@+x=Uhu)tD~MPxSb86Mc_$#F0ZhE zao?-3(k`9_{PchFC?KP!zHGTIQ5?mz3k(OX3hkLA|HelA($p{^P<#wPf2m^WAwaySP^l|96?Cw*EWfb>Qd6*-??&@gJu1q#Kp3k&4vF zR7o7DY%P(qw4!n$v(?ha1er?WoFH3;4hph~`ICZS(ezk#R8Tmd@@EA(&V`2s8SXJ6 z$kqP($g)iPsYRcjmdC{ZKywc=z`|4i27dfB$bI*XsU1v4h{fdHi?na6cFSU3;>P z|Jlf;19s8rCU$xTk6)@k457o|*6#`?w|=jdR|fNl(42BU1YFdI+p0nnLI`@dHi*xP zaFxe69vQK!L)xOZIH?66ac70@7VP*~+?76J7|KGv09l%f7m#7SrrhgtTyWrpp*Kx} zL0x`J=QfmyAuQs?FvQ<7V=6yh()sUH0!V|JXxAInC2EybEvo)oQGxKkyqKQz znluhZNkm3vI;vRIeg9b2(*y9 zNx#aQL86h782N*8S}d5Wt2oNpoMR_vo+LgVXIg?D?z)BuS3gS_eS3{8LyuoQhaom} z^kuDN=*F%niNS@U~KWh62``h@Bja&umf4bA} zi3IMt%B$IrKlg@zcr?A2v~J5TlTt>yq!C_ha@4&n`q1Rwze2OGP<2I4a+W6^+ z$qCL~#qKy?f$?!?Oxqu-4a`uO4EeK$7-o=aD+tU`pvQqls>zYqC%-B+jFg`_u~fAb zsU<2ffFvj4GFbH^NwQjg3^!;P>J1d8`&`&E(j+HVPnY7!0xR5?-ljIsXL_3XzbAMH 
zbaT#+o@MeM-~YpD@2I!o|JLyY_&?d{Uzqcg_m9hXK1D8ad+HbRdFq$o@a~5=JQaa> zJM|Nekv?0eJ_AFie>AajDwe~<>C{2?%}&-+g*CElW+IzeYbp)cG$-XM zX4#HvsFBvw4%Mj~7R>wt%$NFo4YtcHSu$L@$~LQI#w8}pnIMY2QsKZ@>0#M62TKDp zY=ZDEN46;5 z<(O&sE{BPzyUXF^e3xVAukUhX7pVQyyT)-kLtW!=@$_<~8$(VS?-EB7gzUL>dE*qK zFK>+L^`#fG&2yutdH;W!jq}gG|MyOMCma2bFZ2ZVf9UkO{lEDLFP6|`nip4*ScCF; zUYwt$<-D}5P?Ibgj0WV22UegejVr_fug;>u?&AT$$pI={UIKcba!bYtXS|Q2?rc0* zhUblSnZTNdZ&P3j@J;Lr03tT^*$4bg>;t~0<3tb{Vj%Fd1@PBGgg*uI*S3Dy=|;$U z5H0Wy`)@r1B8x(v3u8+*L}u2IT<{DsJuzk~1N^x{4$MZ#TomV^b5cP@ZWcQs(-IS* z6Vh*$%3&)o{F2xTeAjHnrD7}mjZ+mS17N(4oW0X8Q|_Ml1rDM3f(KCQs2B%L;P@#E z{9ew0k-35TC8;gc9%AdC|IO?F58$aT9erN>uinYgfxG^no^0;_*YX6`f9`a{{D(v6 zJ8FneIRuCCJwK??qLPk^UW@$k2 z2apadhR~HIexTxQ3|gyL3#he7&byLu`svq%17jzfKG?oEE%tfmfAjh;I&v-g-@g3s zbhG}iLGuBiWwH9-Cl-%w_VZ>>x4BObU1#IMBk<7-v|_% zCaBsJUt+ za<2K>-^(gO+TU)*to<#smD=Cb(G#ity+SQn`@1%#))W8I3-~Rcn*ASaoY#f_*V{jK z&wt1Jrmx^pq2wqd z_79}I&6-8CnR=eh2m|4LGMOZ!s(g>mL5Nw2ZMsl^NqLfnf#ZIG$i|~XoJG%`Jy#OU zj_K6a@jD4_AqKF!PQVGnpaXtx391i?m!-tuVR7bpBu7Vk60HCwjogL{*@lR>ms_2t zPCdGR1Fp^~PeeTU(j#^Ae2%9M{}tf|?#BJwQvYwg1NZ*t7)*jT>;D>_y*(SVhx{0A zS#IGhmo$CL1>Dqf4kv!2^R>-d@w<6y=Ks@=cQp6mS<3$%xbmN)-q8mCYb_7CXq-I# zXie;*SL5`tIFPOxrQgL~)bIDB;y}Azv=@)$BSYvS&R(Aid+5fIID&QG4eY?WC<-jF zE`os%sG_)WG$Pm=Z zc&zoW%FodmUv$trIz9TYhbKp;&MHc{C=_W|#rm)ALMk#syw*PBrFP*!3?bj=eO>aug98u$>v;e8a5Mkc@tB9dfX@s|9QY&yi4aMUk}-*$P;L}G&!W6K z1HS|S#E3Af=)WRMK8e6lk;GLZT~`!~tV-vJD>(ZPykQ?EqZou?Sx#O|DN=HomX)PP z$kEdhL}!!541{ROES;yBC<*0S=XqJC!*rHbS0wkNtcD4Q?T8|_Jph-c(wk|=m*_oB zv1Zsf%*qao$^o#YtlE#sq~hdU0!S*gKMb=5uR+a`ow0$ak6e9B5OE)+q)>?u$rZF{ zH26gN2jHCp1cxZG1W{;QfF32^dhSl*c`824;%vHzrvR#njTo2Fzr~;82l`4AU(Of9 zSvq1BpJ#b4SW5QS2Bk&g{8c_rsx#4EEx~R^udd1}nRoL=)jb!hb2ZCa+h@samP4n{ z#(&suKNJim=J1JL=76op!?P2=MFX|1oLxkpyeL1zYg9oT<1U<|TX5-E5NPAduW(0Ro`Z~j zOGDaY04^?ckA}a8|1kUP*ZMU1|GFy)(31WCaR1Pg{~RA|`2V#$7I$oVbTH)~nI-?o zx>ZgLdaE4}_&BZ}0{MVsqpX1cwzE@ zBV8^o+WlzM9r3y1Nrfj15~JYPN74S}!9D;*QsjPL1e`vTB9~I^&SIICnZh%Q#bUqx 
z_HzGp`+=;(_Pw`n<9pi=?I=S#!qAH_u_H{p2$>xr^CA>>gu;tZ*%2x)!Y4bzCmG>4 zQ`7}twnbk+8|hx`?YE(SD8+BOM@QI$Lag?f&lD%igoMqN4?2@AjshXm2k{%9-GFcE@Q6 zp3Qc}4`5Aq;l8@aXS+o*O)kj;iymv8DAgk)Fj!PGZ*M(_i?Ox8t$scdKmQw!w6|}+#qp&Ia2nWN6Wl?Kh3ScaBpD3VT)v%e2I*mZ-G+Ck0V@@hVu^Dl3~WRc4W(QeYKdIhGWnG9V2hcU<}L(Ab!YD z3`%M}&t_5FYxBzi$X$%jaZiMKDT-)FDa4DLV%QU5q1xazbErB2TeziFYO;SrB2|PQ z(W-tQ_#|T`ff@9w5ui3~7ALJZ3=&r`heJxuc-RqEK)6E!beZOH0csk|;*bjc9Z1l> zu>2S_II!GR?c|b=0UNoeAA|OBg1nV&<3wC(7w64wo!uqa^H|kGA^_|JU#@=lI{E8- z>h}M|XqJve!=v}dTM7b~>3{Y1kKFzLu(y%_t>v)}u4z{NDVY|Dt>t-X9a>++)me8g z4vqK{4@>dAV<-%;NN{g2dX>;koEl3J>bH|q!8C&?XH|7xKG@q!vhKWlk$ysr`rM+4w4Y9PLN{Of{X% z^1NtWMtcCF^dQ=G0(Y<4b&79-I6?lM5eFiGD@13O?S9p^$ZiRO{)>nLCwce_?{&0g zi@DoyS)IyX=BoTKdSxGU9XP`r4U8^MLaRI6ltLi~tvS3*!-0T)#=d;TEzpVst+5P{) ziM#%v935}$f7kL@#$TZHQ7Zw2ucpuse&2HgwzRKzBiWN;TDmbgY>CtmVs`LCtrN8# zJ#LU2E_dSI0i%yuGO8S(?w%|5R<|n(+VZO5iz2ZNlrWO63j+zEuHnS^MW)9)LzRmw z1%l|c;!Gs3S)fU@##(m50pXH>u&{trk=-EXRB_(pO=h?VN76|x+_7q8i3SNwq)y&R zwIM-g+pnY_uOK}%D|lGn7+R2tQ^V_B9N%u}a7#JOc0V;rIcs9|z3+&W;s(~#!WM2e z)Ixb12gzoz1}h0bxuq?Mr^sqc-TeEN$tQe;{pKA^2uQ;LF+>nS+bx8NF*%iO@xRs^ zj2yj#j}W&lA!H!2BrJ-gWS1wI3hLWygYVr)}R%WY|9cIi$mDWlzTAqLEjRQ=*EhDIr0dcXX8 zd+PW8Fu5hsIfnHIqH87Px zbGsinK{nXY+E%o-jP$B}sO6_MaV|?sTq?36VxWkB+|y~z{KspbHQ9fOv)-X+|8=sF z|E%RP&woD?OC-;I{1c3bhWKxmOu>!|UR%3u{MR}@GAx^M281krPDXh#mKmvEL3m(| z?8e7G{w?#9zbTgvz`1n*sSVAEItc$JXxjlLll#tD@p@-Mjfd5Oza4}^YU@L4I`94r zY?-K{IP^NfKc-@(gHJ!oFGR1%5P(#o76GdKkYK*>`YNfG9!E7I7qsetTO=*Xd`ToK z**0l$j`>Lcc^$`xa)+(e0AO=5tcsXcT6M=}+1Wj3hPM=%fM%= zEWRial8Z|OD=y+Vxx3il4C0x9{kz)jR+@cEiZXeC!2Wnhg}xlfc;Dyqb5XQ5=Nkyp z2Uv#zjmiNh9fC%K*n(OJ1t@;E2V3^ofiqJdN2YNk7unf5MR+Wpq?fbk1V4)I+%@=r z)IKMEA9I3tGp{}{wyzdLCkY!K%T)Z+PM9s`enN@vg~<$@HcCV~I50$=WY02{XbAi| zzO}G|#-S6km_$?L+9%@gF1$NAsq+wJK|kYmfDHWPq^9wYr~RLQgemem$4OF%IW!Y5 z9b=`y0Q@5U-_}&~R}8dUOf4(ku?@14Q-=pibV=5m!vrzqB7(F#6Eo%vh7(Mt0gs)_ z0ay`E`DmgI-hNiT<#?!YBspwh1cwefuV|3LSP{weKW zPV#_Qw8vIxQHKszBS7>l{KNCLKEkgmP!zyn#u(9b(y%Zh>pW|JQGUS@{W6`&Il+-d 
zLqH(p&}s*0c+a!sZ@&dIgvOD=86}4KkX+&RDO$0M5k!FKlF_1M+Fr+^NG1~_dX0rK zbc4Z96_c{kavmc!%vPsFBg3}Y1)-`Iaa7P+SSDgzx28!2VPbuO7qJ3WM29J%;kbRh zGh*R50ik@iU$ACgXF*PoY?)j~#A#z>ph%YagD%q^5VD~nB)oCuv6~6n;p;qF)bRf= zV{u4|XRnJmE5+oWllILG4+P8fzm88G{qKYQ+K{ zu`TBDNlheRTH|OQpPwURl2jK-l6f$$=<(@8nvHq;R(_NQ9V8&muJE(?B0^}>gS|`U zs~B2@t7ZB;3|d- zW+51xeZYMR8$QsS<(yHHiGoLhc4&x)basJ+bd&)AUhaRh>mi3E%NNr#90U2HkXRN4 z`U~rBVf%zfa==xW^3vt}L=@eItbkqa8!X|3Siz|}Hx$uP)R10`$p7>BYuFJQ{eRq% z1Zc_ne+tf?9Qn`D!N&e)EsyX2!;k-qn8SCL z1_1^Q>?8;}Ls;VyiAOX_K1#{GhTg{qj~)+f`4QsSK*|TvKzegQ|F&Qxt)xdks{$Yz z`h{VSV>%jdZd^0ke-zS^6chkyVk2M75Ir5XZ9{CvTRLYXNM&>?1b7!&4FNnrY-g$9 z4Zv?MWh9c}ZYq6GO+}T>f+#yK4do6Qo4t?n84MNP(-S(qTb#=`-+U9jBori)5b%7~ zF#zF-H{4dzopz(a^Q;mbHjqG9ZIqR0;Q)&g6OiTY3Wf?uVgZ7Z6ru=)8?r09sJd4Y zz&+%R@)pHWQPN(Q+G`}aiNXDW-75OaKKJT@mu9CnP0!|sa+sz?N@7ewS1h__y2#-) zomLCknSf8l{0$HfJ;I1mj6=m>{lGL0O!rBTxXE9}^F$CxE2AS+r9lojRiVP;ipL8w;GeYJm7aQKFgW*r>8XnXH&!6q0j1<;$WFI=#L8?Rr zBA!^39DhBZm?JxuS~GCOkVRn`>R5J>&SvuVSFRH52yADO&C(Ca zPGjNiE?vLK-Oo7J#4{0(*Fiku$U}-~%!?vL+3)}>J}~KFls|D^6uU@uMQ0$27hQ5| zptO3F%-DB|{2H}(v$j~N({9q$ff?*=W(+kB1_st46oYHb1+Fpj+D05@u0-3xhJ~vW zUADk*z*DB$VyIAwdH4fri8LeC5k+r$or8DLA&AfVef-to|G)F2^^9nN&-?Oo;9G0= zux)-*FoYCIp91Gyz3houH8-U4^DWtm{b-tRX#wRlH$?bz3OGlRpdbPk(tW+)$WTE` z4>%1KFc6!yv#jLrkLR6De$kH-Ed0>d&72B zEI{8O0RTd-H_dguXEH`+3|tGGQelkK;OK54vtQqOl3Cy7hMaQ(uye9lIt8Zy!+wMU*w%r5z}<>CCFGtVj#lGwROrPq*#QPJ z%s&2CQsnF!Ub_rm%<2>TlqGepMz1GJ`@k^{WLAST7^o1D(36IG{q~)ONMJ!DXwRH#dcf z0;javwg^BK=_cuT4Px6Ssb*?2>!Eg9ie4lGT4plD25Q@3h+`CJN|}KbZ2oYY_u3?3 zGKVA#b4lVJa45o1uhUyB&Nz@f9z%!X4fe!H--%QZd<~Jx}#eR7+Iik3_{~G%&_GJG8x8y;@=^zHYU|z`oOJy`bHordlwK zWVOCD5B4RWrS@NUpaQUj|35xH_VGVO^bP;NhNtf0zUAI!qYx&>s{*nBdGxq#T<|+W zFC+F6i;p|J2(NQu^p%)x8299Q`HRvqTrf=11n)(|7vCs~)oy8A(+s$)E)w!C1B+d) zm4;({p~_0DD|It|g_hHQ1$i~RwD3axuWj>|9CkcICC*j>NK5e(FVf)RAFnK6s0`G7 zsc7MnYUW^#pFt+4EN!tBpEN6bcag+?$fv!&yo_g7)k#D+Qf8&^a&_*` zbuMSox2#-4dCA*tos)IJt?CxAxlwuGDrDt>AzmKR0aGXsy6e_Cl=pzsG1QMxNL=1) 
zyV;tyJOB@28S|*@)fKjpqj!t#G#Y*nWv0ZEYD{{v+$1;(&01G^*f?!J3>RNY{8(o7Qn6KBr9W&SB)h@^aDMEK-ZnJ;> zYqBP@9;@%}%Cg59gnT{bQX0M?vj_^B`8c`Xjjc~jGu9dW=&9jY{kZwu;Ks~pfg0-{ z1gxnw|M{FoJ9mEN*0_HBJ}iK0&VN4@cjWsSo@Mqwr@a#w|MT#0bN*Y)1F*t09ni+Q zNIaiKv#JJ~Bl+lHf9OxsN_!*n(IQP>SoK)Q)_1|kL|4&2M>D~h0dLdq@IJ3g!-+qgZtsuHN958##`%U`pp%r|E z*?Pe_xxs2HuxyDElSh9bbeO4*6Wzb>`Xxg|8mtivM~7A$y;~%T63}~6X&%?5 z*zGwoD}(#zHs*dKgS<{kPBoP5L>hR+?SFQsbBR7UKc zj^4w}>$4U+vW)x_dCY!{FSWUg1U)0gviyhnA{!^FjPK|tl_P(svWyeznK;dJ8jWK~ zh@Tu=13g%w{b-xs5uTGVD;a{*s|;HU%aYhqn5(2>iIFk8l5w8EkH{F85q)P01S3N) zX!?b3tqI~yTG_FMf|)4?*;&Kz;(Ux8hH0i-6bzHCH1JguF(LVzts@(&6Ra+I(46Q9oj7g&3SX7|0 zY82VHxjKHSo+_J`;MeQ2F*!?h$MB5eskF(;I9hES%vxmiRjgMFlY=5yp;Z*aWW8>x z8)nt4p$e|*Wh3ED%9oPhMlXMc1}0ynF?}m5DlwlHRO<1rwY9R6y4YJ?v4`s1ps+-F zD&-!x={<*l8;^js&(i%LFOhF|09<8ZG`Hu+3 z10cQP36MkmBOrZ%Ga$b;9Rewea|+~i!DFD2phpEX9t2f%`y^Of0vrX+G)v3i!=OqN zJ`Jk!?Bk$H<(>y+eCI%@g5g9+Kdx~kRHex?Ay>^l6slNsDl|u{Iu=qOoC|prv{i36 zoD8j;<=usZWP=0K2Ek=lELXbpA6F|HMk4RzgD= zl#&|(B9ms-PS$p(ZJ}uBAsR z*k!RL;=!@XE5vfhp?yH@M}vc-icrk3gbT4g6bREXL$CwfC*tBNtYykzIJL^9K?HEo zMS}qDH&@_6G@xMt2Jr~XEe3_e>!#gMgz4={E^H!}H09&;j-ej!coLJq*z;tNnN=S_ z3Up@5oqeFi3B*Vh1kjQLCI`PH9|7A*RR7?AANE8|dj{P4Koe`n-G#`Vs7xVC1|=$_ zl4UVXN&?C%=b~H14+*$bCDWuZdLA((hgYB!TJFMhU?s1^gWX|TsRS_2%X5H^`jpH> zXS=P&xaP9OOlWg%t^6ENtBv-e7YKB<-Y70t+5M=J$?sG%xY~|K71V74M@CTlO$jqe zOu$h-PNE{trio8Fq%>*A-yZQ0>Qj%Q0rF*@_&rzwD~3mt33(*x@-UBxwjW-bWc~Qs z&rIs3%8V?{RLTTZQjKg#Mb)pD#-e9L@nGG&SRP3_h9t^?wO8%VIe0B!m$Oo+Qaq}s zQmS$yby8UvE0+fLqT6aMs_nRX2imVw$YrNDX4zSt*TKy7I+(#xD?7N2BIG~lQ+NM^ zJMVh%-+R3icm5wA9d7jh*78{QKiKJp_)qfV$gfyD8Hc2kEA`uz;Vl6X*Y<25viu^8 zO3bI+vP+IyA{^!1B$HFjkIf+})&yc3p4dfMvwRfKOa~ZH7yjJ8y3rCXc_#=<=KVT_`#XD+MI$ zOAq_a!%y@5&z-u8ZD^kr^h-5q0kl3vk2`bx0Soh| zX0(b=;h_o{HH=fta>*bSah8oxD5Yt5axp3fXcL6Wu}3B3I&@2^8;@MX^Q^Fzn@}l% zVTkSBu?lXxB{z*eF5MY=7`lv_@Q9-U;83u_SYtv01C0n+Rri-xkAUuPKoLP{{(h$} zyI0fv<@heV;&)O|tq~U0ELoL)&?>kd@vrGhe>KV>vP<@B7S3A@{-rnjz*EEjam~IU 
z|KHp2|Lb^`^uJ)aZZ8B}#{Zw39Ju(82dBLa|G$pM1p7q@TAfc%Y%b~I?N`m%@a>k-I7Ef5m|3R}kTqU+` zh@V7VZ&3T#Hok7J*c8gepxq!yDD!)K&WKSzu>e7=MJ6+jEz(%;5S_%r(QxqCMp9kJ zJV|jKUIS|DI%a_xV$8ybIaCVn*Xa^fWdd^J${H-D!A-_qbxjVow#q?%we&whm1Kb1 z0Jo|qxd6>Zu-lM*2Uc?AaIvf{Su)`1?kxF~%os3K&kD=%4w+x$iXe7a-q>j`hsSOy zX*guWO&1$Y*TiU-UJM>UX3)Rrc55|jwm|mnw%ge83XKievC0J=z{R?)s~b-r&CFAn z+e(&sqGS3II^31s7ib3cyS6r{UaJ^*T7t#@lcJ(gl3b=`m7v|rga?Jb2_|M<`{X3G zTu-f-lJSgy1scI`7bGyT8P)epRCH5_On2w?{zNR%`_E?{@#MmH(oe^jhpc_5tk8x&J*r+}QuE<+0_zrfxhhHexNAr7?T; z1wDD?&sifDPZl$nJ>=g{OdAnoDf|aqLeAq$G~oSJ{Cx~yZsO@nQ$6|EP7e(z7afc; zC?tE!jEf5WU%G974^PeffBNx`bU)x(GXGBw4&D2oqvOMk|F5+?=KO#9(Kz0cn7$e8S?9b^A>0Mums z>93}nft{U~d6hia*`ekkE0KYM(FZTf;7LBKQn7|zp$Qs$q?fW@l>es88&a!|$-_0d zU54f*#&Ym7emRg9Snwm-69-`t){UMR?vXpvXTkh@QK9_~dL)pf!Hy_06{k7k!Dl() z|BI4FX^EcQ%Ut{?8V=lOQ)Z0okLG#YPebg~RD3~Jx=5h1k|9Mabjf@I0ICUdBlki= z&X0~Df*7a$J`c13PD{~sX(N#Tolx&P!RSe9)jmNfa%n@vtlPPa}f(> z6UM*XEtFwITb2|uh2X-Pk^GIqRRqY;Gb(ec?&t+tqsrhvjj?SGb1$_-5q_ZDi1RiD=Wt|JEilponWc9H?c+ORM|aS_WF){BMW-9ay=Wdtvz=)hSC-&YY+rj}U5-E;p+53@KGw(^OKo-VGf)gt|$P!4{3G zOv9MbEAoBay7x1Q4y9$6TY+UZh_0EUXY43Og5#4x6|st?VW1+?1A}zEAO?+ez3}a~ z>-N6DCU&)}^zXx8!r$9dcm7|DX6abWBs2uOr6^#@`Tz9v(8d2cIo;o!|JU+Z=YO-) zO;Lao-VvMYv`7+qO+Rfh77U98NGdQL(eK%4F2AdSzR&0LMTOu>DogWwFi^VutP>gV z(yv7Zzy9io44h)6?7#_b*8-f87{vZ=W(4mYuqqZ2cU^IXa!#y)PYaA6kH;Oj0qnpr ztb^XKMfx28uDS0B2+V$vcO_}bmu;#N5#%cmlu}wC4&kN3QN!%!vDo9qpA_t4hQ_j^b!98gjZpfdK<)}CZoau7 zk}eHn5a&QPW5DatQZ*Py6Cgtj)X`a~1p4L2M*WS0lQu%t4S5lXo_jC*>0sxp2*8MIJ`tlt0dsGP_raDg6O3;<6G+ipvjl zYhx!tARWrAgxlqF9#GWqC12q^(8C)iUf1ogN!>ok42XG_rA} z($e(uInU9z>3p7NRM=QbdF{qBhNhS`V=P(*OQ&a*sDRE< zPgKTtGGwxp6EC3&55i0wV?`I^oCdWZ+R1eD7#%?Kb6ad|;CE4wx=lIC^+2wj)+B>P z5Esf6PFSSdUgZ}toTv9`P2B$P*+TSnoN<1FobAah&tccyzH#nOZ9)2(fy_dj>{Ywr zc%^!FqTzy^uf{|11v{25VmpkO$}_W!w$&i9spEMiwI5)1p{HBX$;1bUC1Efywh5IZ zBeiB~gA?o8e?ku%Jb)@lRvGJCE~zf;)TkOn{R@Si=XiyXP~*$KkfACi6Xzw@6vAZ! 
z)e5$l>I{}dceIc*W?+a?9)A~!TACvmr*Tz56yi>d7BI#q&OJFmEhZ@XloTZv*6Ut& zq67E?u@AeKa;h`RZifXh-Vz}fTwW(gdZUK8=JTQ_c=K=sMXa1v8R$Xt0z~cErC=%q zhr+wF;8BpN;!R}%(5VS>T>;8y%$4A)T8fcez8rUWn_hk6!JC=9^&lWRk+}=^AHm@v z-~(WiUe2OZOaYe)ri8~VQQi=^t>)4JkVe&?HJ3T>HlC>ph+Etd{QeL=13M|I;8k#&-5iT$Rw#T`Lt z4A)a?MW?B>gEQnJ=linmmN;OB1-I_VaY5uqV^XQyLJw|3ymyqR030~zbq%5ofx+bU z&pe6kh=ZmkP-<^_6pEwojVOe428FSr@Yi2ma^X! zcXWE{`TsmU*vNm@^7s(GEe*6w+8czcYiM!+=D7KZegJp!TCn&e1$xxB0)Hx`*2p8) z8lh=k=oF8=aGmI}R5if@m2@bjr1SVvQa2|X<6%-=Bnf?uhh&aWiZyBB%c3L{r(Hio zRfQ|rtgm8=iPSxltNTJ)Gq+Y`QShNH69RdeV2UJ5C}cveCHY-x#~_mM>gq8(QcB=e z@bnQ=UhfecnV^y1TN*Q1K%>eG3aDU>*I9t1tx0zVS(#YLL!JoMu~dpPD3*czXXPTV^20f3&JHqh_&F;duUCYGLw%!HWHr+=}NNnI02W+|IA zCsP-NKNTSi;O4~Yj^6gJ;pwaB>c71#qbmdtm0|l)%W|Py<$p?MRUBc&wsVK7KEj>- z7AZ1=v`gg%AM(kTRB{&0D~j4zzeS6S-#zvF2mHO;^B8M;7;i;fNx7Su^Ca*~-jw@4 z^z8Brl)6#ZER0TR=1Wr{h9=cD%TWl^&U;g7h9=cDOJkHV@4cxsrSEmkvKX`6dTT0Y zxgL(_iUu7VJjt2!$b>P68RS8j%i%4?ulNrqq7D>;T>R$*ijn<;7gMBtPvGZPYRvKn zClFIxF@K=cuwD`CkdO9p5@u@N(~M?|Fs((6Rk=; zQOx9%lFzlMU*7oFs)L(u)$tRk1`D^2B0U((=#d{x<%myx{mmK&W%M}sMJ9YK)rr3+ z+Db$00oO$H`23vh?dEtBMQkSV;hDo~cem83&V^ZoW<$&uTDFF`UbTEaux(^9VP!eF zBA~M;_Y!f|4N$eF0gyG&OB`+$dM%dAw$@u158C{leoWb6 zw7dFV^_p!X3o%<~pBn=g8mGl*C30HLjM&=`gmGgUiJ?7Z*(7 zByADNx?7MuTsKz~j8S2b1oF1CHRuKzm!kF|qhpt-5;KsOW99@`muu3VgR-oPE-$+} zsQAJ91b#I7Vhq5^ET>DL^BjP}Gvv%!E(0WsR)i#=j(hJ7E>8s3Uq5~Y3rzmr7%77!0l?qtNhje zyyf}Txhq|As$GMUxh2@Wwx)Tw_Lc)3U3@^_+%Elnjr79zk>l0r)_UjAy^i9!F626z z=Wdl){Tw%VF}`QXqyoh}vG3yuhu`=O2f3Yi1?BKtXs#>y^F2#}-7}PK824orjL+{YT{}K^5 z_kZhn_V#SeYu)dTe+~-{-}KTUc3#U(hxom5!STC!8vOs<4fx|f-3b4&2mXII_>Z6e zY3lzw0e}Ce<^ErM{SOiQWV8OS<8jSyBnYIVQ*Sn4oF;NN5(+1dNR( z=?EaUR6oGvSA(^+%Eb^t7s6x&Fs+H2jzx=wYE9HN!CDh}Dn>6F_S(kpgJ^irHh>CS zW-^p&x9zZD4^o6{5XQl-Y0?#tu!(jHrw@p?0+4zOSX_4_hXR<>q^BPMoCe=# zD+5krwI$-TsoJRXA|ZH{M}MT!FQb7`n*jx0HR=D|+4g^f{r~Xvbc6rAmZzcr?`r#B zw*McTh|YHQ|AUjw{=b&TQH8Ac{UX&tc@^QCzk7kOfQ}@y50Xpx2{a*DKG9KrmVLbRN1mW)|D2J+?fT0cn-?FmgQJ2YYB5~Hc7`U!P?(Z 
zVpQiZLlsCE$gnly3vP*S9NY=RX$#^B@FkcQ_x!qY-*eLUZK>7~EaH7zMep01X}WC! z9MCH5)5QL0B9^g0TT10C*mPR?i*7*c+6W$0E`Uh*60B;NRZ=p0vwABqO1hRSFiYW) z5hBuLjDw^#u*I4r9jRyX#zc&id@>QM@@YzH>+xzo6=B~X-)oiB)z~$tpvSGLWy#5v z7&ou##Kg7v11pPwh$ z*i!K1Ag#5jf&>MKDC(fnTSdXh@k&9Ad9361kFS--2CU-t4>|yAxc&4m5{CZg1Z;EB za^4(v*f*VMGD}IL&FotQ8&G2e>`!K_$-;UEkU)da(JV5xeyweQKTDNX{wbAa`KQ!^ zy}S8ev;HI6!ChGZE?NJN_7B|qzvF}bjsKsuJm&h3oo<}``(v8ydJAi_+tajzLX9h}OEJVe^S|A=4f?**N)-n@4(hEWisk?`d zVgwTlHi}H=2o(X8Wepgqw(>@!H_I0WW)wi!#1scXAtGLe8CSvtq(Tg)jK9G`%Ww@6 z=O-b?JUTKD&HAHaND zoUZb0ngGn^$3&D$j#ny^&`#j7>NCPlTla*dHf~QDuZ#Nizw?!m58J zib<^dO=*q+#!{@c7$lun`2tceluoLc$ODLr8A0PHvqt=a?XT@49WrgmABC$14V&d` zPFiqs>^mSKzsL~O2_4^*Ap@AbQCLtvGZ*u4 z7N3(|D5`?WScpg^nv~MIRF)c7ogsj<$Ddg`EX45u)#OF=Bp!DpXcWa-d4?O9x`V6lSh5pt*xt1TAM)xwlsbJu}c4? zIg-t@&Zma|xBYM2QU!1c|G$5D;`)C&IXd0of2`%Pbo_>SKKnk;ezElXK*VjR_Zc4! z=!5z+>;bIw@Zg~c{0(&gx4i8Cl$N*19sqC}vwxXacJ5Z@e;95w6P>4K{>$^l?UjH3 zY5Dv=IX-pI|A(jhoB6+v$C^Qc<#;k4TNCKAW_Di5$+@=^{ZgI*?>S5>@V@qd1clMg zo;i6hqx(mvT4hfE5>T7K${mi9;-d=Z z43zQR5Kxf;TUXgU!g-Td3=-%O9ytuD-{vMCl&KZcU;`j~+!+xZsE1^b_8u5;ux_%&=mF&kmA+ z2_T>w(tu2WGysY4$H!Z`!+3P#V4x1f^EyarlnZsyj3}QRH`$@Prq*wIo#>zw9lndi zoQd9|!>A0FdTj}||L2Y!EwNa*B!=age5~E%B1ZpRKs9N`o8H|_POPXeRVa&3NBF1(cUP*b*8?~6cYkk8NYQt^B`79p6TmjrH z^dAW86p6+@Bnd4i0Q4>MmO#A|fEy=qWsw8gumrC6=eZc7$!uaRKvWd~7N1l}Cf)X; zkdVA%561Zbh?FyU?}FrQ!)H{`#ko6 zwrH$e4V)KfCT{<1;RI>x8S;tI#7wtZp_ws3h%$;!t4K?lw86%}i<|CFt}9*3V?B-f zbqrVH3>cq`tt1*&If=*JEwu$MnZ|NW;uXqBcWG07**2Cf9SavPwU05-G|Hq^oZ#{` zq4`gQ!lu01ax;|cj7l~^LYYQdY8*l_nxIpyu}{uH&Z_V&GP|kuE<_kIDpr<bZ=+O=b5oSi8ezatY9!7$1#vH{`mJKL#IaXUUrkqUl=zlw*PvTTK zF-I0ggwg*mk2jK?Ydkmr=WXJ~3@ zsb{~`e}!w0`mZnZ%J}Fj(AyOVo0K?}Y_KkY>e{xfR=sOJsRr;(VVyQ;h(OKI5CL0a zsj019SJSq@>uRlq?+%&T$*F-? 
zOFo9HI6FBuzm?id06IgL0fSw;Wdo)+Zm)D|`ouEGar>*uHitS)#xMf)tJ{T6WC+x+ zp-#QF;j>EW7(c6UHh^~f{@MjmI;Rv0bc^8eaqS`~ol^t_x<$|!vUA zZdL~#*SD^S4}S1mZ>u!`2y#wI5QOX43TD6DtiGsKbN7|~o;iS;n&LFSel>wHxLGw( zyXWq!`6uR7YO0AV&-JUBl{7QFl$Em$DZ++%eZb@A1;JZ06d zV7}8VW%)B?ceJy(s#3V)hpQrK;6cqj;^h-H9R%N2E=Ff;7vcMHKg(xhTMyt#S>~fu z`a_p@d%K|Z@awNpYY2Zwz#FzOKpYz&U|>c3_FIT3{>q4;Z`#HHQ;C4BR6^wp0pJ{7 zVlQXE)27F7#tZ$}-5qYm8+^>@27t#ocyqxQhk6QLbF8C#V> zFJ?%cPAEzEa%g z3iYL`buFFJKbS+Ld@1OD=?5( zlLpmj3qK*M-{XuisGdZKP=`02dJInWz%e6;F+SCU$NVCCWU2-bx`4&E)&vj8HFZoQF>Cuz=D?J0xoo{*u~Z}Nw1bNL%Pl9h34sML7ttswSwC&&*d3~YkC4)jzA z4Vz}cqzMB%1U24PbEdjUQl3YeBdI~(R`W|UE449YU-WI(r#3##Q^oew#B&vas(tT7% z!tkNbvTx)fyTMIlWv2y=4h({Mnm!B4#Ss4a)$~rlVS21Lcrs7{2DCk;$Dw}Hl=fJ> zt_uY>H)9&yHY;Q9-8L&dhgQys(>AYw>?9)C^@hKQmc42_{v!VNo84|!xEc{gnB8Mm zbX`#+u=2v1Yh}8s@H|Zfyi-t9p@Rz5;x#*fIemS4lqsl=?m)#B4UGH< z32)xX0**P{5$U!PZhC)*t<=i*7^7J^$~pt(uvbaB~^US%Hqdm@2{K-L^UI zyneMjr@OvvISa2y z-5T&8PY#cdUHiY&;|>1vS{~2-4@}sAc>@i4DOhKRR57UQlK`I$`7p$?0sO}J@Dg7! zKLzO}*sZF_XWj4PDw*cRRX`r^MiCn!i$0aHAJ_;!dIT6(j~_pFK%?a`vV~wUU=?hC zN4VM6qkcOC#?{(Yvh*L)Pl<&QM56Ba1G~FbCKOcLG_f5KmKG+JwjhEm%L}p}4zDcBk+yhZZe2*xM`gK1eal64h*HqkR{n@LsXESb%q zI?|xQHiURi5J`w`Gz4=Wu%oEZ%BPG*0Pcu@I8LXV)JQUy7&=LM`q5kT&}>jcpB$4k z-H0(qi1EM`8>TWOw$-HNa5W?8=@NVsIQ1;jd0eEkE3#FKXU0d-~n5+#&B^X zlEsC{1~Ar2MkvbXX;l$S5?P4Gh+GCbJf$>p%VN=w@hrNCuaF?N=oy`Xp_gb@p0H!X z%vBrnbhjvegLxjkay+sInJ9;tOB>|@8+4(CCK^MD1!}2XP?AlnGq{Nd9MkWvz}gO! 
z*VKCSM=@e&fC8G9&$CxmKKdYnI8qrJb%3|^agmBTqr@p+GIp4NR$cimuXtH_yB#Y2 zr$V|Z7kw=fW(2IKN#gYyhJJI$@r5Au-fFc#^WB7bK^7@b%;kJJj;nm3qX4NuXCyBY$ z(IUnE3y3F`woNjs<#8S z-P#Kt*Vl%f9RQD(5ZYOu?IMabeE>^$0#4l_XyC;O*HEOwAtah$ht;)q zA{o+$z8FBuO2sbN0#qXpVv}WijcjWvNfpZAa24c@1&k{oM1@_)yd>9%E`jKFc9kg#iIrhQ2wK9TeVum+KD}tIL zuwF>WV8y7=5PM+8QVoH9VG4f5#`0CH4K;?Le1^p#2FI9FbQBUYAer-IX3a24K7<|` zbSW9aOd*;VW+ijM0Xk00Diw?gjHSxJksc1Ma%JbldbSQXBjYO@8LF$UF*NGC*UEkP4Pyo%zZyG0b~p9)>xN$jSI6k{iE7V&fN-@egCUBU`H8Ne?5B zq9|8#?8+g9$!#k`9T0)vq)SUQc(I&Y#Cj;37 zR4q9?l|0zlp)pKc03U&#XVrj*FR+4W z%&-71NQ|(^@&RNx!Kn;SIjI=J#pvuR8k4Xc#wpoQrQ#Ha-e(HI4Z-1f#FsdtX(w=R z(~)kcAPsusg(D)9N5W|`h8yvh_@(LG<#{F;U;w9O`DqucXZg?(Q(PfHQaL5x1<6C2 z5B)AXx%8JtBWC^bX|=2b81~8A zHU-zMezUdeR|7mX{|6j{P!Y=M} z>Cc+(1W@2yB)hakfTo1<&O8CMDCwkwqL;>Xsa7J~J(oF?qh;2$XeHfg7vsJF7cwS9 zl@j@NIiVeeEP8iN`;*$sYczl<-+>vT>7Rr@0!sM^DyDlFp2BtxmfY2I?4$& ziVV+-{Fh|JuD4N)E|1ZU)+$RbsfZU>FSsmZE)1<^3pYIyVMdOAJiMTI5nl;vTlf#& z5KipJ*iUBLC;q0P*zuFCReP~50{sZLGka=4*fOE3eEW-)53cP zbhyVIr6yU9Vpx>M@+FcC

6og5rqT{SL61U}1kvwvun@@7tNxnMS z(I>g{WOtzKv$NStM+oYOa61XE6C06GLJ1<&&Wp(7`2dxX#F3T zOtc54Op&KxqXeiD)(bn7rYpSCXY8;!%cWZiMMQAG%P>_`wi1!YtxLvHMU!hj}sP!M%eetkd6k_Op zX*T#D_VLeuyer(&=a>25x$j#3zmCLz8~QqGnY6=}ykhaRvzR9)A zVZ*<1X**(%%H-pMb;3s4hd60~p)mmTJ8(Yv#IemrTijxpRv?^0WwNWgvY`q!0cyoj zBl${}sQqWC7s4QoE+a7{q(Q&Z)FVYdBQ8b56n%(C-PJPZgE%qau$;HSYKYE^bK~+H zy*ZPD6(1lRUq&{8sxUaUdnI8G)pSaJETItDn92c+wl zvHb82kC=C@Z?H5m-oYp$UsTeUHK&S8*H^a8AmE#O*UJ_LSW-yrd{T@o&(d>C5Gbny zR@#*u)F5q?2H+QBR3+ge^^j)+l9I>OGP`WgO*^!;Tq&LzLdZyZS?2{@$qnhZVJI>u zbO|09ptofjSkFlraUKn2N`j>xgYq1B2i`?RMI3=(9Dh%)B!5l{ID|#oU3e_**Aw#i zobj|tO9UzJu(}cIZOV_ac83g3pxMR;(Ui0oa`p+0`;%EQt5S0lXn0P_dg_hS&W8|} z>NbR`&EZNO2_C|nX|5O>^?Z)1@rHZD%E{E>>M_~6f`j;c^+~#8Oj5a^t653;sj0aW zo|?2)UJZ8Tez3~y0~(jiJkHkGUamjQ2u}mro%jN0iV?gEz;ao)1E?7t{gff}3$Obl zrJ#nEZb?Eitwkk`P0=uvz&?^_mgnasjXbOr+J4ckMYua~A!()Eq)RK)liq-CNM0pL zbXHa8<%7MwsmNt9?25f|PuFq;j25M z-TbzJR#r*Rpv0vVG*`tAln?pNRxr4fJ|5117cI`ksyZ*FY!>#3c}Jg6;*$IrfqBZC zfk{URd|GJ)v}u(V+hqDdY$~M{S6X0|6&vnns;wmI!%fZ=^~Va6R6I=BX9?qIpy&X& zHQ)r#@l6Gb0C`?ecTaky|Im$EA7CxVKX6`b*Q!~oZbe)Im66i|G3>IE{EYfxX7YwD`tP{rZo5&RXz)PYzGq^?(21bhG}iZAB^x<2-`{@WCCDF57gVCdOK2Y#|=Wnp!x5eNynTU(}sI_?;d`+WiOvV zh(>j!BK%1@4v|31fQ^%gky2Dex!L%p(g>GDYg#~N9g$?=BB60Mg4VX>V1OVeT5=AD zC3@3L{5QZ@xNE85vK?0zzbpdd9jX_C+mQyI+!WqX^No$V8UfbC54X?DB1)|jL%&&ZfH-$?5zysX|EGFt58oE zg#UQ~Ho}uh0=TAPjqi3Hm9|NqM}2v;l|;Rc0q5l96ID1g53pJbI(YsWPpojYUrc*4 zaqR=GT9f!-rNoNrn-%gh)T6iH$|5D*#*ygz9ErU|qo`dn+K*>Zl8tkg8A^LQxr9!DYm@UvnH_}? 
znkUv_Ur#4xo_wT{dIjkQZ%123rUfZd$m|qon&OK3U$)&mln|P-p`UgheKAu+DD7dc zMWp`ZR*teVD@Y-+ejU2hC~-ICNIz|AObcA+lZMHYYlXlyx2@g3m(}3n2Vm%n;aRs^iFCZf!z&u;aYVV zS}Dy+=p#>kZDg`Cf&q%v`4P5617Vf-L~Aef09fPEC^_eoXF;g;H`pjw+2z1cy7h0c zG64WBz8tgx7Z+a--Ka0~>I}7gjF=pTQPtY#?xDDYI&Htg9@?~h*0=9jktFMBVPrYI zYK|K!3V+7o+R{H`&Xl>~H-4tmO%-*U00)vH%&%7gSJDL6++uP-NNCuwSeXk0E2@9kgF48i{%T`z%Sz zS<1QhSyEtsw8{a!84^ZMUVH|Xqpw}HC?tod5oCbQ_iCEnpGB9*lY-C%sG&}jTvp;d zLV}Pte2#h7`UcygZAQe!oIPu7!^qp#+DGC51%pzGPO1f5eV+$rKp zVHe^1ceqQ)5;HOELgNI46v*m|1eu`QjA2=zK}7{U1ATBeJq{(8a7Y6Y4Bv*2^8x6+ z$E>il>czBH3mWIq72Kp=29Cp%=qhy1)k+9j;vgkTaGO6tp~e&)U}1gZxv13D;O@|+ zsjHVCl^MT`YumcQ0m+i8y$5j8n8ES`>3}SznbLX#!tPwrwtWyJ9tErJf4}!jiI0Q3Dl41fm)l2u@%< zj&-y*QA^YgN)klFel35E>DSQuHdNoDcPe+*r$xWK6Z8p`8`*Ei*ckB`n-JfJVhet! zd%_z2N;Zcxz!uHn@Mp;!vX?=@+;@Rh$dJOdOj(>Eq8mE%P>JO*KU*q?g!n=^6aI{l zD=*H#F94q|Vc7=I7DDo7zBbW!8L_>iN5fe@`e4nQOpasOn*-pYh%M^C#2Y1~%m&|4 zat5%2wC3eynSxpau`0~((6tURG2mIrV!w^ccP7@lERidsDPL>J<80fI>M9s;%tJyR z%0eu}Ls?E=Et_^?)hRNyk74L$!NSJbEIuzu+GyO6kPf%fmGqALxTJ|T>~6u)!x~j{ zb3>TJS{%#WfFgsMhu~5xtWG2EmnxSv=V&?R*7Kv;GeYH*P5gT%!MTbbRXi z|2{p~-{AkOgl67c*d@}NFpmiAISOlK$^ zsn;jsUCdJ|(ou=Q)*nF^SKi{Vlx#9Ebaum*RIAga_-R>WM>Z=rV+o4FvEvt&8X7fr ztn4VO_kfx3URT0Y_OS*4(_3m+4$?T0GSXsAX-tz5PBYw&xzf6FT@pD%ow$WZT_tcEu;v6bh0 z)ME~2VyU{weC$&-XVMpE^-FOq4`yTOsK{*WGcji4o`|Ix(XH$?%{jnp3IA-%DY1eL zC*{W0Vs$p0O+{&oGRmsasPoD1!>IF#W7PSi=juatozd)=b{hIyoSqD}Xv*OXiCvq= z%p6(EEIP>~(>7YPER%tvWmcJ(6CKFu_aug6#Cu67xlCAr%6jumXr=PHwv>+_SF!9f$ zXeR!-e_xhQNf0HWe3QO=$bl;1FO={Xk?@1~$4nS8DQ~j-_uoMVqFQf$dDoVC{I;e4 zatlxbi16FPt>1pvD9QEn-!$mDod95&{pYc#|8sh@f3%tZ>v$S$IOS=auSFb-L$PI+ z={%i@#={%KH`*{$utL&=uZ<>?<2*5Zx)$JTwFsI>YGXxl-unekF(F${i&3+vT6Xkl zRx=>G$ks>vr=YU}zJF{}kf|N;Q)mi3{2mr;!Bk{4#dDugDy=qb_ahYuazhVzi#W((*^?`+rk0 zK;Hi7k$df8!TdmLa`a~X0(l8gcdH<(=`>5hn}DyB%Pr_sc(i*beU`0?Yf zV%u1v|9K}aAeQleho1k3;Z2 zyd)MK?26-lmBWH5_>S_di-N}%<=+wfnaD)&cx~p;{$4q=&9iwn&*s@Yn`iTE gp3Sp)HqYkSJez0pY@W@td2avwKWcGkECA2~014F$H~;_u literal 31025 
zcmV)AK*YZviwFP!000001MR)*dfP^_C_2CO6j(}jOv)53>gIK9%WEmN>}YRW&R9ya z<~ZIaNP-e#5|9Bxz=TVDZ?)WvjWhM0^6HoB{;?^V@xInSceWDu?I zwl~_F5B~B+o(*^&?(E>-&cmGz`rX;yroZz1rL)u7e7Lo_xv{fMIf};=iC|p^GTMsf6LS4PV2w{-sSPG9RIDIjyL{WJ3HHd z2{!H&i<;+OeEk2u5(KMBIF42y1*`1k-79aOx=TM@de(zdwGyzP0{r z6hha?$Fv_7Tn6Dn%4uAjO?q!WCs9TfOtL8NXYqMK1$2hD(Ru2AUst$5wm%5?K)wi0 zCudRcBV<3O#NbC8^+9wVC4(sG$5D>B{IT-K%Gd7bKk4JH|6ckz%I_Q_51c# zXLAGB|L*SAa{Vvjao2x&`O$hhOUKc9cuINBisC$f^xy$6+;%@5KQP4})Iqq$;+?v~ zWUx`>agkie7oY*> zq5s_Vzq!4&v!wqP@s!8^S(+4CI=aJoaQ*S$-q=}=|3aSn@qe8r;h>Ei!ENi9asKaY zZg#x!-`&{UU7r7oc=)IaWYF6*=y2>bg8nGX^FaM>1*3RayzbEJ=d&nzO>dI~j=7xQ z!*lv00H23N*nTld`fyg;>8hgr^YhVF-G><2h>NJ|^Y?Lr_%soTELvHK;Q-Exs?tG& zn_wkzEA~6B0MCAfJ~yaY1?tKMa)oWAKaI7gP5Af88vpyWSt^WZs zeoNy73+_F^@_SF{&(l4E`Sj@?KvUrCUR2=ZC-h8s-U=?#7{E5ZWrV)rKi~4d7G4DC zrz9?d2L7wlPk?r6|Cpth&{FyXb+4}nzXwR6b!qgw0TNg;AAsP2hD3Q#!1we#%81{~ zgLFtKU2JPN2w{u}O4sUq8%~tBv4$)frZ^6&qBw!YnDhZW(0M?d0bxaj7+4=FDzXVS zK8GQT`)5HuIj3gi(WePyj>AzvBpHy1fL03;3BpmH(zwUTDd5SlhS-S;^DyhhMHXgP z@D>+WZOX#=4mCz&*eHS=T9Oa1sI9|vG)g}s&{6m~2i${jFra#aBE=k4U4RHlCt;~a zR#of4t7upRhXZU#arG$Z;v@|&1DF!QFu?y`c2(B5@#&e9m$OlcJd65K<9?8c>WBq9}$eMnh{VWTwOg8%I=ixb}^UzP3T3y~s zy1{vtUc|r|1Yt0UhT&vX1TZZ}(HL4q^9l#2OB1;Z-JL=2T``DvGGpF1zME0GdFR-uGPVqu^N6H!CMgfyT zHgE-Sg@0o)z&9L?f-#IkudOCVu=g}*gu(jLpx4BI@hko5sgkg80PG-z?Kr#_JXsGy zc%vEEfKT_-PV!@vK?PWx`ZVqbAuT=Hbs-g&wJe($wv}uWwHSL^Wcp(`VrUoPXcA#g zTGqJeG+4-6M3+T-h0Z6tVNRpQ!S`u8ieRv)IkYM2Hz_;rVP0-q51Tgb!5bmm!!q?pS6~yFwpWbnb_*zH?G!`WDTY;h#y#3JzBWPpFZR^)f6u}qI!&{y z+p~Z(_`lB9#zUX~+u2_7e+zj$_DT)9#R6JMz`qE^S;d+iN8|W3ODE^Rl1yRI>}-ms zvC~>Y!bix%2ygThSsB?`6+YdZw$d2LayWs~-fvOA$cez_@?md;v}kq?yXqK@wW#V-+7o+(6Jd!Uc$f7qc`#jG3VaDGE z);mhSI2&*C|7#S*`6w3XdrzAcADwzoZpMev@bwoEIsVy2i)KGmdtjBk5YLOo5W21T z^<;^ScDw1w6PW?B#EHPuM#+GsiSL9>gHxCKqG@L&wkKpzt2pykjr0*XN}QBkkqOGo zAz9`5M3^ExM_EYN$H75Hl!0N*SyW82MEGx{C6dT4jOXqkDxz#0C(ODhfVZ8)KG~_E zfKOKtgw96xlz4jJOruK_Y$7Ko-0&dn21&|rTJtleMl3i;gDd2uE)_3oJ;UJ2;YS6Y z5ke?AD9}Hr6C_fIn;&+OH9duCop5`@F*NF`dsmWC#gQ|lvLJLn&(nU4>xwCuhP1Xx 
ze@^klr?X-$Ul7G$z!fwk!Xdk$n8%Y-#yg^SA~&xp+zZoZ*- z`ljOTfo2=Vm!qJ=#bEYAzbZoAIv{91Q9_rOa*U+9OgR+ppt4ysXa_ImZv1(M5)?Rd zf((fHbZlB^)mxTWNNe}Xty4%5H zJrNt@;IawhW$hl+PY-w#(F+u(MVCbF(1mICF-M^p&1qa{p~+aJ3@Qp?9t!J$YY^96 zh~@cOWH<^7*nF`t?}7PbRN!KTB^m~}H;tl}OyB4;m$cVEp>i*Gv1P=FKMJ8(UO)#S z;wZ&x2M36e%mUaEl*l__an=YX!3eh>EQe?b&Rn)MG4z%O6{IAWqg#pNBGY?2e%MnB z;oWMu+JduV^*yhB?4ASeox_C6c3=XSJ@nmbAM+3D^KaT4F3Yg0fIfK|RFrMn8y>K0 zi3s(e2M2k(-PSG8Ss;#!z38qaI>YrwyH&bOZa7QENczZz0!gahhAdhfvz{N9X&GyU}i5v>6JN-wkIuSD)I3(B*+nXf$)Xl6yne*OZ2X?0=osh`+4Isok5R0_ zq>9ixfKg8oF|dcOIOMm*d<(Kxw+{7=IK&-sXm{>VI(F!Es=y%X58$Xu=(k_bp58Xy z!g^3tX{Hu~uvrU&TK^2litrqUqWGNB&KLL1?X$TnVXD#G74wz`eb`<>*29@qWBWiD z<6a!9(||MMHOZJZ04@1qWMDt~3>fBta47b@vpwANA80mp&9k|27KK2N7hycIw~OK| zrmn&C02n6og88kJ-eIBK)fDvIw!4R6qsPDc^h-YVeSpK@l;rx@`9ywMC#07Z34^_- z(skd5{f|@nu9EYcfxUyT+m;&a6M0UNL%nS#7L+*mkyT)kkW7A~7O>cvLA}!Xyw3yQ1&y!#Tf9~H`^>Y!@ zlFHx3?;lH|F8CW4{0$0zggwh6njf`Af02rBhsNYB|ZPH%ESzf3QR{C*-XFh*W z1yDx5so%Er+qQn&nIh5mCG&SH(Kcdp+6?ECyU#8877ZlEQL9uD_b2L5%*@5P)?H$G zr4j51@t@+=vX<6Pl0lT=1**sNI4V-Aa%L12eC%yk4;J|<=*clEQ5oSJs!^rc$3S5q zjPU0yByyOCyhu~SGoxtTs1VZ}F@mod)G_6DSX$x_ z+9Kf`T562=PhWUi#p3dxz72~ZAV+{yb{N3$01I3pMOPyiqX4s?Ucl4@ej-c9x(&c8 z@GDHgW*1eSLsa@|>7w{CHy05`8gIB;bB)kNAT$B=*!Y7WpsGf~Lj1=`0UY*l2nZ(y z?D0y@<4DxB;7lQP5`K)6)1!S|Id+eA3GyRptU(sY(ee0>@`JrvQ+RW(!W!$FRdQ4{ zqPNh>2()(Z>`RTvW_a;>1JzupJBNhwDgArKa}eGdlF+nq1qN~q6aK2D*SFT0M_o-A z`;?Hq3{X*J9oSRY3&T+m0oN!x!%9@7w$LG72dH(UL@~S#d7`+XqwHsvlfDqxR@+yO zs9Q21J~9%cNLiAPt9zPL5s+VE`!Ab53@u}1Fs_hMt7B?{wpz@%U)4AfzE};kB#&h| zBh{s4S`5*ZG%_}2QR6cZiG>Js&9=dmIrG} zGW80NMC&R=&P3-dv_A8Z9rpXtxoDm-e~{ncpq%HFgDod${7>AOI!$@mYzBLK0ljYd zS&!1<413?sDQT2d?|uFm$`ZEWbwpQv((JNl!oFzgX-4upMd}s zs>5$KA3*huAEuLRac$qOXnQoP_4rRTwF~%9BvlMG=*qsJe+N@~1W(6>^~g?jj|8W4 z^a=iRI#0*&pHr!DXjL?&hv?sE0fL3;{J$tS-BJoLga6;&dFadk9&T^l=c@XfXDa;S`x8IVzHRK|44WZa7~Ivss&y?jv`d6qdrgH&C#42({y3pmRz zwDR3Ehr4CX7-n(T=45dJ$Pei_Rn{B2)*-{-AD7KwPurVbnv#b}5oP_eFgcB&;KV9% zg(dj4@gevYelxs@s}`sEV5RPBxM3=55e;p1_J(gLZZi}Tt`1I-1Kg0Gv%l`8&)W2r? 
zf8O%#|93VYZZ7x#MLcEwukTqw-)MP3YK{Ka&{qG_3SIBsS7~jvNIwiHl*PPtEurO!^}r#lqw*P)@f!1!mFzoeiJ<-^Gvs%l&@=kF);+arits#S2+`|G#v0 z|JPx0)*i=6gI>a34&PhBW@r21_P=lKZa-|&zPs|^0XdvP5?IeACTxY{PB4q|M-?UQ zar<-pF$U@=4%=yV`T&1Dc#Ygi`a#P?+bbn}7BrUIlx5a3nQVHc>N{ zUsu7?U?cbsZs~B8rdi`Mc!0vgN5Q(6xPH|%hfN7NK>yB=!-Mn+by{@4Uo{=H8-URN zfGo}6KK+aDb+{#yvEOk?rv|xXQ(-5ck`6RH*<2p7{|ryf{*SV(yCMGnGMoP2DAE7h zJInYFi+P-F0fmyVDWHljNg$R>`=9=)S^vjZ<6b(tqa9$@`tNM|_8&Vy6E4^PA|4>Z zY-J1{caK__Bxv#eERC{$=ugXPj%byWdy)mFJQT|2*q#a}9} zRaRATS6vm0J7$$tHlDAi&xR`7PDA`&kS>qrcrEjMjt**8nIcWmE8`S3^%gF>QP?L3 zhlS|Zqg4N)7rYGn+W zex>ZZi!_Y5Q*Hf+W;lI!zz)kG&V!kvK9Bn7EOFozF{->#IB2ivBXGtt_RVumxl0#o ztt5rpmdQn%mp;cNcsD6z>pYd)*O1pgBT5m-=Gtzw39c^24ow9|-fBEVDI4Ia_t<@L zaUqe961S_(Da0CvIv4SerQ_*kl)w=@ivJr9s4li@rcrI0oE{|U40Dk4=pOpvHoG7_Dbbl1P(TnH@J&i4JALZ`K;6 zT4qfsdpp)gGnE_;&Fuem$e%8okZjE?%80Z7KmQyWb^lmT4gUv}*qD~#MMN4PUsC@Y z*uRqf=hpV}{%;|V&Hs6WZpQ#BeQ|9#D-%(ZHd7xe;k=FdX;u|0NCQ=doA|DPn368f zr|PqV3T0F-91H-p3G9+-614&})kD)!wDi31*?G!-FCY$Odz~5k;KwW;x8J6paU+61 ze)6lB9cR3KHDBOWub-6TsF>XnW217BT|%ph*ff-YfFXCP={?BRs2 z_knUlpAbFx8!Eo-Ez3_aP>|bN$Cj?|emhL)to2Jka&3_#HlM60#b0U*Dv7FJjHt=F zLK3scE7HS*&QDawUM(!12J>tbqwJ${J<&JH1pjYV)+`zEdu{ES z_bQcE?wTt+W@iLgO-?%U<7fm46cnpGueGIIix?{{o6vTJJoReJ*_F0e{QUtNs5>K{ z^zcu4zSZabT_?goaa#Sn@ z_WQGvW{VsfXvSz1kK+VIWk@q}tQ7VFLqc2z)r*jzVT=ZCZB(l6ZCYiGj)Rz5Mx7&; z|6QwbNZ6{Tk${tnOAW|K{W!^@W0JhNapKT|`YfG{2BhlIBLV)<1o_5B04YV{LlNFn z>@Xx2>3RU~8^R2|xiG|M>%(vy!-r9roKC`1B&s>KZ65s3@FMi%CY?`uBNX2AEn1qw zo+|=(5yu~=WL-$`h4!x#;9TE0ly%-^;)ll^YFHH$|=&&KnxQpIPotfAAj_ zxkF|PiXk!7&zYWstO`S!e&Wf68FD0eN$?-)3)2D2rwk}~^4pAzy@cmcv-m2V+*7k- zHSCkw`(U*^#kk%EUoJx-oL={e<_!6DhBAGMEpJ8B4cdV{VTp4ocP?>r5nNIu8DYS0^$RWzRLdBs??)mYccKrbo$sU27mcm5*$h%V-A zufW{eSb3gP$y%1>xB;O{&ipg$3c%4GWixhpia&|jlR>cBSq)Yd-@))&qeT7TLlVJ+7)lv zb1Pj&581D@tEhxCx((CB*m7{x+;&!^_msv(+&mEW{NUKXKC>`%#bD` zVp9AVsxShA2gMQ)hq~mNEI1D_9KczWQ}9RZopc-*Lc=n_6Xr-$qGPvQDY*azHGx(H zpTjFVXhr`F6+P0STCf94&COiqi|g-c>N)(n;s)=%oVQaRi9!cyXt8&?Z<;gp0!V@p 
zd>fD(nD4Jp+h#E77kWSX7G~_I01_+C50m2}?SF(M$&^>kIv5o6c?4}HiFP_GrUfI= zsH?wGs$NyzY*tnOCeuMyLoAr^HH|^3Wdg}gA2`M6(CE$A3R2&zX$XFst1HEk&W4F9 z@U1@ute_~4*XUX6f@Xy1G|3WaeZhsnls{oOrK^=&|0*{{?XtdK&&_@*>VBnVQrAZn z%jDac%S4S*@J)FPzo{MaZ)9HlgD;cm11X2B8qJxJtm``x$eY@(#BY9QYRyh(Lcpy2 zMya(jWd^e&={j_a-a#??JKn%qXI;>R%qX;R{mlXl95eQAcm}fzVu2o~F=- zu-_+9GAvrW9^AlVS0`yIl8T}h;yQ(27-Amh(mqY9HAO8o%0W7osS;vj+Ym-kZ8|l< zL0c+Tyts5Oxx>df7iL0}s_WNYh;^qNK-+Tjd=|x7(7hyAu263QgFlmwA@KiYb#&Ga z{tkN#B*@kJ??D#)f&E;Q{07Nf?ci8L+*7aIbsyT7W^~h}g3#=Fgl36O5TX-(CzD$_ zR5HiEIHga6tEI4^{M7LOKSWnZ3Ga_iqh1z%QUCv)&7Iwjum9EQEcL$@^4R>pJLs1B zSLp-n?JN=)4(ShtjgN9AFZhjK264_W==qa`1K)ke(t*bBzc($dFc-X53Pb4DkD{=C zNQG7kR%`vl%B`P(!gOV`YP~Pi)*p_delcAQ%(F4NwlWx_1_05{K4`GL{mY2p;_GpA zop=LocRC>ltPo*rC`9ZnZo$2DLdK}0uqfibSnA4QoN#yh@#yGofrTPv7xNhKdYZRY zb=9zr+F`%{j+;CD+b5^1lmOd0CUl*Q-+hM};a`;zULhkU*hWO6ol;c~tD(V5Sc1%- z;3d?s_Ir7nog2Fb8(~%XUIi%Kggpz`;UIz`EVS&v0v1+63H{=1N97kD4l7F+g&>}bXSJu^cK1h$PL`tibA~!0QzZBk^${BEaHpDrb$c-YpjFLy@Km2Y^;&+RI*Mp2T#q!u`|FNKQ)ub{sJR%swWV81BQjGCxu$-;cT15|(m0JBqS~{)X zDi|saMcqOCf*_lvR8opnYo@qKQCUZZBB?HJ)|VkxYICZw*mFsr+WfpuHQ7^>&o`J< zOCR%DQ4Y$lRRy2R4tVW?N4!Z^WET0={0hcC?K7u-ab2-0_2SCe3Fz}=rm2iH8a4xI zz^eC>FZN29^C=`-bKT&gr=!ZN69^T~r?rIjEWdV1cP>^HH*Kl?rJ-5N;I^QwwRc-k zW(~~^siZ1iwF8Q#c;(PLisH_$Eg3$@ru_Fm+(yfb)nrITL61bRmo;ePZ=6Q3X5p~qmtM0O6pW)0I|fwa zp37a9_Grk{q|69tFVJiu6~-OMnR}|ifkJ1`aF414x6z-d)T0$MKOn@XpJZ--VSlXn zGt%!RI`sYtduhrkUfoOnTJ$?go%#QSeKnnksqQXoJ3|C@KLL?<+wQpY_SMpy{{FJ! 
zZM;s)8btHcpaW(3k3Z`5 zjYFlMp@rm;6$ut2zFKPON}pcYa-;lSf~wJl^9{qFH7N~o5d0G{5PWb`+*-G-Le{Kd{4Bny42+*m zL`4gRGQg;U;f2r(@_Q)uG`|}}r{wpVK3Ee|kCyPM6IWP+*RI(th~f;b1W|PweXjzh zsU8bhN$TwMoG&EK-2-HreAqi{%A#Qli7KhEZj^mM%_$+ZCy(p6$$zUZse9dMEtNdb z*{|0Zf!ceqEB(|8oSyWvqC}P8)9?0M$~=8=*A#a;V|P0lU+p9m5uJ}|wRFxw=tGsf z%!7V6qOzK9tdo+NrQr$4d^k6djd?xbi!)e?K|2zQG5+EC*{SssNoI>%fLuyTN6OPLbJB`7o24$JfcSbc2ukb52r#8MQ`O_$ z!=$jtB#emB6?Eu})_>pp-&-A26>ljex`A$NDepRFZed`JKCxE0aG}IxWv5ZjMsfKV z!))ZDnGKXwbP9Wt8F`A5jvj$Or}=tc>qrG#o9n%}&;@Xwqw87}GK^rb{azCvYpTca zr`KEQIXYak`ioyM(WiPjuQa)Q`7g!PcW&9UvoH($1-5O7YtO+UwxXe;&NLm4jDih|i;w-u? z*P@RfhhH(f)XkU~eHLC{1;aj9uUop*;||;7wy2uvF^F3_0}e_Re2`PwR4zyjyrSVN z`65NBD~@Px)~RyHsu?F?lH3wEV3F+pf^;4mbF&4L_s@5=rBR-Bw)&$cvQp>O{E?>LDF^Ey^r8gG<+zgG6Ez}K(^kLdh;j$NO2r7F*nF$i;SFH@*$N+T> zo)C~LVM5EQq;0BULdjGEpUM>(otR9=)i7~id}B?l)K-SMPr5L=cJ|CBrx+q(=UtP$ zj16bSbFGY%tuK_R03i+zj+v@dF2Ar^=QGz=p;wgIF*!_|%UU)ASH;GzL#1#f>X%?t zrIBOK?O6&rR?bR&oXihA;!O=r64aVB@-)%JLTx^7oF*f#Cl|3cx$77AMd-p$@#Ti) zl&In=MA)BsvI@6i9PL0h|E;2ofS*$4Myf_xRUDUTnnrGcIz=OQ;@wuy$bxoVHKT!b zad0a;LNjr13&~9FYu^@F^ZLej1@69J=}1|=fEyk*&QO`A?1f@Ag4It!IY( ze|u+Vs}%obbLszmAx}jV&xRFelg>1laP&8cPBD%PN&41V(l;aCrxWCp0L_r))rRn_ zqUG)&u;)Ldn6M%wXqAXi#7iwt{ADSm!FUv?Y2`=@^Bai^E4BYLxi2PKC9&c@mAF7V zDDpi_!mf3aI2kdMIQ)XDO*o$}pe`~do|RoG+EXG%+VSCcvs;$MJlRvxM|E_PNw3=Y z2qi7}5r%W!A~Y?TiV8sLz#^&1F{~T6h>??60Z6@qz^auCDj9d zi970mutgv1p2?6Sd2mC3R-oaF5#|#iObcIHidpLQnBG?EY^in~=&>7h zTQl@vJfOwYmAP}-#VA6j5~AH0?8w~N(^Ew9nkW6Joxn3&jY96)bD)gP0fPZ_fs>lS+1MK&qG$X zl4W)X+{@hS4NFHUH!*DEA!e1xxWT_ccU`GpO1SQ)_zVS}q|SkFI=|uk+~mDrRer4Eu;g4E z-d3BdE2Tye<&F6(53;5(_&<=g;oE;a>@4}e#XQyfKMwlW-u^l4>zw;PAxzspz0vN^Z{^LO z(sA$SmuXu+*elo9rC|K6TqyncZE#4mxi7R*6~gWy@D3s&{-EkI zw{YE~xScy?c!O6+)a>u7IB)MBe7(EtAuyY}-}~B^+GYLTKF>3A|9=@JFnayl904=- z|IMxK(*BPwVVC>=0-oyqUk2S$29Olj9qcJ5?$cbnS5B(9=9go{zThW#%3G{^h^Wc; zHab_{9DGEqOI#P7iu+tyXQv*Bqm=KYCb$bxz^76rUY42~b;#~1RqdDaG zM-CKA*RAF5YL#p4(KTx+xuF1s_HD6;j>*&sdYz{Ado$>50$>s_s>Fuhm=5G?mXdoN 
zEIXx%)@W4rV_4lFyI2yABYQ{n+hyksv0+XEwk)tys#`Xt32LC-jlKqPop}nvCJ$n3 zei5qrXG6JC75wBHxhr*c%}uaak+Gv$O{cM;PB6^Se?zTXwhYeYaZ_3qRtdOCrK@R-mq^*>!9QWi_YN$ArSZnZrkV} zvgr|hp3G~obQ~t}`DBEeyI3wxV0kEGf4dcl9qfdh6T~c`?&2bPk&XuF zc(rSkf6lY?A|8-a0KwgetJhI0FxLp@>6kR>2}Hn6N7*CvLlMRawnO56q(ea*yQv#e zpcTk<5oHC{1Puyxs1zH2G#Q{^DY4q0!rF030t}yWnr5WpkAGlw6k=H*Z5fK*J?vxR z61b-&x_K+;O+7Rist<(RU^hYp-!{jau!Dj6L4ab{;XzKU z40&McX=^moRDI-kc|~+BUrtA9LUuvogy{v6EO1z;I=dLqOo~DOJ>3g7EJsd%ouow! zJjH`7!d4BU%s{Z3qR0qr6`idFyQSa+!6*XkKaow`gFso#bu&un!s_ZmV)rq10Jd^3 z__T(OrZVX`9B2b9iYX)-6{p?}&{#^EkOfaN~5;D*DK@)b!Pp zJynzV!s>eDj`7k2B!rs!=o!DuXp`?IO_or=HSykr^Q)yiQiTb}UJKEB3&9M~U%v_2 zhVy8MXuw46pQS*51bFWazX*YdokGPD5)K_hwS(h$9FM|mbVVMhB-|Vjr$8STz?=~c z9T3$mIK!1$3YCMJB{+>UI9z`BiIiVEhAK9GEEkcf}7+8+X^5^hM z+E@pOnjV?{YvCdZPoe8tDilFQ?~DPi+bd{bH&Qzc_J%cVFNC7^H#rWS7t5X#Vo zj>l=jl_{Rc z>nv+(!m1g-gw8JTM*w_JF+w;n*#>dmpX78HK&3)HhZ&p$zzb#ev*Br56My8IyON=2Q62?u%tz>*4jNz}TJF>m-y<7Ck}WinNtg_B+;nJ7 z1G~Xa*{trd27uS+DjaMWcMVWoWr%7&uz5zk$Z!*+jWR|{H`bFW($68Jat!Z4BcyOEZ*nZz(nhQqxs71WIhXnF9PFu#RCiTVn}2JX-vt= z%!5@uNnGq?U8y*lxVk9PB@8mt0k9=>yVfzGA&RP2q7OLO*M7kfwrBudk!muvH=og5 zJgKqAWPGtQVC%?4oI78Qg-X>W1-VjjgCTk;gybq4R=$lAmm26?22iD@L#_M*3-j4j1Lj&Vi@^3Pnz5pizjZl0#*JT2&EIT@{$| zoXas*tuiI{>2f;ZU3`tuUk)=R@8>`?0KDuo5$0{3P5(nG0rUNVC4P?V8pXK3)N4K- zWa$4~u|o)zQ-~`<4A_5SI;gA2xCC(P@_YkGbsxw9qpB#Gj7>#+MS&g_D@@Oy>t37c zg6=u@<6PeA(i+Eh71rzmcBVBuZsu!%D2klSj?PPIjaJC#R7>c9!F0{?wUVYcb9^Cr zAN3%6q?iIzw1=vqnhT(b6QK)n%+^B!Q}#cb6I%mD(;z4mZdAe$Sl=6Ah~+E{Lyh6@ zzx(Kl^Gd>)XkplFrGfKFmulocDnQW!{C{tBc6UqnzZ*;a@5MY`2tSTW)p+AX)+6UE zMWxWi7LSw#9-_SDsrq^ONR3#soRJqzLn13Yy1@YQvOv`ibcf<%I>><@VA3?56tFBU zc{zs#Bt}jzOVxnHVe-LJNEZo8_hoG{-oUcjk_UgSE2#p) zxuvM-dTcN*7(jW6{~Y|)K2<)wu_BGZz`XH;j76WYEc~Yf-a!fgdg-Pd{1yMRQ}}3+ zpm=3!IFFEO@l>`{z1?H8fobFVGGpAGwsh(3CS6`j1ssw7{ z>PwSAEnGeL>#9t*w-GrYLh8+)b~HAvU^t5TQpp}$Vm9?0L5g}LBoIx|>;enWkRlQ;xBhGP|K~!j-H9K#8Ty(@tt3gCma z{A2W4Ts_^j(7i(5eq=Y-leU-gRJZUEvWzf=h+hqz#M2pT~4 z%E!V*4%Mp#kE*cke~UR~yK0M3=FZRzu*_zOFrlO~O8SFgY-KSs>mkK4Kt$MWb1_7Y 
zR=F!kqX~*JC*q`ED4Dlf?7gTTPKY`2y2w@qNFwNz9H_L>m3@)r^|?|GP}&blBJ2u5 zxo;T}DGWuzSD z(whc8Pgys(ivE$vAypGY?d($6tk&LgX8g%!h90VGOpW^CwTN#uq^=iB2e2oxVi=bk z$|HDi8zaT#^TnQe`On=sfSalR-|^)?+glHp_MZ!R(90NmJtQBySyXaE_69xx1^#UCRDN9NQTebymV99HMBGuKm?n z->&%EmH&-oCqaY!++mjQz9XM^nBATZ1fF2Oi{C$Xl1W%pPR3Y1Lzrn3s|t zzKzPsmRj@46uN)kccVvs{isDa9X(tHmhPHASV*tSJgqCS-tmf*a&%loZsokMX8)J9 zaW^3VX6*kPTU);TXJ>a~>HljXk1dmtL3cI&fruZ)A&WY?C=%`}0N1z#!JH9kUYNOsI$f;T-Hh}w@DdS#b&v&$OUp5z!XBc=2T?^KN^e%b09(0MO5 z?I~2MQEBJng!|=vOS^jSCEN%hjRNDRKgJsm0N6bp@D>yo7OHE1R4x(lQph zI~tdy9p{Lf(QL~Kn<5&20tiqgHDivkfS`JD64@eX1|(r>RbK_C@kL};Bszv%8;Ng{ zfZN$mM>ti)rb})X9EG!QzWF9NV$WcFe#?3`IgZi%jK4Rc_Gz0V_O^nqEWK`%PI(g? zLg~8LDxj_&nNSMSktiQ5?omHTG~ke~SjD1_i+0-FUwvgw*1g|#d&PWFO7fefGHbPE zI*hE+%R)@m@oOcdXs3y{w7EP)SIE@abCHXZwkC`B6BI z072dveMLZ;WH0c`pUE5h$ovAZM2ki#cu!o+Om;SY0iZC3e~>+Tet+RR3P^?tQ8B>~J{td07sK zqbjWmiKFYaPuQt}am1aD+O}c0iiZcPvO@@!QZOmit13~Bkx;nrQY8DKJ+njQ8o|=}^+f%jr=oGKxP&Yf~$4vbMQC{VcoIaD<_um3}nZ42Y2`itMwPVgq%l;awXaI1E|o z9Z!)$%rH2gVucclR@9G1Ipc;4&$aXdyLV$PHXdhutl8S9{p;`|KFFhb*yX^ zpEuO!if@het)~60E2`@&oXOmbjN4}0gH<)Q6j{eBsu%ZyQ`E53{v=XM>vhZt=PS&t zp5sQkVKy9<3b=HLxm5|QQo4yBFgqG)UEZUbg2s99AlPi;sRBtz%iLQjv%)h<(7$SR z<4r#qDTp$5vKx$;p zzAR+r+>xv_`8i)KD?fWxG^^L%zI@wwRu=hR6VS@3aaKsHia1No_C6)DRVjLveFRDT zDhHS92v$yYr2tko%&8%)WX7mKO4o>KRn=eBQLQA5l~dpbfvid`mdI3cz}_~P?A^t= zQt^eSo%eQQ9l2O-{HHtf{&o}pZ(F-NTTB0Mi+JkC|E|2h&EfxdbH~^J>@591Eaq|i zKYXfp{Cn`pcJF)e>7M&@wx?g^J^}vy?z`r^ZhpyP#`EO`mAiIb>H?tTN$lWLt!rNZ zJl&@+*1~%4beB*GyULv$0j_l>Hyi5lWNiCw!wzU!upOUmC;-L$Ro>dLrp941Vn=86 zLJrQX&*ajqR3c-)ZuH`pAeU`bt?_LGbcPC7bTgYY%?TYLInxK7-$=YrE;XY9+iJIS z%52L*aSoSrgy|d}=Sl-oftT%9umYPYJ_QMjsh)vDjNnK0D)H_t*LKmT3eR>2pDO*@ z0eW-0wS&ARr*>*>8*uE|&TgLX?ss|dH~&-T|DUMW#oYgOI=jCA|DElJOZ&ftJkG@( z4Z3yqEA~ySJ}kaQQsUv2{_UUtGcJ(~w~vaiNstqqe8sIfXh1p{d-08!rCn-sS(?wm z+^}n~B$w(MrD)D?`Q^YWQLK(~d#eRq-)>oHRYh6fiT@q++PpROR#vR?R+g--M9Jt_ zP7(pLwrLjP3HzLbbVi=%G6!f*fkgU@#4z}~+wMofuOBGlb?}FTT{r9~mmvlb&t8K? 
z$3>&#;L%}3s2IHcy{?~ES4}sI8q)ex{Z0MOG#-|g8lm;Ir~UGgr&j-qyp!CG5a9a! z|2CS0FZurkJTu1sPJ{q6&i}2Q9iRVy*x6m`e=p>5_5Wzl>xBT{r$v#DZO)%l?v;}) z-5)7@k~%?y;O9N>cby)PBOY)Sgd~y~S0q1MsAf{I2l;c~g*;7O~o0tPiZvyYFza>^4H*H6uCg-_K}6#-3| zrv!4wB!xV)W+>J&ZF)*IItQ%M%6i;ciBNlfEp_uzsz4c*z1>vRQZWswzgtelO~2-8 zvrlUGrXHov!lru0@dWTNm}9O9A{7&j5tvut!@Ea?&_{;9Qt~5fVs7b=tyY*QD=~sOvZlQNvEdr zWYaTGOF95P*}RP*)W7IcxBgk|gHb7NF9e>&|8+Vg|Nq;Y%lKalc^pSQX3#C(&rNOV z)OZ-{G?5Zg)C?^dmS2@D9ob(f&EI+L4)B~_x3$DHVywen?fgcoOKYDu7*s^L)fruA z_uu3yE*?dhcqCubmrwE&y<5twcH1StMG9W(F*oLjjxkcHH)a>R2nv9Ef%%MTwKyuH zT*?DY_oAz`_=+eSuaBaOXk>ikiPmgjkC2h#GJH3-kKMmA#M63Lg~YMS_R|b1KTpYB zuMl9}Aiqjb-9G+r<-FSKbZlip1Hw+%(FKv|5SK{vrXV(GO0VixSQNKORbyQ73CFu4 z*>c%d`gX002a0HOlWPZ=2x!8fw6<{^Xg*+9Px@!trMp;Z;voy{7R_}rz|rSv*%N+! z-=hMtcuZjvjN|)FJrdra6~91ikKx z2R~l_f@cl{N|!tCrtpC`9q&@-!}3n&pYo}p|9*(B&}+8@)43%lIFtV?+5hhDY%TTw z7VfOg1NR!vj8I6tBR7W9JleT9k2)cGT<*%8tn`MZzr z(Vfuh#;X4L1b+TsG}C_l^*fqhx&p6*-95pd)Kr;S2sF=sU9Fp-H}$thdt**!<6IX? z4I0|fT8rkj@vTN~(ngQbJDD}<&<`3rKEt( z>9p6AQGJ0oNmQp_rBJ9=veF1n(X=8|*>cX-w(_v8`ABF_DNLBpP~%ca+PYj;&l=qp z6Fg&zMi({5WVp1A%*e@>{?!Nv1GM03mfNcHNK7-kwe!IqPevYW=?a|%y|mQ!MTI7q z>5^8dLmZtj(@oP9!?>SjS}&@EhFy$o1DkFd*)djhrKfqu$@UMwd#{ZW74uM{G#@1y z0S_ZewchIorCK3w##Eg{HI9d34)aVqrD8zJonN7CSJ~$`tlR4OQZH9j&AmIXq8x`# zDQU{7o~=}s!^X|8E2js^SC}I(w#FQH+T5s9)R^mG4t;Y6h$9pm@9XtA?1$Gy8)n}v z(6%FC8hC6}3yt`evA7Jn)?AlR7+3Q*3%26xHVY>9oi^(=p9VF}!eykI^|fiRuldx+ zf4DmbFgLLO+SuA$@_&nXri}kxuzvK+;QuyvANuz{yIV{Bzr{RW{MUNhuQ%ejS@utf zHgT^atwQI|^9Az;og#B!)~KcRsmiXFE#}hW0#Sdk!~iOsT%Jy=Yn5nB_)_a11}m8E zAV$~5teE4h)UKe4M`Y-=?BRT%Jx?*zB`v!KheIKa%Q|}*UEvguF*=XuHpVVn=Q~E{ z@f^sw^?s0|ADSvYY-JT6nmRiLL#^tY3SMrW{+Y(S86;+LODpY0DWZQJW%NFepv+VCLSkM@ zRf<(mtAJM}p(^;yPpo1COLVHf|w^vML9q+hc~83#~drUF201D{xi86$A9lQ;>62&BTk%D z?8e5#A(sFK9AqM&oXbr-&6FH{zH(FwhV`LHi*SSy(W2~wc zZw}k3B)y5}r$`Jpnw5dH=g0ngb%KPLLr8M4KLrV2E1@V|xoQX|Oe&H4Zgf-!j-@VF z4a$TnKu}(s0tb|(7*!1l7335kdaned)#$2$r~nWM#EJf(fK)IpP>i+`DMs(}71O?q 
z-x$xv7MVD`u#VKA$xfj+y?70#`M+M~!4)}RR%shS!wI2Sq|c*K5eAfz?J0~GX#Eih zIa{%=Z&7%f1yD@DhW-sb4!=A4_e=V{-dXW2h1_Yp5=bM}X=bmJieJj^sr@U-*v}qDh2F4*z+HFtcBlazfb;iv71r6Udid zB9(S>zU0!lUoq(r<+M2w8;v1-*jf$_ybr+3#u%xaXjC+pOFqB}NPIR%SZBJ`?LEDP zl>L+H0?gv?@sfuhhQU;vKQD0d{~NHim3Ysc8)Ko7eWWeYd2YPzGP6;Cc~jIe0?b?; zjx_(ZJDm#6i#=m3=(88*YhgdT^RLwgzudg@r=l5gGK?=r!FFIKV;(#yCv!dUY0rFf z=0P4jt^A@2KHZZ(6Zr&;=I50AD@O~`ad>_%R=i^*^u$(~>F~rOwc9J2tMlz$Nl1s? zrlUKpKee|oV?b^|3`K&Xc7(=|#U>~-k%10Bay&2KALH?4OoXk*zb%1s`;)z-*?ABhM9=a9KA)vytC zHpCV>Xsqv0)F7R(gLpy0j!Nk4aDpm>M#2ndX5VlJab^t1a~4x%A)Es6+Q6Q}N@_NnA*x9H^lg_jXV}Gi; z=ju_lg{nfTXIBCWEBu5K_Vi9kp@uPookZb@_MCHof?wt7Xi}&%Kq>0CfwAu8!6k)u zRPp~{KkUXs4r7cW7UsYud=~}qW@!r)Yp-DO>bAYr>P#|Qs3x>}D8qKaT6m+3OJn<3 zl>Kv2j)_^~Z7=Zilq3pKOGZFI)@4_L)mjv5kd=ihvhf#d0uD!Mh~~slA36%+=xQRD zL6o5vu}H)*G!xi8Mk7d*Xrd&y5-WZupqpx*TYtim89aALCXrq6=7<$pIEBeo(7o(Z z95j*jaC9k_6GA7i1u_JCG<1?a=jdrDxEsHrLgMY*gy%(j-bbYKm#um#^d8opE0$|1LII86%Eu^H};uoNZu`g~URx?1Ks zNA#_dj}IkYb>e7o>nc8Y$jlC*g@sDLs$1`b)iTvvF}12||4<{nD2LQ4wMhlRTB!-A zrD8{^ETx!SIJ_z#m&DF8mCO@3tFB2|7-T(>fx!HxfWHs0jXcBgH+`c??iQXFzlz~{ zQkQGR-qP~td)kKu{2(1@?YG?>ZPh;kw_?u*fmH)ahEVsv^{U2Hjb~*5P&F=I08nRu zs>6R8G1l}B+|1RRO zuRk!mvp^G`bYn zUH`g5x=sITTn6`10zf4iCbqTC@S*ACCmR&al}O-k^BLornjb4GND5r&0O2OmX!J?v zvLZK8$R=I7sY+X=I&QI!uB^T)l=)hPyxWc`wGqwcmYk=)9(&KVOFGV=wQqL2ZgUQj zjkHx39gc88db3OEvn?-u*$Y<9?^PWCoZ-ORYIAj^)L7yj(#)>;1t(ce*15y;^U;-^ zcOMf1Z-E4y&%$ijw0>MR?a4yoD6a~Mo9$O5$>r*B1z0dGXuUXbM+APC%VRDVL1g=f z$YPawNBk~wiIQ?T zCQT~Wt|m~tuSll&E2deVs#8doYBbc(8f1igscj&VClw=(L`w}sCTPk{sUd61z?rzI z#Yk^a6%(kA(5atEirgvZ@RNE~qY=VhRY3sFLQRoD^>}GwsA7lv+Q-!L`9jZh`tNQ` z0B6vD=h7>A!_M74%=N2_S!8u`K>x)0T0soK&YLuXZ7Z%fDK(^UQ1er)Wpt zPDqfcaoWPAs?G5P;uMQK1$%~D6$Vsce5P=2c~v?lXsUCjS)mz3JeL~kgEulYC{`Pd z>7Fg)G>7S!L9N0M#SDCG`>-ikOtBI(wd+hgt*+s;fbv&Zf^qfLUJEp{FrO`$w8VEJ z@mig|#*c*Ru`j#+r|HKob z^+_tgxZpc7tMw8ARTW8ED7TUZC}2~H10$}=vp7Zrpr3O<5I$T%F>zNlB%NeIb9ir6 zmicT`kbmn^9=}cLm&|3;yWy=bHE&(!({yDj0q1Mqtg4zHays 
z*ldTbpeSx98SVpGN)L0!u1_tX{ej*{$V4HQ74^eCk%NGP~l~I_M~bwMxu_FB`ooQ7vXwb-5e7D$`(@sL>;!sK(Hs$5AsfKonz|#h*5)Fq{fd)sT8$R-_*>=a zr`DR=67l;uwISNkqXYQA*@>V68;~83{2rnO0=gfar8){v ziI?TGoWAl|POl;cDxK%_5zljenRcF24(~kYQkCbq$=)I0Q_pji**(wepyPRNm6!!z z&U00w`aD;4x#zhm<6n_VZtpZ#sd$>x`!!B;Rg*f+xk>J6uJZ6{ZcR~rno}B{<~(DX zYG9|F<~C4unmeH7H1`m~^IG>Pp50CvKFt*pHK(~M&ZoHtU!UfB2=vMAALRaEsDoTp z&o5V|Gt{E-k8ukjEqqnSw_k}qzU}4pwKuZmbE9X*`442yBKZGO{O_gwcR^3p`H!^i zpO61Xl6y}6pIX2)1HLiMfN#M6|NeUw|4(0qX2928S`_KHf@Wa4Uvf>S88C-WGf=5& znn7pp5b)_VgJkw-h8pNZGnf*y;47LzO4QH{QkO?FNSPANz`1>zK~jlk!1wFW3{sP( z8K_Af%^-P%W-!y*&hT2TA9bp6L<8@s>!3DK%;m!mG}Z5HteA3JJihhOh*{2P_f* z1y)G_=r>6ONCGfND@6iOo72b&zV;S=t7oqG&$qM#yuSV4Zs+0B{%;}AeB*yRJ+K+{ z-|p_VcmKb+vHfr<|5?am(|=yjccsGe=ZFF}i@Zdgo)|qyAf4$djFnChPj%?U3qLa0 z$%&tG$l&Yag`r%y{wtmt^KWyExDq^9!gQn?{aS!PXL`OJ1q(2c!;rGyd(aL1PdJ)UG3+IE- z=W9;8ICt=T#RtqK*~O{*ozivz^bzZj#g)TRq5bHON~>l}9BoK{yryYI@_>07k(g*+ z{UP)+W1@Mj8q{mYNYh2VIB7cFkCi54f9;C6oL37c`c=sMxONp>&Z~kG{VI44IippE zN14K=`@1)DZdI$TUSrYEp~lK{sX~RdViHy4s+m1EjP~Zuq0(HrAy|9HlqSawVcJu~ zpA8O#Ish^!=GWI94l8tZ-4UW6 z2YsBobE7Zk*Uj1>^ZM2eDTjSKG{G758RuzSE)1Habrc%sjj88%&*@} zBn)oWOeM3q<7SFHx2~DA^IX4~K+fE(m9SCWO(Si9m#T5j#wlT6z8wEo7ytV#{d{Ku zU}o_DJKMXS{eNetv$NFyT*%|Z|2`uZW6J7Fezr&=zhc6pU^;shdl5&YL9X+7iIoOm zmI%7>{zSriV!-UvY#99LCCEV)bg&&2KW7rwWhoN=2xkf=-AEDvKh9)d?i8dgZJj zB2?6mB0{CbY3wTgT~&$XQx;R{#fK~5LiZRul!#$&+#iTJrGZN8qrandk+y_qY(XCqsr) ziU(o_m_GRSf5l*!3-MP&x)uD}X}} z_5|5=G#MqqoSn=zcze-0DBr-fYY9e`C)ZNn%y#BlxoOxi#D{CC{t8#FrRrJ0xw^W7 zo=xkkz{YYlG^t9CS!sfJo>@!yR5)m56q~bfG&77Z-Vp%KoJZC@JOjkkVD&a0R~e4& zY?VBtYS+8m%GupDwXdq?iR1eArX5=A@%Gle+M>h)0Dt-3rQk%^9ecAk6Su`2`p%Cs zW1U%85KlF;uqyIeQLZ4n9Ar~V=qOZZp@Rq4)ZA9+E60;>eTk_-a=4fuB1eFIZL^Y|i0_WZ35!)twfmX61hg0vzlzaLEcqj&%eb>-K} zM5XIjCsDHTQ6e2X4aG?k<5?kkIW9;m$SskS(+MiO`rSf(#uln7N@QOS^4nO0N*niG@e>VaSH?DGhL8nL zB=wogm2)1176u|3Kw%Gi@KBIQnLAFjASlYj8Y@W? 
z($|cHuRIG8&e(N77o+?uj~*k}NpT_)_6v?xWLcD-LxMzyVZl<@8$}6_YtISkg$SLH zSn?_cgD`>UhSH;}cJM>`88PNKS#BR9x?N`iP*tKe?$!rc^T~OXNxdx1DfG7pAfzd(7PaPl1QSJkH?^`gv= z1GN!u*t}8rd0mA8F4Zo=$a3eJD9V7?I4*JwK}EsyP#f2RDP86Dp^gj}{1TxE0-ES& zv6@Vxj5uLXI`mJJFKt9g_+cau@RwAnY6+)|kFf z|C{*89-8*jIU%<;dfp_dgp2>H6+~`=RIV2G2XrZy^|92!yn6yp zh3!Y2{`Zce|J*44&nDXc zFZKVw{4;I*Z>RNdo*De#*5+=>{$r=J)c;?|dtc5n+lg1U?QQZvey1Jnnli|ESsR~M!)syvICOE*(i z_M_BNoV2z7`EJ<#T8S&#uDS`;H6dcvh6Wn#DJBLw58i{ldtY6}F-OyEI6d5QTI48% z^L;6ns!{>x)-(%t(}%eX>@6R`>8#1&*Xoas?T1NYp?X3 z>}37KnpU{V(^FcZFV|2*y0QgxDN;*M`?|k$4@L2z%Z-sJ(E-dETo71B5<<)XNvjRI zF$3Zl_`h(3QB?7wt?`aB2k-GC=4~niV)t52X^zVx(@C?g+OUZg+UW{)+XThvIh2Nc zipfcJCRjYm9{T@#5`I^2|g3-S!f2*8N{+*O&kAc9#A>7V|i_WAWC+ z_&aP_9CzFLewp(J{pm7q|CYvyRq!!<{^Rk=AOAWxYyTKe&H9(6e>Wn48}R?Tuq7<{ z{{=iV#veGVFYfv3|D<{pq*o8i3Ml=K(h}DL6BV{zjN^YqJAmUNE2j|GuZ0Z(Y996+n`T*q{5a ztsPEzJpG=ZfGA9>qwh!rq^eMjOM7Jit#LT{I@5d7;By56B6KKA0gMK=^>ua5u8`~B z$mA@u*_5ho$D)2KTj1u9P<94t=h?Y0ZAWE8@BYIM)o~9g4`*i zFaYPo`Y5`HMq+T%wq3Cs^I;M{;B@45s$8@ip+nhGxot$=aKs(bb)Nh3T2V({%3(a3 z>!klQ{nnU6%+T~ZJDj7l!$hxYyw(g$y=ywp2pDYd4t_6Hi!_w!Mp=%Lm%dPNZuPsi zHKdxNB^ta1ql!dUD0`qdFCubdHCLwPX43v7bN@(+7zTDQT4 zxdCsxVq6h}{LUf|d{qrEDDVfSh*K9rO38FH3|~gaQ-mhb=b;l|FA|6j;s(|_)uTO?rVgR7zEnmAimKyEoUIH;rI zd`}$=!A(_g-BgAPR4F+bwLC$gxqgfMhPF`OsTDN7`wsQue*b+gyVIG5rrMAc4Mo$> zvu#LAB%-6A=k`{up{UW=BZZ!bwa0o~*fOYe6!#8&RhS$$#(a21TjDSsjndD!L6S-m z9v=hNv6Br|^izD(c*t$X-&zk2F6nq51mk!x7)AUs2;fUMM8~VBbcTmcQFK?TMiS44 zJ#W@H2UE}aWZ7zZW#ngFL1i53mBQq@A}^|{>9I>xCyk9qP<^u+kTxVFLnF&>5atn@ z+_WlJO`vVw;Q!64YK?hYZPl9C@XIO_%IjMU`g7K^9n`0?XFFI|tjdb8vO8v55>B-$ ztlB^v%ffup!#}0jSfBNG+t!fNY9}s35evi03M?zbMso(7sdjPD4ZbxSBS(gDf+uF2 z)=p(aXBDj|<6G8MLY&9CPO$pgN8`W5Q$zn9e7ZC5AG7^`Zf@=P^530@+e`Xy5szb> z@yVk3=tnDv_)lYRj|FmYgv*!414+_8;H5yvWb8N@Gy#aYu15_b<+cD{AvY)O?!Z0B z!KdjopI8h`&I#Ti>c{N-Bl6w`4?rg)k->~oHpw_g3nWyxCO!2$g^r8I zN@l^tI#cvoPaOv}hA$*a$;fYuqK7D+fD%4LpIoQ1itq)BVHjTq+n537>E519v@R`6 z=)@>zc`Yf-qg4hwVS#@AfE`P7>I3#vn2=I^8$~}EAPFM}M4EvXG@uIfBHC@Q5OCL) 
zT17u7qsW=rt5<_32E|_>_bH|UoDJIWV9%n1qE8m_58r(^Er8RW$>g~ui9U%p7AR=@ zVv#~+KP=HFwEIG5vBH*pLZBO%3>9WfN`BPh>&T)cyt1VjP zK|%ekEi~Pcg0sz(%1(8uV6T7Qv4Zugq1$I;XWHRdai>ysK59xO7+Qnfsot@MjLfjK zv6`4I)=KC|18lH+H0PNRvg#OeErZ$B)A z%y$&PJ2jf$`KG7&op*x!LB46!y;mw~U{ZOnLW8_i!If4mkB6A8J&KZ3An*5pmf2{k z{8#y7o#(11&!yxsD(Z|3R`IW^onBu5Vu86h`Tx%5hVTD-`{B+~{=bli<#;mY%AW}n z1SFBnqIF)tc`y#Oca9;t2Mk#%D%o?((#*tTT|ZZl+0lkL{%J(*(>CShV3rnVe-=lO zuYY#cQr}T3WilYodhn@*flOOLRP@_T$A!t&x$;e9>2k3jj$R~U3fA=+r?4M!bF>o> z5=@_G>2E*-vFnv2NY5iB-|*tj@{3gQ`s%gRMckCh=ovOcngy|(k0bGhSAcDU{?I)E z7VD2v}wDyw!J33PVq>*R9@Q)*;E&1a=kUb*RpCiX)ojmtNuWCtuwW#%E6gS$wwia zX5omgT83#h4lCLSNWw9j`jy&Cz6hhpZGJk%)be&n7``2&q``h z#N8Gw;$gAhX#t}Gn-MwKijrm(s4E)?Ix*uvjkTw!9QI_5|FsMW?De3^;(LqY_Nx2M z=W*T-aZ?(omw5QP6`&m)0^ECoaQB|jpQn3N*VCtaveM6%IPpYllsoY1cO&sd}1aUf|oXXTz4G>C?1{2w1pHx50|^)REkPVJ%r<>iX6{Fw%9j4OK_UeXRQ zhED25#plS{b)0=RXs9>7`JdYpB-Fg_D?b>MCvUm7B$9WPyObL{k-V-!l~J_7{b!!g zEMJ=p{+E5q{2$)f+?neydaiH(yS25m^#8Sxr)K==N@h|1pF1W0Upw2KttJ1zh{s&9 z(~!$0JF>x|mxS6!c=emd#YBv$_+pv$uo82?Dn%<5UrO1f#mV^uowWmjQ=lu%Br=9r zXwwBO_N0I;aDGliV}&DADJQ{20K{h=<6Sf(gv}1<+7I7`Z&q zC6f@W^pPvE2#jRLR*KvIExde7o|MViu90AnxRlfBB46p6y|ofmMil>OAdvBesD_YL zoFg(ZdL1x&Fsftm;hP6tySA>HNhp(I^)&1d#G@N_WwvMsvB^E%B_^+O+6z=j5s&Q7 zBqzK*Al8VW8%5{}7g18)bw?AK(`pDqcZJ04EcWjE*Rlb;i(dXEBCW&(h&GgqxjkIE zgFl98^o(a9es6YVi47*o6EAebTW5@LxplpAT~fh;RgEJ;hL@IkJIz&@^+>x1=-yl0 zSCs(`<18=8g|)Q->FI5EW#MmP;tM%1l)rMb0aeGogQiYKBXJ}>VN(?{4z+Eekn0TC zDj9dlaY(Ma2N8^P#%mj~n>8;!cnz%`f|~5g7_@R-!KD8jUWtw?!sG>E`-Tp^wN|P@ zgqt|JK)b^x3xzn`U`)DHwqNu(*ZR04s5sZ#b<5Xv)%(6aBa~#;3{B9xQg*%AatwM*}xAihj7U0~|1ze);_SAr9wT*&$#^etP@LUx=iuHbjG5S$gGy zdqKNxA3VdT=$|3$~>1}UrYXPF^?w2mC8l#`N1Ft@C-N(pr2R=Od|Ed z!2kq7q1BD?jJ3l9kFM4~MptMI9-i~XDxS%GRv;rv9B5LY`9@(c8mSmGDrg~`@NQ%Z z4E>_DLPCo8Q83w7&73NZ0tpbg-8_6RG(Fb8q(R)mzV(1Xfd=WzHRFCkC ze3`;dMdU2iHVoZ%u(o!B-6O_436e>g@Iol$85KBC0X+_r7zj^}*u1u;&Scl3%s!T% zokjhRIr6P`f58vv@0Pea30070#SX)_wy&s5cxv&gbIT+I@%WyzOc|=`P(>Bygw%Ued 
zSGjS_JbA6k?N(8+nCrC(*A&Y&q~JZ$*(RYvf&qr&VGQ#DSb81k8&}4FZLUZix-A`} zITDhii1}NbQ1~O@r_jvj<~?-m_E0CtHr-wdOY=be*!24>i^jkx{7ZD(^9W@Ie$Uk| z9n)oBd&BphS?B*<1OT0J{&%)Fef#g7?X9K$*Fv5O9dA4jY~4&cplj4KVI_GApnw0P z4TsU2&k68*=TTN%wNdth8C!hRH|umm&5nKZ+9Bl~m3TkBmg8&GLVKB4PbCfk${?Vc zbJT79`yY=b5qG))m_X=ra_=MSiccaZ#^Y<2qd`dEMeW5xYTxw%gu zKQ%^Wmd0QXPWoedugRu&gW;*@n5o@@or3BAwztQ#1`U?3(@WF(h8SAEpK66EAcb3^T3G?c#6^jGD+n(AbO8A#Xh#|qA+fL#vUL{`t!-{1@D(uz3nZry zWKIeI?7Ap!pN$CW@{K|u3$ymh%4Qh>sL1L8g@D|cOv7P?IGbzY5Jl?l>%>EDnL8*8MBt5~ILI9h$S%4vwOjO5s;>Z7NK!LH8^jVMNU&HJNmzO1!Hi zcsYfSo55NEEGc4+3b`%JakH zXRr1TUmrXNw&nv9m#a(19mb8-t^Rm#+?A9vnS8z^`u*$}fk<2Td5O z!(+?OKS+1C z@18Po&;RqV|1ms8t}jjQu8x`Ze-F34^M4!uw>@B&ANJ3p)fSiOP2$l2%S8DIqZ8*@9}8f{ zfWDzw%ldjhO7b*YAH_ZRfk#xoP!P~h5dxqh^aF`>!f+5>JdaS-HA?z%WMRscVX6Q9 zu-(~dZzy24SyCQShK=^Ww>QoA(K)7ea4~GG>vG&c%D;ungm75-)-!%Hkb7OLY{Aeva#OE%DaDi_bSBY2E;6^`#gHzI4g?tM-Lvv z7VV5!yKsRo8TDo-PwP9nnc;vo~q@4Z0$cg zywAYlUBF{AfX~V&abapY=Y+O_-A=Ppbs*<8uZa03xF1XZb9je~Rb-2QuJ5+5pf300;6!s{jB1 diff --git a/app/fixtures/registry-index/pr/el/prelude b/app/fixtures/registry-index/pr/el/prelude index e6c7d0759..8a01e7d6c 100644 --- a/app/fixtures/registry-index/pr/el/prelude +++ b/app/fixtures/registry-index/pr/el/prelude @@ -1 +1 @@ -{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"description":"The PureScript Prelude","dependencies":{}} +{"name":"prelude","version":"6.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-prelude"},"ref":"v6.0.1","description":"The PureScript Prelude","dependencies":{}} diff --git a/app/fixtures/registry-index/ty/pe/type-equality b/app/fixtures/registry-index/ty/pe/type-equality index 8fbce8f14..8d5fc1d6e 100644 --- a/app/fixtures/registry-index/ty/pe/type-equality +++ b/app/fixtures/registry-index/ty/pe/type-equality @@ -1 +1 @@ 
-{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"dependencies":{}} \ No newline at end of file +{"name":"type-equality","version":"4.0.1","license":"BSD-3-Clause","location":{"githubOwner":"purescript","githubRepo":"purescript-type-equality"},"ref":"v4.0.1","dependencies":{}} \ No newline at end of file diff --git a/app/fixtures/registry-storage/console-6.1.0.tar.gz b/app/fixtures/registry-storage/console-6.1.0.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..52c94f426bf4508c1de99bd0c3d2cbf94d04bd89 GIT binary patch literal 1646 zcmV-!29fz6iwFP!000001ML`HPvb_AXa9;3PjCv(7bF#TImt;DV#(O4?ZBa`cQW>d zxJ|s#dUF)D_uo6acAOA$CG>zk)boHh^Zn%=4|;X))uD#aJ~@%kdi$iNe%FtW)iZ~^ z`boXfZnTc^iTHNA+1!JZ*LQ7iZ;^_C1MCStd4iAf^er5EThOaF?2dB%x0EHXWkVLY zmDPJ|{oBor`q%4iBtz}B99oC}dHw&?0QQq$M)yx)f4<-}4S75lMsyXClNrJ*`%6V=-CW=pma`S7O!gX_&?#O;V+x>-CSCN4;r)8O?~C&RH4@#_y3X z9YUPApDS)OqnHr2;M0HOfG$;_hHuJNe48*XHUE++efO@azgt>RufiA9IA;k>M7qQ$ zETUg#ELu!yfny6XUlhIxnu-Tn)C;x5%!BZEOkajW)UV2wh|Xye(IkvfYZ3Tb0CYU2 zp^!0uKGth`tx&gOnR7Z_L@HCCTZ%_Us(sUTt-oGPdHBjR;92*76Y=%hNu#{~-vqMy z6V++2)dtU=|9Jn`TV?&<0v^=Am*1Fp!MtRsPt5;Dt$n;v|KmopRnGsnfTJV$3;ao( zLN?!E&e_*EqA83QNho6M2pBUC*|-yNato>8_((x85EBZ^F{|YYvR=~jUczy708@60 z&x(9{6LzQ0cL7g|LxH87Grl?%_&6D};!t2Cdb7A)oeI2&XS6t2xp8?WNu2(Ur>zGR~cu zRrEV^%cShgtWvczx6InJn5}1|BrnO%{Hkn{d!0wiyGr7bKbTG-&_Q7W=zPRK8%me_ z9aX*$A3i`T8%J9<;?9Z+flgN3m8zn7ch#*fZVYm#h{tglNCg!w;J98;K8uB*(Q?s!xh9hV$oCf%Ya$VCJj~zfmYZ+KNfT2YTbm|{`(4-IWOdlYRD`MedMiUV%3sc3V zi6Mo>D0R?n{DZ*w;Wl4PD}S;J9F33@sCtp?24>eUo?YMg? 
z1yBLljt`c3Vfx7CI~8R#7t>Z^;GDq)aeD*&X>?7?^skj8XQppUTW4s@fT7{~W^ZH} zE(}NR(D4XhSX$rodX`~ckiL%A(ISvbV*B6?49nUYrS$zlj9r3G8eNM>ODf8~>5`r= z!&#krm^t*%s(?2nJyRaYCxXEk?sX;C>5-pD$b>}b8yCho@w9^{Qi3(UeA?l^swXpgv;ID@?Rn&o&ZTO*HD(1LF$!>9sN<-M2Z?#MG$(oEYYt~(m~ zrehys%8sU|EjzZNDhcKp82w9A sx}7s)?L9N^6P2>wAWyemu3ROQP(leMlu$wmCHxrt4F+&0Q~)Rd05MlE_W%F@ literal 0 HcmV?d00001 diff --git a/app/fixtures/registry-storage/prelude-6.0.1.tar.gz b/app/fixtures/registry-storage/prelude-6.0.1.tar.gz index 1df21a5803b31594923bd710f97edb55cda75cc7..87d64d2c1de45193569f4d37a2a9fd11439b8400 100644 GIT binary patch literal 31313 zcmV)PK()UgiwFP!000001MFRUciKp@pTG4fdOde%!GjfpUwe``dk7{m_xN=IPc|oa zt`X1}O@zegfz4*}-EUQOx72z7aqN)0chE^d>PJ;|b$9izx=VQKb0-?|okMfitUmkU zn_Rna9UUBq??dqom-^Q3_xGwbxUW_ZclUo_2j51V^c4l62d0@{T?(Z+^|JvcfQKtWEhqb*Q*zOa{QF#5y`@i8$=e~VA4q0ut z`ge8}`Mfjm?P+KjZ9cSv(6{?hXnQWR+#!ns&TN+jUgQrrrSxsr^5<;i`IDf+?(A^P zJfFV3C}b0FXpiiHg#;>w<#RUW{=^PLK4eqho7qFS3CC6l9~_7{j(2Cfw`}0KLmMjv z3@c4|_}nn6CQFMHFmI$}8F)h|2c5}$9$FwH614i>46{@dT`#l;yaIW4U^qYt3Fy4s zVMY*OH*hR_!hO>)YIy;Gk){D905okFfly0iFi-~eKpB{9*U%e86YhqV>X>JqZ+MX7 zvxyaQ-?p3}(JyrhRntU==Ioj5g4P!9;&GblEK4C)K$#8G&MJuL^05ls02M-Ei7 zaXk4rW?V6Pf8-w_9&dyX^Gx z?4;T5u=>Rbq+WE}t=HGxOGwzNccA7LW*GI0_pJHPt9G-~VV7;zI=?z=0WqN7u3vOp z%}#~2E*fXoC#{RO6$S*@#buYBwa!~zDAv8K5K~#rNUE{RH|)IGZk)nn{dMcC)qPKb zylHhWkk=bvR%ciBcDL2IKC8Fc)ph&oveRVHEaRlrX`Izt=gkunxC2Ml{H1x(Wu4Rd z*;!#LQT9|DUpGO>`s=eMa-vqAwA#%^7n_sZH9&J9-&uuqu9}S&-ZcN&gvQj{?<d(}y3=g`0_#ra zg>iP-A#GiEnib&CtrKG)0jlb}!28$NofavrbIrH0 z@`9QQ8oF%1N1~_$QhtTKJ8eRG8x=~Ls-p@zps_|*F9!TTfnBYQ?4tShto62e(P(1s zB~p3U>NLyHSFH{f0Fub|T^)E{QKJOoR;VXg>kX@){L(_PWnt+1PD_px zQd#3vs@pW4-2SK6|GswzmhQK}bDxsOiuFIvOVviX3XK$zBSg@*yOjujJz=j294*4iMq=oY-l?I8Z5F|yu^EI-XNCGH) zCSLX}ta?cKgCM%S{mk4}9B8K80L7{2!b%*dbQf^Q4Zxnr;c*%)XwxW`E#yI%qC)dc zrA)2CN6@rLgQQ=L&rd(;wJSaU+s*pPdDEN>pOnXn`Cr}JJM zUPrbwWYA|}6nq7(JJ0_(0$d$DYqXyAt>G<6^^d-9xr6cX#0mgCe~paRfYAFtevi06 zKUQ4+x>s+seip>xUXsUVfbk0=J4`rDMS_{oHa-9LnKTDM5l<4o{1_5t0MThnth&lD 
ze*BTOT+p-Qke6W?yW_n)>wxJj_rml@CR4{3o^=MJ3H_X z@t)`JICdW%aEoG)21|rYp$a|3@NCFuCw$7?A$JEh*CZ9pu-5+^nbiYxH^x?)=?E+; zuxtL^tR?x*6jQ4>8FqK#dZK|;eh+eLB*QnxW$U^!^B;`wle>Q_=l>qe|LQ@tc63lX z*gvcx{yW&KZRYZbqKaV_lsYd5e)=m|WY$Ey9G{n|mM{}1+#HtYXdE`C4t{1A`wf{;bj z=ciV{j@c-3X-|Wd9IMZrGOg%74aI^3)hrST+Vr zh@{WUwRXZC9`dj0o&v=zRSPNw)BkdOEJh9bjStb%G93xcd^QSHAlm-DC*>NiRrVh} z!pHJhVgG9fht;h8KRVc)|E=ZP*fJksR(NsSUAzRFC$}o?+znLXnF@C3doYw z7|e5%h#5-2AQ6#7vfU6Jx*^$-zz!ozNDuww6MZa$ktG<_LP0Kf)*Sc=f)pI%-eN=D z+W>=fA`ycGvPT0j32`Wl6ljO|@Ox`Ab$A82xGV~wvY^*%dv{&$%nrhy$+owiU8Q z(04^h1rMSZh#|HE@=OCBxP9v~dqTE7S?`}e$AY-2?^jLM5mk3yNg*H+A6|PNZURav zgkUf#Zh45zX=M>HJxZ+Q!X(|>8mUw^^mu?VQlM&#jiP~gXMSyL)T5R}C5v>S=oE)WIEA+y8BXsDCcP?h<$w3e*gI8YC@tj>Hs@n-x_ zp*9JHGMZaBbrO|*Z-bw?^6P&WH1)KbKwshi?^QSP-)p%R^gkf%`}vcAzaLiOzy0cA zw*L?IHu!HnS8Q~9)a5;4C=soZ2^aC##Sn`us*7>Y>S$kBT%Z3Q*}lX&V5DuA46w(B zU@`rP?F#ru^#No-31OovtPkY`aWKw`9D;$V7X)_Sp>SewNN3!&xl1%ZYcs@la7~0@BDlGRLan#(407A`>%)qe%iAHhFk>Si^(1^p6RmJB)>%Up|Z5~XabCIOLnv{fXkhP14*JP zh{!J{0tMg5Ws|^_5+JkEL+?SaPMAXLJ)K)mjU)|FjD%u(R!@!=$^OoB{}!txDNAn_ zUd^(4$wsF(bo;d1Ou94LP&j(NwYF@EyqDnq}~PG3q}?yfxADn`nF?-bCaEVcj(wwq-_&U zo8}_*1Ew<=Ym}F-Jf8BI0v>G-5Yu>g6jQGt<-;o)HfR;}a{iGxY~)@87Boe+zR@63NV~L_ zqy6}3yTe{!8bcH)CM{d_(PSS{ej2%22`e73-y832uJo11|2WdVyZ=|)-}rxPxfb;Q z8{BI3{3kj7T^;|mm-GK>)x(Ydw~i}y%4hk$H9xnesT01@g3~=Hte@`T$^=QK!`<6( z{31@5p3*UM`5mhxgOWG`)=mIz$liScUoV*hUqAnxoaBk>A8b6w8zE1)mDN9|4}P)S zpYjOrQP+a`-{vC+98v;bkKP4ZG5`0f)xFI8Kiq?2oB6+nO9DXbbW`hem`5e)yrxqt zajHGC<1}BCA=oFL?a^F5l@+9v`{%^!(jT%Cd~nbLGEG3MYWoI(=#F5I zro&GJqpZ}4A-%ie4tNmn2PH4mBZL(jTj1xfhx@`wVB7J5mwarwp}JL~ ztLM&q()S!Ar?Cat7{43j5L-47%~zYT5a`aZoZJ!vO%g9zO1VhA8Tsh=IzBp3JHj)? 
zpCI6Ydg;a_^Bwn4;tB}*Dg82Czp{&$X_t0caF-;hO9w3IfDDz(+zWdwn7eQr*#F`~ zK_jDdaioeJBF<$wZZyOS>1hm1BQpfCK})n6w*rQslv_(xeO!=*{auh8_kBU0IQ}7x zVlcF$?;oH}2*P&2x|+K`MV zTAIc&E8W0ctCiWymoHN`ClP^aT#mXh7m#O~l1x8N>=Ub1(@YZAI4P6J9x~t5M-<8( zqQs>|jmtI-FcW@i)M2uLL^3X7#6IZ_d(Qs*2FnNRv-F=?Yz;&W!ayM40|cNdaQCU$ zDBK96^H~*>sp`>>S%cL2VBJ8jf5&S9{*zX*?%eXeZ#@zMuEKw{y)6Da0vB+D|JHHo z_%Ge*8U)N`RG41xl+Qz0_4MqbdLjm55h6?zVI@H!ovP@jQ`<7YWHhRe#pq z4C~r&75vpB_;fwUDgE?Q89x8`qg+n$7U)g!Yz5*uUFI5AaaO|5IQ`1ECuZBb!`*cF z$|hqp7-gL;#J39D(gAE*Tl%V-;5h-be3adFY~o9c~O-&MQr-Gwr0{w0;H7}5t5NWxMa7_ z%s6b2f}HT@z$%5aM*LD3mVy7}BDN~R2(lNkj(6*`b=AQ@>jfQarC$MT3UM)czk z;;c0{W)_BC+(KA4_5!)l25PehV{PMR#(fM8gXPOpK{(@RV!1maAj3h@wC~i*ZTxA0 zKw}#UA_7yf7lkn}h$}_h-W-F{fEdN*^zM6=1PRIz?jCkJyYHcx{GJaYvFD%#YS6E; zisoFs73m>W_Xbet zR^;N(y`}4En3Afh=TvR{l@ug^-&-niWH|w)(oL2wZgMG_c%PzotctV%CS@vKM3X-E zWiEL3UuTb6R;4v(p^bpSmh#Wl!wHG z?MYLNVXUB|z-vJVQ&zSo$TFzf^pFuin2SuU7ut7J?@7^4w}1aRa$XYqiO1TmFu(GIYp$Ef&mdTGv$Nqj3;9`z%g zW9_XhtnSCooM`20buHCZ(Ku1A%cB&U^H82ls})KNg+uj?wTmT@leDo*jY+!2V&g7* zQl+g~@~B0dSOsO11qzGHg=}dJwJ2eETDX>$PL?2=xbcfvi)$pyOgb=AgB5gKBbO?T z*!PC*Pj)Tv{~CY@BLtB%-iG!*&J|o4|8bc8{g0!)-Oc+yYq@m)FVpFoE11top8yml zqO0etj6yogPXw@p^3m`<4n2u$mUFJ9uhmh{72M7S(IW69J(pM5zqs#JSZN#20)GBK zc@&V*Q%|df5&S9 z{yXO0etxTAN%`9o8SLi%eB1!Pwe2gZyx_$JJ`*|f7gyS@jq+1bigh;-Na7M z;PETuZhk%Rva9dSqLI^?c)&}ue5w7wW$0H+Fbx2$E z7ALjf9(PvgZqAO6#a-zh!%*h(1<2AwynqbrHRaxvWwjYH6jbN%1NXUrZVnTU8i!NW@8*i>O0|s#aqa z{k|9BS&*vnT09Cq>LKQp-mX)1vU~lqbb5z7PCaH@;y&3SvuJm$yk+DXGZJs6DNgc5 zHQA)Nlr^0?b5*8}4@?`@Oyo-mMrC<(UsfGk8o@(`9JwY0@|tB^^C*!*&ym z!hO09<|aa5JTM5L)t-2+6<0sUhb;YguBcME$SUlN*yd6M{OlxYcixT_i>T>UIz^zAjW3_X7J9ERA?;g_|Np&PrTBnB6X zlHc2eKk8b({-0}}{OqgW1Y5QLx4WN-|ETTm?{4Bh)^ZiB|LIPDBocV&Dz9cg{@fe> z;nDP7(yE0&BRWBVOd5fKOd7y~?IKi=DJJkiCK2pQ9wubUAYjNQErSmK+uob+w~Zuu z;`6tj0}&j9@Ao8| zapauD3XG3C zW7_^uZD5ANWXPX2#4v+YTR~uk0zD2aQcaG;KKWItVWj-biKVKgNG(x;0VFvQm%*wZ zNs`s_W4J-1P;a0x-RHuVktR8@db$)(7FglF^ftA5KGW08|2@Gwpqq1k^emJA`2HVG 
z`iH#@|F@1O!2iik|H7P~ynkHA^C@zX+f%=g&r`nyhj%~3;i(A3+o_*$jP%($^%)pC z{iBJMQ?VQ-PNxpCZ+5c2Dy)%ZGZWd&T2pDjra38BDUVq*Gi8`5Gs|{VLyfeacBoF} zuwdpFV7}DvYp`8r$&%sHRkm3zGcGY%&ID2Hl?n&ON)OAnIanH)DYqcG|9ZtS0~H_` z7$K`ifHCsY6T4&*Si>lpSrLmAC{xX1t2^u1qENp|hRFP;e~j$*i^W6^us`*MOU5Tu zvSNAciT|Y+5y z(z_gpPj@-+Mt3>z)u(Ionl%3@-Bx{)ZFE8SWUB6R%&g^Ij!h8W<;WJryBsqO-{mk7b$2;koade=BkXQ*o&E}mYlbYsX#<6Yutf{;D8E^nMd^yQ5)y}tB9ws~&! zH1GdUvq}Eh_y7KJ|9GSS@r9ni{tumAxBoXE;l&b~oaV(JFH`QG_yrE3_<{#e>Zlk8P2l(`3;bTrfU&uO`X#9? z)E;8%pa0G4{}14)E**bf{ICA;;l8{6pB!)Q|JU*a)_?AF!~BOs=sRkNpK=He<9mKk zolTFN5tJ|e>hNr^kC1Zc{hXX{Z3W0M&;V>=5EsChpr>Z4m-&<8bTLQB1rg8A(fnAG z_rQ!)SID2yVG7!gv>ci{PLU&A1MRSuOb^;c^@PY+&IhMwYDoNEdSl58za8QLO=f~D zo?B8hk2Ev#YV^o9b&ntp6)IX&=_(SROr({{kTV-<>k=}WVmC`eia&&OU@?TQEb#*s zZ)4C}#ackEMRMMigws#ICL9<$*$lw;#c8q6JO7*4f6$0GhxmpvLwsvpf*>M7Z)D|VWLsWXmpjM{jTFoBXGUM92+Ip)Sq{t$r)iPE&|vO4zQfwfP+;xx{TdQO zS(cO-Ql)H(p^QTkLt{`Yi6Mr8#E`~ITXt=O#L&!CN(`;6Sz_oA1TsO@rXU}*(vZYZ z5>X>Dl*vhA=;W_vij1{H``hgidGL|p^fJ;dqP3mflTTBL2J;Zs{+4si*Zy8s5z_v4 zGiL2?nXT0RrjDLU?e7(8$=ctwF}0rfmtMec`PA(HVB@?l{J(zh$UXlZ^-ebTe`|Tn z{ojB|Gq4CClv!W$G7o;dXRs!p#iJA?cpY>tDRzb-AgT?!(euiHwlg7#Nz9k!DBoKU z`yJ1u(^G3(r(#o(`-yL89o~s93RtNo2v0!EqF}L!E7wPk4nxUNM(iI*d7CwhW;69X zn-K=W`(!#z##Q+qor4gw65Dj40F&}04Fkvh0+EeJhd7I#J$tSsm>tupt>bqR+(Hat zcb$S0hG7T%+!9nD5-&@M!NcOr^GJ@4_9R*XN*cKh7qSfzZ!fnxO`Upl{{~#0Q=W); z@TEuU=J^~?9sVoA4cv|Ux268y`upzv&r$Di1 zat8jK|EYm4OWzU$=A4GFq$zN@i5%sBiiL;6lE(%53Rk8l7yO4^E5U+K>c&S}D5JSlK`6Ou_9~~VWwSze~r0TT; zr7*bPfU*3#fBxy!^ViS+(ZPykQ?D;~0ctSx#O|DN=HomX)PP$kEdhL}$~*41{RO zES;yBC<*0S=XqJCqjZ*5S0wkNtVRil?T8|_Jph-c(wk|=m*_oBv1Zsf%*qap$^o#Y ztlE#swBqDk0!S*gKMb=5uR+a`ow0$ak6e9B5pf@-q)>?u$rZF{H2g&R2jHCp1cxZG z1W{;QfF32^dhSl+c`824;_P%0p8}{RHey^x{}z9WALuJhd^ul?X6cw!e4gdGU@6&O z8lshsm?@uwFJ8vy}Bx^WZumeRrg%1&ebeyZJ#BxSq_~(oBUzB{ZKHNn8R;& zNfLY3XbBo3O0-)lF=QUaY*dQgoQbK)GFedzMh69BfrvO=;nZWEAKY3 z;|BB({Ncg;lt+u`d+8yX2Nj7zG19-&;6n3M5E-LqV$lbsXc=;DGAiOsG%={N&S}Wa z5;cmi^2I$lI=166xn0@zhZvRHB1b7A5e%<;Rol7dc5k_h^z^Ly2}Z&L!$EB~G6!r$ 
z9-f`}EgGn8o@*HIJTN=_H18{Mndo=t# z`iI$Pzt*S8|JPkffR^n42fYJN{&Upd@c(OhEbiF!=wQk}GE4rEb*r2h^j13{@Nrx{ z1o8pNMp*&>ZD*xQNXwV;%N9hrA8mJg+wJI|F~NgKht-s9NP%>@V_hyT+Jk7^9rL;3 zNrfj15~JYPM^W!`zXw2(6nQWZ0VmI-$fXp!vsk8OrtpkovDk0Fz3iQAKah3UzW4TR zd~f@q9c5%k7w;}hk zTO{YRc$~DbPJ3_O_IBT1_Mg2K9W4lZw|Cl!_V%KroC!{6caoOi*=$$*0M>LD?yHM@ zwp%2p$t8JU(POO>rFvup28(Lu?d=C{&|4$OAekEIp&Q#CZ(_J@dr?MKgpnU%YDJj( z5i%=6=0_;32!$V^vLaM|gilt4Pus4z61ww)=+0EMLjR!=DAkBQh&ek0&A#%@hj-Cq zF}8Z!>gOZz^S|Lpd;9iV9ABycr-AJ?!5!3Cn4SnY&fjj=4bYnkl5uSWUp{Z%J!IcL zrYyLFBoEa z&w#xb+YD$MflkyK48Z5ox8Jr`we>;;ddJd76s~-~;vbgE`L;z*KfcTjTO`xuOJwYP z3zR~B9O0TYoOj5U3^T5^BU_&9tHs*ll>bKsUq}=R`q+|CmAaV%%E3| z0JUMWIBCUUkhp?598zk=!;Y{5!W|Nz%QTM*P}5)*hg9hAK!W~-<;S4Gf#t4hCzpH- z*vLKo7_^TQ@LBc$EqF@0bnQia`n5{$zSJFxBoB3vveXF9=$)_ zQV_UI|Eu3Sboc**-bVhnmd84{rdjo;tAuXi)L4p8znz>4rWr&ztE%(z!QNhyb?4oS^h0`{OwzcU7pHshYwrbcC;9vQ zJSiCe-J>`C)IhAEk0p=CAm+Owp`KEb>OC~i##hnfs2BY+)pR<`^P+Vb?E#3=gJ{}J`&HW_yCn$vFCq$@iZf;{sGAR@H;PWWlmdxY;ef&S#bRe_ILQvitx2V|V>OK0Mmk|E}e+ zjK4tXqgDb4UrnJO{J!S~Y-wNbMzSZxY3at~uq9GMh}pplwNBJ}^teH8xZH_*2aG;u z$+&WSx_hqJTivcGXv?dHFN(xAP{K&ME(|1ux`q?u7nvUK3{@_!6bPc%iZhYCW`QQr z8f)1J2ZT!k!omVhMRtRjQ^k3YH<{rg97!j&aL1~VB^o3!kve%N)rJI}ZNHL!yn^)5 ztl&|7V`xDpP7SYjaeTX>qb=n$+x^ri<*bP{@V+BbiW^u{3tPC^Pz&X493-2;8muG$ z<(9T2o+7I)b@T66CZF;Z_M3MwAs`J0#1KIQZMP66#^hAC#s6AwFmm(`K0@5Kgph&6 zlCUU}l3ku;DrjJ@4bsQGh)FL{#diJRFy*rvFWc+#203jwl9)Ua5$E0N@>xg`kkG!u z;DC%4DPCDhi?K~PEw`na*`+(Nq>OgUg%~h9QuT|w7#gK~>HYHW?WyDcBv#N}D1k0J z{~hjo_8&(_8~;yhd93rF_&bg(o^#d#uueEwy28P-4|+wl*1%K(&Fw+p1leFmYg^IU zGSaK^p_ZT4#JMajajD3Ph=C&haZjf;^B=E$)@1*6cyx5&*?%2x+{G6SVDslF5DNta!aMp~k~%!QT!-A+_}(HJx{V2DVI8Q5<@m;2%@5 z(!r-6E1czu;rOOK-(kqcUN$SsnVWWFR4m28`|ILCaX|GbXl zL%GA&Y5=gg7*$0~E3LX?v+V4iGs9boOhB_$#3-i(mNm_1v-|=&YZhOW2+74If)y8W zoZMY(a0c;A!2VtBb}P+3B}JJ$Kwy77q(WZ~WxVh6`MD@soAV6?=|il;kVfT@la4^6 zL2N-SgaQ=5+ruq;?7*36fFsj5l8fwYogzFIPt(g;bc`QGckUYeKWd*7zmGY=yP4Mj z7~5Bik&}cCk7X+UX(!AUb3dU(_rhcbP8%g69UK^$*;h&eP9FCAm0zySOr{%dQh 
z`6~w6EvA+g@7M-e$*IEwCAuW*%~67wauGq=orxK92Ez#^(~!r`lM zzU6qRa3ncwVFZT`I3c|$t0xx0(s)!C#Lc?(fdS}GKaRNg5Zogp7 zyv~B0AlWjxj)>F7$Uu=S^9Nm~Js@O5MM!w#%40VZw8PhVw5Z|#U&i8)6wh84aaM}S zKPT;*8y*Oj>3a&sLd|FSYy!rCLoKv8FY-_61i>Ge zc5S-93#&q@2u2`*DQ6K;)d4{qEDXgK3H4y)+>Ap8Tc2Yj;Nx=fkx7W@IjSBw-cyWlED3uYl0n|;WA3L8Gq zoaLNRl8J)Hf_7+#hjeykK}-> zF6E`m`H3jH4Os!ZJTO?oDY1g5>fBI7hfza%F(UuZyX!QSaM-reV>;K8gv8(@k zbhy8<|5?l9`~NT`K!(GoxpeE)iO%zjq@e5kMa+^NR}O@C2ZkVsaxz3glwY$nh;{#? z7Geusq-E0f9FTxXkrQgFu176FStn}kwFAyaT94ecA~+%WlYArLiz}mIHwyavf&r$K z3In8=%GX@@`lL}Si=dupe@M8G4^?%C1Y9KHZKOXX*y~cl$QkAEouxs5K?6Gpg3b`u zxJ2R+jgpU3a<8HH@!_M#LtB1?I5v>-K{S-!T+qKQ7)dMX(a)*?h=zV)*yEUv#+w`0 zjP@Uev?K)uK$_Ud7c)dphi%&soAH*;83|GuoeBZoMOGsK4-nf~DtH6%n@bsqWVoA3 z-&3ce%4R{79hZi3hm6hM$N3C~3h(I&o!%|Z<(qH5iCz*45=jVnKI<5O@WdN#E9stg zqv7+c5*;>_Kv!*)m1yArixLx%{b>B;~Q=6u5^FujI(;_7?rl2bpU30p~;WV9A3)z`~PsRKV z5Dz`Vh*FG0#bEuwG!0GnNsqY6U&ix95J)SdBUGh94meez(#1$hn0#zEDJA8q%Skd# z3lPc5R1r42A?p1EJSoR5#grF~BP!M1au%s*4)PUt)iQT|v!4~H|Afw^d!)f&htzkGp#KMwHd*xvTi(ha6g z+MRO;P2x~ax)6*z zNF+h}2885s%kFJmSbhifGJ>B1PHo5Gy`3>0y*Vab6U=NOeVLAc_}Va%-TpdYsJIcZ&QP zwRW?%SgF%)($#?(>}_TYH4cUb)*%#wYs>|%G4k3*9A&OV+rfr~s}o(ez;M7*rrKht zP>Ff?18RvhBh?W_Z~C46chLce&jthh)#3l%`O*4Dw7}<{{2ch!+C6BS-xLfXMbf9h zc~>ucVph!!sr-CPw&EZ<&9}6G@-#O@_;U(4N0Fc)0v6JqUT|cnprsF-h6)&pO<@@A z!v}~yORDJd9+nif&ZE6(zYV*J2m)T_9w$>w@bVP+SI%y@rGvd;yDAo-?~nihA=jJc zy52JxBQplB1x~3j#%XYLw~*PdZ#~JZ?{Y)VIRV(YwMpX2ZL^Ft;GSzp!Wt-(Xn9c^4RFo{d)+ zJYu9E{Tg=~q4o;jYVb9NmEZ;!8D5R~W`Z}^z}3aIE*$Reo`dfq8uxp%vXd~e$`Ln6k2S3E8ruF! 
zBd@`D0jr}UT$&&=C}dN=THt{7GQvI3sjG?uDl|@QSIZk*w%U1fQ>Z9#N~>*)0927~ zl8)CPwr!GXrY5rMKYjeCPQqXwjG8zMuDc38Cb#Q54U-*O%f(^NWw6eB<=x+ zA`CU&>pLZk$h2gru3(%*U9kc@lNl1|5Wn%w1Wj#z!?KN*_gXnnG_~%kmP4p?8L%9Z zdYmEaA*kFntcPSLNedz?n*p3itwvj!bTPHBOSx#9J0ScgbTO^zzFv!q_3*l6i_v!r zvW>mY$d5p^L?!-6R9r?wW3017+pE>9C6?mrR!a=*JFV6W+6`)|1=C1Y>r3-sU-DUM z|8)l{089A)qoX4q|MT!*!~d`2sk^vuxp&zpgo*L0fGj{BJ#HHp{EpDeh`q$(;|?#v z>zo*UC1xAOJ-J@~qI3cm4AV5hd(rU4H;Q7lTN>9i1MaGeguKhZVpnUW;TT`2veN2G z-HczM<@8@cUJWlTyiosZ+q@-*9nVmSvsD1nQvAe=G`RT3D+?GZLv>#&TDT-zUd)V) z@RE$~ccLd5Ig*n3Lw2mxR+k;C6Qs(#P-fP26|Tp%r6~vEX3!(N$fZN8_^L!PxsoZv zeYhz}#!DvMq0+zmXZ%OPILfF7LJ7Y)xApfQPV* zc~ti53fsuhyTx`I4ZnvnQ{rYLGtWlIYB|?TsI)hbrps~ng2lTZH;4#bk*i9X8&usD zxnrASnMhlJ1s+sEFr{jBtvkw%u0apX*X!SonQQTC7i57Hp*u3S*+2g^S(90h)pvJg z+2agCz8-Ta4d0Mi1O?4}oZRom)~BW!>kNMM)NrhR+v)3l-!63e zD%u}lfhFBL^#c}G^i90>r6g3JKj`?%qN1TV$C9Wm{=oJx(q!S>mE{eG!^qVzMuJh= zfp6eLf#0+auyhEH;h>R9FI!t+fAC3}cjz;4@)2VgK6jwLl&URL8L|61dLJ{d&syxr zGV)L4G5ayT)aEV{^o#=9esXLL^k9V! zqHTIdcuvNwWC%{LGHfj@OJYl5u9Au+M#k()#(4riB4b=e^qnaXj10Y?=@-7WrieFb zWycl@W~LluXAQ%P^D%N5rI~I~Fif`6z*kMgl;m%=j%=)wzknZZ{z^Wex5VFpGLrZBiDI4 zs%%<*fc(~U2&5>^DUj0zkAX&lJ{8b-5LD6alVEKLa1=DtEG>f%gDOqXGmSV)0zF62?rR=wSDGPH7*M?*V* zJ{!7(!SPSGFq{SLWOO`KwA7prRStAObPLhCA*xq2kKlDkEceOx^=HJI^`A$@-2gx> zS^tlFCw&k9f204op2uAOxzmkPpAP8&c2AYh@wLk7{AVWqiIqOBgoZLGB{u>@Ce5m$ z_68zz7OxUk$;4>t0K^X5e}QHy;D0|tSM!hYj|=nkJ4RnZO;T=MOOI5r%VJB!gJYFf zh~U88Cf1I-3z0ifnL?HfN>oTC%i=UC2`H!D}ZL}A?K%lGjMsc~y?njkOey5tj)pk6tpl(w*GJ@J~N|;Gv0*>=Z5*2ZF zn)swcN|Sc{?GX>5KJ^$HAYbN*--8vfVt6!}kVld(5A%p<`{A`o){n3K%%pCr%*fJA zrA$yI)yRfaRQ-BsEP6&157y0#<&mUgNTM8Ad)4logV*wPIV*)K#iM#Er79;7NqR>M!XQIsi)A-S5=)1c3C5)h@IjFDw1Ya z`~l!sl&9@iS67Sh6{{N_)^bP(RoDZg&(vY}##>m8U9GE43hbd;wEZRc`dNJGgUjZ* zlc&l5*Ig+9FFF7B4))#q-@~Kc=KQ~w$MygI3O(w0%DVMxx_dj(FXb74Ac`{vz|%co zgtwhNQ-Kt`3S9{vs8^?u%T901va>p`gPH4fFoUC3wtpK%$bZnM?*0dN-u2+W_xs20 z{67*eoB6+v$GZQ)PB+AVk{?HY#p200B%NNV->wXA35dA1XZw)l7g{I0tQw6s8N2Qe%ju>`&Ht`V;oUHC|%&Q}`ciqs%J($}!^Cg<3o 
zxheBrg2_nHOtnBuQNu2-Nh45~ry}v?ZB3c47IVKsp+X^INEuX@_&l4rW22_ffR?H( zx#N^;>8DCsnv_#DJr3}o{ID8{VJ*{!_E|x{RFf7!>r?c&GshpWFn?-BtN0Wis*q8` zIK?cN3{nwi*$9PFnuaGAqhf$IL8u&iR6?#Jx0Jf^$VEKQ3TwFul@b_+*xnth;I>h=5gfe`)my=>7&25tQcdcj~fxHO*g+@4_p7 zCk53SVNuPJRp|$v&B5-{iAB-k){$@E=%1SpsW<;rzzqNrn#!;oD$AlIXrCx&B5GstgmwHMG@HX!V%vuJN!0ZQwU2G% z>-LIGpTq`AUCe8!D1TRWb9SfCLtYlG^wih-vkSp1h16^)YQGA*kF?OvulDD+J*G4t9dC#fB$h0{WSxhi2) zeK+%lBTvL~r7f{*pTU{w|CO$O;8m;PKoda-I}kylr15b9GArCBS2$0wdZVGWCsI>l z4Vihj`+u$c7uBTKV*k-QIPm;`A02G$|JL%@@?TRoo);Ujmdw(az50Tlyz=L)5sRmb z8O$E??2(lFZ11=%w@g*AY{wn@H1~51A45X=^d~BzO29%2qMi~^6y=BHlh5j$y zw!ep`X8u3@ct^S)@GP1C$NLBF{m69D6K5BEzC@V%K-;E_{A@?-Ez#ceug%@lzssK zZWCdKaP^mQL<7&0s!$OSfDvP=D6fk+D^uxTAf3vX?5G`M|8)q|Wc=x`rkjDCotJr) zJlNTx<{>MQp@Pu|FU#ObKC4o(hFzfv8hfOdvR;(`rpy~st4_$nHMw1e<|W2*_%ePu zlonX#P6q=X zXhWQqq7jpl&gtE5X@6mlVjH&m~QZYGX=mP6s2qMX;x6Tc4VA1Q^P@FIz za~s@T+RXbTQe*=0Xwkqxx6N#OCo4eFL>=}9#t9|)XxR3N@2**&plP5Z0q&`lm`%<^ zLesj96EV!Il|UOgi8PLV6DQMvuU1NF9469P)@Ync!nj&#G`ze><+w56bQ&(!N-6Eb zL^{hfOr#=19Au2l6w*_HNMX4ss869F>`gob&vO9Nsc~n;PGaUF7R)A$f4N&I!-%#l zDP#)4g*7Aj8-=R~kfCQ(=2YF$3$#X+!G9WK+Z^UzYKbEJK)Dg;e`-@0P{PU%4-)9N z+N264n9UQ>rj7rtO=Lh3*LXNk%Z!&+_qVkSw7mJ>4*NT>ayR$Je$(a{kg0|CN5!c` zQ=_x{;T3C2#7w<~RRet(GojBFFtZ)tm?uWaC}T>c01-5#8;aw^YLy%&&=A%_d0qby z7H6OwL+GnhmZY3HQ9~Xf)FfSQROuN~s3Ts78tHoB+i%zHeSuBvYFFvs zhrfisx2Nv>zZlQbiI_=f2zE@R-$;p9>|8;!Q+noQ`@>u78v(rscfD_&ko9k(j zB=nko+F~pi77LJ6;B-vCXXCm2t_u1-pU)Q+f-9*k&F{fL>GHEqWWYvzy0aj~9PZu#X|;X@Q_4B1e_=C$@FXAlb|5K{6iYL=FKi*(Kxp+ix>Xr$yC zx~l3TYDhC;_&{c}8@4pUc48`mkiooGR7Qw)M;K!X& zbU!M%;h>{+W=V?D0!&VxqTx<}t58Ibcv&!7>eXVzciR$6E%DmUXtF>X>6+@cdNhzF zwXkC>=aewCM~Pr{(|myvAUX5_8HXKQgA>MVEICqixX7?OFuu6?=7LDNG>k!<1KCUf zuSZMOU>r?>3^7p0XL&Bt!v&}Kg#;w>Mv;X^Vk&i`SLr;R#l`HZ10}M%$t(x4kvLtA z>9#TlK}Jm8*;t;GPg&%`HJ2567>QHodkh&D6-@ZWRz7&EyILtzmQD+6~7m)w2_g7UX<2 z9*Hm5v2+pJVZ>CPnPs%C28m4_&nv0@0J94{-HJ{oK0qu9!=bTFs2mxoHB%d$Sl9j& zdf4CrR6(-JSl@CEP(Nr2)W?$IziGKHN-Wa7d^q7heIf0<*dp;51JPsYR@hOQz1AM-kk-Hf=m@} 
zDhq&4O_1vfP)1{}1YgxsjO6m=xWn7@>KhN<%;c>H0nv%fUAX@U4i5nz0Mqnx7M)-U zxKuDDJZ6dVhQMt#mkxk5s{SO8Ly#+l;Hd~Gz@IPjNnJt4&4N{ybIHKPfnchy3d$C9 zT@XpFdflEUl~oklGUreK)ig3uVQr__p0Q)vfh z$VJZgW!)`tzzhp+-I3#h$d4zaQn`g5+(vlsC{F=6aM0@-L>mEv$?Kna65A06O--QG z-t;IGN8cM!2U@-o2`NtRH^gj`GVyV8z9B;nQ7V|b*Lz^mZtBc{CGBRDca zBfqyaX0U)pl^GOJ!5Xi#07+Yu?hLXrv66>85v*gW6lYK@1relRW<;^IIA&nTQhaS1 zKkZnj#a~~IHq9f_ril{R8)18gWJA2c9GZG9A|ByBYZx1xeKsSf=B(zav_D_zNx@YK zkvR=q-@iY;j-LJnuy?K8m;r>GOt#O;MPB8HbI_a}Wa991RzAiBe>f|x5CT~_8X;Cz z#@&55E2)nH8CeP`RyIm+Ae@O*Gl5JjJrkJ;F{4laVkeWjCXmchHfc_#E((7tLKwi! ziPasw?OnsuSJBmfds#+T2p%fKdQr=Ap6TP-7R@V)>Z#wN#l`Qw`uzj`-tBvgwLOftBCe#|&CGcc_$6=3{U3UE`2|YdsB0ER zr!@1WsSrbx>YC*!glXr!sWd~A>YAl7%9!`wRGQNFx@K97S#G^Gm9tzAM|4Gl4i28= z%z0$On8OV6Ak5|P7UNg^A19&?6oXv+-w6~W`v)(kNc*0^&#ly$%9C-d~H##O-m3pF>$tNYBYf-w_*kkFe^0cPhS&qHiRSV7 zIosRK@g|DcOya{cht=+GsZ*T`vk1+Gm@l+!jc~nc`Fvp8$YRRMa&kpLXHW3=N*yQQ zatp6j(kz`X=8o7VdXAFVD-;OBy-`YDR8Wnuqdk-H&9^eTEaA#Su)y6dk?+#(k1>|y z5$gB@>gvla%_q=1jpQq@Lhp1stJC+8{cT0sSKikZHgW zjq+LAHq<~Ac_z-yXDJ#~zz`6xEoeCizHEoqqFAp)OPV5Tp@DUoeoES1eXn}WwvmOH zt<&Siz=g(XF;Y`@^Z|a;OcTs+H+8rbMe;kbX4D2}L9CMBD`f>^cr#vG-IiEX0T5sC!VivlQM_FB?&e zZs9;P}gd(J)ouN!Vg4!%Kl~O)YI5^{^DCZyvZ^t$3Bc+Ml;PzdCoNOHQ?G zP%^g!yVuq<57*vupreZq=$qT6zps&A_&##HI^A0D9J<$0JlBO>NAujR604u%1~10< zESXlIm?!pq9O3XAzu_Ra6R)5gehbZYC4atWDX?3{E_G_`Sc`#~1r#kv(u}(MZ4~O$ zivIELxTOx?cIPZ!++=>c=%r;!RG#N9napLt$D5c z-SN+1!Qq=;I>gRvx#UTJ%r=bDH$@1Ax=u`)p;vX{@$HoHkV( zbzUR{ukz@RRQhE!G-@-Xz^f+xzdPIhZ?OL#oSba%pV#s<^#5IL|I7CO{o|9qhyQ%M z+5gw_II58KzF(v|D6b-X^LH;07SNGo_Caz9KY=DB%O^TYlIH6$T8>{_YJ5gO(zH}r z>{MmqpegIE235A~k9DO5BzI;3A)bSBuVpz_%UZ%5n@!R&OR)BLlo-|d%TNUp1~P1o z_<~!a8wYp7aN2@+0(=Rk#XY~S-1nUHeOsz^1dDjzR?!ExW}0rB00*>6`!umXnuujA z(3Vp93O1cq{-PVux;BCbl?xyez67fpW|fr8-mKmVjFPV93d~Y?WQ2$`8RH;n4Q#O{ zNk{6NyfG0YC7(>hs(hM~+Iqa&Pes@_$oE<$bv1SkD(G=*YFTn}CC1IGIx%r=e!03c z|J(KZKT>xmMj*@1|HsGf`TywTaO3}VEl>WCXO5H6yzc}&Z#3jQ&r32*&KH%K(MhQch_z;rR*A7RA%1*41nqjM1!Jgv z8G34Xft-xsb+>!P$LHrsHn9{uIY?`5svtoD 
zB8obw^j1+Ya=cOyV;<|c{o`xpu>q^N{eup`8g4)Ri-e*7IRV>Tw467G9rjHpn$A+v zXfyj3!3NYA0sE5~YqGH30VL4ib2N)gtzT;!;LlQ}m48a5S^g=tVDE1J*R21Dc5qh~ zfJ@f@!`{Ao|97+ppGMq-A6 zCEYkZzXK>J%W79{duE^n#zIsqtOep>As7}SVJ$P!BfTKRkh**LC`K@`V57)%j!+Rm zS=NA&YAbIvdb50CU`7FiO-ykR6e8kfm~kacKq|yw%J>^Bv<%lEaefkF%%d~(`{E!5 z;nG(o4;i5c_C!W(80UppXwOA}LLEWI-RVU(kTb^ww?cw=Q-59*$tyz-67o#Oka~92 zjs7*i0E!t!!D>Sw*M(q-B{5EUB5JufPYNzq0&tW`GB2Z9`T@+h#px=~P7{FH{J2O9 z1C6uArgUD-u;F#aXnbc=)HWb*(n?`?fW4nAPg?4MN*pO_BW+MU6Y0e(ofU@t#?@Ao zg0OiiIJM+5M&)_Xt2gp&hOy~J|B0{^0Q=)aF{+F(D`_UdSXlMXL@|kVzbVZzz*vg4 z7K5boDqleAh0;kC6L|=6F(YUkW!8vau>G}tq(i0+`J-^vpkcF|%}EPRj(rCtU8$A(Ok;p>^G71aoXXaua&f;^@3q@5>84D4q zM3Yilm&#J(sxt(T_V_bPM};^Zpqjjhp2U-m1dXCtE6;EPQx}nHfWC`#Hsi4cLyOj1 z0tn+iGE9^GK`PZXTMl4E$J*dPWb$Z0yR~)oNozBRz?P;TJXYzyG)J;|*7?-%|F-{) zTdDvq;s1LF$FBdUNG#OUG}N=d=#SF4@BICdY|!OpFXHhqdve&kM;V9$G5eQ!W#?{X{)f>bcUj8%qH0;~swDDu>vgTXcdWecy#MRue?U`Sti}K1 z(Lvw4|2gg*Zsz|w9!vg5oo-41RS2ML5w=$A(H|MAON2ZF!Fa;R?~lIW51+s(wS4&* z&`V`X{ryL>{t=-vyXj1o@Gp;Ry?sAZvjyiD>5=4REtnMm{iZ7dO+683Fsd=1>|Fyo z8)OR4{%q==i&I{#aaV^T+&cbafOR|vX_YzsOF(S`D|a|bijOLsGf>8NBS1w4Y+YsZ z2{%VTm?ih}8-% zcjT*rxcVNu5~X__yETm-J$e+K!UaG4rl0V3DsiK@#|$e*`0OAFm;eI0Aq~g`NCS`v ze|)^9JB&v+4hHH#JgIG@F1m@9yrh5iF!og&fLha{op z1c1I}-V&&H3UK2jt}Jpu8DXL4mb(2-GZ>0_@l*`IevijK&=yUUtAXO5v2vJ7SX%%TnlQ!5GcyZI+$#tb`d90^VzmDNboB`vLiIqgd zDkt%zyQQ|kCDT~0NxVWC=`L-mFWbhlrDNgZrS>rfnnszliW6MErZoSFP}r1LTW&^j zol(iANGQ`tON~P)MiX?ZHTKCl$XOM>MP@g(-h~K5M#akV(DsDNR`D$TtJt81P%C@? 
z^~d5Y&rY9a`Qr4)rBC8iI5kHWMugG-FON5p zoohcU$$xKY3WDdpYuSGs_Ye9T`;T=ztM&iwOhJ~M|BsIko%8>`c-fr)*YcR>f9KNY z>#_uG@(w?pc2K=Lh0E8fa?M7a=$_rkvu9*7qC;N4c0-<1_MV}sp{1VvQvVgMJ?g){ z%q!!guRw2CAZ$|NRIvRd`7`J@`aH-&ZDpdkV^Lqh~?iKV8tc3n-|0mCfgk9Fd4%L)UR$AI*}nzzlJ*X+J?_6sbl=C z!r1`Y?fYvNMCqJTD9|l}$H%pcpma_V6zCQ~W60_)KR=8Vx9#7frDH2>>vWAtzk&r6 zjU_W;{U8d&su?{uG(C65kT+Itn5{Wtyuoq9Jk25Ejs`1-x(s+stZuqBod&G7>>dvq za6Nmry6u)TfTS6hkri6m8$)De@~i4*XXL+rZIKUScV+T<|GHTncwFDQB0l)R zbG@zB03gUYB|#9bV=I{bam#Y zt~}STW|rS@0)mp!omA4y@KRRJS~%J4^YfkmR@48xof<&%`Md*g%8i4nqLkFJ@9gWoCdyD*KWxs5ZXZVs{#6+We{cpden|^T3zNP(b+1VSex299$ zM^za=xIoKISN4nX6ACwe3KE(fc-_DYSFMi0ow>*AFzVu~YkA75U%`B*S<3Qf#O`Qk zaaE;o#}8LU(!hh7d(6uxYB~tMtz3-H*e=5N<9?RUCbk~Hld{amsq}{~@Ah^<>*3d5 zqt*!ij)6C9VSqR`K)}$7`0ck4QT&w=LEp5E0j3fGTd9P~83DjKyu@D4^hvxJ{<=nG zUYt8#%9&tU_#lv#s)FhK>-i)RT?p=Dz0BfZF0q@O^eEiK(`hhuRoTgP7n(VUxSErL z0tHt(w7*k*bS$bC*4~af3P&Be8S3wLXIkmptj6u3r6)o=dNQ#pgI>&#I-O9Q5~Qj~ ztsqb|A&dK}O1`Cow+X4RCAK% zjMbhp$nFUliv1>k$TpY1u_IX-r-n+6x84e3Uw4B1fWp8gxa&Yqh0w5R7EGHkutQMe zZ8c}AnzsdPfa{mVM-Z*hDp1@355`cJXj4xpUEND7okV9kx|76BBMOV>v6(u@_S%n7rFI$DP-&mgjWWmn~=E zwQ7bp?$qgmv;6({dObTDYL=>|RRm}L)%ADnLBXromlpUsPm}#0c)VK!{^Rk%(UEKa z2kN35`TrUo&;Acg*noKh4SFe9XNOcVsOysepAGpi#IgbW#`y3OUok%g=_S~$s>o;E z@8c>t&5NsmJl>5WHbNGCDq}ye5q$IrFs>dye(Zoo%VT5b1_jX1K4uB8rZRu(m{LV!WlRNs z#%=YAfn!PnRxMJnHBxws(L*@h@Aln zXj(qcURC+{g9zeCWoXm^-qy!OD(Z|9r+mrSVFFrp<+r@zW#R31sQ8}>>84!twMdu| zu%0G~*J~L1%^k-Vg4BDf)dJ0TYbpZYR%%MVBk9TURYrewhk)*M8erOIZ}Yss5y3W% zJViR6abl1W8&cTN)I2eK17N*Yn3H5=N55oqUvHY!wNYT2{Bh|tk-ZdsWg-ehHOE~!UTLkjk$wkhG8+nCC-+ZqsJRDL7P+KDq0qazzye2ZE^htnZS z$c?=&8j8qOI!`Px(B~o#=raw04EFB~VIB244&y_ZlT_rg-JFSRy^rok{WjPM_gv5G z9K~`}OmiZ}Iq^KJjGI_h>p^syZ|QU6X)fvzkH>hH6O$Hz^m?%EsvyNhVp6a|-UX82 zCT={o^e*x&Y$I^GMGPPT^iK^e3o<8(xz*7k#r_M3CzZD& zqZrCY_Jw_zm#p@d7U1<7#*!$vN|R-Sle7OHMnf0f=x<_;5dkmJkvH5*M882h*90q! zYdc5nvH_d!efLuLZk`1g&5^@sdVB&iogy*ZrLrdlP!ycP73|g5cJ}@no|^N&i4Xa? 
z{XZV|k39UpqYeJodY%SkZ>jy~0e&N%Hok!~Z1I^w0b)S@xHYP`1Ge4T3m(_khMgS% zkCqVHS)T18iZ$d}Iyl$rE9JEc&~;ZeIOhl}B(Ryd0prn?7gDh{idBB6{E%GmE%?rj z@On@vF(FiK1)@zg_2GP8n$>*(OLqcJ-63e`#R=C?q{1O2nqY_3wRR#IGJw7qLd#0U zF4zK8BM)PfWqXZmYbi=Wu9fmYQiw5`0>}rW{XP`MPejX3fI-pF!1I{F$j4_nfGUA= z2+;;&&z{T@Lh%Lm-eG(g8VdT6V2&NMZ$LTr!Me3Ff@orfH%Tjknj)}XNXTHtsL&95 zV8&7nfqh{Le#OS}Rjds)hM|0h#UTdAgi~}B5;7o}^JHetFiJjx9vXHj8Ny5RNy{n~j0udT%D|BxCF7VOydAh?1<};#yf4UfNq8{!E?YCBiT;}Aq9JTTmj=o%3=Oq6k%n|!S?zC z0=s^<7wtut8}|Dve3s$=e*XKve((6i^Z#>lxWWHk%TvRXOWxlILUx%h`R>ytAM&-x z*e^&0OR&y-<(C-dJu(l&97p72{1VBHUm~&b3(EM1vD}d@Rqv#Skw;OKD>-)Mkiz7) zm7xxZz;ALWu(^JNmj%>6fYJlTx=>UYW*D}iPc&@B(S4B8+>ge%z1#;$&V4ZG&}j^i zP-OG(^SR(Y4gkl##USPZ7`xnO)1CW2Aa$pfR5<(S6Ab(^ zEpA@e-2r@1k&YG>$l^YQn~u42jwq>OI_@h&nH_3Qe6whiT%41E>;bA4C!>BaG7!y? zW$9Tuiy43wn_kzCdqSV|DA~@=%e+b+?Cj7OCN6-FK+m&k$io*{K{RGq02d@iSY-JC zGMwO4hNqlV4B=vYb`?!X*bd{AY^YLkibL-+h2VzZa6INq9MQB>xVPy@w^NV?z45{k zk;xGh)8@JrT+;;eL)j{X;k)Bpf6fPH1{o#hO=)-!?(q&Tia-U z#j&$dssfIkjh`T7Ve`rxc(<{wadNw+RgE5&x3weD1NXK$>7jjXgB@dBUo&mpy1-^E zPAi8ESaf4rh9|mZtYBSDBhaWoz#_#a?QU#Q!sDmT3Pmk-j{S*~%MC0~%*Jyq1;{`E z_BS#RShuD)YeCnq;Q@{QyVJvVhEPr8?0n(Iso%IhcjLGdi8F?M^0rOEb*tZOt-96h zxp!DqZ+YOHaa+Hm$~N!@>#XhF$bV|&f55`tiSI9XmfZjMdIzrmufwDL4gSws9?Mr1 zoH}q=$}7{2$o}vB@#jASJl~D$FMO8F|D*k$H~&xi8~OiQ9ujtOpG$w%bSHoU=OWpq zB?2@hly~MSphZb19TdGZu1mEN;qJN2nH(*%u0<>9o^~a;yvTn^#_W0< z#pv=F?P#sC_!)edEKWoB~4O8szo`4|-c9_`Ry!>rJp4>B#hcR+`G(ot%X5ESEOP*$s;Q-AQ2W+)CcudBN1HTk)zd-rQ-yP*I&r3p!D96Vi)EqBVbG)GT zcmcKblABZ0RFafowPu&svb(hBvT8>w)J;K{geofj?108yMQ$O^?F7!eS1o7<+i5Hn zmX@#a%IdCmqNg8aRM?7=j|eacudTfScC%|;>Zr4FS~Cz-u194gbca?T9@p zlaCA52^(u4;-mqF#sJXo!1?4;$2J>naf?w}fp7|y$*%6ohAPwqs1-+z3i*a^423Av!b8jmvZN=1dA!e4u1! 
z-)uw#pS!|7~()4lCEFI^20YgV&1jB z!P3Nd2cw95QAuCcoGLC|U)eH)fN$BT&p~#%jC3s+f-j-=# zJtt+vc{GwK36^>S%5&fyco!8FaRh>K{5`pn{5dJ$5Ef~7;jy$|PsrnQ#?vM(5v07s z>PD!yDL=;A9Wpq9W*Z+wQ_^0@*(WsaPiDoeO3h86;W;VmsW(nLA3 zMJ|g`SL~I0y2g9{was2ymW!m^+dtUfvK)E)YsI{7wE#EXdZn1+N-+(s8TW9}_|gpV zKiE@${*!X&+aCd!od5a<-u>_KQEzkpTgy{_wI`2&)@`4>f>_~(ukMI;^ViYTopS|KH@uD!QfK*cr*iEv^W>5>b#V)S=cA$9eqNHOY&m`<|%K6CLJa4X{8a+ zrd3*Olj#Sssgzb+X@ON%Y`CAPwvwn1H#t|-A1h2!@i1YZC5)qiq66U8fD=5&Hx(=b z$kvX4?2gm8tWu0MAV9eZ~P|%n9i%qVL1t zG~(q1_8J@0;u}Bp>p${2Yr+3LJ~(#Q|K9%bX8m8sQzMp>i@z4r8Q{Obzw5APfZi%U z7~!uFwv|4}1i+B)tG5I7cBtNt8ia^J^WQs@jyX4`4fpciJ^XUZUOs^kjp|56_>*)T zB7v3x8z&JXrKpN>v++%(5iX0?w1CVyBFVxM}06|W)txwJ$BrcYvMPDlw( z)&Ws@cc8LSmA2FIPJ^CwGXsvP2&BP z5-X~2R>;dxkKTeSi?nlLJhcw{dO9id z zK?;fW>&T@>iMuIB`e{>RTHsO_eovPvxb7xmI*zL;Sa8#V+=)IU$vNIxfcU7gkj`*K z!oDkkf)PR8M$DXAYoNR2#b!cU@W=+hT=bmY~63N*Q&$FN@-R?A9?C) zBa@X83{b4jkFX^g2&=p&T6>`fz#5Oo$vK}q3qrNO!A7~tE(eCvt$%}+2>@vEWxox$ zxcGYDMtzxAXQ=IC#N;rHs@6Vt55*nSY5NuS(5CIPzJ1S%Bw0@jBg^SkbJ9>z_%jLD zmj0P=rp$G?)us)ozVcH5l2}{a4+C{imen5qB#li#|0G<*RAn7(3;a82Xc5>Q*VZCO z;TDNbS7;OXbGk~y&_AbK;M6Fnp@r~ovIen6{rMkmEAGY!sQLWg@AXey{NJPF-p2pW zTAr|ajXeG<3y_g~K?M~RWV!wUMV2iM`^Eb37&0;5LHmWGk(l?t&yuv9rJVbKB?b0J zs~pgqAz}3N#b;1C`r1{CLUM>2K?dl2ucqn!S#*g!DF|JF8tO#JWhKrdBnWxK=a_e` zZ?G-eW<*@f*|WwrjJ$2FeIyQ0Fes%6g-1?lp8QZfhm?fI&^e{Tog%Ijb`idRhr5I< zF%!csG)X{6fvm1bkO{iY7?uSZR8-J2)CYId<4|%5hcpnu@NM`cAA;U{%nD1ZUQBDX zph+HG!AA>6)G|mU>MA4GOX6M1|l4^SQ2}QLeACz_lIt zwNVNSX8HlSO2h3>dLinYTOZU}5Mjsz@FNO1ND0)HZ;XfnIS^a(=(4R`_2BA(?X*XW zh?<}`fB zX_j<&_$C*W(wzTD%DvL2mI%Z9(O4ua!PFKA2Q8Lv^b{!KQ6N?Xsq)4?cV=P`%wYL} zbU+r-|Y{%Ojv3G30qqz^w^ORcWd4_5GVgTqh^v(7d z;!o=1TXIU$VF_jW=y4QC+cp=xU9p<7QqKhfVM*DhsDX+i0#S_&1ShZ_$2!`Zs3mF# zB?+Qozm~ry^lM~&8>w&6JC!@@)1qJADf$G;jqJB$Y>aqYNmdF*+l&`hqakgzpbrlRa<{=>uWg!;gp)6;h zmQ6dc>J*vU$1rrWU}58I7N3_SZ8UC3NQYbLN_xkAT+&1vcDLZ@VU4P}xgpG9Eso`G zK#@UBa*hjt(tJ_k_lp>y@%V&diu7w-IW1E)G`MRPL7ydH$qTSU=5kS;rPwqGM1$qn{ zz+o5M)<(VV5#1FL+tYTq{ucNjNTTh36+@xiuKNmzQtB%%-_sXO6T?y}j_rRiEsD}B 
zJdfZjZkh0Q4`QX$h~CKogWyWUES_$ZcD{nAS^tNH8@Ch#F46xvJUVgxf1m94Huyhl zc^XAwrWTNClLt$b(%5;@H%tMQec(z0xFz~MHxrkAZh4nu(!QGRPrwFlW#JdgY)}G5 zMq&C`*s+;F<#LjoMH-$T(L2smq0^(9iu*aX1U&zVJg85YrM(o7(iuue>h+0u7xR>g zbW~!n^+(XfmA5!7C7TWno!zh{)#`L9ep(jUkx&8q;Ki(+sy`uC%UPmqgA`CvM?USIM67rs|W}_dhA& z+xM1s3!RKf#bswj6&I1ozvlB?YHzDjr<4c%xCCzq(%$S+r+l7ONkL9mMZ$%V;tKsI zP?Go2AH{FVZ()nYlpc%}_iJq!Ol$d8ilshU{G1FzS5b7;2E)!OuvfeyX+E?C!QqF*ABVsuS$+xNf(#+pK zE!Y3Q3k={T{D1GL=kotYhX)(||Ft|8{;#1EInc~TYg)FWzimbGo>YIhmae@+g22b=l7j;FzfQ=Z28TEww96kBGQ&eNG_JiIY{ zqYX0!De9Bprv0WYM(g=2Eq|20|2Gu_iwFP!000001MEF(bK5wQ^I5+F=b7C|8JV^$JK4&_*^+HJ(Vbr>$&*ca zeGw#q5@S9z1Z7)yH&ypH?(f|%x$Xu)f|TS}9M8^zB`VD##3x7HktFN4O zr?&RpH@Vi~dcCzp-*6Sbt^3V-Z3FIWwR-LKcWmq1pp(C*GT||F|Fv+6gZrIalid6t zw04_Eo#rF=f%JH_xw&lqH(qVMF3kT%z5ePu_9R67gU|nNG?~TTXq>Q(_1a(9$ut(7 zLF`SEQmHM5o=jq|KTW(SWIPzcYB8F@dN0;6ZZjv!i;6nfq-;b`maKr{tIP|cQWY{PW$(vHC z=CC|Ul0~k;We^Ra9!w^SMZy6`1jzf*B|b7k45P#wh$=kuWXT6e2*Bh z0uei<(#AXj(2+#}g8&R|I0d9`z@daVuzTXb^tgu6U>b-p;bvmiqPP^nlb8iO5wXX8 znbDUfg_>EY!*J>jI}#qX!lxk*1mc4C8M@=hA3}vN$|@1ry+k6`3SJb;Qov`dFK~qb z{t*-55K09u20$c;62TNzpdA48E}@zW4`?o665ZqqXId|ulAMSEt{iCO;d+X3&4g<8 zNU8AeerR=A=Xk&SvC(caxIbwh|J>SZ?y;RutoxzKc8^a!wOj8$blHdFgS}?E!x~3> zPs4m)nM*5S!P3xEOicH^kqYIdrub+mhM zy4N~-Uu6J*9UXVsLF=&9g=*d7Ds`%xm9l2+c%L0M+q)m&XJe;z(CU67MD|KH%(sX9O5{myBpMMP^Ib(`(>=}EVBd{lvve+04sjtywKN7Ow&qM-tX zj@zFQDAIt4Uu7RZG@-nW1SLu}kOUo|*lyRZ2K@m6yVe-lQS<#l>wWWRw~0@W5z5C_ zr&)oyYIU#zfW&S;HlWW_8ZyoiV5M$boswCxRhmiG+GmZupIeBwt_*YEY3X%BB-{O< z$?cRLo&V?Mf4kY(J8U|^@KO7?MgG_7>#vIPf9v(L{C|?`do}@)FG=LD>cvnho&C$% z0SFQjX1?H3oR`N*GI_JM<^>~1jy>TIrQ=0wBjDH5{+h0}mcd$^2kXeoQAGig07*4< z22pTxGXq>!5Uri5=MNdoSu&LmfOQqc7cR(ExwhL{>+|7=u=;C1=HXzx9dHTK^C39$ z0ZH$_{xTKuY}@qmr+QXXo~ zd<}>S2vHE~nbOD1RFz8Kf6rPW(AoD%%P5t4y&i~VK8ipKp>k81o?!hwTP-P-4nf@e zMP&tq{n=#N2gPR$8UPgJDDkjSd;_-lhq?0nzaL$Jru#)k;bZo33;(yC8UA0d*EgT} z|5IH5Rbp%fwfdDeY-N6LtyJ;(I0^*pe5!L_)~Fy1bt$j_1XAESCw5gI%%f0JUpt+> z)%xnL&%vrt55cyIqz4Aa44xzIP?pRlR2wj&N>DV7{fFS{AP9i)EA{1#!0JKBXA0=c 
zmut>T(*y04&j4`}g}~gTDG$L8{TvYIOUuBZnM_mMP^{*l(0(%|n$M{+C^|3w`c1i8 ze*fFOKVBcV?f=c!bNl~QZR2_WKgC7+AB^lvQ1@V|C-GF!;#V6(X1i18XvDH=hkNZ7}SADM!KhQ-n z$ZrT1~@!F|vIqT`(vNpWrRAJrui?fkE_a|6d z!_Bb1nzmB|r22JuvOzd}<9+#5S7HBy^7ZKA|JMCq->Mb&|Hk_B{(q7yFaK>hd?{k7 z#WgLCJ4k0gQm)=?FDl7(&0(??smytJwWa`5)F(o+0iA|k!piuGL-msMX?Hde&Pg0y z1BUcfb{xb4ESvvksHF5P>h%~pD>PNbbpl$7q`2xC)tdFr%iq(;Dpo}WilVrj^^o>G z=Hfsws179a24gnhp&fl4!~*O&cFFyz3fB^}p38VoL8H)X@iFf?^i);(h;y+3F&$Ty zI=b!oUNXZzaV?~HDX3QwgNY?*K?J~fJR(=H#QIs)t8palhZG2op*w*Y2N(kHxxFgJ zxfQIlo}Nd<%2l`b!7B=d0%W?Z8nS9KN+CkV6(rMA6B?n(95@B)^M8ss`kW2Ifw|`i z^JKNK05ram$?1d^5GR;0N(CsIh*-JGU0Am~oUw2k^hJz_r?_O@SQAF>`fvagpfm&t zP^3F6Bop}jin12?P$e87g%Yz$XL@yDr8gGjRn`J+Ist;dVZAbvig~)A!eH02OSa7} zd#3U(dc`Dxf1@Z1JL>IQRn?`>5q*xKnfhJU1(g)YhbVx5@nr1DAnnyvFw~Fh3N%;H zUHFlslEoRt4-IhPIAK=y=7K0~+dK~n6k}qlyEkueZZHDE z9hkmV925t>qv=n)NjKdOv9=AXTwZ-wVR0o7@|t}H)UFo;X~Z#~72&AD0+4NX2A}7P z!6+tG0HX?XU=R~3)=aFYxO^|$VVn(x%cnl9?_lDKEV=`0v`5UdXI8J-vGyL&yhQ)3 zm@QFF(}wh>*N}LY%c7w5XPRd&U<0c+cLkdc)<(uvLHi-h<=C6pdnp|=as*6~5iT+? 
zi^XC}$`Txyp}y_}2B0=zZd3Joz0BNIMd_?*Nc%WjMuhF_S)!T@B^b<;7QhBzN&U7~ zD6CA@cM7X{8#V;3+kQo_UiH;C?Xg7fEww+nUJEB8rbH*8ibYq(4U0s+nPoF!v=2Rs zPKlmmSOs~E0>o%1iqOS~m;{W(ZF;AgBP6WK0`=SFzErD~5JjncCoGkwvKUYO?BSSd z017vStS%z-^Vt5@%HMal>Hjn5%U9_A=DJ1yU*B3U>i=8t?^*wUic4IR5&~NRR0fVR zMajVJOxY;AvO@m6SR~Upv>sgR2MkonnEBX9I=%=;$@r(V_yPsnd@*o76d>Cy6jw@^ zmHn3&m8?Pt&(_sXlqF>>^Qa8I-ZCG)e*Dq&ubQ}o!I^jdlP*jfeo+m-K*J08vKumu z%31j1kLL&ghIRJkykcPdTFO2(1ylp9@as<{`2Sl^;#XX^?fkCYE%x7Z;1MM|126~!ZUPJ+nc89eR7EmHN zC`1u;j~L4&iiSq#Wniua(xje~=Dm@5IBrpysdiD{WdkQQTbV z2p~NX2rYngZi$v4)=O0>WVeJl=8g!vk7IY}*(b2B|K;0ITe%k#>&c z!33D8GX72|y&tMTjhbn^^g^mGX}oc##wpeh>*Q$BjNa9Qve1EI48dhCl!lbiz*A<# zAm<^`&XFJP=p0c1_rbt{1wX5%`y0=l2&p`14CX!}WUrYNfwvk&A@D-()4>dFUOJ^5 z&&>QDGt1+l*pTJ&Ky%VvW%`2D~>4ndaG+u3H$K0%Z?SmGJDgU&JQoTbUKSDhiv=bB>kW zDCm2kAigVLI}Q?e>T4?%n8bw;6I^gn=&LoTRc6g7U6i{{DU-qZaWFT>1B)z=+YJ@N zQ#OFj^a``7E8~@pp!hLdJX-ZN)Ihb?Q5?1sK+6a3M$`D| z#=f~=?Bcf3wd_C{~C%6j!Kb&;S{ilCc@g)b^mG|!5b|o#^_dS%g>d7`( zv2>@}?s;;+WA$uk-G7NZl^vvvVDWFh@TBi)=-b*;n17h-=k2!c$8!&F@D|>lezZ90 z>832CG>AW!7iDsxRC1I_hf=vvbFReuB{bc^54psV=>pjnI5{6TAL<@nP+U$AN`cbZ zPYTouuDlg1QKmf&240B^g7ZN1Sz0iRyxxI@RQaesom1sNr?%%)+~!*_zHSESrMRp6RI9~AH!MhxuS zRd&6=9ppfMrQRXVsDMxy=75W-sK#}gs zf4@-L^@Vzngi1a+untpYpIDL{uG>3c!ytisJz^oRw@!cm4$pq*lY|Ta9ySp}2fdni9Q-846J;QCSPYh66l5c``n3}|)KK(j zx93M=e&*W4Qh_@q{?uS&Jy=r%#tcYU{y=(24o>Q8&cE4L7?_1v4lt4J+GdkPr!~by z(hZ8sqDSg+kK!>B)1YWg<3_Wr%hBc9J;d?YS_<*$Q8&g6vp1S9>4H(^S-Hh19kN3n1g6 zXDb7&KU`iv7pG83g^Pf`ea9BGt=J3?a~7GR0#I-$)7S=#Aguz0l&6G_Q{=}yt9Hi9 zFgolBkVbiJeht;He(qU6ea6mH=!$Jg%;)o54`Y*&1%8DSG5C;3NcVjZ!ROgbed6}A;iyiYj3%HQwg)iH_c*{lKFYLqAEl=?BUU@=f-YMVE8V4?0R6tC6 zvi2?^+m_rFKE3Tq>$M__W%9g4XMH~!T;%pmXx6cr%^qq|N-e75##1j2i|ILe&eMEl z&}*dE_cb#4)NiH401qwY)Neq@3|J$Z3ziy=X#3a8#+_BMqB@$W8U|^90LOGfzl;5B zr))FdIGq&JT4{@nSC)d%!3N*w6RDyNR)my7#~hUUkPp4Vyt|KB8qm-wq3MxZRe>!K z0_4TqNCQ=wGWduA7-hSZhTpCLL&>y-Vu$qX>At%_v$bm-f*K1B3VFgk-)0xd*dx+l zIRk?!yTSUFq|cd9bFR){xiBpE)i34g*N+B8fEc3VB*VB;zbpw^sEUfgwu!Rr@WI6m 
zv|DKT-GY4%-~3pXU~k}NgFsbcr1H2I4#E=nV1eqfG_S>Anh0z8=_FIn%_W%PYxLy9 z)*uIf*%JwUrOB6y(aXr(IYv=~H6g0R4uIk;{1H z`7dfqp|phT5xOOjiFYrsmZ#ou_yYMF2zAg(U>Cd3&bfbpYq|f=_!++52Y8GBzgAx_ z`u|^T;QN1{{eR#7D!9VTq+32ftEieu!3A!#jtD$5ha+!zgCEVVX8cfgS?c?%s#8Pc zb9|RNl6nT89=(86>NluH{V!smqk^hEM@7k;AOdSiBv(TDGXm=@8OMWI7*(XRZZ;kg zJyFss08ILLI3*ALLfyP)&Y`c!2#?I=ArndP!l zBL&Q|>5ZG+oS{~soy6n5ZA&UgCq-WL5$p80b}sLo$9)fE8A_@cM9O1DITn!s5z<7O z4|^O4H?ak`KGbUhJ+28)K-D(o)LV5`HEOYq_0)8gb7vQ;+NP_ZbS9Fsz!JM;!77=n z9vatmu}OD8 zeE9n~DtuTtDtuU*kC5nvQ9LAU1xR#Dbx3#^VTSVyj5~32g1iimUQ~*Qaf>dJWv7L! zR(vvj$bBox4{Or4CWos5i+_$lTqo+VvF*+Xa)SzPJ!yQ%f^P)@T$=9>Qp*YFIurs3pe#%J zQy4H7Y-#?42u8o8T6r`m^%qtsRJ3?Hh>?^%M>y$A3@Svhju7nL?=|+NQQs&5 z6S;|31k1Pu#?~`V;S#5C7x9c2#58_uh4>G4Ew}${gZ}Hxz}x(PTXXq;uWPmEzyI(g z*9|^AV`AyBXu;!zf3?R8JfFuZOaCl$%z=JmTsReCY{Lk6ZWw8v8iTU##S2|znX?LH z#~0MN7y0sX{6B})ZEE_>X-3K~R{n*)V)d>dy8or3itOy5<4~l5T#7%=(0NKe&U$a` z%sRd^=iVg81W8arOad|h>M=>qZ@=~GCmJAS$#iyxn2AL+x~r?-Rn_(QpE?KLw>^2{ zU%NVN9`>eWZ`UtUmw-*TiqK52TZT@1tsa>Aux6x~rm#MRGp^Pa>k9Gd)2DxkqprUH zqp09+oxtLLctiiMkB+zTA2;$ejK6RZo6!G9`=6Tr|H0P(&t@K5T&q||=iEotm3kI}>sUzKXG0#8;@7%9_kW_yNz**J1vRIi1V2M5&gg#A=dJ zw~NuQZ z)Q;fGD2{$3I|z~lRh7D+@J5~V2HbvdJgbhKPwKofXMifBj)aDSN`2KUMKaQcA}>PK zj4f}Fdg6){tO*d$Rs`00^Bqo;w>eFe0_G8h1MUS^4Mz&bJn&yPRITL0&rMYuZD_Bo z(!UL;Ko z_ByV`(C-@W8`?my>vS)d%Q60`&Bo@ezv)D%kkc9`E<<^p&g+9A6+q+gGXm~(SHQv6 z0U+A1)!r4oPxy8nx}i@QAOU)JvMdETQ~_0ZV#c<1QeM+md<~Rg@U*`VpTeIYM4a>< zK2{}NtClVKwuz^a{zE@))4P!a+?@YELYEL*{(l3{it+z8$&wIycS_A)gD;{@4I}XlH7wL}g&o zfZIo6Xt%nK&S#lT)#}rkxJR1m{9(DMHz-soW{mNQx3R{T+1#HS4VAJtMrvortGVaZ zJMIQ>QRUn5QMtSi>!J9KbzQSo|JVh{XufT5?94ts{>rIOSX@61vlTvtrz!$!nWqGD z#UzD1t7a(H(l$M%8l3}HX|Nu5RwC4~UrW<`lqz0kg|pp+Y6(n3>hGRYao4Z8ZT3m+ z-qfSiRoH}Q98UlbgEi)wAQG5pjDWh?6|kd#W)HDzynBqIMM9Bi$j7blaWwq4tfIf; z%w^|_tw_%Mstx>FUqg}@dtiphMf$5`UHoB&-4J=~;k7Zx%s$$fX5&m9{<#nPz|&6u zZN~rG(P54LKY6r`|GJUK@&5+v_%lALLeEagR&)>a# z{o>{8mv8^yU@d;B*U!Iw=9T)~CqniEC!SR;=+sG5(|D1~-m7Ghk{kI^t5Yt{6;WQHXZR?` 
z{y{}(b7ds9siKd4evZK-vS~EWFX7i1!Nx^{0nDn4qoAKJ7^0A+i!|VCMOvbuo$h{+ zQrpo3t?q*qkD?t*XzJf{LgAH(U)NoR}l#Q28b8-9jt&P@qZ|lI!%_ReJ9Pcm?iY#Y=;kbD9 zb_o##!1!0VZM9rAluQnVl%&-9T_DtYz6r8*Unz3Fnxh~Mi4DwDWnyj?IE!w3bEuCW z=MK>9Qa8FFw7bZl$Bncy>jc7%^XZ#A`&_+g=~|C_7n632oEymi;_ON5*WZ1Vbb3Q& zt+$tI;ML7y@IF&rW~!Ua8(et?hAe#7sjoEc66@b(2Kx)rd2EJwo+P{~$(a**v{pqT z0zY(#6SWldfJ2@xWfoAl?uD?R@+8ba4E;$!?G4PUh}}vJEtLH_{l?0wt4)A5sUl*cpbo)$IK5_%zUee@-vb5;pJK6Mz4*lKwjy9Q*cvCx`pn``?W` zHo;~Ox<&kX$tXUaqfa}?~_Y8X7}zcLJPOFPUyY1n+voZ2FrOkq`n)~@`PIm zrDzI8NEf0#5dKvuLmUh8B&2=o02QG-c@*1D7~VQDWd$msf}?}8lj4n&JB7zk{S@pd4m!e? zYaDU|6!QS`H4k63@l{!M6#93>oQq(belXiQr_NB}IYy$l_V+I$uWbGn`*|6$@{w^)LX}7z&oI zw$N=A)W&`+VBT9D!W%+=M~#%Fq1N}O!BAY^%jh=_uFMN|C>Wj?YG;?iX0?vDGvg07 zGxSj1U}`iEuM@k2K@<*0?Ev(^p zgQKne*Cw9k@xK$(FM3w+e+P#LzWo3AWN^IY|2Fa1JlXRP&8A<^KiI}v&p+IAUjuC1 zUwCFIuIu>+^mR>2d0&0yF>{WAmsf4w@&zh4WG()qto>^S70*As&cCs}MjMj~0PKAW z)WW(8q#PE69kQ_@z>Ow0tD%b3OULSfo(0>nM1l846Bw+igVtE_pesKP(50RAIUYb- ztwa%xxdi<1ONp4 zX2Pmn&p$l&^SsQe*6++)wq`-?+tv(hRODvy^H3}U$zobPYv_%u((4WL=~{n^F#H{4 z?a!0!6d3nEL3drNx2`UU2k2udfMVqij!X(@>_3J-Pa+KfUhm&Gz#06XW>CYA>CauL z82^Of{w{yz831`FpL;@XpN&4v`~RJoe$lgH|35i6^!dM|gQG3~w~@!?|17>w z7`}x6Am>Iuvb>TnorjX7@(-lIz+@aa87$+k)RVyb-5ID-RGYh@@Se&(kWT|kQtE|w}^n5ORkkp|J*yiH`s?)$}Rd8@&{MKvg$iN z=-@?C6*>bF<=2!`XL(!18M^3ZIB-l!PPreW+#dLyx#}9ZBI^=6Oj0rp{3o&i5*1ZtikzXN@fRmvI^= z_I(6iqCjKS#h)V*Nj@}+LT2ezbc7jTo(_j9qzf%e=)|OC;RHnz!8=?XedZl@EX}ER z$Xzqhwso5n5?a(LV6Hq+zT;kWNHIgAIwh5H7?g9Y z(WI23TAeaXsY)q%@yZ!u)+woU7NMMD8V03;VyEQ8UnP0~l=!O&`p{NFhyPMz%OVI& zwB!$hry;1}B(b5uhS{dss7?#5%-B^mg-ju&vNkIQr*fS^#7$Q|>UPsTyEvDjF%BH9 zxouDLT4}}Vu*oB4l|PLoEapes;b8&utFfqNio`sTY}({+VQ@W)(DXe$NQGy-eY?L?E!F4Euo(`3zWqqQ9d*)>gY1-;V zy4t9+W=N^a+IqL9_gV}ZO}YA8XqW&E(aP0zT8g-}D&IBjdjQAQ$@Lqs$_CLlQ>^Lq z!c6cKZFLpv=+*`r0+e%sSNfgPq{5dW%1zT`R`!ArvJNtu+{(F$m7UAVR!y>a#|#bBmKN`q@Sa4#=vkv`;$0dC4Sq`JRECYV{kOB_kF{0-ZE0sw=%nY-rc!WvTU-U22_vv{uP2+xE zoIb?2hp$jNk-yV{hWk4;eA9~-WG#8Dl9-~j^qOSW`&vl4|JWAdUPn)({per0rL%dS 
z7jW=C#6$P%=z*8`;JVu+_y*(v{X0Vr#lhFOJL`Vusx`O{AoPDhmTq*P{>9%-xMjOGoWs zJSF(2vG@?NFiV9`C;UF2ENaFdw>2jYij4iB7WFVMHABXrQLK z4}5X;9T=g6w2+k*N=+A71d2*pEQ-2)18xE5NCN+)lXxC6^SmsS#e)Jpj*&g&0M@Pz zikP2>-gC5Po$~nt_`JI|MQJiTeu-YKfUlNnJ&TkciJDRL9E;R>JR*TAS#4x_Mn)yZ z8Y4mhEr;$*3kkwpi2<{gYn!I(Q*y*BVq}H9d7Wouk|g5q7pU;VmMqGUrKgHP|2>5x ze`E^(*D|kCV8$O73ASpQ6b6FT6ctY#TSX#qg55Jw)aJmKjXsc8_rqu`Q^I>lc^g|&XXN-;TNm7y)Q*k!3!I*H;gd%z(+k(|oKP<G~!?6c{N*z4?4*j1k7y(Ma0{LFGy!q2rI23C%C*z6BtO(~{Ov z&~ymr(G1amiRxMpNn|Fmcu?kAXxsGz$IL|ofr{lbB;ChrgQ`fWLrPq4mso3Mn>UHb zU0SXh@)!+>FurbfuYx809;2{kC7~3T?>TCpTO>0Pol4YPyNs`;jZJ{4>51t-y2Xf3 zq3e3e6IODdFI41sZX95q<*(HV$tN3W>UTg98D?^!e^vv`d z%~x(`kK$@$gyVP&Uua-C5GSmRb8Nn>%re^5lf=bN)|E2!!qtVYLn9burUPJ07>%rB zLPLz_uxHR|qwwuVf5Q>BXaHT2YBIIAT+v)SsjfS1WEX z#LHxE6&^9w8E%RGY~kqRND!qm|1J}~HAgs45*uPRsU9{$V0lPEWBGiT}`vgnx^%OUBUi6G^o{zAKwLz7E5wtJB^_g7) zQ2`W+oX$X_5K$$E3Xq)hG$E=hfEmxZGA$?^cZdMz6W+zw+<41jrf$925$TjI-fXM3 z2$=^zk#Cjf(IYB>m&&$;->y;J5c|F6<1sWsGr?mhVkyC3D1xA-imaPur!Mc3S*pW} ziGi+&#W+$C-&*YV_iMLuMy%)DIfk-vKf3pl;jFK?ck5aGydrCAxVcomKlQP=2Z+X~ zo}hPhUP^nkLN0S1+!7ef@Wq`!b3AiLEJv1(J0x!#lirEq0%+nSa}3}P*~Um<%Km3_ zVtc@78U%&HO==hd>-$j{VmS-LSY!CxZ$7%@T0k&nFG~+2Y2bQdxd!R586gX++*XhW z3sF-Ny3!U%kqpN1iz4)&@cHYwMR_b7I&gARq1YyQfu77EL315pZ(&5G1YEczByai) zkh?!vq3Ip(=+S@Vmx!@YQBEI{^LLL0ixo1yM-X(-b)FPbuOh(9Bw3Uk!8t}DT%MgK zsAlnD2?UbS#^{J*V%?<(F|Hag@fa>fU5n*SD^0^AU#sd{L}Bw31_v4~tq7y_7_&J(*;sDU~bw%`&81X`aDI7GN%wsAmk zjw223p(tA~iCdUAEKNeGh7v16S2`e>Xjz-j;}Qa9C!!iABzC5Lr5kIz|} zK;jmaizHIerCip>vNU?NA_5Se7Z(3K9T#ztV&y2LPU2}#35__{+AG}BsUl7l@w-fc z@Z-6~emtn3NI)oKgiVPBUTu;p2C3r_j6KhzM;95U7wkaD;xl+}MkULY<$(fKmvcJ<=_N}&G|P*GKbHy8{42UiH|8vv&#^huUP9cG!7_f0Ylx55*sZc)rxzEF;_^*kC& zz8RUJ!N2Qv%GYxLLL4L!=&(VJR!k+a)BZ9#E%N30zh5gg0KFf52{U$HA(2|X%zmiy z$$LnWOl4@+A-!$;ms>G+1c6Rd{heC%s&d%80@Z(CC@U@)Ybcn>RlG(?_G_H_?E|N@ zJ!tgSYX$7njU9>K*6K_ZDq7h^!)i2NS zX2x!INsR(#7c2lB({R7%5k6wE%a?;I=9VDYc^BP}2Hj||4@)0On0?by$r7?M>smKb za;2hMJ&aEA!bE#X%ApPMWJ3DQuxOEy-p6BCCuu5@ibkCbgF2$GAP 
z_pX0sMmNs_gyz^IG;4H%5S>tX7M&_bQScY1^l5ax6>Zd?HvS(bkkC6{a?+nI~uO32O8+;eZl z#eYnHMx3xt^ll_hiyS{xly7U3a?@wU`rkzS_v6FCi7)><96Z{t|4lqK`Cnex8UQ+_ z7cF9z2X>x(`LwH29NFKKFXY2To=)`-@+_}Pn(yd0USx?p7GR+B<5As**_0 z;8hZQ)ae31I|vw~;3h8mw(1{DBhEa!Kt&L6}%_Y7>j59~7#RtLmeS zI6MOl5#D}HeGn;GE-J>!Pi-#n4Gm=~QR6T-kl7?aKu4wc&ae3ERK<~#5wl!6;O7>R zlIcjT4|qgnfK7tAy<-MUW;aa+9zeRUvst53vCY%IO}4L~s(&2xqJv&^_;UnnCi;mS zhGfxRlo<-xRPVZuz}A>18^;lD3&(c(Su6qpXdS%l7%tJugpL2=seOCMm2f~iRb!{~ z;AuB1x=vpY)f%jq)*dnx0M|^ zI^3zkTpi1V%__SrSf)R|RuZ5e^=9&O=L>027qyYj=kbJ8=PU6ah5Hm=jlWM4-cC@_ zU%^(2x-&F4V0DEeM{Q$exKdx|Fh!I3%-MjrDQH+QtCCE##aT&US(1LoG#??49xFkr z#kg9#i^$4w+AUzKn5GkZ>sT@IiTxeoU_6KE2YfxxeZ&ozW6l$yqz-na%zf?90Lv88eR9NZEN8;iE$i2HD)qa7tTQz(eh{+SilL!i-`tl1!Z>)=tgpJ(wqFaY z;WulIQZ1_{R85Y9^NLE&W@h&PG~-WKT}ZZO7G=cQ|8GyXT;GkK2L4YxvfoV*803d; z7XRhw@bGXO|78=;it&GwoZnssu!8?TJUQ|8|4$%w%l~iUvH5>H3`;El3~}aF&YXIY zvr`A4Wxt=%FE^NqBQa9PNQ>ABSTnTWERGDj&5L+LeH#jPgL#Q9+qJ&#uGtmJoXYDELR5m?0AvbiLqdtd ziBpGFs}MSbU)*CnIm!%Ahg!@WLYRZ(prm1!e(pw3p7NI%f9-a6t(*k`q@0D7bF6aS zXh?LEmBj@$)(V5e9IkrvnrnyHX>4*ZrcPP^giTTjeh1vDoopA9P4Kkg8)s^y&9q`5 z9l8Hi!nX%=AT7x~3Hl+@tHO?=)1mk~I30PNlB$^#X2vj+$<@c`XJQM#1Ff}h;FWDi7xJyirgyIdA zwJ+EW`n|C=BxcbOhjYsxkel$@kET}#3v?{=E6qE zb+_k#1O4av-@hdraC7?q7|F#g{l9@{z45Q8US1Re-n>Q|GR^( z*nx0Z$DO7Cj*5(yB$tG*5&1PP!C?z};y$LzyRFedX0smop-CyUI?8k%9K7tcfRZm> za6n0u^7gHt@{+-OkteMT(6C>czy3KbTfV-|E9+xD0HE7|aNJ3+ku5POvAHgl@3gz& zlkNb%a0%w&K4{6n-O!SOdcyy@qCad(r8d+ix}ZO4T7o8W59tiEncqYDQ&k0e$bi1G z!!4rMc2lw=6P99U_+7Xj1fXJm@rrW?uUC8udA;J){Z47S0P1gEbyp2XAmXP#D!qm= zaUy~GGq>rEgP zpVk63L=MoJB5R{xRUe1`+qZXyK7g{($GN*U`f`4KTpMIw-?|~TUndtbYkx!AzGTd< zqLH_zM*QJ={U+mq`*H2X1@{RW`;AZl4K1Z*e)DD`VeoOy#692>H1p3wsWdc`cAlFz zGs{160m4RgH;uFbUa7`88>fV}{&f7`TKxYl%>Y*L|NDn0wfG+g`&<9-8+jc4|8btr zX-l#7|Hn;%A;x}S2%rSN`U;4_rl=DLo|f`QgGUE=t>AGbInW*8e;RLX1H5Ow^?y4R zfcEu2I5z92uhIA8^pJUjJ8L z@vT0FX4o;2fnD%cl;GLSdZ{YwMJlvXuv+UCE4N+&h4z5Mw+bih*%;he@Zg?ASNq{c zafF*Gn0icM?fGvk?x^Q+RizWTiw0eMTy)(vtC&(LiC;@hEOc*&C;dp_J@UO~<=M4q3o7hGK;-xNB4_>8H 
zRdEe6e}W&ehV^V*=Eb=er9co?l@9};bQ6Xau$R*WhOpAULo8r%5K8P9XFCeN@N5<= zT~%&zGlVt-$ZI$`tPr@;k^7z7V{&n`sP%5vaX9PP2WY;#gXxs>K~1_N$4-PP`ef=f zhAhlExP>k_n~jvINaIDoY^KEhRi%EVhDCLS_7mad+|;SCK5lBptb*fyx`+-L2J8V} zeWlV47y_jI_M0UJJ9ALBf*yNq@dF^!q~Hjl5l;n*XK9+nS|!|JI9FA}hMEdgjT&mg zO&T;*q*f!IDh#oQq!W^uFvNXRAt=8Uf_zILiG-SyLZ~?@h?;(F!c3cw)v+5RW9ADC zm1?quHP%7rUd8oQHr8l(u6!MtIe2O&j-3JK_^FvZ_7@nDQ$2ym1vLFauLIr2wrORO zfau(faBOuaD-}5Xsgm50P^mO->P7XrdZ&RDW1I#oO=Q=m*v}z)K*xdBv7r4%hT=l= zDwws!g$~6b?dmmCeN0hVM}{J4h)Av1mmyXfbE>fzx};BIeqN`V>}iUL?HNpI2Jog~ ztu<@+F$G1Q63DD%IconYrE)Gg&8CA(Od0P4$m0>CVNQ25a`AS5c9Wf`nX{eQ0)zaEqWk<-aMYs+`#pbS}z}ULS8%_W( z&bC!FP{t3X9fY_}SZ!~WIb)#WYeSDDg}RWJVB1~!G46IZ9cf$885-_UmEbn|6P0>2 zF!OcH?v|4*5VhN%k$x}Hq4y8iOD(5(xR?C3=y#Ml^Zx<+s-1`lcbB!DA%eP}fXKUT zciegV8fi{{f7$UiUZ-WGal?ZSl;wZ_QEw(kL)}s}^h#Cz@y5H6QK&WSa|?XEr$PSz zD$nwCdN<~uA7lS{u)o#+-pJEF{v7)4BDviK(2D!tgQElA|NF_w@z(xxBaeOm!-L*e zgT9X9w;djP5b(3k-{9v!T~Tl)hMUM{mlDzlWAkDV7ZnED^WqTsjtn%qS7=zI z->%U^fqv^;MfXwDl}dCL$>O}CO>8>hml^VFk2D-B5s^IXk3t{h5Wi$+JwL!swS8|>D?ojL#pGyT#dy{=Yz*|pw zmDxSvo1o+AZk1RCUru#%2NXKdRb4f+`d=&`1g-)7FF~XEUo30+Uv6@KtGd+L&5d%; zZIy@5Y->{M^P19TUHNgKLbqf0T+oSRmSJ$CAUWbs8pl?==TjM z09BKs0Jupm1)%aU1z=5UO#x6EQUE+-x@ursC;%I%q5vGwLIHRP;Tf-c6wiC73{wCK zi3SQl6=w>-gRj?$%C&+5@JB>Z04kk-j7^W26VImr+GTE6Q&4Vc@M|xR-NUTpU-mq#b-JG>Re;uRp2bguX2{-PyP}(%kd+f<@BTV zEXN%FSx%+GXSvQkAmHt1xn%av@&@R1mYWi*;H$G-N;I71QkQp@OPShP&bj@wTvF*Q z$KN+O%cUlLmQ$0wvt07XS#GANImiNgD=?t}KYKOQ%h{$i}@b)S(hqtx7 zKKDkpeLm`GKmVU+)BKa4|AUiS{HHDdzo{p1{^OuGo&PID^eQwd>_Pb=FV4@>a^b`b z{TqfG8BHh-3GYBv8e=G34utaH>4<%BNDpsFw+3&bqxqYMkpwnaxjo|k_F8nCsbK+$gK3bq1)tA)N|*lNmS2qn;^sXyER90Tq^84Qy{kZ%v7*fIHqJk0S|}@=bl%k zYSS$AiPI|Idv)b^6cZ z`z?llq1gIfbe>-(1+)CiSp4hRVDu)qV_f_(B+DTm!)y1+ryRm`a^S*>%d||oHL|la z9C{J|o-Zf!bPDC(kb($c){8n1y8*Ja^TaO;$qDO~s9)?|uOZx^gcVc?sjSllH;&x8*IZhQ3U?eQia4uWE_~MJ`HIvlrJW1BG$?-$JNW>Fl zC+VN|qtVN(f&m-J(i?T7iMW4cZ%H~*;XJ;45RcQT3TbUTEm2LXBl|E!)jjG5hE1XU zpb1wo!sMcZKKIuoUqX+LA?N*(+bibFzKrTYt;}9ynSm1t{B4fQv`A;y*v)`f3`29e 
z%+VQ5S}oOJqTU$L@CXBu#}N_sL}JZZVdF+N`}{<@{B^uY072dvopE`Y@gyJT7s1+L zCMAMC^_wzIlcuXhGD{0s*}7Cj-R_WXBb~Q|B@dkgjZ$u`%r>@&o(vUvxbUe#pd%MG zT2s0AlR}PLKTSn?Zuq#Q-e65>om9UY`L9~}7nxOZY}2*AqdtLxPK%Tr(1Rs0*9|;L zsNu1@TPw>^+D*|8Cxoc!AAhcuUhM+UJ}>L8?^LBBmv_2e`-E+B|Hd0S^CN28M(=1< znaoi|YmyXI3==$Cl0*~*rRf4DbM%7emD)AES%=e~_caPl8D)9)AWKepKPU2`$Je|m zZ*?UJlx1CId9i@)QL01_-&sPp#Gd@;<;8hv>?F{@fozImgaJ5T_$Y*bv z&C~bEUTfvuKKs{Ir{6?-af{-)76W1Zc@-sDzC1m{&b%zr3PnI8YWT>;r#ho1oELbC zRM+fGm-7MF%Dpkd9e9$=B~+~X8g(9IUD&D9e$qFI8J=x+4m}TK3zUJ2E81f&X^&Bc z_VFB!x-7v1i*~1Uo+3^#9kL7bmZFXru2g&jN8jQe;>qQFu^eOc&bsV*Ru_*Y=O`F>T}>*=fPpu{x%ZFu{)f}i0T)8v+cpETHp=>XB7Tf?5JKGMyD9` zTHiL%HLuP^3FqVq29Uswbl+?^Div@UAf=%bMsO&Mq67SZ+2=_WUEQOaqRx5rFgobs zsRBtz%iNP{XD06e;~1%y9HYqM+7d4e zMC>>Bp5?u9!F|I8(CsYy=Uywt4`R8KZnm-M}Sa zsK3Il0L9~!{nc75PNU?2{lmGSYCZ4m#yY~h2KmqH7>>z!{&n=-9viJ2>3x|8L~61vkM#SArSYq&faJDFIMs*x&8k4xjU^Ars3D7V-HxlH22? zx=fNR5SSat&G*1C%PFq)=z!|si09Yz8NQ5!_on?`!8LM&vlwYIMbT&7Fb&zEyJ z5vg37WpLor3Fx*$bE~WobfV2q${K^(hj6q9rTav^0#H9>=b#$;iS%kQGm1@fw?k`=^_ZcX`Z^0v;_*_BUAf3|=wv328Bmfz#S_3CJL0hkB*Eec zl)uRM+e42KgtW#vkS`0N|CR;mi}rV@f5J04psK00G`&~S*K7fWzS@Uc<)cq7+M(E~ z<>db;YAFvh^8Y;jBiyPq?Ek=&-;wjbfB6{ye}`xizV-jNfv08s??(CaUsl`yojeNo z|JmyQZ{)Ghe;D*;<9`!k5?iH#Ws#R3Whf?_*PK>>1q=R@xPW6iyj>qqgaKpnq5{Y!y@L zlAc|Iqg+ZiURWfgRjMoOyv*ht^p}26uwOq9h4>AZEjXMs{Roz&V9`zWzLTP;XBB<- zwyBDDU7dXxefu`pHvtvgb^51G^VT4-Dtb0?Ws|r}!d-`?Ttv_F|01HHp^xw(p$}%F zAHk-ERe80tmfgHHlz2@m*9Ze0o*5w|ZT;RLYAu?*1|^eeV8^v*WMn^4NUW>$E5!m@ zzkpXmEMKl@ZJl3iU@f?<-j+P59OkyxbO{72Kd$f0>p=7Qf6Jh+*0YNLe^j&oJK8_m z>i=%!xrhsqK*?OgIB}63LUmp>1ZYh@cmZ0?r_)M(3VggW-vVE+?XUX#RrCbb#lJ># z*g;SzKOE9)@&R5?tH-*)@9L-g_`NAuY+g)7Vl54>b;9FXftOjee&*M7g|vQtzIWb_3ygBb_Y%6v#?Q74c7gtsrj z$(CF5$rQSOU(c-_1BY9S)6v6KVCk;;JxcK2Lzj8lRN}$FD^klba1pWQ_|4aU%l>~i z%76c|GXC$;qer#<|7ffKx0%P||17@mdmUI;adkcOWnh2emw8f_Z_nb4UOG>@PU82) z^AGZ2$X`y2B(9Relz3W8H-(-`A+8XqCek5Hx#%LkQrIn6=}=_K^gAB&O;eekX3IrQ zTT#0bCBt%gE(agYloId!SSyiXy{qyH9S~u^y>rwS 
zr~;$KCbI1kxP_8c#W=7r@(l(AS}M6JiEnV&t~mHGx+!>K2jP8|0krGIBB=`a3TFXl z7>h`K%(=0$3_lI=lPXIR2vh-$Qwzu~>uad9#_P9HlJdqw1ofXu}0|enj zxW=|dqTi=DBf0%xa6AZ8))!(W795QUN$p3kwn_Yz@de_P85W#1|BuT`2r ze(NPtX(#8mTpIT)CLNNJHYXCJqDnVwxWR$F2Ew*AuoA&F#<*<`Q-&J;n?3%$UTbFzCOl6Bl}2Or1RQ%+htax{_V%2 zjvBygbvV-eH|}&Qv@Z6Hv7k>)xf%AWJO4&)@XM_`e*(=Q#e;ct6q(6bMo;R=Tn~I2 zns3fLD5IyrFRI|v;f@NlLAb$aUgkPOeE-PMQF_|k!YuOkvQS|YszRz~cTim(Zw%%XHdgJFnD!Ji*h!?GK6K6jGJYuY`La@HfRcXqfw3N!(G{to zDf=(j4@W8PI!}k}eVjOOG2b8oE=yaWSbqnLhuii>Tk6Yfp_)+bxDML|YvGMDE{*MD zkrxAgaGZ@05E*t@3;aAMi9*to5fG3~*_B~Uk8I5lpF48f3wPSV#LJq-nSLmI*-)(Dz@hOg>g0B5JkVtu>vtu{aq$d5odF7)e;P4Xdxx?C zXxsfC&F(w71YD*6adP6{{~aG4A8qgdHt~e6!rW`6p4fk`y!TqyO@^0`{Q+|p`0Zc% z5FVpw3VHcFv(Gy;7dR^tG%7C36iq1dWd#%8vX?NNp<#&}O8C3CBfMSF2K9#Rx8xMY z(pX4{(YKAoTmeD7!|a+WzNo zFsQ}<9Bl1>HuE$sexnEGruKAdI!kq$@Lgmz$?xgxTHb`dG5vQIU*0M^6}d?4nUATbP#l{34m+l?CQNQW4JZ;%m{E(E>(!#XC{?sgG@W zXO45H)?=>CskSi% z_6Qj!km0+w-FE-V5V!TNGKsdz0>25BpQBeE(G40^qo};j(A`%0_e2c3y-sJz9}bAz z97Q9mLtH})Zwg|AT6$Ht!lETjsv6_KCmioUvPH7hzKI$Dws4+LfS9)|udz?jpBFj| z(+gze-`zXstal&Aw_uuLMHgo3azLWva91xUXKYWMR^-2J%u>MRg6F!Z!s#Pi8)%!Q z(Af7VI~CD(@Kym23z5}crjz7FkuTJola^fcy_t^cRl}$Dl++8&g)xje?_jMRbSVmp zz2_JdsKocPx3Ct(QBOU6X!fV*VBl6_t|CkxgL@SfeS1bB-XtmPG&Rm|T0wW1g$2Fj z3n?D(S@f&ju&<+^=zh11erF&%ktOND5+svCE4TF#pd#?=*YKvm7s{er{y>#|r(O$P zwPkrgn0)S|Z2Nq~(?tKlE_P=Q0FWNKq5c2K@yXWye-lsJ_}`TSfED!rpl<(hGC0}N z{~LK6`X2@U%7}H1upgFrzLMZdQcnlrVF|KjI?M@{@jEAo4K=@@jcX5x$F67fFSo3o zeee&*md!?ySYw8a>oz_SFjHTDz2vCW*#m9Sk=o1rt+WHMaYHqPX$Sig13 z&%wp9e`oC44B#2PRmSf+E>*`aMp|uMH;wGv8(I)HYH#S$HO$%> z_cct~?UX7FL z{r}Dkz*p@5`-cZ!{HMXu!8ZQWW*$ez3tThKonhTryV*?p;a>Dhd4_5|)2WXz)TZ4=RwjxjzVOAkr>KWJ;3`AfZro#fTWJk*Fw$z+N+zzty2PjwQlJtC2P_!L+(E5hDdgzG)|7oE zlbW_TgKIW2&EPmo_xyO9*>O`9-zE?023b+`wU7&YPd7LyMPJ8#x&hegJbi6n>GjSEdrIe= zzVfeC`Xk(?+f5Y(VCCx^_~{DMV1L0Mumc60sNbDm^c$%JBSy9^u+-V41uQ<-n|Gdo z!t&=P^@2|W{jb&d?&1;v>EWBn|BoIW4z}|DO*|{d{{`x(-i{Sq!T${&)%d@YM@L)z zzl}V>z>R$x^iLD{+b9S#oibvDUm)h^m+>|5F!6f{fXCX=`T075zZqc)?Z-bU%(tYP 
zJAD;knm-`DCcoOEadQ?g2)C3A#@@CH47h^UNZUHQwzHzHYpel{tn4x?>yRkgx=c8D zwyw|BxYnioP}wTK7a?0GV_f2;7uj%uHaRTF|D-rszO)Lc0G;v(Cb$cxc;@et5>hp zxd^s)l(ySoh0c23(EUqc-5B>?SS2@I4Qt%O_rvz22sUVqn_}IrkGw2)Th`J9TUTHu z;w@SO8sS#&=tgxwPKBG^XDc$TcBO5Fe-7MhOK{P(ueME_oA0-usdG)Pxc#oLIZMAY zZ|VPd&&u=vyZE;3-)jDE-#`CPj<@!|n|Z?LKMwlmX8%;k>b3YkLYUY;`bFFy|Ee>8 zl#aX~|7c_VD2K=Saj78VXR;3ncq`kdGP_(~6Le(ytP-o>3(KcUggHJ{m&@>}GCsd2 zxjlAIr6RXSzi+_oshSk8$4zosJ(Y(!J!@WSMvu~v&*M4JRRi0?<=H?LljndI9?wGv zSvcLJ$j&)sn88y>H1Kz-IJ0*ie9hhIAut4h-}`ckzhi&+AAi2B+W+rD32epwzkhV# z%YTm#P7b&G|0W(s2Mhj%)gV4lY)!1PNfWE|HL=P;bU!K&nzX#Q26hTG3`3c0Y~Rr#;W$2!k-L!K+iqhI@s3|8^at2nlD{ojES*oyUke0+TDuYa=t*slLi z{{-njOYY+ed}1Zm!9WfYupt=q+{(Q|q$==7 z=bB!-b|``Gv&0UG96L=z|9^afpl?0&;5WjlGP3<8mX)!c=1n-s*it!q#{P6Qd;}*! zjhn!Ua#^a67X{2h`HL^Uh+Z?1%*rxmUOSPJQE>+2NMGg>dXJfs+|rMUGgmLhGk<1K zMRFP!Q*sWI&m=U33blZGH+`(EOsselwocMmxR%SpIMP+_byIQ;sKhdeXr|gVPGG3T zczw{1Msy*7zJ~svGO=#dw5vpPc>}W~U69@#6O^2V-2G5#aawc$;E)8hq0Qq}z}|75 zA^jQkcf75_65YmHOjK9FE23ld9i0|G4N=S{K!Snpg-H+)Lx;)UnWtj3_jVZ8o`JO!Wl=(^{e-uBBj+oJl zir{q`eGdWC+Aps3_$mY}tR`5x2S}T)pm)~P6$ULNLRy2mQT74Vq&w*8<~by?<1o5r z0ocO-6?f$W0QsRC>i-=d9&PpiHu0<)|GV%3u!8?TI@&+-#{bd&!8ZQmMxKxX*B{yk zfMv$xUxZKVN+0^;|Cbawe>G{ZmhAH>!;^~wf7u4FS^PmZh*V^@9FP3u;wseKgf==}t z`=UW@rf@wc=%kv`OtI2~0{7F6Jt$ByUsGmk)MRR1sQ@_1_KV-WH%5tyc_>kukCKdl zhY_V(zncf8S<$Cus;;BvGrf(Dj~S49=i3F^`urp7wtBwS%Ymx7ch^<&;gB9d6`!KX za|Jf;X8Jva?Y_bRL+U(Er>%`T#VolQ<|xO!J>m$(#`|VH4*TIv(T3S~3$z33RMTmT zyAYh~%3a9%&4R7?rpF?6R=a76r+A zgW}d36t>>5j?HNh`~frsiuIhkSl(9ROgw74eqN0Jz(+ZBadid0$rp=d#d-;gJj1(8 zR3a@j7IN(zh4c#09}6(U_oIn&&N9Xq(J7!ZFG&l6vXcjm`=p?YdH>FQL2PBVkRh)x z_sQ!E!i=@Ym{&HEZpXBvx`(Ll395UD>RuzNcoDxRnC{WF{Ln_h9gb;=Y7gUOg?r1{c#A6;^@=b{YD>798*MF9)|b>FHSjfEodAmK5W0umC`JWLbKa&bgOlBux8c zB#dJoaelw|Ie(o6cg#gw4cHSZN!x@p=DLlH`OqqWU7Hr^@@y6M4_D;8M%+|*a$1Nl_Bnv zE`+X~eXGkUhDg|X_arZ4!&zPNp~A`57b$A~nGgp{S43AXZ&q>AnAlqQEOh`ux!)5VCl$Mz1*N8GY#m+*gfMUn`T;xRs$L| zZ0_2bR(Srj>DLqir&=DAkI}A~u2ikL|G6}4ls-)39^MeY_ii+DyeZVsIeVYEbD9sX 
zPu)5GLK47uPTsaFq;1WIt<$!KGxrXgTH783GRIZFo5r@#2Agc{-N=8MtE#OP*#{!GB&0%RlzB&ienv@t+P4PW=79i=3Pz`*)^uKGb9P7KHL{+sAn0!LxVbZob|@{4+UU~!_EX130N{UW z549D}HLQIXa~i+aU@Kfz%9d7WSqbV+23xRR%q{L|N3Bu$gQNSHdSmh#eT`tw;DW$1 zk`QACNZRd?gBhDr*@gdgex%IN&-fAZc9ruPm)}!*q_W6#(kdPhn;3HDCa61V)Y@-O zZOGeVzXdbF;!(bIe!fBIF(?tzzN6$&>@loiebqb6ZL8f$U)sQ~{Dq}lvV}zCdLSKh zN_#W3YWdvfS2?f7 zPB){r%l;^siLl>&T=v)cnm^T(5Ux z>Ye0Md;c_s>_^*C%7-p`nHqfOR zXbR6Eo}jF$wfoa9LNEJpg3aSe(xEmz{PCy#2R~g6Ui<_D4T$~x@U#~_d>AF=91f@l z)3n5B>JQ)rl~^7C9#xUg9~8-Hazy~Fxz>wPGcy8{MR)V=?$>_OPgatJGI!8pKes#I zV7l$rvW%S!<9ddflVMiRkU1H$dWOQuP}DP2PKK(U;ljyqp)>q0e{ne9qbwhKVer^W zOcv)A%J1L9nw>{qeHE2IzW+IT3Uh0JSHC`i*Z)Z~?WdoqOKpD6RtnH)Z!C#x;=1sDyo0GJ>dW+KUTZgKk0BMB4u9_7w}jL_HWdH!_~@h-|Mz6;|9>-&FaNXTep;Sa zx`I7S=c~vUqzt6KE*XpU7mLuY-7M7`T%z>$k44b%#? zyK{7_MXIvwE=(MP!I5&26cwqMDy^zGI$JK{4Cs$|iiC;wxMm>M6uKen6|2lLEcV~Vf(DGII7WYP$z)j(GJjbX z$q(e4nF(laPJ(K*F)BZc8gGfhB?3nZw%FU4>{hB(!d&rdDH01u?CN+i)=HU}(+!x3 zDJYlRMJ3B!uiU@q`80Cmogc15mYpC=Zz&erGieI%$Z3kLSFSYEdLMZzDefJ~uIcbAhSC`Yt9Mt3VQC_Xd zS=Zk5Vc7bcc8B>DtGK`wdKQ|0b*+8i_&kTI>m5&rMW_DqH-%&cUNm)h31LU z)4WxO)>Jd^tXF%Kcw5D^^?Da`s<@@AHWRnSY=7sA?rmwI(OVBYKlFrxjvjh*2`$Wn z{B&@&=C)))0mqD?66Tn395Qw+vZCv`W&B6GhG^pdXvW>q4ZuqMFJSgO{_pr?8~<}7 z&%=k2!vfN{d-5#aR=&xTV&dwB#l@*3(1Ldshkfo=ZJ%|Y_Vs@^5}+09e=w-Ye~t%7 z+xSl#c>;^yNPrZTMZ}Xsi%3d3yVmc%xTjSfVlyT0v-~oPN`h1FxHU%|NT=hRWj~ri zaw#KCw_t9Ix`(>v`6QmRw|F*}$10cPdOaFhEg1O(g0t({+7aVElLF(N9jm;RrRZDK zM@JF?v%YB-;yHZkB3%%zQ8++L3M4dPBxcdCZo9u0yVNrJT^n<%4&pLI^r4cK!D8T| zF>vr0Fd|e8e*N0i&%3*p%j@4&cJ)oY7a9*uoN|9|4~Rd?)3E=e#B5XgAN%`#1v2Lv}Rc@`$ zVpP&C;mpU#JlR=1POCVpdJ&LV^JE&0a03|ueNkCgTh<*<$q{_A9QK>V`I0ZGfI35c z8p!Gd`Q&O-^aumrND|%SfH>@JCFX#N=|nEnjFxdvu98$F9%4-?69p@>sunqTp3T_< z(z|3f17fFqN8W0f4vbyA{&oh)FiiIA!k1U5dW$s(+NA9oXVHrnFEzU?*KG&`7F zdUT|>-BB+>t!lceW4>BlkyR+O@B5VM6b3I+7hXtq%kc4VU?0+bTkZ^+))qG7TLZil zn3!dI6#PRz4f{Xdi*0KEKR5`)e>mCt|J%r8@Baav3FkO?mf?=TI|%GswC&Ko5xvY* zxHP=~BOiXGzmQ>6uxL^wag`|5g<_dulrVqe#A;EKU=rhI2uDP@oSaFfv(g$`i}lQrIu0W 
zh&I5;Y8>92s-}_qHZCqT;7MilA?y~sMK5RNS-zZ4X{%>dj~U&#(kY3Jc%NKjk4B>l zYA$JK;K4yB79NFhff3;t;x`Xw@ggPXYT4;BKE(@M>O?pW|1-Xbzc!(;>BsqUJcr|x z1uif0Jo;5RlEd1fY5qgLNUAeZ&W?VKez-2HWYN!;Ko|nsU(K`5?pZRQ=Q!x|>7RDH zkE7oi!tZWP%JZPr6FmuHCr5j!kD)w@Cub@2ERY${7*z7)Gg$}vqeF}P1IDP{XqdYB z1)OCnCN>S+kpJL6GMS&tY=N=oD1Lelnm~&f>JBFtW+;(lpiq41h7o#(m0QL|oIw}E zSmK$9Q*NHU^*%9b&w12t9`g0#~W$paPO1XYIs=|+ z&p-5{?+c((l4{a-wn^ZAQe;M{G&Dw-mThdlg>;wjKII*CtqOGS87XunH0q zQooAMgts@6^bMP-^q-xJ7upMU%N+Q89s2O^slLak zYOl~$P3h0(rwNQu;4|{fb#Ru0;5W*7wf+6i=|zefrooTTKh%?!lX*G?M1loKg{Gb= zY=8Iz*)_ZA|B+lHfzi}fUI?}1*KxIkgZM3wAtfBii#S;4_eH%Vq$NeDJ;6r0aljkA zhI=2B+PE)cswMuFTjt*&2IjBQ*|w5;5pFj$!pgcKP}eq)YYJ12 ze>!_lyXa(MkN>rF)9v-3%Hr={G^T$!<RWCvV8w5Cff^dgV=+Dz3 z)%Eo0uwIK=U8*Ma-jh(_)pe~1Z=#Z)Ubs76>F#*N{qYK0TPtoaP3?HYO6!r#3^pHZ zhcw_R_K%}+uQa7Qw5MV;hHmJ)%x~R~PO4vmmbb}Duh8pxRdH~*bGLxE?sX5k$!-~Y zFHi;SG`!}Xq+UhOKd7vD6eaKmC{SG1UCPHGUKaLrJiXli%k+O{S!pGMRDL+z)pWAEs;Rdyku-2<5h=cMla(+#I$(5G@ZNZO5 z87bjtdxJo~BW<0ARvvc#h-&x|l7A)(kqP}YP8F2b1ofA4bDuzdYK-Uq0CR|sfc9Sd zb23jn*Pv?-PoQI3y9GN1)BiLa@~lC34matgwm$m3?hiqA!9ShvgH`+gr;q=(e}Kjw z-u{1dvep0F$fNaq)Uqu(l#!90qAJW;dU`g$mR&&9Yp$aT?>Mzh={$Y!n1|1=AHbpm z_8s^~7Odi>si*^^XcrCX@)<9032zoFqcvFP?qw;4R?<5QY3HEI5K0H=axSfwF=Njv zJr(_nH>`M5CwJ2^-UQ3tXOUmX^n28KZz-=2Z0&VrG)sEnN=hqtqf{7X#c6`Fa=FuF z%_l*t9U1Ftc&1XLX`YmHA5M4vJP}EH&o`<@eIO|c9FTdLP}elH4XFvMZFpT-ZyaA< zdaWw$R^gnxVViJG!K6B0sugmN>`48%VP$R{J>be9buheWfxLa`7zu2~z(|UL*UcFK z1;`bumEEF;esFte5ETb*FU7Tapk5CAJ}Z(%ev$lBbeoVuUe7fy9n)o>d&Bph`uUFv zgm=mPe-!_}f3S`Ju#u->{K=B%u2cY4(0@mRn*aCX{Vn~snaAjP(~ygzH&Sb&v5mA= zn8czTH8TQsqub1iPU1%h^3VpHM^o4_iv{{n#EoBTWM{HR_oDGq^v`WoX(R@;W>sFW zL!>MvKO{L5a#P&D-x0#^^ua>a+DFT?=1a8F9wC(otyNww=ar!U>_4CVFKTsy7cB^D0i@<-%oyV7LS&5HSC9?_hJgpnHk?UN zm#D()-eD3{K*Om=&`CAR!M9YZdJGO}ajNudS7Raab;#r*^q2VRO;Vo2;3nG3lIQ{! 
zl1?64l*^?zR-(#?l%ECy8DGe;NyJq^AMh;&V9jAv7m=dYM|N!^HIvG^##}s-*&@|n z=Gi^nB_=P9A{D^1eQtLqQOx5c$>>4{&+;incohyNpD$%jt09ct6@p$$l6ZRe{VVQ_ z?y8snCW^Q~LZPt4?cti+57hYlhG!st?~Y`NO_vj`X{45x>VFohC5S9BrGhTQx7Veu<2&=vk%Aw8e9%xYXNq zkM|{@sx94J8XJ8^D9tQ0G|?ERKjpfJixgeT^`mE{?N;tg=>nIN9L<58%&7Xojh*I- zzh8Sl#Nm7+I|LfOy!p;wh~y7uHjCb=rB^RFjQV~1;F%@WeP){I<0Fw?sjX+W0tMHEok5r>Rmd^asPOqX46sG-y!XP4O&|D zz8}3u{AuP|IbuCwKR3OzeYVf`**@E6`)r@>vwgPD_Srt$XZviQ?X!Kh&p*@i{{ug- JkC6b-0s!Lprzijb diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index d25e9a0f6..4d1ab32e6 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json @@ -8,7 +8,9 @@ "bytes": 31129, "compilers": [ "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11", + "0.15.12" ], "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index b57b9fd09..6b675a80e 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -8,7 +8,9 @@ "bytes": 2179, "compilers": [ "0.15.9", - "0.15.10" + "0.15.10", + "0.15.11", + "0.15.12" ], "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 17df7ab86..b9b272613 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -296,9 +296,13 @@ authenticated auth = case auth.payload of { published = Map.delete payload.version prev.published , unpublished = Map.insert payload.version unpublished prev.unpublished } + -- Delete the manifest entry first so ManifestIndex.delete can fail if other + -- packages still depend on this version. This way, we detect dependency + -- violations before performing any irreversible side effects like deleting + -- the tarball from storage. 
+ Registry.deleteManifest payload.name payload.version Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated - Registry.deleteManifest payload.name payload.version Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do @@ -479,7 +483,7 @@ publish maybeLegacyIndex payload = do Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest payload.ref config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do Log.notice $ Array.fold @@ -506,7 +510,7 @@ publish maybeLegacyIndex payload = do ] Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." - let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest + let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location payload.ref legacyManifest Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" @@ -772,7 +776,7 @@ publish maybeLegacyIndex payload = do Storage.upload (un Manifest manifest).name (un Manifest manifest).version tarballPath Log.debug $ "Adding the new version " <> Version.print (un Manifest manifest).version <> " to the package metadata file." 
- let newPublishedVersion = { hash, ref: payload.ref, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } + let newPublishedVersion = { hash, compilers: NonEmptyArray.singleton payload.compiler, publishedTime, bytes } let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata) diff --git a/app/src/App/CLI/Git.purs b/app/src/App/CLI/Git.purs index ac9ffc398..baf513748 100644 --- a/app/src/App/CLI/Git.purs +++ b/app/src/App/CLI/Git.purs @@ -214,8 +214,8 @@ gitCommit { address: { owner, repo }, committer, commit, message } cwd = Except. -- Git will error if we try to commit without any changes actually staged, -- so the below command lists file paths (--name-only) that have changed -- between the index and current HEAD (--cached), only including files that - -- have been added or modified (--diff-filter=AM). - staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AM" ] \error -> + -- have been added, modified, or deleted (--diff-filter=AMD). + staged <- exec [ "diff", "--name-only", "--cached", "--diff-filter=AMD" ] \error -> "Failed to check whether any changes are staged " <> inRepoErr error -- If there are no staged files, then we have nothing to commit. 
diff --git a/app/src/App/Effect/Archive.purs b/app/src/App/Effect/Archive.purs index 8c26092ad..17ca0675e 100644 --- a/app/src/App/Effect/Archive.purs +++ b/app/src/App/Effect/Archive.purs @@ -35,13 +35,13 @@ import Node.Buffer as Buffer import Node.FS.Aff as FS.Aff import Node.Path as Path import Registry.App.CLI.Tar as Tar -import Registry.Foreign.FSExtra as FS.Extra import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Legacy.Types (RawVersion(..)) import Registry.Constants as Constants +import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit as Octokit import Registry.Foreign.Tar as Foreign.Tar import Registry.Internal.Format as Internal.Format diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index cd7880602..873162264 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -285,6 +285,16 @@ pacchettibottiED25519Pub = EnvKey githubEventPath :: EnvKey FilePath githubEventPath = EnvKey { key: "GITHUB_EVENT_PATH", decode: pure } +-- Test environment variables (used by E2E tests) + +-- | Root directory for test state (database, scratch repos, etc). +stateDir :: EnvKey FilePath +stateDir = EnvKey { key: "STATE_DIR", decode: pure } + +-- | Directory containing git repository fixtures for tests. 
+repoFixturesDir :: EnvKey FilePath +repoFixturesDir = EnvKey { key: "REPO_FIXTURES_DIR", decode: pure } + decodeDatabaseUrl :: String -> Either String DatabaseUrl decodeDatabaseUrl input = do let prefix = "sqlite:" diff --git a/app/src/App/Effect/Registry.purs b/app/src/App/Effect/Registry.purs index bd406ff25..48fbdf4a8 100644 --- a/app/src/App/Effect/Registry.purs +++ b/app/src/App/Effect/Registry.purs @@ -388,6 +388,7 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Right Git.Changed -> do Log.info "Registry repo has changed, clearing metadata cache..." + Cache.delete _registryCache AllMetadata resetFromDisk WriteMetadata name metadata reply -> map (map reply) Except.runExcept do @@ -501,10 +502,9 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << Log.info $ "Mirroring legacy package set " <> name <> " to the legacy package sets repo" manifests <- Except.rethrow =<< handle env (ReadAllManifests identity) - metadata <- Except.rethrow =<< handle env (ReadAllMetadata identity) Log.debug $ "Converting package set..." - converted <- case Legacy.PackageSet.convertPackageSet manifests metadata set of + converted <- case Legacy.PackageSet.convertPackageSet manifests set of Left error -> Except.throw $ "Failed to convert package set " <> name <> " to a legacy package set: " <> error Right converted -> pure converted @@ -733,17 +733,30 @@ handle env = Cache.interpret _registryCache (Cache.handleMemory env.cacheRef) << result <- Git.gitPull { address, pullMode: env.pull } path pure result - now <- nowUTC - debouncers <- Run.liftEffect $ Ref.read env.debouncer - case Map.lookup path debouncers of - -- We will be behind the upstream by at most this amount of time. - Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> - pure $ Right Git.NoChange - -- If we didn't debounce, then we should fetch the upstream. - _ -> do + -- Check if the repo directory exists before consulting the debouncer. 
+ -- This ensures that if the scratch directory is deleted (e.g., for test + -- isolation), we always re-clone rather than returning a stale NoChange. + repoExists <- Run.liftAff $ Aff.attempt (FS.Aff.stat path) + case repoExists of + Left _ -> do + -- Repo doesn't exist, bypass debouncer entirely and clone fresh result <- fetchLatest + now <- nowUTC Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer pure result + Right _ -> do + -- Repo exists, check debouncer + now <- nowUTC + debouncers <- Run.liftEffect $ Ref.read env.debouncer + case Map.lookup path debouncers of + -- We will be behind the upstream by at most this amount of time. + Just prev | DateTime.diff now prev <= Duration.Minutes 1.0 -> + pure $ Right Git.NoChange + -- If we didn't debounce, then we should fetch the upstream. + _ -> do + result <- fetchLatest + Run.liftEffect $ Ref.modify_ (Map.insert path now) env.debouncer + pure result -- | Commit the file(s) indicated by the commit key with a commit message. 
commit :: CommitKey -> String -> Run _ (Either String GitResult) diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index bd3e4c6b2..e3eb353aa 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -29,7 +29,6 @@ import Node.Process as Process import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth - import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env diff --git a/app/src/App/Legacy/Manifest.purs b/app/src/App/Legacy/Manifest.purs index 65aad78ec..8d997342f 100644 --- a/app/src/App/Legacy/Manifest.purs +++ b/app/src/App/Legacy/Manifest.purs @@ -59,13 +59,13 @@ type LegacyManifest = , dependencies :: Map PackageName Range } -toManifest :: PackageName -> Version -> Location -> LegacyManifest -> Manifest -toManifest name version location legacy = do +toManifest :: PackageName -> Version -> Location -> String -> LegacyManifest -> Manifest +toManifest name version location ref legacy = do let { license, description, dependencies } = patchLegacyManifest name version legacy let includeFiles = Nothing let excludeFiles = Nothing let owners = Nothing - Manifest { name, version, location, license, description, dependencies, includeFiles, excludeFiles, owners } + Manifest { name, version, location, ref, license, description, dependencies, includeFiles, excludeFiles, owners } -- | Attempt to retrieve a license, description, and set of dependencies from a -- | PureScript repo that does not have a Registry-supported manifest, but does diff --git a/app/src/App/Legacy/PackageSet.purs b/app/src/App/Legacy/PackageSet.purs index eb1ce8021..62b718d7c 100644 --- a/app/src/App/Legacy/PackageSet.purs +++ b/app/src/App/Legacy/PackageSet.purs @@ -102,8 +102,8 @@ printPscTag (PscTag { compiler, date }) = , Format.DateTime.format pscDateFormat (DateTime date bottom) ] -convertPackageSet :: 
ManifestIndex -> Map PackageName Metadata -> PackageSet -> Either String ConvertedLegacyPackageSet -convertPackageSet index metadataMap (PackageSet { compiler, packages, published, version }) = do +convertPackageSet :: ManifestIndex -> PackageSet -> Either String ConvertedLegacyPackageSet +convertPackageSet index (PackageSet { compiler, packages, published, version }) = do converted <- case separate $ mapWithIndex convertPackage packages of { left, right } | Map.isEmpty left -> Right right { left } -> do @@ -130,17 +130,14 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versions <- note noIndexPackageError $ Map.lookup packageName $ ManifestIndex.toMap index Manifest manifest <- note noIndexVersionError $ Map.lookup packageVersion versions - Metadata metadata <- note noMetadataPackageError $ Map.lookup packageName metadataMap - { ref } <- note noMetadataVersionError $ Map.lookup packageVersion metadata.published - - repo <- case metadata.location of + repo <- case manifest.location of GitHub { owner, repo, subdir: Nothing } -> Right $ "https://github.com/" <> owner <> "/" <> repo <> ".git" Git { url, subdir: Nothing } -> Right url GitHub _ -> Left usesSubdirError Git _ -> Left usesSubdirError pure - { version: RawVersion ref + { version: RawVersion manifest.ref , dependencies: Array.fromFoldable $ Map.keys $ manifest.dependencies , repo } @@ -149,8 +146,6 @@ convertPackageSet index metadataMap (PackageSet { compiler, packages, published, versionStr = Version.print packageVersion noIndexPackageError = "No registry index entry found for " <> nameStr noIndexVersionError = "Found registry index entry for " <> nameStr <> " but none for version " <> versionStr - noMetadataPackageError = "No metadata entry found for " <> nameStr - noMetadataVersionError = "Found metadata entry for " <> nameStr <> " but no published version for " <> versionStr usesSubdirError = "Package " <> nameStr <> " uses the 'subdir' key, which is not supported for 
legacy package sets." printDhall :: LegacyPackageSet -> String diff --git a/app/src/App/Manifest/SpagoYaml.purs b/app/src/App/Manifest/SpagoYaml.purs index 1d701e57c..66ffa1c48 100644 --- a/app/src/App/Manifest/SpagoYaml.purs +++ b/app/src/App/Manifest/SpagoYaml.purs @@ -27,9 +27,10 @@ import Registry.Range (Range) import Registry.Range as Range import Registry.Version as Version --- | Attempt to convert a spago.yaml file to a Manifest -spagoYamlToManifest :: SpagoYaml -> Either String Manifest -spagoYamlToManifest config = do +-- | Attempt to convert a spago.yaml file to a Manifest. The ref parameter is +-- | the Git reference (tag or commit) used to fetch this version's source. +spagoYamlToManifest :: String -> SpagoYaml -> Either String Manifest +spagoYamlToManifest ref config = do package@{ name, description, dependencies: spagoDependencies } <- note "No 'package' key found in config." config.package publish@{ version, license, owners } <- note "No 'publish' key found under the 'package' key in config." package.publish location <- note "No 'location' key found under the 'publish' key in config." 
publish.location @@ -43,6 +44,7 @@ spagoYamlToManifest config = do , description , license , location + , ref , owners , includeFiles , excludeFiles diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 302332346..1b2ea7b4a 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -12,12 +12,12 @@ import Node.Path as Path import Registry.API.V1 (JobId, Route) import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.CLI.Git as Git +import Registry.App.Effect.Archive (ARCHIVE) +import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db -import Registry.App.Effect.Archive (ARCHIVE) -import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) diff --git a/app/test/App/API.purs b/app/test/App/API.purs index a8431a9bb..d60bcab3f 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -142,7 +142,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10", "0.15.11", "0.15.12" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -191,7 +191,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." 
Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10" ] + let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10", "0.15.11", "0.15.12" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') diff --git a/app/test/App/Legacy/PackageSet.purs b/app/test/App/Legacy/PackageSet.purs index 414b09a57..2d4a7a2dc 100644 --- a/app/test/App/Legacy/PackageSet.purs +++ b/app/test/App/Legacy/PackageSet.purs @@ -2,8 +2,6 @@ module Test.Registry.App.Legacy.PackageSet (spec) where import Registry.App.Prelude -import Data.Array.NonEmpty as NonEmptyArray -import Data.DateTime (DateTime(..)) import Data.Either as Either import Data.Map as Map import Data.Set as Set @@ -14,7 +12,6 @@ import Registry.App.Legacy.PackageSet as Legacy.PackageSet import Registry.App.Legacy.Types (legacyPackageSetCodec) import Registry.ManifestIndex as ManifestIndex import Registry.PackageName as PackageName -import Registry.Sha256 as Sha256 import Registry.Test.Assert as Assert import Registry.Test.Utils as Utils import Registry.Version as Version @@ -93,7 +90,7 @@ packageSet = PackageSet convertedPackageSet :: ConvertedLegacyPackageSet convertedPackageSet = - case Legacy.PackageSet.convertPackageSet index metadata packageSet of + case Legacy.PackageSet.convertPackageSet index packageSet of Left err -> unsafeCrashWith err Right value -> value where @@ -104,13 +101,6 @@ convertedPackageSet = , mkManifest prelude [] ] - metadata = Map.fromFoldable - [ unsafeMetadataEntry assert - , unsafeMetadataEntry console - , unsafeMetadataEntry effect - , unsafeMetadataEntry prelude - ] - legacyPackageSetJson :: String legacyPackageSetJson = """{ @@ -201,23 +191,3 @@ mkManifest (Tuple name version) deps = do 
(PackageName.print name) (LenientVersion.print version) (map (bimap PackageName.print (LenientVersion.version >>> toRange)) deps) - -unsafeMetadataEntry :: Tuple PackageName LenientVersion -> Tuple PackageName Metadata -unsafeMetadataEntry (Tuple name version) = do - let - published = - { ref: LenientVersion.raw version - , hash: unsafeFromRight $ Sha256.parse "sha256-gb24ZRec6mgR8TFBVR2eIh5vsMdhuL+zK9VKjWP74Cw=" - , bytes: 0.0 - , compilers: NonEmptyArray.singleton (Utils.unsafeVersion "0.15.2") - , publishedTime: DateTime (Utils.unsafeDate "2022-07-07") bottom - } - - metadata = Metadata - { location: GitHub { owner: "purescript", repo: "purescript-" <> PackageName.print name, subdir: Nothing } - , owners: Nothing - , published: Map.singleton (LenientVersion.version version) published - , unpublished: Map.empty - } - - Tuple name metadata diff --git a/app/test/App/Manifest/SpagoYaml.purs b/app/test/App/Manifest/SpagoYaml.purs index 973af0a99..52174063c 100644 --- a/app/test/App/Manifest/SpagoYaml.purs +++ b/app/test/App/Manifest/SpagoYaml.purs @@ -19,6 +19,6 @@ spec = do config <- SpagoYaml.readSpagoYaml (Path.concat [ fixturesPath, path ]) >>= case _ of Left err -> Aff.throwError $ Aff.error err Right config -> pure config - case SpagoYaml.spagoYamlToManifest config of + case SpagoYaml.spagoYamlToManifest "v1.0.0" config of Left err -> Assert.fail $ path <> " failed: " <> err Right _ -> pure unit diff --git a/flake.nix b/flake.nix index 610221dd7..bbec41157 100644 --- a/flake.nix +++ b/flake.nix @@ -198,7 +198,8 @@ # Integration test - exercises the server API integration = import ./nix/test/integration.nix { - inherit pkgs spagoSrc testEnv; + inherit pkgs spagoSrc; + testSupport = testEnv; }; # VM smoke test - verifies deployment without full API testing @@ -232,11 +233,19 @@ nodejs jq dbmate + sqlite purs spago purs-tidy-unstable purs-backend-es-unstable process-compose + + # E2E test runner script - uses same fixed test environment as test-env + 
(writeShellScriptBin "spago-test-e2e" '' + set -euo pipefail + ${testEnv.envToExports testEnv.testEnv} + exec spago run -p registry-app-e2e + '') ]; }; } diff --git a/lib/fixtures/manifests/aff-5.1.2.json b/lib/fixtures/manifests/aff-5.1.2.json index 22684f05c..77bb331dd 100644 --- a/lib/fixtures/manifests/aff-5.1.2.json +++ b/lib/fixtures/manifests/aff-5.1.2.json @@ -6,6 +6,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-aff" }, + "ref": "v5.1.2", "dependencies": { "datetime": ">=4.0.0 <5.0.0", "effect": ">=2.0.0 <3.0.0", diff --git a/lib/fixtures/manifests/mysql-4.1.1.json b/lib/fixtures/manifests/mysql-4.1.1.json index 6f9703b61..e0e8c70fe 100644 --- a/lib/fixtures/manifests/mysql-4.1.1.json +++ b/lib/fixtures/manifests/mysql-4.1.1.json @@ -6,6 +6,7 @@ "githubOwner": "oreshinya", "githubRepo": "purescript-mysql" }, + "ref": "v4.1.1", "dependencies": { "aff": ">=5.0.2 <6.0.0", "js-date": ">=6.0.0 <7.0.0", diff --git a/lib/fixtures/manifests/prelude-4.1.1.json b/lib/fixtures/manifests/prelude-4.1.1.json index 3dd47411c..56ac6db20 100644 --- a/lib/fixtures/manifests/prelude-4.1.1.json +++ b/lib/fixtures/manifests/prelude-4.1.1.json @@ -7,6 +7,7 @@ "githubOwner": "purescript", "githubRepo": "purescript-prelude" }, + "ref": "v4.1.1", "owners": [ { "keytype": "ed-25519", diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index fee64ef3c..862025980 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -268,6 +268,7 @@ jobInfo = case _ of newtype JobId = JobId String derive instance Newtype JobId _ +derive newtype instance Eq JobId jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string diff --git a/lib/src/Manifest.purs b/lib/src/Manifest.purs index d660b459b..49bb62f2c 100644 --- a/lib/src/Manifest.purs +++ b/lib/src/Manifest.purs @@ -48,6 +48,7 @@ newtype Manifest = Manifest , version :: Version , license :: License , location :: Location + , ref :: String , owners :: Maybe (NonEmptyArray Owner) , description :: Maybe 
String , includeFiles :: Maybe (NonEmptyArray NonEmptyString) @@ -77,6 +78,7 @@ codec = Profunctor.wrapIso Manifest $ CJ.named "Manifest" $ CJ.object $ CJ.recordProp @"license" License.codec $ CJ.recordPropOptional @"description" (Internal.Codec.limitedString 300) $ CJ.recordProp @"location" Location.codec + $ CJ.recordProp @"ref" CJ.string $ CJ.recordPropOptional @"owners" (CJ.Common.nonEmptyArray Owner.codec) $ CJ.recordPropOptional @"includeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) $ CJ.recordPropOptional @"excludeFiles" (CJ.Common.nonEmptyArray CJ.Common.nonEmptyString) diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index c54bed31e..3235661de 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -63,17 +63,11 @@ codec = Profunctor.wrapIso Metadata $ CJ.named "Metadata" $ CJ.object $ CJ.record -- | Metadata about a published package version. --- | --- | NOTE: The `ref` field is UNSPECIFIED and WILL BE REMOVED in the future. Do --- | not rely on its presence! type PublishedMetadata = { bytes :: Number , compilers :: NonEmptyArray Version , hash :: Sha256 , publishedTime :: DateTime - - -- UNSPECIFIED: Will be removed in the future. - , ref :: String } publishedMetadataCodec :: CJ.Codec PublishedMetadata @@ -82,7 +76,6 @@ publishedMetadataCodec = CJ.named "PublishedMetadata" $ CJ.Record.object , compilers: CJ.Common.nonEmptyArray Version.codec , hash: Sha256.codec , publishedTime: Internal.Codec.iso8601DateTime - , ref: CJ.string } -- | Metadata about an unpublished package version. 
diff --git a/lib/test/Registry/ManifestIndex.purs b/lib/test/Registry/ManifestIndex.purs index 18e0863ef..1fb7e13a6 100644 --- a/lib/test/Registry/ManifestIndex.purs +++ b/lib/test/Registry/ManifestIndex.purs @@ -151,9 +151,9 @@ spec = do contextEntry :: String contextEntry = - """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} -{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"dependencies":{}} + """{"name":"context","version":"0.0.1","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.1","dependencies":{}} +{"name":"context","version":"0.0.2","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.2","dependencies":{}} +{"name":"context","version":"0.0.3","license":"MIT","location":{"githubOwner":"Fresheyeball","githubRepo":"purescript-owner"},"ref":"v0.0.3","dependencies":{}} """ testIndex @@ -242,6 +242,7 @@ manifestCodec' = Profunctor.dimap to from $ CJ.named "ManifestRep" $ CJ.Record.o { url: "https://github.com/purescript/purescript-" <> PackageName.print name <> ".git" , subdir: Nothing } + , ref: "v" <> Version.print version , description: Nothing , owners: Nothing , includeFiles: Nothing diff --git a/lib/test/Registry/Metadata.purs b/lib/test/Registry/Metadata.purs index 02e12c053..8daffc02c 100644 --- a/lib/test/Registry/Metadata.purs +++ b/lib/test/Registry/Metadata.purs @@ -29,8 +29,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-LPRUC8ozZc7VCeRhKa4CtSgAfNqgAoVs2lH+7mYEcTk=", - "publishedTime": "2021-03-27T10:03:46.000Z", - "ref": "v0.1.0" + "publishedTime": "2021-03-27T10:03:46.000Z" }, "0.2.1": { "bytes": 3365, @@ -38,8 +37,7 @@ recordStudio 
= "0.13.0" ], "hash": "sha256-ySKKKp3rUJa4UmYTZshaOMO3jE+DW7IIqKJsurA2PP8=", - "publishedTime": "2022-05-15T10:51:57.000Z", - "ref": "v0.2.1" + "publishedTime": "2022-05-15T10:51:57.000Z" }, "1.0.0": { "bytes": 5155, @@ -47,8 +45,7 @@ recordStudio = "0.13.0" ], "hash": "sha256-0iMF8Rq88QBGuxTNrh+iuruw8l5boCP6J2JWBpQ4b7w=", - "publishedTime": "2022-11-03T17:30:28.000Z", - "ref": "v1.0.0" + "publishedTime": "2022-11-03T17:30:28.000Z" }, "1.0.1": { "bytes": 5635, @@ -57,8 +54,7 @@ recordStudio = "0.13.1" ], "hash": "sha256-Xm9pwDBHW5zYUEzxfVSgjglIcwRI1gcCOmcpyQ/tqeY=", - "publishedTime": "2022-11-04T12:21:09.000Z", - "ref": "v1.0.1" + "publishedTime": "2022-11-04T12:21:09.000Z" } }, "unpublished": { diff --git a/lib/test/Registry/Operation/Validation.purs b/lib/test/Registry/Operation/Validation.purs index cf474f103..955b08164 100644 --- a/lib/test/Registry/Operation/Validation.purs +++ b/lib/test/Registry/Operation/Validation.purs @@ -15,7 +15,8 @@ import Registry.Manifest (Manifest(..)) import Registry.Metadata (Metadata(..)) import Registry.Operation.Validation (UnpublishError(..), forbiddenModules, getUnresolvedDependencies, validatePursModule, validateUnpublish) import Registry.Test.Assert as Assert -import Registry.Test.Utils (defaultHash, defaultLocation, fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) +import Registry.Test.Fixtures (defaultHash, defaultLocation) +import Registry.Test.Utils (fromJust, unsafeDateTime, unsafeManifest, unsafePackageName, unsafeVersion) import Test.Spec (Spec) import Test.Spec as Spec @@ -66,7 +67,7 @@ spec = do inRange = unsafeDateTime "2022-12-11T12:00:00.000Z" compilers = NonEmptyArray.singleton (unsafeVersion "0.13.0") - publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers, ref: "" } + publishedMetadata = { bytes: 100.0, hash: defaultHash, publishedTime: outOfRange, compilers } metadata = Metadata { location: defaultLocation diff --git a/nix/test/config.nix 
b/nix/test/config.nix index fdd24a537..77b5cfaff 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -19,22 +19,21 @@ let ports = { server = serverPort; github = serverPort + 1; - bucket = serverPort + 2; - s3 = serverPort + 3; - pursuit = serverPort + 4; - healthchecks = serverPort + 5; + # Single storage WireMock instance for bucket + s3 + pursuit (merged for stateful scenarios) + storage = serverPort + 2; + healthchecks = serverPort + 3; }; - # Default state directory for tests - defaultStateDir = "/var/lib/registry-server"; + # Fixed state directory for tests - not configurable to avoid mismatch between + # test-env and spago-test-e2e shells. The test-env script cleans this up on start. + stateDir = "/tmp/registry-test-env"; # Mock service URLs for test environment + # All storage-related APIs (s3, bucket, pursuit) now share a single WireMock instance mockUrls = { registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; - s3 = "http://localhost:${toString ports.s3}"; - bucket = "http://localhost:${toString ports.bucket}"; - pursuit = "http://localhost:${toString ports.pursuit}"; + storage = "http://localhost:${toString ports.storage}"; healthchecks = "http://localhost:${toString ports.healthchecks}"; }; @@ -48,16 +47,20 @@ let }; # Complete test environment - starts with .env.example defaults which include - # mock secrets, then overrides external services with mock URLs. The DATABASE_URL - # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are - # implemented in the script directly. + # mock secrets, then overrides external services with mock URLs. + # All storage-related APIs share a single WireMock instance for stateful scenarios. 
testEnv = envDefaults // { + # State directory and derived paths + STATE_DIR = stateDir; + REPO_FIXTURES_DIR = "${stateDir}/repo-fixtures"; + DATABASE_URL = "sqlite:${stateDir}/db/registry.sqlite3"; # Mock service URLs (override production endpoints) REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; - S3_API_URL = mockUrls.s3; - S3_BUCKET_URL = mockUrls.bucket; - PURSUIT_API_URL = mockUrls.pursuit; + # All storage-related APIs share a single base URL for stateful scenarios + S3_API_URL = mockUrls.storage; + S3_BUCKET_URL = mockUrls.storage; + PURSUIT_API_URL = mockUrls.storage; HEALTHCHECKS_URL = mockUrls.healthchecks; PACCHETTIBOTTI_ED25519_PUB = testKeys.public; PACCHETTIBOTTI_ED25519 = testKeys.private; @@ -140,6 +143,30 @@ let }; }; + # Console package helpers (console@6.1.0) + consoleBase64Response = + fileName: + base64Response { + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + inherit fileName; + filePath = rootPath + "/app/fixtures/github-packages/console-6.1.0/${fileName}"; + }; + + console404Response = fileName: { + request = { + method = "GET"; + url = "/repos/purescript/purescript-console/contents/${fileName}?ref=v6.1.0"; + }; + response = { + status = 404; + headers."Content-Type" = "application/json"; + jsonBody = { + message = "Not Found"; + documentation_url = "https://docs.github.com/rest/repos/contents#get-repository-content"; + }; + }; + }; + # GitHub API wiremock mappings githubMappings = [ (effectBase64Response "bower.json") @@ -149,6 +176,13 @@ let (effect404Response "spago.dhall") (effect404Response "purs.json") (effect404Response "package.json") + # Console package (console@6.1.0) + (consoleBase64Response "bower.json") + (consoleBase64Response "LICENSE") + (console404Response "spago.yaml") + (console404Response "spago.dhall") + (console404Response "purs.json") + (console404Response "package.json") { request = { method = "GET"; @@ -216,97 +250,452 @@ let } ]; - # S3 API wiremock mappings 
(serves package tarballs) - s3Mappings = [ - { - request = { - method = "GET"; - url = "/effect/4.0.0.tar.gz"; - }; - response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "effect-4.0.0.tar.gz"; - }; - } - { - request = { - method = "GET"; - url = "/prelude/6.0.1.tar.gz"; - }; - response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "prelude-6.0.1.tar.gz"; - }; - } - { - request = { - method = "GET"; - url = "/type-equality/4.0.1.tar.gz"; - }; - response = { - status = 200; - headers."Content-Type" = "application/octet-stream"; - bodyFileName = "type-equality-4.0.1.tar.gz"; - }; - } - ]; + # Fixture directory for storage (tarballs) + storageFixturesDir = rootPath + "/app/fixtures/registry-storage"; - s3Files = [ - { - name = "effect-4.0.0.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/effect-4.0.0.tar.gz"; - } - { - name = "prelude-6.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/prelude-6.0.1.tar.gz"; - } + # Parse tarball filename into package name and version + # e.g. "effect-4.0.0.tar.gz" -> { name = "effect"; version = "4.0.0"; fileName = "effect-4.0.0.tar.gz"; } + # e.g. "type-equality-4.0.1.tar.gz" -> { name = "type-equality"; version = "4.0.1"; ... 
} + parseTarball = + fileName: + let + base = lib.removeSuffix ".tar.gz" fileName; + parts = lib.splitString "-" base; + # Version is the last part; name is everything before + version = lib.last parts; + name = lib.concatStringsSep "-" (lib.init parts); + in { - name = "type-equality-4.0.1.tar.gz"; - path = rootPath + "/app/fixtures/registry-storage/type-equality-4.0.1.tar.gz"; - } - ]; + inherit name version fileName; + }; + + # List all .tar.gz files in storage fixtures + storageTarballs = map parseTarball ( + builtins.filter (f: lib.hasSuffix ".tar.gz" f) ( + builtins.attrNames (builtins.readDir storageFixturesDir) + ) + ); + + # Metadata fixtures directory (to determine which packages are "published") + metadataFixturesDir = rootPath + "/app/fixtures/registry/metadata"; + metadataFiles = builtins.attrNames (builtins.readDir metadataFixturesDir); + publishedPackageNames = map (f: lib.removeSuffix ".json" f) metadataFiles; + + # ============================================================================ + # UNIFIED STORAGE MAPPINGS WITH WIREMOCK SCENARIOS + # ============================================================================ + # + # All storage-related APIs (S3 downloads, bucket uploads, Pursuit) are now served + # by a single WireMock instance with stateful scenarios. This enables proper + # read-after-write semantics - when a test publishes a package, subsequent + # downloads will succeed. 
+ # + # Scenario design: + # - One scenario per package-version (e.g., "effect-4.0.0") + # - WireMock scenarios always start at state "Started" + # - Published packages (has metadata): "Started" means Present (tarball available) + # - After DELETE, transitions to "Deleted" state (404 on GET) + # - Unpublished packages (no metadata): "Started" means Absent (tarball 404) + # - After PUT upload, transitions to "Present" state + # - After DELETE, transitions to "Deleted" state (404 on GET) + # + # State machine: + # Published: Started(Present) --DELETE--> Deleted(404) + # Unpublished: Started(404) --PUT--> Present(200) --DELETE--> Deleted(404) + # + # Reset between tests via POST /__admin/scenarios/reset + # ============================================================================ - # S3 Bucket API wiremock mappings (handles upload/list operations) + # Generate S3 GET mappings with scenario support + s3Mappings = lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + tarPath = "/${pkg.name}/${pkg.version}.tar.gz"; + in + if isPublished then + # Published package: tarball available in Started state, 404 in Deleted state + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: 404 in Started, 200 in Present, 404 in Deleted + [ + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + 
status = 200; + headers."Content-Type" = "application/octet-stream"; + bodyFileName = pkg.fileName; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = tarPath; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs; + + # Generate s3Files list from fixtures (tarballs for bodyFileName references) + s3Files = map (pkg: { + name = pkg.fileName; + path = storageFixturesDir + "/${pkg.fileName}"; + }) storageTarballs; + + # Generate bucket PUT/DELETE/listObjects mappings with scenario support # The AWS SDK uses virtual-hosted style URLs by default, where the bucket name # is in the hostname (purescript-registry.localhost:9002) and the path contains - # only the key. For example: GET /?prefix=effect/ instead of GET /purescript-registry?prefix=effect/ - bucketMappings = [ - # List objects - virtual-hosted style (bucket in hostname, path is just /?prefix=...) - { - request = { - method = "GET"; - urlPattern = "/\\?.*prefix=.*"; - }; - response = { - status = 200; - headers."Content-Type" = "application/xml"; - body = ''prelude/6.0.1.tar.gz16298"abc123"type-equality/4.0.1.tar.gz2184"def456"''; - }; - } - # Upload effect@4.0.0 - virtual-hosted style (path is /effect/4.0.0.tar.gz) - { - request = { - method = "PUT"; - urlPattern = "/effect/4\\.0\\.0\\.tar\\.gz.*"; - }; - response = { - status = 200; - headers."ETag" = ''"abc123"''; - headers."Content-Type" = "application/xml"; - body = ""; - }; - } - # Fail upload for prelude (to test error handling) - { - request = { - method = "PUT"; - urlPattern = "/prelude/6\\.0\\.1\\.tar\\.gz.*"; - }; - response.status = 500; - } - ]; + # only the key. 
+ bucketMappings = + # Generate per-package listObjects mappings with scenario support + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedName = lib.replaceStrings [ "-" ] [ "\\-" ] pkg.name; + listUrlPattern = "/\\?.*prefix=${escapedName}.*"; + presentContents = ''${pkg.name}/${pkg.version}.tar.gz1000"abc123"''; + in + if isPublished then + # Published package: listObjects returns contents in Started, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: listObjects returns empty in Started, contents in Present, empty in Deleted + [ + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = "${presentContents}"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + urlPattern = listUrlPattern; + }; + response = { + status = 200; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ ( + # Generate PUT/DELETE mappings for all packages with scenario support + lib.concatMap ( + pkg: + let + scenario = 
"${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + escapedVersion = lib.replaceStrings [ "." ] [ "\\." ] pkg.version; + urlPattern = "/${pkg.name}/${escapedVersion}\\.tar\\.gz.*"; + in + if isPublished then + # Published package: PUT fails (already exists), DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already published"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in Started state (package exists) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: PUT succeeds and transitions to Present, DELETE transitions to Deleted + [ + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 200; + headers."ETag" = ''"abc123"''; + headers."Content-Type" = "application/xml"; + body = ""; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + newScenarioState = "Present"; + } + # PUT in Present state fails (already uploaded) + { + request = { + method = "PUT"; + urlPattern = urlPattern; + }; + response = { + status = 500; + body = "Package already uploaded"; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + # DELETE in Started state fails (doesn't exist yet) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + # DELETE in 
Present state (after publish) transitions to Deleted + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 204; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + newScenarioState = "Deleted"; + } + # DELETE in Deleted state fails (already deleted) + { + request = { + method = "DELETE"; + urlPattern = urlPattern; + }; + response = { + status = 404; + body = "Not Found"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs + ); + + # Pursuit API mappings with scenario support + pursuitMappings = + (lib.concatMap ( + pkg: + let + scenario = "${pkg.name}-${pkg.version}"; + isPublished = builtins.elem pkg.name publishedPackageNames; + versionsUrl = "/packages/purescript-${pkg.name}/available-versions"; + publishedVersionsBody = ''[["${pkg.version}","https://pursuit.purescript.org/packages/purescript-${pkg.name}/${pkg.version}"]]''; + in + if isPublished then + # Published package: versions available in Started, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + else + # Unpublished package: empty in Started, has version in Present, empty in Deleted + [ + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Started"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; + body = publishedVersionsBody; + }; + scenarioName = scenario; + requiredScenarioState = "Present"; + } + { + request = { + method = "GET"; + url = versionsUrl; + }; + response = { + status = 200; 
+ body = "[]"; + }; + scenarioName = scenario; + requiredScenarioState = "Deleted"; + } + ] + ) storageTarballs) + ++ [ + # Accept documentation uploads (POST /packages) + { + request = { + method = "POST"; + url = "/packages"; + }; + response.status = 201; + } + ]; # Healthchecks API wiremock mappings (simple ping endpoint) healthchecksMappings = [ @@ -322,46 +711,9 @@ let } ]; - # Pursuit API wiremock mappings - pursuitMappings = [ - { - request = { - method = "GET"; - url = "/packages/purescript-prelude/available-versions"; - }; - response = { - status = 200; - body = ''[["6.0.1","https://pursuit.purescript.org/packages/purescript-prelude/6.0.1"]]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-effect/available-versions"; - }; - response = { - status = 200; - body = ''[]''; - }; - } - { - request = { - method = "GET"; - url = "/packages/purescript-type-equality/available-versions"; - }; - response = { - status = 200; - body = ''[["4.0.1","https://pursuit.purescript.org/packages/purescript-type-equality/4.0.1"]]''; - }; - } - { - request = { - method = "POST"; - url = "/packages"; - }; - response.status = 201; - } - ]; + # Combined storage mappings (S3 + bucket + Pursuit) + storageMappings = s3Mappings ++ bucketMappings ++ pursuitMappings; + storageFiles = s3Files; # Wiremock root directory builder mkWiremockRoot = @@ -380,7 +732,9 @@ let ${lib.concatMapStrings (f: "cp ${f.path} $out/__files/${f.name}\n") files} ''; - # All wiremock configurations + # All WireMock configurations. + # Add new WireMock services here; both test-env.nix and integration.nix + # derive their processes from this attribute set automatically. 
wiremockConfigs = { github = { port = ports.github; @@ -389,26 +743,13 @@ let mappings = githubMappings; }; }; - s3 = { - port = ports.s3; - rootDir = mkWiremockRoot { - name = "s3"; - mappings = s3Mappings; - files = s3Files; - }; - }; - bucket = { - port = ports.bucket; - rootDir = mkWiremockRoot { - name = "bucket"; - mappings = bucketMappings; - }; - }; - pursuit = { - port = ports.pursuit; + # Single storage WireMock instance with stateful scenarios + storage = { + port = ports.storage; rootDir = mkWiremockRoot { - name = "pursuit"; - mappings = pursuitMappings; + name = "storage"; + mappings = storageMappings; + files = storageFiles; }; }; healthchecks = { @@ -437,7 +778,7 @@ let name = "setup-git-fixtures"; runtimeInputs = [ pkgs.git ]; text = '' - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" + FIXTURES_DIR="''${1:-${stateDir}/repo-fixtures}" # Run git as pacchettibotti gitbot() { @@ -453,6 +794,7 @@ let # Copy fixtures and make writable (nix store files are read-only) cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + cp -r ${rootPath}/app/fixtures/github-packages/console-6.1.0 "$FIXTURES_DIR/purescript/purescript-console" chmod -R u+w "$FIXTURES_DIR/purescript" for repo in "$FIXTURES_DIR"/purescript/*/; do @@ -460,10 +802,13 @@ let git init -b master && git add . 
gitbot commit -m "Fixture commit" git config receive.denyCurrentBranch ignore + # Tag the initial commit so we can reset to it for test isolation + gitbot tag -m "initial-fixture" initial-fixture done gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + gitbot -C "$FIXTURES_DIR/purescript/purescript-console" tag -m "v6.1.0" v6.1.0 ''; }; @@ -550,7 +895,7 @@ in { inherit ports - defaultStateDir + stateDir mockUrls testEnv envToExports @@ -564,10 +909,8 @@ in serverStartScript # For custom wiremock setups githubMappings - s3Mappings - s3Files - bucketMappings - pursuitMappings + storageMappings + storageFiles mkWiremockRoot ; } diff --git a/nix/test/integration.nix b/nix/test/integration.nix index bc4f333e0..75b6e6487 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -1,7 +1,9 @@ { pkgs, spagoSrc, - testEnv, + # Test support module from test-env.nix. Named 'testSupport' to avoid confusion + # with testSupport.testEnv (the environment variables attribute set). + testSupport, }: if pkgs.stdenv.isDarwin then pkgs.runCommand "integration-skip" { } '' @@ -29,7 +31,7 @@ else ''; }; - ports = testEnv.ports; + ports = testSupport.ports; in pkgs.runCommand "e2e-integration" { @@ -38,10 +40,11 @@ else pkgs.curl pkgs.jq pkgs.git + pkgs.sqlite pkgs.nss_wrapper - testEnv.wiremockStartScript - testEnv.serverStartScript - testEnv.setupGitFixtures + testSupport.wiremockStartScript + testSupport.serverStartScript + testSupport.setupGitFixtures ]; NODE_PATH = "${pkgs.registry-package-lock}/node_modules"; # Use nss_wrapper to resolve S3 bucket subdomain in the Nix sandbox. 
@@ -57,9 +60,10 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state + export REPO_FIXTURES_DIR="$STATE_DIR/repo-fixtures" # Export test environment variables for E2E test runners - ${testEnv.testConfig.envToExports testEnv.testConfig.testEnv} + ${testSupport.envToExports testSupport.testEnv} mkdir -p $STATE_DIR @@ -68,8 +72,8 @@ else start-wiremock & WIREMOCK_PID=$! - # Wait for wiremock (github, bucket, s3, pursuit) - for port in ${toString ports.github} ${toString ports.bucket} ${toString ports.s3} ${toString ports.pursuit}; do + # Wait for wiremock (github, storage, healthchecks) + for port in ${toString ports.github} ${toString ports.storage} ${toString ports.healthchecks}; do until curl -s "http://localhost:$port/__admin" > /dev/null 2>&1; do sleep 0.5 done diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 1365d8283..c67919158 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -46,11 +46,14 @@ else timeout=30 ) - # Verify we get a valid JSON response (empty array for jobs) + # Verify we get a valid JSON response (the jobs endpoint responds) result = registry.succeed( "curl -s http://localhost:${envVars.SERVER_PORT}/api/v1/jobs" ) - assert result.strip() == "[]", f"Expected empty jobs array, got: {result}" + + # The server may create matrix jobs on startup for new compilers, so we just verify + # the response is valid JSON (starts with '[') + assert result.strip().startswith("["), f"Expected JSON array, got: {result}" # Verify the database was created and migrations ran registry.succeed("test -f ${stateDir}/db/registry.sqlite3") diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index f7d7fb058..a68b393af 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -59,18 +59,15 @@ let version = "0.5"; processes = { wiremock-github = mkWiremockProcess "github" ports.github; - wiremock-s3 = mkWiremockProcess "s3" ports.s3; - wiremock-bucket = mkWiremockProcess "bucket" ports.bucket; - wiremock-pursuit = 
mkWiremockProcess "pursuit" ports.pursuit; + # Unified storage WireMock instance for S3 + bucket + Pursuit with stateful scenarios + wiremock-storage = mkWiremockProcess "storage" ports.storage; wiremock-healthchecks = mkWiremockProcess "healthchecks" ports.healthchecks; registry-server = { command = "${serverStartScript}/bin/start-server"; depends_on = { wiremock-github.condition = "process_healthy"; - wiremock-s3.condition = "process_healthy"; - wiremock-bucket.condition = "process_healthy"; - wiremock-pursuit.condition = "process_healthy"; + wiremock-storage.condition = "process_healthy"; wiremock-healthchecks.condition = "process_healthy"; }; readiness_probe = { @@ -92,22 +89,26 @@ let processComposeYaml = pkgs.writeText "process-compose.yaml" (builtins.toJSON processComposeConfig); + testEnvExports = testConfig.envToExports testConfig.testEnv; + + # The state directory is fixed (not configurable) to avoid mismatch between + # the test-env and spago-test-e2e shells. + stateDir = testConfig.testEnv.STATE_DIR; + testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - # Export test environment variables for E2E test runners - ${testConfig.envToExports testConfig.testEnv} + # Clean up previous test state and create fresh directory + rm -rf ${stateDir} + mkdir -p ${stateDir} - if [ -z "''${STATE_DIR:-}" ]; then - STATE_DIR="$(mktemp -d)" - export STATE_DIR - echo "Using temporary directory: $STATE_DIR" - trap 'echo "Cleaning up $STATE_DIR..."; rm -rf "$STATE_DIR"' EXIT - else - export STATE_DIR - fi + # Export all test environment variables + ${testEnvExports} - mkdir -p "$STATE_DIR" + echo + echo "[test-env] State directory: ${stateDir}" + echo "[test-env] In another terminal, run: spago-test-e2e" + echo exec ${pkgs.process-compose}/bin/process-compose up \ -f ${processComposeYaml} \ diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 732937f50..05e73ae84 100644 --- a/scripts/src/LegacyImporter.purs +++ 
b/scripts/src/LegacyImporter.purs @@ -761,7 +761,7 @@ buildLegacyPackageManifests rawPackage rawUrl = Run.Except.runExceptAt _exceptPa Legacy.Manifest.fetchLegacyManifest package.name package.address (RawVersion tag.name) >>= case _ of Left error -> throwVersion { error: InvalidManifest error, reason: "Legacy manifest could not be parsed." } Right result -> pure result - pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location legacyManifest + pure $ Legacy.Manifest.toManifest package.name (LenientVersion.version version) location tag.name legacyManifest case manifest of Left err -> Log.info $ "Failed to build manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ": " <> printJson versionValidationErrorCodec err Right val -> Log.info $ "Built manifest for " <> PackageName.print package.name <> "@" <> tag.name <> ":\n" <> printJson Manifest.codec val @@ -1463,7 +1463,7 @@ fetchSpagoYaml address ref = do | location /= GitHub { owner: address.owner, repo: address.repo, subdir: Nothing } -> do Log.warn "spago.yaml file does not use the same location it was fetched from, this is disallowed..." 
pure Nothing - Right config -> case SpagoYaml.spagoYamlToManifest config of + Right config -> case SpagoYaml.spagoYamlToManifest (un RawVersion ref) config of Left err -> do Log.warn $ "Failed to convert parsed spago.yaml file to purs.json " <> contents <> "\nwith errors:\n" <> err pure Nothing diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 0e7fd131a..257a7b1a2 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -228,21 +228,24 @@ deleteVersion arguments name version = do Just published, Nothing -> pure (Just (Right published)) Nothing, Just unpublished -> pure (Just (Left unpublished)) Nothing, Nothing -> pure Nothing + -- Read manifest before deleting it (needed for reimport) + maybeManifest <- Registry.readManifest name version let newMetadata = Metadata $ oldMetadata { published = Map.delete version oldMetadata.published, unpublished = Map.delete version oldMetadata.unpublished } Registry.writeMetadata name newMetadata Registry.deleteManifest name version -- --reimport when arguments.reimport do - case publishment of - Nothing -> Log.error "Cannot reimport a version that was not published" - Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished" - Just (Right specificPackageMetadata) -> do + case publishment, maybeManifest of + Nothing, _ -> Log.error "Cannot reimport a version that was not published" + Just (Left _), _ -> Log.error "Cannot reimport a version that was specifically unpublished" + Just (Right _), Nothing -> Log.error $ "Cannot reimport: manifest not found for " <> formatted + Just (Right _), Just (Manifest manifest) -> do -- Obtains `newMetadata` via cache void $ API.publish Nothing { location: Just oldMetadata.location , name: name - , ref: specificPackageMetadata.ref + , ref: manifest.ref , version: version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing diff --git a/spago.lock b/spago.lock index 
a6dbae907..ea939ddeb 100644 --- a/spago.lock +++ b/spago.lock @@ -227,7 +227,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -239,14 +238,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -254,7 +249,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -299,11 +293,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -316,22 +306,25 @@ "codec-json", "console", "datetime", - "effect", - "either", - "foldable-traversable", + "exceptions", + "fetch", + "integers", "json", - "maybe", + "node-child-process", + "node-execa", "node-fs", "node-path", "node-process", - "prelude", + "ordered-collections", "registry-app", "registry-foreign", "registry-lib", "registry-test-utils", + "routing-duplex", "spec", "spec-node", - "strings" + "strings", + "transformers" ], "build_plan": [ "aff", @@ -644,7 +637,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -656,14 +648,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", "json", @@ -672,7 +660,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -722,11 +709,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -886,7 +869,6 @@ "exceptions", "exists", "exitcodes", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -898,14 +880,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-timers", "js-uri", 
"json", @@ -914,7 +892,6 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", @@ -964,11 +941,7 @@ "unicode", "unsafe-coerce", "unsafe-reference", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] } }, @@ -1144,22 +1117,16 @@ "path": "test-utils", "core": { "dependencies": [ - "aff", "arrays", "bifunctors", "codec-json", "datetime", - "effect", "either", "exceptions", - "fetch", "foldable-traversable", "formatters", - "integers", "json", "maybe", - "newtype", - "node-process", "ordered-collections", "partial", "prelude", @@ -1192,7 +1159,6 @@ "enums", "exceptions", "exists", - "fetch", "fixed-points", "foldable-traversable", "foreign", @@ -1204,14 +1170,10 @@ "functors", "gen", "graphs", - "http-methods", "identity", "integers", "invariant", "js-date", - "js-fetch", - "js-promise", - "js-promise-aff", "js-uri", "json", "language-cst-parser", @@ -1219,14 +1181,12 @@ "lcg", "lists", "maybe", - "media-types", "mmorph", "newtype", "node-buffer", "node-event-emitter", "node-fs", "node-path", - "node-process", "node-streams", "nonempty", "now", @@ -1238,7 +1198,6 @@ "parsing", "partial", "pipes", - "posix-types", "prelude", "profunctor", "profunctor-lenses", @@ -1260,11 +1219,7 @@ "unfoldable", "unicode", "unsafe-coerce", - "variant", - "web-dom", - "web-events", - "web-file", - "web-streams" + "variant" ] }, "test": { diff --git a/test-utils/spago.yaml b/test-utils/spago.yaml index d85190964..4362f8e77 100644 --- a/test-utils/spago.yaml +++ b/test-utils/spago.yaml @@ -3,22 +3,16 @@ package: build: pedanticPackages: true dependencies: - - aff - arrays - bifunctors - codec-json - datetime - - effect - either - exceptions - - fetch - foldable-traversable - formatters - - integers - json - maybe - - newtype - - node-process - ordered-collections - partial - prelude diff --git a/test-utils/src/Registry/Test/Assert.purs b/test-utils/src/Registry/Test/Assert.purs index 55c0f2277..2d15e7a74 100644 --- 
a/test-utils/src/Registry/Test/Assert.purs +++ b/test-utils/src/Registry/Test/Assert.purs @@ -38,6 +38,18 @@ shouldNotContain container elem = when (elem `Foldable.elem` container) do fail (Utils.unsafeStringify elem <> "\n\nshould not be a member of\n\n" <> Utils.unsafeStringify container) +-- | Assert that all elements in `expected` are present in `actual`. +-- | This is a subset check, not an equality check - `actual` may contain +-- | additional elements. +-- | +-- | Useful for E2E tests where a shared database means we can't predict +-- | exact contents, only that certain expected items are present. +shouldContainAll :: forall m a. MonadThrow Error m => Eq a => Array a -> Array a -> m Unit +shouldContainAll actual expected = + Foldable.for_ expected \elem -> + when (elem `Foldable.notElem` actual) do + fail ("Expected element not found:\n" <> Utils.unsafeStringify elem <> "\n\nin array:\n" <> Utils.unsafeStringify actual) + shouldSatisfy :: forall m a. MonadThrow Error m => a -> (a -> Boolean) -> m Unit shouldSatisfy a predicate = unless (predicate a) do diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs deleted file mode 100644 index 8d31b9850..000000000 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ /dev/null @@ -1,180 +0,0 @@ --- | HTTP client for making requests to the registry server during E2E tests. --- | This module provides typed helpers for interacting with the Registry API. -module Registry.Test.E2E.Client - ( Config - , ClientError(..) 
- , defaultConfig - , configFromEnv - , getJobs - , getJob - , getStatus - , publish - , pollJob - , printClientError - ) where - -import Prelude - -import Codec.JSON.DecodeError as CJ.DecodeError -import Control.Monad.Error.Class (class MonadThrow, throwError) -import Control.Monad.Except (runExceptT) -import Control.Monad.Trans.Class (lift) -import Data.Array as Array -import Data.Bifunctor (lmap) -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime) -import Data.Either (Either(..)) -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Int as Int -import Data.Maybe (Maybe(..)) -import Data.Newtype (unwrap) -import Effect (Effect) -import Effect.Aff (Aff, Milliseconds(..), delay) -import Effect.Aff.Class (class MonadAff, liftAff) -import Effect.Exception (Error, error) -import Effect.Exception as Effect.Exception -import Fetch (Method(..)) -import Fetch as Fetch -import JSON as JSON -import Node.Process as Process -import Registry.API.V1 (Job, JobId(..), LogLevel) -import Registry.API.V1 as V1 -import Registry.Internal.Format as Internal.Format -import Registry.Operation (PublishData) -import Registry.Operation as Operation - --- | Configuration for the E2E test client -type Config = - { baseUrl :: String - , timeout :: Milliseconds - , pollInterval :: Milliseconds - , maxPollAttempts :: Int - } - --- | Default configuration for production use (port 8080 matches HTTPurple default) -defaultConfig :: Config -defaultConfig = - { baseUrl: "http://localhost:8080" - , timeout: Milliseconds 30000.0 - , pollInterval: Milliseconds 2000.0 - , maxPollAttempts: 30 - } - --- | Create config from environment, reading SERVER_PORT. --- | --- | SERVER_PORT is required and must be set by the test environment. --- | See `nix/lib/env.nix` for the centralized environment configuration. 
-configFromEnv :: Effect Config -configFromEnv = do - maybePort <- Process.lookupEnv "SERVER_PORT" - case maybePort of - Nothing -> Effect.Exception.throw "SERVER_PORT environment variable is not set. Run tests via 'nix run .#test-env' or 'nix build .#checks.x86_64-linux.integration'." - Just port -> pure $ defaultConfig { baseUrl = "http://localhost:" <> port } - --- | Errors that can occur during client operations -data ClientError - = HttpError { status :: Int, body :: String } - | ParseError { msg :: String, raw :: String } - | Timeout String - | NetworkError String - -printClientError :: ClientError -> String -printClientError = case _ of - HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw - Timeout msg -> "Timeout: " <> msg - NetworkError msg -> "Network Error: " <> msg - --- | Convert a ClientError to an Effect Error for throwing -toError :: ClientError -> Error -toError = error <<< printClientError - --- | Parse JSON response body using a codec -parseResponse :: forall a. CJ.Codec a -> String -> Either String a -parseResponse codec body = do - json <- lmap (append "JSON parse error: ") $ JSON.parse body - lmap CJ.DecodeError.print $ CJ.decode codec json - --- | Make a GET request and decode the response -get :: forall a. CJ.Codec a -> Config -> String -> Aff (Either ClientError a) -get codec config path = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> path) { method: GET } - body <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse codec body of - Left err -> throwError $ ParseError { msg: err, raw: body } - Right a -> pure a - else - throwError $ HttpError { status: response.status, body } - --- | Make a POST request with JSON body and decode the response -post :: forall req res. 
CJ.Codec req -> CJ.Codec res -> Config -> String -> req -> Aff (Either ClientError res) -post reqCodec resCodec config path reqBody = runExceptT do - let jsonBody = JSON.print $ CJ.encode reqCodec reqBody - response <- lift $ Fetch.fetch (config.baseUrl <> path) - { method: POST - , headers: { "Content-Type": "application/json" } - , body: jsonBody - } - responseBody <- lift response.text - if response.status >= 200 && response.status < 300 then - case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError { msg: err, raw: responseBody } - Right a -> pure a - else - throwError $ HttpError { status: response.status, body: responseBody } - --- | Get the list of jobs -getJobs :: Config -> Aff (Either ClientError (Array Job)) -getJobs config = get (CJ.array V1.jobCodec) config "/api/v1/jobs?include_completed=true" - --- | Get a specific job by ID, with optional log filtering -getJob :: Config -> JobId -> Maybe LogLevel -> Maybe DateTime -> Aff (Either ClientError Job) -getJob config (JobId jobId) level since = do - let - params = Array.catMaybes - [ level <#> \l -> "level=" <> V1.printLogLevel l - , since <#> \s -> "since=" <> Formatter.DateTime.format Internal.Format.iso8601DateTime s - ] - query = case params of - [] -> "" - ps -> "?" 
<> Array.intercalate "&" ps - get V1.jobCodec config ("/api/v1/jobs/" <> jobId <> query) - --- | Check if the server is healthy -getStatus :: Config -> Aff (Either ClientError Unit) -getStatus config = runExceptT do - response <- lift $ Fetch.fetch (config.baseUrl <> "/api/v1/status") { method: GET } - if response.status == 200 then - pure unit - else do - body <- lift response.text - throwError $ HttpError { status: response.status, body } - --- | Publish a package -publish :: Config -> PublishData -> Aff (Either ClientError V1.JobCreatedResponse) -publish config publishData = - post Operation.publishCodec V1.jobCreatedResponseCodec config "/api/v1/publish" publishData - --- | Poll a job until it completes or times out -pollJob - :: forall m - . MonadAff m - => MonadThrow Error m - => Config - -> JobId - -> m Job -pollJob config jobId = go 1 - where - go attempt - | attempt > config.maxPollAttempts = - throwError $ toError $ Timeout $ "Job " <> unwrap jobId <> " did not complete after " <> Int.toStringAs Int.decimal config.maxPollAttempts <> " attempts" - | otherwise = do - liftAff $ delay config.pollInterval - result <- liftAff $ getJob config jobId (Just V1.Debug) Nothing - case result of - Left err -> throwError $ toError err - Right job -> - case (V1.jobInfo job).finishedAt of - Just _ -> pure job - Nothing -> go (attempt + 1) diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs deleted file mode 100644 index 70f1242b0..000000000 --- a/test-utils/src/Registry/Test/E2E/Fixtures.purs +++ /dev/null @@ -1,76 +0,0 @@ --- | Test fixtures for E2E tests. --- | Contains package operation data used across multiple test suites. 
-module Registry.Test.E2E.Fixtures - ( effectPublishData - , failingTransferData - , trusteeAuthenticatedData - ) where - -import Prelude - -import Data.Codec.JSON as CJ -import Data.Maybe (Maybe(..)) -import JSON as JSON -import Registry.Location as Location -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) -import Registry.Operation as Operation -import Registry.SSH (Signature(..)) -import Registry.Test.Utils as Utils - --- | Standard publish data for effect@4.0.0, used by E2E tests. --- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 -effectPublishData :: Operation.PublishData -effectPublishData = - { name: Utils.unsafePackageName "effect" - , location: Just $ Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - , version: Utils.unsafeVersion "4.0.0" - } - --- | Authenticated transfer data for prelude, which has no owners in fixtures. --- | Used to test failure scenarios in E2E tests - will fail because no owners --- | are listed to verify the signature against. -failingTransferData :: AuthenticatedData -failingTransferData = - let - transferPayload :: TransferData - transferPayload = - { name: Utils.unsafePackageName "prelude" - , newLocation: Location.GitHub - { owner: "someone-else" - , repo: "purescript-prelude" - , subdir: Nothing - } - } - rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload - in - { payload: Transfer transferPayload - , rawPayload - , signature: Signature "invalid-signature-for-testing" - } - --- | Authenticated data with an intentionally invalid signature. --- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. --- | If re-signing works, the job succeeds; if not, signature verification fails. 
--- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. -trusteeAuthenticatedData :: AuthenticatedData -trusteeAuthenticatedData = - let - unpublishPayload :: UnpublishData - unpublishPayload = - { name: Utils.unsafePackageName "prelude" - , version: Utils.unsafeVersion "6.0.1" - , reason: "Testing trustee re-signing" - } - rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload - in - { payload: Unpublish unpublishPayload - , rawPayload - , signature: Signature "invalid-signature-for-testing" - } diff --git a/test-utils/src/Registry/Test/Fixtures.purs b/test-utils/src/Registry/Test/Fixtures.purs new file mode 100644 index 000000000..28692c13c --- /dev/null +++ b/test-utils/src/Registry/Test/Fixtures.purs @@ -0,0 +1,18 @@ +module Registry.Test.Fixtures where + +import Prelude + +import Data.Either as Either +import Data.Maybe (Maybe(..)) +import Partial.Unsafe as Partial +import Registry.Location (Location(..)) +import Registry.Sha256 (Sha256) +import Registry.Sha256 as Sha256 + +-- | A Location for use within tests. +defaultLocation :: Location +defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } + +-- | A Sha256 for use within tests. 
+defaultHash :: Sha256 +defaultHash = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Failed to parse Sha256") $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/test-utils/src/Registry/Test/Utils.purs b/test-utils/src/Registry/Test/Utils.purs index 2db7280e5..57f177890 100644 --- a/test-utils/src/Registry/Test/Utils.purs +++ b/test-utils/src/Registry/Test/Utils.purs @@ -25,7 +25,6 @@ import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range as Range import Registry.SSH as SSH -import Registry.Sha256 (Sha256) import Registry.Sha256 as Sha256 import Registry.Version (Version) import Registry.Version as Version @@ -149,6 +148,7 @@ unsafeManifest name version dependencies = Manifest { url: "https://github.com/purescript/purescript-" <> name <> ".git" , subdir: Nothing } + , ref: "v" <> version , description: Nothing , owners: Nothing , includeFiles: Nothing @@ -158,11 +158,3 @@ unsafeManifest name version dependencies = Manifest -- | Format a package version as a string in the form 'name@X.Y.Z' formatPackageVersion :: PackageName -> Version -> String formatPackageVersion name version = PackageName.print name <> "@" <> Version.print version - --- | A Location for use within tests. -defaultLocation :: Location -defaultLocation = GitHub { owner: "purescript", repo: "registry-dev", subdir: Nothing } - --- | A Sha256 for use within tests. 
-defaultHash :: Sha256 -defaultHash = fromRight "Failed to parse Sha256" $ Sha256.parse "sha256-fN9RUAzN21ZY4Y0UwqUSxwUPVz1g7/pcqoDvbJZoT04=" diff --git a/types/v1/Manifest.dhall b/types/v1/Manifest.dhall index e9fe88850..2f1a6fa5b 100644 --- a/types/v1/Manifest.dhall +++ b/types/v1/Manifest.dhall @@ -13,6 +13,7 @@ let Manifest = , license : License , version : Version , location : ./Location.dhall + , ref : Text , owners : Optional (List ./Owner.dhall) , description : Optional Text , includeFiles : Optional (List Text) From 06ff81f5d0e6cbec261085486ad055fa9d4690d0 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 14:19:19 -0800 Subject: [PATCH 31/36] trim tests down a bit to optimize speed to ~60s --- app-e2e/src/Test/E2E/Endpoint/Jobs.purs | 31 ++++++------------- app-e2e/src/Test/E2E/Endpoint/Transfer.purs | 2 +- app-e2e/src/Test/E2E/Endpoint/Unpublish.purs | 2 +- app-e2e/src/Test/E2E/Support/Fixtures.purs | 4 +-- app-e2e/src/Test/E2E/Workflow.purs | 23 ++------------ app/fixtures/registry/metadata/prelude.json | 4 +-- .../registry/metadata/type-equality.json | 4 +-- app/test/App/API.purs | 12 +++---- nix/test/config.nix | 17 ++++++---- nix/test/smoke.nix | 2 +- nix/test/test-env.nix | 5 --- 11 files changed, 35 insertions(+), 71 deletions(-) diff --git a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs index 2aa157673..e02b623b5 100644 --- a/app-e2e/src/Test/E2E/Endpoint/Jobs.purs +++ b/app-e2e/src/Test/E2E/Endpoint/Jobs.purs @@ -18,52 +18,39 @@ spec = do Spec.it "can reach the status endpoint" do Client.getStatus - Spec.describe "Jobs list" do - Spec.it "excludes completed jobs when include_completed is false" do - -- Create a job and wait for it to complete + Spec.describe "Jobs API" do + Spec.it "query parameters and filtering work correctly" do + -- Publish once and test all Jobs API features { jobId } <- Client.publish Fixtures.effectPublishData - _ <- Env.pollJobOrFail jobId + job <- Env.pollJobOrFail 
jobId + let info = V1.jobInfo job - -- Now we have at least one completed job + -- Test: include_completed filtering recentJobs <- Client.getJobsWith Client.ActiveOnly allJobs <- Client.getJobsWith Client.IncludeCompleted - - -- All jobs should include the completed publish job let allCount = Array.length allJobs Assert.shouldSatisfy allCount (_ > 0) - - -- Active-only should return fewer or equal jobs let recentCount = Array.length recentJobs Assert.shouldSatisfy recentCount (_ <= allCount) - - -- Verify completed jobs are excluded from active-only results - let completedJob = Array.find (\job -> isJust (V1.jobInfo job).finishedAt) allJobs + let completedJob = Array.find (\j -> isJust (V1.jobInfo j).finishedAt) allJobs case completedJob of Just completed -> do let completedId = (V1.jobInfo completed).jobId - inRecent = Array.any (\job -> (V1.jobInfo job).jobId == completedId) recentJobs + inRecent = Array.any (\j -> (V1.jobInfo j).jobId == completedId) recentJobs when inRecent do Assert.fail $ "Completed job " <> unwrap completedId <> " should be excluded from include_completed=false results" Nothing -> pure unit - Spec.describe "Job query parameters" do - Spec.it "accepts level and since parameters" do - { jobId } <- Client.publish Fixtures.effectPublishData - job <- Env.pollJobOrFail jobId - let info = V1.jobInfo job - + -- Test: query parameters (level and since) baseJob <- Client.getJob jobId Nothing Nothing Assert.shouldEqual (V1.jobInfo baseJob).jobId info.jobId - debugJob <- Client.getJob jobId (Just V1.Debug) Nothing Assert.shouldEqual (V1.jobInfo debugJob).jobId info.jobId - let sinceTime = fromMaybe info.createdAt info.finishedAt sinceJob <- Client.getJob jobId Nothing (Just sinceTime) Assert.shouldEqual (V1.jobInfo sinceJob).jobId info.jobId - Spec.describe "Jobs API error handling" do Spec.it "returns HTTP 404 for non-existent job ID" do let fakeJobId = JobId "nonexistent-job-id-12345" result <- Client.tryGetJob fakeJobId Nothing Nothing diff --git 
a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs index e06b466a2..6e3d49eef 100644 --- a/app-e2e/src/Test/E2E/Endpoint/Transfer.purs +++ b/app-e2e/src/Test/E2E/Endpoint/Transfer.purs @@ -21,7 +21,7 @@ spec = do Spec.it "can transfer effect to a new location with full state verification" do { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData _ <- Env.pollJobOrFail publishJobId - Env.waitForAllMatrixJobs Fixtures.effect + -- Note: we don't wait for matrix jobs - transfer only modifies metadata Metadata originalMetadata <- Env.readMetadata Fixtures.effect.name case originalMetadata.location of diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs index 9a82e943b..6153d0921 100644 --- a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs +++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs @@ -20,7 +20,7 @@ spec = do Spec.it "can publish effect@4.0.0 then unpublish it with full state verification" do { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData _ <- Env.pollJobOrFail publishJobId - Env.waitForAllMatrixJobs Fixtures.effect + -- Note: we don't wait for matrix jobs - unpublish only modifies metadata/storage existsBefore <- Env.manifestIndexEntryExists Fixtures.effect unless existsBefore do diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs index f23b494e3..56f5a1afe 100644 --- a/app-e2e/src/Test/E2E/Support/Fixtures.purs +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -57,7 +57,7 @@ effectPublishData = , subdir: Nothing } , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" + , compiler: Utils.unsafeVersion "0.15.10" , resolutions: Nothing , version: effect.version } @@ -89,7 +89,7 @@ consolePublishData = , subdir: Nothing } , ref: "v6.1.0" - , compiler: Utils.unsafeVersion "0.15.9" + , compiler: Utils.unsafeVersion "0.15.10" , resolutions: Nothing , version: console.version } diff --git 
a/app-e2e/src/Test/E2E/Workflow.purs b/app-e2e/src/Test/E2E/Workflow.purs index 3e65b5840..a03be0562 100644 --- a/app-e2e/src/Test/E2E/Workflow.purs +++ b/app-e2e/src/Test/E2E/Workflow.purs @@ -1,8 +1,7 @@ -- | End-to-end tests for multi-operation workflows. -- | --- | These tests verify complex scenarios involving multiple operations: --- | 1. Git state remains clean after multiple matrix jobs complete --- | 2. Dependency state is validated correctly across publish/unpublish sequences +-- | These tests verify complex scenarios involving multiple operations, +-- | specifically dependency state validation across publish/unpublish sequences. module Test.E2E.Workflow (spec) where import Registry.App.Prelude @@ -22,24 +21,6 @@ import Test.Spec as Spec spec :: E2ESpec spec = do - Spec.describe "Concurrent git operations" do - Spec.it "multiple matrix jobs complete without conflict" do - { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData - _ <- Env.pollJobOrFail publishJobId - Env.waitForAllMatrixJobs Fixtures.effect - - uploadOccurred <- Env.hasStorageUpload Fixtures.effect - unless uploadOccurred do - Assert.fail "Expected tarball upload to S3 for effect@4.0.0" - - Metadata metadata <- Env.readMetadata Fixtures.effect.name - unless (isJust $ Map.lookup Fixtures.effect.version metadata.published) do - Assert.fail "Expected effect@4.0.0 to be in published metadata" - - manifestExists <- Env.manifestIndexEntryExists Fixtures.effect - unless manifestExists do - Assert.fail "Expected effect@4.0.0 to exist in manifest index" - Spec.describe "Dependency and unpublish interactions" do Spec.it "publishing a package fails when its dependency was unpublished" do { jobId: effectJobId } <- Client.publish Fixtures.effectPublishData diff --git a/app/fixtures/registry/metadata/prelude.json b/app/fixtures/registry/metadata/prelude.json index 4d1ab32e6..8c14057ad 100644 --- a/app/fixtures/registry/metadata/prelude.json +++ b/app/fixtures/registry/metadata/prelude.json 
@@ -7,10 +7,8 @@ "6.0.1": { "bytes": 31129, "compilers": [ - "0.15.9", "0.15.10", - "0.15.11", - "0.15.12" + "0.15.11" ], "hash": "sha256-EbbFV0J5xV0WammfgCv6HRFSK7Zd803kkofE8aEoam0=", "publishedTime": "2022-08-18T20:04:00.000Z", diff --git a/app/fixtures/registry/metadata/type-equality.json b/app/fixtures/registry/metadata/type-equality.json index 6b675a80e..e51b52614 100644 --- a/app/fixtures/registry/metadata/type-equality.json +++ b/app/fixtures/registry/metadata/type-equality.json @@ -7,10 +7,8 @@ "4.0.1": { "bytes": 2179, "compilers": [ - "0.15.9", "0.15.10", - "0.15.11", - "0.15.12" + "0.15.11" ], "hash": "sha256-3lDTQdbTM6/0oxav/0V8nW9fWn3lsSM3b2XxwreDxqs=", "publishedTime": "2022-04-27T18:00:18.000Z", diff --git a/app/test/App/API.purs b/app/test/App/API.purs index d60bcab3f..320223b09 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -94,7 +94,7 @@ spec = do version = Utils.unsafeVersion "4.0.0" ref = "v4.0.0" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref @@ -142,7 +142,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion name version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10", "0.15.11", "0.15.12" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion name version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -157,7 +157,7 @@ spec = do -- but did not have documentation make it to Pursuit. 
let pursuitOnlyPublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" @@ -172,7 +172,7 @@ spec = do let transitive = { name: Utils.unsafePackageName "transitive", version: Utils.unsafeVersion "1.0.0" } transitivePublishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version @@ -191,7 +191,7 @@ spec = do Nothing -> Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to be in metadata." Just published -> do let many' = NonEmptyArray.toArray published.compilers - let expected = map Utils.unsafeVersion [ "0.15.9", "0.15.10", "0.15.11", "0.15.12" ] + let expected = map Utils.unsafeVersion [ "0.15.10", "0.15.11" ] unless (many' == expected) do Except.throw $ "Expected " <> formatPackageVersion transitive.name transitive.version <> " to have a compiler matrix of " <> Utils.unsafeStringify (map Version.print expected) <> " but got " <> Utils.unsafeStringify (map Version.print many') @@ -250,7 +250,7 @@ spec = do version = Utils.unsafeVersion "6.0.2" ref = "v6.0.2" publishArgs = - { compiler: Utils.unsafeVersion "0.15.9" + { compiler: Utils.unsafeVersion "0.15.10" , location: Just $ GitHub { owner: "purescript", repo: "purescript-prelude", subdir: Nothing } , name , ref diff --git a/nix/test/config.nix b/nix/test/config.nix index 77b5cfaff..26eb9d7f8 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -77,17 +77,22 @@ let exec ${pkgs.nodejs}/bin/node ${./git-mock.mjs} "$@" ''; - # Apply git mock overlay to get registry packages with mocked git. + # Test overlay: mocks git and limits compilers for faster tests. 
# Using pkgs.extend avoids a second nixpkgs instantiation (more efficient). - # This substitutes gitMock for git in registry-runtime-deps, which causes - # registry-server to be rebuilt with the mock baked into its PATH wrapper. - gitMockOverlay = _: prev: { + testOverlay = _: prev: { + # Substitute gitMock for git in registry-runtime-deps registry-runtime-deps = map ( pkg: if pkg == prev.git then gitMock else pkg ) prev.registry-runtime-deps; + + # Limit to 2 compilers for faster matrix job tests. + # These versions match the compilers referenced in app/fixtures. + registry-supported-compilers = lib.filterAttrs ( + name: _: name == "purs-0_15_10" || name == "purs-0_15_11" + ) prev.registry-supported-compilers; }; - registryPkgs = pkgs.extend gitMockOverlay; + registryPkgs = pkgs.extend testOverlay; # Helper to create GitHub contents API response, as it returns base64-encoded content base64Response = @@ -900,7 +905,7 @@ in testEnv envToExports gitMock - gitMockOverlay + testOverlay wiremockConfigs combinedWiremockRoot setupGitFixtures diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index c67919158..d754f36b6 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -78,7 +78,7 @@ else ]; # Apply the git mock overlay on top of the standard overlays - nixpkgs.overlays = overlays ++ [ testConfig.gitMockOverlay ]; + nixpkgs.overlays = overlays ++ [ testConfig.testOverlay ]; virtualisation = { graphics = false; diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index a68b393af..764d01c47 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -105,11 +105,6 @@ let # Export all test environment variables ${testEnvExports} - echo - echo "[test-env] State directory: ${stateDir}" - echo "[test-env] In another terminal, run: spago-test-e2e" - echo - exec ${pkgs.process-compose}/bin/process-compose up \ -f ${processComposeYaml} \ --ordered-shutdown \ From 198ffcdd1d1ea4adb017a1a814ec64ef6ebbd31b Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: 
Thu, 8 Jan 2026 01:20:23 +0200 Subject: [PATCH 32/36] Add endpoint for package set jobs + e2e tests for it --- .../src/Test/E2E/Endpoint/PackageSets.purs | 52 +++++++++++++++ app-e2e/src/Test/E2E/Support/Client.purs | 33 ++++++++-- app-e2e/src/Test/E2E/Support/Fixtures.purs | 62 ++++++++++++++++- app-e2e/src/Test/Main.purs | 2 + app/fixtures/registry/package-sets/0.0.1.json | 8 +++ app/src/App/API.purs | 66 +++++-------------- app/src/App/Auth.purs | 20 +++++- app/src/App/Effect/Db.purs | 10 +++ app/src/App/GitHubIssue.purs | 40 +++++++++-- app/src/App/SQLite.js | 14 +++- app/src/App/SQLite.purs | 24 +++++-- app/src/App/Server/Env.purs | 5 +- app/src/App/Server/JobExecutor.purs | 5 +- app/src/App/Server/Router.purs | 45 ++++++++++++- ...20240914171030_create_job_queue_tables.sql | 2 + lib/src/API/V1.purs | 2 + lib/src/Operation.purs | 32 +++++++++ 17 files changed, 348 insertions(+), 74 deletions(-) create mode 100644 app-e2e/src/Test/E2E/Endpoint/PackageSets.purs create mode 100644 app/fixtures/registry/package-sets/0.0.1.json diff --git a/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs new file mode 100644 index 000000000..502853fbd --- /dev/null +++ b/app-e2e/src/Test/E2E/Endpoint/PackageSets.purs @@ -0,0 +1,52 @@ +module Test.E2E.Endpoint.PackageSets (spec) where + +import Registry.App.Prelude + +import Control.Monad.Reader (ask) +import Effect.Aff as Aff +import Registry.API.V1 as V1 +import Registry.Test.Assert as Assert +import Test.E2E.Support.Client as Client +import Test.E2E.Support.Env (E2ESpec) +import Test.E2E.Support.Env as Env +import Test.E2E.Support.Fixtures as Fixtures +import Test.Spec as Spec + +spec :: E2ESpec +spec = do + Spec.describe "Package Sets endpoint" do + Spec.it "accepts unauthenticated add/upgrade requests" do + { jobId } <- Client.packageSets Fixtures.packageSetAddRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "rejects 
unauthenticated compiler change requests" do + result <- Client.tryPackageSets Fixtures.packageSetCompilerChangeRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated compiler change" + + Spec.it "rejects unauthenticated package removal requests" do + result <- Client.tryPackageSets Fixtures.packageSetRemoveRequest + case result of + Left err -> do + Assert.shouldSatisfy (Client.clientErrorStatus err) (_ == Just 400) + Right _ -> + Assert.fail "Expected 400 error for unauthenticated package removal" + + Spec.it "accepts authenticated compiler change requests" do + { privateKey } <- ask + case Fixtures.signPackageSet privateKey Fixtures.packageSetCompilerChangeRequest of + Left err -> + liftAff $ Aff.throwError $ Aff.error $ "Failed to sign request: " <> err + Right signedRequest -> do + { jobId } <- Client.packageSets signedRequest + job <- Env.pollJobOrFail jobId + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust + + Spec.it "returns existing job for duplicate requests" do + { jobId: firstJobId } <- Client.packageSets Fixtures.packageSetAddRequest + { jobId: secondJobId } <- Client.packageSets Fixtures.packageSetAddRequest + Assert.shouldEqual firstJobId secondJobId diff --git a/app-e2e/src/Test/E2E/Support/Client.purs b/app-e2e/src/Test/E2E/Support/Client.purs index 6985b9611..3c1c02e62 100644 --- a/app-e2e/src/Test/E2E/Support/Client.purs +++ b/app-e2e/src/Test/E2E/Support/Client.purs @@ -16,6 +16,8 @@ module Test.E2E.Support.Client , publish , unpublish , transfer + , packageSets + , tryPackageSets , pollJob , printClientError , clientErrorStatus @@ -38,7 +40,7 @@ import Fetch as Fetch import JSON as JSON import Registry.API.V1 (Job, JobId, LogLevel, Route(..)) import Registry.API.V1 as V1 -import Registry.Operation (AuthenticatedData, PublishData) +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest, PublishData) 
import Registry.Operation as Operation import Routing.Duplex as Routing import Test.E2E.Support.Types (E2E) @@ -89,9 +91,9 @@ tryGet codec baseUrl path = do get :: forall a. CJ.Codec a -> String -> String -> Aff a get codec baseUrl path = tryGet codec baseUrl path >>= either throw pure --- | Make a POST request with JSON body and decode the response. Throws on error. -post :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res -post reqCodec resCodec baseUrl path reqBody = do +-- | Make a POST request with JSON body, returning Either on error. +tryPost :: forall req res. CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff (Either ClientError res) +tryPost reqCodec resCodec baseUrl path reqBody = do let jsonBody = JSON.print $ CJ.encode reqCodec reqBody response <- Fetch.fetch (baseUrl <> path) { method: POST @@ -101,10 +103,14 @@ post reqCodec resCodec baseUrl path reqBody = do responseBody <- response.text if response.status >= 200 && response.status < 300 then case parseJson resCodec responseBody of - Left err -> throw $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody } - Right a -> pure a + Left err -> pure $ Left $ ParseError { msg: CJ.DecodeError.print err, raw: responseBody } + Right a -> pure $ Right a else - throw $ HttpError { status: response.status, body: responseBody } + pure $ Left $ HttpError { status: response.status, body: responseBody } + +-- | Make a POST request with JSON body and decode the response. Throws on error. +post :: forall req res. 
CJ.Codec req -> CJ.Codec res -> String -> String -> req -> Aff res +post reqCodec resCodec baseUrl path reqBody = tryPost reqCodec resCodec baseUrl path reqBody >>= either throw pure data JobFilter = ActiveOnly | IncludeCompleted @@ -168,6 +174,19 @@ transfer authData = do { clientConfig } <- ask liftAff $ post Operation.authenticatedCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute Transfer) authData +-- | Submit a package set update request +packageSets :: PackageSetUpdateRequest -> E2E V1.JobCreatedResponse +packageSets request = do + { clientConfig } <- ask + liftAff $ post Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + +-- | Try to submit a package set update, returning Left on HTTP/parse errors. +-- | Use this when testing error responses (e.g., expecting 400 for unauthorized restricted ops). +tryPackageSets :: PackageSetUpdateRequest -> E2E (Either ClientError V1.JobCreatedResponse) +tryPackageSets request = do + { clientConfig } <- ask + liftAff $ tryPost Operation.packageSetUpdateRequestCodec V1.jobCreatedResponseCodec clientConfig.baseUrl (printRoute PackageSets) request + -- | Poll a job until it completes or times out. -- | -- | This is the recommended way to wait for job completion in E2E tests. 
diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs index 56f5a1afe..ca5a2967e 100644 --- a/app-e2e/src/Test/E2E/Support/Fixtures.purs +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -17,15 +17,20 @@ module Test.E2E.Support.Fixtures , preludeUnpublishData , signUnpublish , signTransfer + , packageSetAddRequest + , packageSetCompilerChangeRequest + , packageSetRemoveRequest + , signPackageSet , invalidJsonIssueEvent ) where import Registry.App.Prelude import Data.Codec.JSON as CJ +import Data.Map as Map import JSON as JSON import Registry.Location (Location(..)) -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetOperation(..), PackageSetUpdateRequest, TransferData, UnpublishData) import Registry.Operation as Operation import Registry.PackageName (PackageName) import Registry.SSH as SSH @@ -217,6 +222,61 @@ signTransfer privateKey transferData = do , signature } +-- | type-equality@4.0.1 fixture package (exists in registry-index but not in initial package set) +typeEquality :: PackageFixture +typeEquality = { name: Utils.unsafePackageName "type-equality", version: Utils.unsafeVersion "4.0.1" } + +-- | Package set request to add type-equality@4.0.1. +-- | This is an unauthenticated request (no signature) since adding packages +-- | doesn't require trustee authentication. +packageSetAddRequest :: PackageSetUpdateRequest +packageSetAddRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton typeEquality.name (Just typeEquality.version) + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to change the compiler version. 
+-- | This requires authentication (pacchettibotti signature) since changing +-- | the compiler is a restricted operation. +packageSetCompilerChangeRequest :: PackageSetUpdateRequest +packageSetCompilerChangeRequest = + let + payload = PackageSetUpdate + { compiler: Just (Utils.unsafeVersion "0.15.10") + , packages: Map.empty + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Package set request to remove a package. +-- | This requires authentication (pacchettibotti signature) since removing +-- | packages is a restricted operation. +packageSetRemoveRequest :: PackageSetUpdateRequest +packageSetRemoveRequest = + let + payload = PackageSetUpdate + { compiler: Nothing + , packages: Map.singleton effect.name Nothing + } + rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload + in + { payload, rawPayload, signature: Nothing } + +-- | Sign a package set update request using the given private key. +-- | The private key should be the base64-decoded PACCHETTIBOTTI_ED25519 env var. +signPackageSet :: String -> PackageSetUpdateRequest -> Either String PackageSetUpdateRequest +signPackageSet privateKey request = do + private <- SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } + # lmap SSH.printPrivateKeyParseError + let signature = SSH.sign private request.rawPayload + pure request { signature = Just signature } + -- | GitHub issue event with invalid JSON in the body. -- | Used to test that malformed JSON is handled gracefully with an error comment. 
-- | Note: The inner JSON has a trailing comma (`"v1.0.0",}`) which is intentionally diff --git a/app-e2e/src/Test/Main.purs b/app-e2e/src/Test/Main.purs index bf3a108fb..a5b18d43c 100644 --- a/app-e2e/src/Test/Main.purs +++ b/app-e2e/src/Test/Main.purs @@ -4,6 +4,7 @@ import Registry.App.Prelude import Data.Time.Duration (Milliseconds(..)) import Test.E2E.Endpoint.Jobs as Jobs +import Test.E2E.Endpoint.PackageSets as PackageSets import Test.E2E.Endpoint.Publish as Publish import Test.E2E.Endpoint.Transfer as Transfer import Test.E2E.Endpoint.Unpublish as Unpublish @@ -26,6 +27,7 @@ main = do Spec.describe "Jobs" Jobs.spec Spec.describe "Unpublish" Unpublish.spec Spec.describe "Transfer" Transfer.spec + Spec.describe "PackageSets" PackageSets.spec Spec.describe "Workflows" do Spec.describe "GitHubIssue" GitHubIssue.spec diff --git a/app/fixtures/registry/package-sets/0.0.1.json b/app/fixtures/registry/package-sets/0.0.1.json new file mode 100644 index 000000000..cd4470bcc --- /dev/null +++ b/app/fixtures/registry/package-sets/0.0.1.json @@ -0,0 +1,8 @@ +{ + "version": "0.0.1", + "compiler": "0.15.9", + "published": "2024-01-01", + "packages": { + "prelude": "6.0.1" + } +} diff --git a/app/src/App/API.purs b/app/src/App/API.purs index b9b272613..8ebc66ba5 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,8 +9,8 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions + , getPacchettiBotti , packageSetUpdate - , packageSetUpdate2 , packagingTeam , publish , removeIgnoredTarballFiles @@ -51,6 +51,7 @@ import Parsing as Parsing import Parsing.Combinators as Parsing.Combinators import Parsing.Combinators.Array as Parsing.Combinators.Array import Parsing.String as Parsing.String +import Registry.API.V1 (PackageSetJobData) import Registry.App.Auth as Auth import Registry.App.CLI.Purs (CompilerFailure(..), compilerFailureCodec) import Registry.App.CLI.Purs as Purs @@ -60,10 +61,9 @@ import 
Registry.App.Effect.Archive (ARCHIVE) import Registry.App.Effect.Archive as Archive import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.PackageSets (Change(..), PACKAGE_SETS) @@ -82,20 +82,18 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml -import Registry.App.SQLite (PackageSetJobDetails) import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob -import Registry.Foreign.Octokit (IssueNumber(..), Team) -import Registry.Foreign.Octokit as Octokit +import Registry.Foreign.Octokit (Team) import Registry.Foreign.Tmp as Tmp import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Path as Internal.Path import Registry.Location as Location import Registry.Manifest as Manifest import Registry.Metadata as Metadata -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageSetUpdateData, PublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData) import Registry.Operation as Operation import Registry.Operation.Validation (UnpublishError(..), ValidateDepsError(..), validateNoExcludedObligatoryFiles) import Registry.Operation.Validation as 
Operation.Validation @@ -115,23 +113,17 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) +-- | Effect row for package set updates. Authentication is done at the API +-- | boundary, so we don't need GITHUB or GITHUB_EVENT_ENV effects here. +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + LOG + EXCEPT String + r) -packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate2 {} = do - -- TODO: have github call into this - pure unit +-- | Process a package set update from a queued job. Authentication has already +-- | been verified at the API boundary, so we don't need to check team membership. +packageSetUpdate :: forall r. PackageSetJobData -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate details = do + let Operation.PackageSetUpdate payload = details.payload --- | Process a package set update. Package set updates are only processed via --- | GitHub and not the HTTP API, so they require access to the GitHub env. -packageSetUpdate :: forall r. 
PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit -packageSetUpdate payload = do - { issue, username } <- Env.askGitHubEvent - - Log.debug $ Array.fold - [ "Package set update created from issue " <> show (un IssueNumber issue) <> " by user " <> username - , " with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload - ] + Log.debug $ "Package set update job starting with payload:\n" <> stringifyJson Operation.packageSetUpdateCodec payload latestPackageSet <- Registry.readLatestPackageSet >>= case _ of Nothing -> do @@ -143,34 +135,8 @@ packageSetUpdate payload = do let prevCompiler = (un PackageSet latestPackageSet).compiler let prevPackages = (un PackageSet latestPackageSet).packages - Log.debug "Determining whether authentication is required (the compiler changed or packages were removed)..." - let didChangeCompiler = maybe false (not <<< eq prevCompiler) payload.compiler - let didRemovePackages = any isNothing payload.packages - - -- Changing the compiler version or removing packages are both restricted - -- to only the packaging team. We throw here if this is an authenticated - -- operation and we can't verify they are a member of the packaging team. - when (didChangeCompiler || didRemovePackages) do - Log.debug "Authentication is required. Verifying the user can take authenticated actions..." - GitHub.listTeamMembers packagingTeam >>= case _ of - Left githubError -> do - Log.error $ "Failed to retrieve the members of the packaging team from GitHub: " <> Octokit.printGitHubError githubError - Except.throw $ Array.fold - [ "This package set update changes the compiler version or removes a " - , "package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but we were " - , "unable to authenticate your account." - ] - Right members -> do - unless (Array.elem username members) do - Log.error $ "Username " <> username <> " is not a member of the packaging team, aborting..." 
- Except.throw $ Array.fold - [ "This package set update changes the compiler version or " - , "removes a package from the package set. Only members of the " - , "@purescript/packaging team can take these actions, but your " - , "username is not a member of the packaging team." - ] - Log.debug $ "Authentication verified for package set update by user " <> username + -- Note: authentication for restricted operations (compiler change, package removal) + -- is handled at the API boundary in the Router, not here. -- The compiler version cannot be downgraded. for_ payload.compiler \version -> when (version < prevCompiler) do diff --git a/app/src/App/Auth.purs b/app/src/App/Auth.purs index c8647304f..f9303fea8 100644 --- a/app/src/App/Auth.purs +++ b/app/src/App/Auth.purs @@ -1,6 +1,7 @@ module Registry.App.Auth ( SignAuthenticated , signPayload + , verifyPackageSetPayload , verifyPayload ) where @@ -8,7 +9,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.String as String -import Registry.Operation (AuthenticatedData) +import Registry.Operation (AuthenticatedData, PackageSetUpdateRequest) import Registry.SSH as SSH -- We take pacchettibotti as an extra owner because pacchettibotti can always @@ -35,3 +36,20 @@ signPayload :: SignAuthenticated -> Either String SSH.Signature signPayload { privateKey, rawPayload } = do private <- lmap SSH.printPrivateKeyParseError $ SSH.parsePrivateKey { key: privateKey, passphrase: Nothing } pure $ SSH.sign private rawPayload + +-- | Verify a package set update request using pacchettibotti's key. +-- | Returns an error if the signature is invalid or missing. +verifyPackageSetPayload :: Owner -> PackageSetUpdateRequest -> Aff (Either String Unit) +verifyPackageSetPayload pacchettiBotti request = do + case request.signature of + Nothing -> + pure $ Left "Package set update requires a signature for restricted operations." 
+ Just signature -> do + let eitherKey = SSH.parsePublicKey (formatOwner pacchettiBotti) + pure do + key <- eitherKey + unless (SSH.verify key request.rawPayload signature) do + Left "The pacchettibotti signature is not valid for this payload." + where + formatOwner (Owner owner) = + String.joinWith " " [ owner.keytype, owner.public, fromMaybe "id" owner.id ] diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c78188ac1..96b75ca94 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -10,6 +10,7 @@ import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageSetJob, InsertPublishJob, InsertTransferJob, InsertUnpublishJob, MatrixJobDetails, PackageSetJobDetails, PublishJobDetails, SQLite, SelectJobRequest, SelectJobsRequest, StartJob, TransferJobDetails, UnpublishJobDetails) import Registry.App.SQLite as SQLite +import Registry.Operation (PackageSetOperation) import Run (EFFECT, Run) import Run as Run import Run.Except (EXCEPT) @@ -42,6 +43,7 @@ data Db a | SelectPublishJob PackageName Version (Either String (Maybe PublishJobDetails) -> a) | SelectUnpublishJob PackageName Version (Either String (Maybe UnpublishJobDetails) -> a) | SelectTransferJob PackageName (Either String (Maybe TransferJobDetails) -> a) + | SelectPackageSetJobByPayload PackageSetOperation (Either String (Maybe PackageSetJobDetails) -> a) | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) | ResetIncompleteJobs a @@ -130,6 +132,10 @@ selectUnpublishJob packageName packageVersion = Run.lift _db (SelectUnpublishJob selectTransferJob :: forall r. PackageName -> Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) selectTransferJob packageName = Run.lift _db (SelectTransferJob packageName identity) >>= Except.rethrow +-- | Lookup a pending package set job from the database by payload (for duplicate detection). 
+selectPackageSetJobByPayload :: forall r. PackageSetOperation -> Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectPackageSetJobByPayload payload = Run.lift _db (SelectPackageSetJobByPayload payload identity) >>= Except.rethrow + -- | Delete all incomplete jobs from the database. resetIncompleteJobs :: forall r. Run (DB + r) Unit resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) @@ -214,6 +220,10 @@ handleSQLite env = case _ of result <- Run.liftEffect $ SQLite.selectTransferJob env.db packageName pure $ reply result + SelectPackageSetJobByPayload payload reply -> do + result <- Run.liftEffect $ SQLite.selectPackageSetJobByPayload env.db payload + pure $ reply result + InsertLogLine log next -> do Run.liftEffect $ SQLite.insertLogLine env.db log pure next diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index e3eb353aa..b0ab0f02c 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -86,10 +86,13 @@ runGitHubIssue env = do run do -- Determine endpoint and prepare the JSON payload { endpoint, jsonBody } <- case env.operation of - Left (PackageSetUpdate payload) -> pure - { endpoint: "/v1/package-sets" - , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload - } + Left packageSetOp@(PackageSetUpdate payload) -> do + -- Sign with pacchettibotti if submitter is a trustee + request <- signPackageSetIfTrustee packageSetOp payload + pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateRequestCodec request + } Right (Publish payload) -> pure { endpoint: "/v1/publish" @@ -428,3 +431,32 @@ signPacchettiBottiIfTrustee auth = do else do Log.info "Authenticated payload not submitted by a registry trustee, continuing with original signature." pure auth + +-- | Sign a package set update with pacchettibotti's key if the submitter is a trustee. +-- | Non-trustees get an unsigned request (signature = Nothing). 
+signPackageSetIfTrustee + :: forall r + . PackageSetOperation + -> Operation.PackageSetUpdateData + -> Run (GITHUB + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) Operation.PackageSetUpdateRequest +signPackageSetIfTrustee packageSetOp payload = do + let rawPayload = JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + GitHub.listTeamMembers API.packagingTeam >>= case _ of + Left githubError -> do + Log.warn $ Array.fold + [ "Unable to fetch members of packaging team, not signing package set request: " + , Octokit.printGitHubError githubError + ] + pure { payload: packageSetOp, rawPayload, signature: Nothing } + Right members -> do + { username } <- Env.askGitHubEvent + if Array.elem username members then do + Log.info "Package set update submitted by a registry trustee, signing with pacchettibotti keys." + { privateKey } <- Env.askPacchettiBotti + signature <- case Auth.signPayload { privateKey, rawPayload } of + Left _ -> Except.throw "Error signing package set update. cc: @purescript/packaging" + Right sig -> pure sig + pure { payload: packageSetOp, rawPayload, signature: Just signature } + else do + Log.info "Package set update not submitted by a registry trustee, sending unsigned request." 
+ pure { payload: packageSetOp, rawPayload, signature: Nothing } diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 7814c9219..0ff5bd696 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -89,7 +89,7 @@ export const insertMatrixJobImpl = (db, job) => { }; export const insertPackageSetJobImpl = (db, job) => { - const columns = ['jobId', 'payload'] + const columns = ['jobId', 'payload', 'rawPayload', 'signature'] return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; @@ -141,6 +141,18 @@ export const selectPackageSetJobImpl = (db, jobId) => { return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId }); }; +// Find a pending package set job by payload (for duplicate detection) +export const selectPackageSetJobByPayloadImpl = (db, payload) => { + const stmt = db.prepare(` + SELECT job.*, info.* + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE job.payload = ? AND info.finishedAt IS NULL + ORDER BY info.createdAt ASC LIMIT 1 + `); + return stmt.get(payload); +}; + const _selectJobs = (db, { table, since, includeCompleted }) => { let query = ` SELECT job.*, info.* diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 249ac4d09..e51196d47 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -38,6 +38,7 @@ module Registry.App.SQLite , selectNextPublishJob , selectNextTransferJob , selectNextUnpublishJob + , selectPackageSetJobByPayload , selectPublishJob , selectTransferJob , selectUnpublishJob @@ -67,7 +68,7 @@ import Registry.Internal.Format as Internal.Format import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) import Registry.Operation as Operation import Registry.PackageName as PackageName -import Registry.SSH (Signature) +import Registry.SSH (Signature(..)) import Registry.Version as Version -- | An active database connection acquired with `connect` @@ -253,7 +254,7 @@ selectJob db { 
level: maybeLogLevel, since, jobId: JobId jobId } = do maybeJobDetails selectPackageSetJobById logs = ExceptT do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (Nullable.notNull jobId) + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (notNull jobId) pure $ traverse ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) <<< packageSetJobDetailsFromJSRep @@ -728,28 +729,43 @@ packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) +foreign import selectPackageSetJobByPayloadImpl :: EffectFn2 SQLite String (Nullable JSPackageSetJobDetails) + foreign import selectPackageSetJobsImpl :: EffectFn3 SQLite String Boolean (Array JSPackageSetJobDetails) selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) selectNextPackageSetJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db Nullable.null + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db null + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails + +-- | Find a pending package set job by payload (for duplicate detection) +selectPackageSetJobByPayload :: SQLite -> PackageSetOperation -> Effect (Either String (Maybe PackageSetJobDetails)) +selectPackageSetJobByPayload db payload = do + let payloadStr = stringifyJson Operation.packageSetOperationCodec payload + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobByPayloadImpl db payloadStr pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails type InsertPackageSetJob = { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature } type JSInsertPackageSetJob = { jobId :: String , createdAt :: String , payload :: String + , rawPayload :: String + , signature :: 
Nullable String } insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep jobId now { payload } = +insertPackageSetJobToJSRep jobId now { payload, rawPayload, signature } = { jobId: un JobId jobId , createdAt: DateTime.format Internal.Format.iso8601DateTime now , payload: stringifyJson Operation.packageSetOperationCodec payload + , rawPayload + , signature: Nullable.toNullable $ map (\(Signature s) -> s) signature } foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 1b2ea7b4a..70e5698f5 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -24,6 +24,8 @@ import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log +import Registry.App.Effect.PackageSets (PACKAGE_SETS) +import Registry.App.Effect.PackageSets as PackageSets import Registry.App.Effect.Pursuit (PURSUIT) import Registry.App.Effect.Pursuit as Pursuit import Registry.App.Effect.Registry (REGISTRY) @@ -120,7 +122,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + ARCHIVE + REGISTRY + PACKAGE_SETS + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv @@ -148,6 +150,7 @@ runEffects env operation = Aff.attempt do let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" let logPath = Path.concat [ env.logsDir, logFile ] operation + # PackageSets.interpret (PackageSets.handle { workdir: scratchDir }) # Registry.interpret ( Registry.handle { 
repos: Registry.defaultRepos diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index b329b8253..4970fa935 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -154,10 +154,7 @@ executeJob _ = case _ of , packageName: solvedPackage , packageVersion: solvedVersion } - PackageSetJob _details -> - -- TODO: need to pass in the package_sets effect - -- API.packageSetUpdate2 details - pure unit + PackageSetJob payload -> API.packageSetUpdate payload upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit upgradeRegistryToNewCompiler newCompilerVersion = do diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 095232b4b..2553ea4a6 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -12,12 +12,16 @@ import HTTPurple as HTTPurple import HTTPurple.Status as Status import Registry.API.V1 (Route(..)) import Registry.API.V1 as V1 +import Registry.App.API as API +import Registry.App.Auth as Auth import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) +import Registry.Operation (PackageSetOperation(..)) import Registry.Operation as Operation import Run (Run) +import Run as Run import Run.Except as Run.Except runRouter :: ServerEnv -> Effect Unit @@ -123,7 +127,46 @@ router { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.notFound Right (Just job) -> jsonOk V1.jobCodec job - -- TODO packageset jobs? 
+ PackageSets, Post -> do + request <- HTTPurple.fromJson (jsonDecoder Operation.packageSetUpdateRequestCodec) body + lift $ Log.info $ "Received PackageSet request: " <> request.rawPayload + + -- Check if the operation requires authentication (compiler change or package removal) + let + PackageSetUpdate payload = request.payload + didChangeCompiler = isJust payload.compiler + didRemovePackages = any isNothing payload.packages + requiresAuth = didChangeCompiler || didRemovePackages + + -- If restricted operation, verify pacchettibotti signature + authResult <- + if requiresAuth then do + pacchettiBotti <- lift API.getPacchettiBotti + lift $ Run.liftAff $ Auth.verifyPackageSetPayload pacchettiBotti request + else + pure (Right unit) + + case authResult of + Left err -> do + lift $ Log.error $ "Package set authentication failed: " <> err + HTTPurple.badRequest err + Right _ -> do + when requiresAuth do + lift $ Log.info "Package set authentication successful." + + -- Check for duplicate pending job with the same payload + jobId <- lift (Db.selectPackageSetJobByPayload request.payload) >>= case _ of + Just job -> do + lift $ Log.warn $ "Duplicate package set job insertion, returning existing one: " <> unwrap job.jobId + pure job.jobId + Nothing -> do + lift $ Db.insertPackageSetJob + { payload: request.payload + , rawPayload: request.rawPayload + , signature: request.signature + } + + jsonOk V1.jobCreatedResponseCodec { jobId } Status, Get -> HTTPurple.emptyResponse Status.ok diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql index 71727f473..cdb137ad4 100644 --- a/db/migrations/20240914171030_create_job_queue_tables.sql +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -52,6 +52,8 @@ CREATE TABLE matrix_jobs ( CREATE TABLE package_set_jobs ( jobId TEXT PRIMARY KEY NOT NULL, payload JSON NOT NULL, + rawPayload TEXT NOT NULL, + signature TEXT, FOREIGN KEY (jobId) REFERENCES 
job_info (jobId) ON DELETE CASCADE ); diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 862025980..4c399342e 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -61,6 +61,7 @@ data Route = Publish | Unpublish | Transfer + | PackageSets | Jobs { since :: Maybe DateTime, include_completed :: Maybe Boolean } | Job JobId { level :: Maybe LogLevel, since :: Maybe DateTime } | Status @@ -72,6 +73,7 @@ routes = Routing.root $ Routing.prefix "api" $ Routing.prefix "v1" $ RoutingG.su { "Publish": "publish" / RoutingG.noArgs , "Unpublish": "unpublish" / RoutingG.noArgs , "Transfer": "transfer" / RoutingG.noArgs + , "PackageSets": "package-sets" / RoutingG.noArgs , "Jobs": "jobs" ? { since: Routing.optional <<< timestampP <<< Routing.string , include_completed: Routing.optional <<< Routing.boolean diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 262ceb3db..7327001e6 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -19,6 +19,7 @@ module Registry.Operation , PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData + , PackageSetUpdateRequest , PublishData , TransferData , UnpublishData @@ -27,6 +28,7 @@ module Registry.Operation , packageOperationCodec , packageSetOperationCodec , packageSetUpdateCodec + , packageSetUpdateRequestCodec , publishCodec , transferCodec , unpublishCodec @@ -228,3 +230,33 @@ packageSetUpdateCodec = CJ.named "PackageSetUpdate" $ CJ.Record.object -- `Compat` version of the `maybe` codec. , packages: Internal.Codec.packageMap (CJ.Common.nullable Version.codec) } + +-- | A package set update request that can be optionally authenticated. +-- | +-- | Non-trustees can submit add/upgrade operations without authentication. +-- | Trustees must sign requests for restricted operations (compiler changes, +-- | package removals) with pacchettibotti's key. 
+type PackageSetUpdateRequest = + { payload :: PackageSetOperation + , rawPayload :: String + , signature :: Maybe Signature + } + +-- | A codec for encoding and decoding a `PackageSetUpdateRequest` as JSON. +packageSetUpdateRequestCodec :: CJ.Codec PackageSetUpdateRequest +packageSetUpdateRequestCodec = CJ.named "PackageSetUpdateRequest" $ Codec.codec' decode encode + where + decode json = do + rep <- Codec.decode repCodec json + payloadJson <- except $ lmap JSON.DecodeError.basic $ JSON.parse rep.payload + operation <- Codec.decode packageSetOperationCodec payloadJson + pure { payload: operation, rawPayload: rep.payload, signature: map Signature rep.signature } + + encode { rawPayload, signature } = + CJ.encode repCodec { payload: rawPayload, signature: map (\(Signature s) -> s) signature } + + repCodec :: CJ.Codec { payload :: String, signature :: Maybe String } + repCodec = CJ.named "PackageSetUpdateRequestRep" $ CJ.Record.object + { payload: CJ.string + , signature: CJ.Record.optional CJ.string + } From 31d247b1fd836582bc03a00a1fb04f920686c13c Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 15:07:47 -0800 Subject: [PATCH 33/36] tweak unpublish test to verify matrix jobs fail gracefully --- app-e2e/src/Test/E2E/Endpoint/Unpublish.purs | 47 +++++++++++++++++++- app/src/App/Effect/Storage.purs | 1 + 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs index 6153d0921..c58e88ea6 100644 --- a/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs +++ b/app-e2e/src/Test/E2E/Endpoint/Unpublish.purs @@ -2,6 +2,7 @@ module Test.E2E.Endpoint.Unpublish (spec) where import Registry.App.Prelude +import Data.Array as Array import Data.Map as Map import Data.String as String import Registry.API.V1 as V1 @@ -17,10 +18,9 @@ import Test.Spec as Spec spec :: E2ESpec spec = do Spec.describe "Publish-Unpublish workflow" do - Spec.it "can publish effect@4.0.0 then 
unpublish it with full state verification" do + Spec.it "can publish then unpublish with full state verification" do { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData _ <- Env.pollJobOrFail publishJobId - -- Note: we don't wait for matrix jobs - unpublish only modifies metadata/storage existsBefore <- Env.manifestIndexEntryExists Fixtures.effect unless existsBefore do @@ -50,3 +50,46 @@ spec = do existsAfter <- Env.manifestIndexEntryExists Fixtures.effect when existsAfter do Assert.fail "Expected version to be removed from manifest index after unpublish" + + -- Test race condition: submit unpublish while publish is still running. + -- Job priority (Unpublish > Matrix) ensures unpublish runs before matrix jobs. + Spec.it "unpublishing before matrix jobs complete causes them to fail gracefully" do + -- Submit publish, don't wait for it to complete + { jobId: publishJobId } <- Client.publish Fixtures.effectPublishData + + -- Immediately submit unpublish - it will be queued and run after publish + -- but BEFORE matrix jobs due to job priority ordering + authData <- Env.signUnpublishOrFail Fixtures.effectUnpublishData + { jobId: unpublishJobId } <- Client.unpublish authData + + -- Now wait for publish to complete + _ <- Env.pollJobOrFail publishJobId + + -- Wait for unpublish to complete + unpublishJob <- Env.pollJobOrFail unpublishJobId + Assert.shouldSatisfy (V1.jobInfo unpublishJob).finishedAt isJust + + -- Verify unpublish succeeded + Metadata metadata <- Env.readMetadata Fixtures.effect.name + case Map.lookup Fixtures.effect.version metadata.unpublished of + Nothing -> + Assert.fail "Expected version 4.0.0 to be in 'unpublished' metadata" + Just _ -> pure unit + + -- Wait for matrix jobs to complete + Env.waitForAllMatrixJobs Fixtures.effect + + -- Verify matrix jobs failed (they tried to download deleted tarball) + jobs <- Client.getJobs + let + matrixJobs = Array.filter (Env.isMatrixJobFor Fixtures.effect) jobs + allFailed = Array.all (\j -> 
not (V1.jobInfo j).success) matrixJobs + + unless (Array.null matrixJobs || allFailed) do + Assert.fail "Expected matrix jobs to fail after unpublish deleted the tarball" + + -- Critical: verify no bad writes occurred - the version should NOT be + -- back in published metadata (Map.update on missing key is a no-op) + Metadata metadataAfterMatrix <- Env.readMetadata Fixtures.effect.name + when (Map.member Fixtures.effect.version metadataAfterMatrix.published) do + Assert.fail "Matrix job incorrectly wrote to published metadata for unpublished version" diff --git a/app/src/App/Effect/Storage.purs b/app/src/App/Effect/Storage.purs index c9a52a7bb..b6d6a0ad4 100644 --- a/app/src/App/Effect/Storage.purs +++ b/app/src/App/Effect/Storage.purs @@ -199,6 +199,7 @@ handleS3 env = Cache.interpret _storageCache (Cache.handleFs env.cache) <<< case Except.throw $ "Could not delete package " <> package <> " due to an error connecting to the storage backend." Succeeded _ -> do Log.debug $ "Deleted release of " <> package <> " from S3 at the path " <> packagePath + Cache.delete _storageCache (Package name version) pure unit else do Log.error $ packagePath <> " does not exist on S3 (available: " <> String.joinWith ", " published <> ")" From 3e278f4400fe33db283b5bd93ad46c35a1a53c43 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 15:35:56 -0800 Subject: [PATCH 34/36] tweak agents to refer to scratch logs --- AGENTS.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index d97738fe2..129cbe9e7 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -43,7 +43,12 @@ spago-test-e2e Options: `nix run .#test-env -- --tui` for interactive TUI, `-- --detached` for background mode to use a single terminal. -State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. 
+State is stored in `/tmp/registry-test-env` and cleaned up on each `nix run .#test-env`. To examine state after a test run (for debugging), stop the test-env but don't restart it. This is useful, for example, to read the logs of the most recent run. For example: + +```sh +# after a test run, see the logs (log name is today's date) +cat /tmp/registry-test-env/scratch/logs/*.log +``` #### Smoke Test (Linux only) From f195b37cf43766c68233ae9bf8ccc475661a828a Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 19:26:29 -0500 Subject: [PATCH 35/36] remove slow archive seeder test --- .../registry-archive/prelude-6.0.2.tar.gz | Bin 31321 -> 0 bytes app/test/App/API.purs | 60 ------------------ 2 files changed, 60 deletions(-) delete mode 100644 app/fixtures/registry-archive/prelude-6.0.2.tar.gz diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz deleted file mode 100644 index c06e9b2767ae864e73f4bc4f1d9ef76583387aa1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 31321 zcmV)8K*qlxiwFP!000001MFRUciKp@&)@nKy`Hh9`ab(iqW=T0=@dq?KJS$q8G zn_T;FJvlrS-$&vbF7-|K)x&D-NxfFvKd3$0uU3!h`;XY+w-G0OML}r!j6Dkd$;xuK z6V9DY2(;O2mg@i0cC&TfZ9Om#?BJt=gJu0+J34$K?+@zL8l+ciN40}TZ2y7fD7^mU z{onLv3*Ww(gsiq-{X4sieBK@U_AE4v4jCT-Ta%lm+h-aVMTLf W z6e6|TVSrK#dY&H`Q)|Hn97hPq?=kLU+7Tipt4pEpmhPTJ?MD+~y*^NSukZJ)J!P^@=RA*QmLkyK+Buh?0u z(|iMujhF4ycJCbt@~YiCM_#XhS%Y0RI=y!D>a@{emsg$3i*AcSvy78=w|UxVpS4a* z;0_#F>*v;ak9FTPPEQM4iL$5K__75;HeQ~#kQ24?q}^#Xd)S=ht_hk0`A#dWd)aEX z@uu}l3mVhtysJo>-PV6!K_SRwCylel>sHq&tx*Z|N%N}HIzyR358bPm-CnzQ)oZcW z7Z)d_=x(d?Gpsw^XU6G8m$Y@&ZB>9nuR)A~1gNU}4DVlFb=#!0_Ia<>>0Dj*+85_# zX!%=E3&_}jswbq~i*sr!Xy~Hz4vC@;Nck1^_Du`YJE&07R0CDe1&uX(dNJS!3hZfZ zWaq8dr|s9R^JWWkFObUHcDGfAzG`=|0FXqsZyUh#ikggl1X79HcGplNwn9D0+OJsS z!~{U3O@VCjAfJoh1atXco#ykz~aAJuBx^?wuB zCxfvawCX!g+0G1B|A1EiJvlCRDwsd>&Vr|pAK!qbjt1t?n?6peJzhp( 
zBvZk1Bat2~B?;7EmwHhLmPa=rUN5^Rd-c7hW5KE-GGT4?0vi^b1>~dbkQOdxR2n3r zLXZ^s&ezClA_<`InRwYZu<9Y@_k!r|?qhRTaiE!U4HRdd3oCJ;(p|tI*8qDWhsSBK zpv|IKwvY#5iVDp)l`^x2A3)P04U&E}K0o}V*RJ&Z@3a~xXDxF&dQcu~=6|)mf0Uj7 zP<}iAH*x*I^3%XD-v8zOsf7a-z^ucqfd5*;xdc9+Xk6myzv^81z{f1z$ny*7HA%0api)o9)K~Yji_W{bS%;?r?HEwE{rT zUn8S6B=r7|-y`lXjun@`)*H?CPl7nyOY+zZFn&Q~hY6>dNH7!Hrsv;0mgXQR;z{C{ z??a*tAUbV{RaY6t4?nQB3wm}O@-hsg-|qt~w{AS}LMUAorW`DPqPr!AaYj}!;XLB7R4|PmI#?b6?%x_*@(|i_>8+F?hb9PNh+9Ot^a*u zRu9en7+YzkBe1BzzWH~vmgGA#Os(Q%*x!rmi3U>nI!+Uk;Tz+!O=6(-v9NZYHi#9o4A(t|CJk9WAp^>&tu*G&q3`l)BlI{C-v?6zln=~ zoOynTM|nZWBI@%uR=|$gIC5!EgOwa>z@0L!=spi4-_`PbJ}k?%UgmxJY)-5IR{Trf zn{hu}m_y5PuwaD=O3!dfe`dfxqk#Ks9^QjHq`apZzpm!ey#trI=bB!`ut2Ljy)fV`0N8;;0bQ324nV^l~ySYYL)f!j5xf96D(b6g%3C(;q3REE4 z{=FyV2CsGYA3ehR@>pa4YllbGto;X9aBKfJaqaD~KNvbrYT5=O$ixnqIK`p>oQZui zaTd}FFjybL2~5t==N6t2;CR7UmJi$@a=3-#-Z+arm~?!^rq)b^wG1tsXxx{PF}`<; zkt8&|0}%yeNoow{xk_}jTktL*ue)5SvmchspjB24Ems@KA z`~*P?j&X0Wq3%t9K{}C$K?2#MA((_X6h;cPM|}84YdUjy1-ZB^3ZSx}*Y9|@J@3>G z!oJCNcYCOwVB$s2h`Ag*={Ph7soZdIhb>2uo?5Owi^Mj&xw~sfy^DEhig_rSWe8@? 
z2_Jq4&_?R|f(OudMMwn?q8ErEwgd7^10J}2<1%|nwmn(zpFYKcxT)_~P1Y4v_g+XL zAQ2y4dLC{9N-2b3Fe+|%h|Fna5ivbVtkuFK-P<0kR5tQ>fH6{_YK)Ddfp}|!TtnPu zK+2ID+R&%m_pw8qfRk#Lv<=aO);4ApH4g8sY^wsjRXFDE*e0A)a*tG0{98aH9@+z< ziSQ;E4}nM1V!-9hpx`OXU0_*LDn<~L#PGBmiP$a>1P(8}_|6zTL|2A{QMz>F0-WP@v(Hfa>5r16_vB;vj827A>_Jzgu`R|eK zORNJ%+IGnRdu#+2)1TU|fPYjUKo*n`HoC$FP)-mB%wjD8+C5v+)bHng`Gbq=muIx0a9)R~LdYx~?D{07gO&oNZW>fEUJ8Q`4k& z1O%0&Vbvq*A~IW-7hx`8SwRl5I5B;ywZGr*Yr#GuP1J7uW_|4NU-J&elZa!_)0FD1g?|-nUx-T4|;XV6k6}=-1=%HX@FuZ6w|l*a?90s6!6Gf69jORouTt#-Fm7XCO1H-M+!|Rur zJe-!_aso;&Kq%ZSNo$yu1h<%N7lu(&j6lpU_vIABw;2RIP(vrxM>yU3nR@;B1rX>F za_Ja@*ovRz5czElo46{Z|0z`}z!wC=gG&Vp0 zN6Eqv#+*)tBZ*Co94Cg2;++XTwq0AzS+ZN+j5*-bIb}k#oR{e){Tqn zOhcD;!II*~qIB`RfVr(0m6>Zf4uT@}F5q4;vQP=!{gE}W9Xni@ z?2UJej%`KSHsQ2c4loX4b6pX32~c&A^bz$IbGVLV;q^vQ-~U_5tOmk*k%k;sN`;@y_;2UwQnGBkjBU zf3<_H|F@BAN&mmXtya%}kmKL=@n7|v|5vL%+4_H*xKgKlmhW4OGi#PQ;TtbG-GjpV z=^n03kYqaCy$L7J;&kaL9W$5TvIa6Ji6da`1ki@;{b%s?oH_9I(@)7so~Zub#&f(e z@`PJi{d4-@7t8%AkMQnwEt&rvK6bz%CGd6cU7$7dzh14@GxPtbesr{*|C_iZ0K`r= zwN8h5RFckXI<*p~+G9IT^F%FUy z1hlHQZxD#?3HE3@{6sLyN}U+eyDRRH2l0MT@CASxFPsFn z8y|Sd$F>`)TP3=B?k=VS&oOcuTY`=8yGahQWdqTCwK)rc?i|a>Eiup}@sg#Ki`1W! 
zkB+b7qXV@kJX8D$0v@QBZcH-YaStV~fS{k!FVpoayLgp$X_p0eNs_vBz=96QNV&}Y zu+M^p3%7y&FFq19GD;Ums@NgoT$bZTL#&X##=tZ(Ll7IZLaT8rUGYe84_S|Cz&n~JbVjz|w!ZZ<95){&@if%f!BLhsv z;|dGI3NECMjoZMmuKiZQUp<0P*MpqWk3W{-^N&BuX0=(ZTJ z$*8mdkYO-hWRS6A-BSFYH5%z>9&%AktG$n<_0@lmwLT8fn2gxe9*q<f}IOx+3*uz7{4gtMaOd%>dWdOj=2R zwDK}SG7<=v?9QnfhYeDY6aE}nrEu1WUkbxA@V{KdRz(;=_A=J-Zhf|{Iv9vO0o@4@ z(VgO09udZve*8h4wdTgG;v7`yoJnFYkLDzw(!qAoP>%)HSSpxbs19g^&>rbdfq$Hf zMfW+UpvZur>S8Si(Ez`4qj7oBKjK8hYm~Nh;0h||wv+D+$p9LM*+{rlUPA|42ZYF8 zE8B%(b!3P^4JH-9;L5Q#i~=cK&jR{VPAs^pom4hyD?S#~5#-6fAHRUFLW(%(GxBIm z${Lp~p4`N>1pjp>Fm4_w{A(TlJ3Khb{{H7-wYvTNpN(8Pj>AsZP@jCPAY{1JP#va= z+5C!@g1Ag3u@L5U`P9Dgy=cZtrROic2#3Yni(d%KDE(*824jPt6=n^B$RFilo8)sI z4RU#T?3ND4FU8^IU$%Kk01p+n6xL0=KyI{w+U(&(+qju?A49`n`SMf{&Lo;z?w$z9 zaF8_ZJN0uLe_9~W#D;>1z*Ou-VGIo7N)flWz@RiBMzJ~lkNrx51Z4(A0b&*p*p*gXV`t(px8`BHw%5Zj7TRs-l7dZtK=*TZ7U?Fs+ zyOPMT%0<*D4~YrelcpHMSV2dD*MbhFtZYw^Wl**0AtQh=7nxcy?j!>VCWHAfbA;_n z~4VEA<5BIKS^zPI@q+FMyz-H)9)(aQDeTB)m|aiUz8dnq*Mp*)yYE0h=thw2+^7fT{1 zX=7I!lXQ#4#$EQLN?Wt!QHwUQ3d$x66c(2Y*~%JfQNr@HaIGwzEI~AJ;}@}(*GQI` zbYP|iE9kgJE>#+_?+x1@>{{agH31Pu2qNda1MR(^E4ViP<0$+4A5ZH0+xLGqa_RnG zrqeZ7FrSk?0VqsFSI<`&g>+V*2w(~2qv3rVdJ@+x=Uhu)tD~MPxSb86Mc_$#F0ZhE zao?-3(k`9_{PchFC?KP!zHGTIQ5?mz3k(OX3hkLA|HelA($p{^P<#wPf2m^WAwaySP^l|96?Cw*EWfb>Qd6*-??&@gJu1q#Kp3k&4vF zR7o7DY%P(qw4!n$v(?ha1er?WoFH3;4hph~`ICZS(ezk#R8Tmd@@EA(&V`2s8SXJ6 z$kqP($g)iPsYRcjmdC{ZKywc=z`|4i27dfB$bI*XsU1v4h{fdHi?na6cFSU3;>P z|Jlf;19s8rCU$xTk6)@k457o|*6#`?w|=jdR|fNl(42BU1YFdI+p0nnLI`@dHi*xP zaFxe69vQK!L)xOZIH?66ac70@7VP*~+?76J7|KGv09l%f7m#7SrrhgtTyWrpp*Kx} zL0x`J=QfmyAuQs?FvQ<7V=6yh()sUH0!V|JXxAInC2EybEvo)oQGxKkyqKQz znluhZNkm3vI;vRIeg9b2(*y9 zNx#aQL86h782N*8S}d5Wt2oNpoMR_vo+LgVXIg?D?z)BuS3gS_eS3{8LyuoQhaom} z^kuDN=*F%niNS@U~KWh62``h@Bja&umf4bA} zi3IMt%B$IrKlg@zcr?A2v~J5TlTt>yq!C_ha@4&n`q1Rwze2OGP<2I4a+W6^+ z$qCL~#qKy?f$?!?Oxqu-4a`uO4EeK$7-o=aD+tU`pvQqls>zYqC%-B+jFg`_u~fAb zsU<2ffFvj4GFbH^NwQjg3^!;P>J1d8`&`&E(j+HVPnY7!0xR5?-ljIsXL_3XzbAMH 
zbaT#+o@MeM-~YpD@2I!o|JLyY_&?d{Uzqcg_m9hXK1D8ad+HbRdFq$o@a~5=JQaa> zJM|Nekv?0eJ_AFie>AajDwe~<>C{2?%}&-+g*CElW+IzeYbp)cG$-XM zX4#HvsFBvw4%Mj~7R>wt%$NFo4YtcHSu$L@$~LQI#w8}pnIMY2QsKZ@>0#M62TKDp zY=ZDEN46;5 z<(O&sE{BPzyUXF^e3xVAukUhX7pVQyyT)-kLtW!=@$_<~8$(VS?-EB7gzUL>dE*qK zFK>+L^`#fG&2yutdH;W!jq}gG|MyOMCma2bFZ2ZVf9UkO{lEDLFP6|`nip4*ScCF; zUYwt$<-D}5P?Ibgj0WV22UegejVr_fug;>u?&AT$$pI={UIKcba!bYtXS|Q2?rc0* zhUblSnZTNdZ&P3j@J;Lr03tT^*$4bg>;t~0<3tb{Vj%Fd1@PBGgg*uI*S3Dy=|;$U z5H0Wy`)@r1B8x(v3u8+*L}u2IT<{DsJuzk~1N^x{4$MZ#TomV^b5cP@ZWcQs(-IS* z6Vh*$%3&)o{F2xTeAjHnrD7}mjZ+mS17N(4oW0X8Q|_Ml1rDM3f(KCQs2B%L;P@#E z{9ew0k-35TC8;gc9%AdC|IO?F58$aT9erN>uinYgfxG^no^0;_*YX6`f9`a{{D(v6 zJ8FneIRuCCJwK??qLPk^UW@$k2 z2apadhR~HIexTxQ3|gyL3#he7&byLu`svq%17jzfKG?oEE%tfmfAjh;I&v-g-@g3s zbhG}iLGuBiWwH9-Cl-%w_VZ>>x4BObU1#IMBk<7-v|_% zCaBsJUt+ za<2K>-^(gO+TU)*to<#smD=Cb(G#ity+SQn`@1%#))W8I3-~Rcn*ASaoY#f_*V{jK z&wt1Jrmx^pq2wqd z_79}I&6-8CnR=eh2m|4LGMOZ!s(g>mL5Nw2ZMsl^NqLfnf#ZIG$i|~XoJG%`Jy#OU zj_K6a@jD4_AqKF!PQVGnpaXtx391i?m!-tuVR7bpBu7Vk60HCwjogL{*@lR>ms_2t zPCdGR1Fp^~PeeTU(j#^Ae2%9M{}tf|?#BJwQvYwg1NZ*t7)*jT>;D>_y*(SVhx{0A zS#IGhmo$CL1>Dqf4kv!2^R>-d@w<6y=Ks@=cQp6mS<3$%xbmN)-q8mCYb_7CXq-I# zXie;*SL5`tIFPOxrQgL~)bIDB;y}Azv=@)$BSYvS&R(Aid+5fIID&QG4eY?WC<-jF zE`os%sG_)WG$Pm=Z zc&zoW%FodmUv$trIz9TYhbKp;&MHc{C=_W|#rm)ALMk#syw*PBrFP*!3?bj=eO>aug98u$>v;e8a5Mkc@tB9dfX@s|9QY&yi4aMUk}-*$P;L}G&!W6K z1HS|S#E3Af=)WRMK8e6lk;GLZT~`!~tV-vJD>(ZPykQ?EqZou?Sx#O|DN=HomX)PP z$kEdhL}!!541{ROES;yBC<*0S=XqJC!*rHbS0wkNtcD4Q?T8|_Jph-c(wk|=m*_oB zv1Zsf%*qao$^o#YtlE#sq~hdU0!S*gKMb=5uR+a`ow0$ak6e9B5OE)+q)>?u$rZF{ zH26gN2jHCp1cxZG1W{;QfF32^dhSl*c`824;%vHzrvR#njTo2Fzr~;82l`4AU(Of9 zSvq1BpJ#b4SW5QS2Bk&g{8c_rsx#4EEx~R^udd1}nRoL=)jb!hb2ZCa+h@samP4n{ z#(&suKNJim=J1JL=76op!?P2=MFX|1oLxkpyeL1zYg9oT<1U<|TX5-E5NPAduW(0Ro`Z~j zOGDaY04^?ckA}a8|1kUP*ZMU1|GFy)(31WCaR1Pg{~RA|`2V#$7I$oVbTH)~nI-?o zx>ZgLdaE4}_&BZ}0{MVsqpX1cwzE@ zBV8^o+WlzM9r3y1Nrfj15~JYPN74S}!9D;*QsjPL1e`vTB9~I^&SIICnZh%Q#bUqx 
z_HzGp`+=;(_Pw`n<9pi=?I=S#!qAH_u_H{p2$>xr^CA>>gu;tZ*%2x)!Y4bzCmG>4 zQ`7}twnbk+8|hx`?YE(SD8+BOM@QI$Lag?f&lD%igoMqN4?2@AjshXm2k{%9-GFcE@Q6 zp3Qc}4`5Aq;l8@aXS+o*O)kj;iymv8DAgk)Fj!PGZ*M(_i?Ox8t$scdKmQw!w6|}+#qp&Ia2nWN6Wl?Kh3ScaBpD3VT)v%e2I*mZ-G+Ck0V@@hVu^Dl3~WRc4W(QeYKdIhGWnG9V2hcU<}L(Ab!YD z3`%M}&t_5FYxBzi$X$%jaZiMKDT-)FDa4DLV%QU5q1xazbErB2TeziFYO;SrB2|PQ z(W-tQ_#|T`ff@9w5ui3~7ALJZ3=&r`heJxuc-RqEK)6E!beZOH0csk|;*bjc9Z1l> zu>2S_II!GR?c|b=0UNoeAA|OBg1nV&<3wC(7w64wo!uqa^H|kGA^_|JU#@=lI{E8- z>h}M|XqJve!=v}dTM7b~>3{Y1kKFzLu(y%_t>v)}u4z{NDVY|Dt>t-X9a>++)me8g z4vqK{4@>dAV<-%;NN{g2dX>;koEl3J>bH|q!8C&?XH|7xKG@q!vhKWlk$ysr`rM+4w4Y9PLN{Of{X% z^1NtWMtcCF^dQ=G0(Y<4b&79-I6?lM5eFiGD@13O?S9p^$ZiRO{)>nLCwce_?{&0g zi@DoyS)IyX=BoTKdSxGU9XP`r4U8^MLaRI6ltLi~tvS3*!-0T)#=d;TEzpVst+5P{) ziM#%v935}$f7kL@#$TZHQ7Zw2ucpuse&2HgwzRKzBiWN;TDmbgY>CtmVs`LCtrN8# zJ#LU2E_dSI0i%yuGO8S(?w%|5R<|n(+VZO5iz2ZNlrWO63j+zEuHnS^MW)9)LzRmw z1%l|c;!Gs3S)fU@##(m50pXH>u&{trk=-EXRB_(pO=h?VN76|x+_7q8i3SNwq)y&R zwIM-g+pnY_uOK}%D|lGn7+R2tQ^V_B9N%u}a7#JOc0V;rIcs9|z3+&W;s(~#!WM2e z)Ixb12gzoz1}h0bxuq?Mr^sqc-TeEN$tQe;{pKA^2uQ;LF+>nS+bx8NF*%iO@xRs^ zj2yj#j}W&lA!H!2BrJ-gWS1wI3hLWygYVr)}R%WY|9cIi$mDWlzTAqLEjRQ=*EhDIr0dcXX8 zd+PW8Fu5hsIfnHIqH87Px zbGsinK{nXY+E%o-jP$B}sO6_MaV|?sTq?36VxWkB+|y~z{KspbHQ9fOv)-X+|8=sF z|E%RP&woD?OC-;I{1c3bhWKxmOu>!|UR%3u{MR}@GAx^M281krPDXh#mKmvEL3m(| z?8e7G{w?#9zbTgvz`1n*sSVAEItc$JXxjlLll#tD@p@-Mjfd5Oza4}^YU@L4I`94r zY?-K{IP^NfKc-@(gHJ!oFGR1%5P(#o76GdKkYK*>`YNfG9!E7I7qsetTO=*Xd`ToK z**0l$j`>Lcc^$`xa)+(e0AO=5tcsXcT6M=}+1Wj3hPM=%fM%= zEWRial8Z|OD=y+Vxx3il4C0x9{kz)jR+@cEiZXeC!2Wnhg}xlfc;Dyqb5XQ5=Nkyp z2Uv#zjmiNh9fC%K*n(OJ1t@;E2V3^ofiqJdN2YNk7unf5MR+Wpq?fbk1V4)I+%@=r z)IKMEA9I3tGp{}{wyzdLCkY!K%T)Z+PM9s`enN@vg~<$@HcCV~I50$=WY02{XbAi| zzO}G|#-S6km_$?L+9%@gF1$NAsq+wJK|kYmfDHWPq^9wYr~RLQgemem$4OF%IW!Y5 z9b=`y0Q@5U-_}&~R}8dUOf4(ku?@14Q-=pibV=5m!vrzqB7(F#6Eo%vh7(Mt0gs)_ z0ay`E`DmgI-hNiT<#?!YBspwh1cwefuV|3LSP{weKW zPV#_Qw8vIxQHKszBS7>l{KNCLKEkgmP!zyn#u(9b(y%Zh>pW|JQGUS@{W6`&Il+-d 
zLqH(p&}s*0c+a!sZ@&dIgvOD=86}4KkX+&RDO$0M5k!FKlF_1M+Fr+^NG1~_dX0rK zbc4Z96_c{kavmc!%vPsFBg3}Y1)-`Iaa7P+SSDgzx28!2VPbuO7qJ3WM29J%;kbRh zGh*R50ik@iU$ACgXF*PoY?)j~#A#z>ph%YagD%q^5VD~nB)oCuv6~6n;p;qF)bRf= zV{u4|XRnJmE5+oWllILG4+P8fzm88G{qKYQ+K{ zu`TBDNlheRTH|OQpPwURl2jK-l6f$$=<(@8nvHq;R(_NQ9V8&muJE(?B0^}>gS|`U zs~B2@t7ZB;3|d- zW+51xeZYMR8$QsS<(yHHiGoLhc4&x)basJ+bd&)AUhaRh>mi3E%NNr#90U2HkXRN4 z`U~rBVf%zfa==xW^3vt}L=@eItbkqa8!X|3Siz|}Hx$uP)R10`$p7>BYuFJQ{eRq% z1Zc_ne+tf?9Qn`D!N&e)EsyX2!;k-qn8SCL z1_1^Q>?8;}Ls;VyiAOX_K1#{GhTg{qj~)+f`4QsSK*|TvKzegQ|F&Qxt)xdks{$Yz z`h{VSV>%jdZd^0ke-zS^6chkyVk2M75Ir5XZ9{CvTRLYXNM&>?1b7!&4FNnrY-g$9 z4Zv?MWh9c}ZYq6GO+}T>f+#yK4do6Qo4t?n84MNP(-S(qTb#=`-+U9jBori)5b%7~ zF#zF-H{4dzopz(a^Q;mbHjqG9ZIqR0;Q)&g6OiTY3Wf?uVgZ7Z6ru=)8?r09sJd4Y zz&+%R@)pHWQPN(Q+G`}aiNXDW-75OaKKJT@mu9CnP0!|sa+sz?N@7ewS1h__y2#-) zomLCknSf8l{0$HfJ;I1mj6=m>{lGL0O!rBTxXE9}^F$CxE2AS+r9lojRiVP;ipL8w;GeYJm7aQKFgW*r>8XnXH&!6q0j1<;$WFI=#L8?Rr zBA!^39DhBZm?JxuS~GCOkVRn`>R5J>&SvuVSFRH52yADO&C(Ca zPGjNiE?vLK-Oo7J#4{0(*Fiku$U}-~%!?vL+3)}>J}~KFls|D^6uU@uMQ0$27hQ5| zptO3F%-DB|{2H}(v$j~N({9q$ff?*=W(+kB1_st46oYHb1+Fpj+D05@u0-3xhJ~vW zUADk*z*DB$VyIAwdH4fri8LeC5k+r$or8DLA&AfVef-to|G)F2^^9nN&-?Oo;9G0= zux)-*FoYCIp91Gyz3houH8-U4^DWtm{b-tRX#wRlH$?bz3OGlRpdbPk(tW+)$WTE` z4>%1KFc6!yv#jLrkLR6De$kH-Ed0>d&72B zEI{8O0RTd-H_dguXEH`+3|tGGQelkK;OK54vtQqOl3Cy7hMaQ(uye9lIt8Zy!+wMU*w%r5z}<>CCFGtVj#lGwROrPq*#QPJ z%s&2CQsnF!Ub_rm%<2>TlqGepMz1GJ`@k^{WLAST7^o1D(36IG{q~)ONMJ!DXwRH#dcf z0;javwg^BK=_cuT4Px6Ssb*?2>!Eg9ie4lGT4plD25Q@3h+`CJN|}KbZ2oYY_u3?3 zGKVA#b4lVJa45o1uhUyB&Nz@f9z%!X4fe!H--%QZd<~Jx}#eR7+Iik3_{~G%&_GJG8x8y;@=^zHYU|z`oOJy`bHordlwK zWVOCD5B4RWrS@NUpaQUj|35xH_VGVO^bP;NhNtf0zUAI!qYx&>s{*nBdGxq#T<|+W zFC+F6i;p|J2(NQu^p%)x8299Q`HRvqTrf=11n)(|7vCs~)oy8A(+s$)E)w!C1B+d) zm4;({p~_0DD|It|g_hHQ1$i~RwD3axuWj>|9CkcICC*j>NK5e(FVf)RAFnK6s0`G7 zsc7MnYUW^#pFt+4EN!tBpEN6bcag+?$fv!&yo_g7)k#D+Qf8&^a&_*` zbuMSox2#-4dCA*tos)IJt?CxAxlwuGDrDt>AzmKR0aGXsy6e_Cl=pzsG1QMxNL=1) 
zyV;tyJOB@28S|*@)fKjpqj!t#G#Y*nWv0ZEYD{{v+$1;(&01G^*f?!J3>RNY{8(o7Qn6KBr9W&SB)h@^aDMEK-ZnJ;> zYqBP@9;@%}%Cg59gnT{bQX0M?vj_^B`8c`Xjjc~jGu9dW=&9jY{kZwu;Ks~pfg0-{ z1gxnw|M{FoJ9mEN*0_HBJ}iK0&VN4@cjWsSo@Mqwr@a#w|MT#0bN*Y)1F*t09ni+Q zNIaiKv#JJ~Bl+lHf9OxsN_!*n(IQP>SoK)Q)_1|kL|4&2M>D~h0dLdq@IJ3g!-+qgZtsuHN958##`%U`pp%r|E z*?Pe_xxs2HuxyDElSh9bbeO4*6Wzb>`Xxg|8mtivM~7A$y;~%T63}~6X&%?5 z*zGwoD}(#zHs*dKgS<{kPBoP5L>hR+?SFQsbBR7UKc zj^4w}>$4U+vW)x_dCY!{FSWUg1U)0gviyhnA{!^FjPK|tl_P(svWyeznK;dJ8jWK~ zh@Tu=13g%w{b-xs5uTGVD;a{*s|;HU%aYhqn5(2>iIFk8l5w8EkH{F85q)P01S3N) zX!?b3tqI~yTG_FMf|)4?*;&Kz;(Ux8hH0i-6bzHCH1JguF(LVzts@(&6Ra+I(46Q9oj7g&3SX7|0 zY82VHxjKHSo+_J`;MeQ2F*!?h$MB5eskF(;I9hES%vxmiRjgMFlY=5yp;Z*aWW8>x z8)nt4p$e|*Wh3ED%9oPhMlXMc1}0ynF?}m5DlwlHRO<1rwY9R6y4YJ?v4`s1ps+-F zD&-!x={<*l8;^js&(i%LFOhF|09<8ZG`Hu+3 z10cQP36MkmBOrZ%Ga$b;9Rewea|+~i!DFD2phpEX9t2f%`y^Of0vrX+G)v3i!=OqN zJ`Jk!?Bk$H<(>y+eCI%@g5g9+Kdx~kRHex?Ay>^l6slNsDl|u{Iu=qOoC|prv{i36 zoD8j;<=usZWP=0K2Ek=lELXbpA6F|HMk4RzgD= zl#&|(B9ms-PS$p(ZJ}uBAsR z*k!RL;=!@XE5vfhp?yH@M}vc-icrk3gbT4g6bREXL$CwfC*tBNtYykzIJL^9K?HEo zMS}qDH&@_6G@xMt2Jr~XEe3_e>!#gMgz4={E^H!}H09&;j-ej!coLJq*z;tNnN=S_ z3Up@5oqeFi3B*Vh1kjQLCI`PH9|7A*RR7?AANE8|dj{P4Koe`n-G#`Vs7xVC1|=$_ zl4UVXN&?C%=b~H14+*$bCDWuZdLA((hgYB!TJFMhU?s1^gWX|TsRS_2%X5H^`jpH> zXS=P&xaP9OOlWg%t^6ENtBv-e7YKB<-Y70t+5M=J$?sG%xY~|K71V74M@CTlO$jqe zOu$h-PNE{trio8Fq%>*A-yZQ0>Qj%Q0rF*@_&rzwD~3mt33(*x@-UBxwjW-bWc~Qs z&rIs3%8V?{RLTTZQjKg#Mb)pD#-e9L@nGG&SRP3_h9t^?wO8%VIe0B!m$Oo+Qaq}s zQmS$yby8UvE0+fLqT6aMs_nRX2imVw$YrNDX4zSt*TKy7I+(#xD?7N2BIG~lQ+NM^ zJMVh%-+R3icm5wA9d7jh*78{QKiKJp_)qfV$gfyD8Hc2kEA`uz;Vl6X*Y<25viu^8 zO3bI+vP+IyA{^!1B$HFjkIf+})&yc3p4dfMvwRfKOa~ZH7yjJ8y3rCXc_#=<=KVT_`#XD+MI$ zOAq_a!%y@5&z-u8ZD^kr^h-5q0kl3vk2`bx0Soh| zX0(b=;h_o{HH=fta>*bSah8oxD5Yt5axp3fXcL6Wu}3B3I&@2^8;@MX^Q^Fzn@}l% zVTkSBu?lXxB{z*eF5MY=7`lv_@Q9-U;83u_SYtv01C0n+Rri-xkAUuPKoLP{{(h$} zyI0fv<@heV;&)O|tq~U0ELoL)&?>kd@vrGhe>KV>vP<@B7S3A@{-rnjz*EEjam~IU 
z|KHp2|Lb^`^uJ)aZZ8B}#{Zw39Ju(82dBLa|G$pM1p7q@TAfc%Y%b~I?N`m%@a>k-I7Ef5m|3R}kTqU+` zh@V7VZ&3T#Hok7J*c8gepxq!yDD!)K&WKSzu>e7=MJ6+jEz(%;5S_%r(QxqCMp9kJ zJV|jKUIS|DI%a_xV$8ybIaCVn*Xa^fWdd^J${H-D!A-_qbxjVow#q?%we&whm1Kb1 z0Jo|qxd6>Zu-lM*2Uc?AaIvf{Su)`1?kxF~%os3K&kD=%4w+x$iXe7a-q>j`hsSOy zX*guWO&1$Y*TiU-UJM>UX3)Rrc55|jwm|mnw%ge83XKievC0J=z{R?)s~b-r&CFAn z+e(&sqGS3II^31s7ib3cyS6r{UaJ^*T7t#@lcJ(gl3b=`m7v|rga?Jb2_|M<`{X3G zTu-f-lJSgy1scI`7bGyT8P)epRCH5_On2w?{zNR%`_E?{@#MmH(oe^jhpc_5tk8x&J*r+}QuE<+0_zrfxhhHexNAr7?T; z1wDD?&sifDPZl$nJ>=g{OdAnoDf|aqLeAq$G~oSJ{Cx~yZsO@nQ$6|EP7e(z7afc; zC?tE!jEf5WU%G974^PeffBNx`bU)x(GXGBw4&D2oqvOMk|F5+?=KO#9(Kz0cn7$e8S?9b^A>0Mums z>93}nft{U~d6hia*`ekkE0KYM(FZTf;7LBKQn7|zp$Qs$q?fW@l>es88&a!|$-_0d zU54f*#&Ym7emRg9Snwm-69-`t){UMR?vXpvXTkh@QK9_~dL)pf!Hy_06{k7k!Dl() z|BI4FX^EcQ%Ut{?8V=lOQ)Z0okLG#YPebg~RD3~Jx=5h1k|9Mabjf@I0ICUdBlki= z&X0~Df*7a$J`c13PD{~sX(N#Tolx&P!RSe9)jmNfa%n@vtlPPa}f(> z6UM*XEtFwITb2|uh2X-Pk^GIqRRqY;Gb(ec?&t+tqsrhvjj?SGb1$_-5q_ZDi1RiD=Wt|JEilponWc9H?c+ORM|aS_WF){BMW-9ay=Wdtvz=)hSC-&YY+rj}U5-E;p+53@KGw(^OKo-VGf)gt|$P!4{3G zOv9MbEAoBay7x1Q4y9$6TY+UZh_0EUXY43Og5#4x6|st?VW1+?1A}zEAO?+ez3}a~ z>-N6DCU&)}^zXx8!r$9dcm7|DX6abWBs2uOr6^#@`Tz9v(8d2cIo;o!|JU+Z=YO-) zO;Lao-VvMYv`7+qO+Rfh77U98NGdQL(eK%4F2AdSzR&0LMTOu>DogWwFi^VutP>gV z(yv7Zzy9io44h)6?7#_b*8-f87{vZ=W(4mYuqqZ2cU^IXa!#y)PYaA6kH;Oj0qnpr ztb^XKMfx28uDS0B2+V$vcO_}bmu;#N5#%cmlu}wC4&kN3QN!%!vDo9qpA_t4hQ_j^b!98gjZpfdK<)}CZoau7 zk}eHn5a&QPW5DatQZ*Py6Cgtj)X`a~1p4L2M*WS0lQu%t4S5lXo_jC*>0sxp2*8MIJ`tlt0dsGP_raDg6O3;<6G+ipvjl zYhx!tARWrAgxlqF9#GWqC12q^(8C)iUf1ogN!>ok42XG_rA} z($e(uInU9z>3p7NRM=QbdF{qBhNhS`V=P(*OQ&a*sDRE< zPgKTtGGwxp6EC3&55i0wV?`I^oCdWZ+R1eD7#%?Kb6ad|;CE4wx=lIC^+2wj)+B>P z5Esf6PFSSdUgZ}toTv9`P2B$P*+TSnoN<1FobAah&tccyzH#nOZ9)2(fy_dj>{Ywr zc%^!FqTzy^uf{|11v{25VmpkO$}_W!w$&i9spEMiwI5)1p{HBX$;1bUC1Efywh5IZ zBeiB~gA?o8e?ku%Jb)@lRvGJCE~zf;)TkOn{R@Si=XiyXP~*$KkfACi6Xzw@6vAZ! 
z)e5$l>I{}dceIc*W?+a?9)A~!TACvmr*Tz56yi>d7BI#q&OJFmEhZ@XloTZv*6Ut& zq67E?u@AeKa;h`RZifXh-Vz}fTwW(gdZUK8=JTQ_c=K=sMXa1v8R$Xt0z~cErC=%q zhr+wF;8BpN;!R}%(5VS>T>;8y%$4A)T8fcez8rUWn_hk6!JC=9^&lWRk+}=^AHm@v z-~(WiUe2OZOaYe)ri8~VQQi=^t>)4JkVe&?HJ3T>HlC>ph+Etd{QeL=13M|I;8k#&-5iT$Rw#T`Lt z4A)a?MW?B>gEQnJ=linmmN;OB1-I_VaY5uqV^XQyLJw|3ymyqR030~zbq%5ofx+bU z&pe6kh=ZmkP-<^_6pEwojVOe428FSr@Yi2ma^X! zcXWE{`TsmU*vNm@^7s(GEe*6w+8czcYiM!+=D7KZegJp!TCn&e1$xxB0)Hx`*2p8) z8lh=k=oF8=aGmI}R5if@m2@bjr1SVvQa2|X<6%-=Bnf?uhh&aWiZyBB%c3L{r(Hio zRfQ|rtgm8=iPSxltNTJ)Gq+Y`QShNH69RdeV2UJ5C}cveCHY-x#~_mM>gq8(QcB=e z@bnQ=UhfecnV^y1TN*Q1K%>eG3aDU>*I9t1tx0zVS(#YLL!JoMu~dpPD3*czXXPTV^20f3&JHqh_&F;duUCYGLw%!HWHr+=}NNnI02W+|IA zCsP-NKNTSi;O4~Yj^6gJ;pwaB>c71#qbmdtm0|l)%W|Py<$p?MRUBc&wsVK7KEj>- z7AZ1=v`gg%AM(kTRB{&0D~j4zzeS6S-#zvF2mHO;^B8M;7;i;fNx7Su^Ca*~-jw@4 z^z8Brl)6#ZER0TR=1Wr{h9=cD%TWl^&U;g7h9=cDOJkHV@4cxsrSEmkvKX`6dTT0Y zxgL(_iUu7VJjt2!$b>P68RS8j%i%4?ulNrqq7D>;T>R$*ijn<;7gMBtPvGZPYRvKn zClFIxF@K=cuwD`CkdO9p5@u@N(~M?|Fs((6Rk=; zQOx9%lFzlMU*7oFs)L(u)$tRk1`D^2B0U((=#d{x<%myx{mmK&W%M}sMJ9YK)rr3+ z+Db$00oO$H`23vh?dEtBMQkSV;hDo~cem83&V^ZoW<$&uTDFF`UbTEaux(^9VP!eF zBA~M;_Y!f|4N$eF0gyG&OB`+$dM%dAw$@u158C{leoWb6 zw7dFV^_p!X3o%<~pBn=g8mGl*C30HLjM&=`gmGgUiJ?7Z*(7 zByADNx?7MuTsKz~j8S2b1oF1CHRuKzm!kF|qhpt-5;KsOW99@`muu3VgR-oPE-$+} zsQAJ91b#I7Vhq5^ET>DL^BjP}Gvv%!E(0WsR)i#=j(hJ7E>8s3Uq5~Y3rzmr7%77!0l?qtNhje zyyf}Txhq|As$GMUxh2@Wwx)Tw_Lc)3U3@^_+%Elnjr79zk>l0r)_UjAy^i9!F626z z=Wdl){Tw%VF}`QXqyoh}vG3yuhu`=O2f3Yi1?BKtXs#>y^F2#}-7}PK824orjL+{YT{}K^5 z_kZhn_V#SeYu)dTe+~-{-}KTUc3#U(hxom5!STC!8vOs<4fx|f-3b4&2mXII_>Z6e zY3lzw0e}Ce<^ErM{SOiQWV8OS<8jSyBnYIVQ*Sn4oF;NN5(+1dNR( z=?EaUR6oGvSA(^+%Eb^t7s6x&Fs+H2jzx=wYE9HN!CDh}Dn>6F_S(kpgJ^irHh>CS zW-^p&x9zZD4^o6{5XQl-Y0?#tu!(jHrw@p?0+4zOSX_4_hXR<>q^BPMoCe=# zD+5krwI$-TsoJRXA|ZH{M}MT!FQb7`n*jx0HR=D|+4g^f{r~Xvbc6rAmZzcr?`r#B zw*McTh|YHQ|AUjw{=b&TQH8Ac{UX&tc@^QCzk7kOfQ}@y50Xpx2{a*DKG9KrmVLbRN1mW)|D2J+?fT0cn-?FmgQJ2YYB5~Hc7`U!P?(Z 
zVpQiZLlsCE$gnly3vP*S9NY=RX$#^B@FkcQ_x!qY-*eLUZK>7~EaH7zMep01X}WC! z9MCH5)5QL0B9^g0TT10C*mPR?i*7*c+6W$0E`Uh*60B;NRZ=p0vwABqO1hRSFiYW) z5hBuLjDw^#u*I4r9jRyX#zc&id@>QM@@YzH>+xzo6=B~X-)oiB)z~$tpvSGLWy#5v z7&ou##Kg7v11pPwh$ z*i!K1Ag#5jf&>MKDC(fnTSdXh@k&9Ad9361kFS--2CU-t4>|yAxc&4m5{CZg1Z;EB za^4(v*f*VMGD}IL&FotQ8&G2e>`!K_$-;UEkU)da(JV5xeyweQKTDNX{wbAa`KQ!^ zy}S8ev;HI6!ChGZE?NJN_7B|qzvF}bjsKsuJm&h3oo<}``(v8ydJAi_+tajzLX9h}OEJVe^S|A=4f?**N)-n@4(hEWisk?`d zVgwTlHi}H=2o(X8Wepgqw(>@!H_I0WW)wi!#1scXAtGLe8CSvtq(Tg)jK9G`%Ww@6 z=O-b?JUTKD&HAHaND zoUZb0ngGn^$3&D$j#ny^&`#j7>NCPlTla*dHf~QDuZ#Nizw?!m58J zib<^dO=*q+#!{@c7$lun`2tceluoLc$ODLr8A0PHvqt=a?XT@49WrgmABC$14V&d` zPFiqs>^mSKzsL~O2_4^*Ap@AbQCLtvGZ*u4 z7N3(|D5`?WScpg^nv~MIRF)c7ogsj<$Ddg`EX45u)#OF=Bp!DpXcWa-d4?O9x`V6lSh5pt*xt1TAM)xwlsbJu}c4? zIg-t@&Zma|xBYM2QU!1c|G$5D;`)C&IXd0of2`%Pbo_>SKKnk;ezElXK*VjR_Zc4! z=!5z+>;bIw@Zg~c{0(&gx4i8Cl$N*19sqC}vwxXacJ5Z@e;95w6P>4K{>$^l?UjH3 zY5Dv=IX-pI|A(jhoB6+v$C^Qc<#;k4TNCKAW_Di5$+@=^{ZgI*?>S5>@V@qd1clMg zo;i6hqx(mvT4hfE5>T7K${mi9;-d=Z z43zQR5Kxf;TUXgU!g-Td3=-%O9ytuD-{vMCl&KZcU;`j~+!+xZsE1^b_8u5;ux_%&=mF&kmA+ z2_T>w(tu2WGysY4$H!Z`!+3P#V4x1f^EyarlnZsyj3}QRH`$@Prq*wIo#>zw9lndi zoQd9|!>A0FdTj}||L2Y!EwNa*B!=age5~E%B1ZpRKs9N`o8H|_POPXeRVa&3NBF1(cUP*b*8?~6cYkk8NYQt^B`79p6TmjrH z^dAW86p6+@Bnd4i0Q4>MmO#A|fEy=qWsw8gumrC6=eZc7$!uaRKvWd~7N1l}Cf)X; zkdVA%561Zbh?FyU?}FrQ!)H{`#ko6 zwrH$e4V)KfCT{<1;RI>x8S;tI#7wtZp_ws3h%$;!t4K?lw86%}i<|CFt}9*3V?B-f zbqrVH3>cq`tt1*&If=*JEwu$MnZ|NW;uXqBcWG07**2Cf9SavPwU05-G|Hq^oZ#{` zq4`gQ!lu01ax;|cj7l~^LYYQdY8*l_nxIpyu}{uH&Z_V&GP|kuE<_kIDpr<bZ=+O=b5oSi8ezatY9!7$1#vH{`mJKL#IaXUUrkqUl=zlw*PvTTK zF-I0ggwg*mk2jK?Ydkmr=WXJ~3@ zsb{~`e}!w0`mZnZ%J}Fj(AyOVo0K?}Y_KkY>e{xfR=sOJsRr;(VVyQ;h(OKI5CL0a zsj019SJSq@>uRlq?+%&T$*F-? 
zOFo9HI6FBuzm?id06IgL0fSw;Wdo)+Zm)D|`ouEGar>*uHitS)#xMf)tJ{T6WC+x+ zp-#QF;j>EW7(c6UHh^~f{@MjmI;Rv0bc^8eaqS`~ol^t_x<$|!vUA zZdL~#*SD^S4}S1mZ>u!`2y#wI5QOX43TD6DtiGsKbN7|~o;iS;n&LFSel>wHxLGw( zyXWq!`6uR7YO0AV&-JUBl{7QFl$Em$DZ++%eZb@A1;JZ06d zV7}8VW%)B?ceJy(s#3V)hpQrK;6cqj;^h-H9R%N2E=Ff;7vcMHKg(xhTMyt#S>~fu z`a_p@d%K|Z@awNpYY2Zwz#FzOKpYz&U|>c3_FIT3{>q4;Z`#HHQ;C4BR6^wp0pJ{7 zVlQXE)27F7#tZ$}-5qYm8+^>@27t#ocyqxQhk6QLbF8C#V> zFJ?%cPAEzEa%g z3iYL`buFFJKbS+Ld@1OD=?5( zlLpmj3qK*M-{XuisGdZKP=`02dJInWz%e6;F+SCU$NVCCWU2-bx`4&E)&vj8HFZoQF>Cuz=D?J0xoo{*u~Z}Nw1bNL%Pl9h34sML7ttswSwC&&*d3~YkC4)jzA z4Vz}cqzMB%1U24PbEdjUQl3YeBdI~(R`W|UE449YU-WI(r#3##Q^oew#B&vas(tT7% z!tkNbvTx)fyTMIlWv2y=4h({Mnm!B4#Ss4a)$~rlVS21Lcrs7{2DCk;$Dw}Hl=fJ> zt_uY>H)9&yHY;Q9-8L&dhgQys(>AYw>?9)C^@hKQmc42_{v!VNo84|!xEc{gnB8Mm zbX`#+u=2v1Yh}8s@H|Zfyi-t9p@Rz5;x#*fIemS4lqsl=?m)#B4UGH< z32)xX0**P{5$U!PZhC)*t<=i*7^7J^$~pt(uvbaB~^US%Hqdm@2{K-L^UI zyneMjr@OvvISa2y z-5T&8PY#cdUHiY&;|>1vS{~2-4@}sAc>@i4DOhKRR57UQlK`I$`7p$?0sO}J@Dg7! zKLzO}*sZF_XWj4PDw*cRRX`r^MiCn!i$0aHAJ_;!dIT6(j~_pFK%?a`vV~wUU=?hC zN4VM6qkcOC#?{(Yvh*L)Pl<&QM56Ba1G~FbCKOcLG_f5KmKG+JwjhEm%L}p}4zDcBk+yhZZe2*xM`gK1eal64h*HqkR{n@LsXESb%q zI?|xQHiURi5J`w`Gz4=Wu%oEZ%BPG*0Pcu@I8LXV)JQUy7&=LM`q5kT&}>jcpB$4k z-H0(qi1EM`8>TWOw$-HNa5W?8=@NVsIQ1;jd0eEkE3#FKXU0d-~n5+#&B^X zlEsC{1~Ar2MkvbXX;l$S5?P4Gh+GCbJf$>p%VN=w@hrNCuaF?N=oy`Xp_gb@p0H!X z%vBrnbhjvegLxjkay+sInJ9;tOB>|@8+4(CCK^MD1!}2XP?AlnGq{Nd9MkWvz}gO! 
z*VKCSM=@e&fC8G9&$CxmKKdYnI8qrJb%3|^agmBTqr@p+GIp4NR$cimuXtH_yB#Y2 zr$V|Z7kw=fW(2IKN#gYyhJJI$@r5Au-fFc#^WB7bK^7@b%;kJJj;nm3qX4NuXCyBY$ z(IUnE3y3F`woNjs<#8S z-P#Kt*Vl%f9RQD(5ZYOu?IMabeE>^$0#4l_XyC;O*HEOwAtah$ht;)q zA{o+$z8FBuO2sbN0#qXpVv}WijcjWvNfpZAa24c@1&k{oM1@_)yd>9%E`jKFc9kg#iIrhQ2wK9TeVum+KD}tIL zuwF>WV8y7=5PM+8QVoH9VG4f5#`0CH4K;?Le1^p#2FI9FbQBUYAer-IX3a24K7<|` zbSW9aOd*;VW+ijM0Xk00Diw?gjHSxJksc1Ma%JbldbSQXBjYO@8LF$UF*NGC*UEkP4Pyo%zZyG0b~p9)>xN$jSI6k{iE7V&fN-@egCUBU`H8Ne?5B zq9|8#?8+g9$!#k`9T0)vq)SUQc(I&Y#Cj;37 zR4q9?l|0zlp)pKc03U&#XVrj*FR+4W z%&-71NQ|(^@&RNx!Kn;SIjI=J#pvuR8k4Xc#wpoQrQ#Ha-e(HI4Z-1f#FsdtX(w=R z(~)kcAPsusg(D)9N5W|`h8yvh_@(LG<#{F;U;w9O`DqucXZg?(Q(PfHQaL5x1<6C2 z5B)AXx%8JtBWC^bX|=2b81~8A zHU-zMezUdeR|7mX{|6j{P!Y=M} z>Cc+(1W@2yB)hakfTo1<&O8CMDCwkwqL;>Xsa7J~J(oF?qh;2$XeHfg7vsJF7cwS9 zl@j@NIiVeeEP8iN`;*$sYczl<-+>vT>7Rr@0!sM^DyDlFp2BtxmfY2I?4$& ziVV+-{Fh|JuD4N)E|1ZU)+$RbsfZU>FSsmZE)1<^3pYIyVMdOAJiMTI5nl;vTlf#& z5KipJ*iUBLC;q0P*zuFCReP~50{sZLGka=4*fOE3eEW-)53cP zbhyVIr6yU9Vpx>M@+FcC

6og5rqT{SL61U}1kvwvun@@7tNxnMS z(I>g{WOtzKv$NStM+oYOa61XE6C06GLJ1<&&Wp(7`2dxX#F3T zOtc54Op&KxqXeiD)(bn7rYpSCXY8;!%cWZiMMQAG%P>_`wi1!YtxLvHMU!hj}sP!M%eetkd6k_Op zX*T#D_VLeuyer(&=a>25x$j#3zmCLz8~QqGnY6=}ykhaRvzR9)A zVZ*<1X**(%%H-pMb;3s4hd60~p)mmTJ8(Yv#IemrTijxpRv?^0WwNWgvY`q!0cyoj zBl${}sQqWC7s4QoE+a7{q(Q&Z)FVYdBQ8b56n%(C-PJPZgE%qau$;HSYKYE^bK~+H zy*ZPD6(1lRUq&{8sxUaUdnI8G)pSaJETItDn92c+wl zvHb82kC=C@Z?H5m-oYp$UsTeUHK&S8*H^a8AmE#O*UJ_LSW-yrd{T@o&(d>C5Gbny zR@#*u)F5q?2H+QBR3+ge^^j)+l9I>OGP`WgO*^!;Tq&LzLdZyZS?2{@$qnhZVJI>u zbO|09ptofjSkFlraUKn2N`j>xgYq1B2i`?RMI3=(9Dh%)B!5l{ID|#oU3e_**Aw#i zobj|tO9UzJu(}cIZOV_ac83g3pxMR;(Ui0oa`p+0`;%EQt5S0lXn0P_dg_hS&W8|} z>NbR`&EZNO2_C|nX|5O>^?Z)1@rHZD%E{E>>M_~6f`j;c^+~#8Oj5a^t653;sj0aW zo|?2)UJZ8Tez3~y0~(jiJkHkGUamjQ2u}mro%jN0iV?gEz;ao)1E?7t{gff}3$Obl zrJ#nEZb?Eitwkk`P0=uvz&?^_mgnasjXbOr+J4ckMYua~A!()Eq)RK)liq-CNM0pL zbXHa8<%7MwsmNt9?25f|PuFq;j25M z-TbzJR#r*Rpv0vVG*`tAln?pNRxr4fJ|5117cI`ksyZ*FY!>#3c}Jg6;*$IrfqBZC zfk{URd|GJ)v}u(V+hqDdY$~M{S6X0|6&vnns;wmI!%fZ=^~Va6R6I=BX9?qIpy&X& zHQ)r#@l6Gb0C`?ecTaky|Im$EA7CxVKX6`b*Q!~oZbe)Im66i|G3>IE{EYfxX7YwD`tP{rZo5&RXz)PYzGq^?(21bhG}iZAB^x<2-`{@WCCDF57gVCdOK2Y#|=Wnp!x5eNynTU(}sI_?;d`+WiOvV zh(>j!BK%1@4v|31fQ^%gky2Dex!L%p(g>GDYg#~N9g$?=BB60Mg4VX>V1OVeT5=AD zC3@3L{5QZ@xNE85vK?0zzbpdd9jX_C+mQyI+!WqX^No$V8UfbC54X?DB1)|jL%&&ZfH-$?5zysX|EGFt58oE zg#UQ~Ho}uh0=TAPjqi3Hm9|NqM}2v;l|;Rc0q5l96ID1g53pJbI(YsWPpojYUrc*4 zaqR=GT9f!-rNoNrn-%gh)T6iH$|5D*#*ygz9ErU|qo`dn+K*>Zl8tkg8A^LQxr9!DYm@UvnH_}? 
znkUv_Ur#4xo_wT{dIjkQZ%123rUfZd$m|qon&OK3U$)&mln|P-p`UgheKAu+DD7dc zMWp`ZR*teVD@Y-+ejU2hC~-ICNIz|AObcA+lZMHYYlXlyx2@g3m(}3n2Vm%n;aRs^iFCZf!z&u;aYVV zS}Dy+=p#>kZDg`Cf&q%v`4P5617Vf-L~Aef09fPEC^_eoXF;g;H`pjw+2z1cy7h0c zG64WBz8tgx7Z+a--Ka0~>I}7gjF=pTQPtY#?xDDYI&Htg9@?~h*0=9jktFMBVPrYI zYK|K!3V+7o+R{H`&Xl>~H-4tmO%-*U00)vH%&%7gSJDL6++uP-NNCuwSeXk0E2@9kgF48i{%T`z%Sz zS<1QhSyEtsw8{a!84^ZMUVH|Xqpw}HC?tod5oCbQ_iCEnpGB9*lY-C%sG&}jTvp;d zLV}Pte2#h7`UcygZAQe!oIPu7!^qp#+DGC51%pzGPO1f5eV+$rKp zVHe^1ceqQ)5;HOELgNI46v*m|1eu`QjA2=zK}7{U1ATBeJq{(8a7Y6Y4Bv*2^8x6+ z$E>il>czBH3mWIq72Kp=29Cp%=qhy1)k+9j;vgkTaGO6tp~e&)U}1gZxv13D;O@|+ zsjHVCl^MT`YumcQ0m+i8y$5j8n8ES`>3}SznbLX#!tPwrwtWyJ9tErJf4}!jiI0Q3Dl41fm)l2u@%< zj&-y*QA^YgN)klFel35E>DSQuHdNoDcPe+*r$xWK6Z8p`8`*Ei*ckB`n-JfJVhet! zd%_z2N;Zcxz!uHn@Mp;!vX?=@+;@Rh$dJOdOj(>Eq8mE%P>JO*KU*q?g!n=^6aI{l zD=*H#F94q|Vc7=I7DDo7zBbW!8L_>iN5fe@`e4nQOpasOn*-pYh%M^C#2Y1~%m&|4 zat5%2wC3eynSxpau`0~((6tURG2mIrV!w^ccP7@lERidsDPL>J<80fI>M9s;%tJyR z%0eu}Ls?E=Et_^?)hRNyk74L$!NSJbEIuzu+GyO6kPf%fmGqALxTJ|T>~6u)!x~j{ zb3>TJS{%#WfFgsMhu~5xtWG2EmnxSv=V&?R*7Kv;GeYH*P5gT%!MTbbRXi z|2{p~-{AkOgl67c*d@}NFpmiAISOlK$^ zsn;jsUCdJ|(ou=Q)*nF^SKi{Vlx#9Ebaum*RIAga_-R>WM>Z=rV+o4FvEvt&8X7fr ztn4VO_kfx3URT0Y_OS*4(_3m+4$?T0GSXsAX-tz5PBYw&xzf6FT@pD%ow$WZT_tcEu;v6bh0 z)ME~2VyU{weC$&-XVMpE^-FOq4`yTOsK{*WGcji4o`|Ix(XH$?%{jnp3IA-%DY1eL zC*{W0Vs$p0O+{&oGRmsasPoD1!>IF#W7PSi=juatozd)=b{hIyoSqD}Xv*OXiCvq= z%p6(EEIP>~(>7YPER%tvWmcJ(6CKFu_aug6#Cu67xlCAr%6jumXr=PHwv>+_SF!9f$ zXeR!-e_xhQNf0HWe3QO=$bl;1FO={Xk?@1~$4nS8DQ~j-_uoMVqFQf$dDoVC{I;e4 zatlxbi16FPt>1pvD9QEn-!$mDod95&{pYc#|8sh@f3%tZ>v$S$IOS=auSFb-L$PI+ z={%i@#={%KH`*{$utL&=uZ<>?<2*5Zx)$JTwFsI>YGXxl-unekF(F${i&3+vT6Xkl zRx=>G$ks>vr=YU}zJF{}kf|N;Q)mi3{2mr;!Bk{4#dDugDy=qb_ahYuazhVzi#W((*^?`+rk0 zK;Hi7k$df8!TdmLa`a~X0(l8gcdH<(=`>5hn}DyB%Pr_sc(i*beU`0?Yf zV%u1v|9K}aAeQleho1k3;Z2 zyd)MK?26-lmBWH5_>S_di-N}%<=+wfnaD)&cx~p;{$4q=&9iwn&*s@Yn`iTE gp3Sp)HqYkSJez0pY@W@td2avwKWcGkECA2~014F$H~;_u diff --git a/app/test/App/API.purs b/app/test/App/API.purs 
index 320223b09..28f17f90e 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -217,66 +217,6 @@ spec = do Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) Assert.fail $ "Expected to publish effect@4.0.0 and type-equality@4.0.1 and transitive@1.0.0 but got error: " <> err Right (Right _) -> pure unit - - Spec.it "Falls back to archive when GitHub repo is inaccessible during legacy import" \{ workdir, index, metadata, storageDir, archiveDir, githubDir } -> do - logs <- liftEffect (Ref.new []) - - let - toLegacyIndex :: ManifestIndex -> Solver.TransitivizedRegistry - toLegacyIndex = - Solver.exploreAllTransitiveDependencies - <<< Solver.initializeRegistry - <<< map (map (_.dependencies <<< un Manifest)) - <<< ManifestIndex.toMap - - testEnv = - { workdir - , logs - , index - , metadata - , pursuitExcludes: Set.empty - , username: "jon" - , storage: storageDir - , archive: archiveDir - , github: githubDir - } - - -- The prelude@6.0.2 package exists in registry-archive but NOT in - -- github-packages or registry-storage. This simulates an archive-backed - -- package whose original GitHub repo is gone. - result <- Assert.Run.runTestEffects testEnv $ Except.runExcept do - let - name = Utils.unsafePackageName "prelude" - version = Utils.unsafeVersion "6.0.2" - ref = "v6.0.2" - publishArgs = - { compiler: Utils.unsafeVersion "0.15.10" - , location: Just $ GitHub { owner: "purescript", repo: "purescript-prelude", subdir: Nothing } - , name - , ref - , version - , resolutions: Nothing - } - - -- Legacy import with archive fallback - Registry.readAllManifests >>= \idx -> - void $ API.publish (Just (toLegacyIndex idx)) publishArgs - - -- Verify the package was published to storage - Storage.query name >>= \versions -> - unless (Set.member version versions) do - Except.throw $ "Expected " <> formatPackageVersion name version <> " to be published to registry storage." 
- - case result of - Left exn -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Got an Aff exception! " <> Aff.message exn - Right (Left err) -> do - recorded <- liftEffect (Ref.read logs) - Console.error $ String.joinWith "\n" (map (\(Tuple _ msg) -> msg) recorded) - Assert.fail $ "Expected prelude@6.0.2 to be published via archive fallback but got error: " <> err - Right (Right _) -> pure unit where withCleanEnv :: (PipelineEnv -> Aff Unit) -> Aff Unit withCleanEnv action = do From de4c19ec26dd7c1199bd2467f03e7a375f3a5a53 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Wed, 7 Jan 2026 19:42:52 -0500 Subject: [PATCH 36/36] fix tests by bumping compiler --- AGENTS.md | 26 +++++++++++++++--- app-e2e/src/Test/E2E/Support/Fixtures.purs | 2 +- .../package-sets/latest-compatible-sets.json | 2 +- .../registry-archive/prelude-6.0.2.tar.gz | Bin 0 -> 31321 bytes app/fixtures/registry/package-sets/0.0.1.json | 2 +- nix/test/config.nix | 2 +- 6 files changed, 26 insertions(+), 8 deletions(-) create mode 100644 app/fixtures/registry-archive/prelude-6.0.2.tar.gz diff --git a/AGENTS.md b/AGENTS.md index 129cbe9e7..5ce5268dc 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -14,20 +14,38 @@ Watch out for these Nix quirks: - If Nix tries to fetch from git during a build, it is likely that spago.yaml files were changed but the lock file was not updated; if so, update the lockfile with `spago build` - If a Nix build appears to be stale, then it is likely files were modified but are untracked by Git; if so, add modified files with `git add` and retry. -### Build and Test +### Build -The registry is implemented in PureScript. Use spago to build it and run PureScript tests. These are cheap and fast and should be used when working on the registry packages. +The registry is implemented in PureScript. Use spago to build it. 
```sh spago build # Build all PureScript code -spago test # Run unit tests +``` + +The registry infrastructure is defined in Nix. Build it with Nix: + +```sh +nix build .#server +``` + +### Test + +The registry contains a mixture of unit tests, e2e tests, and nix flake checks. When you complete a change you should generally run the unit tests. When working on the server, you should generally also run the e2e tests. If you are on a Linux system, you can run `nix flake check -L` to run the flake checks prior to committing code to ensure it works. + +#### Unit Tests + +Unit tests can be run with `spago`. They are fast and cheap. + +```sh +spago test # Run all unit tests +spago test -p # Run tests for a specific package ``` #### End-to-End Tests The end-to-end (integration) tests are in `app-e2e`. They can be run via Nix on Linux: -``` +```sh nix build .#checks.x86_64-linux.integration ``` diff --git a/app-e2e/src/Test/E2E/Support/Fixtures.purs b/app-e2e/src/Test/E2E/Support/Fixtures.purs index ca5a2967e..7fe0b556a 100644 --- a/app-e2e/src/Test/E2E/Support/Fixtures.purs +++ b/app-e2e/src/Test/E2E/Support/Fixtures.purs @@ -247,7 +247,7 @@ packageSetCompilerChangeRequest :: PackageSetUpdateRequest packageSetCompilerChangeRequest = let payload = PackageSetUpdate - { compiler: Just (Utils.unsafeVersion "0.15.10") + { compiler: Just (Utils.unsafeVersion "0.15.11") , packages: Map.empty } rawPayload = JSON.print $ CJ.encode Operation.packageSetOperationCodec payload diff --git a/app/fixtures/package-sets/latest-compatible-sets.json b/app/fixtures/package-sets/latest-compatible-sets.json index ceba8dd7a..5cdbbb9c2 100644 --- a/app/fixtures/package-sets/latest-compatible-sets.json +++ b/app/fixtures/package-sets/latest-compatible-sets.json @@ -1,3 +1,3 @@ { - "0.15.9": "psc-0.15.9-20230105" + "0.15.10": "psc-0.15.10-20230105" } diff --git a/app/fixtures/registry-archive/prelude-6.0.2.tar.gz b/app/fixtures/registry-archive/prelude-6.0.2.tar.gz new file mode 100644 index 
0000000000000000000000000000000000000000..c06e9b2767ae864e73f4bc4f1d9ef76583387aa1 GIT binary patch literal 31321 zcmV)8K*qlxiwFP!000001MFRUciKp@&)@nKy`Hh9`ab(iqW=T0=@dq?KJS$q8G zn_T;FJvlrS-$&vbF7-|K)x&D-NxfFvKd3$0uU3!h`;XY+w-G0OML}r!j6Dkd$;xuK z6V9DY2(;O2mg@i0cC&TfZ9Om#?BJt=gJu0+J34$K?+@zL8l+ciN40}TZ2y7fD7^mU z{onLv3*Ww(gsiq-{X4sieBK@U_AE4v4jCT-Ta%lm+h-aVMTLf W z6e6|TVSrK#dY&H`Q)|Hn97hPq?=kLU+7Tipt4pEpmhPTJ?MD+~y*^NSukZJ)J!P^@=RA*QmLkyK+Buh?0u z(|iMujhF4ycJCbt@~YiCM_#XhS%Y0RI=y!D>a@{emsg$3i*AcSvy78=w|UxVpS4a* z;0_#F>*v;ak9FTPPEQM4iL$5K__75;HeQ~#kQ24?q}^#Xd)S=ht_hk0`A#dWd)aEX z@uu}l3mVhtysJo>-PV6!K_SRwCylel>sHq&tx*Z|N%N}HIzyR358bPm-CnzQ)oZcW z7Z)d_=x(d?Gpsw^XU6G8m$Y@&ZB>9nuR)A~1gNU}4DVlFb=#!0_Ia<>>0Dj*+85_# zX!%=E3&_}jswbq~i*sr!Xy~Hz4vC@;Nck1^_Du`YJE&07R0CDe1&uX(dNJS!3hZfZ zWaq8dr|s9R^JWWkFObUHcDGfAzG`=|0FXqsZyUh#ikggl1X79HcGplNwn9D0+OJsS z!~{U3O@VCjAfJoh1atXco#ykz~aAJuBx^?wuB zCxfvawCX!g+0G1B|A1EiJvlCRDwsd>&Vr|pAK!qbjt1t?n?6peJzhp( zBvZk1Bat2~B?;7EmwHhLmPa=rUN5^Rd-c7hW5KE-GGT4?0vi^b1>~dbkQOdxR2n3r zLXZ^s&ezClA_<`InRwYZu<9Y@_k!r|?qhRTaiE!U4HRdd3oCJ;(p|tI*8qDWhsSBK zpv|IKwvY#5iVDp)l`^x2A3)P04U&E}K0o}V*RJ&Z@3a~xXDxF&dQcu~=6|)mf0Uj7 zP<}iAH*x*I^3%XD-v8zOsf7a-z^ucqfd5*;xdc9+Xk6myzv^81z{f1z$ny*7HA%0api)o9)K~Yji_W{bS%;?r?HEwE{rT zUn8S6B=r7|-y`lXjun@`)*H?CPl7nyOY+zZFn&Q~hY6>dNH7!Hrsv;0mgXQR;z{C{ z??a*tAUbV{RaY6t4?nQB3wm}O@-hsg-|qt~w{AS}LMUAorW`DPqPr!AaYj}!;XLB7R4|PmI#?b6?%x_*@(|i_>8+F?hb9PNh+9Ot^a*u zRu9en7+YzkBe1BzzWH~vmgGA#Os(Q%*x!rmi3U>nI!+Uk;Tz+!O=6(-v9NZYHi#9o4A(t|CJk9WAp^>&tu*G&q3`l)BlI{C-v?6zln=~ zoOynTM|nZWBI@%uR=|$gIC5!EgOwa>z@0L!=spi4-_`PbJ}k?%UgmxJY)-5IR{Trf zn{hu}m_y5PuwaD=O3!dfe`dfxqk#Ks9^QjHq`apZzpm!ey#trI=bB!`ut2Ljy)fV`0N8;;0bQ324nV^l~ySYYL)f!j5xf96D(b6g%3C(;q3REE4 z{=FyV2CsGYA3ehR@>pa4YllbGto;X9aBKfJaqaD~KNvbrYT5=O$ixnqIK`p>oQZui zaTd}FFjybL2~5t==N6t2;CR7UmJi$@a=3-#-Z+arm~?!^rq)b^wG1tsXxx{PF}`<; zkt8&|0}%yeNoow{xk_}jTktL*ue)5SvmchspjB24Ems@KA z`~*P?j&X0Wq3%t9K{}C$K?2#MA((_X6h;cPM|}84YdUjy1-ZB^3ZSx}*Y9|@J@3>G 
z!oJCNcYCOwVB$s2h`Ag*={Ph7soZdIhb>2uo?5Owi^Mj&xw~sfy^DEhig_rSWe8@? z2_Jq4&_?R|f(OudMMwn?q8ErEwgd7^10J}2<1%|nwmn(zpFYKcxT)_~P1Y4v_g+XL zAQ2y4dLC{9N-2b3Fe+|%h|Fna5ivbVtkuFK-P<0kR5tQ>fH6{_YK)Ddfp}|!TtnPu zK+2ID+R&%m_pw8qfRk#Lv<=aO);4ApH4g8sY^wsjRXFDE*e0A)a*tG0{98aH9@+z< ziSQ;E4}nM1V!-9hpx`OXU0_*LDn<~L#PGBmiP$a>1P(8}_|6zTL|2A{QMz>F0-WP@v(Hfa>5r16_vB;vj827A>_Jzgu`R|eK zORNJ%+IGnRdu#+2)1TU|fPYjUKo*n`HoC$FP)-mB%wjD8+C5v+)bHng`Gbq=muIx0a9)R~LdYx~?D{07gO&oNZW>fEUJ8Q`4k& z1O%0&Vbvq*A~IW-7hx`8SwRl5I5B;ywZGr*Yr#GuP1J7uW_|4NU-J&elZa!_)0FD1g?|-nUx-T4|;XV6k6}=-1=%HX@FuZ6w|l*a?90s6!6Gf69jORouTt#-Fm7XCO1H-M+!|Rur zJe-!_aso;&Kq%ZSNo$yu1h<%N7lu(&j6lpU_vIABw;2RIP(vrxM>yU3nR@;B1rX>F za_Ja@*ovRz5czElo46{Z|0z`}z!wC=gG&Vp0 zN6Eqv#+*)tBZ*Co94Cg2;++XTwq0AzS+ZN+j5*-bIb}k#oR{e){Tqn zOhcD;!II*~qIB`RfVr(0m6>Zf4uT@}F5q4;vQP=!{gE}W9Xni@ z?2UJej%`KSHsQ2c4loX4b6pX32~c&A^bz$IbGVLV;q^vQ-~U_5tOmk*k%k;sN`;@y_;2UwQnGBkjBU zf3<_H|F@BAN&mmXtya%}kmKL=@n7|v|5vL%+4_H*xKgKlmhW4OGi#PQ;TtbG-GjpV z=^n03kYqaCy$L7J;&kaL9W$5TvIa6Ji6da`1ki@;{b%s?oH_9I(@)7so~Zub#&f(e z@`PJi{d4-@7t8%AkMQnwEt&rvK6bz%CGd6cU7$7dzh14@GxPtbesr{*|C_iZ0K`r= zwN8h5RFckXI<*p~+G9IT^F%FUy z1hlHQZxD#?3HE3@{6sLyN}U+eyDRRH2l0MT@CASxFPsFn z8y|Sd$F>`)TP3=B?k=VS&oOcuTY`=8yGahQWdqTCwK)rc?i|a>Eiup}@sg#Ki`1W! 
zkB+b7qXV@kJX8D$0v@QBZcH-YaStV~fS{k!FVpoayLgp$X_p0eNs_vBz=96QNV&}Y zu+M^p3%7y&FFq19GD;Ums@NgoT$bZTL#&X##=tZ(Ll7IZLaT8rUGYe84_S|Cz&n~JbVjz|w!ZZ<95){&@if%f!BLhsv z;|dGI3NECMjoZMmuKiZQUp<0P*MpqWk3W{-^N&BuX0=(ZTJ z$*8mdkYO-hWRS6A-BSFYH5%z>9&%AktG$n<_0@lmwLT8fn2gxe9*q<f}IOx+3*uz7{4gtMaOd%>dWdOj=2R zwDK}SG7<=v?9QnfhYeDY6aE}nrEu1WUkbxA@V{KdRz(;=_A=J-Zhf|{Iv9vO0o@4@ z(VgO09udZve*8h4wdTgG;v7`yoJnFYkLDzw(!qAoP>%)HSSpxbs19g^&>rbdfq$Hf zMfW+UpvZur>S8Si(Ez`4qj7oBKjK8hYm~Nh;0h||wv+D+$p9LM*+{rlUPA|42ZYF8 zE8B%(b!3P^4JH-9;L5Q#i~=cK&jR{VPAs^pom4hyD?S#~5#-6fAHRUFLW(%(GxBIm z${Lp~p4`N>1pjp>Fm4_w{A(TlJ3Khb{{H7-wYvTNpN(8Pj>AsZP@jCPAY{1JP#va= z+5C!@g1Ag3u@L5U`P9Dgy=cZtrROic2#3Yni(d%KDE(*824jPt6=n^B$RFilo8)sI z4RU#T?3ND4FU8^IU$%Kk01p+n6xL0=KyI{w+U(&(+qju?A49`n`SMf{&Lo;z?w$z9 zaF8_ZJN0uLe_9~W#D;>1z*Ou-VGIo7N)flWz@RiBMzJ~lkNrx51Z4(A0b&*p*p*gXV`t(px8`BHw%5Zj7TRs-l7dZtK=*TZ7U?Fs+ zyOPMT%0<*D4~YrelcpHMSV2dD*MbhFtZYw^Wl**0AtQh=7nxcy?j!>VCWHAfbA;_n z~4VEA<5BIKS^zPI@q+FMyz-H)9)(aQDeTB)m|aiUz8dnq*Mp*)yYE0h=thw2+^7fT{1 zX=7I!lXQ#4#$EQLN?Wt!QHwUQ3d$x66c(2Y*~%JfQNr@HaIGwzEI~AJ;}@}(*GQI` zbYP|iE9kgJE>#+_?+x1@>{{agH31Pu2qNda1MR(^E4ViP<0$+4A5ZH0+xLGqa_RnG zrqeZ7FrSk?0VqsFSI<`&g>+V*2w(~2qv3rVdJ@+x=Uhu)tD~MPxSb86Mc_$#F0ZhE zao?-3(k`9_{PchFC?KP!zHGTIQ5?mz3k(OX3hkLA|HelA($p{^P<#wPf2m^WAwaySP^l|96?Cw*EWfb>Qd6*-??&@gJu1q#Kp3k&4vF zR7o7DY%P(qw4!n$v(?ha1er?WoFH3;4hph~`ICZS(ezk#R8Tmd@@EA(&V`2s8SXJ6 z$kqP($g)iPsYRcjmdC{ZKywc=z`|4i27dfB$bI*XsU1v4h{fdHi?na6cFSU3;>P z|Jlf;19s8rCU$xTk6)@k457o|*6#`?w|=jdR|fNl(42BU1YFdI+p0nnLI`@dHi*xP zaFxe69vQK!L)xOZIH?66ac70@7VP*~+?76J7|KGv09l%f7m#7SrrhgtTyWrpp*Kx} zL0x`J=QfmyAuQs?FvQ<7V=6yh()sUH0!V|JXxAInC2EybEvo)oQGxKkyqKQz znluhZNkm3vI;vRIeg9b2(*y9 zNx#aQL86h782N*8S}d5Wt2oNpoMR_vo+LgVXIg?D?z)BuS3gS_eS3{8LyuoQhaom} z^kuDN=*F%niNS@U~KWh62``h@Bja&umf4bA} zi3IMt%B$IrKlg@zcr?A2v~J5TlTt>yq!C_ha@4&n`q1Rwze2OGP<2I4a+W6^+ z$qCL~#qKy?f$?!?Oxqu-4a`uO4EeK$7-o=aD+tU`pvQqls>zYqC%-B+jFg`_u~fAb zsU<2ffFvj4GFbH^NwQjg3^!;P>J1d8`&`&E(j+HVPnY7!0xR5?-ljIsXL_3XzbAMH 
zbaT#+o@MeM-~YpD@2I!o|JLyY_&?d{Uzqcg_m9hXK1D8ad+HbRdFq$o@a~5=JQaa> zJM|Nekv?0eJ_AFie>AajDwe~<>C{2?%}&-+g*CElW+IzeYbp)cG$-XM zX4#HvsFBvw4%Mj~7R>wt%$NFo4YtcHSu$L@$~LQI#w8}pnIMY2QsKZ@>0#M62TKDp zY=ZDEN46;5 z<(O&sE{BPzyUXF^e3xVAukUhX7pVQyyT)-kLtW!=@$_<~8$(VS?-EB7gzUL>dE*qK zFK>+L^`#fG&2yutdH;W!jq}gG|MyOMCma2bFZ2ZVf9UkO{lEDLFP6|`nip4*ScCF; zUYwt$<-D}5P?Ibgj0WV22UegejVr_fug;>u?&AT$$pI={UIKcba!bYtXS|Q2?rc0* zhUblSnZTNdZ&P3j@J;Lr03tT^*$4bg>;t~0<3tb{Vj%Fd1@PBGgg*uI*S3Dy=|;$U z5H0Wy`)@r1B8x(v3u8+*L}u2IT<{DsJuzk~1N^x{4$MZ#TomV^b5cP@ZWcQs(-IS* z6Vh*$%3&)o{F2xTeAjHnrD7}mjZ+mS17N(4oW0X8Q|_Ml1rDM3f(KCQs2B%L;P@#E z{9ew0k-35TC8;gc9%AdC|IO?F58$aT9erN>uinYgfxG^no^0;_*YX6`f9`a{{D(v6 zJ8FneIRuCCJwK??qLPk^UW@$k2 z2apadhR~HIexTxQ3|gyL3#he7&byLu`svq%17jzfKG?oEE%tfmfAjh;I&v-g-@g3s zbhG}iLGuBiWwH9-Cl-%w_VZ>>x4BObU1#IMBk<7-v|_% zCaBsJUt+ za<2K>-^(gO+TU)*to<#smD=Cb(G#ity+SQn`@1%#))W8I3-~Rcn*ASaoY#f_*V{jK z&wt1Jrmx^pq2wqd z_79}I&6-8CnR=eh2m|4LGMOZ!s(g>mL5Nw2ZMsl^NqLfnf#ZIG$i|~XoJG%`Jy#OU zj_K6a@jD4_AqKF!PQVGnpaXtx391i?m!-tuVR7bpBu7Vk60HCwjogL{*@lR>ms_2t zPCdGR1Fp^~PeeTU(j#^Ae2%9M{}tf|?#BJwQvYwg1NZ*t7)*jT>;D>_y*(SVhx{0A zS#IGhmo$CL1>Dqf4kv!2^R>-d@w<6y=Ks@=cQp6mS<3$%xbmN)-q8mCYb_7CXq-I# zXie;*SL5`tIFPOxrQgL~)bIDB;y}Azv=@)$BSYvS&R(Aid+5fIID&QG4eY?WC<-jF zE`os%sG_)WG$Pm=Z zc&zoW%FodmUv$trIz9TYhbKp;&MHc{C=_W|#rm)ALMk#syw*PBrFP*!3?bj=eO>aug98u$>v;e8a5Mkc@tB9dfX@s|9QY&yi4aMUk}-*$P;L}G&!W6K z1HS|S#E3Af=)WRMK8e6lk;GLZT~`!~tV-vJD>(ZPykQ?EqZou?Sx#O|DN=HomX)PP z$kEdhL}!!541{ROES;yBC<*0S=XqJC!*rHbS0wkNtcD4Q?T8|_Jph-c(wk|=m*_oB zv1Zsf%*qao$^o#YtlE#sq~hdU0!S*gKMb=5uR+a`ow0$ak6e9B5OE)+q)>?u$rZF{ zH26gN2jHCp1cxZG1W{;QfF32^dhSl*c`824;%vHzrvR#njTo2Fzr~;82l`4AU(Of9 zSvq1BpJ#b4SW5QS2Bk&g{8c_rsx#4EEx~R^udd1}nRoL=)jb!hb2ZCa+h@samP4n{ z#(&suKNJim=J1JL=76op!?P2=MFX|1oLxkpyeL1zYg9oT<1U<|TX5-E5NPAduW(0Ro`Z~j zOGDaY04^?ckA}a8|1kUP*ZMU1|GFy)(31WCaR1Pg{~RA|`2V#$7I$oVbTH)~nI-?o zx>ZgLdaE4}_&BZ}0{MVsqpX1cwzE@ zBV8^o+WlzM9r3y1Nrfj15~JYPN74S}!9D;*QsjPL1e`vTB9~I^&SIICnZh%Q#bUqx 
z_HzGp`+=;(_Pw`n<9pi=?I=S#!qAH_u_H{p2$>xr^CA>>gu;tZ*%2x)!Y4bzCmG>4 zQ`7}twnbk+8|hx`?YE(SD8+BOM@QI$Lag?f&lD%igoMqN4?2@AjshXm2k{%9-GFcE@Q6 zp3Qc}4`5Aq;l8@aXS+o*O)kj;iymv8DAgk)Fj!PGZ*M(_i?Ox8t$scdKmQw!w6|}+#qp&Ia2nWN6Wl?Kh3ScaBpD3VT)v%e2I*mZ-G+Ck0V@@hVu^Dl3~WRc4W(QeYKdIhGWnG9V2hcU<}L(Ab!YD z3`%M}&t_5FYxBzi$X$%jaZiMKDT-)FDa4DLV%QU5q1xazbErB2TeziFYO;SrB2|PQ z(W-tQ_#|T`ff@9w5ui3~7ALJZ3=&r`heJxuc-RqEK)6E!beZOH0csk|;*bjc9Z1l> zu>2S_II!GR?c|b=0UNoeAA|OBg1nV&<3wC(7w64wo!uqa^H|kGA^_|JU#@=lI{E8- z>h}M|XqJve!=v}dTM7b~>3{Y1kKFzLu(y%_t>v)}u4z{NDVY|Dt>t-X9a>++)me8g z4vqK{4@>dAV<-%;NN{g2dX>;koEl3J>bH|q!8C&?XH|7xKG@q!vhKWlk$ysr`rM+4w4Y9PLN{Of{X% z^1NtWMtcCF^dQ=G0(Y<4b&79-I6?lM5eFiGD@13O?S9p^$ZiRO{)>nLCwce_?{&0g zi@DoyS)IyX=BoTKdSxGU9XP`r4U8^MLaRI6ltLi~tvS3*!-0T)#=d;TEzpVst+5P{) ziM#%v935}$f7kL@#$TZHQ7Zw2ucpuse&2HgwzRKzBiWN;TDmbgY>CtmVs`LCtrN8# zJ#LU2E_dSI0i%yuGO8S(?w%|5R<|n(+VZO5iz2ZNlrWO63j+zEuHnS^MW)9)LzRmw z1%l|c;!Gs3S)fU@##(m50pXH>u&{trk=-EXRB_(pO=h?VN76|x+_7q8i3SNwq)y&R zwIM-g+pnY_uOK}%D|lGn7+R2tQ^V_B9N%u}a7#JOc0V;rIcs9|z3+&W;s(~#!WM2e z)Ixb12gzoz1}h0bxuq?Mr^sqc-TeEN$tQe;{pKA^2uQ;LF+>nS+bx8NF*%iO@xRs^ zj2yj#j}W&lA!H!2BrJ-gWS1wI3hLWygYVr)}R%WY|9cIi$mDWlzTAqLEjRQ=*EhDIr0dcXX8 zd+PW8Fu5hsIfnHIqH87Px zbGsinK{nXY+E%o-jP$B}sO6_MaV|?sTq?36VxWkB+|y~z{KspbHQ9fOv)-X+|8=sF z|E%RP&woD?OC-;I{1c3bhWKxmOu>!|UR%3u{MR}@GAx^M281krPDXh#mKmvEL3m(| z?8e7G{w?#9zbTgvz`1n*sSVAEItc$JXxjlLll#tD@p@-Mjfd5Oza4}^YU@L4I`94r zY?-K{IP^NfKc-@(gHJ!oFGR1%5P(#o76GdKkYK*>`YNfG9!E7I7qsetTO=*Xd`ToK z**0l$j`>Lcc^$`xa)+(e0AO=5tcsXcT6M=}+1Wj3hPM=%fM%= zEWRial8Z|OD=y+Vxx3il4C0x9{kz)jR+@cEiZXeC!2Wnhg}xlfc;Dyqb5XQ5=Nkyp z2Uv#zjmiNh9fC%K*n(OJ1t@;E2V3^ofiqJdN2YNk7unf5MR+Wpq?fbk1V4)I+%@=r z)IKMEA9I3tGp{}{wyzdLCkY!K%T)Z+PM9s`enN@vg~<$@HcCV~I50$=WY02{XbAi| zzO}G|#-S6km_$?L+9%@gF1$NAsq+wJK|kYmfDHWPq^9wYr~RLQgemem$4OF%IW!Y5 z9b=`y0Q@5U-_}&~R}8dUOf4(ku?@14Q-=pibV=5m!vrzqB7(F#6Eo%vh7(Mt0gs)_ z0ay`E`DmgI-hNiT<#?!YBspwh1cwefuV|3LSP{weKW zPV#_Qw8vIxQHKszBS7>l{KNCLKEkgmP!zyn#u(9b(y%Zh>pW|JQGUS@{W6`&Il+-d 
zLqH(p&}s*0c+a!sZ@&dIgvOD=86}4KkX+&RDO$0M5k!FKlF_1M+Fr+^NG1~_dX0rK zbc4Z96_c{kavmc!%vPsFBg3}Y1)-`Iaa7P+SSDgzx28!2VPbuO7qJ3WM29J%;kbRh zGh*R50ik@iU$ACgXF*PoY?)j~#A#z>ph%YagD%q^5VD~nB)oCuv6~6n;p;qF)bRf= zV{u4|XRnJmE5+oWllILG4+P8fzm88G{qKYQ+K{ zu`TBDNlheRTH|OQpPwURl2jK-l6f$$=<(@8nvHq;R(_NQ9V8&muJE(?B0^}>gS|`U zs~B2@t7ZB;3|d- zW+51xeZYMR8$QsS<(yHHiGoLhc4&x)basJ+bd&)AUhaRh>mi3E%NNr#90U2HkXRN4 z`U~rBVf%zfa==xW^3vt}L=@eItbkqa8!X|3Siz|}Hx$uP)R10`$p7>BYuFJQ{eRq% z1Zc_ne+tf?9Qn`D!N&e)EsyX2!;k-qn8SCL z1_1^Q>?8;}Ls;VyiAOX_K1#{GhTg{qj~)+f`4QsSK*|TvKzegQ|F&Qxt)xdks{$Yz z`h{VSV>%jdZd^0ke-zS^6chkyVk2M75Ir5XZ9{CvTRLYXNM&>?1b7!&4FNnrY-g$9 z4Zv?MWh9c}ZYq6GO+}T>f+#yK4do6Qo4t?n84MNP(-S(qTb#=`-+U9jBori)5b%7~ zF#zF-H{4dzopz(a^Q;mbHjqG9ZIqR0;Q)&g6OiTY3Wf?uVgZ7Z6ru=)8?r09sJd4Y zz&+%R@)pHWQPN(Q+G`}aiNXDW-75OaKKJT@mu9CnP0!|sa+sz?N@7ewS1h__y2#-) zomLCknSf8l{0$HfJ;I1mj6=m>{lGL0O!rBTxXE9}^F$CxE2AS+r9lojRiVP;ipL8w;GeYJm7aQKFgW*r>8XnXH&!6q0j1<;$WFI=#L8?Rr zBA!^39DhBZm?JxuS~GCOkVRn`>R5J>&SvuVSFRH52yADO&C(Ca zPGjNiE?vLK-Oo7J#4{0(*Fiku$U}-~%!?vL+3)}>J}~KFls|D^6uU@uMQ0$27hQ5| zptO3F%-DB|{2H}(v$j~N({9q$ff?*=W(+kB1_st46oYHb1+Fpj+D05@u0-3xhJ~vW zUADk*z*DB$VyIAwdH4fri8LeC5k+r$or8DLA&AfVef-to|G)F2^^9nN&-?Oo;9G0= zux)-*FoYCIp91Gyz3houH8-U4^DWtm{b-tRX#wRlH$?bz3OGlRpdbPk(tW+)$WTE` z4>%1KFc6!yv#jLrkLR6De$kH-Ed0>d&72B zEI{8O0RTd-H_dguXEH`+3|tGGQelkK;OK54vtQqOl3Cy7hMaQ(uye9lIt8Zy!+wMU*w%r5z}<>CCFGtVj#lGwROrPq*#QPJ z%s&2CQsnF!Ub_rm%<2>TlqGepMz1GJ`@k^{WLAST7^o1D(36IG{q~)ONMJ!DXwRH#dcf z0;javwg^BK=_cuT4Px6Ssb*?2>!Eg9ie4lGT4plD25Q@3h+`CJN|}KbZ2oYY_u3?3 zGKVA#b4lVJa45o1uhUyB&Nz@f9z%!X4fe!H--%QZd<~Jx}#eR7+Iik3_{~G%&_GJG8x8y;@=^zHYU|z`oOJy`bHordlwK zWVOCD5B4RWrS@NUpaQUj|35xH_VGVO^bP;NhNtf0zUAI!qYx&>s{*nBdGxq#T<|+W zFC+F6i;p|J2(NQu^p%)x8299Q`HRvqTrf=11n)(|7vCs~)oy8A(+s$)E)w!C1B+d) zm4;({p~_0DD|It|g_hHQ1$i~RwD3axuWj>|9CkcICC*j>NK5e(FVf)RAFnK6s0`G7 zsc7MnYUW^#pFt+4EN!tBpEN6bcag+?$fv!&yo_g7)k#D+Qf8&^a&_*` zbuMSox2#-4dCA*tos)IJt?CxAxlwuGDrDt>AzmKR0aGXsy6e_Cl=pzsG1QMxNL=1) 
zyV;tyJOB@28S|*@)fKjpqj!t#G#Y*nWv0ZEYD{{v+$1;(&01G^*f?!J3>RNY{8(o7Qn6KBr9W&SB)h@^aDMEK-ZnJ;> zYqBP@9;@%}%Cg59gnT{bQX0M?vj_^B`8c`Xjjc~jGu9dW=&9jY{kZwu;Ks~pfg0-{ z1gxnw|M{FoJ9mEN*0_HBJ}iK0&VN4@cjWsSo@Mqwr@a#w|MT#0bN*Y)1F*t09ni+Q zNIaiKv#JJ~Bl+lHf9OxsN_!*n(IQP>SoK)Q)_1|kL|4&2M>D~h0dLdq@IJ3g!-+qgZtsuHN958##`%U`pp%r|E z*?Pe_xxs2HuxyDElSh9bbeO4*6Wzb>`Xxg|8mtivM~7A$y;~%T63}~6X&%?5 z*zGwoD}(#zHs*dKgS<{kPBoP5L>hR+?SFQsbBR7UKc zj^4w}>$4U+vW)x_dCY!{FSWUg1U)0gviyhnA{!^FjPK|tl_P(svWyeznK;dJ8jWK~ zh@Tu=13g%w{b-xs5uTGVD;a{*s|;HU%aYhqn5(2>iIFk8l5w8EkH{F85q)P01S3N) zX!?b3tqI~yTG_FMf|)4?*;&Kz;(Ux8hH0i-6bzHCH1JguF(LVzts@(&6Ra+I(46Q9oj7g&3SX7|0 zY82VHxjKHSo+_J`;MeQ2F*!?h$MB5eskF(;I9hES%vxmiRjgMFlY=5yp;Z*aWW8>x z8)nt4p$e|*Wh3ED%9oPhMlXMc1}0ynF?}m5DlwlHRO<1rwY9R6y4YJ?v4`s1ps+-F zD&-!x={<*l8;^js&(i%LFOhF|09<8ZG`Hu+3 z10cQP36MkmBOrZ%Ga$b;9Rewea|+~i!DFD2phpEX9t2f%`y^Of0vrX+G)v3i!=OqN zJ`Jk!?Bk$H<(>y+eCI%@g5g9+Kdx~kRHex?Ay>^l6slNsDl|u{Iu=qOoC|prv{i36 zoD8j;<=usZWP=0K2Ek=lELXbpA6F|HMk4RzgD= zl#&|(B9ms-PS$p(ZJ}uBAsR z*k!RL;=!@XE5vfhp?yH@M}vc-icrk3gbT4g6bREXL$CwfC*tBNtYykzIJL^9K?HEo zMS}qDH&@_6G@xMt2Jr~XEe3_e>!#gMgz4={E^H!}H09&;j-ej!coLJq*z;tNnN=S_ z3Up@5oqeFi3B*Vh1kjQLCI`PH9|7A*RR7?AANE8|dj{P4Koe`n-G#`Vs7xVC1|=$_ zl4UVXN&?C%=b~H14+*$bCDWuZdLA((hgYB!TJFMhU?s1^gWX|TsRS_2%X5H^`jpH> zXS=P&xaP9OOlWg%t^6ENtBv-e7YKB<-Y70t+5M=J$?sG%xY~|K71V74M@CTlO$jqe zOu$h-PNE{trio8Fq%>*A-yZQ0>Qj%Q0rF*@_&rzwD~3mt33(*x@-UBxwjW-bWc~Qs z&rIs3%8V?{RLTTZQjKg#Mb)pD#-e9L@nGG&SRP3_h9t^?wO8%VIe0B!m$Oo+Qaq}s zQmS$yby8UvE0+fLqT6aMs_nRX2imVw$YrNDX4zSt*TKy7I+(#xD?7N2BIG~lQ+NM^ zJMVh%-+R3icm5wA9d7jh*78{QKiKJp_)qfV$gfyD8Hc2kEA`uz;Vl6X*Y<25viu^8 zO3bI+vP+IyA{^!1B$HFjkIf+})&yc3p4dfMvwRfKOa~ZH7yjJ8y3rCXc_#=<=KVT_`#XD+MI$ zOAq_a!%y@5&z-u8ZD^kr^h-5q0kl3vk2`bx0Soh| zX0(b=;h_o{HH=fta>*bSah8oxD5Yt5axp3fXcL6Wu}3B3I&@2^8;@MX^Q^Fzn@}l% zVTkSBu?lXxB{z*eF5MY=7`lv_@Q9-U;83u_SYtv01C0n+Rri-xkAUuPKoLP{{(h$} zyI0fv<@heV;&)O|tq~U0ELoL)&?>kd@vrGhe>KV>vP<@B7S3A@{-rnjz*EEjam~IU 
z|KHp2|Lb^`^uJ)aZZ8B}#{Zw39Ju(82dBLa|G$pM1p7q@TAfc%Y%b~I?N`m%@a>k-I7Ef5m|3R}kTqU+` zh@V7VZ&3T#Hok7J*c8gepxq!yDD!)K&WKSzu>e7=MJ6+jEz(%;5S_%r(QxqCMp9kJ zJV|jKUIS|DI%a_xV$8ybIaCVn*Xa^fWdd^J${H-D!A-_qbxjVow#q?%we&whm1Kb1 z0Jo|qxd6>Zu-lM*2Uc?AaIvf{Su)`1?kxF~%os3K&kD=%4w+x$iXe7a-q>j`hsSOy zX*guWO&1$Y*TiU-UJM>UX3)Rrc55|jwm|mnw%ge83XKievC0J=z{R?)s~b-r&CFAn z+e(&sqGS3II^31s7ib3cyS6r{UaJ^*T7t#@lcJ(gl3b=`m7v|rga?Jb2_|M<`{X3G zTu-f-lJSgy1scI`7bGyT8P)epRCH5_On2w?{zNR%`_E?{@#MmH(oe^jhpc_5tk8x&J*r+}QuE<+0_zrfxhhHexNAr7?T; z1wDD?&sifDPZl$nJ>=g{OdAnoDf|aqLeAq$G~oSJ{Cx~yZsO@nQ$6|EP7e(z7afc; zC?tE!jEf5WU%G974^PeffBNx`bU)x(GXGBw4&D2oqvOMk|F5+?=KO#9(Kz0cn7$e8S?9b^A>0Mums z>93}nft{U~d6hia*`ekkE0KYM(FZTf;7LBKQn7|zp$Qs$q?fW@l>es88&a!|$-_0d zU54f*#&Ym7emRg9Snwm-69-`t){UMR?vXpvXTkh@QK9_~dL)pf!Hy_06{k7k!Dl() z|BI4FX^EcQ%Ut{?8V=lOQ)Z0okLG#YPebg~RD3~Jx=5h1k|9Mabjf@I0ICUdBlki= z&X0~Df*7a$J`c13PD{~sX(N#Tolx&P!RSe9)jmNfa%n@vtlPPa}f(> z6UM*XEtFwITb2|uh2X-Pk^GIqRRqY;Gb(ec?&t+tqsrhvjj?SGb1$_-5q_ZDi1RiD=Wt|JEilponWc9H?c+ORM|aS_WF){BMW-9ay=Wdtvz=)hSC-&YY+rj}U5-E;p+53@KGw(^OKo-VGf)gt|$P!4{3G zOv9MbEAoBay7x1Q4y9$6TY+UZh_0EUXY43Og5#4x6|st?VW1+?1A}zEAO?+ez3}a~ z>-N6DCU&)}^zXx8!r$9dcm7|DX6abWBs2uOr6^#@`Tz9v(8d2cIo;o!|JU+Z=YO-) zO;Lao-VvMYv`7+qO+Rfh77U98NGdQL(eK%4F2AdSzR&0LMTOu>DogWwFi^VutP>gV z(yv7Zzy9io44h)6?7#_b*8-f87{vZ=W(4mYuqqZ2cU^IXa!#y)PYaA6kH;Oj0qnpr ztb^XKMfx28uDS0B2+V$vcO_}bmu;#N5#%cmlu}wC4&kN3QN!%!vDo9qpA_t4hQ_j^b!98gjZpfdK<)}CZoau7 zk}eHn5a&QPW5DatQZ*Py6Cgtj)X`a~1p4L2M*WS0lQu%t4S5lXo_jC*>0sxp2*8MIJ`tlt0dsGP_raDg6O3;<6G+ipvjl zYhx!tARWrAgxlqF9#GWqC12q^(8C)iUf1ogN!>ok42XG_rA} z($e(uInU9z>3p7NRM=QbdF{qBhNhS`V=P(*OQ&a*sDRE< zPgKTtGGwxp6EC3&55i0wV?`I^oCdWZ+R1eD7#%?Kb6ad|;CE4wx=lIC^+2wj)+B>P z5Esf6PFSSdUgZ}toTv9`P2B$P*+TSnoN<1FobAah&tccyzH#nOZ9)2(fy_dj>{Ywr zc%^!FqTzy^uf{|11v{25VmpkO$}_W!w$&i9spEMiwI5)1p{HBX$;1bUC1Efywh5IZ zBeiB~gA?o8e?ku%Jb)@lRvGJCE~zf;)TkOn{R@Si=XiyXP~*$KkfACi6Xzw@6vAZ! 
z)e5$l>I{}dceIc*W?+a?9)A~!TACvmr*Tz56yi>d7BI#q&OJFmEhZ@XloTZv*6Ut& zq67E?u@AeKa;h`RZifXh-Vz}fTwW(gdZUK8=JTQ_c=K=sMXa1v8R$Xt0z~cErC=%q zhr+wF;8BpN;!R}%(5VS>T>;8y%$4A)T8fcez8rUWn_hk6!JC=9^&lWRk+}=^AHm@v z-~(WiUe2OZOaYe)ri8~VQQi=^t>)4JkVe&?HJ3T>HlC>ph+Etd{QeL=13M|I;8k#&-5iT$Rw#T`Lt z4A)a?MW?B>gEQnJ=linmmN;OB1-I_VaY5uqV^XQyLJw|3ymyqR030~zbq%5ofx+bU z&pe6kh=ZmkP-<^_6pEwojVOe428FSr@Yi2ma^X! zcXWE{`TsmU*vNm@^7s(GEe*6w+8czcYiM!+=D7KZegJp!TCn&e1$xxB0)Hx`*2p8) z8lh=k=oF8=aGmI}R5if@m2@bjr1SVvQa2|X<6%-=Bnf?uhh&aWiZyBB%c3L{r(Hio zRfQ|rtgm8=iPSxltNTJ)Gq+Y`QShNH69RdeV2UJ5C}cveCHY-x#~_mM>gq8(QcB=e z@bnQ=UhfecnV^y1TN*Q1K%>eG3aDU>*I9t1tx0zVS(#YLL!JoMu~dpPD3*czXXPTV^20f3&JHqh_&F;duUCYGLw%!HWHr+=}NNnI02W+|IA zCsP-NKNTSi;O4~Yj^6gJ;pwaB>c71#qbmdtm0|l)%W|Py<$p?MRUBc&wsVK7KEj>- z7AZ1=v`gg%AM(kTRB{&0D~j4zzeS6S-#zvF2mHO;^B8M;7;i;fNx7Su^Ca*~-jw@4 z^z8Brl)6#ZER0TR=1Wr{h9=cD%TWl^&U;g7h9=cDOJkHV@4cxsrSEmkvKX`6dTT0Y zxgL(_iUu7VJjt2!$b>P68RS8j%i%4?ulNrqq7D>;T>R$*ijn<;7gMBtPvGZPYRvKn zClFIxF@K=cuwD`CkdO9p5@u@N(~M?|Fs((6Rk=; zQOx9%lFzlMU*7oFs)L(u)$tRk1`D^2B0U((=#d{x<%myx{mmK&W%M}sMJ9YK)rr3+ z+Db$00oO$H`23vh?dEtBMQkSV;hDo~cem83&V^ZoW<$&uTDFF`UbTEaux(^9VP!eF zBA~M;_Y!f|4N$eF0gyG&OB`+$dM%dAw$@u158C{leoWb6 zw7dFV^_p!X3o%<~pBn=g8mGl*C30HLjM&=`gmGgUiJ?7Z*(7 zByADNx?7MuTsKz~j8S2b1oF1CHRuKzm!kF|qhpt-5;KsOW99@`muu3VgR-oPE-$+} zsQAJ91b#I7Vhq5^ET>DL^BjP}Gvv%!E(0WsR)i#=j(hJ7E>8s3Uq5~Y3rzmr7%77!0l?qtNhje zyyf}Txhq|As$GMUxh2@Wwx)Tw_Lc)3U3@^_+%Elnjr79zk>l0r)_UjAy^i9!F626z z=Wdl){Tw%VF}`QXqyoh}vG3yuhu`=O2f3Yi1?BKtXs#>y^F2#}-7}PK824orjL+{YT{}K^5 z_kZhn_V#SeYu)dTe+~-{-}KTUc3#U(hxom5!STC!8vOs<4fx|f-3b4&2mXII_>Z6e zY3lzw0e}Ce<^ErM{SOiQWV8OS<8jSyBnYIVQ*Sn4oF;NN5(+1dNR( z=?EaUR6oGvSA(^+%Eb^t7s6x&Fs+H2jzx=wYE9HN!CDh}Dn>6F_S(kpgJ^irHh>CS zW-^p&x9zZD4^o6{5XQl-Y0?#tu!(jHrw@p?0+4zOSX_4_hXR<>q^BPMoCe=# zD+5krwI$-TsoJRXA|ZH{M}MT!FQb7`n*jx0HR=D|+4g^f{r~Xvbc6rAmZzcr?`r#B zw*McTh|YHQ|AUjw{=b&TQH8Ac{UX&tc@^QCzk7kOfQ}@y50Xpx2{a*DKG9KrmVLbRN1mW)|D2J+?fT0cn-?FmgQJ2YYB5~Hc7`U!P?(Z 
zVpQiZLlsCE$gnly3vP*S9NY=RX$#^B@FkcQ_x!qY-*eLUZK>7~EaH7zMep01X}WC! z9MCH5)5QL0B9^g0TT10C*mPR?i*7*c+6W$0E`Uh*60B;NRZ=p0vwABqO1hRSFiYW) z5hBuLjDw^#u*I4r9jRyX#zc&id@>QM@@YzH>+xzo6=B~X-)oiB)z~$tpvSGLWy#5v z7&ou##Kg7v11pPwh$ z*i!K1Ag#5jf&>MKDC(fnTSdXh@k&9Ad9361kFS--2CU-t4>|yAxc&4m5{CZg1Z;EB za^4(v*f*VMGD}IL&FotQ8&G2e>`!K_$-;UEkU)da(JV5xeyweQKTDNX{wbAa`KQ!^ zy}S8ev;HI6!ChGZE?NJN_7B|qzvF}bjsKsuJm&h3oo<}``(v8ydJAi_+tajzLX9h}OEJVe^S|A=4f?**N)-n@4(hEWisk?`d zVgwTlHi}H=2o(X8Wepgqw(>@!H_I0WW)wi!#1scXAtGLe8CSvtq(Tg)jK9G`%Ww@6 z=O-b?JUTKD&HAHaND zoUZb0ngGn^$3&D$j#ny^&`#j7>NCPlTla*dHf~QDuZ#Nizw?!m58J zib<^dO=*q+#!{@c7$lun`2tceluoLc$ODLr8A0PHvqt=a?XT@49WrgmABC$14V&d` zPFiqs>^mSKzsL~O2_4^*Ap@AbQCLtvGZ*u4 z7N3(|D5`?WScpg^nv~MIRF)c7ogsj<$Ddg`EX45u)#OF=Bp!DpXcWa-d4?O9x`V6lSh5pt*xt1TAM)xwlsbJu}c4? zIg-t@&Zma|xBYM2QU!1c|G$5D;`)C&IXd0of2`%Pbo_>SKKnk;ezElXK*VjR_Zc4! z=!5z+>;bIw@Zg~c{0(&gx4i8Cl$N*19sqC}vwxXacJ5Z@e;95w6P>4K{>$^l?UjH3 zY5Dv=IX-pI|A(jhoB6+v$C^Qc<#;k4TNCKAW_Di5$+@=^{ZgI*?>S5>@V@qd1clMg zo;i6hqx(mvT4hfE5>T7K${mi9;-d=Z z43zQR5Kxf;TUXgU!g-Td3=-%O9ytuD-{vMCl&KZcU;`j~+!+xZsE1^b_8u5;ux_%&=mF&kmA+ z2_T>w(tu2WGysY4$H!Z`!+3P#V4x1f^EyarlnZsyj3}QRH`$@Prq*wIo#>zw9lndi zoQd9|!>A0FdTj}||L2Y!EwNa*B!=age5~E%B1ZpRKs9N`o8H|_POPXeRVa&3NBF1(cUP*b*8?~6cYkk8NYQt^B`79p6TmjrH z^dAW86p6+@Bnd4i0Q4>MmO#A|fEy=qWsw8gumrC6=eZc7$!uaRKvWd~7N1l}Cf)X; zkdVA%561Zbh?FyU?}FrQ!)H{`#ko6 zwrH$e4V)KfCT{<1;RI>x8S;tI#7wtZp_ws3h%$;!t4K?lw86%}i<|CFt}9*3V?B-f zbqrVH3>cq`tt1*&If=*JEwu$MnZ|NW;uXqBcWG07**2Cf9SavPwU05-G|Hq^oZ#{` zq4`gQ!lu01ax;|cj7l~^LYYQdY8*l_nxIpyu}{uH&Z_V&GP|kuE<_kIDpr<bZ=+O=b5oSi8ezatY9!7$1#vH{`mJKL#IaXUUrkqUl=zlw*PvTTK zF-I0ggwg*mk2jK?Ydkmr=WXJ~3@ zsb{~`e}!w0`mZnZ%J}Fj(AyOVo0K?}Y_KkY>e{xfR=sOJsRr;(VVyQ;h(OKI5CL0a zsj019SJSq@>uRlq?+%&T$*F-? 
zOFo9HI6FBuzm?id06IgL0fSw;Wdo)+Zm)D|`ouEGar>*uHitS)#xMf)tJ{T6WC+x+ zp-#QF;j>EW7(c6UHh^~f{@MjmI;Rv0bc^8eaqS`~ol^t_x<$|!vUA zZdL~#*SD^S4}S1mZ>u!`2y#wI5QOX43TD6DtiGsKbN7|~o;iS;n&LFSel>wHxLGw( zyXWq!`6uR7YO0AV&-JUBl{7QFl$Em$DZ++%eZb@A1;JZ06d zV7}8VW%)B?ceJy(s#3V)hpQrK;6cqj;^h-H9R%N2E=Ff;7vcMHKg(xhTMyt#S>~fu z`a_p@d%K|Z@awNpYY2Zwz#FzOKpYz&U|>c3_FIT3{>q4;Z`#HHQ;C4BR6^wp0pJ{7 zVlQXE)27F7#tZ$}-5qYm8+^>@27t#ocyqxQhk6QLbF8C#V> zFJ?%cPAEzEa%g z3iYL`buFFJKbS+Ld@1OD=?5( zlLpmj3qK*M-{XuisGdZKP=`02dJInWz%e6;F+SCU$NVCCWU2-bx`4&E)&vj8HFZoQF>Cuz=D?J0xoo{*u~Z}Nw1bNL%Pl9h34sML7ttswSwC&&*d3~YkC4)jzA z4Vz}cqzMB%1U24PbEdjUQl3YeBdI~(R`W|UE449YU-WI(r#3##Q^oew#B&vas(tT7% z!tkNbvTx)fyTMIlWv2y=4h({Mnm!B4#Ss4a)$~rlVS21Lcrs7{2DCk;$Dw}Hl=fJ> zt_uY>H)9&yHY;Q9-8L&dhgQys(>AYw>?9)C^@hKQmc42_{v!VNo84|!xEc{gnB8Mm zbX`#+u=2v1Yh}8s@H|Zfyi-t9p@Rz5;x#*fIemS4lqsl=?m)#B4UGH< z32)xX0**P{5$U!PZhC)*t<=i*7^7J^$~pt(uvbaB~^US%Hqdm@2{K-L^UI zyneMjr@OvvISa2y z-5T&8PY#cdUHiY&;|>1vS{~2-4@}sAc>@i4DOhKRR57UQlK`I$`7p$?0sO}J@Dg7! zKLzO}*sZF_XWj4PDw*cRRX`r^MiCn!i$0aHAJ_;!dIT6(j~_pFK%?a`vV~wUU=?hC zN4VM6qkcOC#?{(Yvh*L)Pl<&QM56Ba1G~FbCKOcLG_f5KmKG+JwjhEm%L}p}4zDcBk+yhZZe2*xM`gK1eal64h*HqkR{n@LsXESb%q zI?|xQHiURi5J`w`Gz4=Wu%oEZ%BPG*0Pcu@I8LXV)JQUy7&=LM`q5kT&}>jcpB$4k z-H0(qi1EM`8>TWOw$-HNa5W?8=@NVsIQ1;jd0eEkE3#FKXU0d-~n5+#&B^X zlEsC{1~Ar2MkvbXX;l$S5?P4Gh+GCbJf$>p%VN=w@hrNCuaF?N=oy`Xp_gb@p0H!X z%vBrnbhjvegLxjkay+sInJ9;tOB>|@8+4(CCK^MD1!}2XP?AlnGq{Nd9MkWvz}gO! 
z*VKCSM=@e&fC8G9&$CxmKKdYnI8qrJb%3|^agmBTqr@p+GIp4NR$cimuXtH_yB#Y2 zr$V|Z7kw=fW(2IKN#gYyhJJI$@r5Au-fFc#^WB7bK^7@b%;kJJj;nm3qX4NuXCyBY$ z(IUnE3y3F`woNjs<#8S z-P#Kt*Vl%f9RQD(5ZYOu?IMabeE>^$0#4l_XyC;O*HEOwAtah$ht;)q zA{o+$z8FBuO2sbN0#qXpVv}WijcjWvNfpZAa24c@1&k{oM1@_)yd>9%E`jKFc9kg#iIrhQ2wK9TeVum+KD}tIL zuwF>WV8y7=5PM+8QVoH9VG4f5#`0CH4K;?Le1^p#2FI9FbQBUYAer-IX3a24K7<|` zbSW9aOd*;VW+ijM0Xk00Diw?gjHSxJksc1Ma%JbldbSQXBjYO@8LF$UF*NGC*UEkP4Pyo%zZyG0b~p9)>xN$jSI6k{iE7V&fN-@egCUBU`H8Ne?5B zq9|8#?8+g9$!#k`9T0)vq)SUQc(I&Y#Cj;37 zR4q9?l|0zlp)pKc03U&#XVrj*FR+4W z%&-71NQ|(^@&RNx!Kn;SIjI=J#pvuR8k4Xc#wpoQrQ#Ha-e(HI4Z-1f#FsdtX(w=R z(~)kcAPsusg(D)9N5W|`h8yvh_@(LG<#{F;U;w9O`DqucXZg?(Q(PfHQaL5x1<6C2 z5B)AXx%8JtBWC^bX|=2b81~8A zHU-zMezUdeR|7mX{|6j{P!Y=M} z>Cc+(1W@2yB)hakfTo1<&O8CMDCwkwqL;>Xsa7J~J(oF?qh;2$XeHfg7vsJF7cwS9 zl@j@NIiVeeEP8iN`;*$sYczl<-+>vT>7Rr@0!sM^DyDlFp2BtxmfY2I?4$& ziVV+-{Fh|JuD4N)E|1ZU)+$RbsfZU>FSsmZE)1<^3pYIyVMdOAJiMTI5nl;vTlf#& z5KipJ*iUBLC;q0P*zuFCReP~50{sZLGka=4*fOE3eEW-)53cP zbhyVIr6yU9Vpx>M@+FcC

6og5rqT{SL61U}1kvwvun@@7tNxnMS z(I>g{WOtzKv$NStM+oYOa61XE6C06GLJ1<&&Wp(7`2dxX#F3T zOtc54Op&KxqXeiD)(bn7rYpSCXY8;!%cWZiMMQAG%P>_`wi1!YtxLvHMU!hj}sP!M%eetkd6k_Op zX*T#D_VLeuyer(&=a>25x$j#3zmCLz8~QqGnY6=}ykhaRvzR9)A zVZ*<1X**(%%H-pMb;3s4hd60~p)mmTJ8(Yv#IemrTijxpRv?^0WwNWgvY`q!0cyoj zBl${}sQqWC7s4QoE+a7{q(Q&Z)FVYdBQ8b56n%(C-PJPZgE%qau$;HSYKYE^bK~+H zy*ZPD6(1lRUq&{8sxUaUdnI8G)pSaJETItDn92c+wl zvHb82kC=C@Z?H5m-oYp$UsTeUHK&S8*H^a8AmE#O*UJ_LSW-yrd{T@o&(d>C5Gbny zR@#*u)F5q?2H+QBR3+ge^^j)+l9I>OGP`WgO*^!;Tq&LzLdZyZS?2{@$qnhZVJI>u zbO|09ptofjSkFlraUKn2N`j>xgYq1B2i`?RMI3=(9Dh%)B!5l{ID|#oU3e_**Aw#i zobj|tO9UzJu(}cIZOV_ac83g3pxMR;(Ui0oa`p+0`;%EQt5S0lXn0P_dg_hS&W8|} z>NbR`&EZNO2_C|nX|5O>^?Z)1@rHZD%E{E>>M_~6f`j;c^+~#8Oj5a^t653;sj0aW zo|?2)UJZ8Tez3~y0~(jiJkHkGUamjQ2u}mro%jN0iV?gEz;ao)1E?7t{gff}3$Obl zrJ#nEZb?Eitwkk`P0=uvz&?^_mgnasjXbOr+J4ckMYua~A!()Eq)RK)liq-CNM0pL zbXHa8<%7MwsmNt9?25f|PuFq;j25M z-TbzJR#r*Rpv0vVG*`tAln?pNRxr4fJ|5117cI`ksyZ*FY!>#3c}Jg6;*$IrfqBZC zfk{URd|GJ)v}u(V+hqDdY$~M{S6X0|6&vnns;wmI!%fZ=^~Va6R6I=BX9?qIpy&X& zHQ)r#@l6Gb0C`?ecTaky|Im$EA7CxVKX6`b*Q!~oZbe)Im66i|G3>IE{EYfxX7YwD`tP{rZo5&RXz)PYzGq^?(21bhG}iZAB^x<2-`{@WCCDF57gVCdOK2Y#|=Wnp!x5eNynTU(}sI_?;d`+WiOvV zh(>j!BK%1@4v|31fQ^%gky2Dex!L%p(g>GDYg#~N9g$?=BB60Mg4VX>V1OVeT5=AD zC3@3L{5QZ@xNE85vK?0zzbpdd9jX_C+mQyI+!WqX^No$V8UfbC54X?DB1)|jL%&&ZfH-$?5zysX|EGFt58oE zg#UQ~Ho}uh0=TAPjqi3Hm9|NqM}2v;l|;Rc0q5l96ID1g53pJbI(YsWPpojYUrc*4 zaqR=GT9f!-rNoNrn-%gh)T6iH$|5D*#*ygz9ErU|qo`dn+K*>Zl8tkg8A^LQxr9!DYm@UvnH_}? 
znkUv_Ur#4xo_wT{dIjkQZ%123rUfZd$m|qon&OK3U$)&mln|P-p`UgheKAu+DD7dc zMWp`ZR*teVD@Y-+ejU2hC~-ICNIz|AObcA+lZMHYYlXlyx2@g3m(}3n2Vm%n;aRs^iFCZf!z&u;aYVV zS}Dy+=p#>kZDg`Cf&q%v`4P5617Vf-L~Aef09fPEC^_eoXF;g;H`pjw+2z1cy7h0c zG64WBz8tgx7Z+a--Ka0~>I}7gjF=pTQPtY#?xDDYI&Htg9@?~h*0=9jktFMBVPrYI zYK|K!3V+7o+R{H`&Xl>~H-4tmO%-*U00)vH%&%7gSJDL6++uP-NNCuwSeXk0E2@9kgF48i{%T`z%Sz zS<1QhSyEtsw8{a!84^ZMUVH|Xqpw}HC?tod5oCbQ_iCEnpGB9*lY-C%sG&}jTvp;d zLV}Pte2#h7`UcygZAQe!oIPu7!^qp#+DGC51%pzGPO1f5eV+$rKp zVHe^1ceqQ)5;HOELgNI46v*m|1eu`QjA2=zK}7{U1ATBeJq{(8a7Y6Y4Bv*2^8x6+ z$E>il>czBH3mWIq72Kp=29Cp%=qhy1)k+9j;vgkTaGO6tp~e&)U}1gZxv13D;O@|+ zsjHVCl^MT`YumcQ0m+i8y$5j8n8ES`>3}SznbLX#!tPwrwtWyJ9tErJf4}!jiI0Q3Dl41fm)l2u@%< zj&-y*QA^YgN)klFel35E>DSQuHdNoDcPe+*r$xWK6Z8p`8`*Ei*ckB`n-JfJVhet! zd%_z2N;Zcxz!uHn@Mp;!vX?=@+;@Rh$dJOdOj(>Eq8mE%P>JO*KU*q?g!n=^6aI{l zD=*H#F94q|Vc7=I7DDo7zBbW!8L_>iN5fe@`e4nQOpasOn*-pYh%M^C#2Y1~%m&|4 zat5%2wC3eynSxpau`0~((6tURG2mIrV!w^ccP7@lERidsDPL>J<80fI>M9s;%tJyR z%0eu}Ls?E=Et_^?)hRNyk74L$!NSJbEIuzu+GyO6kPf%fmGqALxTJ|T>~6u)!x~j{ zb3>TJS{%#WfFgsMhu~5xtWG2EmnxSv=V&?R*7Kv;GeYH*P5gT%!MTbbRXi z|2{p~-{AkOgl67c*d@}NFpmiAISOlK$^ zsn;jsUCdJ|(ou=Q)*nF^SKi{Vlx#9Ebaum*RIAga_-R>WM>Z=rV+o4FvEvt&8X7fr ztn4VO_kfx3URT0Y_OS*4(_3m+4$?T0GSXsAX-tz5PBYw&xzf6FT@pD%ow$WZT_tcEu;v6bh0 z)ME~2VyU{weC$&-XVMpE^-FOq4`yTOsK{*WGcji4o`|Ix(XH$?%{jnp3IA-%DY1eL zC*{W0Vs$p0O+{&oGRmsasPoD1!>IF#W7PSi=juatozd)=b{hIyoSqD}Xv*OXiCvq= z%p6(EEIP>~(>7YPER%tvWmcJ(6CKFu_aug6#Cu67xlCAr%6jumXr=PHwv>+_SF!9f$ zXeR!-e_xhQNf0HWe3QO=$bl;1FO={Xk?@1~$4nS8DQ~j-_uoMVqFQf$dDoVC{I;e4 zatlxbi16FPt>1pvD9QEn-!$mDod95&{pYc#|8sh@f3%tZ>v$S$IOS=auSFb-L$PI+ z={%i@#={%KH`*{$utL&=uZ<>?<2*5Zx)$JTwFsI>YGXxl-unekF(F${i&3+vT6Xkl zRx=>G$ks>vr=YU}zJF{}kf|N;Q)mi3{2mr;!Bk{4#dDugDy=qb_ahYuazhVzi#W((*^?`+rk0 zK;Hi7k$df8!TdmLa`a~X0(l8gcdH<(=`>5hn}DyB%Pr_sc(i*beU`0?Yf zV%u1v|9K}aAeQleho1k3;Z2 zyd)MK?26-lmBWH5_>S_di-N}%<=+wfnaD)&cx~p;{$4q=&9iwn&*s@Yn`iTE gp3Sp)HqYkSJez0pY@W@td2avwKWcGkECA2~014F$H~;_u literal 0 HcmV?d00001 diff --git 
a/app/fixtures/registry/package-sets/0.0.1.json b/app/fixtures/registry/package-sets/0.0.1.json index cd4470bcc..cc82ad7d4 100644 --- a/app/fixtures/registry/package-sets/0.0.1.json +++ b/app/fixtures/registry/package-sets/0.0.1.json @@ -1,6 +1,6 @@ { "version": "0.0.1", - "compiler": "0.15.9", + "compiler": "0.15.10", "published": "2024-01-01", "packages": { "prelude": "6.0.1" diff --git a/nix/test/config.nix b/nix/test/config.nix index 26eb9d7f8..07917444f 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -822,7 +822,7 @@ let builtins.toJSON { name = "effect"; ref = "v4.0.0"; - compiler = "0.15.9"; + compiler = "0.15.10"; location = { githubOwner = "purescript"; githubRepo = "purescript-effect";