From 20d6b216f11cfaa5e2d4f98da38c4736e4bcdd0e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Sat, 14 Sep 2024 14:07:21 -0400 Subject: [PATCH 01/19] Update database schemas and add job executor loop --- app/src/App/Effect/Db.purs | 130 +++-- app/src/App/Effect/Log.purs | 2 +- app/src/App/SQLite.js | 173 +++++-- app/src/App/SQLite.purs | 458 +++++++++++++----- app/src/App/Server.purs | 208 +++++--- .../20240914170550_delete_jobs_logs_table.sql | 22 + ...20240914171030_create_job_queue_tables.sql | 56 +++ db/schema.sql | 52 +- lib/src/API/V1.purs | 2 - lib/src/Operation.purs | 21 + 10 files changed, 875 insertions(+), 249 deletions(-) create mode 100644 db/migrations/20240914170550_delete_jobs_logs_table.sql create mode 100644 db/migrations/20240914171030_create_job_queue_tables.sql diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index c2c6dc67c..142149bc0 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -8,10 +8,12 @@ import Data.String as String import Registry.API.V1 (JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (JobResult, NewJob, SQLite) +import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except -- We could separate these by database if it grows too large. Also, for now these -- simply lift their Effect-based equivalents in the SQLite module, but ideally @@ -21,13 +23,20 @@ import Run as Run -- Also, this does not currently include setup and teardown (those are handled -- outside the effect), but we may wish to add those in the future if they'll -- be part of app code we want to test. + data Db a - = InsertLog LogLine a + = InsertPackageJob InsertPackageJob a + | InsertMatrixJob InsertMatrixJob a + | InsertPackageSetJob InsertPackageSetJob a + | FinishJob FinishJob a + | StartJob StartJob a + | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) + | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) + | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) + | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | CreateJob NewJob a - | FinishJob JobResult a - | SelectJob JobId (Either String SQLite.Job -> a) - | RunningJobForPackage PackageName (Either String SQLite.Job -> a) + | DeleteIncompleteJobs a derive instance Functor Db @@ -39,28 +48,51 @@ _db = Proxy -- | Insert a new log line into the database. insertLog :: forall r. LogLine -> Run (DB + r) Unit -insertLog log = Run.lift _db (InsertLog log unit) +insertLog log = Run.lift _db (InsertLogLine log unit) --- | Select all logs for a given job, filtered by loglevel and a time cutoff. +-- | Select all logs for a given job, filtered by loglevel. selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) --- | Create a new job in the database. -createJob :: forall r. NewJob -> Run (DB + r) Unit -createJob newJob = Run.lift _db (CreateJob newJob unit) - -- | Set a job in the database to the 'finished' state. -finishJob :: forall r. 
JobResult -> Run (DB + r) Unit -finishJob jobResult = Run.lift _db (FinishJob jobResult unit) +finishJob :: forall r. FinishJob -> Run (DB + r) Unit +finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJob :: forall r. JobId -> Run (DB + r) (Either String SQLite.Job) -selectJob jobId = Run.lift _db (SelectJob jobId identity) +selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) +selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow + +-- | Insert a new package job into the database. +insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit +insertPackageJob job = Run.lift _db (InsertPackageJob job unit) + +-- | Insert a new matrix job into the database. +insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit +insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit) + +-- | Insert a new package set job into the database. +insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit +insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit) + +-- | Start a job in the database. +startJob :: forall r. StartJob -> Run (DB + r) Unit +startJob job = Run.lift _db (StartJob job unit) --- | Select a job by package name from the database, failing if there is no --- | current job available for that package name. -runningJobForPackage :: forall r. PackageName -> Run (DB + r) (Either String SQLite.Job) -runningJobForPackage name = Run.lift _db (RunningJobForPackage name identity) +-- | Select the next package job from the database. +selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) +selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow + +-- | Select the next matrix job from the database. +selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) +selectNextMatrixJob = Run.lift _db (SelectNextMatrixJob identity) >>= Except.rethrow + +-- | Select the next package set job from the database. +selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageSetJobDetails) +selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow + +-- | Delete all incomplete jobs from the database. +deleteIncompleteJobs :: forall r. Run (DB + r) Unit +deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -70,28 +102,52 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. 
SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertLog log next -> do - Run.liftEffect $ SQLite.insertLog env.db log + InsertPackageJob job next -> do + Run.liftEffect $ SQLite.insertPackageJob env.db job pure next - SelectLogsByJob jobId logLevel since reply -> do - logs <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since - unless (Array.null logs.fail) do - Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" logs.fail - pure $ reply logs.success + InsertMatrixJob job next -> do + Run.liftEffect $ SQLite.insertMatrixJob env.db job + pure next - CreateJob newJob next -> do - Run.liftEffect $ SQLite.createJob env.db newJob + InsertPackageSetJob job next -> do + Run.liftEffect $ SQLite.insertPackageSetJob env.db job pure next - FinishJob jobResult next -> do - Run.liftEffect $ SQLite.finishJob env.db jobResult + FinishJob job next -> do + Run.liftEffect $ SQLite.finishJob env.db job pure next - SelectJob jobId reply -> do - job <- Run.liftEffect $ SQLite.selectJob env.db jobId - pure $ reply job + StartJob job next -> do + Run.liftEffect $ SQLite.startJob env.db job + pure next + + SelectJobInfo jobId reply -> do + result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + pure $ reply result + + SelectNextPackageJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + pure $ reply result + + SelectNextMatrixJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextMatrixJob env.db + pure $ reply result + + SelectNextPackageSetJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPackageSetJob env.db + pure $ reply result - RunningJobForPackage name reply -> do - job <- Run.liftEffect $ SQLite.runningJobForPackage env.db name - pure $ reply job + InsertLogLine log next -> do + Run.liftEffect $ SQLite.insertLogLine env.db log + pure next + + SelectLogsByJob jobId logLevel since reply -> do + { fail, success } <- Run.liftEffect $ SQLite.selectLogsByJob env.db jobId logLevel since + unless (Array.null fail) do + Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail + pure $ reply success + + DeleteIncompleteJobs next -> do + Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + pure next diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index 6fc4b31b6..a1cb72c0a 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -134,5 +134,5 @@ handleDb env = case _ of let msg = Dodo.print Dodo.plainText Dodo.twoSpaces (toLog message) row = { timestamp, level, jobId: env.job, message: msg } - Run.liftEffect $ SQLite.insertLog env.db row + Run.liftEffect $ SQLite.insertLogLine env.db row pure next diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 8158695fc..fa9a8b539 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -1,5 +1,11 @@ import Database from "better-sqlite3"; +const JOB_INFO_TABLE = 'job_info' +const LOGS_TABLE = 'logs' +const PACKAGE_JOBS_TABLE = 'package_jobs'; +const MATRIX_JOBS_TABLE = 'matrix_jobs'; +const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; + export const connectImpl = (path, logger) => { logger("Connecting to database at " + path); let db = new Database(path, { @@ -11,49 +17,152 @@ export const connectImpl = (path, logger) => { return db; }; -export const insertLogImpl = (db, logLine) => { - db.prepare( - "INSERT INTO logs (jobId, level, message, timestamp) VALUES (@jobId, @level, @message, @timestamp)" - ).run(logLine); +export const selectJobInfoImpl = (db, jobId) => { + const stmt = 
db.prepare(`
+    SELECT * FROM ${JOB_INFO_TABLE}
+    WHERE jobId = ? LIMIT 1
+  `);
+  return stmt.get(jobId);
+}
+
+// A generic helper function for inserting a new package, matrix, or package set
+// job. Not exported because this should always be done as part of a more general
+// job insertion. A job is expected to always include a 'jobId' and 'createdAt'
+// field, though other fields will be required depending on the job.
+const _insertJob = (db, table, columns, job) => {
+  const requiredFields = Array.from(new Set(['jobId', 'createdAt', ...columns]));
+  const missingFields = requiredFields.filter(field => !(field in job));
+  const extraFields = Object.keys(job).filter(field => !requiredFields.includes(field));
+
+  if (missingFields.length > 0) {
+    throw new Error(`Missing required fields for insertion: ${missingFields.join(', ')}`);
+  }
+
+  if (extraFields.length > 0) {
+    throw new Error(`Unexpected extra fields for insertion: ${extraFields.join(', ')}`);
+  }
+
+  const insertInfo = db.prepare(`
+    INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success)
+    VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success)
+  `);
+
+  const insertJob = db.prepare(`
+    INSERT INTO ${table} (${columns.join(', ')})
+    VALUES (${columns.map(col => `@${col}`).join(', ')})
+  `);
+
+  const insert = db.transaction((job) => {
+    insertInfo.run({
+      jobId: job.jobId,
+      createdAt: job.createdAt,
+      startedAt: null,
+      finishedAt: null,
+      success: 0
+    });
+    insertJob.run(job);
+  });
+
+  return insert(job);
+};
+
+export const insertPackageJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ]
+  return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job);
 };
 
-export const selectLogsByJobImpl = (db, jobId, logLevel) => {
-  const row = db
-    .prepare(
-      "SELECT * FROM logs WHERE jobId = ? AND level >= ? ORDER BY timestamp ASC"
-    )
-    .all(jobId, logLevel);
-  return row;
+export const insertMatrixJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ]
+  return _insertJob(db, MATRIX_JOBS_TABLE, columns, job);
 };
 
-export const createJobImpl = (db, job) => {
-  db.prepare(
-    "INSERT INTO jobs (jobId, jobType, createdAt, packageName, ref) VALUES (@jobId, @jobType, @createdAt, @packageName, @ref)"
-  ).run(job);
+export const insertPackageSetJobImpl = (db, job) => {
+  const columns = [ 'jobId', 'payload' ]
+  return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const finishJobImpl = (db, result) => {
-  db.prepare(
-    "UPDATE jobs SET success = @success, finishedAt = @finishedAt WHERE jobId = @jobId"
-  ).run(result);
+export const selectNextPackageJobImpl = (db) => {
+  const stmt = db.prepare(`
+    SELECT job.*, info.createdAt, info.startedAt
+    FROM ${PACKAGE_JOBS_TABLE} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+    WHERE info.finishedAt IS NULL
+    ORDER BY info.createdAt DESC
+    LIMIT 1
+  `);
+  return stmt.get();
 };
 
-export const selectJobImpl = (db, jobId) => {
-  const row = db
-    .prepare("SELECT * FROM jobs WHERE jobId = ? 
LIMIT 1") - .get(jobId); - return row; +export const selectNextMatrixJobImpl = (db) => { + const stmt = db.prepare(` + SELECT job.*, info.createdAt, info.startedAt + FROM ${MATRIX_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.finishedAt IS NULL + ORDER BY info.createdAt DESC + LIMIT 1 + `); + return stmt.get(); }; -export const runningJobForPackageImpl = (db, packageName) => { - const row = db - .prepare( - "SELECT * FROM jobs WHERE finishedAt IS NULL AND packageName = ? ORDER BY createdAt ASC LIMIT 1" - ) - .get(packageName); - return row; +export const selectNextPackageSetJobImpl = (db) => { + const stmt = db.prepare(` + SELECT job.*, info.createdAt, info.startedAt + FROM ${PACKAGE_SET_JOBS_TABLE} job + JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId + WHERE info.finishedAt IS NULL + ORDER BY info.createdAt DESC + LIMIT 1 + `); + return stmt.get(); }; +export const startJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET startedAt = @startedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + +export const finishJobImpl = (db, args) => { + const stmt = db.prepare(` + UPDATE ${JOB_INFO_TABLE} + SET success = @success, finishedAt = @finishedAt + WHERE jobId = @jobId + `); + return stmt.run(args); +} + export const deleteIncompleteJobsImpl = (db) => { - db.prepare("DELETE FROM jobs WHERE finishedAt IS NULL").run(); + const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); + return stmt.run(); +}; + +export const insertLogLineImpl = (db, logLine) => { + const stmt = db.prepare(` + INSERT INTO ${LOGS_TABLE} (jobId, level, message, timestamp) + VALUES (@jobId, @level, @message, @timestamp) + `); + return stmt.run(logLine); +}; + +export const selectLogsByJobImpl = (db, jobId, logLevel, since) => { + let query = ` + SELECT * FROM ${LOGS_TABLE} + WHERE jobId = ? AND level >= ? + `; + + const params = [jobId, logLevel]; + + if (since !== null) { + query += ' AND timestamp >= ?'; + params.push(since); + } + + query += ' ORDER BY timestamp ASC'; + + const stmt = db.prepare(query); + return stmt.all(...params); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b3683e84e..8c117fda7 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -1,184 +1,426 @@ +-- | Bindings for the specific SQL queries we emit to the SQLite database. Use the +-- | Registry.App.Effect.Db module in production code instead of this module; +-- | the bindings here are still quite low-level and simply exist to provide a +-- | nicer interface with PureScript types for higher-level modules to use. + +-- TOMORROW: +-- +-- * Add the job executor to server startup +-- * Move the various job details to the API.V1 module since it'll be returned by the UI +-- * Update the router to just create a job when received, and on lookup to return relevant details from the db +-- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the +-- same authentication requirements as for GitHub today. +-- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to +-- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to +-- do the whole toposort thing. 
+-- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn +-- +-- LATER +-- * Update tests that refer to the DB effect +-- * Adjust the integration test(s) to verify we're getting enforced concurrency control +-- * Update the GitHub issue module so it only submits a request to the registry and returns +-- a job id, rather than actually running the fns directly. Poll for a result still and +-- comment when the job completes. +-- +-- FOLLOWUP +-- * Punt on the squash commit until later. module Registry.App.SQLite - ( Job - , JobLogs - , JobResult - , NewJob - , SQLite + ( SQLite + , ConnectOptions , connect - , createJob - , deleteIncompleteJobs + , JobInfo + , selectJobInfo + , InsertPackageJob + , insertPackageJob + , InsertMatrixJob + , insertMatrixJob + , InsertPackageSetJob + , insertPackageSetJob + , FinishJob , finishJob - , insertLog - , runningJobForPackage - , selectJob + , StartJob + , startJob + , deleteIncompleteJobs + , insertLogLine , selectLogsByJob + , PackageJobDetails + , selectNextPackageJob + , MatrixJobDetails + , selectNextMatrixJob + , PackageSetJobDetails + , selectNextPackageSetJob ) where import Registry.App.Prelude -import Data.Array as Array +import Codec.JSON.DecodeError as JSON.DecodeError import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime -import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn3) +import Data.Nullable as Nullable +import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) import Registry.API.V1 as API.V1 +import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.Version as Version +-- | An active database connection acquired with `connect` data SQLite foreign import connectImpl :: EffectFn2 FilePath (EffectFn1 String Unit) SQLite -foreign import insertLogImpl :: EffectFn2 SQLite JSLogLine Unit - -foreign import selectLogsByJobImpl :: EffectFn3 SQLite String Int (Array JSLogLine) +type ConnectOptions = + { database :: FilePath + , logger :: String -> Effect Unit + } -foreign import createJobImpl :: EffectFn2 SQLite JSNewJob Unit +-- Connect to the indicated SQLite database +connect :: ConnectOptions -> Effect SQLite +connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) -foreign import finishJobImpl :: EffectFn2 SQLite JSJobResult Unit +-- | Metadata about a particular package, package set, or matrix job. 
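+-- | Mirrors a row of the `job_info` table: `success` is stored there as an
+-- | INTEGER (0 or 1) and defaults to false until `finishJob` records a result,
+-- | so a job that has been queued but not yet picked up has both
+-- | `startedAt = Nothing` and `finishedAt = Nothing`.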
+type JobInfo = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + } -foreign import selectJobImpl :: EffectFn2 SQLite String (Nullable JSJob) +type JSJobInfo = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + } -foreign import runningJobForPackageImpl :: EffectFn2 SQLite String (Nullable JSJob) +jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + isSuccess <- case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: isSuccess + } + +foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) + +selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +selectJobInfo db (JobId jobId) = do + maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId + pure $ traverse jobInfoFromJSRep maybeJobInfo + +type FinishJob = + { jobId :: JobId + , success :: Boolean + , finishedAt :: DateTime + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +type JSFinishJob = + { jobId :: String + , success :: Int + , finishedAt :: String + } -type ConnectOptions = - { database :: FilePath - , logger :: String -> Effect Unit +finishJobToJSRep :: FinishJob -> JSFinishJob +finishJobToJSRep { jobId, success, finishedAt } = + { jobId: un JobId jobId + , success: if success then 1 else 0 + , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } -connect :: ConnectOptions -> Effect SQLite -connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) +foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -type JSLogLine = - { level :: Int - , message :: String - , timestamp :: String - , jobId :: String +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime } -jsLogLineToLogLine :: JSLogLine -> Either String LogLine -jsLogLineToLogLine { level: rawLevel, message, timestamp: rawTimestamp, jobId } = case API.V1.logLevelFromPriority rawLevel, DateTime.unformat Internal.Format.iso8601DateTime rawTimestamp of - Left err, _ -> Left err - _, Left err -> Left $ "Invalid timestamp " <> show rawTimestamp <> ": " <> err - Right level, Right timestamp -> Right { level, message, jobId: JobId jobId, timestamp } +type JSStartJob = + { jobId :: String + , startedAt :: String + } -logLineToJSLogLine :: LogLine -> JSLogLine -logLineToJSLogLine { level, message, timestamp, jobId: JobId jobId } = - { level: API.V1.logLevelToPriority level - , message - , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp - , jobId +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt } -insertLog :: SQLite -> LogLine -> Effect Unit -insertLog db = 
Uncurried.runEffectFn2 insertLogImpl db <<< logLineToJSLogLine +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit -type JobLogs = { fail :: Array String, success :: Array LogLine } +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep -selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect JobLogs -selectLogsByJob db (JobId jobId) level maybeDatetime = do - logs <- Uncurried.runEffectFn3 selectLogsByJobImpl db jobId (API.V1.logLevelToPriority level) - let { success, fail } = partitionEithers $ map jsLogLineToLogLine logs - pure { fail, success: Array.filter (\{ timestamp } -> timestamp > (fromMaybe bottom maybeDatetime)) success } +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit + +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl -type NewJob = +type InsertPackageJob = { jobId :: JobId , jobType :: JobType - , createdAt :: DateTime , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation } -type JSNewJob = +type JSInsertPackageJob = { jobId :: String , jobType :: String - , createdAt :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String } -newJobToJSNewJob :: NewJob -> JSNewJob -newJobToJSNewJob { jobId: JobId jobId, jobType, createdAt, packageName, ref } = - { jobId +insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = + { jobId: un JobId jobId , jobType: API.V1.printJobType jobType - , createdAt: DateTime.format Internal.Format.iso8601DateTime createdAt , packageName: PackageName.print packageName - , ref + , packageVersion: Version.print packageVersion + , payload: stringifyJson Operation.packageOperationCodec payload + } + +foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
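+-- | The operation payload is serialized with `Operation.packageOperationCodec`
+-- | and stored as JSON in the `package_jobs` table, in the same transaction
+-- | that creates the corresponding `job_info` row.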
+insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit +insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep + +type InsertMatrixJob = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + } + +type JSInsertMatrixJob = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + } + +insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = + { jobId: un JobId jobId + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion + , compilerVersion: Version.print compilerVersion + , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } -type JobResult = +foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit + +insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit +insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep + +type InsertPackageSetJob = { jobId :: JobId - , finishedAt :: DateTime - , success :: Boolean + , payload :: PackageSetOperation } -type JSJobResult = +type JSInsertPackageSetJob = { jobId :: String - , finishedAt :: String - , success :: Int + , payload :: String } -jobResultToJSJobResult :: JobResult -> JSJobResult -jobResultToJSJobResult { jobId: JobId jobId, finishedAt, success } = - { jobId - , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt - , success: if success then 1 else 0 +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload } -type Job = +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +type PackageJobDetails = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String + , packageVersion :: Version + , payload :: PackageOperation , createdAt :: DateTime - , finishedAt :: Maybe DateTime - , success :: Boolean + , startedAt :: Maybe DateTime } -type JSJob = +type JSPackageJobDetails = { jobId :: String , jobType :: String , packageName :: String - , ref :: String + , packageVersion :: String + , payload :: String , createdAt :: String - , finishedAt :: Nullable String - , success :: Int + , startedAt :: Nullable String } -jsJobToJob :: JSJob -> Either String Job -jsJobToJob raw = do - let jobId = JobId raw.jobId - jobType <- API.V1.parseJobType raw.jobType - packageName <- PackageName.parse raw.packageName - createdAt <- DateTime.unformat Internal.Format.iso8601DateTime raw.createdAt - finishedAt <- case toMaybe raw.finishedAt of - Nothing -> pure Nothing - Just rawFinishedAt -> Just <$> DateTime.unformat Internal.Format.iso8601DateTime rawFinishedAt - success <- case raw.success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show raw.success - pure $ { jobId, jobType, createdAt, finishedAt, success, packageName, ref: raw.ref } +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, jobType, packageName, 
packageVersion, payload, createdAt, startedAt } = do + ty <- API.V1.parseJobType jobType + name <- PackageName.parse packageName + version <- Version.parse packageVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , jobType: ty + , packageName: name + , packageVersion: version + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) + +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails + +type MatrixJobDetails = + { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSMatrixJobDetails = + { jobId :: String + , packageName :: String + , packageVersion :: String + , compilerVersion :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -createJob :: SQLite -> NewJob -> Effect Unit -createJob db = Uncurried.runEffectFn2 createJobImpl db <<< newJobToJSNewJob +matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + version <- Version.parse packageVersion + compiler <- Version.parse compilerVersion + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload + pure + { jobId: JobId jobId + , packageName: name + , packageVersion: version + , compilerVersion: compiler + , payload: parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) + +selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) +selectNextMatrixJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails + +type PackageSetJobDetails = + { jobId :: JobId + , payload :: PackageSetOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime + } + +type JSPackageSetJobDetails = + { jobId :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String + } -finishJob :: SQLite -> JobResult -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< jobResultToJSJobResult +packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + pure + { jobId: JobId jobId + , payload: 
parsed + , createdAt: created + , startedAt: started + } + +foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) + +selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) +selectNextPackageSetJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails -selectJob :: SQLite -> JobId -> Effect (Either String Job) -selectJob db (JobId jobId) = do - maybeJob <- toMaybe <$> Uncurried.runEffectFn2 selectJobImpl db jobId - pure $ jsJobToJob =<< note ("Couldn't find job with id " <> jobId) maybeJob +type JSLogLine = + { level :: Int + , message :: String + , jobId :: String + , timestamp :: String + } -runningJobForPackage :: SQLite -> PackageName -> Effect (Either String Job) -runningJobForPackage db packageName = do - let pkgStr = PackageName.print packageName - maybeJSJob <- toMaybe <$> Uncurried.runEffectFn2 runningJobForPackageImpl db pkgStr - pure $ jsJobToJob =<< note ("Couldn't find running job for package " <> pkgStr) maybeJSJob +logLineToJSRep :: LogLine -> JSLogLine +logLineToJSRep { level, message, jobId, timestamp } = + { level: API.V1.logLevelToPriority level + , message + , jobId: un JobId jobId + , timestamp: DateTime.format Internal.Format.iso8601DateTime timestamp + } -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +logLineFromJSRep :: JSLogLine -> Either String LogLine +logLineFromJSRep { level, message, jobId, timestamp } = do + logLevel <- API.V1.logLevelFromPriority level + time <- DateTime.unformat Internal.Format.iso8601DateTime timestamp + pure + { level: logLevel + , message + , jobId: JobId jobId + , timestamp: time + } + +foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit + +insertLogLine :: SQLite -> LogLine -> Effect Unit +insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep + +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) + +-- | Select all logs for a given job at or above the indicated log level. To get all +-- | logs, pass the DEBUG log level. 
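+-- | The optional timestamp restricts results to log lines at or after that
+-- | time, and lines that fail to decode are collected in the `fail` field
+-- | rather than aborting the query.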
+selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob db jobId level since = do + let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + jsLogLines <- + Uncurried.runEffectFn4 + selectLogsByJobImpl + db + (un JobId jobId) + (API.V1.logLevelToPriority level) + (Nullable.toNullable timestamp) + pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs index b9aa35b1c..c9a8aac8a 100644 --- a/app/src/App/Server.purs +++ b/app/src/App/Server.purs @@ -3,13 +3,22 @@ module Registry.App.Server where import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) +import Control.Parallel as Parallel import Data.Codec.JSON as CJ +import Data.DateTime (DateTime(..)) +import Data.DateTime as DateTime import Data.Formatter.DateTime as Formatter.DateTime +import Data.Lens (Lens') +import Data.Lens as Lens +import Data.Lens.Record as Lens.Record import Data.Newtype (unwrap) import Data.String as String +import Data.Time.Duration (Minutes(..)) import Data.UUID.Random as UUID +import Effect.Aff (Fiber, Milliseconds(..)) import Effect.Aff as Aff import Effect.Class.Console as Console +import Effect.Ref as Ref import Fetch.Retry as Fetch.Retry import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) import HTTPurple as HTTPurple @@ -43,7 +52,7 @@ import Registry.App.Effect.Source as Source import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import Registry.App.SQLite (SQLite) +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) import Registry.App.SQLite as SQLite import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (GitHubToken, Octokit) @@ -56,40 +65,121 @@ import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except +import Run.Except as Run.Except newJobId :: forall m. MonadEffect m => m JobId newJobId = liftEffect do id <- UUID.make pure $ JobId $ UUID.toString id +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
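+      -- The job branch resolves to `Just` and the timeout branch to `Nothing`,
+      -- so matching on the race result tells us which branch finished first.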
+ jobResult <- do + let execute = map Just (runEffects env (executeJob now job)) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + finishResult <- runEffects env $ case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + Db.finishJob { jobId, finishedAt: now, success: true } + + case finishResult of + Left err -> pure $ Left err + Right _ -> loop + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob now = case _ of + PackageJob { jobId } -> do + Db.startJob { jobId, startedAt: now } + pure unit -- UNIMPLEMENTED + MatrixJob _details -> + pure unit -- UNIMPLEMENTED + PackageSetJob _details -> + pure unit -- UNIMPLEMENTED + +squashCommitRegistry :: Run ServerEffects Unit +squashCommitRegistry = do + pure unit + router :: ServerEnv -> Request Route -> Run ServerEffects Response router env { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do - publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - Log.info $ "Received Publish request, job id: " <> unwrap jobId - API.publish Nothing publish + -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do + -- Log.info $ "Received Publish request, job id: " <> unwrap jobId + -- API.publish Nothing publish + HTTPurple.emptyResponse Status.ok Unpublish, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Unpublish { name, version } -> do - forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected unpublish operation." + -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + -- case auth.payload of + -- Operation.Unpublish { name, version } -> do + -- forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do + -- Log.info $ "Received Unpublish request, job id: " <> unwrap jobId + -- API.authenticated auth + -- _ -> + -- HTTPurple.badRequest "Expected unpublish operation." + HTTPurple.emptyResponse Status.ok Transfer, Post -> do - auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - case auth.payload of - Operation.Transfer { name } -> do - forkPipelineJob name "" TransferJob \jobId -> do - Log.info $ "Received Transfer request, job id: " <> unwrap jobId - API.authenticated auth - _ -> - HTTPurple.badRequest "Expected transfer operation." 
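+      -- The transfer pipeline is stubbed out until the router is updated to
+      -- enqueue jobs rather than run operations inline (see the notes in
+      -- Registry.App.SQLite).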
+ HTTPurple.emptyResponse Status.ok + -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + -- case auth.payload of + -- Operation.Transfer { name } -> do + -- forkPipelineJob name "" TransferJob \jobId -> do + -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId + -- API.authenticated auth + -- _ -> + -- HTTPurple.badRequest "Expected transfer operation." Jobs, Get -> do jsonOk (CJ.array V1.jobCodec) [] @@ -97,12 +187,17 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Db.selectJob jobId) >>= case _ of + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound - Right job -> do - jsonOk V1.jobCodec (Record.insert (Proxy :: _ "logs") logs job) + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) Status, Get -> HTTPurple.emptyResponse Status.ok @@ -112,35 +207,34 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound - where - forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - forkPipelineJob packageName ref jobType action = do - -- First thing we check if the package already has a pipeline in progress - lift (Db.runningJobForPackage packageName) >>= case _ of - -- If yes, we error out if it's the wrong kind, return it if it's the same type - Right { jobId, jobType: runningJobType } -> do - lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - case runningJobType == jobType of - true -> jsonOk V1.jobCreatedResponseCodec { jobId } - false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- otherwise spin up a new thread - Left _err -> do - lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - jobId <- newJobId - now <- nowUTC - let newJob = { createdAt: now, jobId, jobType, packageName, ref } - lift $ Db.createJob newJob - let newEnv = env { jobId = Just jobId } - - _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - result <- runEffects newEnv (action jobId) - case result of - Left _ -> pure unit - Right _ -> do - finishedAt <- nowUTC - void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - - jsonOk V1.jobCreatedResponseCodec { jobId } + -- where + -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response + -- forkPipelineJob packageName ref jobType action = do + -- -- First thing we check if the package already has a pipeline in progress + -- lift (Db.runningJobForPackage packageName) >>= case _ of + -- -- If yes, we error out if it's the wrong kind, return it if it's the same type + -- Right { jobId, jobType: runningJobType } -> do + -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId + -- case runningJobType == jobType of + -- true -> jsonOk 
V1.jobCreatedResponseCodec { jobId } + -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName + -- -- otherwise spin up a new thread + -- Left _err -> do + -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" + -- jobId <- newJobId + -- now <- nowUTC + -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } + -- lift $ Db.createJob newJob + -- let newEnv = env { jobId = Just jobId } + + -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do + -- result <- runEffects newEnv (action jobId) + -- case result of + -- Left _ -> pure unit + -- Right _ -> do + -- finishedAt <- nowUTC + -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) + -- jsonOk V1.jobCreatedResponseCodec { jobId } type ServerEnvVars = { token :: GitHubToken @@ -219,7 +313,11 @@ createServerEnv = do type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) -runServer :: ServerEnv -> (ServerEnv -> Request Route -> Run ServerEffects Response) -> Request Route -> Aff Response +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response runServer env router' request = do result <- runEffects env (router' env request) case result of diff --git a/db/migrations/20240914170550_delete_jobs_logs_table.sql b/db/migrations/20240914170550_delete_jobs_logs_table.sql new file mode 100644 index 000000000..9dc12c365 --- /dev/null +++ b/db/migrations/20240914170550_delete_jobs_logs_table.sql @@ -0,0 +1,22 @@ +-- migrate:up +DROP TABLE IF EXISTS jobs; +DROP TABLE IF EXISTS logs; + +-- migrate:down +CREATE TABLE IF NOT EXISTS jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + ref TEXT NOT NULL, + createdAt TEXT NOT NULL, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +CREATE TABLE IF NOT EXISTS logs ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES jobs (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL +); diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql new file mode 100644 index 000000000..2b01deb0b --- /dev/null +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -0,0 +1,56 @@ +-- migrate:up + +-- Common job information table +CREATE TABLE job_info ( + jobId TEXT PRIMARY KEY NOT NULL, + createdAt TEXT NOT NULL, + startedAt TEXT, + finishedAt TEXT, + success INTEGER NOT NULL DEFAULT 0 +); + +-- Package-oriented jobs (publish/unpublish/transfer) +CREATE TABLE package_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + jobType TEXT NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Compiler matrix jobs (one compiler, all packages) +CREATE TABLE matrix_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + compilerVersion TEXT NOT NULL, + -- the build plan, which should be computed before the job is stored in the + -- queue so that if multiple jobs targeting one package get interrupted by + -- a higher-priority job then the build plan is not affected. 
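+  -- The payload is stored as a JSON object mapping package names to versions.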
+  payload JSON NOT NULL,
+  FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+-- Package set jobs
+CREATE TABLE package_set_jobs (
+  jobId TEXT PRIMARY KEY NOT NULL,
+  payload JSON NOT NULL,
+  FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+
+CREATE TABLE IF NOT EXISTS logs (
+  id INTEGER PRIMARY KEY AUTOINCREMENT,
+  jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE,
+  level INTEGER NOT NULL,
+  message TEXT NOT NULL,
+  timestamp TEXT NOT NULL
+);
+
+-- migrate:down
+
+DROP TABLE logs;
+DROP TABLE package_set_jobs;
+DROP TABLE matrix_jobs;
+DROP TABLE package_jobs;
+DROP TABLE job_info;
diff --git a/db/schema.sql b/db/schema.sql
index 116de1dda..2ad866068 100644
--- a/db/schema.sql
+++ b/db/schema.sql
@@ -1,21 +1,45 @@
 CREATE TABLE IF NOT EXISTS "schema_migrations" (version varchar(128) primary key);
-CREATE TABLE jobs (
-  jobId text primary key not null,
-  jobType text not null,
-  packageName text not null,
-  ref text not null,
-  createdAt text not null,
-  finishedAt text,
-  success integer not null default 0
+CREATE TABLE job_info (
+  jobId TEXT PRIMARY KEY NOT NULL,
+  createdAt TEXT NOT NULL,
+  startedAt TEXT,
+  finishedAt TEXT,
+  success INTEGER NOT NULL DEFAULT 0
+);
+CREATE TABLE package_jobs (
+  jobId TEXT PRIMARY KEY NOT NULL,
+  jobType TEXT NOT NULL,
+  packageName TEXT NOT NULL,
+  packageVersion TEXT NOT NULL,
+  payload JSON NOT NULL,
+  FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE
+);
+CREATE TABLE matrix_jobs (
+  jobId TEXT PRIMARY KEY NOT NULL,
+  packageName TEXT NOT NULL,
+  packageVersion TEXT NOT NULL,
+  compilerVersion TEXT NOT NULL,
+  -- the build plan, which should be computed before the job is stored in the
+  -- queue so that if multiple jobs targeting one package get interrupted by
+  -- a higher-priority job then the build plan is not affected. 
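+  -- The payload is stored as a JSON object mapping package names to versions.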
+ payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); +CREATE TABLE package_set_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); CREATE TABLE logs ( - id integer primary key autoincrement, - jobId text not null references jobs on delete cascade, - level integer not null, - message text not null, - timestamp text not null + id INTEGER PRIMARY KEY AUTOINCREMENT, + jobId TEXT NOT NULL REFERENCES job_info (jobId) ON DELETE CASCADE, + level INTEGER NOT NULL, + message TEXT NOT NULL, + timestamp TEXT NOT NULL ); -- Dbmate schema migrations INSERT INTO "schema_migrations" (version) VALUES ('20230711143615'), - ('20230711143803'); + ('20230711143803'), + ('20240914170550'), + ('20240914171030'); diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index a6193b5f7..67216ca35 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -68,7 +68,6 @@ type Job = { jobId :: JobId , jobType :: JobType , packageName :: PackageName - , ref :: String , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -80,7 +79,6 @@ jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec , jobType: jobTypeCodec , packageName: PackageName.codec - , ref: CJ.string , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 98c35f092..521bc2883 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -23,6 +23,8 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageOperationCodec + , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec @@ -58,6 +60,18 @@ data PackageOperation derive instance Eq PackageOperation +-- | A codec for encoding and decoding a `PackageOperation` as JSON. +packageOperationCodec :: CJ.Codec PackageOperation +packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode + where + decode json = + map Publish (Codec.decode publishCodec json) + <|> map Authenticated (Codec.decode authenticatedCodec json) + + encode = case _ of + Publish publish -> CJ.encode publishCodec publish + Authenticated authenticated -> CJ.encode authenticatedCodec authenticated + -- | An operation supported by the registry HTTP API for package operations and -- | which must be authenticated. data AuthenticatedPackageOperation @@ -178,6 +192,13 @@ data PackageSetOperation = PackageSetUpdate PackageSetUpdateData derive instance Eq PackageSetOperation +-- | A codec for encoding and decoding a `PackageSetOperation` as JSON. +packageSetOperationCodec :: CJ.Codec PackageSetOperation +packageSetOperationCodec = CJ.named "PackageSetOperation" $ Codec.codec' decode encode + where + decode json = map PackageSetUpdate (Codec.decode packageSetUpdateCodec json) + encode (PackageSetUpdate update) = CJ.encode packageSetUpdateCodec update + -- | Submit a batch update to the most recent package set. 
-- | -- | For full details, see the registry spec: From 4b9743ce99f2f28ce94c66252cfdf194c4c46b6b Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 21 Jun 2025 23:13:52 -0400 Subject: [PATCH 02/19] Split Server module into Env, Router, JobExecutor, and Main --- app/spago.yaml | 2 +- app/src/App/JobExecutor.purs | 89 +++++++ app/src/App/Main.purs | 85 +++++++ app/src/App/SQLite.js | 4 +- app/src/App/SQLite.purs | 29 +-- app/src/App/Server.purs | 441 --------------------------------- app/src/App/Server/Env.purs | 188 ++++++++++++++ app/src/App/Server/Router.purs | 84 +++++++ lib/src/API/V1.purs | 25 +- lib/src/JobType.purs | 26 ++ lib/src/Operation.purs | 17 +- 11 files changed, 502 insertions(+), 488 deletions(-) create mode 100644 app/src/App/JobExecutor.purs create mode 100644 app/src/App/Main.purs delete mode 100644 app/src/App/Server.purs create mode 100644 app/src/App/Server/Env.purs create mode 100644 app/src/App/Server/Router.purs create mode 100644 lib/src/JobType.purs diff --git a/app/spago.yaml b/app/spago.yaml index be3c3bec6..03a600425 100644 --- a/app/spago.yaml +++ b/app/spago.yaml @@ -1,7 +1,7 @@ package: name: registry-app run: - main: Registry.App.Server + main: Registry.App.Main publish: license: BSD-3-Clause version: 0.0.1 diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs new file mode 100644 index 000000000..0bd6fa44f --- /dev/null +++ b/app/src/App/JobExecutor.purs @@ -0,0 +1,89 @@ +module Registry.App.JobExecutor where + +import Registry.App.Prelude hiding ((/)) + +import Control.Parallel as Parallel +import Data.DateTime (DateTime) +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import Registry.API.V1 (JobId(..)) +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) +import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Run (Run) +import Run.Except (EXCEPT) + +data JobDetails + = PackageJob PackageJobDetails + | MatrixJob MatrixJobDetails + | PackageSetJob PackageSetJobDetails + +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = do + Db.selectNextPackageJob >>= case _ of + Just job -> pure $ Just $ PackageJob job + Nothing -> Db.selectNextMatrixJob >>= case _ of + Just job -> pure $ Just $ MatrixJob job + Nothing -> Db.selectNextPackageSetJob >>= case _ of + Just job -> pure $ Just $ PackageSetJob job + Nothing -> pure Nothing + +runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) +runJobExecutor env = do + runEffects env Db.deleteIncompleteJobs >>= case _ of + Left err -> pure $ Left err + Right _ -> loop + where + loop = runEffects env findNextAvailableJob >>= case _ of + Left err -> + pure $ Left err + + Right Nothing -> do + Aff.delay (Milliseconds 100.0) + loop + + Right (Just job) -> do + now <- nowUTC + + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId + + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
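+      -- The job branch resolves to `Just` and the timeout branch to `Nothing`,
+      -- so matching on the race result tells us which branch finished first.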
+ jobResult <- do + let execute = Just <$> runEffects env (executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + + finishResult <- runEffects env case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Db.finishJob { jobId, finishedAt: now, success: false } + + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + Db.finishJob { jobId, finishedAt: now, success: true } + + case finishResult of + Left err -> pure $ Left err + Right _ -> loop + +executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob now = case _ of + PackageJob { jobId } -> do + Db.startJob { jobId, startedAt: now } + pure unit -- UNIMPLEMENTED + MatrixJob _details -> + pure unit -- UNIMPLEMENTED + PackageSetJob _details -> + pure unit -- UNIMPLEMENTED diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs new file mode 100644 index 000000000..c734d90f7 --- /dev/null +++ b/app/src/App/Main.purs @@ -0,0 +1,85 @@ +module Registry.App.Main where + +import Registry.App.Prelude hiding ((/)) + +import Data.String as String +import Effect.Aff as Aff +import Effect.Class.Console as Console +import Fetch.Retry as Fetch.Retry +import HTTPurple (Request, Response) +import HTTPurple as HTTPurple +import Node.Process as Process +import Registry.API.V1 (Route) +import Registry.API.V1 as V1 +import Registry.App.Effect.Env as Env +import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Router as Router + +main :: Effect Unit +main = + createServerEnv # Aff.runAff_ case _ of + Left error -> do + Console.log $ "Failed to start server: " <> Aff.message error + Process.exit' 1 + Right env -> do + -- Start healthcheck ping loop if URL is configured + case env.vars.resourceEnv.healthchecksUrl of + Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" + Just healthchecksUrl -> do + _healthcheck <- Aff.launchAff do + let + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + + loop n = + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + Aff.delay fiveMinutes + loop n + + Cancelled | n >= 0 -> do + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) + + Failed error | n >= 0 -> do + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) + + Succeeded { status } | status /= 200, n >= 0 -> do + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) + + Cancelled -> + Console.error "Healthchecks cancelled and failure limit reached, will not retry." + + Failed error -> do + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + + Succeeded _ -> do + Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." 
+ + loop limit + pure unit + + -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + + _close <- HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer env + } + pure unit + where + runServer :: ServerEnv -> Request Route -> Aff Response + runServer env request = do + result <- runEffects env (Router.router env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index fa9a8b539..97521d202 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -67,12 +67,12 @@ const _insertJob = (db, table, columns, job) => { }; export const insertPackageJobImpl = (db, job) => { - const columns = [ 'jobId', 'jobType', 'packageName', 'packageVersion', 'payload' ] + const columns = [ 'jobId', 'jobType', 'payload' ] return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { - const columns = [ 'jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload' ] + const columns = [ 'jobId', 'compilerVersion', 'payload' ] return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 8c117fda7..b56575757 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -59,10 +59,11 @@ import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), JobType, LogLevel, LogLine) +import Registry.API.V1 (JobId(..), LogLevel, LogLine) import Registry.API.V1 as API.V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.Operation (PackageOperation, PackageSetOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName @@ -175,26 +176,22 @@ deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl type InsertPackageJob = { jobId :: JobId - , jobType :: JobType - , packageName :: PackageName - , packageVersion :: Version , payload :: PackageOperation } type JSInsertPackageJob = { jobId :: String , jobType :: String - , packageName :: String - , packageVersion :: String , payload :: String } insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep { jobId, jobType, packageName, packageVersion, payload } = +insertPackageJobToJSRep { jobId, payload } = { jobId: un JobId jobId - , jobType: API.V1.printJobType jobType - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion + , jobType: JobType.print case payload of + Operation.Publish _ -> JobType.PublishJob + Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob + Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob , payload: stringifyJson Operation.packageOperationCodec payload } @@ -206,25 +203,19 @@ insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertP type InsertMatrixJob = { jobId :: JobId - , packageName :: PackageName - , packageVersion :: Version , compilerVersion :: Version , payload :: Map PackageName Version } type JSInsertMatrixJob = { jobId :: String - , packageName :: String - , packageVersion :: String , compilerVersion :: String , payload :: 
String } insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep { jobId, packageName, packageVersion, compilerVersion, payload } = +insertMatrixJobToJSRep { jobId, compilerVersion, payload } = { jobId: un JobId jobId - , packageName: PackageName.print packageName - , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } @@ -257,7 +248,7 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -277,7 +268,7 @@ type JSPackageJobDetails = packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- API.V1.parseJobType jobType + ty <- JobType.parse jobType name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt diff --git a/app/src/App/Server.purs b/app/src/App/Server.purs deleted file mode 100644 index c9a8aac8a..000000000 --- a/app/src/App/Server.purs +++ /dev/null @@ -1,441 +0,0 @@ -module Registry.App.Server where - -import Registry.App.Prelude hiding ((/)) - -import Control.Monad.Cont (ContT) -import Control.Parallel as Parallel -import Data.Codec.JSON as CJ -import Data.DateTime (DateTime(..)) -import Data.DateTime as DateTime -import Data.Formatter.DateTime as Formatter.DateTime -import Data.Lens (Lens') -import Data.Lens as Lens -import Data.Lens.Record as Lens.Record -import Data.Newtype (unwrap) -import Data.String as String -import Data.Time.Duration (Minutes(..)) -import Data.UUID.Random as UUID -import Effect.Aff (Fiber, Milliseconds(..)) -import Effect.Aff as Aff -import Effect.Class.Console as Console -import Effect.Ref as Ref -import Fetch.Retry as Fetch.Retry -import HTTPurple (JsonDecoder(..), JsonEncoder(..), Method(..), Request, Response) -import HTTPurple as HTTPurple -import HTTPurple.Status as Status -import Node.Path as Path -import Node.Process as Process -import Record as Record -import Registry.API.V1 (JobId(..), JobType(..), LogLevel(..), Route(..)) -import Registry.API.V1 as V1 -import Registry.App.API (COMPILER_CACHE, _compilerCache) -import Registry.App.API as API -import Registry.App.CLI.Git as Git -import Registry.App.Effect.Cache (CacheRef) -import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Db (DB) -import Registry.App.Effect.Db as Db -import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv, serverPort) -import Registry.App.Effect.Env as Env -import Registry.App.Effect.GitHub (GITHUB) -import Registry.App.Effect.GitHub as GitHub -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.App.Effect.Pursuit (PURSUIT) -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry (REGISTRY) -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source (SOURCE) -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage (STORAGE) -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) -import 
Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, SQLite, PackageSetJobDetails) -import Registry.App.SQLite as SQLite -import Registry.Foreign.FSExtra as FS.Extra -import Registry.Foreign.Octokit (GitHubToken, Octokit) -import Registry.Foreign.Octokit as Octokit -import Registry.Internal.Format as Internal.Format -import Registry.Operation as Operation -import Registry.PackageName as PackageName -import Registry.Version as Version -import Run (AFF, EFFECT, Run) -import Run as Run -import Run.Except (EXCEPT) -import Run.Except as Except -import Run.Except as Run.Except - -newJobId :: forall m. MonadEffect m => m JobId -newJobId = liftEffect do - id <- UUID.make - pure $ JobId $ UUID.toString id - -data JobDetails - = PackageJob PackageJobDetails - | MatrixJob MatrixJobDetails - | PackageSetJob PackageSetJobDetails - -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do - Db.selectNextPackageJob >>= case _ of - Just job -> pure $ Just $ PackageJob job - Nothing -> Db.selectNextMatrixJob >>= case _ of - Just job -> pure $ Just $ MatrixJob job - Nothing -> Db.selectNextPackageSetJob >>= case _ of - Just job -> pure $ Just $ PackageSetJob job - Nothing -> pure Nothing - -runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop - where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err - - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop - - Right (Just job) -> do - now <- nowUTC - - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId - - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. - jobResult <- do - let execute = map Just (runEffects env (executeJob now job)) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - - finishResult <- runEffects env $ case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } - - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." 
- Db.finishJob { jobId, finishedAt: now, success: true } - - case finishResult of - Left err -> pure $ Left err - Right _ -> loop - -executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } - pure unit -- UNIMPLEMENTED - MatrixJob _details -> - pure unit -- UNIMPLEMENTED - PackageSetJob _details -> - pure unit -- UNIMPLEMENTED - -squashCommitRegistry :: Run ServerEffects Unit -squashCommitRegistry = do - pure unit - -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of - Publish, Post -> do - -- publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body - -- lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - -- forkPipelineJob publish.name publish.ref PublishJob \jobId -> do - -- Log.info $ "Received Publish request, job id: " <> unwrap jobId - -- API.publish Nothing publish - HTTPurple.emptyResponse Status.ok - - Unpublish, Post -> do - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Unpublish { name, version } -> do - -- forkPipelineJob name (Version.print version) UnpublishJob \jobId -> do - -- Log.info $ "Received Unpublish request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected unpublish operation." - HTTPurple.emptyResponse Status.ok - - Transfer, Post -> do - HTTPurple.emptyResponse Status.ok - -- auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body - -- case auth.payload of - -- Operation.Transfer { name } -> do - -- forkPipelineJob name "" TransferJob \jobId -> do - -- Log.info $ "Received Transfer request, job id: " <> unwrap jobId - -- API.authenticated auth - -- _ -> - -- HTTPurple.badRequest "Expected transfer operation." - - Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] - - Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of - Left err -> do - lift $ Log.error $ "Error while fetching job: " <> err - HTTPurple.notFound - Right Nothing -> - HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). 
- -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) - - Status, Get -> - HTTPurple.emptyResponse Status.ok - - Status, Head -> - HTTPurple.emptyResponse Status.ok - - _, _ -> - HTTPurple.notFound - -- where - -- forkPipelineJob :: PackageName -> String -> JobType -> (JobId -> Run _ Unit) -> ContT Response (Run _) Response - -- forkPipelineJob packageName ref jobType action = do - -- -- First thing we check if the package already has a pipeline in progress - -- lift (Db.runningJobForPackage packageName) >>= case _ of - -- -- If yes, we error out if it's the wrong kind, return it if it's the same type - -- Right { jobId, jobType: runningJobType } -> do - -- lift $ Log.info $ "Found running job for package " <> PackageName.print packageName <> ", job id: " <> unwrap jobId - -- case runningJobType == jobType of - -- true -> jsonOk V1.jobCreatedResponseCodec { jobId } - -- false -> HTTPurple.badRequest $ "There is already a " <> V1.printJobType runningJobType <> " job running for package " <> PackageName.print packageName - -- -- otherwise spin up a new thread - -- Left _err -> do - -- lift $ Log.info $ "No running job for package " <> PackageName.print packageName <> ", creating a new one" - -- jobId <- newJobId - -- now <- nowUTC - -- let newJob = { createdAt: now, jobId, jobType, packageName, ref } - -- lift $ Db.createJob newJob - -- let newEnv = env { jobId = Just jobId } - - -- _fiber <- liftAff $ Aff.forkAff $ Aff.attempt $ do - -- result <- runEffects newEnv (action jobId) - -- case result of - -- Left _ -> pure unit - -- Right _ -> do - -- finishedAt <- nowUTC - -- void $ runEffects newEnv (Db.finishJob { jobId, finishedAt, success: true }) - -- jsonOk V1.jobCreatedResponseCodec { jobId } - -type ServerEnvVars = - { token :: GitHubToken - , publicKey :: String - , privateKey :: String - , spacesKey :: String - , spacesSecret :: String - , resourceEnv :: ResourceEnv - } - -readServerEnvVars :: Aff ServerEnvVars -readServerEnvVars = do - Env.loadEnvFile ".env" - token <- Env.lookupRequired Env.pacchettibottiToken - publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub - privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret - resourceEnv <- Env.lookupResourceEnv - pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } - -type ServerEnv = - { cacheDir :: FilePath - , logsDir :: FilePath - , githubCacheRef :: CacheRef - , legacyCacheRef :: CacheRef - , registryCacheRef :: CacheRef - , octokit :: Octokit - , vars :: ServerEnvVars - , debouncer :: Registry.Debouncer - , db :: SQLite - , jobId :: Maybe JobId - } - -createServerEnv :: Aff ServerEnv -createServerEnv = do - vars <- readServerEnvVars - - let cacheDir = Path.concat [ scratchDir, ".cache" ] - let logsDir = Path.concat [ scratchDir, "logs" ] - for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory - - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl - debouncer <- Registry.newDebouncer - - db <- liftEffect $ SQLite.connect - { database: vars.resourceEnv.databaseUrl.path - -- To see all database queries logged in the terminal, use this instead - -- of 'mempty'. Turned off by default because this is so verbose. 
- -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info - , logger: mempty - } - - -- At server startup we clean out all the jobs that are not completed, - -- because they are stale runs from previous startups of the server. - -- We can just remove the jobs, and all the logs belonging to them will be - -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db - - pure - { debouncer - , githubCacheRef - , legacyCacheRef - , registryCacheRef - , cacheDir - , logsDir - , vars - , octokit - , db - , jobId: Nothing - } - -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) - -runServer - :: ServerEnv - -> (ServerEnv -> Request Route -> Run ServerEffects Response) - -> Request Route - -> Aff Response -runServer env router' request = do - result <- runEffects env (router' env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response - -main :: Effect Unit -main = do - createServerEnv # Aff.runAff_ case _ of - Left error -> do - Console.log $ "Failed to start server: " <> Aff.message error - Process.exit' 1 - Right env -> do - -- Start healthcheck ping loop if URL is configured - case env.vars.resourceEnv.healthchecksUrl of - Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n - - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) - - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) - - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) - - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." - - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." - - loop limit - pure unit - - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional serverPort - - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env router - } - pure unit - -jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a -jsonDecoder codec = JsonDecoder (parseJson codec) - -jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a -jsonEncoder codec = JsonEncoder (stringifyJson codec) - -jsonOk :: forall m a. MonadAff m => CJ.Codec a -> a -> m Response -jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum - -runEffects :: forall a. 
ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) -runEffects env operation = Aff.attempt do - today <- nowUTC - let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" - let logPath = Path.concat [ env.logsDir, logFile ] - operation - # Registry.interpret - ( Registry.handle - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) - , workdir: scratchDir - , debouncer: env.debouncer - , cacheRef: env.registryCacheRef - } - ) - # Pursuit.interpret (Pursuit.handleAff env.vars.token) - # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) - # Source.interpret (Source.handle Source.Recent) - # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) - # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) - # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) - # Except.catch - ( \msg -> do - finishedAt <- nowUTC - case env.jobId of - -- Important to make sure that we mark the job as completed - Just jobId -> Db.finishJob { jobId, finishedAt, success: false } - Nothing -> pure unit - Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) - ) - # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog - # Log.interpret - ( \log -> case env.jobId of - Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log - Just jobId -> - Log.handleTerminal Verbose log - *> Log.handleFs Verbose logPath log - *> Log.handleDb { db: env.db, job: jobId } log - ) - # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } - # Env.runResourceEnv env.vars.resourceEnv - # Run.runBaseAff' diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs new file mode 100644 index 000000000..1f6fdc489 --- /dev/null +++ b/app/src/App/Server/Env.purs @@ -0,0 +1,188 @@ +module Registry.App.Server.Env where + +import Registry.App.Prelude hiding ((/)) + +import Data.Codec.JSON as CJ +import Data.Formatter.DateTime as Formatter.DateTime +import Data.String as String +import Effect.Aff as Aff +import HTTPurple (JsonDecoder(..), JsonEncoder(..), Request, Response) +import HTTPurple as HTTPurple +import Node.Path as Path +import Registry.API.V1 (JobId, Route) +import Registry.App.API (COMPILER_CACHE, _compilerCache) +import Registry.App.CLI.Git as Git +import Registry.App.Effect.Cache (CacheRef) +import Registry.App.Effect.Cache as Cache +import Registry.App.Effect.Comment (COMMENT) +import Registry.App.Effect.Comment as Comment +import Registry.App.Effect.Db (DB) +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) +import Registry.App.Effect.Env as Env +import Registry.App.Effect.GitHub (GITHUB) +import Registry.App.Effect.GitHub as GitHub +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Pursuit (PURSUIT) +import Registry.App.Effect.Pursuit as Pursuit +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Source (SOURCE) +import Registry.App.Effect.Source as Source +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.Legacy.Manifest (LEGACY_CACHE, _legacyCache) +import 
Registry.App.SQLite (SQLite) +import Registry.App.SQLite as SQLite +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Octokit (GitHubToken, Octokit) +import Registry.Foreign.Octokit as Octokit +import Registry.Internal.Format as Internal.Format +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +type ServerEnvVars = + { token :: GitHubToken + , publicKey :: String + , privateKey :: String + , spacesKey :: String + , spacesSecret :: String + , resourceEnv :: ResourceEnv + } + +readServerEnvVars :: Aff ServerEnvVars +readServerEnvVars = do + Env.loadEnvFile ".temp/local-server/.env.local" + Env.loadEnvFile ".env" + token <- Env.lookupRequired Env.pacchettibottiToken + publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub + privateKey <- Env.lookupRequired Env.pacchettibottiED25519 + spacesKey <- Env.lookupRequired Env.spacesKey + spacesSecret <- Env.lookupRequired Env.spacesSecret + resourceEnv <- Env.lookupResourceEnv + pure { token, publicKey, privateKey, spacesKey, spacesSecret, resourceEnv } + +type ServerEnv = + { cacheDir :: FilePath + , logsDir :: FilePath + , githubCacheRef :: CacheRef + , legacyCacheRef :: CacheRef + , registryCacheRef :: CacheRef + , octokit :: Octokit + , vars :: ServerEnvVars + , debouncer :: Registry.Debouncer + , db :: SQLite + , jobId :: Maybe JobId + } + +createServerEnv :: Aff ServerEnv +createServerEnv = do + vars <- readServerEnvVars + + let cacheDir = Path.concat [ scratchDir, ".cache" ] + let logsDir = Path.concat [ scratchDir, "logs" ] + for_ [ cacheDir, logsDir ] FS.Extra.ensureDirectory + + githubCacheRef <- Cache.newCacheRef + legacyCacheRef <- Cache.newCacheRef + registryCacheRef <- Cache.newCacheRef + + octokit <- Octokit.newOctokit vars.token vars.resourceEnv.githubApiUrl + debouncer <- Registry.newDebouncer + + db <- liftEffect $ SQLite.connect + { database: vars.resourceEnv.databaseUrl.path + -- To see all database queries logged in the terminal, use this instead + -- of 'mempty'. Turned off by default because this is so verbose. + -- Run.runBaseEffect <<< Log.interpret (Log.handleTerminal Normal) <<< Log.info + , logger: mempty + } + + -- At server startup we clean out all the jobs that are not completed, + -- because they are stale runs from previous startups of the server. + -- We can just remove the jobs, and all the logs belonging to them will be + -- removed automatically by the foreign key constraint. + liftEffect $ SQLite.deleteIncompleteJobs db + + pure + { debouncer + , githubCacheRef + , legacyCacheRef + , registryCacheRef + , cacheDir + , logsDir + , vars + , octokit + , db + , jobId: Nothing + } + +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) + +runServer + :: ServerEnv + -> (ServerEnv -> Request Route -> Run ServerEffects Response) + -> Request Route + -> Aff Response +runServer env router' request = do + result <- runEffects env (router' env request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +jsonDecoder :: forall a. CJ.Codec a -> JsonDecoder CJ.DecodeError a +jsonDecoder codec = JsonDecoder (parseJson codec) + +jsonEncoder :: forall a. CJ.Codec a -> JsonEncoder a +jsonEncoder codec = JsonEncoder (stringifyJson codec) + +jsonOk :: forall m a. 
MonadAff m => CJ.Codec a -> a -> m Response +jsonOk codec datum = HTTPurple.ok' HTTPurple.jsonHeaders $ HTTPurple.toJson (jsonEncoder codec) datum + +runEffects :: forall a. ServerEnv -> Run ServerEffects a -> Aff (Either Aff.Error a) +runEffects env operation = Aff.attempt do + today <- nowUTC + let logFile = String.take 10 (Formatter.DateTime.format Internal.Format.iso8601Date today) <> ".log" + let logPath = Path.concat [ env.logsDir, logFile ] + operation + # Registry.interpret + ( Registry.handle + { repos: Registry.defaultRepos + , pull: Git.ForceClean + , write: Registry.CommitAs (Git.pacchettibottiCommitter env.vars.token) + , workdir: scratchDir + , debouncer: env.debouncer + , cacheRef: env.registryCacheRef + } + ) + # Pursuit.interpret (Pursuit.handleAff env.vars.token) + # Storage.interpret (Storage.handleS3 { s3: { key: env.vars.spacesKey, secret: env.vars.spacesSecret }, cache: env.cacheDir }) + # Source.interpret (Source.handle Source.Recent) + # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache: env.cacheDir, ref: env.githubCacheRef }) + # Cache.interpret _legacyCache (Cache.handleMemoryFs { cache: env.cacheDir, ref: env.legacyCacheRef }) + # Cache.interpret _compilerCache (Cache.handleFs env.cacheDir) + # Except.catch + ( \msg -> do + finishedAt <- nowUTC + case env.jobId of + -- Important to make sure that we mark the job as completed + Just jobId -> Db.finishJob { jobId, finishedAt, success: false } + Nothing -> pure unit + Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) + ) + # Db.interpret (Db.handleSQLite { db: env.db }) + # Comment.interpret Comment.handleLog + # Log.interpret + ( \log -> case env.jobId of + Nothing -> Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log + Just jobId -> + Log.handleTerminal Verbose log + *> Log.handleFs Verbose logPath log + *> Log.handleDb { db: env.db, job: jobId } log + ) + # Env.runPacchettiBottiEnv { publicKey: env.vars.publicKey, privateKey: env.vars.privateKey } + # Env.runResourceEnv env.vars.resourceEnv + # Run.runBaseAff' diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs new file mode 100644 index 000000000..27af29a24 --- /dev/null +++ b/app/src/App/Server/Router.purs @@ -0,0 +1,84 @@ +module Registry.App.Server.Router where + +import Registry.App.Prelude hiding ((/)) + +import Control.Monad.Cont (ContT) +import Data.Codec.JSON as CJ +import Data.UUID.Random as UUID +import HTTPurple (Method(..), Request, Response) +import HTTPurple as HTTPurple +import HTTPurple.Status as Status +import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) +import Registry.API.V1 as V1 +import Registry.App.Effect.Db as Db +import Registry.App.Effect.Log as Log +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.Operation (PackageOperation) +import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Run (Run) +import Run.Except as Run.Except + +router :: ServerEnv -> Request Route -> Run ServerEffects Response +router env { route, method, body } = HTTPurple.usingCont case route, method of + Publish, Post -> do + publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body + lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish + forkPackageJob $ Operation.Publish publish + + Unpublish, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Unpublish payload -> do + lift $ Log.info $ 
"Received Unpublish request: " <> printJson Operation.unpublishCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected unpublish operation." + + Transfer, Post -> do + auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body + case auth.payload of + Operation.Transfer payload -> do + lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload + forkPackageJob $ Operation.Authenticated auth + _ -> + HTTPurple.badRequest "Expected transfer operation." + + Jobs, Get -> do + jsonOk (CJ.array V1.jobCodec) [] + + Job jobId { level: maybeLogLevel, since }, Get -> do + let logLevel = fromMaybe Error maybeLogLevel + logs <- lift $ Db.selectLogsByJob jobId logLevel since + lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + Left err -> do + lift $ Log.error $ "Error while fetching job: " <> err + HTTPurple.notFound + Right Nothing -> + HTTPurple.notFound + Right (Just job) -> do + HTTPurple.emptyResponse Status.ok + -- TODO: Return the job details (will need to update the jobCodec and move the various + -- details into the API module). + -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + + Status, Get -> + HTTPurple.emptyResponse Status.ok + + Status, Head -> + HTTPurple.emptyResponse Status.ok + + _, _ -> + HTTPurple.notFound + where + forkPackageJob :: PackageOperation -> ContT Response (Run _) Response + forkPackageJob operation = do + lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) + jobId <- newJobId + lift $ Db.insertPackageJob { jobId, payload: operation } + jsonOk V1.jobCreatedResponseCodec { jobId } + + newJobId :: forall m. MonadEffect m => m JobId + newJobId = liftEffect do + id <- UUID.make + pure $ JobId $ UUID.toString id diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 67216ca35..31c15866c 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,6 +15,7 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.JobType as JobType import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') @@ -66,7 +67,7 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType + , jobType :: JobType.JobType , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime @@ -77,7 +78,7 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: jobTypeCodec + , jobType: JobType.codec , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime @@ -92,26 +93,6 @@ derive instance Newtype JobId _ jobIdCodec :: CJ.Codec JobId jobIdCodec = Profunctor.wrapIso JobId CJ.string -data JobType = PublishJob | UnpublishJob | TransferJob - -derive instance Eq JobType - -parseJobType :: String -> Either String JobType -parseJobType = case _ of - "publish" -> Right PublishJob - "unpublish" -> Right UnpublishJob - "transfer" -> Right TransferJob - j -> Left $ "Invalid job type " <> show j - -printJobType :: JobType -> String -printJobType = case _ of - PublishJob -> "publish" - UnpublishJob -> "unpublish" - TransferJob -> "transfer" - -jobTypeCodec :: CJ.Codec JobType -jobTypeCodec = CJ.Sum.enumSum 
printJobType (hush <<< parseJobType) - type LogLine = { level :: LogLevel , message :: String diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs new file mode 100644 index 000000000..b8dceaf38 --- /dev/null +++ b/lib/src/JobType.purs @@ -0,0 +1,26 @@ +module Registry.JobType where + +import Prelude +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Sum as CJ.Sum +import Data.Either (Either(..), hush) + +data JobType = PublishJob | UnpublishJob | TransferJob + +derive instance Eq JobType + +parse :: String -> Either String JobType +parse = case _ of + "publish" -> Right PublishJob + "unpublish" -> Right UnpublishJob + "transfer" -> Right TransferJob + j -> Left $ "Invalid job type " <> show j + +print :: JobType -> String +print = case _ of + PublishJob -> "publish" + UnpublishJob -> "unpublish" + TransferJob -> "transfer" + +codec :: CJ.Codec JobType +codec = CJ.Sum.enumSum print (hush <<< parse) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 521bc2883..518c1a6de 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -14,8 +14,8 @@ -- | are well-formed, and JSON codecs package managers can use to construct the -- | requests necessary to send to the Registry API or publish in a GitHub issue. module Registry.Operation - ( AuthenticatedPackageOperation(..) - , AuthenticatedData + ( AuthenticatedData + , AuthenticatedPackageOperation(..) , PackageOperation(..) , PackageSetOperation(..) , PackageSetUpdateData @@ -23,13 +23,15 @@ module Registry.Operation , TransferData , UnpublishData , authenticatedCodec + , packageName , packageOperationCodec , packageSetOperationCodec , packageSetUpdateCodec , publishCodec , transferCodec , unpublishCodec - ) where + ) + where import Prelude @@ -60,6 +62,13 @@ data PackageOperation derive instance Eq PackageOperation +packageName :: PackageOperation -> PackageName +packageName = case _ of + Publish { name } -> name + Authenticated { payload } -> case payload of + Unpublish { name } -> name + Transfer { name } -> name + -- | A codec for encoding and decoding a `PackageOperation` as JSON. 
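-- | For example, `stringifyJson packageOperationCodec (Publish payload)`
-- | renders a publish request as JSON, and `parseJson packageOperationCodec`
-- | reads one back.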
packageOperationCodec :: CJ.Codec PackageOperation packageOperationCodec = CJ.named "PackageOperation" $ Codec.codec' decode encode @@ -88,6 +97,7 @@ type PublishData = { name :: PackageName , location :: Maybe Location , ref :: String + , version :: Version , compiler :: Version , resolutions :: Maybe (Map PackageName Version) } @@ -98,6 +108,7 @@ publishCodec = CJ.named "Publish" $ CJ.Record.object { name: PackageName.codec , location: CJ.Record.optional Location.codec , ref: CJ.string + , version: Version.codec , compiler: Version.codec , resolutions: CJ.Record.optional (Internal.Codec.packageMap Version.codec) } From 2fe96357cd553051ae70088891279dc5c6b22b67 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Wed, 25 Jun 2025 20:04:48 -0400 Subject: [PATCH 03/19] Fix up build --- app/src/App/JobExecutor.purs | 76 ++-- app/test/App/API.purs | 3 + app/test/App/GitHubIssue.purs | 3 + package-lock.json | 642 +++++++++++++++++++++++++++++++- package.json | 5 +- scripts/src/LegacyImporter.purs | 1 + scripts/src/PackageDeleter.purs | 1 + 7 files changed, 690 insertions(+), 41 deletions(-) diff --git a/app/src/App/JobExecutor.purs b/app/src/App/JobExecutor.purs index 0bd6fa44f..e5d29bd95 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/JobExecutor.purs @@ -11,7 +11,7 @@ import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) -import Registry.App.Server.Env (ServerEnv, ServerEffects, runEffects) +import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Run (Run) import Run.Except (EXCEPT) @@ -31,57 +31,55 @@ findNextAvailableJob = do Nothing -> pure Nothing runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) -runJobExecutor env = do - runEffects env Db.deleteIncompleteJobs >>= case _ of - Left err -> pure $ Left err - Right _ -> loop +runJobExecutor env = runEffects env do + Db.deleteIncompleteJobs + loop where - loop = runEffects env findNextAvailableJob >>= case _ of - Left err -> - pure $ Left err + loop = do + mJob <- findNextAvailableJob + case mJob of + Nothing -> do + liftAff $ Aff.delay (Milliseconds 100.0) + loop - Right Nothing -> do - Aff.delay (Milliseconds 100.0) - loop + Just job -> do + now <- nowUTC - Right (Just job) -> do - now <- nowUTC + let + jobId = case job of + PackageJob details -> details.jobId + MatrixJob details -> details.jobId + PackageSetJob details -> details.jobId - let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId + Db.startJob { jobId, startedAt: now } - -- We race the job execution against a timeout; if the timeout happens first, - -- we kill the job and move on to the next one. - jobResult <- do - let execute = Just <$> runEffects env (executeJob now job) - let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes - let timeout = Aff.delay (Milliseconds delay) $> Nothing - Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout + -- We race the job execution against a timeout; if the timeout happens first, + -- we kill the job and move on to the next one. 
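+        -- (`liftAff` is needed here: the race itself runs in plain `Aff`,
+        -- since `executeJob` is re-entered through `runEffects`, which
+        -- produces an `Aff` action.)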
+ jobResult <- liftAff do + let execute = Just <$> (runEffects env $ executeJob now job) + let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes + let timeout = Aff.delay (Milliseconds delay) $> Nothing + Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout - finishResult <- runEffects env case jobResult of - Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." - Db.finishJob { jobId, finishedAt: now, success: false } + success <- case jobResult of + Nothing -> do + Log.error $ "Job " <> un JobId jobId <> " timed out." + pure false - Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err - Db.finishJob { jobId, finishedAt: now, success: false } + Just (Left err) -> do + Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + pure false - Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." - Db.finishJob { jobId, finishedAt: now, success: true } + Just (Right _) -> do + Log.info $ "Job " <> un JobId jobId <> " succeeded." + pure true - case finishResult of - Left err -> pure $ Left err - Right _ -> loop + Db.finishJob { jobId, finishedAt: now, success } + loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob now = case _ of PackageJob { jobId } -> do - Db.startJob { jobId, startedAt: now } pure unit -- UNIMPLEMENTED MatrixJob _details -> pure unit -- UNIMPLEMENTED diff --git a/app/test/App/API.purs b/app/test/App/API.purs index caaf6c215..36a2e61a2 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,6 +96,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref + , version , resolutions: Nothing } @@ -158,6 +159,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-type-equality", subdir: Nothing } , name: Utils.unsafePackageName "type-equality" , ref: "v4.0.1" + , version: Utils.unsafeVersion "4.0.1" , resolutions: Nothing } Registry.readAllManifests >>= \idx -> @@ -172,6 +174,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-transitive", subdir: Nothing } , name: transitive.name , ref: "v" <> Version.print transitive.version + , version: transitive.version , resolutions: Nothing } Registry.readAllManifests >>= \idx -> diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs index 70b3ccb3a..8276bf708 100644 --- a/app/test/App/GitHubIssue.purs +++ b/app/test/App/GitHubIssue.purs @@ -32,6 +32,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "something" , ref: "v1.2.3" + , version: Utils.unsafeVersion "1.2.3" , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] , location: Nothing @@ -47,6 +48,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , resolutions: Just $ Map.fromFoldable [ Utils.unsafePackageName "prelude" /\ Utils.unsafeVersion "1.0.0" ] @@ -75,6 +77,7 @@ decodeEventsToOps = do operation = Publish { name: Utils.unsafePackageName "prelude" , ref: "v5.0.0" + , version: Utils.unsafeVersion "5.0.0" , location: Just $ GitHub { subdir: Nothing, owner: "purescript", repo: "purescript-prelude" } , compiler: Utils.unsafeVersion "0.15.0" , 
resolutions: Nothing diff --git a/package-lock.json b/package-lock.json index 93959c062..f4b4a86cc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,7 +10,10 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } }, "app": { "name": "registry-app", @@ -1598,6 +1601,12 @@ "node": ">=14.0.0" } }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "license": "Python-2.0" + }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1648,6 +1657,15 @@ "prebuild-install": "^7.1.1" } }, + "node_modules/big-integer": { + "version": "1.6.52", + "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", + "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", + "license": "Unlicense", + "engines": { + "node": ">=0.6" + } + }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -1676,6 +1694,27 @@ "resolved": "https://registry.npmjs.org/bowser/-/bowser-2.11.0.tgz", "integrity": "sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" }, + "node_modules/bplist-parser": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", + "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", + "license": "MIT", + "dependencies": { + "big-integer": "^1.6.44" + }, + "engines": { + "node": ">= 5.10.0" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -1719,6 +1758,21 @@ "node": ">=10.0.0" } }, + "node_modules/bundle-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", + "integrity": "sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", + "license": "MIT", + "dependencies": { + "run-applescript": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", @@ -1741,6 +1795,20 @@ "node": ">=10.0.0" } }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -1763,6 +1831,52 @@ "node": ">=4.0.0" } }, + "node_modules/default-browser": { + "version": "4.0.0", + 
"resolved": "https://registry.npmjs.org/default-browser/-/default-browser-4.0.0.tgz", + "integrity": "sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==", + "license": "MIT", + "dependencies": { + "bundle-name": "^3.0.0", + "default-browser-id": "^3.0.0", + "execa": "^7.1.1", + "titleize": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/default-browser-id": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-browser-id/-/default-browser-id-3.0.0.tgz", + "integrity": "sha512-OZ1y3y0SqSICtE8DE4S8YOE9UZOJ8wO16fKWVP5J1Qz42kV9jcnMVFrEE/noXb/ss3Q4pZIH79kxofzyNNtUNA==", + "license": "MIT", + "dependencies": { + "bplist-parser": "^0.2.0", + "untildify": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-lazy-prop": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-3.0.0.tgz", + "integrity": "sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/deprecation": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", @@ -1784,6 +1898,50 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", + "license": "BSD-2-Clause", + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/env-paths": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", + "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/execa": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", + "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.1", + "human-signals": "^4.3.0", + "is-stream": "^3.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^5.1.0", + "onetime": "^6.0.0", + "signal-exit": "^3.0.7", + "strip-final-newline": "^3.0.0" + }, + "engines": { + "node": "^14.18.0 || ^16.14.0 || >=18.0.0" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -1900,6 +2058,18 @@ "node": ">=10" } }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/github-from-package": { "version": "0.0.0", 
"resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", @@ -1921,6 +2091,15 @@ "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, + "node_modules/human-signals": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", + "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=14.18.0" + } + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -1950,6 +2129,21 @@ "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" }, + "node_modules/is-docker": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", + "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -1969,6 +2163,24 @@ "node": ">=0.10.0" } }, + "node_modules/is-inside-container": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", + "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", + "license": "MIT", + "dependencies": { + "is-docker": "^3.0.0" + }, + "bin": { + "is-inside-container": "cli.js" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -1985,6 +2197,51 @@ "node": ">=0.10.0" } }, + "node_modules/is-stream": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", + "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-wsl/node_modules/is-docker": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.2.1.tgz", + "integrity": "sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==", + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": 
"sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", @@ -2004,6 +2261,15 @@ "jsonrepair": "bin/cli.js" } }, + "node_modules/linkify-it": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", + "license": "MIT", + "dependencies": { + "uc.micro": "^1.0.1" + } + }, "node_modules/lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -2015,6 +2281,34 @@ "node": ">=10" } }, + "node_modules/markdown-it": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": "sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", + "license": "MIT", + "dependencies": { + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", + "mdurl": "^1.0.1", + "uc.micro": "^1.0.5" + }, + "bin": { + "markdown-it": "bin/markdown-it.js" + } + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==", + "license": "MIT" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "license": "MIT" + }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ -2035,6 +2329,18 @@ "node": ">=8.6" } }, + "node_modules/mimic-fn": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", + "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2142,6 +2448,33 @@ } } }, + "node_modules/npm-run-path": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", + "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "license": "MIT", + "dependencies": { + "path-key": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/npm-run-path/node_modules/path-key": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", + "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -2150,6 +2483,56 @@ "wrappy": "1" } }, + "node_modules/onetime": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", + "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^4.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", + "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", + "license": "MIT", + "dependencies": { + "default-browser": "^4.0.0", + "define-lazy-prop": "^3.0.0", + "is-inside-container": "^1.0.0", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2195,6 +2578,15 @@ "once": "^1.3.1" } }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2262,6 +2654,124 @@ "node": ">=0.10.0" } }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/run-applescript": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", + "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", + "license": "MIT", + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/run-applescript/node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/run-applescript/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/run-applescript/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/run-applescript/node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/run-applescript/node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -2322,6 +2832,33 @@ "node": ">=10" } }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -2365,6 +2902,34 @@ "simple-concat": "^1.0.0" } }, + "node_modules/spago": { + "version": "0.93.19", + "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", + "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", + "license": "BSD-3-Clause", + "dependencies": { + "better-sqlite3": "^8.6.0", + "env-paths": "^3.0.0", + "fast-glob": "^3.2.11", + "fs-extra": "^10.0.0", + "fuse.js": "^6.5.3", + "glob": "^7.1.6", + "markdown-it": "^12.0.4", + "open": "^9.1.0", + "punycode": "^2.3.0", + "semver": "^7.3.5", + "spdx-expression-parse": "^3.0.1", + "ssh2": "^1.14.0", + "supports-color": "^9.2.3", + "tar": "^6.1.11", + "tmp": "^0.2.1", + "xhr2": "^0.2.1", + "yaml": "^2.1.1" + }, + "bin": { + "spago": "bin/bundle.js" + } + }, "node_modules/spdx-exceptions": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", @@ -2409,6 +2974,18 @@ "safe-buffer": "~5.2.0" } }, + "node_modules/strip-final-newline": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", + "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -2422,6 +2999,18 @@ "resolved": "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz", "integrity": "sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==" }, + "node_modules/supports-color": { + "version": "9.4.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", + "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", @@ -2469,6 +3058,18 @@ "node": ">=6" } }, + "node_modules/titleize": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", + "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tmp": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.4.tgz", @@ -2515,6 +3116,12 @@ "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==" }, + "node_modules/uc.micro": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", + "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", + "license": "MIT" + }, "node_modules/universal-user-agent": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", @@ -2528,6 +3135,15 @@ "node": ">= 10.0.0" } }, + "node_modules/untildify": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", + "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", @@ -2555,11 +3171,35 @@ "webidl-conversions": "^3.0.0" } }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, + "node_modules/xhr2": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", + "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/package.json b/package.json index 76bc4e96e..5066e42c0 100644 --- a/package.json +++ b/package.json @@ -6,5 +6,8 @@ "app", "foreign", "lib" - ] + ], + "dependencies": { + "spago": "^0.93.19" + } } diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index d642d41dc..0fdc94a06 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -471,6 +471,7 @@ runLegacyImport logs = do { name: manifest.name , location: Just manifest.location , ref + , version: manifest.version , compiler , resolutions: Just resolutions } diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index f0cb1c63f..db9b54d23 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -243,6 +243,7 @@ deleteVersion arguments name version = do { location: Just oldMetadata.location , name: name , ref: specificPackageMetadata.ref + , version , compiler: unsafeFromRight $ Version.parse "0.15.4" , resolutions: Nothing } From a4f1047e56c4d11666ac4aa622620ae16aa5d9f2 Mon Sep 17 00:00:00 2001 From: Fyodor Soikin Date: Sat, 5 Jul 2025 23:27:56 -0400 Subject: [PATCH 04/19] Run job executor --- app/src/App/Main.purs | 133 ++++++++++++---------- app/src/App/Prelude.purs | 2 +- app/src/App/SQLite.purs | 6 +- app/src/App/{ => Server}/JobExecutor.purs | 22 ++-- app/src/App/Server/Router.purs | 46 ++++++-- lib/src/API/V1.purs | 7 -- lib/src/JobType.purs | 1 + lib/src/Operation.purs | 3 +- 8 files changed, 129 insertions(+), 91 deletions(-) rename app/src/App/{ => Server}/JobExecutor.purs (82%) diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index c734d90f7..df94b6e17 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -2,84 +2,103 @@ module Registry.App.Main where import Registry.App.Prelude hiding ((/)) -import Data.String as String +import Data.DateTime (diff) +import Data.Time.Duration (Milliseconds(..), Seconds(..)) +import Debug (traceM) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry -import 
HTTPurple (Request, Response) -import HTTPurple as HTTPurple import Node.Process as Process -import Registry.API.V1 (Route) -import Registry.API.V1 as V1 -import Registry.App.Effect.Env as Env -import Registry.App.Server.Env (ServerEnv, createServerEnv, runEffects) +import Registry.App.Server.Env (ServerEnv, createServerEnv) +import Registry.App.Server.JobExecutor as JobExecutor import Registry.App.Server.Router as Router main :: Effect Unit -main = +main = do + traceM 1 createServerEnv # Aff.runAff_ case _ of Left error -> do + traceM 2 Console.log $ "Failed to start server: " <> Aff.message error Process.exit' 1 Right env -> do - -- Start healthcheck ping loop if URL is configured case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" - Just healthchecksUrl -> do - _healthcheck <- Aff.launchAff do - let - limit = 10 - oneMinute = Aff.Milliseconds (1000.0 * 60.0) - fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) + Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + Aff.launchAff_ $ jobExecutor env + Router.runRouter env + where + healthcheck :: String -> Aff Unit + healthcheck healthchecksUrl = loop limit + where + limit = 10 + oneMinute = Aff.Milliseconds (1000.0 * 60.0) + fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) - loop n = - Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of - Succeeded { status } | status == 200 -> do - Aff.delay fiveMinutes - loop n + loop n = do + traceM 4 + Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of + Succeeded { status } | status == 200 -> do + traceM 5 + Aff.delay fiveMinutes + loop n - Cancelled | n >= 0 -> do - Console.warn $ "Healthchecks cancelled, will retry..." - Aff.delay oneMinute - loop (n - 1) + Cancelled | n >= 0 -> do + traceM 6 + Console.warn $ "Healthchecks cancelled, will retry..." + Aff.delay oneMinute + loop (n - 1) - Failed error | n >= 0 -> do - Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error - Aff.delay oneMinute - loop (n - 1) + Failed error | n >= 0 -> do + traceM 7 + Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error + Aff.delay oneMinute + loop (n - 1) - Succeeded { status } | status /= 200, n >= 0 -> do - Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status - Aff.delay oneMinute - loop (n - 1) + Succeeded { status } | status /= 200, n >= 0 -> do + traceM 8 + Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status + Aff.delay oneMinute + loop (n - 1) - Cancelled -> - Console.error "Healthchecks cancelled and failure limit reached, will not retry." + Cancelled -> do + traceM 9 + Console.error + "Healthchecks cancelled and failure limit reached, will not retry." - Failed error -> do - Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error + Failed error -> do + traceM 10 + Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error - Succeeded _ -> do - Console.error $ "Healthchecks returned non-200 status and failure limit reached, will not retry." + Succeeded _ -> do + traceM 11 + Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." 
- loop limit - pure unit + jobExecutor :: ServerEnv -> Aff Unit + jobExecutor env = do + traceM 12 + loop initialRestartDelay + where + initialRestartDelay = Milliseconds 100.0 - -- Read port from SERVER_PORT env var (optional, HTTPurple defaults to 8080) - port <- liftEffect $ Env.lookupOptional Env.serverPort + loop restartDelay = do + traceM 13 + start <- nowUTC + result <- JobExecutor.runJobExecutor env + end <- nowUTC - _close <- HTTPurple.serve - { hostname: "0.0.0.0" - , port - } - { route: V1.routes - , router: runServer env - } - pure unit - where - runServer :: ServerEnv -> Request Route -> Aff Response - runServer env request = do - result <- runEffects env (Router.router env request) - case result of - Left error -> HTTPurple.badRequest (Aff.message error) - Right response -> pure response + traceM 14 + Console.error case result of + Left error -> "Job executor failed: " <> Aff.message error + Right _ -> "Job executor exited for no reason." + + -- This is a heuristic: if the executor keeps crashing immediately, we + -- restart with an exponentially increasing delay, but once the executor + -- had a run longer than a minute, we start over with a small delay. + let + nextRestartDelay + | end `diff` start > Seconds 60.0 = initialRestartDelay + | otherwise = restartDelay <> restartDelay + + Aff.delay nextRestartDelay + loop nextRestartDelay diff --git a/app/src/App/Prelude.purs b/app/src/App/Prelude.purs index 7a046414d..5e586ebae 100644 --- a/app/src/App/Prelude.purs +++ b/app/src/App/Prelude.purs @@ -60,7 +60,7 @@ import Data.List (List) as Extra import Data.Map (Map) as Extra import Data.Map as Map import Data.Maybe (Maybe(..), fromJust, fromMaybe, isJust, isNothing, maybe) as Maybe -import Data.Newtype (class Newtype, un) as Extra +import Data.Newtype (class Newtype, un, unwrap, wrap) as Extra import Data.Newtype as Newtype import Data.Nullable (Nullable, toMaybe, toNullable) as Extra import Data.Set (Set) as Extra diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index b56575757..208befb9a 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -248,7 +248,6 @@ insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< i type PackageJobDetails = { jobId :: JobId - , jobType :: JobType.JobType , packageName :: PackageName , packageVersion :: Version , payload :: PackageOperation @@ -258,7 +257,6 @@ type PackageJobDetails = type JSPackageJobDetails = { jobId :: String - , jobType :: String , packageName :: String , packageVersion :: String , payload :: String @@ -267,8 +265,7 @@ type JSPackageJobDetails = } packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payload, createdAt, startedAt } = do - ty <- JobType.parse jobType +packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do name <- PackageName.parse packageName version <- Version.parse packageVersion created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt @@ -276,7 +273,6 @@ packageJobDetailsFromJSRep { jobId, jobType, packageName, packageVersion, payloa parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload pure { jobId: JobId jobId - , jobType: ty , packageName: name , packageVersion: version , payload: parsed diff --git a/app/src/App/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs similarity index 82% rename from app/src/App/JobExecutor.purs rename to 
app/src/App/Server/JobExecutor.purs index e5d29bd95..125a9a7a3 100644 --- a/app/src/App/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,4 +1,4 @@ -module Registry.App.JobExecutor where +module Registry.App.Server.JobExecutor where import Registry.App.Prelude hiding ((/)) @@ -6,12 +6,13 @@ import Control.Parallel as Parallel import Data.DateTime (DateTime) import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff -import Registry.API.V1 (JobId(..)) +import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.Operation as Operation import Run (Run) import Run.Except (EXCEPT) @@ -21,7 +22,7 @@ data JobDetails | PackageSetJob PackageSetJobDetails findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = do +findNextAvailableJob = Db.selectNextPackageJob >>= case _ of Just job -> pure $ Just $ PackageJob job Nothing -> Db.selectNextMatrixJob >>= case _ of @@ -63,24 +64,27 @@ runJobExecutor env = runEffects env do success <- case jobResult of Nothing -> do - Log.error $ "Job " <> un JobId jobId <> " timed out." + Log.error $ "Job " <> unwrap jobId <> " timed out." pure false Just (Left err) -> do - Log.warn $ "Job " <> un JobId jobId <> " failed:\n" <> Aff.message err + Log.warn $ "Job " <> unwrap jobId <> " failed:\n" <> Aff.message err pure false Just (Right _) -> do - Log.info $ "Job " <> un JobId jobId <> " succeeded." + Log.info $ "Job " <> unwrap jobId <> " succeeded." pure true Db.finishJob { jobId, finishedAt: now, success } loop executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit -executeJob now = case _ of - PackageJob { jobId } -> do - pure unit -- UNIMPLEMENTED +executeJob _ = case _ of + PackageJob { payload: Operation.Publish p } -> + API.publish Nothing p + PackageJob { payload: Operation.Authenticated auth } -> + API.authenticated auth + MatrixJob _details -> pure unit -- UNIMPLEMENTED PackageSetJob _details -> diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 27af29a24..840dab5a0 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,23 +4,46 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ +import Data.String as String import Data.UUID.Random as UUID +import Effect.Aff as Aff +import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db +import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log -import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk) +import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) import Registry.Operation (PackageOperation) import Registry.Operation as Operation import Registry.PackageName as PackageName import Run (Run) import Run.Except as Run.Except -router :: ServerEnv -> Request Route -> Run ServerEffects Response -router env { route, method, body } = HTTPurple.usingCont case route, method of +runRouter :: ServerEnv -> Effect Unit +runRouter env = do + -- Read port from SERVER_PORT env var (optional, HTTPurple 
defaults to 8080) + port <- liftEffect $ Env.lookupOptional Env.serverPort + void $ HTTPurple.serve + { hostname: "0.0.0.0" + , port + } + { route: V1.routes + , router: runServer + } + where + runServer :: Request Route -> Aff Response + runServer request = do + result <- runEffects env (router request) + case result of + Left error -> HTTPurple.badRequest (Aff.message error) + Right response -> pure response + +router :: Request Route -> Run ServerEffects Response +router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish @@ -45,22 +68,25 @@ router env { route, method, body } = HTTPurple.usingCont case route, method of HTTPurple.badRequest "Expected transfer operation." Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [] + jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: bottom, finishedAt: Nothing, success: true, logs: [] } ] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept (Db.selectJobInfo jobId)) >>= case _ of + lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound Right Nothing -> HTTPurple.notFound - Right (Just job) -> do - HTTPurple.emptyResponse Status.ok - -- TODO: Return the job details (will need to update the jobCodec and move the various - -- details into the API module). - -- jsonOk V1.jobCodec (jobDetailstoV1Job job logs) + Right (Just job) -> + jsonOk V1.jobCodec + { jobId + , createdAt: job.createdAt + , finishedAt: job.finishedAt + , success: job.success + , logs + } Status, Get -> HTTPurple.emptyResponse Status.ok diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 31c15866c..4bae692f5 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -15,9 +15,6 @@ import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.JobType as JobType -import Registry.PackageName (PackageName) -import Registry.PackageName as PackageName import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG @@ -67,8 +64,6 @@ jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { job type Job = { jobId :: JobId - , jobType :: JobType.JobType - , packageName :: PackageName , createdAt :: DateTime , finishedAt :: Maybe DateTime , success :: Boolean @@ -78,8 +73,6 @@ type Job = jobCodec :: CJ.Codec Job jobCodec = CJ.named "Job" $ CJ.Record.object { jobId: jobIdCodec - , jobType: JobType.codec - , packageName: PackageName.codec , createdAt: Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs index b8dceaf38..dbc4eaf01 100644 --- a/lib/src/JobType.purs +++ b/lib/src/JobType.purs @@ -1,6 +1,7 @@ module Registry.JobType where import Prelude + import Data.Codec.JSON as CJ import Data.Codec.JSON.Sum as CJ.Sum import Data.Either (Either(..), hush) diff --git a/lib/src/Operation.purs b/lib/src/Operation.purs index 518c1a6de..262ceb3db 100644 --- a/lib/src/Operation.purs +++ b/lib/src/Operation.purs @@ -30,8 +30,7 @@ module 
Registry.Operation , publishCodec , transferCodec , unpublishCodec - ) - where + ) where import Prelude From dfd7e78bc04e9d1ad9a9213a71c61dc922309fc8 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Tue, 9 Dec 2025 12:27:40 +0200 Subject: [PATCH 05/19] Fix integration tests --- app-e2e/src/Test/E2E/Publish.purs | 7 +- app/src/App/API.purs | 2 +- app/src/App/Main.purs | 14 -- app/src/App/SQLite.js | 11 +- app/src/App/SQLite.purs | 197 ++++++++++-------- app/src/App/Server/JobExecutor.purs | 27 ++- app/src/App/Server/Router.purs | 18 +- ...20240914171030_create_job_queue_tables.sql | 1 - db/schema.sql | 1 - lib/src/Metadata.purs | 5 - nix/overlay.nix | 4 +- package-lock.json | 76 +++++-- scripts/src/Solver.purs | 1 - test-utils/src/Registry/Test/E2E/Client.purs | 8 +- 14 files changed, 210 insertions(+), 162 deletions(-) diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index f7bd1d63e..051d1931b 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -56,6 +56,7 @@ spec = do , ref: "v4.0.0" , compiler: Utils.unsafeVersion "0.15.9" , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" } -- Submit publish request @@ -79,6 +80,6 @@ spec = do Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages Assert.shouldSatisfy job.finishedAt isJust - Assert.shouldEqual job.jobType V1.PublishJob - Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") - Assert.shouldEqual job.ref "v4.0.0" +-- Assert.shouldEqual job.jobType JobType.PublishJob +-- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") +-- Assert.shouldEqual job.ref "v4.0.0" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 89322d52b..8972a8230 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -31,7 +31,7 @@ import Data.FoldableWithIndex (foldMapWithIndex) import Data.List.NonEmpty as NonEmptyList import Data.Map (SemigroupMap(..)) import Data.Map as Map -import Data.Newtype (over, unwrap) +import Data.Newtype (over) import Data.Number.Format as Number.Format import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index df94b6e17..e638cc684 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -4,7 +4,6 @@ import Registry.App.Prelude hiding ((/)) import Data.DateTime (diff) import Data.Time.Duration (Milliseconds(..), Seconds(..)) -import Debug (traceM) import Effect.Aff as Aff import Effect.Class.Console as Console import Fetch.Retry as Fetch.Retry @@ -15,10 +14,8 @@ import Registry.App.Server.Router as Router main :: Effect Unit main = do - traceM 1 createServerEnv # Aff.runAff_ case _ of Left error -> do - traceM 2 Console.log $ "Failed to start server: " <> Aff.message error Process.exit' 1 Right env -> do @@ -36,58 +33,47 @@ main = do fiveMinutes = Aff.Milliseconds (1000.0 * 60.0 * 5.0) loop n = do - traceM 4 Fetch.Retry.withRetryRequest healthchecksUrl {} >>= case _ of Succeeded { status } | status == 200 -> do - traceM 5 Aff.delay fiveMinutes loop n Cancelled | n >= 0 -> do - traceM 6 Console.warn $ "Healthchecks cancelled, will retry..." 
Aff.delay oneMinute loop (n - 1) Failed error | n >= 0 -> do - traceM 7 Console.warn $ "Healthchecks failed, will retry: " <> Fetch.Retry.printRetryRequestError error Aff.delay oneMinute loop (n - 1) Succeeded { status } | status /= 200, n >= 0 -> do - traceM 8 Console.error $ "Healthchecks returned non-200 status, will retry: " <> show status Aff.delay oneMinute loop (n - 1) Cancelled -> do - traceM 9 Console.error "Healthchecks cancelled and failure limit reached, will not retry." Failed error -> do - traceM 10 Console.error $ "Healthchecks failed and failure limit reached, will not retry: " <> Fetch.Retry.printRetryRequestError error Succeeded _ -> do - traceM 11 Console.error "Healthchecks returned non-200 status and failure limit reached, will not retry." jobExecutor :: ServerEnv -> Aff Unit jobExecutor env = do - traceM 12 loop initialRestartDelay where initialRestartDelay = Milliseconds 100.0 loop restartDelay = do - traceM 13 start <- nowUTC result <- JobExecutor.runJobExecutor env end <- nowUTC - traceM 14 Console.error case result of Left error -> "Job executor failed: " <> Aff.message error Right _ -> "Job executor exited for no reason." diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 97521d202..1e8042cca 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -44,7 +44,7 @@ const _insertJob = (db, table, columns, job) => { const insertInfo = db.prepare(` INSERT INTO ${JOB_INFO_TABLE} (jobId, createdAt, startedAt, finishedAt, success) - VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success + VALUES (@jobId, @createdAt, @startedAt, @finishedAt, @success) `); const insertJob = db.prepare(` @@ -67,17 +67,17 @@ const _insertJob = (db, table, columns, job) => { }; export const insertPackageJobImpl = (db, job) => { - const columns = [ 'jobId', 'jobType', 'payload' ] + const columns = ['jobId', 'jobType', 'packageName', 'payload'] return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); }; export const insertMatrixJobImpl = (db, job) => { - const columns = [ 'jobId', 'compilerVersion', 'payload' ] + const columns = ['jobId', 'packageName', 'packageVersion', 'compilerVersion', 'payload'] return _insertJob(db, MATRIX_JOBS_TABLE, columns, job); }; export const insertPackageSetJobImpl = (db, job) => { - const columns = [ 'jobId', 'payload' ] + const columns = ['jobId', 'payload'] return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job); }; @@ -87,6 +87,7 @@ export const selectNextPackageJobImpl = (db) => { FROM ${PACKAGE_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); @@ -99,6 +100,7 @@ export const selectNextMatrixJobImpl = (db) => { FROM ${MATRIX_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); @@ -111,6 +113,7 @@ export const selectNextPackageSetJobImpl = (db) => { FROM ${PACKAGE_SET_JOBS_TABLE} job JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId WHERE info.finishedAt IS NULL + AND info.startedAt IS NULL ORDER BY info.createdAt DESC LIMIT 1 `); diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 208befb9a..dd4268451 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -83,6 +83,9 @@ type ConnectOptions = connect :: ConnectOptions -> Effect SQLite connect { database, logger } = Uncurried.runEffectFn2 connectImpl database (Uncurried.mkEffectFn1 logger) 
+-------------------------------------------------------------------------------- +-- job_info table + -- | Metadata about a particular package, package set, or matrix job. type JobInfo = { jobId :: JobId @@ -124,6 +127,30 @@ selectJobInfo db (JobId jobId) = do maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId pure $ traverse jobInfoFromJSRep maybeJobInfo +finishJob :: SQLite -> FinishJob -> Effect Unit +finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep + +type StartJob = + { jobId :: JobId + , startedAt :: DateTime + } + +type JSStartJob = + { jobId :: String + , startedAt :: String + } + +startJobToJSRep :: StartJob -> JSStartJob +startJobToJSRep { jobId, startedAt } = + { jobId: un JobId jobId + , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt + } + +foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit + +startJob :: SQLite -> StartJob -> Effect Unit +startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep + type FinishJob = { jobId :: JobId , success :: Boolean @@ -145,34 +172,52 @@ finishJobToJSRep { jobId, success, finishedAt } = foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -finishJob :: SQLite -> FinishJob -> Effect Unit -finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep +foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit -type StartJob = +-- TODO: we shouldn't delete them I think? just remove the startedAt so they +-- can be retried +deleteIncompleteJobs :: SQLite -> Effect Unit +deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl + +-------------------------------------------------------------------------------- +-- package_jobs table + +type PackageJobDetails = { jobId :: JobId - , startedAt :: DateTime + , packageName :: PackageName + , payload :: PackageOperation + , createdAt :: DateTime + , startedAt :: Maybe DateTime } -type JSStartJob = +type JSPackageJobDetails = { jobId :: String - , startedAt :: String - } - -startJobToJSRep :: StartJob -> JSStartJob -startJobToJSRep { jobId, startedAt } = - { jobId: un JobId jobId - , startedAt: DateTime.format Internal.Format.iso8601DateTime startedAt + , packageName :: String + , payload :: String + , createdAt :: String + , startedAt :: Nullable String } -foreign import startJobImpl :: EffectFn2 SQLite JSStartJob Unit - -startJob :: SQLite -> StartJob -> Effect Unit -startJob db = Uncurried.runEffectFn2 startJobImpl db <<< startJobToJSRep +packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails +packageJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt } = do + name <- PackageName.parse packageName + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload + pure + { jobId: JobId jobId + , packageName: name + , payload: parsed + , createdAt: created + , startedAt: started + } -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) +selectNextPackageJob db = do + maybeJobDetails <- map toMaybe $ 
Uncurried.runEffectFn1 selectNextPackageJobImpl db + pure $ traverse packageJobDetailsFromJSRep maybeJobDetails type InsertPackageJob = { jobId :: JobId @@ -182,24 +227,35 @@ type InsertPackageJob = type JSInsertPackageJob = { jobId :: String , jobType :: String + , packageName :: String , payload :: String + , createdAt :: String } -insertPackageJobToJSRep :: InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep { jobId, payload } = +insertPackageJobToJSRep :: DateTime -> InsertPackageJob -> JSInsertPackageJob +insertPackageJobToJSRep now { jobId, payload } = { jobId: un JobId jobId - , jobType: JobType.print case payload of - Operation.Publish _ -> JobType.PublishJob - Operation.Authenticated { payload: Operation.Unpublish _ } -> JobType.UnpublishJob - Operation.Authenticated { payload: Operation.Transfer _ } -> JobType.TransferJob + , jobType: JobType.print jobType + , packageName: PackageName.print name , payload: stringifyJson Operation.packageOperationCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now } + where + { jobType, name } = case payload of + Operation.Publish { name } -> { jobType: JobType.PublishJob, name } + Operation.Authenticated { payload: Operation.Unpublish { name } } -> { jobType: JobType.UnpublishJob, name } + Operation.Authenticated { payload: Operation.Transfer { name } } -> { jobType: JobType.TransferJob, name } foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit -- | Insert a new package job, ie. a publish, unpublish, or transfer. insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit -insertPackageJob db = Uncurried.runEffectFn2 insertPackageJobImpl db <<< insertPackageJobToJSRep +insertPackageJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep now job + +-------------------------------------------------------------------------------- +-- matrix_jobs table type InsertMatrixJob = { jobId :: JobId @@ -225,68 +281,6 @@ foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep -type InsertPackageSetJob = - { jobId :: JobId - , payload :: PackageSetOperation - } - -type JSInsertPackageSetJob = - { jobId :: String - , payload :: String - } - -insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep { jobId, payload } = - { jobId: un JobId jobId - , payload: stringifyJson Operation.packageSetOperationCodec payload - } - -foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit - -insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit -insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep - -type PackageJobDetails = - { jobId :: JobId - , packageName :: PackageName - , packageVersion :: Version - , payload :: PackageOperation - , createdAt :: DateTime - , startedAt :: Maybe DateTime - } - -type JSPackageJobDetails = - { jobId :: String - , packageName :: String - , packageVersion :: String - , payload :: String - , createdAt :: String - , startedAt :: Nullable String - } - -packageJobDetailsFromJSRep :: JSPackageJobDetails -> Either String PackageJobDetails -packageJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt } = do - name <- PackageName.parse packageName - version <- Version.parse 
packageVersion - created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageOperationCodec payload - pure - { jobId: JobId jobId - , packageName: name - , packageVersion: version - , payload: parsed - , createdAt: created - , startedAt: started - } - -foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) - -selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) -selectNextPackageJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db - pure $ traverse packageJobDetailsFromJSRep maybeJobDetails - type MatrixJobDetails = { jobId :: JobId , packageName :: PackageName @@ -332,6 +326,9 @@ selectNextMatrixJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails +-------------------------------------------------------------------------------- +-- package_set_jobs table + type PackageSetJobDetails = { jobId :: JobId , payload :: PackageSetOperation @@ -365,6 +362,30 @@ selectNextPackageSetJob db = do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails +type InsertPackageSetJob = + { jobId :: JobId + , payload :: PackageSetOperation + } + +type JSInsertPackageSetJob = + { jobId :: String + , payload :: String + } + +insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep { jobId, payload } = + { jobId: un JobId jobId + , payload: stringifyJson Operation.packageSetOperationCodec payload + } + +foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit + +insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit +insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep + +-------------------------------------------------------------------------------- +-- logs table + type JSLogLine = { level :: Int , message :: String diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 125a9a7a3..fa2f70f24 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,7 +1,8 @@ -module Registry.App.Server.JobExecutor where +module Registry.App.Server.JobExecutor (runJobExecutor) where import Registry.App.Prelude hiding ((/)) +import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel import Data.DateTime (DateTime) import Effect.Aff (Milliseconds(..)) @@ -21,31 +22,21 @@ data JobDetails | MatrixJob MatrixJobDetails | PackageSetJob PackageSetJobDetails -findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe JobDetails) -findNextAvailableJob = - Db.selectNextPackageJob >>= case _ of - Just job -> pure $ Just $ PackageJob job - Nothing -> Db.selectNextMatrixJob >>= case _ of - Just job -> pure $ Just $ MatrixJob job - Nothing -> Db.selectNextPackageSetJob >>= case _ of - Just job -> pure $ Just $ PackageSetJob job - Nothing -> pure Nothing - runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do + Log.info "Starting Job Executor" Db.deleteIncompleteJobs loop where loop = do - mJob <- findNextAvailableJob - case mJob of + maybeJob <- findNextAvailableJob + case maybeJob of Nothing -> do - liftAff $ Aff.delay (Milliseconds 100.0) + liftAff $ Aff.delay (Milliseconds 1000.0) loop Just job -> do now <- nowUTC - let jobId = case job of PackageJob details -> details.jobId @@ -78,6 +69,12 @@ runJobExecutor env = runEffects env do Db.finishJob { jobId, finishedAt: now, success } loop +findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +findNextAvailableJob = runMaybeT + $ (PackageJob <$> MaybeT Db.selectNextPackageJob) + <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) + executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of PackageJob { payload: Operation.Publish p } -> diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index 840dab5a0..5ebfd4823 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,10 +4,8 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ -import Data.String as String import Data.UUID.Random as UUID import Effect.Aff as Aff -import Effect.Class.Console as Console import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status @@ -47,14 +45,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - forkPackageJob $ Operation.Publish publish + insertPackageJob $ Operation.Publish publish Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body case auth.payload of Operation.Unpublish payload -> do lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload - forkPackageJob $ Operation.Authenticated auth + insertPackageJob $ Operation.Authenticated auth _ -> HTTPurple.badRequest "Expected unpublish operation." @@ -63,12 +61,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Transfer payload -> do lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload - forkPackageJob $ Operation.Authenticated auth + insertPackageJob $ Operation.Authenticated auth _ -> HTTPurple.badRequest "Expected transfer operation." 
+ -- TODO return jobs Jobs, Get -> do - jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: bottom, finishedAt: Nothing, success: true, logs: [] } ] + now <- liftEffect nowUTC + jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: now, finishedAt: Nothing, success: true, logs: [] } ] Job jobId { level: maybeLogLevel, since }, Get -> do let logLevel = fromMaybe Error maybeLogLevel @@ -77,7 +77,7 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound - Right Nothing -> + Right Nothing -> do HTTPurple.notFound Right (Just job) -> jsonOk V1.jobCodec @@ -97,8 +97,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound where - forkPackageJob :: PackageOperation -> ContT Response (Run _) Response - forkPackageJob operation = do + insertPackageJob :: PackageOperation -> ContT Response (Run _) Response + insertPackageJob operation = do lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) jobId <- newJobId lift $ Db.insertPackageJob { jobId, payload: operation } diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql index 2b01deb0b..f4f1e68f3 100644 --- a/db/migrations/20240914171030_create_job_queue_tables.sql +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -14,7 +14,6 @@ CREATE TABLE package_jobs ( jobId TEXT PRIMARY KEY NOT NULL, jobType TEXT NOT NULL, packageName TEXT NOT NULL, - packageVersion TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); diff --git a/db/schema.sql b/db/schema.sql index 2ad866068..1baf6403f 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -10,7 +10,6 @@ CREATE TABLE package_jobs ( jobId TEXT PRIMARY KEY NOT NULL, jobType TEXT NOT NULL, packageName TEXT NOT NULL, - packageVersion TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); diff --git a/lib/src/Metadata.purs b/lib/src/Metadata.purs index ddc39b48b..c54bed31e 100644 --- a/lib/src/Metadata.purs +++ b/lib/src/Metadata.purs @@ -20,20 +20,15 @@ module Registry.Metadata import Prelude -import Control.Alt ((<|>)) -import Control.Monad.Except (Except, except) import Data.Array.NonEmpty (NonEmptyArray) -import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Common as CJ.Common import Data.Codec.JSON.Record as CJ.Record import Data.DateTime (DateTime) -import Data.Either (Either(..)) import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor -import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Location (Location) import Registry.Location as Location diff --git a/nix/overlay.nix b/nix/overlay.nix index 2ac1705fb..6c8d5b848 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -183,7 +183,7 @@ in ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ]; # To update: run `nix build .#server` and copy the hash from the error - npmDepsHash = "sha256-vm6k4DUDWUgPcPeym3YhA1hIg1LbHCDRBSH+7Zs52Uw="; + npmDepsHash = "sha256-Ju7R6Sa+NIHD8fkxLxicqToPLxLD4RM4wvl6bktE/7Y="; installPhase = '' mkdir -p $out @@ -236,7 +236,7 @@ in registry-server = prev.callPackage (buildRegistryPackage { name = "registry-server"; - module = "Registry.App.Server"; + module = "Registry.App.Main"; 
description = "PureScript Registry API server"; src = ../app; spagoLock = app; diff --git a/package-lock.json b/package-lock.json index f4b4a86cc..e22731749 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1615,6 +1615,12 @@ "safer-buffer": "~2.1.0" } }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1781,6 +1787,12 @@ "node": ">=10" } }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, "node_modules/cpu-features": { "version": "0.0.9", "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.9.tgz", @@ -2050,6 +2062,12 @@ "node": ">=8" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/fuse.js": { "version": "6.6.2", "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-6.6.2.tgz", @@ -2075,6 +2093,27 @@ "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==" }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -2119,6 +2158,17 @@ } ] }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2352,6 +2402,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -2654,20 +2716,6 @@ "node": ">=0.10.0" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/run-applescript": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index 8fa9a7070..aa2820e16 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -17,7 +17,6 @@ import Data.DateTime.Instant as Instant import Data.Foldable (foldMap) import Data.Formatter.DateTime as Formatter.DateTime import Data.Map as Map -import Data.Newtype (unwrap) import Data.String as String import Data.Time.Duration (Milliseconds(..)) import Effect.Class.Console as Aff diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs index 960484609..ff34107df 100644 --- a/test-utils/src/Registry/Test/E2E/Client.purs +++ b/test-utils/src/Registry/Test/E2E/Client.purs @@ -74,14 +74,14 @@ configFromEnv = do -- | Errors that can occur during client operations data ClientError = HttpError { status :: Int, body :: String } - | ParseError String + | ParseError { msg :: String, raw :: String } | Timeout String | NetworkError String printClientError :: ClientError -> String printClientError = case _ of HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body - ParseError msg -> "Parse Error: " <> msg + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw Timeout msg -> "Timeout: " <> msg NetworkError msg -> "Network Error: " <> msg @@ -102,7 +102,7 @@ get codec config path = runExceptT do body <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse codec body of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: body } Right a -> pure a else throwError $ HttpError { status: response.status, body } @@ -119,7 +119,7 @@ post reqCodec resCodec config path reqBody = runExceptT do responseBody <- lift response.text if response.status >= 200 && response.status < 300 then case parseResponse resCodec responseBody of - Left err -> throwError $ ParseError err + Left err -> throwError $ ParseError { msg: err, raw: responseBody } Right a -> pure a else throwError $ HttpError { status: 
response.status, body: responseBody } From cdbac72b0a2b89732c8b40aff2ffb1567c835d08 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 10:48:21 +0200 Subject: [PATCH 06/19] WIP matrix builds --- app/src/App/API.purs | 139 +++++++----------- app/src/App/Effect/Db.purs | 10 +- app/src/App/Main.purs | 6 + app/src/App/SQLite.js | 3 +- app/src/App/SQLite.purs | 30 +--- app/src/App/Server/Env.purs | 2 +- app/src/App/Server/JobExecutor.purs | 49 ++++++- app/src/App/Server/MatrixBuilder.purs | 202 ++++++++++++++++++++++++++ app/src/App/Server/Router.purs | 7 +- lib/src/ManifestIndex.purs | 17 ++- lib/src/Solver.purs | 5 +- scripts/src/LegacyImporter.purs | 5 +- 12 files changed, 334 insertions(+), 141 deletions(-) create mode 100644 app/src/App/Server/MatrixBuilder.purs diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 8972a8230..1e69a129e 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -9,11 +9,10 @@ module Registry.App.API , copyPackageSourceFiles , findAllCompilers , formatPursuitResolutions - , installBuildPlan , packageSetUpdate + , packageSetUpdate2 , packagingTeam , publish - , readCompilerIndex , removeIgnoredTarballFiles ) where @@ -83,6 +82,8 @@ import Registry.App.Legacy.Manifest (LEGACY_CACHE) import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.SQLite (PackageSetJobDetails) +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Constants (ignoredDirectories, ignoredFiles, ignoredGlobs, includedGlobs, includedInsensitiveGlobs) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.FastGlob as FastGlob @@ -116,6 +117,11 @@ import Safe.Coerce as Safe.Coerce type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit +packageSetUpdate2 {} = do + -- TODO: have github call into this + pure unit + -- | Process a package set update. Package set updates are only processed via -- | GitHub and not the HTTP API, so they require access to the GitHub env. packageSetUpdate :: forall r. PackageSetUpdateData -> Run (PackageSetUpdateEffects + r) Unit @@ -338,7 +344,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) Unit +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe (Map PackageName Range)) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -556,16 +562,17 @@ publish maybeLegacyIndex payload = do , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish , ". Please try with a later compiler." ] + pure Nothing Nothing -> do Comment.comment $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Skipping registry publishing and retrying Pursuit publishing..." 
] - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex verifiedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan verifiedResolutions installedResolutions + MatrixBuilder.installBuildPlan verifiedResolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ "src/**/*.purs", Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -573,7 +580,7 @@ publish maybeLegacyIndex payload = do } case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Log.error $ "Compilation failed, cannot upload to pursuit: " <> error Except.throw "Cannot publish to Pursuit because this package failed to compile." Right _ -> do @@ -590,12 +597,13 @@ publish maybeLegacyIndex payload = do Right _ -> do FS.Extra.remove tmp Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + pure Nothing -- In this case the package version has not been published, so we proceed -- with ordinary publishing. Nothing -> do Log.info "Verifying the package build plan..." - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions Comment.comment "Verifying unused and/or missing dependencies..." @@ -604,7 +612,7 @@ publish maybeLegacyIndex payload = do -- manifest as needed, but we defer compilation until after this check -- in case the package manifest and resolutions are adjusted. let installedResolutions = Path.concat [ tmp, ".registry" ] - installBuildPlan validatedResolutions installedResolutions + MatrixBuilder.installBuildPlan validatedResolutions installedResolutions let srcGlobs = Path.concat [ downloadedPackage, "src", "**", "*.purs" ] let depGlobs = Path.concat [ installedResolutions, "*", "src", "**", "*.purs" ] @@ -699,7 +707,7 @@ publish maybeLegacyIndex payload = do -- We clear the installation directory so that no old installed resolutions -- stick around. Run.liftAff $ FS.Extra.remove installedResolutions - installBuildPlan resolutions installedResolutions + MatrixBuilder.installBuildPlan resolutions installedResolutions compilationResult <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ packageSource, "src/**/*.purs" ], Path.concat [ installedResolutions, "*/src/**/*.purs" ] ] } , version: Just payload.compiler @@ -708,7 +716,7 @@ publish maybeLegacyIndex payload = do case compilationResult of Left compileFailure -> do - let error = printCompilerFailure payload.compiler compileFailure + let error = MatrixBuilder.printCompilerFailure payload.compiler compileFailure Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error Right _ -> pure unit @@ -770,28 +778,35 @@ publish maybeLegacyIndex payload = do , "). If you want to publish documentation, please try again with a later compiler." ] - Comment.comment "Determining all valid compiler versions for this package..." 
- allCompilers <- PursVersions.pursVersions - { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of - Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } - Just try -> do - found <- findAllCompilers - { source: packageSource - , manifest - , compilers: try - } - pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + -- Note: this only runs for the Legacy Importer. In daily circumstances (i.e. + -- when running the server) this will be taken care of by followup jobs invoking + -- the MatrixBuilder for each compiler version + for_ maybeLegacyIndex \_idx -> do + Comment.comment "Determining all valid compiler versions for this package..." + allCompilers <- PursVersions.pursVersions + { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of + Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } + Just try -> do + found <- findAllCompilers + { source: packageSource + , manifest + , compilers: try + } + pure { failed: found.failed, succeeded: NonEmptySet.cons payload.compiler found.succeeded } + + unless (Map.isEmpty invalidCompilers) do + Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - unless (Map.isEmpty invalidCompilers) do - Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) + Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) - let compilersMetadata = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } - Registry.writeMetadata (un Manifest manifest).name (Metadata compilersMetadata) - Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata compilersMetadata) + Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Comment.comment "Wrote completed metadata to the registry!" - Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp + pure $ Just (un Manifest manifest).dependencies -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the @@ -876,32 +891,30 @@ findAllCompilers . 
{ source :: FilePath, manifest :: Manifest, compilers :: NonEmptyArray Version } -> Run (REGISTRY + STORAGE + COMPILER_CACHE + LOG + AFF + EFFECT + EXCEPT String + r) FindAllCompilersResult findAllCompilers { source, manifest, compilers } = do - compilerIndex <- readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex checkedCompilers <- for compilers \target -> do Log.debug $ "Trying compiler " <> Version.print target case Solver.solveWithCompiler (Range.exact target) compilerIndex (un Manifest manifest).dependencies of Left solverErrors -> do Log.info $ "Failed to solve with compiler " <> Version.print target pure $ Left $ Tuple target (Left solverErrors) - Right (Tuple mbCompiler resolutions) -> do + Right (Tuple compiler resolutions) -> do Log.debug $ "Solved with compiler " <> Version.print target <> " and got resolutions:\n" <> printJson (Internal.Codec.packageMap Version.codec) resolutions - case mbCompiler of - Nothing -> Except.throw "Produced a compiler-derived build plan with no compiler!" - Just selected | selected /= target -> Except.throw $ Array.fold + when (compiler /= target) do + Except.throw $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" - , Version.print selected + , Version.print compiler , ") that differs from the target compiler (" , Version.print target , ")." ] - Just _ -> pure unit Cache.get _compilerCache (Compilation manifest resolutions target) >>= case _ of Nothing -> do Log.debug $ "No cached compilation, compiling with compiler " <> Version.print target workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] FS.Extra.ensureDirectory installed - installBuildPlan resolutions installed + MatrixBuilder.installBuildPlan resolutions installed result <- Run.liftAff $ Purs.callCompiler { command: Purs.Compile { globs: [ Path.concat [ source, "src/**/*.purs" ], Path.concat [ installed, "*/src/**/*.purs" ] ] } , version: Just target @@ -910,7 +923,7 @@ findAllCompilers { source, manifest, compilers } = do FS.Extra.remove workdir case result of Left err -> do - Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> printCompilerFailure target err + Log.info $ "Compilation failed with compiler " <> Version.print target <> ":\n" <> MatrixBuilder.printCompilerFailure target err Right _ -> do Log.debug $ "Compilation succeeded with compiler " <> Version.print target Cache.put _compilerCache (Compilation manifest resolutions target) { target, result: map (const unit) result } @@ -921,49 +934,6 @@ findAllCompilers { source, manifest, compilers } = do let results = partitionEithers $ NonEmptyArray.toArray checkedCompilers pure { failed: Map.fromFoldable results.fail, succeeded: Set.fromFoldable results.success } -printCompilerFailure :: Version -> CompilerFailure -> String -printCompilerFailure compiler = case _ of - MissingCompiler -> Array.fold - [ "Compilation failed because the build plan compiler version " - , Version.print compiler - , " is not supported. Please try again with a different compiler." 
- ] - CompilationError errs -> String.joinWith "\n" - [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" - , "```" - , Purs.printCompilerErrors errs - , "```" - ] - UnknownError err -> String.joinWith "\n" - [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" - , "```" - , err - , "```" - ] - --- | Install all dependencies indicated by the build plan to the specified --- | directory. Packages will be installed at 'dir/package-name-x.y.z'. -installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit -installBuildPlan resolutions dependenciesDir = do - Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir - -- We fetch every dependency at its resolved version, unpack the tarball, and - -- store the resulting source code in a specified directory for dependencies. - forWithIndex_ resolutions \name version -> do - let - -- This filename uses the format the directory name will have once - -- unpacked, ie. package-name-major.minor.patch - filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" - filepath = Path.concat [ dependenciesDir, filename ] - Storage.download name version filepath - Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of - Left error -> do - Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error - Except.throw "Failed to unpack dependency tarball, cannot continue." - Right _ -> - Log.debug $ "Unpacked " <> filename - Run.liftAff $ FS.Aff.unlink filepath - Log.debug $ "Installed " <> formatPackageVersion name version - -- | Parse the name and version from a path to a module installed in the standard -- | form: '-...' parseModulePath :: FilePath -> Either String { name :: PackageName, version :: Version } @@ -1034,7 +1004,7 @@ publishToPursuit { source, compiler, resolutions, installedResolutions } = Excep publishJson <- case compilerOutput of Left error -> - Except.throw $ printCompilerFailure compiler error + Except.throw $ MatrixBuilder.printCompilerFailure compiler error Right publishResult -> do -- The output contains plenty of diagnostic lines, ie. "Compiling ..." -- but we only want the final JSON payload. @@ -1181,13 +1151,6 @@ getPacchettiBotti = do packagingTeam :: Team packagingTeam = { org: "purescript", team: "packaging" } -readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex -readCompilerIndex = do - metadata <- Registry.readAllMetadata - manifests <- Registry.readAllManifests - allCompilers <- PursVersions.pursVersions - pure $ Solver.buildCompilerIndex allCompilers manifests metadata - type AdjustManifest = { source :: FilePath , compiler :: Version diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 142149bc0..e30f76f1a 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -36,7 +36,7 @@ data Db a | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) | InsertLogLine LogLine a | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) - | DeleteIncompleteJobs a + | ResetIncompleteJobs a derive instance Functor Db @@ -91,8 +91,8 @@ selectNextPackageSetJob :: forall r. Run (DB + EXCEPT String + r) (Maybe Package selectNextPackageSetJob = Run.lift _db (SelectNextPackageSetJob identity) >>= Except.rethrow -- | Delete all incomplete jobs from the database. -deleteIncompleteJobs :: forall r. 
Run (DB + r) Unit -deleteIncompleteJobs = Run.lift _db (DeleteIncompleteJobs unit) +resetIncompleteJobs :: forall r. Run (DB + r) Unit +resetIncompleteJobs = Run.lift _db (ResetIncompleteJobs unit) interpret :: forall r a. (Db ~> Run r) -> Run (DB + r) a -> Run r a interpret handler = Run.interpret (Run.on _db handler Run.send) @@ -148,6 +148,6 @@ handleSQLite env = case _ of Log.warn $ "Some logs are not readable: " <> String.joinWith "\n" fail pure $ reply success - DeleteIncompleteJobs next -> do - Run.liftEffect $ SQLite.deleteIncompleteJobs env.db + ResetIncompleteJobs next -> do + Run.liftEffect $ SQLite.resetIncompleteJobs env.db pure next diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs index e638cc684..90bef72d0 100644 --- a/app/src/App/Main.purs +++ b/app/src/App/Main.purs @@ -22,6 +22,12 @@ main = do case env.vars.resourceEnv.healthchecksUrl of Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled" Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl + -- TODO: here before starting the executor we should check if we need to run + -- a whole-registry-compiler update. + -- To do that, we ask PursVersions what the compilers are, then we ask in the + -- metadata what the compilers for the latest prelude are, and if the latest + -- compiler is missing we enqueue a "compile everything", so that the executor + -- can pick it up first thing Aff.launchAff_ $ jobExecutor env Router.runRouter env where diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 1e8042cca..50bc82905 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -138,7 +138,8 @@ export const finishJobImpl = (db, args) => { return stmt.run(args); } -export const deleteIncompleteJobsImpl = (db) => { +// TODO this needs to be an update, no deletes +export const resetIncompleteJobsImpl = (db) => { const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`); return stmt.run(); }; diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index dd4268451..44a7f27d7 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -3,27 +3,6 @@ -- | the bindings here are still quite low-level and simply exist to provide a -- | nicer interface with PureScript types for higher-level modules to use. --- TOMORROW: --- --- * Add the job executor to server startup --- * Move the various job details to the API.V1 module since it'll be returned by the UI --- * Update the router to just create a job when received, and on lookup to return relevant details from the db --- * Update the router to have an endpoint for creating a package set job and compiler matrix job using the --- same authentication requirements as for GitHub today. --- * Move the compiler matrix out of publish into its own functionality so it can be called. We want to --- be able to spawn a matrix job at any time for a compiler/package version pair, but need a helper to --- do the whole toposort thing. --- * Update job execution to actually call the relevant publish/unpublish/transfer/package set API fn --- --- LATER --- * Update tests that refer to the DB effect --- * Adjust the integration test(s) to verify we're getting enforced concurrency control --- * Update the GitHub issue module so it only submits a request to the registry and returns --- a job id, rather than actually running the fns directly. Poll for a result still and --- comment when the job completes. --- --- FOLLOWUP --- * Punt on the squash commit until later. 
module Registry.App.SQLite ( SQLite , ConnectOptions @@ -40,7 +19,7 @@ module Registry.App.SQLite , finishJob , StartJob , startJob - , deleteIncompleteJobs + , resetIncompleteJobs , insertLogLine , selectLogsByJob , PackageJobDetails @@ -172,12 +151,12 @@ finishJobToJSRep { jobId, success, finishedAt } = foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit -foreign import deleteIncompleteJobsImpl :: EffectFn1 SQLite Unit +foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit -- TODO: we shouldn't delete them I think? just remove the startedAt so they -- can be retried -deleteIncompleteJobs :: SQLite -> Effect Unit -deleteIncompleteJobs = Uncurried.runEffectFn1 deleteIncompleteJobsImpl +resetIncompleteJobs :: SQLite -> Effect Unit +resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl -------------------------------------------------------------------------------- -- package_jobs table @@ -260,6 +239,7 @@ insertPackageJob db job = do type InsertMatrixJob = { jobId :: JobId , compilerVersion :: Version + -- TODO this is missing a buncha stuff , payload :: Map PackageName Version } diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 1f6fdc489..07baa935c 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -105,7 +105,7 @@ createServerEnv = do -- because they are stale runs from previous startups of the server. -- We can just remove the jobs, and all the logs belonging to them will be -- removed automatically by the foreign key constraint. - liftEffect $ SQLite.deleteIncompleteJobs db + liftEffect $ SQLite.resetIncompleteJobs db pure { debouncer diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index fa2f70f24..5c9e1f883 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -1,18 +1,25 @@ -module Registry.App.Server.JobExecutor (runJobExecutor) where +module Registry.App.Server.JobExecutor + ( runJobExecutor + , newJobId + ) where import Registry.App.Prelude hiding ((/)) import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel import Data.DateTime (DateTime) +import Data.UUID.Random as UUID import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff +import Registry.API.V1 (JobId(..)) import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) +import Registry.App.Server.MatrixBuilder as MatrixBuilder +import Registry.ManifestIndex as ManifestIndex import Registry.Operation as Operation import Run (Run) import Run.Except (EXCEPT) @@ -25,7 +32,7 @@ data JobDetails runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do Log.info "Starting Job Executor" - Db.deleteIncompleteJobs + Db.resetIncompleteJobs loop where loop = do @@ -69,20 +76,46 @@ runJobExecutor env = runEffects env do Db.finishJob { jobId, finishedAt: now, success } loop +-- TODO: here we only get a single package for each operation, but really we should +-- have all of them and toposort them. There is something in ManifestIndex but not +-- sure that's what we need findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe JobDetails) findNextAvailableJob = runMaybeT $ (PackageJob <$> MaybeT Db.selectNextPackageJob) <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) +newJobId :: forall m. MonadEffect m => m JobId +newJobId = do + id <- UUID.make + pure $ JobId $ UUID.toString id + executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish p } -> - API.publish Nothing p + PackageJob { payload: Operation.Publish payload@{ compiler, name, version } } -> do + maybeDependencies <- API.publish Nothing payload + -- The above operation will throw if not successful, and return a map of + -- dependencies of the package only if it has not been published before. + for_ maybeDependencies \dependencies -> do + -- At this point this package has been verified with one compiler only. + -- So we need to enqueue compilation jobs for (1) same package, all the other + -- compilers, and (2) same compiler, all packages that depend on this one + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler, name, version, dependencies, compilerIndex } + samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (samePackageAllCompilers <> sameCompilerAllDependants) \matrixJob -> do + Log.info $ "Enqueuing matrix job" -- TODO print details + jobId <- newJobId + Db.insertMatrixJob { jobId, payload: matrixJob } PackageJob { payload: Operation.Authenticated auth } -> API.authenticated auth - - MatrixJob _details -> - pure unit -- UNIMPLEMENTED + MatrixJob details -> + -- TODO this job should return the success result, because if successful we need + -- to enqueue more matrix jobs: all its dependents for this same compiler version + MatrixBuilder.runMatrixJob details PackageSetJob _details -> - pure unit -- UNIMPLEMENTED + -- TODO: need to pass in the package_sets effect + -- API.packageSetUpdate2 details + pure unit diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs new file mode 100644 index 000000000..6a194ae3d --- /dev/null +++ b/app/src/App/Server/MatrixBuilder.purs @@ -0,0 +1,202 @@ +module Registry.App.Server.MatrixBuilder + ( installBuildPlan + , printCompilerFailure + , readCompilerIndex + , runMatrixJob + , solveForAllCompilers + , solveDependantsForCompiler + ) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Array.NonEmpty as NonEmptyArray +import Data.Map as Map +import Data.Set.NonEmpty as NonEmptySet +import Data.String as String +import Effect.Aff as Aff +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Registry.App.CLI.Purs (CompilerFailure(..)) +import Registry.App.CLI.Purs as Purs +import Registry.App.CLI.PursVersions as PursVersions +import Registry.App.CLI.Tar as Tar +import Registry.App.Effect.Log (LOG) +import Registry.App.Effect.Log as Log +import Registry.App.Effect.Registry (REGISTRY) +import Registry.App.Effect.Registry as Registry +import Registry.App.Effect.Storage (STORAGE) +import Registry.App.Effect.Storage as Storage +import Registry.App.SQLite (MatrixJobDetails) +import Registry.Foreign.FSExtra as FS.Extra +import Registry.Foreign.Tmp as Tmp +import Registry.ManifestIndex as ManifestIndex +import Registry.Metadata as Metadata +import Registry.PackageName as 
PackageName +import Registry.Range as Range +import Registry.Solver as Solver +import Registry.Version as Version +import Run (AFF, EFFECT, Run) +import Run as Run +import Run.Except (EXCEPT) +import Run.Except as Except + +runMatrixJob :: forall r. MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) Unit +runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do + workdir <- Tmp.mkTmpDir + let installed = Path.concat [ workdir, ".registry" ] + FS.Extra.ensureDirectory installed + installBuildPlan (Map.insert packageName packageVersion buildPlan) installed + result <- Run.liftAff $ Purs.callCompiler + { command: Purs.Compile { globs: [ Path.concat [ installed, "*/src/**/*.purs" ] ] } + , version: Just compilerVersion + , cwd: Just workdir + } + FS.Extra.remove workdir + case result of + Left err -> do + Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion + <> ":\n" + <> printCompilerFailure compilerVersion err + Right _ -> do + Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion + + Registry.readMetadata packageName >>= case _ of + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName + Except.throw $ "Cannot run Matrix Job for " <> PackageName.print packageName + Just (Metadata metadata) -> do + let + metadataWithCompilers = metadata + { published = Map.update + ( \publishedMetadata@{ compilers } -> + Just $ publishedMetadata { compilers = NonEmptySet.toUnfoldable1 $ NonEmptySet.fromFoldable1 $ NonEmptyArray.cons compilerVersion compilers } + ) + packageVersion + metadata.published + } + Registry.writeMetadata packageName (Metadata metadataWithCompilers) + Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) + + Log.info "Wrote completed metadata to the registry!" + +-- TODO feels like we should be doing this at startup and use the cache instead +-- of reading files all over again +readCompilerIndex :: forall r. Run (REGISTRY + AFF + EXCEPT String + r) Solver.CompilerIndex +readCompilerIndex = do + metadata <- Registry.readAllMetadata + manifests <- Registry.readAllManifests + allCompilers <- PursVersions.pursVersions + pure $ Solver.buildCompilerIndex allCompilers manifests metadata + +-- | Install all dependencies indicated by the build plan to the specified +-- | directory. Packages will be installed at 'dir/package-name-x.y.z'. +installBuildPlan :: forall r. Map PackageName Version -> FilePath -> Run (STORAGE + LOG + AFF + EXCEPT String + r) Unit +installBuildPlan resolutions dependenciesDir = do + Run.liftAff $ FS.Extra.ensureDirectory dependenciesDir + -- We fetch every dependency at its resolved version, unpack the tarball, and + -- store the resulting source code in a specified directory for dependencies. + forWithIndex_ resolutions \name version -> do + let + -- This filename uses the format the directory name will have once + -- unpacked, ie. package-name-major.minor.patch + filename = PackageName.print name <> "-" <> Version.print version <> ".tar.gz" + filepath = Path.concat [ dependenciesDir, filename ] + Storage.download name version filepath + Run.liftAff (Aff.attempt (Tar.extract { cwd: dependenciesDir, archive: filename })) >>= case _ of + Left error -> do + Log.error $ "Failed to unpack " <> filename <> ": " <> Aff.message error + Except.throw "Failed to unpack dependency tarball, cannot continue." 
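+      -- On a successful extraction we only log; the downloaded archive file is
+      -- deleted just below, leaving the unpacked sources in the dependencies directory.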
+ Right _ -> + Log.debug $ "Unpacked " <> filename + Run.liftAff $ FS.Aff.unlink filepath + Log.debug $ "Installed " <> formatPackageVersion name version + +printCompilerFailure :: Version -> CompilerFailure -> String +printCompilerFailure compiler = case _ of + MissingCompiler -> Array.fold + [ "Compilation failed because the build plan compiler version " + , Version.print compiler + , " is not supported. Please try again with a different compiler." + ] + CompilationError errs -> String.joinWith "\n" + [ "Compilation failed because the build plan does not compile with version " <> Version.print compiler <> " of the compiler:" + , "```" + , Purs.printCompilerErrors errs + , "```" + ] + UnknownError err -> String.joinWith "\n" + [ "Compilation failed with version " <> Version.print compiler <> " because of an error :" + , "```" + , err + , "```" + ] + +type MatrixSolverData = + { compilerIndex :: Solver.CompilerIndex + , compiler :: Version + , name :: PackageName + , version :: Version + , dependencies :: Map PackageName Range + } + +solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Map Version (Map PackageName Version)) +solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do + -- remove the compiler we tested with from the set of all of them + compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions + newJobs <- for compilers \target -> do + Log.debug $ "Trying compiler " <> Version.print target <> " for package " <> PackageName.print name + case Solver.solveWithCompiler (Range.exact target) compilerIndex dependencies of + Left _solverErrors -> do + Log.info $ "Failed to solve with compiler " <> Version.print target + -- Log.debug $ Solver.printSolverError solverErrors + pure Nothing + Right res@(Tuple solvedCompiler _resolutions) -> case solvedCompiler == target of + true -> pure $ Just res + false -> do + Log.debug $ Array.fold + [ "Produced a compiler-derived build plan that selects a compiler (" + , Version.print solvedCompiler + , ") that differs from the target compiler (" + , Version.print target + , ")." + ] + pure Nothing + pure $ Map.fromFoldable $ Array.catMaybes newJobs + +solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Map Version (Map PackageName Version)) +solveDependantsForCompiler { compilerIndex, name, version, compiler } = do + manifestIndex <- Registry.readAllManifests + let dependentManifests = ManifestIndex.dependants manifestIndex name version + newJobs <- for dependentManifests \(Manifest manifest) -> do + -- we first verify if we have already attempted this package with this compiler, + -- either in the form of having it in the metadata already, or as a failed compilation + -- (i.e. 
if we find compilers in the metadata for this version, we only try this compiler
+    -- when it is newer than all of them, because every older one has already been tried)
+    shouldAttemptToCompile <- Registry.readMetadata manifest.name >>= case _ of
+      Nothing -> pure false
+      Just metadata -> pure $ case Map.lookup manifest.version (un Metadata metadata).published of
+        Nothing -> false
+        Just { compilers } -> all (_ < compiler) compilers
+    case shouldAttemptToCompile of
+      false -> pure Nothing
+      true -> do
+        -- if all good then run the solver
+        Log.debug $ "Trying compiler " <> Version.print compiler <> " for package " <> PackageName.print manifest.name
+        case Solver.solveWithCompiler (Range.exact compiler) compilerIndex manifest.dependencies of
+          Left _solverErrors -> do
+            Log.info $ "Failed to solve with compiler " <> Version.print compiler
+            -- Log.debug $ Solver.printSolverError solverErrors
+            pure Nothing
+          Right res@(Tuple solvedCompiler _resolutions) -> case compiler == solvedCompiler of
+            true -> pure $ Just res
+            false -> do
+              Log.debug $ Array.fold
+                [ "Produced a compiler-derived build plan that selects a compiler ("
+                , Version.print solvedCompiler
+                , ") that differs from the target compiler ("
+                , Version.print compiler
+                , ")."
+                ]
+              pure Nothing
+  pure $ Map.fromFoldable $ Array.catMaybes newJobs
diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
index 5ebfd4823..bdbb1eff5 100644
--- a/app/src/App/Server/Router.purs
+++ b/app/src/App/Server/Router.purs
@@ -15,6 +15,7 @@ import Registry.App.Effect.Db as Db
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.Log as Log
 import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects)
+import Registry.App.Server.JobExecutor as JobExecutor
 import Registry.Operation (PackageOperation)
 import Registry.Operation as Operation
 import Registry.PackageName as PackageName
@@ -100,11 +101,7 @@ router { route, method, body } = HTTPurple.usingCont case route, method of
   insertPackageJob :: PackageOperation -> ContT Response (Run _) Response
   insertPackageJob operation = do
     lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation)
-    jobId <- newJobId
+    jobId <- JobExecutor.newJobId
     lift $ Db.insertPackageJob { jobId, payload: operation }
     jsonOk V1.jobCreatedResponseCodec { jobId }
 
-  newJobId :: forall m. MonadEffect m => m JobId
-  newJobId = liftEffect do
-    id <- UUID.make
-    pure $ JobId $ UUID.toString id
 
diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs
index 4837b49ed..8602d4982 100644
--- a/lib/src/ManifestIndex.purs
+++ b/lib/src/ManifestIndex.purs
@@ -7,11 +7,13 @@
 -- | https://github.com/purescript/registry-index
 module Registry.ManifestIndex
   ( ManifestIndex
+  , IncludeRanges(..)
+  , delete
+  , dependants
   , empty
   , fromSet
   , insert
   , insertIntoEntryFile
-  , delete
   , lookup
   , maximalIndex
   , packageEntryDirectory
@@ -21,9 +23,8 @@ module Registry.ManifestIndex
   , readEntryFile
   , removeFromEntryFile
   , toMap
-  , toSortedArray
   , topologicalSort
-  , IncludeRanges(..) 
+ , toSortedArray , writeEntryFile ) where @@ -45,7 +46,7 @@ import Data.Map (Map) import Data.Map as Map import Data.Maybe (Maybe(..)) import Data.Maybe as Maybe -import Data.Newtype (un) +import Data.Newtype (un, unwrap) import Data.Set (Set) import Data.Set as Set import Data.Set.NonEmpty (NonEmptySet) @@ -66,6 +67,7 @@ import Node.Path as Path import Partial.Unsafe (unsafeCrashWith) import Registry.Manifest (Manifest(..)) import Registry.Manifest as Manifest +import Registry.Operation (packageName) import Registry.PackageName (PackageName) import Registry.PackageName as PackageName import Registry.Range (Range) @@ -199,6 +201,13 @@ topologicalSort includeRanges manifests = IgnoreRanges -> versions [ Tuple dependency included ] +dependants :: ManifestIndex -> PackageName -> Version -> Array Manifest +dependants idx packageName version = idx + # toSortedArray ConsiderRanges + # Array.filter \(Manifest { dependencies }) -> case Map.lookup packageName dependencies of + Nothing -> false + Just range -> Range.includes range version + -- | Calculate the directory containing this package in the registry index, -- | using the following format: -- | diff --git a/lib/src/Solver.purs b/lib/src/Solver.purs index 929894645..d3dcec10c 100644 --- a/lib/src/Solver.purs +++ b/lib/src/Solver.purs @@ -19,6 +19,7 @@ import Data.List.NonEmpty as NEL import Data.Map (Map, SemigroupMap(..)) import Data.Map as Map import Data.Maybe (Maybe(..), fromMaybe, maybe, maybe') +import Data.Maybe as Maybe import Data.Monoid.Disj (Disj(..)) import Data.Monoid.Endo (Endo(..)) import Data.Newtype (class Newtype, over, un, unwrap, wrap) @@ -81,11 +82,11 @@ buildCompilerIndex pursCompilers index metadata = CompilerIndex do -- | Solve the given dependencies using a dependency index that includes compiler -- | versions, such that the solution prunes results that would fall outside -- | a compiler range accepted by all dependencies. 
-solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple (Maybe Version) (Map PackageName Version)) +solveWithCompiler :: Range -> CompilerIndex -> Map PackageName Range -> Either SolverErrors (Tuple Version (Map PackageName Version)) solveWithCompiler pursRange (CompilerIndex index) required = do let purs = Either.fromRight' (\_ -> Partial.unsafeCrashWith "Invalid package name!") (PackageName.parse "purs") results <- solveFull { registry: initializeRegistry index, required: initializeRequired (Map.insert purs pursRange required) } - let pursVersion = Map.lookup purs results + let pursVersion = Maybe.fromMaybe' (\_ -> Partial.unsafeCrashWith "Produced a compiler-derived build plan with no compiler!") $ Map.lookup purs results pure $ Tuple pursVersion $ Map.delete purs results -- | Data from the registry index, listing dependencies for each version of diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 0fdc94a06..783ee353c 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -76,6 +76,7 @@ import Registry.App.Legacy.Manifest (LegacyManifestError(..), LegacyManifestVali import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.App.Legacy.Types (RawPackageName(..), RawVersion(..), rawPackageNameMapCodec, rawVersionMapCodec) import Registry.App.Manifest.SpagoYaml as SpagoYaml +import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Octokit (Address, Tag) import Registry.Foreign.Octokit as Octokit @@ -300,7 +301,7 @@ runLegacyImport logs = do Just ref -> pure ref Log.debug "Building dependency index with compiler versions..." - compilerIndex <- API.readCompilerIndex + compilerIndex <- MatrixBuilder.readCompilerIndex Log.debug $ "Solving dependencies for " <> formatted eitherResolutions <- do @@ -405,7 +406,7 @@ runLegacyImport logs = do Log.debug "Downloading dependencies..." let installDir = Path.concat [ tmp, ".registry" ] FS.Extra.ensureDirectory installDir - API.installBuildPlan resolutions installDir + MatrixBuilder.installBuildPlan resolutions installDir Log.debug $ "Installed to " <> installDir Log.debug "Trying compilers one-by-one..." 
      selected <- findFirstCompiler

From 253f85c704a93cb03c5ce57d19f6931e0ffa1272 Mon Sep 17 00:00:00 2001
From: pacchettibotti
Date: Fri, 12 Dec 2025 12:16:21 +0100
Subject: [PATCH 07/19] Add missing version to publish fixtures

The publishCodec requires a version field, but the test fixtures weren't
updated to include it.
---
 app/fixtures/addition_issue_created.json | 2 +-
 app/fixtures/update_issue_comment.json   | 2 +-
 app/test/App/GitHubIssue.purs            | 2 ++
 lib/test/Registry/Operation.purs         | 6 ++++--
 4 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/app/fixtures/addition_issue_created.json b/app/fixtures/addition_issue_created.json
index d0b205555..b0aa93e6c 100644
--- a/app/fixtures/addition_issue_created.json
+++ b/app/fixtures/addition_issue_created.json
@@ -5,7 +5,7 @@
   "assignee": null,
   "assignees": [],
   "author_association": "CONTRIBUTOR",
-  "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }",
+  "body": "{\"location\": {\"githubOwner\": \"purescript\",\"githubRepo\": \"purescript-prelude\"},\"ref\": \"v5.0.0\",\"name\": \"prelude\", \"version\": \"5.0.0\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }",
   "closed_at": null,
   "comments": 0,
   "comments_url": "https://api.github.com/repos/purescript/registry/issues/149/comments",
diff --git a/app/fixtures/update_issue_comment.json b/app/fixtures/update_issue_comment.json
index 5400a7c2e..c5673c4da 100644
--- a/app/fixtures/update_issue_comment.json
+++ b/app/fixtures/update_issue_comment.json
@@ -2,7 +2,7 @@
   "action": "created",
   "comment": {
     "author_association": "MEMBER",
-    "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```",
+    "body": "```json\n{\"name\":\"something\",\"ref\":\"v1.2.3\", \"version\": \"1.2.3\", \"compiler\": \"0.15.0\", \"resolutions\": { \"prelude\": \"1.0.0\" } }```",
     "created_at": "2021-03-09T02:03:56Z",
     "html_url": "https://github.com/purescript/registry/issues/43#issuecomment-793265839",
     "id": 793265839,
diff --git a/app/test/App/GitHubIssue.purs b/app/test/App/GitHubIssue.purs
index 8276bf708..d2c6baf18 100644
--- a/app/test/App/GitHubIssue.purs
+++ b/app/test/App/GitHubIssue.purs
@@ -106,6 +106,7 @@ preludeAdditionString =
   {
     "name": "prelude",
     "ref": "v5.0.0",
+    "version": "5.0.0",
     "location": {
       "githubOwner": "purescript",
       "githubRepo": "purescript-prelude"
@@ -124,6 +125,7 @@ packageNameTooLongString =
   {
     "name": "packagenamewayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyytoolong",
     "ref": "v5.0.0",
+    "version": "5.0.0",
     "location": {
       "githubOwner": "purescript",
       "githubRepo": "purescript-prelude"
diff --git a/lib/test/Registry/Operation.purs b/lib/test/Registry/Operation.purs
index 2ccb4075a..1400e70ee 100644
--- a/lib/test/Registry/Operation.purs
+++ b/lib/test/Registry/Operation.purs
@@ -54,7 +54,8 @@ minimalPublish =
   {
     "compiler": "0.15.6",
     "name": "my-package",
-    "ref": "v1.0.0"
+    "ref": "v1.0.0",
+    "version": "1.0.0"
   }"""

 fullPublish :: String
@@ -67,7 +68,8 @@ fullPublish =
     "subdir": "core"
   },
   "name": "my-package",
-  "ref": "c23snabhsrib39"
+  "ref": "c23snabhsrib39",
+  "version": "1.0.0"
 }"""

 unpublish :: String

From 13eaf3a2e5225b50c3ad5236ba5e28ca18284768 Mon Sep 17 00:00:00 2001
From: pacchettibotti
Date: Fri, 12 Dec 2025 12:21:58 +0100
Subject: 
[PATCH 08/19] Add missing packageName and packageVersion to InsertMatrixJob The JS insertMatrixJobImpl expects columns [jobId, packageName, packageVersion, compilerVersion, payload] but the PureScript types were missing packageName and packageVersion --- app/src/App/SQLite.purs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 44a7f27d7..1485697c5 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -238,6 +238,8 @@ insertPackageJob db job = do type InsertMatrixJob = { jobId :: JobId + , packageName :: PackageName + , packageVersion :: Version , compilerVersion :: Version -- TODO this is missing a buncha stuff , payload :: Map PackageName Version @@ -245,6 +247,8 @@ type InsertMatrixJob = type JSInsertMatrixJob = { jobId :: String + , packageName :: String + , packageVersion :: String , compilerVersion :: String , payload :: String } @@ -252,6 +256,8 @@ type JSInsertMatrixJob = insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob insertMatrixJobToJSRep { jobId, compilerVersion, payload } = { jobId: un JobId jobId + , packageName: PackageName.print packageName + , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion , payload: stringifyJson (Internal.Codec.packageMap Version.codec) payload } From 301d3488dcfb081d60bbe7d1ec4b5d04f432faa5 Mon Sep 17 00:00:00 2001 From: pacchettibotti Date: Fri, 12 Dec 2025 12:22:29 +0100 Subject: [PATCH 09/19] Fix finishedAt timestamp to capture time after job execution --- app/src/App/Server/JobExecutor.purs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 5c9e1f883..cd9152ea2 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -73,7 +73,8 @@ runJobExecutor env = runEffects env do Log.info $ "Job " <> unwrap jobId <> " succeeded." 
pure true - Db.finishJob { jobId, finishedAt: now, success } + finishedAt <- nowUTC + Db.finishJob { jobId, finishedAt, success } loop -- TODO: here we only get a single package for each operation, but really we should From 0a1399568c83cc7525a7b36570847b5252fa4c2e Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Sun, 14 Dec 2025 17:03:57 +0200 Subject: [PATCH 10/19] Implement matrix jobs, and the recursive enqueuing of new ones --- app/src/App/GitHubIssue.purs | 2 +- app/src/App/SQLite.purs | 21 ++++++---- app/src/App/Server/JobExecutor.purs | 58 ++++++++++++++++++++++----- app/src/App/Server/MatrixBuilder.purs | 36 ++++++++++++----- app/src/App/Server/Router.purs | 3 +- app/test/App/API.purs | 6 +-- lib/src/ManifestIndex.purs | 3 +- scripts/src/PackageDeleter.purs | 2 +- 8 files changed, 93 insertions(+), 38 deletions(-) diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 56422ab64..3764398cf 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -58,7 +58,7 @@ main = launchAff_ $ do Right packageOperation -> case packageOperation of Publish payload -> - API.publish Nothing payload + void $ API.publish Nothing payload Authenticated payload -> do -- If we receive an authenticated operation via GitHub, then we -- re-sign it with pacchettibotti credentials if and only if the diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 1485697c5..153993a44 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -241,21 +241,22 @@ type InsertMatrixJob = , packageName :: PackageName , packageVersion :: Version , compilerVersion :: Version - -- TODO this is missing a buncha stuff , payload :: Map PackageName Version } type JSInsertMatrixJob = { jobId :: String + , createdAt :: String , packageName :: String , packageVersion :: String , compilerVersion :: String , payload :: String } -insertMatrixJobToJSRep :: InsertMatrixJob -> JSInsertMatrixJob -insertMatrixJobToJSRep { jobId, compilerVersion, payload } = +insertMatrixJobToJSRep :: DateTime -> InsertMatrixJob -> JSInsertMatrixJob +insertMatrixJobToJSRep now { jobId, packageName, packageVersion, compilerVersion, payload } = { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now , packageName: PackageName.print packageName , packageVersion: Version.print packageVersion , compilerVersion: Version.print compilerVersion @@ -265,7 +266,9 @@ insertMatrixJobToJSRep { jobId, compilerVersion, payload } = foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit -insertMatrixJob db = Uncurried.runEffectFn2 insertMatrixJobImpl db <<< insertMatrixJobToJSRep +insertMatrixJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep now job type MatrixJobDetails = { jobId :: JobId @@ -355,19 +358,23 @@ type InsertPackageSetJob = type JSInsertPackageSetJob = { jobId :: String + , createdAt :: String , payload :: String } -insertPackageSetJobToJSRep :: InsertPackageSetJob -> JSInsertPackageSetJob -insertPackageSetJobToJSRep { jobId, payload } = +insertPackageSetJobToJSRep :: DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob +insertPackageSetJobToJSRep now { jobId, payload } = { jobId: un JobId jobId + , createdAt: DateTime.format Internal.Format.iso8601DateTime now , payload: stringifyJson Operation.packageSetOperationCodec payload } foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit 
insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit -insertPackageSetJob db = Uncurried.runEffectFn2 insertPackageSetJobImpl db <<< insertPackageSetJobToJSRep +insertPackageSetJob db job = do + now <- nowUTC + Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep now job -------------------------------------------------------------------------------- -- logs table diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index cd9152ea2..30975d7f2 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -7,7 +7,9 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT) import Control.Parallel as Parallel +import Data.Array as Array import Data.DateTime (DateTime) +import Data.Set as Set import Data.UUID.Random as UUID import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff @@ -19,8 +21,9 @@ import Registry.App.Effect.Log as Log import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder -import Registry.ManifestIndex as ManifestIndex import Registry.Operation as Operation +import Registry.PackageName as PackageName +import Registry.Version as Version import Run (Run) import Run.Except (EXCEPT) @@ -93,7 +96,7 @@ newJobId = do executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish payload@{ compiler, name, version } } -> do + PackageJob { payload: Operation.Publish payload@{ name, version } } -> do maybeDependencies <- API.publish Nothing payload -- The above operation will throw if not successful, and return a map of -- dependencies of the package only if it has not been published before. 
@@ -103,19 +106,52 @@ executeJob _ = case _ of -- compilers, and (2) same compiler, all packages that depend on this one -- TODO here we are building the compiler index, but we should really cache it compilerIndex <- MatrixBuilder.readCompilerIndex - let solverData = { compiler, name, version, dependencies, compilerIndex } + let solverData = { compiler: payload.compiler, name, version, dependencies, compilerIndex } samePackageAllCompilers <- MatrixBuilder.solveForAllCompilers solverData sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData - for (samePackageAllCompilers <> sameCompilerAllDependants) \matrixJob -> do - Log.info $ "Enqueuing matrix job" -- TODO print details - jobId <- newJobId - Db.insertMatrixJob { jobId, payload: matrixJob } + for (Array.fromFoldable $ Set.union samePackageAllCompilers sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + jobId <- newJobId + Db.insertMatrixJob + { jobId + , payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } PackageJob { payload: Operation.Authenticated auth } -> API.authenticated auth - MatrixJob details -> - -- TODO this job should return the success result, because if successful we need - -- to enqueue more matrix jobs: all its dependents for this same compiler version - MatrixBuilder.runMatrixJob details + MatrixJob details@{ packageName, packageVersion } -> do + maybeDependencies <- MatrixBuilder.runMatrixJob details + -- Unlike the publishing case, after verifying a compilation here we only need + -- to followup with trying to compile the packages that depend on this one + for_ maybeDependencies \dependencies -> do + -- TODO here we are building the compiler index, but we should really cache it + compilerIndex <- MatrixBuilder.readCompilerIndex + let solverData = { compiler: details.compilerVersion, name: packageName, version: packageVersion, dependencies, compilerIndex } + sameCompilerAllDependants <- MatrixBuilder.solveDependantsForCompiler solverData + for (Array.fromFoldable sameCompilerAllDependants) + \{ compiler: solvedCompiler, resolutions, name: solvedPackage, version: solvedVersion } -> do + Log.info $ "Enqueuing matrix job: compiler " + <> Version.print solvedCompiler + <> ", package " + <> PackageName.print solvedPackage + <> "@" + <> Version.print solvedVersion + jobId <- newJobId + Db.insertMatrixJob + { jobId + , payload: resolutions + , compilerVersion: solvedCompiler + , packageName: solvedPackage + , packageVersion: solvedVersion + } PackageSetJob _details -> -- TODO: need to pass in the package_sets effect -- API.packageSetUpdate2 details diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 6a194ae3d..13097c2a6 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -12,6 +12,7 @@ import Registry.App.Prelude import Data.Array as Array import Data.Array.NonEmpty as NonEmptyArray import Data.Map as Map +import Data.Set as Set import Data.Set.NonEmpty as NonEmptySet import Data.String as String import Effect.Aff as Aff @@ -41,7 +42,7 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except -runMatrixJob :: forall r. 
MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) Unit +runMatrixJob :: forall r. MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] @@ -58,13 +59,14 @@ runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan Log.info $ "Compilation failed with compiler " <> Version.print compilerVersion <> ":\n" <> printCompilerFailure compilerVersion err + pure Nothing Right _ -> do Log.info $ "Compilation succeeded with compiler " <> Version.print compilerVersion Registry.readMetadata packageName >>= case _ of Nothing -> do Log.error $ "No existing metadata for " <> PackageName.print packageName - Except.throw $ "Cannot run Matrix Job for " <> PackageName.print packageName + pure Nothing Just (Metadata metadata) -> do let metadataWithCompilers = metadata @@ -79,6 +81,11 @@ runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) Log.info "Wrote completed metadata to the registry!" + Registry.readManifest packageName packageVersion >>= case _ of + Just (Manifest manifest) -> pure (Just manifest.dependencies) + Nothing -> do + Log.error $ "No existing metadata for " <> PackageName.print packageName <> "@" <> Version.print packageVersion + pure Nothing -- TODO feels like we should be doing this at startup and use the cache instead -- of reading files all over again @@ -140,8 +147,15 @@ type MatrixSolverData = , dependencies :: Map PackageName Range } -solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Map Version (Map PackageName Version)) -solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do +type MatrixSolverResult = + { name :: PackageName + , version :: Version + , compiler :: Version + , resolutions :: Map PackageName Version + } + +solveForAllCompilers :: forall r. MatrixSolverData -> Run (AFF + EXCEPT String + LOG + r) (Set MatrixSolverResult) +solveForAllCompilers { compilerIndex, name, version, compiler, dependencies } = do -- remove the compiler we tested with from the set of all of them compilers <- (Array.filter (_ /= compiler) <<< NonEmptyArray.toArray) <$> PursVersions.pursVersions newJobs <- for compilers \target -> do @@ -151,8 +165,8 @@ solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do Log.info $ "Failed to solve with compiler " <> Version.print target -- Log.debug $ Solver.printSolverError solverErrors pure Nothing - Right res@(Tuple solvedCompiler _resolutions) -> case solvedCompiler == target of - true -> pure $ Just res + Right (Tuple solvedCompiler resolutions) -> case solvedCompiler == target of + true -> pure $ Just { compiler: target, resolutions, name, version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" @@ -162,9 +176,9 @@ solveForAllCompilers { compilerIndex, name, compiler, dependencies } = do , ")." ] pure Nothing - pure $ Map.fromFoldable $ Array.catMaybes newJobs + pure $ Set.fromFoldable $ Array.catMaybes newJobs -solveDependantsForCompiler :: forall r. MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Map Version (Map PackageName Version)) +solveDependantsForCompiler :: forall r. 
MatrixSolverData -> Run (EXCEPT String + LOG + REGISTRY + r) (Set MatrixSolverResult) solveDependantsForCompiler { compilerIndex, name, version, compiler } = do manifestIndex <- Registry.readAllManifests let dependentManifests = ManifestIndex.dependants manifestIndex name version @@ -188,8 +202,8 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do Log.info $ "Failed to solve with compiler " <> Version.print compiler -- Log.debug $ Solver.printSolverError solverErrors pure Nothing - Right res@(Tuple solvedCompiler _resolutions) -> case compiler == solvedCompiler of - true -> pure $ Just res + Right (Tuple solvedCompiler resolutions) -> case compiler == solvedCompiler of + true -> pure $ Just { compiler, resolutions, name: manifest.name, version: manifest.version } false -> do Log.debug $ Array.fold [ "Produced a compiler-derived build plan that selects a compiler (" @@ -199,4 +213,4 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do , ")." ] pure Nothing - pure $ Map.fromFoldable $ Array.catMaybes newJobs + pure $ Set.fromFoldable $ Array.catMaybes newJobs diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index bdbb1eff5..c95fbcf8c 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -4,12 +4,11 @@ import Registry.App.Prelude hiding ((/)) import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ -import Data.UUID.Random as UUID import Effect.Aff as Aff import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status -import Registry.API.V1 (JobId(..), LogLevel(..), Route(..)) +import Registry.API.V1 (LogLevel(..), Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 36a2e61a2..63dcccc3d 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -102,7 +102,7 @@ spec = do -- First, we publish the package. Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) publishArgs + void $ API.publish (Just (toLegacyIndex idx)) publishArgs -- Then, we can check that it did make it to "Pursuit" as expected Pursuit.getPublishedVersions name >>= case _ of @@ -163,7 +163,7 @@ spec = do , resolutions: Nothing } Registry.readAllManifests >>= \idx -> - API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs + void $ API.publish (Just (toLegacyIndex idx)) pursuitOnlyPublishArgs -- We can also verify that transitive dependencies are added for legacy -- packages. 
@@ -178,7 +178,7 @@ spec = do
             , resolutions: Nothing
             }
         Registry.readAllManifests >>= \idx ->
-          API.publish (Just (toLegacyIndex idx)) transitivePublishArgs
+          void $ API.publish (Just (toLegacyIndex idx)) transitivePublishArgs
 
         -- We should verify the resulting metadata file is correct
         Metadata transitiveMetadata <- Registry.readMetadata transitive.name >>= case _ of
diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs
index 8602d4982..b5ecd390f 100644
--- a/lib/src/ManifestIndex.purs
+++ b/lib/src/ManifestIndex.purs
@@ -46,7 +46,7 @@ import Data.Map (Map)
 import Data.Map as Map
 import Data.Maybe (Maybe(..))
 import Data.Maybe as Maybe
-import Data.Newtype (un, unwrap)
+import Data.Newtype (un)
 import Data.Set (Set)
 import Data.Set as Set
 import Data.Set.NonEmpty (NonEmptySet)
@@ -67,7 +67,6 @@ import Node.Path as Path
 import Partial.Unsafe (unsafeCrashWith)
 import Registry.Manifest (Manifest(..))
 import Registry.Manifest as Manifest
-import Registry.Operation (packageName)
 import Registry.PackageName (PackageName)
 import Registry.PackageName as PackageName
 import Registry.Range (Range)
diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs
index db9b54d23..e0de363ca 100644
--- a/scripts/src/PackageDeleter.purs
+++ b/scripts/src/PackageDeleter.purs
@@ -239,7 +239,7 @@ deleteVersion arguments name version = do
     Just (Left _) -> Log.error "Cannot reimport a version that was specifically unpublished"
     Just (Right specificPackageMetadata) -> do
       -- Obtains `newMetadata` via cache
-      API.publish Nothing
+      void $ API.publish Nothing
         { location: Just oldMetadata.location
        , name: name
        , ref: specificPackageMetadata.ref

From 50cd04be517bdf64524c8379dc60a7ca80cc6e31 Mon Sep 17 00:00:00 2001
From: Fabrizio Ferrai
Date: Sun, 14 Dec 2025 17:25:00 +0100
Subject: [PATCH 11/19] Reset incomplete jobs so they can be picked up again

---
 app/src/App/SQLite.js   | 10 ++++++++--
 app/src/App/SQLite.purs |  2 --
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js
index 50bc82905..9fbbeeec9 100644
--- a/app/src/App/SQLite.js
+++ b/app/src/App/SQLite.js
@@ -138,9 +138,15 @@ export const finishJobImpl = (db, args) => {
   return stmt.run(args);
 }
 
-// TODO this needs to be an update, no deletes
+// TODO: we should keep track of retries somehow. Either we record how many
+// times a job has been retried and give up at some point, notifying the
+// trustees, or we notify right away on any retry so we can look into them.
 export const resetIncompleteJobsImpl = (db) => {
-  const stmt = db.prepare(`DELETE FROM ${JOB_INFO_TABLE} WHERE finishedAt IS NULL`);
+  const stmt = db.prepare(`
+    UPDATE ${JOB_INFO_TABLE}
+    SET startedAt = NULL
+    WHERE finishedAt IS NULL
+    AND startedAt IS NOT NULL`);
   return stmt.run();
 };
 
diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs
index 153993a44..5e4d98293 100644
--- a/app/src/App/SQLite.purs
+++ b/app/src/App/SQLite.purs
@@ -153,8 +153,6 @@ foreign import finishJobImpl :: EffectFn2 SQLite JSFinishJob Unit
 
 foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit
 
--- TODO: we shouldn't delete them I think? just remove the startedAt so they
--- can be retried
 resetIncompleteJobs :: SQLite -> Effect Unit
 resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl

From 6a57d75ea0e29b31ac76b4c22a169d01b5b06c16 Mon Sep 17 00:00:00 2001
From: Fabrizio Ferrai
Date: Sun, 14 Dec 2025 23:47:12 +0100
Subject: [PATCH 12/19] Run matrix jobs for the whole registry when finding a
 new compiler version

---
 app/src/App/Effect/Db.purs            | 36 ++++++++---------
 app/src/App/Main.purs                 |  6 ---
 app/src/App/SQLite.purs               | 45 ++++++++++++---------
 app/src/App/Server/JobExecutor.purs   | 56 ++++++++++++++++++++-------
 app/src/App/Server/MatrixBuilder.purs | 16 +++++++-
 app/src/App/Server/Router.purs        |  4 +-
 lib/src/ManifestIndex.purs            | 12 ++++--
 7 files changed, 111 insertions(+), 64 deletions(-)

diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs
index e30f76f1a..1e90a8163 100644
--- a/app/src/App/Effect/Db.purs
+++ b/app/src/App/Effect/Db.purs
@@ -25,9 +25,9 @@
 -- be part of app code we want to test.
 
 data Db a
-  = InsertPackageJob InsertPackageJob a
-  | InsertMatrixJob InsertMatrixJob a
-  | InsertPackageSetJob InsertPackageSetJob a
+  = InsertPackageJob InsertPackageJob (JobId -> a)
+  | InsertMatrixJob InsertMatrixJob (JobId -> a)
+  | InsertPackageSetJob InsertPackageSetJob (JobId -> a)
   | FinishJob FinishJob a
   | StartJob StartJob a
   | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a)
@@ -63,16 +63,16 @@ selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo)
 selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow
 
 -- | Insert a new package job into the database.
-insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) Unit
-insertPackageJob job = Run.lift _db (InsertPackageJob job unit)
+insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) JobId
+insertPackageJob job = Run.lift _db (InsertPackageJob job identity)
 
 -- | Insert a new matrix job into the database.
-insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) Unit
-insertMatrixJob job = Run.lift _db (InsertMatrixJob job unit)
+insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId
+insertMatrixJob job = Run.lift _db (InsertMatrixJob job identity)
 
 -- | Insert a new package set job into the database.
-insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) Unit
-insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job unit)
+insertPackageSetJob :: forall r. InsertPackageSetJob -> Run (DB + r) JobId
+insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity)
 
 -- | Start a job in the database.
 startJob :: forall r. StartJob -> Run (DB + r) Unit
@@ -102,17 +102,17 @@ type SQLiteEnv = { db :: SQLite }
 
 -- | Interpret DB by interacting with the SQLite database on disk.
 handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a
 handleSQLite env = case _ of
-  InsertPackageJob job next -> do
-    Run.liftEffect $ SQLite.insertPackageJob env.db job
-    pure next
+  InsertPackageJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertPackageJob env.db job
+    pure $ reply result
 
-  InsertMatrixJob job next -> do
-    Run.liftEffect $ SQLite.insertMatrixJob env.db job
-    pure next
+  InsertMatrixJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertMatrixJob env.db job
+    pure $ reply result
 
-  InsertPackageSetJob job next -> do
-    Run.liftEffect $ SQLite.insertPackageSetJob env.db job
-    pure next
+  InsertPackageSetJob job reply -> do
+    result <- Run.liftEffect $ SQLite.insertPackageSetJob env.db job
+    pure $ reply result
 
   FinishJob job next -> do
     Run.liftEffect $ SQLite.finishJob env.db job
diff --git a/app/src/App/Main.purs b/app/src/App/Main.purs
index 90bef72d0..e638cc684 100644
--- a/app/src/App/Main.purs
+++ b/app/src/App/Main.purs
@@ -22,12 +22,6 @@ main = do
   case env.vars.resourceEnv.healthchecksUrl of
     Nothing -> Console.log "HEALTHCHECKS_URL not set, healthcheck pinging disabled"
     Just healthchecksUrl -> Aff.launchAff_ $ healthcheck healthchecksUrl
-  -- TODO: here before starting the executor we should check if we need to run
-  -- a whole-registry-compiler update.
-  -- To do that, we ask PursVersions what the compilers are, then we ask in the
-  -- metadata what the compilers for the latest prelude are, and if the latest
-  -- compiler is missing we enqueue a "compile everything", so that the executor
-  -- can pick it up first thing
   Aff.launchAff_ $ jobExecutor env
   Router.runRouter env
   where
diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs
index 5e4d98293..09f91f612 100644
--- a/app/src/App/SQLite.purs
+++ b/app/src/App/SQLite.purs
@@ -36,6 +36,7 @@
 import Codec.JSON.DecodeError as JSON.DecodeError
 import Data.DateTime (DateTime)
 import Data.Formatter.DateTime as DateTime
 import Data.Nullable as Nullable
+import Data.UUID.Random as UUID
 import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4)
 import Effect.Uncurried as Uncurried
 import Registry.API.V1 (JobId(..), LogLevel, LogLine)
@@ -156,6 +157,11 @@ foreign import resetIncompleteJobsImpl :: EffectFn1 SQLite Unit
 resetIncompleteJobs :: SQLite -> Effect Unit
 resetIncompleteJobs = Uncurried.runEffectFn1 resetIncompleteJobsImpl
 
+newJobId :: forall m. MonadEffect m => m JobId
+newJobId = do
+  id <- UUID.make
+  pure $ JobId $ UUID.toString id
+
 --------------------------------------------------------------------------------
 -- package_jobs table
 
@@ -197,8 +203,7 @@ selectNextPackageJob db = do
   pure $ traverse packageJobDetailsFromJSRep maybeJobDetails
 
 type InsertPackageJob =
-  { jobId :: JobId
-  , payload :: PackageOperation
+  { payload :: PackageOperation
   }
 
 type JSInsertPackageJob =
   { jobId :: String
   , jobType :: String
   , packageName :: String
   , createdAt :: String
   }
 
-insertPackageJobToJSRep :: DateTime -> InsertPackageJob -> JSInsertPackageJob
-insertPackageJobToJSRep now { jobId, payload } =
+insertPackageJobToJSRep :: JobId -> DateTime -> InsertPackageJob -> JSInsertPackageJob
+insertPackageJobToJSRep jobId now { payload } =
   { jobId: un JobId jobId
   , jobType: JobType.print jobType
   , packageName: PackageName.print name
@@ -226,17 +231,18 @@
 foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit
 
 -- | Insert a new package job, ie. a publish, unpublish, or transfer.
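+-- | The JobId is generated internally via `newJobId` rather than supplied by
+-- | the caller. A minimal usage sketch (hypothetical names, assuming an open
+-- | `db :: SQLite` handle and a decoded `operation :: PackageOperation`):
+-- |
+-- |   jobId <- insertPackageJob db { payload: operation }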
-insertPackageJob :: SQLite -> InsertPackageJob -> Effect Unit
+insertPackageJob :: SQLite -> InsertPackageJob -> Effect JobId
 insertPackageJob db job = do
+  jobId <- newJobId
   now <- nowUTC
-  Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep now job
+  Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep jobId now job
+  pure jobId
 
 --------------------------------------------------------------------------------
 -- matrix_jobs table
 
 type InsertMatrixJob =
-  { jobId :: JobId
-  , packageName :: PackageName
+  { packageName :: PackageName
   , packageVersion :: Version
   , compilerVersion :: Version
   , payload :: Map PackageName Version
   }
 
 type JSInsertMatrixJob =
   { jobId :: String
   , createdAt :: String
   , packageName :: String
   , packageVersion :: String
   , compilerVersion :: String
   , payload :: String
   }
 
-insertMatrixJobToJSRep :: DateTime -> InsertMatrixJob -> JSInsertMatrixJob
-insertMatrixJobToJSRep now { jobId, packageName, packageVersion, compilerVersion, payload } =
+insertMatrixJobToJSRep :: JobId -> DateTime -> InsertMatrixJob -> JSInsertMatrixJob
+insertMatrixJobToJSRep jobId now { packageName, packageVersion, compilerVersion, payload } =
   { jobId: un JobId jobId
   , createdAt: DateTime.format Internal.Format.iso8601DateTime now
   , packageName: PackageName.print packageName
@@ -263,10 +269,12 @@
 
 foreign import insertMatrixJobImpl :: EffectFn2 SQLite JSInsertMatrixJob Unit
 
-insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect Unit
+insertMatrixJob :: SQLite -> InsertMatrixJob -> Effect JobId
 insertMatrixJob db job = do
+  jobId <- newJobId
   now <- nowUTC
-  Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep now job
+  Uncurried.runEffectFn2 insertMatrixJobImpl db $ insertMatrixJobToJSRep jobId now job
+  pure jobId
 
 type MatrixJobDetails =
   { jobId :: JobId
@@ -350,8 +358,7 @@ selectNextPackageSetJob db = do
   pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails
 
 type InsertPackageSetJob =
-  { jobId :: JobId
-  , payload :: PackageSetOperation
+  { payload :: PackageSetOperation
   }
 
 type JSInsertPackageSetJob =
   { jobId :: String
   , createdAt :: String
   , payload :: String
   }
 
-insertPackageSetJobToJSRep :: DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob
-insertPackageSetJobToJSRep now { jobId, payload } =
+insertPackageSetJobToJSRep :: JobId -> DateTime -> InsertPackageSetJob -> JSInsertPackageSetJob
+insertPackageSetJobToJSRep jobId now { payload } =
   { jobId: un JobId jobId
   , createdAt: DateTime.format Internal.Format.iso8601DateTime now
   , payload: stringifyJson Operation.packageSetOperationCodec payload
   }
 
 foreign import insertPackageSetJobImpl :: EffectFn2 SQLite JSInsertPackageSetJob Unit
 
-insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect Unit
+insertPackageSetJob :: SQLite -> InsertPackageSetJob -> Effect JobId
 insertPackageSetJob db job = do
+  jobId <- newJobId
   now <- nowUTC
-  Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep now job
+  Uncurried.runEffectFn2 insertPackageSetJobImpl db $ insertPackageSetJobToJSRep jobId now job
+  pure jobId
 
 --------------------------------------------------------------------------------
 -- logs table
diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs
index 30975d7f2..63a5cbddd 100644
--- a/app/src/App/Server/JobExecutor.purs
+++ b/app/src/App/Server/JobExecutor.purs
@@ -1,6 +1,5 @@
 module Registry.App.Server.JobExecutor
   ( runJobExecutor
-  , newJobId
   ) where
 
 import
 Registry.App.Prelude hiding ((/))
@@ -9,18 +8,21 @@
 import Control.Monad.Maybe.Trans (MaybeT(..), runMaybeT)
 import Control.Parallel as Parallel
 import Data.Array as Array
 import Data.DateTime (DateTime)
+import Data.Map as Map
 import Data.Set as Set
-import Data.UUID.Random as UUID
 import Effect.Aff (Milliseconds(..))
 import Effect.Aff as Aff
-import Registry.API.V1 (JobId(..))
 import Registry.App.API as API
 import Registry.App.Effect.Db (DB)
 import Registry.App.Effect.Db as Db
+import Registry.App.Effect.Log (LOG)
 import Registry.App.Effect.Log as Log
+import Registry.App.Effect.Registry (REGISTRY)
+import Registry.App.Effect.Registry as Registry
 import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails)
 import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects)
 import Registry.App.Server.MatrixBuilder as MatrixBuilder
+import Registry.ManifestIndex as ManifestIndex
 import Registry.Operation as Operation
 import Registry.PackageName as PackageName
 import Registry.Version as Version
@@ -35,6 +37,18 @@ data JobDetails
 
 runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit)
 runJobExecutor env = runEffects env do
   Log.info "Starting Job Executor"
+  -- Before starting the executor we check if we need to run a whole-registry
+  -- compiler update: whenever a new compiler is published we need to see which
+  -- packages are compatible with it. This is a responsibility of the MatrixBuilder,
+  -- but it needs to be triggered to know there's a new version out.
+  -- To do that, we ask PursVersions what the compilers are, then we look for
+  -- the compatibility list of the latest `prelude` version. If the new compiler
+  -- is missing, then we know that we have not attempted to check compatibility
+  -- with it (since the latest `prelude` has to be compatible by definition),
+  -- and we can enqueue a "compile everything" here, which will be the first
+  -- thing that the JobExecutor picks up.
+  void $ MatrixBuilder.checkIfNewCompiler
+    >>= traverse upgradeRegistryToNewCompiler
   Db.resetIncompleteJobs
   loop
   where
@@ -82,18 +96,13 @@
 
 -- TODO: here we only get a single package for each operation, but really we should
--- have all of them and toposort them. There is something in ManifestIndex but not
--- sure that's what we need
+-- have all of them and toposort them. There is something in ManifestIndex but not
+-- sure that's what we need
 findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails)
 findNextAvailableJob = runMaybeT
   $ (PackageJob <$> MaybeT Db.selectNextPackageJob)
       <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob)
       <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob)
 
-newJobId :: forall m. MonadEffect m => m JobId
-newJobId = do
-  id <- UUID.make
-  pure $ JobId $ UUID.toString id
-
 executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit
 executeJob _ = case _ of
   PackageJob { payload: Operation.Publish payload@{ name, version } } -> do
@@ -117,10 +126,8 @@ executeJob _ = case _ of
             <> PackageName.print solvedPackage
             <> "@"
             <> Version.print solvedVersion
-          jobId <- newJobId
           Db.insertMatrixJob
-            { jobId
-            , payload: resolutions
+            { payload: resolutions
             , compilerVersion: solvedCompiler
             , packageName: solvedPackage
             , packageVersion: solvedVersion
@@ -144,10 +151,8 @@ executeJob _ = case _ of
            <> PackageName.print solvedPackage
            <> "@"
            <> Version.print solvedVersion
-          jobId <- newJobId
           Db.insertMatrixJob
-            { jobId
-            , payload: resolutions
+            { payload: resolutions
            , compilerVersion: solvedCompiler
            , packageName: solvedPackage
            , packageVersion: solvedVersion
@@ -156,3 +161,24 @@ executeJob _ = case _ of
     -- TODO: need to pass in the package_sets effect
     -- API.packageSetUpdate2 details
     pure unit
+
+upgradeRegistryToNewCompiler :: forall r. Version -> Run (DB + LOG + EXCEPT String + REGISTRY + r) Unit
+upgradeRegistryToNewCompiler newCompilerVersion = do
+  allManifests <- Registry.readAllManifests
+  for_ (ManifestIndex.toArray allManifests) \(Manifest manifest) -> do
+    -- Note: we enqueue compilation jobs only for packages with no dependencies,
+    -- because from them we should be able to reach the whole of the registry:
+    -- as they complete, new jobs for their dependants will be queued up.
+    when (Map.isEmpty manifest.dependencies) do
+      Log.info $ "Enqueuing matrix job for _new_ compiler "
+        <> Version.print newCompilerVersion
+        <> ", package "
+        <> PackageName.print manifest.name
+        <> "@"
+        <> Version.print manifest.version
+      void $ Db.insertMatrixJob
+        { payload: Map.empty
+        , compilerVersion: newCompilerVersion
+        , packageName: manifest.name
+        , packageVersion: manifest.version
+        }
diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs
index 13097c2a6..7ae98d972 100644
--- a/app/src/App/Server/MatrixBuilder.purs
+++ b/app/src/App/Server/MatrixBuilder.purs
@@ -1,5 +1,6 @@
 module Registry.App.Server.MatrixBuilder
-  ( installBuildPlan
+  ( checkIfNewCompiler
+  , installBuildPlan
   , printCompilerFailure
   , readCompilerIndex
   , runMatrixJob
@@ -214,3 +215,16 @@ solveDependantsForCompiler { compilerIndex, name, version, compiler } = do
       ]
       pure Nothing
   pure $ Set.fromFoldable $ Array.catMaybes newJobs
+
+checkIfNewCompiler :: forall r. Run (EXCEPT String + LOG + REGISTRY + AFF + r) (Maybe Version)
+checkIfNewCompiler = do
+  Log.info "Checking if there's a new compiler in town..."
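+  -- A sketch of the check below (resting on one assumption: every compiler
+  -- release is eventually accompanied by a compatible `prelude` release, so
+  -- the latest published `prelude` records every compiler we have already
+  -- tested): fetch all known compilers from PursVersions, look up the compiler
+  -- list stored in the metadata of the newest `prelude` version, and report
+  -- the newest compiler only if that list does not contain it.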
+  latestCompiler <- NonEmptyArray.foldr1 max <$> PursVersions.pursVersions
+  maybeMetadata <- Registry.readMetadata $ unsafeFromRight $ PackageName.parse "prelude"
+  pure $ maybeMetadata >>= \(Metadata metadata) ->
+    Map.findMax metadata.published
+      >>= \{ key: _version, value: { compilers } } -> do
+        case all (_ < latestCompiler) compilers of
+          -- all compilers compatible with the latest prelude are older than this one
+          true -> Just latestCompiler
+          false -> Nothing
diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
index c95fbcf8c..9a3f08074 100644
--- a/app/src/App/Server/Router.purs
+++ b/app/src/App/Server/Router.purs
@@ -14,7 +14,6 @@
 import Registry.App.Effect.Db as Db
 import Registry.App.Effect.Env as Env
 import Registry.App.Effect.Log as Log
 import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects)
-import Registry.App.Server.JobExecutor as JobExecutor
 import Registry.Operation (PackageOperation)
 import Registry.Operation as Operation
 import Registry.PackageName as PackageName
@@ -100,7 +99,6 @@ router { route, method, body } = HTTPurple.usingCont case route, method of
 insertPackageJob :: PackageOperation -> ContT Response (Run _) Response
 insertPackageJob operation = do
   lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation)
-  jobId <- JobExecutor.newJobId
-  lift $ Db.insertPackageJob { jobId, payload: operation }
+  jobId <- lift $ Db.insertPackageJob { payload: operation }
   jsonOk V1.jobCreatedResponseCodec { jobId }
diff --git a/lib/src/ManifestIndex.purs b/lib/src/ManifestIndex.purs
index b5ecd390f..eb3b08480 100644
--- a/lib/src/ManifestIndex.purs
+++ b/lib/src/ManifestIndex.purs
@@ -22,6 +22,7 @@ module Registry.ManifestIndex
   , printEntry
   , readEntryFile
   , removeFromEntryFile
+  , toArray
   , toMap
   , topologicalSort
   , toSortedArray
@@ -88,13 +89,18 @@ empty = ManifestIndex Map.empty
 toMap :: ManifestIndex -> Map PackageName (Map Version Manifest)
 toMap (ManifestIndex index) = index
 
--- | Produce an array of manifests topologically sorted by dependencies.
-toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest
-toSortedArray includeRanges (ManifestIndex index) = topologicalSort includeRanges $ Set.fromFoldable do
+-- | Produce an array of all the manifests.
+toArray :: ManifestIndex -> Array Manifest
+toArray (ManifestIndex index) = do
   Tuple _ versions <- Map.toUnfoldableUnordered index
   Tuple _ manifest <- Map.toUnfoldableUnordered versions
   [ manifest ]
 
+-- | Produce an array of all the manifests, topologically sorted by dependencies.
+toSortedArray :: IncludeRanges -> ManifestIndex -> Array Manifest
+toSortedArray includeRanges index =
+  topologicalSort includeRanges $ Set.fromFoldable $ toArray index
+
 -- | Look up a package version's manifest in the manifest index.
 lookup :: PackageName -> Version -> ManifestIndex -> Maybe Manifest
 lookup name version (ManifestIndex index) =

From f1a602b1a9323b3fce4e20dcb0e290986ba78c50 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Fri, 19 Dec 2025 10:45:23 -0500
Subject: [PATCH 13/19] resolve build issues

---
 app/src/App/Server/Router.purs |    5 +-
 nix/overlay.nix                |    5 +-
 package-lock.json              | 1667 +++++++++++---------------------
 package.json                   |    5 +-
 4 files changed, 573 insertions(+), 1109 deletions(-)

diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs
index 9a3f08074..f371d1e71 100644
--- a/app/src/App/Server/Router.purs
+++ b/app/src/App/Server/Router.purs
@@ -66,8 +66,8 @@ router { route, method, body } = HTTPurple.usingCont case route, method of
     -- TODO return jobs
     Jobs, Get -> do
-      now <- liftEffect nowUTC
-      jsonOk (CJ.array V1.jobCodec) [ { jobId: wrap "foo", createdAt: now, finishedAt: Nothing, success: true, logs: [] } ]
+      _now <- liftEffect nowUTC
+      jsonOk (CJ.array V1.jobCodec) []
 
     Job jobId { level: maybeLogLevel, since }, Get -> do
       let logLevel = fromMaybe Error maybeLogLevel
@@ -101,4 +101,3 @@ router { route, method, body } = HTTPurple.usingCont case route, method of
     lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation)
     jobId <- lift $ Db.insertPackageJob { payload: operation }
     jsonOk V1.jobCreatedResponseCodec { jobId }
-
diff --git a/nix/overlay.nix b/nix/overlay.nix
index 8f2d68973..7eed2916d 100644
--- a/nix/overlay.nix
+++ b/nix/overlay.nix
@@ -181,8 +181,9 @@ in
       ] ++ prev.lib.optionals prev.stdenv.isDarwin [ prev.darwin.cctools ];
 
-      # To update: run `nix build .#server` and copy the hash from the error
-      npmDepsHash = "sha256-iWHvXmTcWr4A/VerriuewnH0qNIYBtYkQnqv1VO8Jhs=";
+      # To update: change to prev.lib.fakeHash, run `nix build .#server`, and copy the
+      # hash from the error
+      npmDepsHash = "sha256-AQcHoiM7CcBGFR0ZjOwunuq5oWhpWkTI3QGqeE3ASpI=";
 
       installPhase = ''
         mkdir -p $out
diff --git a/package-lock.json b/package-lock.json
index fc22de8a0..5c5c89ccd 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -10,10 +10,7 @@
         "app",
         "foreign",
         "lib"
-      ],
-      "dependencies": {
-        "spago": "^0.93.19"
-      }
+      ]
     },
     "app": {
       "name": "registry-app",
@@ -256,65 +253,65 @@
       }
     },
     "node_modules/@aws-sdk/client-s3": {
-      "version": "3.948.0",
-      "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.948.0.tgz",
-      "integrity": "sha512-uvEjds8aYA9SzhBS8RKDtsDUhNV9VhqKiHTcmvhM7gJO92q0WTn8/QeFTdNyLc6RxpiDyz+uBxS7PcdNiZzqfA==",
+      "version": "3.955.0",
+      "resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.955.0.tgz",
+      "integrity": "sha512-bFvSM6UB0R5hpWfXzHI3BlKwT2qYHto9JoDtzSr5FxVguTMzJyr+an11VT1Hi5wgO03luXEeXeloURFvaMs6TQ==",
       "license": "Apache-2.0",
       "dependencies": {
         "@aws-crypto/sha1-browser": "5.2.0",
         "@aws-crypto/sha256-browser": "5.2.0",
         "@aws-crypto/sha256-js": "5.2.0",
-        "@aws-sdk/core": "3.947.0",
-        "@aws-sdk/credential-provider-node": "3.948.0",
-        "@aws-sdk/middleware-bucket-endpoint": "3.936.0",
-        "@aws-sdk/middleware-expect-continue": "3.936.0",
-        "@aws-sdk/middleware-flexible-checksums": "3.947.0",
-        "@aws-sdk/middleware-host-header": "3.936.0",
-        "@aws-sdk/middleware-location-constraint": "3.936.0",
-        "@aws-sdk/middleware-logger": "3.936.0",
-        "@aws-sdk/middleware-recursion-detection": "3.948.0",
-        "@aws-sdk/middleware-sdk-s3": "3.947.0",
-        "@aws-sdk/middleware-ssec": "3.936.0",
-        "@aws-sdk/middleware-user-agent": "3.947.0",
-        "@aws-sdk/region-config-resolver": "3.936.0",
-
"@aws-sdk/signature-v4-multi-region": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/eventstream-serde-browser": "^4.2.5", - "@smithy/eventstream-serde-config-resolver": "^4.3.5", - "@smithy/eventstream-serde-node": "^4.2.5", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-blob-browser": "^4.2.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/hash-stream-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/md5-js": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-node": "3.955.0", + "@aws-sdk/middleware-bucket-endpoint": "3.953.0", + "@aws-sdk/middleware-expect-continue": "3.953.0", + "@aws-sdk/middleware-flexible-checksums": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-location-constraint": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/middleware-ssec": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/signature-v4-multi-region": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/eventstream-serde-browser": "^4.2.6", + "@smithy/eventstream-serde-config-resolver": "^4.3.6", + "@smithy/eventstream-serde-node": "^4.2.6", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-blob-browser": "^4.2.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/hash-stream-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/md5-js": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", - "@smithy/util-waiter": "^4.2.5", + "@smithy/util-waiter": "^4.2.6", "tslib": "^2.6.2" 
}, "engines": { @@ -322,47 +319,47 @@ } }, "node_modules/@aws-sdk/client-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.948.0.tgz", - "integrity": "sha512-iWjchXy8bIAVBUsKnbfKYXRwhLgRg3EqCQ5FTr3JbR+QR75rZm4ZOYXlvHGztVTmtAZ+PQVA1Y4zO7v7N87C0A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.955.0.tgz", + "integrity": "sha512-+nym5boDFt2ksba0fElocMKxCFJbJcd31PI3502hoI1N5VK7HyxkQeBtQJ64JYomvw8eARjWWC13hkB0LtZILw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -371,22 +368,22 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.947.0.tgz", - "integrity": 
"sha512-Khq4zHhuAkvCFuFbgcy3GrZTzfSX7ZIjIcW1zRDxXRLZKRtuhnZdonqTUfaWi5K42/4OmxkYNpsO7X7trQOeHw==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/xml-builder": "3.930.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.954.0.tgz", + "integrity": "sha512-5oYO5RP+mvCNXNj8XnF9jZo0EP0LTseYOJVNQYcii1D9DJqzHL3HJWurYh7cXxz7G7eDyvVYA01O9Xpt34TdoA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "3.953.0", + "@aws-sdk/xml-builder": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-base64": "^4.3.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -395,15 +392,15 @@ } }, "node_modules/@aws-sdk/credential-provider-env": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.947.0.tgz", - "integrity": "sha512-VR2V6dRELmzwAsCpK4GqxUi6UW5WNhAXS9F9AzWi5jvijwJo3nH92YNJUP4quMpgFZxJHEWyXLWgPjh9u0zYOA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.954.0.tgz", + "integrity": "sha512-2HNkqBjfsvyoRuPAiFh86JBFMFyaCNhL4VyH6XqwTGKZffjG7hdBmzXPy7AT7G3oFh1k/1Zc27v0qxaKoK7mBA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -411,20 +408,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.947.0.tgz", - "integrity": "sha512-inF09lh9SlHj63Vmr5d+LmwPXZc2IbK8lAruhOr3KLsZAIHEgHgGPXWDC2ukTEMzg0pkexQ6FOhXXad6klK4RA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.954.0.tgz", + "integrity": "sha512-CrWD5300+NE1OYRnSVDxoG7G0b5cLIZb7yp+rNQ5Jq/kqnTmyJXpVAsivq+bQIDaGzPXhadzpAMIoo7K/aHaag==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/util-stream": "^4.5.7", "tslib": "^2.6.2" }, "engines": { @@ -432,24 +429,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": 
"3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.948.0.tgz", - "integrity": "sha512-Cl//Qh88e8HBL7yYkJNpF5eq76IO6rq8GsatKcfVBm7RFVxCqYEPSSBtkHdbtNwQdRQqAMXc6E/lEB/CZUDxnA==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.955.0.tgz", + "integrity": "sha512-90isLovxsPzaaSx3IIUZuxym6VXrsRetnQ3AuHr2kiTFk2pIzyIwmi+gDcUaLXQ5nNBoSj1Z/4+i1vhxa1n2DQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-login": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-login": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -457,18 +454,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.948.0.tgz", - "integrity": "sha512-gcKO2b6eeTuZGp3Vvgr/9OxajMrD3W+FZ2FCyJox363ZgMoYJsyNid1vuZrEuAGkx0jvveLXfwiVS0UXyPkgtw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.955.0.tgz", + "integrity": "sha512-xlkmSvg8oDN5LIxLAq3N1QWK8F8gUAsBWZlp1IX8Lr5XhcKI3GVarIIUcZrvCy1NjzCd/LDXYdNL6MRlNP4bAw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -476,22 +473,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.948.0.tgz", - "integrity": "sha512-ep5vRLnrRdcsP17Ef31sNN4g8Nqk/4JBydcUJuFRbGuyQtrZZrVT81UeH2xhz6d0BK6ejafDB9+ZpBjXuWT5/Q==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.955.0.tgz", + "integrity": "sha512-XIL4QB+dPOJA6DRTmYZL52wFcLTslb7V1ydS4FCNT2DVLhkO4ExkPP+pe5YmIpzt/Our1ugS+XxAs3e6BtyFjA==", "license": "Apache-2.0", "dependencies": { - 
"@aws-sdk/credential-provider-env": "3.947.0", - "@aws-sdk/credential-provider-http": "3.947.0", - "@aws-sdk/credential-provider-ini": "3.948.0", - "@aws-sdk/credential-provider-process": "3.947.0", - "@aws-sdk/credential-provider-sso": "3.948.0", - "@aws-sdk/credential-provider-web-identity": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/credential-provider-env": "3.954.0", + "@aws-sdk/credential-provider-http": "3.954.0", + "@aws-sdk/credential-provider-ini": "3.955.0", + "@aws-sdk/credential-provider-process": "3.954.0", + "@aws-sdk/credential-provider-sso": "3.955.0", + "@aws-sdk/credential-provider-web-identity": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/credential-provider-imds": "^4.2.6", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -499,16 +496,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.947.0.tgz", - "integrity": "sha512-WpanFbHe08SP1hAJNeDdBDVz9SGgMu/gc0XJ9u3uNpW99nKZjDpvPRAdW7WLA4K6essMjxWkguIGNOpij6Do2Q==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.954.0.tgz", + "integrity": "sha512-Y1/0O2LgbKM8iIgcVj/GNEQW6p90LVTCOzF2CI1pouoKqxmZ/1F7F66WHoa6XUOfKaCRj/R6nuMR3om9ThaM5A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -516,18 +513,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.948.0.tgz", - "integrity": "sha512-gqLhX1L+zb/ZDnnYbILQqJ46j735StfWV5PbDjxRzBKS7GzsiYoaf6MyHseEopmWrez5zl5l6aWzig7UpzSeQQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.955.0.tgz", + "integrity": "sha512-Y99KI73Fn8JnB4RY5Ls6j7rd5jmFFwnY9WLHIWeJdc+vfwL6Bb1uWKW3+m/B9+RC4Xoz2nQgtefBcdWq5Xx8iw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/client-sso": "3.948.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/token-providers": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/client-sso": "3.955.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/token-providers": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -535,17 +532,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.948.0.tgz", - "integrity": 
"sha512-MvYQlXVoJyfF3/SmnNzOVEtANRAiJIObEUYYyjTqKZTmcRIVVky0tPuG26XnB8LmTYgtESwJIZJj/Eyyc9WURQ==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.955.0.tgz", + "integrity": "sha512-+lFxkZ2Vz3qp/T68ZONKzWVTQvomTu7E6tts1dfAbEcDt62Y/nPCByq/C2hQj+TiN05HrUx+yTJaGHBklhkbqA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -553,16 +550,16 @@ } }, "node_modules/@aws-sdk/middleware-bucket-endpoint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.936.0.tgz", - "integrity": "sha512-XLSVVfAorUxZh6dzF+HTOp4R1B5EQcdpGcPliWr0KUj2jukgjZEcqbBmjyMF/p9bmyQsONX80iURF1HLAlW0qg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.953.0.tgz", + "integrity": "sha512-YHVRIOowtGIl/L2WuS83FgRlm31tU0aL1yryWaFtF+AFjA5BIeiFkxIZqaRGxJpJvFEBdohsyq6Ipv5mgWfezg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", "tslib": "^2.6.2" }, @@ -571,14 +568,14 @@ } }, "node_modules/@aws-sdk/middleware-expect-continue": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.936.0.tgz", - "integrity": "sha512-Eb4ELAC23bEQLJmUMYnPWcjD3FZIsmz2svDiXEcxRkQU9r7NRID7pM7C5NPH94wOfiCk0b2Y8rVyFXW0lGQwbA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.953.0.tgz", + "integrity": "sha512-BQTVXrypQ0rbb7au/Hk4IS5GaJZlwk6O44Rjk6Kxb0IvGQhSurNTuesFiJx1sLbf+w+T31saPtODcfQQERqhCQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -586,22 +583,22 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.947.0.tgz", - "integrity": "sha512-kXXxS2raNESNO+zR0L4YInVjhcGGNI2Mx0AE1ThRhDkAt2se3a+rGf9equ9YvOqA1m8Jl/GSI8cXYvSxXmS9Ag==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.954.0.tgz", + "integrity": "sha512-hHOPDJyxucNodkgapLhA0VdwDBwVYN9DX20aA6j+3nwutAlZ5skaV7Bw0W3YC7Fh/ieDKKhcSZulONd4lVTwMg==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", 
"@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -610,14 +607,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.936.0.tgz", - "integrity": "sha512-tAaObaAnsP1XnLGndfkGWFuzrJYuk9W0b/nLvol66t8FZExIAf/WdkT2NNAWOYxljVs++oHnyHBCxIlaHrzSiw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.953.0.tgz", + "integrity": "sha512-jTGhfkONav+r4E6HLOrl5SzBqDmPByUYCkyB/c/3TVb8jX3wAZx8/q9bphKpCh+G5ARi3IdbSisgkZrJYqQ19Q==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -625,13 +622,13 @@ } }, "node_modules/@aws-sdk/middleware-location-constraint": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.936.0.tgz", - "integrity": "sha512-SCMPenDtQMd9o5da9JzkHz838w3327iqXk3cbNnXWqnNRx6unyW8FL0DZ84gIY12kAyVHz5WEqlWuekc15ehfw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.953.0.tgz", + "integrity": "sha512-h0urrbteIQEybyIISaJfQLZ/+/lJPRzPWAQT4epvzfgv/4MKZI7K83dK7SfTwAooVKFBHiCMok2Cf0iHDt07Kw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -639,13 +636,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.936.0.tgz", - "integrity": "sha512-aPSJ12d3a3Ea5nyEnLbijCaaYJT2QjQ9iW+zGh5QcZYXmOGWbKVyPSxmVOboZQG+c1M8t6d2O7tqrwzIq8L8qw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.953.0.tgz", + "integrity": "sha512-PlWdVYgcuptkIC0ZKqVUhWNtSHXJSx7U9V8J7dJjRmsXC40X7zpEycvrkzDMJjeTDGcCceYbyYAg/4X1lkcIMw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -653,15 +650,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.948.0.tgz", - "integrity": "sha512-Qa8Zj+EAqA0VlAVvxpRnpBpIWJI9KUwaioY1vkeNVwXPlNaz9y9zCKVM9iU9OZ5HXpoUg6TnhATAHXHAE8+QsQ==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.953.0.tgz", + "integrity": 
"sha512-cmIJx0gWeesUKK4YwgE+VQL3mpACr3/J24fbwnc1Z5tntC86b+HQFzU5vsBDw6lLwyD46dBgWdsXFh1jL+ZaFw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", + "@aws-sdk/types": "3.953.0", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -669,23 +666,23 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.947.0.tgz", - "integrity": "sha512-DS2tm5YBKhPW2PthrRBDr6eufChbwXe0NjtTZcYDfUCXf0OR+W6cIqyKguwHMJ+IyYdey30AfVw9/Lb5KB8U8A==", - "license": "Apache-2.0", - "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-arn-parser": "3.893.0", - "@smithy/core": "^3.18.7", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.954.0.tgz", + "integrity": "sha512-274CNmnRjknmfFb2o0Azxic54fnujaA8AYSeRUOho3lN48TVzx85eAFWj2kLgvUJO88pE3jBDPWboKQiQdXeUQ==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-arn-parser": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-stream": "^4.5.7", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -694,13 +691,13 @@ } }, "node_modules/@aws-sdk/middleware-ssec": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.936.0.tgz", - "integrity": "sha512-/GLC9lZdVp05ozRik5KsuODR/N7j+W+2TbfdFL3iS+7un+gnP6hC8RDOZd6WhpZp7drXQ9guKiTAxkZQwzS8DA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-ssec/-/middleware-ssec-3.953.0.tgz", + "integrity": "sha512-OrhG1kcQ9zZh3NS3RovR028N0+UndQ957zF1k5HPLeFLwFwQN1uPOufzzPzAyXIIKtR69ARFsQI4mstZS4DMvw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -708,17 +705,17 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.947.0.tgz", - "integrity": "sha512-7rpKV8YNgCP2R4F9RjWZFcD2R+SO/0R4VHIbY9iZJdH2MzzJ8ZG7h8dZ2m8QkQd1fjx4wrFJGGPJUTYXPV3baA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.954.0.tgz", + "integrity": "sha512-5PX8JDe3dB2+MqXeGIhmgFnm2rbVsSxhz+Xyuu1oxLtbOn+a9UDA+sNBufEBjt3UxWy5qwEEY1fxdbXXayjlGg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@smithy/core": "^3.18.7", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/types": "3.953.0", + 
"@aws-sdk/util-endpoints": "3.953.0", + "@smithy/core": "^3.19.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -726,47 +723,47 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.948.0.tgz", - "integrity": "sha512-zcbJfBsB6h254o3NuoEkf0+UY1GpE9ioiQdENWv7odo69s8iaGBEQ4BDpsIMqcuiiUXw1uKIVNxCB1gUGYz8lw==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.955.0.tgz", + "integrity": "sha512-RBi6CQHbPF09kqXAoiEOOPkVnSoU5YppKoOt/cgsWfoMHwC+7itIrEv+yRD62h14jIjF3KngVIQIrBRbX3o3/Q==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "3.947.0", - "@aws-sdk/middleware-host-header": "3.936.0", - "@aws-sdk/middleware-logger": "3.936.0", - "@aws-sdk/middleware-recursion-detection": "3.948.0", - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/region-config-resolver": "3.936.0", - "@aws-sdk/types": "3.936.0", - "@aws-sdk/util-endpoints": "3.936.0", - "@aws-sdk/util-user-agent-browser": "3.936.0", - "@aws-sdk/util-user-agent-node": "3.947.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/core": "^3.18.7", - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/hash-node": "^4.2.5", - "@smithy/invalid-dependency": "^4.2.5", - "@smithy/middleware-content-length": "^4.2.5", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-retry": "^4.4.14", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/middleware-host-header": "3.953.0", + "@aws-sdk/middleware-logger": "3.953.0", + "@aws-sdk/middleware-recursion-detection": "3.953.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/region-config-resolver": "3.953.0", + "@aws-sdk/types": "3.953.0", + "@aws-sdk/util-endpoints": "3.953.0", + "@aws-sdk/util-user-agent-browser": "3.953.0", + "@aws-sdk/util-user-agent-node": "3.954.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/core": "^3.19.0", + "@smithy/fetch-http-handler": "^5.3.7", + "@smithy/hash-node": "^4.2.6", + "@smithy/invalid-dependency": "^4.2.6", + "@smithy/middleware-content-length": "^4.2.6", + "@smithy/middleware-endpoint": "^4.4.0", + "@smithy/middleware-retry": "^4.4.16", + "@smithy/middleware-serde": "^4.2.7", + "@smithy/middleware-stack": "^4.2.6", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/node-http-handler": "^4.4.6", + "@smithy/protocol-http": "^5.3.6", + "@smithy/smithy-client": "^4.10.1", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", "@smithy/util-body-length-node": "^4.2.1", - "@smithy/util-defaults-mode-browser": "^4.3.13", - "@smithy/util-defaults-mode-node": "^4.2.16", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "@smithy/util-defaults-mode-browser": "^4.3.15", + "@smithy/util-defaults-mode-node": "^4.2.18", + "@smithy/util-endpoints": "^3.2.6", + "@smithy/util-middleware": "^4.2.6", + "@smithy/util-retry": "^4.2.6", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -775,15 
+772,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.936.0.tgz", - "integrity": "sha512-wOKhzzWsshXGduxO4pqSiNyL9oUtk4BEvjWm9aaq6Hmfdoydq6v6t0rAGHWPjFwy9z2haovGRi3C8IxdMB4muw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.953.0.tgz", + "integrity": "sha512-5MJgnsc+HLO+le0EK1cy92yrC7kyhGZSpaq8PcQvKs9qtXCXT5Tb6tMdkr5Y07JxYsYOV1omWBynvL6PWh08tQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/config-resolver": "^4.4.3", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/config-resolver": "^4.4.4", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -791,16 +788,16 @@ } }, "node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.947.0.tgz", - "integrity": "sha512-UaYmzoxf9q3mabIA2hc4T6x5YSFUG2BpNjAZ207EA1bnQMiK+d6vZvb83t7dIWL/U1de1sGV19c1C81Jf14rrA==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.954.0.tgz", + "integrity": "sha512-GJJbUaSlGrMSRWui3Oz8ByygpQlzDGm195yTKirgGyu4tfYrFr/QWrWT42EUktY/L4Irev1pdHTuLS+AGHO1gw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-sdk-s3": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/signature-v4": "^5.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-sdk-s3": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/protocol-http": "^5.3.6", + "@smithy/signature-v4": "^5.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -808,17 +805,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.948.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.948.0.tgz", - "integrity": "sha512-V487/kM4Teq5dcr1t5K6eoUKuqlGr9FRWL3MIMukMERJXHZvio6kox60FZ/YtciRHRI75u14YUqm2Dzddcu3+A==", + "version": "3.955.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.955.0.tgz", + "integrity": "sha512-LVpWkxXvMPgZofP2Gc8XBfQhsyecBMVARDHWMvks6vPbCLSTM7dw6H1HI9qbGNCurYcyc2xBRAkEDhChQlbPPg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "3.947.0", - "@aws-sdk/nested-clients": "3.948.0", - "@aws-sdk/types": "3.936.0", - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/core": "3.954.0", + "@aws-sdk/nested-clients": "3.955.0", + "@aws-sdk/types": "3.953.0", + "@smithy/property-provider": "^4.2.6", + "@smithy/shared-ini-file-loader": "^4.4.1", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -826,12 +823,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.936.0.tgz", - "integrity": "sha512-uz0/VlMd2pP5MepdrHizd+T+OKfyK4r3OA9JI+L/lPKg0YFQosdJNCKisr6o70E3dh8iMpFYxF1UN/4uZsyARg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.953.0.tgz", + "integrity": "sha512-M9Iwg9kTyqTErI0vOTVVpcnTHWzS3VplQppy8MuL02EE+mJ0BIwpWfsaAPQW+/XnVpdNpWZTsHcNE29f1+hR8g==", "license": "Apache-2.0", "dependencies": { - 
"@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -839,9 +836,9 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.893.0.tgz", - "integrity": "sha512-u8H4f2Zsi19DGnwj5FSZzDMhytYF/bCh37vAtBsn3cNDL3YG578X5oc+wSX54pM3tOxS+NY7tvOAo52SW7koUA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.953.0.tgz", + "integrity": "sha512-9hqdKkn4OvYzzaLryq2xnwcrPc8ziY34i9szUdgBfSqEC6pBxbY9/lLXmrgzfwMSL2Z7/v2go4Od0p5eukKLMQ==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -851,15 +848,15 @@ } }, "node_modules/@aws-sdk/util-endpoints": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.936.0.tgz", - "integrity": "sha512-0Zx3Ntdpu+z9Wlm7JKUBOzS9EunwKAb4KdGUQQxDqh5Lc3ta5uBoub+FgmVuzwnmBu9U1Os8UuwVTH0Lgu+P5w==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.953.0.tgz", + "integrity": "sha512-rjaS6jrFksopXvNg6YeN+D1lYwhcByORNlFuYesFvaQNtPOufbE5tJL4GJ3TMXyaY0uFR28N5BHHITPyWWfH/g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-endpoints": "^3.2.5", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", + "@smithy/url-parser": "^4.2.6", + "@smithy/util-endpoints": "^3.2.6", "tslib": "^2.6.2" }, "engines": { @@ -867,9 +864,9 @@ } }, "node_modules/@aws-sdk/util-locate-window": { - "version": "3.893.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.893.0.tgz", - "integrity": "sha512-T89pFfgat6c8nMmpI8eKjBcDcgJq36+m9oiXbcUzeU55MP9ZuGgBomGjGnHaEyF36jenW9gmg3NfZDm0AO2XPg==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.953.0.tgz", + "integrity": "sha512-mPxK+I1LcrgC/RSa3G5AMAn8eN2Ay0VOgw8lSRmV1jCtO+iYvNeCqOdxoJUjOW6I5BA4niIRWqVORuRP07776Q==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -879,27 +876,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.936.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.936.0.tgz", - "integrity": "sha512-eZ/XF6NxMtu+iCma58GRNRxSq4lHo6zHQLOZRIeL/ghqYJirqHdenMOwrzPettj60KWlv827RVebP9oNVrwZbw==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.953.0.tgz", + "integrity": "sha512-UF5NeqYesWuFao+u7LJvpV1SJCaLml5BtFZKUdTnNNMeN6jvV+dW/eQoFGpXF94RCqguX0XESmRuRRPQp+/rzQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "3.936.0", - "@smithy/types": "^4.9.0", + "@aws-sdk/types": "3.953.0", + "@smithy/types": "^4.10.0", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, "node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.947.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.947.0.tgz", - "integrity": "sha512-+vhHoDrdbb+zerV4noQk1DHaUMNzWFWPpPYjVTwW2186k5BEJIecAMChYkghRrBVJ3KPWP1+JnZwOd72F3d4rQ==", + "version": "3.954.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.954.0.tgz", + "integrity": "sha512-fB5S5VOu7OFkeNzcblQlez4AjO5hgDFaa7phYt7716YWisY3RjAaQPlxgv+G3GltHHDJIfzEC5aRxdf62B9zMg==", "license": "Apache-2.0", 
"dependencies": { - "@aws-sdk/middleware-user-agent": "3.947.0", - "@aws-sdk/types": "3.936.0", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@aws-sdk/middleware-user-agent": "3.954.0", + "@aws-sdk/types": "3.953.0", + "@smithy/node-config-provider": "^4.3.6", + "@smithy/types": "^4.10.0", "tslib": "^2.6.2" }, "engines": { @@ -915,12 +912,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.930.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.930.0.tgz", - "integrity": "sha512-YIfkD17GocxdmlUVc3ia52QhcWuRIUJonbF8A2CYfcWNV3HzvAqpcPeC0bYUhkK+8e8YO1ARnLKZQE0TlwzorA==", + "version": "3.953.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.953.0.tgz", + "integrity": "sha512-Zmrj21jQ2OeOJGr9spPiN00aQvXa/WUqRXcTVENhrMt+OFoSOfDFpYhUj9NQ09QmQ8KMWFoWuWW6iKurNqLvAA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.10.0", "fast-xml-parser": "5.2.5", "tslib": "^2.6.2" }, @@ -1172,12 +1169,12 @@ } }, "node_modules/@smithy/abort-controller": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.5.tgz", - "integrity": "sha512-j7HwVkBw68YW8UmFRcjZOmssE77Rvk0GWAIN1oFBhsaovQmZWYCIcGa9/pwRB0ExI8Sk9MWNALTjftjHZea7VA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/abort-controller/-/abort-controller-4.2.7.tgz", + "integrity": "sha512-rzMY6CaKx2qxrbYbqjXWS0plqEy7LOdKHS0bg4ixJ6aoGDPNUcLWk/FRNuCILh7GKLG9TFUXYYeQQldMBBwuyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1210,16 +1207,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.3.tgz", - "integrity": "sha512-ezHLe1tKLUxDJo2LHtDuEDyWXolw8WGOR92qb4bQdWq/zKenO5BvctZGrVJBK08zjezSk7bmbKFOXIVyChvDLw==", + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz", + "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-config-provider": "^4.2.0", - "@smithy/util-endpoints": "^3.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-endpoints": "^3.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1227,18 +1224,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.18.7", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.18.7.tgz", - "integrity": "sha512-axG9MvKhMWOhFbvf5y2DuyTxQueO0dkedY9QC3mAfndLosRI/9LJv8WaL0mw7ubNhsO4IuXX9/9dYGPFvHrqlw==", + "version": "3.20.0", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.20.0.tgz", + "integrity": "sha512-WsSHCPq/neD5G/MkK4csLI5Y5Pkd9c1NMfpYEKeghSGaD4Ja1qLIohRQf2D5c1Uy5aXp76DeKHkzWZ9KAlHroQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/middleware-serde": "^4.2.6", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-body-length-browser": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-stream": "^4.5.6", 
+ "@smithy/util-middleware": "^4.2.7", + "@smithy/util-stream": "^4.5.8", "@smithy/util-utf8": "^4.2.0", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" @@ -1248,15 +1245,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.5.tgz", - "integrity": "sha512-BZwotjoZWn9+36nimwm/OLIcVe+KYRwzMjfhd4QT7QxPm9WY0HiOV8t/Wlh+HVUif0SBVV7ksq8//hPaBC/okQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.7.tgz", + "integrity": "sha512-CmduWdCiILCRNbQWFR0OcZlUPVtyE49Sr8yYL0rZQ4D/wKxiNzBNS/YHemvnbkIWj623fplgkexUd/c9CAKdoA==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1264,13 +1261,13 @@ } }, "node_modules/@smithy/eventstream-codec": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.5.tgz", - "integrity": "sha512-Ogt4Zi9hEbIP17oQMd68qYOHUzmH47UkK7q7Gl55iIm9oKt27MUGrC5JfpMroeHjdkOliOA4Qt3NQ1xMq/nrlA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-codec/-/eventstream-codec-4.2.7.tgz", + "integrity": "sha512-DrpkEoM3j9cBBWhufqBwnbbn+3nf1N9FP6xuVJ+e220jbactKuQgaZwjwP5CP1t+O94brm2JgVMD2atMGX3xIQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", "tslib": "^2.6.2" }, @@ -1279,13 +1276,13 @@ } }, "node_modules/@smithy/eventstream-serde-browser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.5.tgz", - "integrity": "sha512-HohfmCQZjppVnKX2PnXlf47CW3j92Ki6T/vkAT2DhBR47e89pen3s4fIa7otGTtrVxmj7q+IhH0RnC5kpR8wtw==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-4.2.7.tgz", + "integrity": "sha512-ujzPk8seYoDBmABDE5YqlhQZAXLOrtxtJLrbhHMKjBoG5b4dK4i6/mEU+6/7yXIAkqOO8sJ6YxZl+h0QQ1IJ7g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1293,12 +1290,12 @@ } }, "node_modules/@smithy/eventstream-serde-config-resolver": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.5.tgz", - "integrity": "sha512-ibjQjM7wEXtECiT6my1xfiMH9IcEczMOS6xiCQXoUIYSj5b1CpBbJ3VYbdwDy8Vcg5JHN7eFpOCGk8nyZAltNQ==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-4.3.7.tgz", + "integrity": "sha512-x7BtAiIPSaNaWuzm24Q/mtSkv+BrISO/fmheiJ39PKRNH3RmH2Hph/bUKSOBOBC9unqfIYDhKTHwpyZycLGPVQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1306,13 +1303,13 @@ } }, "node_modules/@smithy/eventstream-serde-node": { - "version": "4.2.5", - "resolved": 
"https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.5.tgz", - "integrity": "sha512-+elOuaYx6F2H6x1/5BQP5ugv12nfJl66GhxON8+dWVUEDJ9jah/A0tayVdkLRP0AeSac0inYkDz5qBFKfVp2Gg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-node/-/eventstream-serde-node-4.2.7.tgz", + "integrity": "sha512-roySCtHC5+pQq5lK4be1fZ/WR6s/AxnPaLfCODIPArtN2du8s5Ot4mKVK3pPtijL/L654ws592JHJ1PbZFF6+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-serde-universal": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-serde-universal": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1320,13 +1317,13 @@ } }, "node_modules/@smithy/eventstream-serde-universal": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.5.tgz", - "integrity": "sha512-G9WSqbST45bmIFaeNuP/EnC19Rhp54CcVdX9PDL1zyEB514WsDVXhlyihKlGXnRycmHNmVv88Bvvt4EYxWef/Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-4.2.7.tgz", + "integrity": "sha512-QVD+g3+icFkThoy4r8wVFZMsIP08taHVKjE6Jpmz8h5CgX/kk6pTODq5cht0OMtcapUx+xrPzUTQdA+TmO0m1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/eventstream-codec": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/eventstream-codec": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1334,14 +1331,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.6", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.6.tgz", - "integrity": "sha512-3+RG3EA6BBJ/ofZUeTFJA7mHfSYrZtQIrDP9dI8Lf7X6Jbos2jptuLrAAteDiFVrmbEmLSuRG/bUKzfAXk7dhg==", + "version": "5.3.8", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.8.tgz", + "integrity": "sha512-h/Fi+o7mti4n8wx1SR6UHWLaakwHRx29sizvp8OOm7iqwKGFneT06GCSFhml6Bha5BT6ot5pj3CYZnCHhGC2Rg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "tslib": "^2.6.2" }, @@ -1350,14 +1347,14 @@ } }, "node_modules/@smithy/hash-blob-browser": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.6.tgz", - "integrity": "sha512-8P//tA8DVPk+3XURk2rwcKgYwFvwGwmJH/wJqQiSKwXZtf/LiZK+hbUZmPj/9KzM+OVSwe4o85KTp5x9DUZTjw==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/hash-blob-browser/-/hash-blob-browser-4.2.8.tgz", + "integrity": "sha512-07InZontqsM1ggTCPSRgI7d8DirqRrnpL7nIACT4PW0AWrgDiHhjGZzbAE5UtRSiU0NISGUYe7/rri9ZeWyDpw==", "license": "Apache-2.0", "dependencies": { "@smithy/chunked-blob-reader": "^5.2.0", "@smithy/chunked-blob-reader-native": "^4.2.1", - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1365,12 +1362,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.5.tgz", - "integrity": "sha512-DpYX914YOfA3UDT9CN1BM787PcHfWRBB43fFGCYrZFUH0Jv+5t8yYl+Pd5PW4+QzoGEDvn5d5QIO4j2HyYZQSA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.7.tgz", + "integrity": 
"sha512-PU/JWLTBCV1c8FtB8tEFnY4eV1tSfBc7bDBADHfn1K+uRbPgSJ9jnJp0hyjiFN2PMdPzxsf1Fdu0eo9fJ760Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1380,12 +1377,12 @@ } }, "node_modules/@smithy/hash-stream-node": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.5.tgz", - "integrity": "sha512-6+do24VnEyvWcGdHXomlpd0m8bfZePpUKBy7m311n+JuRwug8J4dCanJdTymx//8mi0nlkflZBvJe+dEO/O12Q==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/hash-stream-node/-/hash-stream-node-4.2.7.tgz", + "integrity": "sha512-ZQVoAwNYnFMIbd4DUc517HuwNelJUY6YOzwqrbcAgCnVn+79/OK7UjwA93SPpdTOpKDVkLIzavWm/Ck7SmnDPQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1394,12 +1391,12 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.5.tgz", - "integrity": "sha512-2L2erASEro1WC5nV+plwIMxrTXpvpfzl4e+Nre6vBVRR2HKeGGcvpJyyL3/PpiSg+cJG2KpTmZmq934Olb6e5A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.7.tgz", + "integrity": "sha512-ncvgCr9a15nPlkhIUx3CU4d7E7WEuVJOV7fS7nnK2hLtPK9tYRBkMHQbhXU1VvvKeBm/O0x26OEoBq+ngFpOEQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1419,12 +1416,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.5.tgz", - "integrity": "sha512-Bt6jpSTMWfjCtC0s79gZ/WZ1w90grfmopVOWqkI2ovhjpD5Q2XRXuecIPB9689L2+cCySMbaXDhBPU56FKNDNg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.7.tgz", + "integrity": "sha512-Wv6JcUxtOLTnxvNjDnAiATUsk8gvA6EeS8zzHig07dotpByYsLot+m0AaQEniUBjx97AC41MQR4hW0baraD1Xw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" }, @@ -1433,13 +1430,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.5.tgz", - "integrity": "sha512-Y/RabVa5vbl5FuHYV2vUCwvh/dqzrEY/K2yWPSqvhFUwIY0atLqO4TienjBXakoy4zrKAMCZwg+YEqmH7jaN7A==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.7.tgz", + "integrity": "sha512-GszfBfCcvt7kIbJ41LuNa5f0wvQCHhnGx/aDaZJCCT05Ld6x6U2s0xsc/0mBFONBZjQJp2U/0uSJ178OXOwbhg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1447,18 +1444,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.3.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.3.14.tgz", - "integrity": "sha512-v0q4uTKgBM8dsqGjqsabZQyH85nFaTnFcgpWU1uydKFsdyyMzfvOkNum9G7VK+dOP01vUnoZxIeRiJ6uD0kjIg==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.1.tgz", + 
"integrity": "sha512-gpLspUAoe6f1M6H0u4cVuFzxZBrsGZmjx2O9SigurTx4PbntYa4AJ+o0G0oGm1L2oSX6oBhcGHwrfJHup2JnJg==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-serde": "^4.2.6", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", - "@smithy/url-parser": "^4.2.5", - "@smithy/util-middleware": "^4.2.5", + "@smithy/core": "^3.20.0", + "@smithy/middleware-serde": "^4.2.8", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", + "@smithy/url-parser": "^4.2.7", + "@smithy/util-middleware": "^4.2.7", "tslib": "^2.6.2" }, "engines": { @@ -1466,18 +1463,18 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.4.14", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.14.tgz", - "integrity": "sha512-Z2DG8Ej7FyWG1UA+7HceINtSLzswUgs2np3sZX0YBBxCt+CXG4QUxv88ZDS3+2/1ldW7LqtSY1UO/6VQ1pND8Q==", - "license": "Apache-2.0", - "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/service-error-classification": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", - "@smithy/util-middleware": "^4.2.5", - "@smithy/util-retry": "^4.2.5", + "version": "4.4.17", + "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.4.17.tgz", + "integrity": "sha512-MqbXK6Y9uq17h+4r0ogu/sBT6V/rdV+5NvYL7ZV444BKfQygYe8wAhDrVXagVebN6w2RE0Fm245l69mOsPGZzg==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/node-config-provider": "^4.3.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", + "@smithy/util-middleware": "^4.2.7", + "@smithy/util-retry": "^4.2.7", "@smithy/uuid": "^1.1.0", "tslib": "^2.6.2" }, @@ -1486,13 +1483,13 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.6", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.6.tgz", - "integrity": "sha512-VkLoE/z7e2g8pirwisLz8XJWedUSY8my/qrp81VmAdyrhi94T+riBfwP+AOEEFR9rFTSonC/5D2eWNmFabHyGQ==", + "version": "4.2.8", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.8.tgz", + "integrity": "sha512-8rDGYen5m5+NV9eHv9ry0sqm2gI6W7mc1VSFMtn6Igo25S507/HaOX9LTHAS2/J32VXD0xSzrY0H5FJtOMS4/w==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1500,12 +1497,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.5.tgz", - "integrity": "sha512-bYrutc+neOyWxtZdbB2USbQttZN0mXaOyYLIsaTbJhFsfpXyGWUxJpEuO1rJ8IIJm2qH4+xJT0mxUSsEDTYwdQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.7.tgz", + "integrity": "sha512-bsOT0rJ+HHlZd9crHoS37mt8qRRN/h9jRve1SXUhVbkRzu0QaNYZp1i1jha4n098tsvROjcwfLlfvcFuJSXEsw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1513,14 +1510,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.5", - "resolved": 
"https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.5.tgz", - "integrity": "sha512-UTurh1C4qkVCtqggI36DGbLB2Kv8UlcFdMXDcWMbqVY2uRg0XmT9Pb4Vj6oSQ34eizO1fvR0RnFV4Axw4IrrAg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz", + "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/shared-ini-file-loader": "^4.4.0", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/shared-ini-file-loader": "^4.4.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1528,15 +1525,15 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.4.5", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.5.tgz", - "integrity": "sha512-CMnzM9R2WqlqXQGtIlsHMEZfXKJVTIrqCNoSd/QpAyp+Dw0a1Vps13l6ma1fH8g7zSPNsA59B/kWgeylFuA/lw==", + "version": "4.4.7", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.4.7.tgz", + "integrity": "sha512-NELpdmBOO6EpZtWgQiHjoShs1kmweaiNuETUpuup+cmm/xJYjT4eUjfhrXRP4jCOaAsS3c3yPsP3B+K+/fyPCQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/querystring-builder": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/querystring-builder": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1544,12 +1541,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.5.tgz", - "integrity": "sha512-8iLN1XSE1rl4MuxvQ+5OSk/Zb5El7NJZ1td6Tn+8dQQHIjp59Lwl6bd0+nzw6SKm2wSSriH2v/I9LPzUic7EOg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz", + "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1557,12 +1554,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.5.tgz", - "integrity": "sha512-RlaL+sA0LNMp03bf7XPbFmT5gN+w3besXSWMkA8rcmxLSVfiEXElQi4O2IWwPfxzcHkxqrwBFMbngB8yx/RvaQ==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.7.tgz", + "integrity": "sha512-1r07pb994I20dD/c2seaZhoCuNYm0rWrvBxhCQ70brNh11M5Ml2ew6qJVo0lclB3jMIXirD4s2XRXRe7QEi0xA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1570,12 +1567,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.5.tgz", - "integrity": "sha512-y98otMI1saoajeik2kLfGyRp11e5U/iJYH/wLCh3aTV/XutbGT9nziKGkgCaMD1ghK7p6htHMm6b6scl9JRUWg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.7.tgz", + "integrity": 
"sha512-eKONSywHZxK4tBxe2lXEysh8wbBdvDWiA+RIuaxZSgCMmA0zMgoDpGLJhnyj+c0leOQprVnXOmcB4m+W9Rw7sg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "@smithy/util-uri-escape": "^4.2.0", "tslib": "^2.6.2" }, @@ -1584,12 +1581,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.5.tgz", - "integrity": "sha512-031WCTdPYgiQRYNPXznHXof2YM0GwL6SeaSyTH/P72M1Vz73TvCNH2Nq8Iu2IEPq9QP2yx0/nrw5YmSeAi/AjQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.7.tgz", + "integrity": "sha512-3X5ZvzUHmlSTHAXFlswrS6EGt8fMSIxX/c3Rm1Pni3+wYWB6cjGocmRIoqcQF9nU5OgGmL0u7l9m44tSUpfj9w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1597,24 +1594,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.5.tgz", - "integrity": "sha512-8fEvK+WPE3wUAcDvqDQG1Vk3ANLR8Px979te96m84CbKAjBVf25rPYSzb4xU4hlTyho7VhOGnh5i62D/JVF0JQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.7.tgz", + "integrity": "sha512-YB7oCbukqEb2Dlh3340/8g8vNGbs/QsNNRms+gv3N2AtZz9/1vSBx6/6tpwQpZMEJFs7Uq8h4mmOn48ZZ72MkA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0" + "@smithy/types": "^4.11.0" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.0.tgz", - "integrity": "sha512-5WmZ5+kJgJDjwXXIzr1vDTG+RhF9wzSODQBfkrQ2VVkYALKGvZX1lgVSxEkgicSAFnFhPj5rudJV0zoinqS0bA==", + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz", + "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1622,16 +1619,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.5", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.5.tgz", - "integrity": "sha512-xSUfMu1FT7ccfSXkoLl/QRQBi2rOvi3tiBZU2Tdy3I6cgvZ6SEi9QNey+lqps/sJRnogIS+lq+B1gxxbra2a/w==", + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.7.tgz", + "integrity": "sha512-9oNUlqBlFZFOSdxgImA6X5GFuzE7V2H7VG/7E70cdLhidFbdtvxxt81EHgykGK5vq5D3FafH//X+Oy31j3CKOg==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.0", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-middleware": "^4.2.5", + "@smithy/util-middleware": "^4.2.7", "@smithy/util-uri-escape": "^4.2.0", "@smithy/util-utf8": "^4.2.0", "tslib": "^2.6.2" @@ -1641,17 +1638,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.9.10", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.9.10.tgz", - "integrity": 
"sha512-Jaoz4Jw1QYHc1EFww/E6gVtNjhoDU+gwRKqXP6C3LKYqqH2UQhP8tMP3+t/ePrhaze7fhLE8vS2q6vVxBANFTQ==", + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.10.2.tgz", + "integrity": "sha512-D5z79xQWpgrGpAHb054Fn2CCTQZpog7JELbVQ6XAvXs5MNKWf28U9gzSBlJkOyMl9LA1TZEjRtwvGXfP0Sl90g==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.18.7", - "@smithy/middleware-endpoint": "^4.3.14", - "@smithy/middleware-stack": "^4.2.5", - "@smithy/protocol-http": "^5.3.5", - "@smithy/types": "^4.9.0", - "@smithy/util-stream": "^4.5.6", + "@smithy/core": "^3.20.0", + "@smithy/middleware-endpoint": "^4.4.1", + "@smithy/middleware-stack": "^4.2.7", + "@smithy/protocol-http": "^5.3.7", + "@smithy/types": "^4.11.0", + "@smithy/util-stream": "^4.5.8", "tslib": "^2.6.2" }, "engines": { @@ -1659,9 +1656,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.9.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.9.0.tgz", - "integrity": "sha512-MvUbdnXDTwykR8cB1WZvNNwqoWVaTRA0RLlLmf/cIFNMM2cKWz01X4Ly6SMC4Kks30r8tT3Cty0jmeWfiuyHTA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz", + "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -1671,13 +1668,13 @@ } }, "node_modules/@smithy/url-parser": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.5.tgz", - "integrity": "sha512-VaxMGsilqFnK1CeBX+LXnSuaMx4sTL/6znSZh2829txWieazdVxr54HmiyTsIbpOTLcf5nYpq9lpzmwRdxj6rQ==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.7.tgz", + "integrity": "sha512-/RLtVsRV4uY3qPWhBDsjwahAtt3x2IsMGnP5W1b2VZIe+qgCqkLxI1UOHDZp1Q1QSOrdOR32MF3Ph2JfWT1VHg==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/querystring-parser": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1748,14 +1745,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.13.tgz", - "integrity": "sha512-hlVLdAGrVfyNei+pKIgqDTxfu/ZI2NSyqj4IDxKd5bIsIqwR/dSlkxlPaYxFiIaDVrBy0he8orsFy+Cz119XvA==", + "version": "4.3.16", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.16.tgz", + "integrity": "sha512-/eiSP3mzY3TsvUOYMeL4EqUX6fgUOj2eUOU4rMMgVbq67TiRLyxT7Xsjxq0bW3OwuzK009qOwF0L2OgJqperAQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1763,17 +1760,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.16", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.16.tgz", - "integrity": "sha512-F1t22IUiJLHrxW9W1CQ6B9PN+skZ9cqSuzB18Eh06HrJPbjsyZ7ZHecAKw80DQtyGTRcVfeukKaCRYebFwclbg==", + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.19.tgz", + "integrity": 
"sha512-3a4+4mhf6VycEJyHIQLypRbiwG6aJvbQAeRAVXydMmfweEPnLLabRbdyo/Pjw8Rew9vjsh5WCdhmDaHkQnhhhA==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.3", - "@smithy/credential-provider-imds": "^4.2.5", - "@smithy/node-config-provider": "^4.3.5", - "@smithy/property-provider": "^4.2.5", - "@smithy/smithy-client": "^4.9.10", - "@smithy/types": "^4.9.0", + "@smithy/config-resolver": "^4.4.5", + "@smithy/credential-provider-imds": "^4.2.7", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/property-provider": "^4.2.7", + "@smithy/smithy-client": "^4.10.2", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1781,13 +1778,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.5.tgz", - "integrity": "sha512-3O63AAWu2cSNQZp+ayl9I3NapW1p1rR5mlVHcF6hAB1dPZUQFfRPYtplWX/3xrzWthPGj5FqB12taJJCfH6s8A==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz", + "integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.5", - "@smithy/types": "^4.9.0", + "@smithy/node-config-provider": "^4.3.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1807,12 +1804,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.5.tgz", - "integrity": "sha512-6Y3+rvBF7+PZOc40ybeZMcGln6xJGVeY60E7jy9Mv5iKpMJpHgRE6dKy9ScsVxvfAYuEX4Q9a65DQX90KaQ3bA==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz", + "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.9.0", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1820,13 +1817,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.5.tgz", - "integrity": "sha512-GBj3+EZBbN4NAqJ/7pAhsXdfzdlznOh8PydUijy6FpNIMnHPSMO2/rP4HKu+UFeikJxShERk528oy7GT79YiJg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.2.7.tgz", + "integrity": "sha512-SvDdsQyF5CIASa4EYVT02LukPHVzAgUA4kMAuZ97QJc2BpAqZfA4PINB8/KOoCXEw9tsuv/jQjMeaHFvxdLNGg==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/service-error-classification": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1834,14 +1831,14 @@ } }, "node_modules/@smithy/util-stream": { - "version": "4.5.6", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.6.tgz", - "integrity": "sha512-qWw/UM59TiaFrPevefOZ8CNBKbYEP6wBAIlLqxn3VAIo9rgnTNc4ASbVrqDmhuwI87usnjhdQrxodzAGFFzbRQ==", + "version": "4.5.8", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.8.tgz", + "integrity": "sha512-ZnnBhTapjM0YPGUSmOs0Mcg/Gg87k503qG4zU2v/+Js2Gu+daKOJMeqcQns8ajepY8tgzzfYxl6kQyZKml6O2w==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.6", - "@smithy/node-http-handler": "^4.4.5", - "@smithy/types": "^4.9.0", + "@smithy/fetch-http-handler": "^5.3.8", + 
"@smithy/node-http-handler": "^4.4.7", + "@smithy/types": "^4.11.0", "@smithy/util-base64": "^4.3.0", "@smithy/util-buffer-from": "^4.2.0", "@smithy/util-hex-encoding": "^4.2.0", @@ -1878,13 +1875,13 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.5", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.5.tgz", - "integrity": "sha512-Dbun99A3InifQdIrsXZ+QLcC0PGBPAdrl4cj1mTgJvyc9N2zf7QSxg8TBkzsCmGJdE3TLbO9ycwpY0EkWahQ/g==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.7.tgz", + "integrity": "sha512-vHJFXi9b7kUEpHWUCY3Twl+9NPOZvQ0SAi+Ewtn48mbiJk4JY9MZmKQjGB4SCvVb9WPiSphZJYY6RIbs+grrzw==", "license": "Apache-2.0", "dependencies": { - "@smithy/abort-controller": "^4.2.5", - "@smithy/types": "^4.9.0", + "@smithy/abort-controller": "^4.2.7", + "@smithy/types": "^4.11.0", "tslib": "^2.6.2" }, "engines": { @@ -1903,12 +1900,6 @@ "node": ">=18.0.0" } }, - "node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "license": "Python-2.0" - }, "node_modules/asn1": { "version": "0.2.6", "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", @@ -1918,12 +1909,6 @@ "safer-buffer": "~2.1.0" } }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "license": "MIT" - }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", @@ -1973,15 +1958,6 @@ "node": "20.x || 22.x || 23.x || 24.x || 25.x" } }, - "node_modules/big-integer": { - "version": "1.6.52", - "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", - "integrity": "sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg==", - "license": "Unlicense", - "engines": { - "node": ">=0.6" - } - }, "node_modules/bindings": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", @@ -2014,27 +1990,6 @@ "integrity": "sha512-OHawaAbjwx6rqICCKgSG0SAnT05bzd7ppyKLVUITZpANBaaMFBAsaNkto3LoQ31tyFP5kNujE8Cdx85G9VzOkw==", "license": "MIT" }, - "node_modules/bplist-parser": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/bplist-parser/-/bplist-parser-0.2.0.tgz", - "integrity": "sha512-z0M+byMThzQmD9NILRniCUXYsYpjwnlO8N5uCFaCqIOpqRsJCrQL9NK3JsD67CN5a08nF5oIL2bD6loTdHOuKw==", - "license": "MIT", - "dependencies": { - "big-integer": "^1.6.44" - }, - "engines": { - "node": ">= 5.10.0" - } - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, "node_modules/braces": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", @@ -2080,21 +2035,6 @@ "node": ">=10.0.0" } }, - "node_modules/bundle-name": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", - "integrity": 
"sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==", - "license": "MIT", - "dependencies": { - "run-applescript": "^5.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/chownr": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", @@ -2104,12 +2044,6 @@ "node": ">=18" } }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", - "license": "MIT" - }, "node_modules/cpu-features": { "version": "0.0.10", "resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz", @@ -2124,20 +2058,6 @@ "node": ">=10.0.0" } }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -2180,50 +2100,6 @@ "once": "^1.4.0" } }, - "node_modules/entities": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", - "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", - "license": "BSD-2-Clause", - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/env-paths": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-3.0.0.tgz", - "integrity": "sha512-dtJUTepzMW3Lm/NPxRf3wP4642UWhjL2sQxc+ym2YMj1m/H2zDNQOlezafzkHwn6sMstjHTwG6iQQsctDW/b1A==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/execa": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-7.2.0.tgz", - "integrity": "sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.1", - "human-signals": "^4.3.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^3.0.7", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": "^14.18.0 || ^16.14.0 || >=18.0.0" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, "node_modules/expand-template": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", @@ -2317,9 +2193,9 @@ "license": "MIT" }, "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", 
"license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", @@ -2330,12 +2206,6 @@ "node": ">=14.14" } }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "license": "ISC" - }, "node_modules/fuse.js": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/fuse.js/-/fuse.js-7.1.0.tgz", @@ -2345,45 +2215,12 @@ "node": ">=10" } }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/github-from-package": { "version": "0.0.0", "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", "integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==", "license": "MIT" }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -2402,15 +2239,6 @@ "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", "license": "ISC" }, - "node_modules/human-signals": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-4.3.1.tgz", - "integrity": "sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=14.18.0" - } - }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -2431,17 +2259,6 @@ ], "license": "BSD-3-Clause" }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2454,21 +2271,6 @@ "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", "license": "ISC" }, - "node_modules/is-docker": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-3.0.0.tgz", - "integrity": "sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==", - "license": "MIT", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-extglob": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", @@ -2490,24 +2292,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-inside-container": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-inside-container/-/is-inside-container-1.0.0.tgz", - "integrity": "sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==", - "license": "MIT", - "dependencies": { - "is-docker": "^3.0.0" - }, - "bin": { - "is-inside-container": "cli.js" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -2560,18 +2344,6 @@ "node": ">=8.6" } }, - "node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/mimic-response": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", @@ -2584,18 +2356,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", @@ -2666,56 +2426,6 @@ "wrappy": "1" } }, - "node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/open": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/open/-/open-9.1.0.tgz", - "integrity": "sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==", - "license": 
"MIT", - "dependencies": { - "default-browser": "^4.0.0", - "define-lazy-prop": "^3.0.0", - "is-inside-container": "^1.0.0", - "is-wsl": "^2.2.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/picomatch": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", @@ -2764,15 +2474,6 @@ "once": "^1.3.1" } }, - "node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -2844,110 +2545,6 @@ "node": ">=0.10.0" } }, - "node_modules/run-applescript": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/run-applescript/-/run-applescript-5.0.0.tgz", - "integrity": "sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==", - "license": "MIT", - "dependencies": { - "execa": "^5.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/run-applescript/node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/run-applescript/node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - 
"integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/run-applescript/node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/run-applescript/node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/run-applescript/node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -3009,33 +2606,6 @@ "node": ">=10" } }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "license": "ISC" - }, "node_modules/simple-concat": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", @@ -3081,34 +2651,6 @@ "simple-concat": "^1.0.0" } }, - "node_modules/spago": { - "version": "0.93.19", - "resolved": "https://registry.npmjs.org/spago/-/spago-0.93.19.tgz", - "integrity": "sha512-BOSwPQSbULxlFmTjf5YXrvQtvQjRsqHdcbHo60ENbj4W1N8yPlyWKHzgRiayi7VE4av+d0v6x1OBGGL5lO+vsQ==", - "license": "BSD-3-Clause", - "dependencies": { - "better-sqlite3": "^8.6.0", - "env-paths": "^3.0.0", - "fast-glob": "^3.2.11", - "fs-extra": "^10.0.0", - "fuse.js": "^6.5.3", - "glob": "^7.1.6", - "markdown-it": "^12.0.4", - "open": "^9.1.0", - "punycode": "^2.3.0", - "semver": "^7.3.5", - "spdx-expression-parse": "^3.0.1", - "ssh2": "^1.14.0", - "supports-color": "^9.2.3", - "tar": "^6.1.11", - "tmp": "^0.2.1", - "xhr2": "^0.2.1", - "yaml": "^2.1.1" - }, - "bin": { - "spago": "bin/bundle.js" - } - }, "node_modules/spdx-exceptions": { "version": "2.5.0", "resolved": 
"https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz", @@ -3157,18 +2699,6 @@ "safe-buffer": "~5.2.0" } }, - "node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -3190,18 +2720,6 @@ ], "license": "MIT" }, - "node_modules/supports-color": { - "version": "9.4.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz", - "integrity": "sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, "node_modules/tar": { "version": "7.5.2", "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", @@ -3252,18 +2770,6 @@ "node": ">=6" } }, - "node_modules/titleize": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz", - "integrity": "sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/tmp": { "version": "0.2.5", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", @@ -3309,12 +2815,6 @@ "integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==", "license": "Unlicense" }, - "node_modules/uc.micro": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-1.0.6.tgz", - "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==", - "license": "MIT" - }, "node_modules/universal-user-agent": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", @@ -3330,51 +2830,18 @@ "node": ">= 10.0.0" } }, - "node_modules/untildify": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz", - "integrity": "sha512-KK8xQ1mkzZeg9inewmFVDNkg3l5LUhoq9kN6iWYB/CC9YMG8HA+c1Q8HwDe6dEX7kErrEVNVBO3fWsVq5iDgtw==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", "license": "MIT" }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": 
"sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, - "node_modules/xhr2": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/xhr2/-/xhr2-0.2.1.tgz", - "integrity": "sha512-sID0rrVCqkVNUn8t6xuv9+6FViXjUVXq8H5rWOH2rz9fDNQEd4g0EA2XlcEdJXRz5BMEn4O1pJFdT+z4YHhoWw==", - "license": "MIT", - "engines": { - "node": ">= 6" - } - }, "node_modules/yallist": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", diff --git a/package.json b/package.json index 5066e42c0..76bc4e96e 100644 --- a/package.json +++ b/package.json @@ -6,8 +6,5 @@ "app", "foreign", "lib" - ], - "dependencies": { - "spago": "^0.93.19" - } + ] } From f94399117f30304d6608df4bf8c65fe9adb06460 Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Fri, 19 Dec 2025 10:58:11 -0500 Subject: [PATCH 14/19] fix smoke test --- nix/test/config.nix | 65 +++++++++++++++++++++++---------------------- nix/test/smoke.nix | 34 ++++++++++++++++++++++-- 2 files changed, 65 insertions(+), 34 deletions(-) diff --git a/nix/test/config.nix b/nix/test/config.nix index 66813fe5b..454747b30 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -357,38 +357,39 @@ let ''; # Script to set up git fixtures - setupGitFixtures = pkgs.writeShellScriptBin "setup-git-fixtures" '' - set -e - FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" - - # Remove any existing fixtures (they may have wrong permissions from nix store copy) - rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true - - mkdir -p "$FIXTURES_DIR/purescript" - - # Use env vars instead of --global to avoid polluting user's git config - export GIT_AUTHOR_NAME="pacchettibotti" - export GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" - export GIT_COMMITTER_NAME="pacchettibotti" - export GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" - - # Copy fixtures and make writable (nix store files are read-only) - cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" - cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" - chmod -R u+w "$FIXTURES_DIR/purescript" - - for repo in "$FIXTURES_DIR"/purescript/*/; do - cd "$repo" - git init -b master && git add . 
- GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ - GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ - git commit -m "Fixture commit" - git config receive.denyCurrentBranch ignore - done - - git -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 - git -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 - ''; + setupGitFixtures = pkgs.writeShellApplication { + name = "setup-git-fixtures"; + runtimeInputs = [ pkgs.git ]; + text = '' + FIXTURES_DIR="''${1:-${defaultStateDir}/repo-fixtures}" + + # Run git as pacchettibotti + gitbot() { + GIT_AUTHOR_NAME="pacchettibotti" GIT_AUTHOR_EMAIL="pacchettibotti@purescript.org" \ + GIT_COMMITTER_NAME="pacchettibotti" GIT_COMMITTER_EMAIL="pacchettibotti@purescript.org" \ + git "$@" + } + + # Remove any existing fixtures (they may have wrong permissions from nix store copy) + rm -rf "$FIXTURES_DIR/purescript" 2>/dev/null || true + mkdir -p "$FIXTURES_DIR/purescript" + + # Copy fixtures and make writable (nix store files are read-only) + cp -r ${rootPath}/app/fixtures/{registry-index,registry,package-sets} "$FIXTURES_DIR/purescript/" + cp -r ${rootPath}/app/fixtures/github-packages/effect-4.0.0 "$FIXTURES_DIR/purescript/purescript-effect" + chmod -R u+w "$FIXTURES_DIR/purescript" + + for repo in "$FIXTURES_DIR"/purescript/*/; do + cd "$repo" + git init -b master && git add . + gitbot commit -m "Fixture commit" + git config receive.denyCurrentBranch ignore + done + + gitbot -C "$FIXTURES_DIR/purescript/package-sets" tag -m "psc-0.15.9-20230105" psc-0.15.9-20230105 + gitbot -C "$FIXTURES_DIR/purescript/purescript-effect" tag -m "v4.0.0" v4.0.0 + ''; + }; # Publish payload for testing publishPayload = pkgs.writeText "publish-effect.json" ( diff --git a/nix/test/smoke.nix b/nix/test/smoke.nix index 53addca88..1365d8283 100644 --- a/nix/test/smoke.nix +++ b/nix/test/smoke.nix @@ -9,6 +9,7 @@ # - systemd services start and stay running # - The server responds to basic HTTP requests # - Database migrations run successfully +# - The job executor starts without errors { pkgs, lib, @@ -25,11 +26,14 @@ else testConfig = import ./config.nix { inherit pkgs lib rootPath; }; envVars = testConfig.testEnv; stateDir = "/var/lib/registry-server"; + repoFixturesDir = "${stateDir}/repo-fixtures"; in pkgs.testers.nixosTest { name = "registry-smoke"; testScript = '' + import time + # Start the registry VM registry.start() @@ -54,6 +58,14 @@ else # Check that the service is still running (didn't crash) registry.succeed("systemctl is-active server.service") + # Give the job executor a moment to start and potentially fail + time.sleep(2) + + # Check that the job executor started successfully and didn't fail + logs = registry.succeed("journalctl -u server.service --no-pager") + assert "Job executor failed:" not in logs, f"Job executor failed on startup. Logs:\n{logs}" + assert "Starting Job Executor" in logs, f"Job executor did not start. 
Logs:\n{logs}" + print("✓ Smoke test passed: server deployed and responding") ''; @@ -62,7 +74,8 @@ else (rootPath + "/nix/registry-server.nix") ]; - nixpkgs.overlays = overlays; + # Apply the git mock overlay on top of the standard overlays + nixpkgs.overlays = overlays ++ [ testConfig.gitMockOverlay ]; virtualisation = { graphics = false; @@ -70,12 +83,29 @@ else memorySize = 2048; }; + # Set up git fixtures before the server starts + systemd.services.setup-git-fixtures = { + description = "Set up git fixtures for smoke test"; + wantedBy = [ "server.service" ]; + before = [ "server.service" ]; + serviceConfig = { + Type = "oneshot"; + RemainAfterExit = true; + }; + script = '' + ${testConfig.setupGitFixtures}/bin/setup-git-fixtures ${repoFixturesDir} + ''; + }; + services.registry-server = { enable = true; host = "localhost"; port = lib.toInt envVars.SERVER_PORT; enableCerts = false; - inherit stateDir envVars; + inherit stateDir; + envVars = envVars // { + REPO_FIXTURES_DIR = repoFixturesDir; + }; }; }; } From ea420fa07285a2a4b9a772af5769ceb712a47762 Mon Sep 17 00:00:00 2001 From: Fabrizio Ferrai Date: Mon, 22 Dec 2025 16:38:51 +0100 Subject: [PATCH 15/19] Split package jobs into separate tables, return all data from the job endpoint --- app-e2e/src/Test/E2E/Publish.purs | 10 +- app/src/App/API.purs | 4 +- app/src/App/Effect/Db.purs | 93 +++- app/src/App/SQLite.js | 99 ++-- app/src/App/SQLite.purs | 460 ++++++++++++++---- app/src/App/Server/JobExecutor.purs | 39 +- app/src/App/Server/MatrixBuilder.purs | 4 +- app/src/App/Server/Router.purs | 42 +- app/test/App/API.purs | 2 +- ...20240914171030_create_job_queue_tables.sql | 29 +- lib/src/API/V1.purs | 206 +++++++- lib/src/JobType.purs | 27 - scripts/src/PackageDeleter.purs | 2 +- test-utils/src/Registry/Test/E2E/Client.purs | 2 +- 14 files changed, 763 insertions(+), 256 deletions(-) delete mode 100644 lib/src/JobType.purs diff --git a/app-e2e/src/Test/E2E/Publish.purs b/app-e2e/src/Test/E2E/Publish.purs index 051d1931b..d06289340 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -68,18 +68,18 @@ spec = do job <- Client.pollJob config jobId -- If job failed, print logs for debugging - unless job.success do + unless (V1.jobInfo job).success do Console.log "Job failed! 
Logs:" - let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) job.logs + let logMessages = map (\l -> "[" <> V1.printLogLevel l.level <> "] " <> l.message) (V1.jobInfo job).logs Console.log $ String.joinWith "\n" logMessages -- Verify job completed successfully - when (not job.success) do - let errorLogs = Array.filter (\l -> l.level == V1.Error) job.logs + when (not (V1.jobInfo job).success) do + let errorLogs = Array.filter (\l -> l.level == V1.Error) (V1.jobInfo job).logs let errorMessages = map _.message errorLogs Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages - Assert.shouldSatisfy job.finishedAt isJust + Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust -- Assert.shouldEqual job.jobType JobType.PublishJob -- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") -- Assert.shouldEqual job.ref "v4.0.0" diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 1e69a129e..06d1ed943 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -344,7 +344,7 @@ type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + -- The legacyIndex argument contains the unverified manifests produced by the -- legacy importer; these manifests can be used on legacy packages to conform -- them to the registry rule that transitive dependencies are not allowed. -publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe (Map PackageName Range)) +publish :: forall r. Maybe Solver.TransitivizedRegistry -> PublishData -> Run (PublishEffects + r) (Maybe { dependencies :: Map PackageName Range, version :: Version }) publish maybeLegacyIndex payload = do let printedName = PackageName.print payload.name @@ -806,7 +806,7 @@ publish maybeLegacyIndex payload = do Comment.comment "Wrote completed metadata to the registry!" FS.Extra.remove tmp - pure $ Just (un Manifest manifest).dependencies + pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } -- | Verify the build plan for the package. If the user provided a build plan, -- | we ensure that the provided versions are within the ranges listed in the diff --git a/app/src/App/Effect/Db.purs b/app/src/App/Effect/Db.purs index 1e90a8163..031c91a62 100644 --- a/app/src/App/Effect/Db.purs +++ b/app/src/App/Effect/Db.purs @@ -5,10 +5,25 @@ import Registry.App.Prelude import Data.Array as Array import Data.DateTime (DateTime) import Data.String as String -import Registry.API.V1 (JobId, LogLevel, LogLine) +import Registry.API.V1 (Job, JobId, LogLevel, LogLine) import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.SQLite (FinishJob, InsertMatrixJob, InsertPackageJob, InsertPackageSetJob, JobInfo, MatrixJobDetails, PackageJobDetails, PackageSetJobDetails, SQLite, StartJob) +import Registry.App.SQLite + ( FinishJob + , InsertMatrixJob + , InsertPackageSetJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails + , SQLite + , SelectJobRequest + , StartJob + , TransferJobDetails + , UnpublishJobDetails + ) import Registry.App.SQLite as SQLite import Run (EFFECT, Run) import Run as Run @@ -25,17 +40,21 @@ import Run.Except as Except -- be part of app code we want to test. 
data Db a - = InsertPackageJob InsertPackageJob (JobId -> a) + = InsertPublishJob InsertPublishJob (JobId -> a) + | InsertUnpublishJob InsertUnpublishJob (JobId -> a) + | InsertTransferJob InsertTransferJob (JobId -> a) | InsertMatrixJob InsertMatrixJob (JobId -> a) | InsertPackageSetJob InsertPackageSetJob (JobId -> a) | FinishJob FinishJob a | StartJob StartJob a - | SelectJobInfo JobId (Either String (Maybe JobInfo) -> a) - | SelectNextPackageJob (Either String (Maybe PackageJobDetails) -> a) + | SelectJob SelectJobRequest (Either String (Maybe Job) -> a) + | SelectNextPublishJob (Either String (Maybe PublishJobDetails) -> a) + | SelectNextUnpublishJob (Either String (Maybe UnpublishJobDetails) -> a) + | SelectNextTransferJob (Either String (Maybe TransferJobDetails) -> a) | SelectNextMatrixJob (Either String (Maybe MatrixJobDetails) -> a) | SelectNextPackageSetJob (Either String (Maybe PackageSetJobDetails) -> a) | InsertLogLine LogLine a - | SelectLogsByJob JobId LogLevel (Maybe DateTime) (Array LogLine -> a) + | SelectLogsByJob JobId LogLevel DateTime (Array LogLine -> a) | ResetIncompleteJobs a derive instance Functor Db @@ -51,7 +70,7 @@ insertLog :: forall r. LogLine -> Run (DB + r) Unit insertLog log = Run.lift _db (InsertLogLine log unit) -- | Select all logs for a given job, filtered by loglevel. -selectLogsByJob :: forall r. JobId -> LogLevel -> Maybe DateTime -> Run (DB + r) (Array LogLine) +selectLogsByJob :: forall r. JobId -> LogLevel -> DateTime -> Run (DB + r) (Array LogLine) selectLogsByJob jobId logLevel since = Run.lift _db (SelectLogsByJob jobId logLevel since identity) -- | Set a job in the database to the 'finished' state. @@ -59,12 +78,20 @@ finishJob :: forall r. FinishJob -> Run (DB + r) Unit finishJob job = Run.lift _db (FinishJob job unit) -- | Select a job by ID from the database. -selectJobInfo :: forall r. JobId -> Run (DB + EXCEPT String + r) (Maybe JobInfo) -selectJobInfo jobId = Run.lift _db (SelectJobInfo jobId identity) >>= Except.rethrow +selectJob :: forall r. SelectJobRequest -> Run (DB + EXCEPT String + r) (Maybe Job) +selectJob request = Run.lift _db (SelectJob request identity) >>= Except.rethrow --- | Insert a new package job into the database. -insertPackageJob :: forall r. InsertPackageJob -> Run (DB + r) JobId -insertPackageJob job = Run.lift _db (InsertPackageJob job identity) +-- | Insert a new publish job into the database. +insertPublishJob :: forall r. InsertPublishJob -> Run (DB + r) JobId +insertPublishJob job = Run.lift _db (InsertPublishJob job identity) + +-- | Insert a new unpublish job into the database. +insertUnpublishJob :: forall r. InsertUnpublishJob -> Run (DB + r) JobId +insertUnpublishJob job = Run.lift _db (InsertUnpublishJob job identity) + +-- | Insert a new transfer job into the database. +insertTransferJob :: forall r. InsertTransferJob -> Run (DB + r) JobId +insertTransferJob job = Run.lift _db (InsertTransferJob job identity) -- | Insert a new matrix job into the database. insertMatrixJob :: forall r. InsertMatrixJob -> Run (DB + r) JobId @@ -78,9 +105,17 @@ insertPackageSetJob job = Run.lift _db (InsertPackageSetJob job identity) startJob :: forall r. StartJob -> Run (DB + r) Unit startJob job = Run.lift _db (StartJob job unit) --- | Select the next package job from the database. -selectNextPackageJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PackageJobDetails) -selectNextPackageJob = Run.lift _db (SelectNextPackageJob identity) >>= Except.rethrow +-- | Select the next publish job from the database. 
+selectNextPublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe PublishJobDetails) +selectNextPublishJob = Run.lift _db (SelectNextPublishJob identity) >>= Except.rethrow + +-- | Select the next unpublish job from the database. +selectNextUnpublishJob :: forall r. Run (DB + EXCEPT String + r) (Maybe UnpublishJobDetails) +selectNextUnpublishJob = Run.lift _db (SelectNextUnpublishJob identity) >>= Except.rethrow + +-- | Select the next transfer job from the database. +selectNextTransferJob :: forall r. Run (DB + EXCEPT String + r) (Maybe TransferJobDetails) +selectNextTransferJob = Run.lift _db (SelectNextTransferJob identity) >>= Except.rethrow -- | Select the next matrix job from the database. selectNextMatrixJob :: forall r. Run (DB + EXCEPT String + r) (Maybe MatrixJobDetails) @@ -102,8 +137,16 @@ type SQLiteEnv = { db :: SQLite } -- | Interpret DB by interacting with the SQLite database on disk. handleSQLite :: forall r a. SQLiteEnv -> Db a -> Run (LOG + EFFECT + r) a handleSQLite env = case _ of - InsertPackageJob job reply -> do - result <- Run.liftEffect $ SQLite.insertPackageJob env.db job + InsertPublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertPublishJob env.db job + pure $ reply result + + InsertUnpublishJob job reply -> do + result <- Run.liftEffect $ SQLite.insertUnpublishJob env.db job + pure $ reply result + + InsertTransferJob job reply -> do + result <- Run.liftEffect $ SQLite.insertTransferJob env.db job pure $ reply result InsertMatrixJob job reply -> do @@ -122,12 +165,20 @@ handleSQLite env = case _ of Run.liftEffect $ SQLite.startJob env.db job pure next - SelectJobInfo jobId reply -> do - result <- Run.liftEffect $ SQLite.selectJobInfo env.db jobId + SelectJob request reply -> do + result <- Run.liftEffect $ SQLite.selectJob env.db request + pure $ reply result + + SelectNextPublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextPublishJob env.db + pure $ reply result + + SelectNextUnpublishJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextUnpublishJob env.db pure $ reply result - SelectNextPackageJob reply -> do - result <- Run.liftEffect $ SQLite.selectNextPackageJob env.db + SelectNextTransferJob reply -> do + result <- Run.liftEffect $ SQLite.selectNextTransferJob env.db pure $ reply result SelectNextMatrixJob reply -> do diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index 9fbbeeec9..bbad2ae78 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -2,7 +2,9 @@ import Database from "better-sqlite3"; const JOB_INFO_TABLE = 'job_info' const LOGS_TABLE = 'logs' -const PACKAGE_JOBS_TABLE = 'package_jobs'; +const PUBLISH_JOBS_TABLE = 'publish_jobs'; +const UNPUBLISH_JOBS_TABLE = 'unpublish_jobs'; +const TRANSFER_JOBS_TABLE = 'transfer_jobs'; const MATRIX_JOBS_TABLE = 'matrix_jobs'; const PACKAGE_SET_JOBS_TABLE = 'package_set_jobs'; @@ -66,9 +68,19 @@ const _insertJob = (db, table, columns, job) => { return insert(job); }; -export const insertPackageJobImpl = (db, job) => { - const columns = ['jobId', 'jobType', 'packageName', 'payload'] - return _insertJob(db, PACKAGE_JOBS_TABLE, columns, job); +export const insertPublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, PUBLISH_JOBS_TABLE, columns, job); +}; + +export const insertUnpublishJobImpl = (db, job) => { + const columns = ['jobId', 'packageName', 'packageVersion', 'payload'] + return _insertJob(db, UNPUBLISH_JOBS_TABLE, columns, job); +}; + +export const 
insertTransferJobImpl = (db, job) => {
+  const columns = ['jobId', 'packageName', 'payload']
+  return _insertJob(db, TRANSFER_JOBS_TABLE, columns, job);
 };
 
 export const insertMatrixJobImpl = (db, job) => {
@@ -81,43 +93,45 @@ export const insertPackageSetJobImpl = (db, job) => {
   return _insertJob(db, PACKAGE_SET_JOBS_TABLE, columns, job);
 };
 
-export const selectNextPackageJobImpl = (db) => {
-  const stmt = db.prepare(`
-    SELECT job.*, info.createdAt, info.startedAt
-    FROM ${PACKAGE_JOBS_TABLE} job
-    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
-    WHERE info.finishedAt IS NULL
-    AND info.startedAt IS NULL
-    ORDER BY info.createdAt DESC
-    LIMIT 1
-  `);
-  return stmt.get();
+const _selectJob = (db, { table, jobId }) => {
+  let query = `
+    SELECT job.*, info.*
+    FROM ${table} job
+    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
+  `;
+
+  const params = [];
+  if (jobId === null) {
+    query += ` WHERE info.finishedAt IS NULL AND info.startedAt IS NULL`;
+  } else {
+    query += ` WHERE info.jobId = ?`;
+    params.push(jobId);
+  }
+
+  query += ` ORDER BY info.createdAt ASC LIMIT 1`;
+  const stmt = db.prepare(query);
+
+  return stmt.get(...params);
+}
+
+export const selectPublishJobImpl = (db, jobId) => {
+  return _selectJob(db, { table: PUBLISH_JOBS_TABLE, jobId });
 };
 
-export const selectNextMatrixJobImpl = (db) => {
-  const stmt = db.prepare(`
-    SELECT job.*, info.createdAt, info.startedAt
-    FROM ${MATRIX_JOBS_TABLE} job
-    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
-    WHERE info.finishedAt IS NULL
-    AND info.startedAt IS NULL
-    ORDER BY info.createdAt DESC
-    LIMIT 1
-  `);
-  return stmt.get();
+export const selectUnpublishJobImpl = (db, jobId) => {
+  return _selectJob(db, { table: UNPUBLISH_JOBS_TABLE, jobId });
 };
 
-export const selectNextPackageSetJobImpl = (db) => {
-  const stmt = db.prepare(`
-    SELECT job.*, info.createdAt, info.startedAt
-    FROM ${PACKAGE_SET_JOBS_TABLE} job
-    JOIN ${JOB_INFO_TABLE} info ON job.jobId = info.jobId
-    WHERE info.finishedAt IS NULL
-    AND info.startedAt IS NULL
-    ORDER BY info.createdAt DESC
-    LIMIT 1
-  `);
-  return stmt.get();
+export const selectTransferJobImpl = (db, jobId) => {
+  return _selectJob(db, { table: TRANSFER_JOBS_TABLE, jobId });
+};
+
+export const selectMatrixJobImpl = (db, jobId) => {
+  return _selectJob(db, { table: MATRIX_JOBS_TABLE, jobId });
+};
+
+export const selectPackageSetJobImpl = (db, jobId) => {
+  return _selectJob(db, { table: PACKAGE_SET_JOBS_TABLE, jobId });
 };
 
 export const startJobImpl = (db, args) => {
@@ -161,18 +175,10 @@ export const insertLogLineImpl = (db, logLine) => {
 export const selectLogsByJobImpl = (db, jobId, logLevel, since) => {
   let query = `
     SELECT * FROM ${LOGS_TABLE}
-    WHERE jobId = ? AND level >= ?
+    WHERE jobId = ? AND level >= ? AND timestamp >= ?
+    ORDER BY timestamp ASC LIMIT 100
   `;
-  const params = [jobId, logLevel];
-
-  if (since !== null) {
-    query += ' AND timestamp >= ?';
-    params.push(since);
-  }
-
-  query += ' ORDER BY timestamp ASC';
-
   const stmt = db.prepare(query);
-  return stmt.all(...params);
+  return stmt.all(jobId, logLevel, since);
 };
diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs
index 09f91f612..814b2b82c 100644
--- a/app/src/App/SQLite.purs
+++ b/app/src/App/SQLite.purs
@@ -4,49 +4,61 @@
 -- | nicer interface with PureScript types for higher-level modules to use.
module Registry.App.SQLite - ( SQLite - , ConnectOptions - , connect - , JobInfo - , selectJobInfo - , InsertPackageJob - , insertPackageJob + ( ConnectOptions + , FinishJob , InsertMatrixJob - , insertMatrixJob , InsertPackageSetJob - , insertPackageSetJob - , FinishJob - , finishJob + , InsertPublishJob + , InsertTransferJob + , InsertUnpublishJob + , JobInfo + , MatrixJobDetails + , PackageSetJobDetails + , PublishJobDetails + , SQLite + , SelectJobRequest , StartJob - , startJob - , resetIncompleteJobs + , TransferJobDetails + , UnpublishJobDetails + , connect + , finishJob , insertLogLine + , insertMatrixJob + , insertPackageSetJob + , insertPublishJob + , insertTransferJob + , insertUnpublishJob + , resetIncompleteJobs + , selectJob , selectLogsByJob - , PackageJobDetails - , selectNextPackageJob - , MatrixJobDetails , selectNextMatrixJob - , PackageSetJobDetails , selectNextPackageSetJob + , selectNextPublishJob + , selectNextTransferJob + , selectNextUnpublishJob + , startJob ) where import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError +import Control.Monad.Except (runExceptT) import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable +import Data.String as String import Data.UUID.Random as UUID import Effect.Uncurried (EffectFn1, EffectFn2, EffectFn4) import Effect.Uncurried as Uncurried -import Registry.API.V1 (JobId(..), LogLevel, LogLine) +import Record as Record +import Registry.API.V1 (Job(..), JobId(..), LogLevel(..), LogLine) import Registry.API.V1 as API.V1 import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format -import Registry.JobType as JobType -import Registry.Operation (PackageOperation, PackageSetOperation) +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData, TransferData, UnpublishData) import Registry.Operation as Operation import Registry.PackageName as PackageName +import Registry.SSH (Signature) import Registry.Version as Version -- | An active database connection acquired with `connect` @@ -83,29 +95,26 @@ type JSJobInfo = , success :: Int } -jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo -jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do - created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) - finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) - isSuccess <- case success of - 0 -> Right false - 1 -> Right true - _ -> Left $ "Invalid success value " <> show success - pure - { jobId: JobId jobId - , createdAt: created - , startedAt: started - , finishedAt: finished - , success: isSuccess - } +-- jobInfoFromJSRep :: JSJobInfo -> Either String JobInfo +-- jobInfoFromJSRep { jobId, createdAt, startedAt, finishedAt, success } = do +-- created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt +-- started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) +-- finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) +-- isSuccess <- toSuccess success +-- pure +-- { jobId: JobId jobId +-- , createdAt: created +-- , startedAt: started +-- , finishedAt: finished +-- , success: isSuccess +-- } foreign import selectJobInfoImpl :: EffectFn2 SQLite String (Nullable JSJobInfo) -selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) -selectJobInfo 
db (JobId jobId) = do - maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId - pure $ traverse jobInfoFromJSRep maybeJobInfo +-- selectJobInfo :: SQLite -> JobId -> Effect (Either String (Maybe JobInfo)) +-- selectJobInfo db (JobId jobId) = do +-- maybeJobInfo <- map toMaybe $ Uncurried.runEffectFn2 selectJobInfoImpl db jobId +-- pure $ traverse jobInfoFromJSRep maybeJobInfo finishJob :: SQLite -> FinishJob -> Effect Unit finishJob db = Uncurried.runEffectFn2 finishJobImpl db <<< finishJobToJSRep @@ -146,7 +155,7 @@ type JSFinishJob = finishJobToJSRep :: FinishJob -> JSFinishJob finishJobToJSRep { jobId, success, finishedAt } = { jobId: un JobId jobId - , success: if success then 1 else 0 + , success: fromSuccess success , finishedAt: DateTime.format Internal.Format.iso8601DateTime finishedAt } @@ -162,80 +171,325 @@ newJobId = do id <- UUID.make pure $ JobId $ UUID.toString id +fromSuccess :: Boolean -> Int +fromSuccess success = if success then 1 else 0 + +toSuccess :: Int -> Either String Boolean +toSuccess success = case success of + 0 -> Right false + 1 -> Right true + _ -> Left $ "Invalid success value " <> show success + +type SelectJobRequest = + { level :: Maybe LogLevel + , since :: DateTime + , jobId :: JobId + } + +selectJob :: SQLite -> SelectJobRequest -> Effect (Either String (Maybe Job)) +selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do + let logLevel = fromMaybe Error maybeLogLevel + { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since + case fail of + [] -> runExceptT + ( selectPublishJob logs + <|> selectMatrixJob logs + <|> selectTransferJob logs + <|> selectPackageSetJob logs + <|> selectUnpublishJob logs + ) + _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail + where + selectPublishJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PublishJob <<< Record.merge { logs, jobType: Proxy :: _ "publish" }) + <<< publishJobDetailsFromJSRep + ) + maybeJobDetails + + selectUnpublishJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (UnpublishJob <<< Record.merge { logs, jobType: Proxy :: _ "unpublish" }) + <<< unpublishJobDetailsFromJSRep + ) + maybeJobDetails + + selectTransferJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (TransferJob <<< Record.merge { logs, jobType: Proxy :: _ "transfer" }) + <<< transferJobDetailsFromJSRep + ) + maybeJobDetails + + selectMatrixJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (MatrixJob <<< Record.merge { logs, jobType: Proxy :: _ "matrix" }) + <<< matrixJobDetailsFromJSRep + ) + maybeJobDetails + + selectPackageSetJob logs = ExceptT do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db (Nullable.notNull jobId) + pure $ traverse + ( map (PackageSetJob <<< Record.merge { logs, jobType: Proxy :: _ "packageset" }) + <<< packageSetJobDetailsFromJSRep + ) + maybeJobDetails + -------------------------------------------------------------------------------- --- package_jobs table +-- publish_jobs table -type PackageJobDetails = +type PublishJobDetails = { jobId :: JobId - , packageName :: PackageName 
- , payload :: PackageOperation , createdAt :: DateTime , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + } + +type JSPublishJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , packageVersion :: String + , payload :: String } -type JSPackageJobDetails = +publishJobDetailsFromJSRep :: JSPublishJobDetails -> Either String PublishJobDetails +publishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + version <- Version.parse packageVersion + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.publishCodec payload + pure + { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , packageVersion: version + , payload: parsed + } + +foreign import selectPublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPublishJobDetails) + +selectNextPublishJob :: SQLite -> Effect (Either String (Maybe PublishJobDetails)) +selectNextPublishJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db Nullable.null + pure $ traverse publishJobDetailsFromJSRep maybeJobDetails + +type InsertPublishJob = + { payload :: PublishData + } + +type JSInsertPublishJob = { jobId :: String , packageName :: String + , packageVersion :: String , payload :: String , createdAt :: String + } + +insertPublishJobToJSRep :: JobId -> DateTime -> InsertPublishJob -> JSInsertPublishJob +insertPublishJobToJSRep jobId now { payload } = + { jobId: un JobId jobId + , packageName: PackageName.print payload.name + , packageVersion: Version.print payload.version + , payload: stringifyJson Operation.publishCodec payload + , createdAt: DateTime.format Internal.Format.iso8601DateTime now + } + +foreign import insertPublishJobImpl :: EffectFn2 SQLite JSInsertPublishJob Unit + +-- | Insert a new package job, ie. a publish, unpublish, or transfer. 
+insertPublishJob :: SQLite -> InsertPublishJob -> Effect JobId
+insertPublishJob db job = do
+  jobId <- newJobId
+  now <- nowUTC
+  Uncurried.runEffectFn2 insertPublishJobImpl db $ insertPublishJobToJSRep jobId now job
+  pure jobId
+
+--------------------------------------------------------------------------------
+-- unpublish_jobs table
+
+type UnpublishJobDetails =
+  { jobId :: JobId
+  , createdAt :: DateTime
+  , startedAt :: Maybe DateTime
+  , finishedAt :: Maybe DateTime
+  , success :: Boolean
+  , packageName :: PackageName
+  , packageVersion :: Version
+  , payload :: AuthenticatedData
+  }
+
+type JSUnpublishJobDetails =
+  { jobId :: String
+  , createdAt :: String
+  , startedAt :: Nullable String
+  , finishedAt :: Nullable String
+  , success :: Int
+  , packageName :: String
+  , packageVersion :: String
+  , payload :: String
+  }
+
+unpublishJobDetailsFromJSRep :: JSUnpublishJobDetails -> Either String UnpublishJobDetails
+unpublishJobDetailsFromJSRep { jobId, packageName, packageVersion, payload, createdAt, startedAt, finishedAt, success } = do
+  created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt
+  started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt)
+  finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt)
+  s <- toSuccess success
+  name <- PackageName.parse packageName
+  version <- Version.parse packageVersion
+  parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload
+  pure
+    { jobId: JobId jobId
+    , createdAt: created
+    , startedAt: started
+    , finishedAt: finished
+    , success: s
+    , packageName: name
+    , packageVersion: version
+    , payload: parsed
+    }
+
+foreign import selectUnpublishJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSUnpublishJobDetails)
+
+selectNextUnpublishJob :: SQLite -> Effect (Either String (Maybe UnpublishJobDetails))
+selectNextUnpublishJob db = do
+  maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectUnpublishJobImpl db Nullable.null
+  pure $ traverse unpublishJobDetailsFromJSRep maybeJobDetails
+
+type InsertUnpublishJob =
+  { payload :: UnpublishData
+  , rawPayload :: String
+  , signature :: Signature
+  }
+
+type JSInsertUnpublishJob =
+  { jobId :: String
+  , packageName :: String
+  , packageVersion :: String
+  , payload :: String
+  , createdAt :: String
+  }
+
+insertUnpublishJobToJSRep :: JobId -> DateTime -> InsertUnpublishJob -> JSInsertUnpublishJob
+insertUnpublishJobToJSRep jobId now { payload, rawPayload, signature } =
+  { jobId: un JobId jobId
+  , packageName: PackageName.print payload.name
+  , packageVersion: Version.print payload.version
+  , payload: stringifyJson Operation.authenticatedCodec
+      { payload: Operation.Unpublish payload
+      , rawPayload
+      , signature
+      }
+  , createdAt: DateTime.format Internal.Format.iso8601DateTime now
+  }
+
+foreign import insertUnpublishJobImpl :: EffectFn2 SQLite JSInsertUnpublishJob Unit
+
+-- | Insert a new unpublish job into the database.
+insertUnpublishJob :: SQLite -> InsertUnpublishJob -> Effect JobId +insertUnpublishJob db job = do + jobId <- newJobId + now <- nowUTC + Uncurried.runEffectFn2 insertUnpublishJobImpl db $ insertUnpublishJobToJSRep jobId now job + pure jobId + +-------------------------------------------------------------------------------- +-- transfer_jobs table + +type TransferJobDetails = + { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , packageName :: PackageName + , payload :: AuthenticatedData + } + +type JSTransferJobDetails = + { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , packageName :: String + , payload :: String + } + +transferJobDetailsFromJSRep :: JSTransferJobDetails -> Either String TransferJobDetails +transferJobDetailsFromJSRep { jobId, packageName, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + name <- PackageName.parse packageName + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.authenticatedCodec payload + pure + { jobId: JobId jobId , createdAt: created , startedAt: started + , finishedAt: finished + , success: s + , packageName: name + , payload: parsed } -foreign import selectNextPackageJobImpl :: EffectFn1 SQLite (Nullable JSPackageJobDetails) +foreign import selectTransferJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSTransferJobDetails) -selectNextPackageJob :: SQLite -> Effect (Either String (Maybe PackageJobDetails)) -selectNextPackageJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageJobImpl db - pure $ traverse packageJobDetailsFromJSRep maybeJobDetails +selectNextTransferJob :: SQLite -> Effect (Either String (Maybe TransferJobDetails)) +selectNextTransferJob db = do + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectTransferJobImpl db Nullable.null + pure $ traverse transferJobDetailsFromJSRep maybeJobDetails -type InsertPackageJob = - { payload :: PackageOperation +type InsertTransferJob = + { payload :: TransferData + , rawPayload :: String + , signature :: Signature } -type JSInsertPackageJob = +type JSInsertTransferJob = { jobId :: String - , jobType :: String , packageName :: String , payload :: String , createdAt :: String } -insertPackageJobToJSRep :: JobId -> DateTime -> InsertPackageJob -> JSInsertPackageJob -insertPackageJobToJSRep jobId now { payload } = +insertTransferJobToJSRep :: JobId -> DateTime -> InsertTransferJob -> JSInsertTransferJob +insertTransferJobToJSRep jobId now { payload, rawPayload, signature } = { jobId: un JobId jobId - , jobType: JobType.print jobType - , packageName: PackageName.print name - , payload: stringifyJson Operation.packageOperationCodec payload + , packageName: PackageName.print payload.name + , payload: stringifyJson Operation.authenticatedCodec + { payload: Operation.Transfer payload, rawPayload, signature } , createdAt: DateTime.format Internal.Format.iso8601DateTime now } - where - { jobType, name } = case payload of - Operation.Publish { name } -> { jobType: JobType.PublishJob, name } - Operation.Authenticated { payload: Operation.Unpublish { name } } -> { jobType: JobType.UnpublishJob, 
name } - Operation.Authenticated { payload: Operation.Transfer { name } } -> { jobType: JobType.TransferJob, name } -foreign import insertPackageJobImpl :: EffectFn2 SQLite JSInsertPackageJob Unit +foreign import insertTransferJobImpl :: EffectFn2 SQLite JSInsertTransferJob Unit -- | Insert a new package job, ie. a publish, unpublish, or transfer. -insertPackageJob :: SQLite -> InsertPackageJob -> Effect JobId -insertPackageJob db job = do +insertTransferJob :: SQLite -> InsertTransferJob -> Effect JobId +insertTransferJob db job = do jobId <- newJobId now <- nowUTC - Uncurried.runEffectFn2 insertPackageJobImpl db $ insertPackageJobToJSRep jobId now job + Uncurried.runEffectFn2 insertTransferJobImpl db $ insertTransferJobToJSRep jobId now job pure jobId -------------------------------------------------------------------------------- @@ -278,47 +532,55 @@ insertMatrixJob db job = do type MatrixJobDetails = { jobId :: JobId + , createdAt :: DateTime + , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean , packageName :: PackageName , packageVersion :: Version , compilerVersion :: Version , payload :: Map PackageName Version - , createdAt :: DateTime - , startedAt :: Maybe DateTime } type JSMatrixJobDetails = { jobId :: String + , createdAt :: String + , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int , packageName :: String , packageVersion :: String , compilerVersion :: String , payload :: String - , createdAt :: String - , startedAt :: Nullable String } matrixJobDetailsFromJSRep :: JSMatrixJobDetails -> Either String MatrixJobDetails -matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt } = do +matrixJobDetailsFromJSRep { jobId, packageName, packageVersion, compilerVersion, payload, createdAt, startedAt, finishedAt, success } = do + created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt + started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success name <- PackageName.parse packageName version <- Version.parse packageVersion compiler <- Version.parse compilerVersion - created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt - started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) parsed <- lmap JSON.DecodeError.print $ parseJson (Internal.Codec.packageMap Version.codec) payload pure { jobId: JobId jobId + , createdAt: created + , startedAt: started + , finishedAt: finished + , success: s , packageName: name , packageVersion: version , compilerVersion: compiler , payload: parsed - , createdAt: created - , startedAt: started } -foreign import selectNextMatrixJobImpl :: EffectFn1 SQLite (Nullable JSMatrixJobDetails) +foreign import selectMatrixJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSMatrixJobDetails) selectNextMatrixJob :: SQLite -> Effect (Either String (Maybe MatrixJobDetails)) selectNextMatrixJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextMatrixJobImpl db + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectMatrixJobImpl db Nullable.null pure $ traverse matrixJobDetailsFromJSRep maybeJobDetails -------------------------------------------------------------------------------- @@ -326,35 +588,43 @@ selectNextMatrixJob db = do type PackageSetJobDetails = { jobId :: JobId - , payload :: 
PackageSetOperation , createdAt :: DateTime , startedAt :: Maybe DateTime + , finishedAt :: Maybe DateTime + , success :: Boolean + , payload :: PackageSetOperation } type JSPackageSetJobDetails = { jobId :: String - , payload :: String , createdAt :: String , startedAt :: Nullable String + , finishedAt :: Nullable String + , success :: Int + , payload :: String } packageSetJobDetailsFromJSRep :: JSPackageSetJobDetails -> Either String PackageSetJobDetails -packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt } = do - parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload +packageSetJobDetailsFromJSRep { jobId, payload, createdAt, startedAt, finishedAt, success } = do created <- DateTime.unformat Internal.Format.iso8601DateTime createdAt started <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe startedAt) + finished <- traverse (DateTime.unformat Internal.Format.iso8601DateTime) (toMaybe finishedAt) + s <- toSuccess success + parsed <- lmap JSON.DecodeError.print $ parseJson Operation.packageSetOperationCodec payload pure { jobId: JobId jobId - , payload: parsed , createdAt: created , startedAt: started + , finishedAt: finished + , success: s + , payload: parsed } -foreign import selectNextPackageSetJobImpl :: EffectFn1 SQLite (Nullable JSPackageSetJobDetails) +foreign import selectPackageSetJobImpl :: EffectFn2 SQLite (Nullable String) (Nullable JSPackageSetJobDetails) selectNextPackageSetJob :: SQLite -> Effect (Either String (Maybe PackageSetJobDetails)) selectNextPackageSetJob db = do - maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn1 selectNextPackageSetJobImpl db + maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPackageSetJobImpl db Nullable.null pure $ traverse packageSetJobDetailsFromJSRep maybeJobDetails type InsertPackageSetJob = @@ -417,18 +687,18 @@ foreign import insertLogLineImpl :: EffectFn2 SQLite JSLogLine Unit insertLogLine :: SQLite -> LogLine -> Effect Unit insertLogLine db = Uncurried.runEffectFn2 insertLogLineImpl db <<< logLineToJSRep -foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int (Nullable String) (Array JSLogLine) +foreign import selectLogsByJobImpl :: EffectFn4 SQLite String Int String (Array JSLogLine) -- | Select all logs for a given job at or above the indicated log level. To get all -- | logs, pass the DEBUG log level. 
-selectLogsByJob :: SQLite -> JobId -> LogLevel -> Maybe DateTime -> Effect { fail :: Array String, success :: Array LogLine } +selectLogsByJob :: SQLite -> JobId -> LogLevel -> DateTime -> Effect { fail :: Array String, success :: Array LogLine } selectLogsByJob db jobId level since = do - let timestamp = map (DateTime.format Internal.Format.iso8601DateTime) since + let timestamp = DateTime.format Internal.Format.iso8601DateTime since jsLogLines <- Uncurried.runEffectFn4 selectLogsByJobImpl db (un JobId jobId) (API.V1.logLevelToPriority level) - (Nullable.toNullable timestamp) + timestamp pure $ partitionEithers $ map logLineFromJSRep jsLogLines diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 63a5cbddd..35e6a3991 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -12,6 +12,9 @@ import Data.Map as Map import Data.Set as Set import Effect.Aff (Milliseconds(..)) import Effect.Aff as Aff +import Record as Record +import Registry.API.V1 (Job(..)) +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db @@ -19,21 +22,14 @@ import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry -import Registry.App.SQLite (MatrixJobDetails, PackageJobDetails, PackageSetJobDetails) import Registry.App.Server.Env (ServerEffects, ServerEnv, runEffects) import Registry.App.Server.MatrixBuilder as MatrixBuilder import Registry.ManifestIndex as ManifestIndex -import Registry.Operation as Operation import Registry.PackageName as PackageName import Registry.Version as Version import Run (Run) import Run.Except (EXCEPT) -data JobDetails - = PackageJob PackageJobDetails - | MatrixJob MatrixJobDetails - | PackageSetJob PackageSetJobDetails - runJobExecutor :: ServerEnv -> Aff (Either Aff.Error Unit) runJobExecutor env = runEffects env do Log.info "Starting Job Executor" @@ -62,10 +58,7 @@ runJobExecutor env = runEffects env do Just job -> do now <- nowUTC let - jobId = case job of - PackageJob details -> details.jobId - MatrixJob details -> details.jobId - PackageSetJob details -> details.jobId + jobId = (V1.jobInfo job).jobId Db.startJob { jobId, startedAt: now } @@ -96,20 +89,22 @@ runJobExecutor env = runEffects env do -- TODO: here we only get a single package for each operation, but really we should -- have all of them and toposort them. There is something in ManifestIndex but not --- sure that's what we need -findNextAvailableJob :: forall r. Run (DB + EXCEPT String + r) (Maybe JobDetails) +-- sure that's what we need +findNextAvailableJob :: forall r. 
Run (DB + EXCEPT String + r) (Maybe Job) findNextAvailableJob = runMaybeT - $ (PackageJob <$> MaybeT Db.selectNextPackageJob) - <|> (MatrixJob <$> MaybeT Db.selectNextMatrixJob) - <|> (PackageSetJob <$> MaybeT Db.selectNextPackageSetJob) + $ (PublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "publish" } <$> MaybeT Db.selectNextPublishJob) + <|> (UnpublishJob <<< Record.merge { logs: [], jobType: Proxy :: _ "unpublish" } <$> MaybeT Db.selectNextUnpublishJob) + <|> (TransferJob <<< Record.merge { logs: [], jobType: Proxy :: _ "transfer" } <$> MaybeT Db.selectNextTransferJob) + <|> (MatrixJob <<< Record.merge { logs: [], jobType: Proxy :: _ "matrix" } <$> MaybeT Db.selectNextMatrixJob) + <|> (PackageSetJob <<< Record.merge { logs: [], jobType: Proxy :: _ "packageset" } <$> MaybeT Db.selectNextPackageSetJob) -executeJob :: DateTime -> JobDetails -> Run ServerEffects Unit +executeJob :: DateTime -> Job -> Run ServerEffects Unit executeJob _ = case _ of - PackageJob { payload: Operation.Publish payload@{ name, version } } -> do - maybeDependencies <- API.publish Nothing payload + PublishJob { payload: payload@{ name } } -> do + maybeResult <- API.publish Nothing payload -- The above operation will throw if not successful, and return a map of -- dependencies of the package only if it has not been published before. - for_ maybeDependencies \dependencies -> do + for_ maybeResult \{ dependencies, version } -> do -- At this point this package has been verified with one compiler only. -- So we need to enqueue compilation jobs for (1) same package, all the other -- compilers, and (2) same compiler, all packages that depend on this one @@ -132,8 +127,8 @@ executeJob _ = case _ of , packageName: solvedPackage , packageVersion: solvedVersion } - PackageJob { payload: Operation.Authenticated auth } -> - API.authenticated auth + UnpublishJob { payload } -> API.authenticated payload + TransferJob { payload } -> API.authenticated payload MatrixJob details@{ packageName, packageVersion } -> do maybeDependencies <- MatrixBuilder.runMatrixJob details -- Unlike the publishing case, after verifying a compilation here we only need diff --git a/app/src/App/Server/MatrixBuilder.purs b/app/src/App/Server/MatrixBuilder.purs index 7ae98d972..8db8e883b 100644 --- a/app/src/App/Server/MatrixBuilder.purs +++ b/app/src/App/Server/MatrixBuilder.purs @@ -19,6 +19,7 @@ import Data.String as String import Effect.Aff as Aff import Node.FS.Aff as FS.Aff import Node.Path as Path +import Registry.API.V1 (MatrixJobData) import Registry.App.CLI.Purs (CompilerFailure(..)) import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions @@ -29,7 +30,6 @@ import Registry.App.Effect.Registry (REGISTRY) import Registry.App.Effect.Registry as Registry import Registry.App.Effect.Storage (STORAGE) import Registry.App.Effect.Storage as Storage -import Registry.App.SQLite (MatrixJobDetails) import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.Tmp as Tmp import Registry.ManifestIndex as ManifestIndex @@ -43,7 +43,7 @@ import Run as Run import Run.Except (EXCEPT) import Run.Except as Except -runMatrixJob :: forall r. MatrixJobDetails -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) +runMatrixJob :: forall r. 
MatrixJobData -> Run (REGISTRY + STORAGE + LOG + AFF + EFFECT + EXCEPT String + r) (Maybe (Map PackageName Range)) runMatrixJob { compilerVersion, packageName, packageVersion, payload: buildPlan } = do workdir <- Tmp.mkTmpDir let installed = Path.concat [ workdir, ".registry" ] diff --git a/app/src/App/Server/Router.purs b/app/src/App/Server/Router.purs index f371d1e71..9143508de 100644 --- a/app/src/App/Server/Router.purs +++ b/app/src/App/Server/Router.purs @@ -2,21 +2,18 @@ module Registry.App.Server.Router where import Registry.App.Prelude hiding ((/)) -import Control.Monad.Cont (ContT) import Data.Codec.JSON as CJ import Effect.Aff as Aff import HTTPurple (Method(..), Request, Response) import HTTPurple as HTTPurple import HTTPurple.Status as Status -import Registry.API.V1 (LogLevel(..), Route(..)) +import Registry.API.V1 (Route(..)) import Registry.API.V1 as V1 import Registry.App.Effect.Db as Db import Registry.App.Effect.Env as Env import Registry.App.Effect.Log as Log import Registry.App.Server.Env (ServerEffects, ServerEnv, jsonDecoder, jsonOk, runEffects) -import Registry.Operation (PackageOperation) import Registry.Operation as Operation -import Registry.PackageName as PackageName import Run (Run) import Run.Except as Run.Except @@ -44,14 +41,20 @@ router { route, method, body } = HTTPurple.usingCont case route, method of Publish, Post -> do publish <- HTTPurple.fromJson (jsonDecoder Operation.publishCodec) body lift $ Log.info $ "Received Publish request: " <> printJson Operation.publishCodec publish - insertPackageJob $ Operation.Publish publish + jobId <- lift $ Db.insertPublishJob { payload: publish } + jsonOk V1.jobCreatedResponseCodec { jobId } Unpublish, Post -> do auth <- HTTPurple.fromJson (jsonDecoder Operation.authenticatedCodec) body case auth.payload of Operation.Unpublish payload -> do lift $ Log.info $ "Received Unpublish request: " <> printJson Operation.unpublishCodec payload - insertPackageJob $ Operation.Authenticated auth + jobId <- lift $ Db.insertUnpublishJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected unpublish operation." @@ -60,7 +63,12 @@ router { route, method, body } = HTTPurple.usingCont case route, method of case auth.payload of Operation.Transfer payload -> do lift $ Log.info $ "Received Transfer request: " <> printJson Operation.transferCodec payload - insertPackageJob $ Operation.Authenticated auth + jobId <- lift $ Db.insertTransferJob + { payload: payload + , rawPayload: auth.rawPayload + , signature: auth.signature + } + jsonOk V1.jobCreatedResponseCodec { jobId } _ -> HTTPurple.badRequest "Expected transfer operation." 
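The three operation endpoints above share one contract: validate the payload, enqueue a typed job, and reply immediately with `{ jobId }`; clients then poll the job endpoint (shown in the next hunk) until `finishedAt` is set, which is also how the E2E client's `pollJob` behaves. A minimal sketch of that round trip from JavaScript follows. The base URL and the `/publish` and `/jobs/:jobId` paths are assumptions for illustration (the concrete paths live in `V1.routes`), and the publish payload is abbreviated; the response shapes come from `jobCreatedResponseCodec` and `jobCodec`.

```js
// Sketch only: submit a publish operation, then poll the resulting job.
// Host and paths are assumed; see Operation.publishCodec for the full payload.
const base = "http://localhost:8080/api/v1";

// Submitting an operation returns a job id immediately (jobCreatedResponseCodec).
const submitted = await fetch(`${base}/publish`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({ name: "effect", ref: "v4.0.0", compiler: "0.15.9" }),
});
const { jobId } = await submitted.json();

// The job endpoint returns the full typed job (jobCodec): the shared fields
// (createdAt, startedAt, finishedAt, success, logs) plus a "jobType" tag
// ("publish" | "unpublish" | "transfer" | "matrix" | "packageset") and the
// per-type payload. finishedAt is absent until the executor finishes the job.
let job;
do {
  await new Promise((resolve) => setTimeout(resolve, 1000));
  job = await (await fetch(`${base}/jobs/${jobId}`)).json();
} while (!job.finishedAt);

console.log(`${job.jobType} job ${jobId}: success=${job.success}`);
```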
@@ -70,22 +78,14 @@ router { route, method, body } = HTTPurple.usingCont case route, method of jsonOk (CJ.array V1.jobCodec) [] Job jobId { level: maybeLogLevel, since }, Get -> do - let logLevel = fromMaybe Error maybeLogLevel - logs <- lift $ Db.selectLogsByJob jobId logLevel since - lift (Run.Except.runExcept $ Db.selectJobInfo jobId) >>= case _ of + now <- liftEffect nowUTC + lift (Run.Except.runExcept $ Db.selectJob { jobId, level: maybeLogLevel, since: fromMaybe now since }) >>= case _ of Left err -> do lift $ Log.error $ "Error while fetching job: " <> err HTTPurple.notFound Right Nothing -> do HTTPurple.notFound - Right (Just job) -> - jsonOk V1.jobCodec - { jobId - , createdAt: job.createdAt - , finishedAt: job.finishedAt - , success: job.success - , logs - } + Right (Just job) -> jsonOk V1.jobCodec job Status, Get -> HTTPurple.emptyResponse Status.ok @@ -95,9 +95,3 @@ router { route, method, body } = HTTPurple.usingCont case route, method of _, _ -> HTTPurple.notFound - where - insertPackageJob :: PackageOperation -> ContT Response (Run _) Response - insertPackageJob operation = do - lift $ Log.info $ "Enqueuing job for package " <> PackageName.print (Operation.packageName operation) - jobId <- lift $ Db.insertPackageJob { payload: operation } - jsonOk V1.jobCreatedResponseCodec { jobId } diff --git a/app/test/App/API.purs b/app/test/App/API.purs index 63dcccc3d..122879e49 100644 --- a/app/test/App/API.purs +++ b/app/test/App/API.purs @@ -96,7 +96,7 @@ spec = do , location: Just $ GitHub { owner: "purescript", repo: "purescript-effect", subdir: Nothing } , name , ref - , version + , version: version , resolutions: Nothing } diff --git a/db/migrations/20240914171030_create_job_queue_tables.sql b/db/migrations/20240914171030_create_job_queue_tables.sql index f4f1e68f3..71727f473 100644 --- a/db/migrations/20240914171030_create_job_queue_tables.sql +++ b/db/migrations/20240914171030_create_job_queue_tables.sql @@ -9,16 +9,33 @@ CREATE TABLE job_info ( success INTEGER NOT NULL DEFAULT 0 ); --- Package-oriented jobs (publish/unpublish/transfer) -CREATE TABLE package_jobs ( +-- Publishing jobs +CREATE TABLE publish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Unpublishing jobs +CREATE TABLE unpublish_jobs ( + jobId TEXT PRIMARY KEY NOT NULL, + packageName TEXT NOT NULL, + packageVersion TEXT NOT NULL, + payload JSON NOT NULL, + FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE +); + +-- Package transfer jobs +CREATE TABLE transfer_jobs ( jobId TEXT PRIMARY KEY NOT NULL, - jobType TEXT NOT NULL, packageName TEXT NOT NULL, payload JSON NOT NULL, FOREIGN KEY (jobId) REFERENCES job_info (jobId) ON DELETE CASCADE ); --- Compiler matrix jobs (one compiler, all packages) +-- Compiler matrix jobs CREATE TABLE matrix_jobs ( jobId TEXT PRIMARY KEY NOT NULL, packageName TEXT NOT NULL, @@ -49,7 +66,9 @@ CREATE TABLE IF NOT EXISTS logs ( -- migrate:down DROP TABLE job_info; -DROP TABLE package_jobs; +DROP TABLE publish_jobs; +DROP TABLE unpublish_jobs; +DROP TABLE transfer_jobs; DROP TABLE matrix_jobs; DROP TABLE package_set_jobs; DROP TABLE logs; diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 4bae692f5..8c08d181d 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -1,7 +1,33 @@ -module Registry.API.V1 where +module Registry.API.V1 + ( JobCreatedResponse + , JobId(..) + , JobInfo + , JobType(..) 
+ , Job(..) + , LogLevel(..) + , LogLine + , MatrixJobData + , PackageSetJobData + , PublishJobData + , Route(..) + , TransferJobData + , UnpublishJobData + , jobInfo + , jobCodec + , jobCreatedResponseCodec + , logLevelFromPriority + , logLevelToPriority + , printJobType + , printLogLevel + , routes + ) where import Prelude hiding ((/)) +import Codec.JSON.DecodeError as CJ.DecodeError +import Control.Alt ((<|>)) +import Control.Monad.Except (Except, except) +import Data.Codec as Codec import Data.Codec.JSON as CJ import Data.Codec.JSON.Record as CJ.Record import Data.Codec.JSON.Sum as CJ.Sum @@ -10,15 +36,26 @@ import Data.Either (Either(..), hush) import Data.Formatter.DateTime as DateTime import Data.Generic.Rep (class Generic) import Data.Lens.Iso.Newtype (_Newtype) +import Data.Map (Map) import Data.Maybe (Maybe) import Data.Newtype (class Newtype) import Data.Profunctor as Profunctor +import Data.Symbol (class IsSymbol) +import Data.Symbol as Symbol +import JSON (JSON) import Registry.Internal.Codec as Internal.Codec import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, PackageSetOperation, PublishData) +import Registry.Operation as Operation +import Registry.PackageName (PackageName) +import Registry.PackageName as PackageName +import Registry.Version (Version) +import Registry.Version as Version import Routing.Duplex (RouteDuplex') import Routing.Duplex as Routing import Routing.Duplex.Generic as RoutingG import Routing.Duplex.Generic.Syntax ((/), (?)) +import Type.Proxy (Proxy(..)) data Route = Publish @@ -62,23 +99,169 @@ type JobCreatedResponse = { jobId :: JobId } jobCreatedResponseCodec :: CJ.Codec JobCreatedResponse jobCreatedResponseCodec = CJ.named "JobCreatedResponse" $ CJ.Record.object { jobId: jobIdCodec } -type Job = +data Job + = PublishJob PublishJobData + | UnpublishJob UnpublishJobData + | TransferJob TransferJobData + | MatrixJob MatrixJobData + | PackageSetJob PackageSetJobData + +type JobInfo r = { jobId :: JobId , createdAt :: DateTime + , startedAt :: Maybe DateTime , finishedAt :: Maybe DateTime , success :: Boolean , logs :: Array LogLine + | r } +type PublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: PublishData + , jobType :: Proxy "publish" + ) + +type UnpublishJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , payload :: AuthenticatedData + , jobType :: Proxy "unpublish" + ) + +type TransferJobData = JobInfo + ( packageName :: PackageName + , payload :: AuthenticatedData + , jobType :: Proxy "transfer" + ) + +type MatrixJobData = JobInfo + ( packageName :: PackageName + , packageVersion :: Version + , compilerVersion :: Version + , payload :: Map PackageName Version + , jobType :: Proxy "matrix" + ) + +type PackageSetJobData = JobInfo + ( payload :: PackageSetOperation + , jobType :: Proxy "packageset" + ) + jobCodec :: CJ.Codec Job -jobCodec = CJ.named "Job" $ CJ.Record.object +jobCodec = Codec.codec' decode encode + where + decode :: JSON -> Except CJ.DecodeError Job + decode json = + do + map PublishJob (Codec.decode publishJobDataCodec json) + <|> map UnpublishJob (Codec.decode unpublishJobDataCodec json) + <|> map TransferJob (Codec.decode transferJobDataCodec json) + <|> map MatrixJob (Codec.decode matrixJobDataCodec json) + <|> map PackageSetJob (Codec.decode packageSetJobDataCodec json) + + encode :: Job -> JSON + encode = case _ of + PublishJob j -> CJ.encode publishJobDataCodec j + UnpublishJob j -> CJ.encode 
unpublishJobDataCodec j + TransferJob j -> CJ.encode transferJobDataCodec j + MatrixJob j -> CJ.encode matrixJobDataCodec j + PackageSetJob j -> CJ.encode packageSetJobDataCodec j + +publishJobDataCodec :: CJ.Codec PublishJobData +publishJobDataCodec = CJ.named "PublishJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "publish") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.publishCodec + } + +symbolCodec :: forall sym. IsSymbol sym => Proxy sym -> CJ.Codec (Proxy sym) +symbolCodec _ = Codec.codec' decode encode + where + decode json = except do + symbol <- CJ.decode CJ.string json + let expected = Symbol.reflectSymbol (Proxy :: _ sym) + case symbol == expected of + false -> Left $ CJ.DecodeError.basic + $ "Tried to decode symbol '" <> symbol <> "' as '" <> expected <> "'" + true -> Right (Proxy :: _ sym) + encode = CJ.encode CJ.string <<< Symbol.reflectSymbol + +unpublishJobDataCodec :: CJ.Codec UnpublishJobData +unpublishJobDataCodec = CJ.named "UnpublishJob" $ CJ.Record.object { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "unpublish") , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime , success: CJ.boolean , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , payload: Operation.authenticatedCodec } +transferJobDataCodec :: CJ.Codec TransferJobData +transferJobDataCodec = CJ.named "TransferJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "transfer") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , payload: Operation.authenticatedCodec + } + +matrixJobDataCodec :: CJ.Codec MatrixJobData +matrixJobDataCodec = CJ.named "MatrixJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "matrix") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , packageName: PackageName.codec + , packageVersion: Version.codec + , compilerVersion: Version.codec + , payload: Internal.Codec.packageMap Version.codec + } + +packageSetJobDataCodec :: CJ.Codec PackageSetJobData +packageSetJobDataCodec = CJ.named "PackageSetJob" $ CJ.Record.object + { jobId: jobIdCodec + , jobType: symbolCodec (Proxy :: _ "packageset") + , createdAt: Internal.Codec.iso8601DateTime + , startedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , finishedAt: CJ.Record.optional Internal.Codec.iso8601DateTime + , success: CJ.boolean + , logs: CJ.array logLineCodec + , payload: Operation.packageSetOperationCodec + } + +jobInfo :: Job -> JobInfo () +jobInfo = case _ of + PublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, createdAt, startedAt, finishedAt, success, logs } + UnpublishJob { jobId, createdAt, startedAt, finishedAt, success, logs } -> + { jobId, 
createdAt, startedAt, finishedAt, success, logs }
+  TransferJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  MatrixJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+  PackageSetJob { jobId, createdAt, startedAt, finishedAt, success, logs } ->
+    { jobId, createdAt, startedAt, finishedAt, success, logs }
+
 newtype JobId = JobId String
 
 derive instance Newtype JobId _
@@ -86,6 +269,23 @@ derive instance Newtype JobId _
 jobIdCodec :: CJ.Codec JobId
 jobIdCodec = Profunctor.wrapIso JobId CJ.string
 
+data JobType
+  = PublishJobType
+  | UnpublishJobType
+  | TransferJobType
+  | MatrixJobType
+  | PackageSetJobType
+
+derive instance Eq JobType
+
+printJobType :: JobType -> String
+printJobType = case _ of
+  PublishJobType -> "publish"
+  UnpublishJobType -> "unpublish"
+  TransferJobType -> "transfer"
+  MatrixJobType -> "matrix"
+  PackageSetJobType -> "packageset"
+
 type LogLine =
   { level :: LogLevel
   , message :: String
diff --git a/lib/src/JobType.purs b/lib/src/JobType.purs
deleted file mode 100644
index dbc4eaf01..000000000
--- a/lib/src/JobType.purs
+++ /dev/null
@@ -1,27 +0,0 @@
-module Registry.JobType where
-
-import Prelude
-
-import Data.Codec.JSON as CJ
-import Data.Codec.JSON.Sum as CJ.Sum
-import Data.Either (Either(..), hush)
-
-data JobType = PublishJob | UnpublishJob | TransferJob
-
-derive instance Eq JobType
-
-parse :: String -> Either String JobType
-parse = case _ of
-  "publish" -> Right PublishJob
-  "unpublish" -> Right UnpublishJob
-  "transfer" -> Right TransferJob
-  j -> Left $ "Invalid job type " <> show j
-
-print :: JobType -> String
-print = case _ of
-  PublishJob -> "publish"
-  UnpublishJob -> "unpublish"
-  TransferJob -> "transfer"
-
-codec :: CJ.Codec JobType
-codec = CJ.Sum.enumSum print (hush <<< parse)
diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs
index e0de363ca..925361fb2 100644
--- a/scripts/src/PackageDeleter.purs
+++ b/scripts/src/PackageDeleter.purs
@@ -243,7 +243,7 @@ deleteVersion arguments name version = do
         { location: Just oldMetadata.location
         , name: name
         , ref: specificPackageMetadata.ref
-        , version
+        , version: version
         , compiler: unsafeFromRight $ Version.parse "0.15.4"
         , resolutions: Nothing
         }
diff --git a/test-utils/src/Registry/Test/E2E/Client.purs b/test-utils/src/Registry/Test/E2E/Client.purs
index ff34107df..9d8b6b0b4 100644
--- a/test-utils/src/Registry/Test/E2E/Client.purs
+++ b/test-utils/src/Registry/Test/E2E/Client.purs
@@ -175,6 +175,6 @@ pollJob config jobId = go 1
       case result of
         Left err -> throwError $ toError err
        Right job ->
-          case job.finishedAt of
+          case (V1.jobInfo job).finishedAt of
            Just _ -> pure job
            Nothing -> go (attempt + 1)

From 9a8d1ba152b2116bc6c0ec8f95ae674720e1dc47 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Mon, 22 Dec 2025 15:05:59 -0500
Subject: [PATCH 16/19] Implement thin client for GitHub issues

Replaces the old GitHubIssue workflow, which ran registry jobs directly,
with one that hits the registry API instead. Also adds integration tests
that ensure the various jobs can be kicked off as GitHub issue events and
that we get the resulting comments, issue close events, and so on.
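
At a high level the new client does the following (a sketch: submitJob and
pollAndReport are real functions in app/src/App/GitHubIssue.purs, while
prepareOperation and postComment stand in for inline code there):

    -- prepareOperation and postComment are illustrative stand-ins
    { endpoint, jsonBody } <- prepareOperation env.operation
    { jobId } <- submitJob (env.resourceEnv.registryApiUrl <> endpoint) jsonBody
    postComment ("Job started: `" <> unwrap jobId <> "`")
    success <- pollAndReport env.octokit env.issue env.pollConfig env.resourceEnv.registryApiUrl jobId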
--- .env.example | 64 ++-- app-e2e/spago.yaml | 8 + app-e2e/src/Test/E2E/GitHubIssue.purs | 215 ++++++++++++ app-e2e/src/Test/E2E/Main.purs | 2 + app-e2e/src/Test/E2E/Publish.purs | 63 ++-- app/src/App/Effect/Env.purs | 13 + app/src/App/GitHubIssue.purs | 329 ++++++++++++------ app/src/App/SQLite.js | 1 + app/src/App/SQLite.purs | 22 +- app/src/App/Server/JobExecutor.purs | 4 +- flake.nix | 5 +- nix/test/config.nix | 58 +++ nix/test/integration.nix | 5 +- nix/test/test-env.nix | 3 +- spago.lock | 40 +++ .../src/Registry/Test/E2E/Fixtures.purs | 70 ++++ .../src/Registry/Test/E2E/WireMock.purs | 164 +++++++++ 17 files changed, 900 insertions(+), 166 deletions(-) create mode 100644 app-e2e/src/Test/E2E/GitHubIssue.purs create mode 100644 test-utils/src/Registry/Test/E2E/Fixtures.purs create mode 100644 test-utils/src/Registry/Test/E2E/WireMock.purs diff --git a/.env.example b/.env.example index febae2d29..4873fe0a0 100644 --- a/.env.example +++ b/.env.example @@ -1,38 +1,60 @@ -# ===== -# Dev Configuration -# The devShell reads this file to set defaults, so changing values here -# affects local development. -# ===== +# ----------------------------------------------------------------------------- +# Server Configuration (dev defaults, required in all environments) +# ----------------------------------------------------------------------------- -# Server port - used by both the server and E2E tests +# Port the registry server listens on +# - Dev/Test: 9000 (from this file) +# - Prod: Set in deployment config SERVER_PORT=9000 # SQLite database path (relative to working directory) +# - Dev: Uses local ./db directory +# - Test: Overridden to use temp state directory +# - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" -# ===== -# Dev Secrets -# these must be set in .env when running scripts like legacy-importer -# ===== -# GitHub personal access token for API requests when running scripts -GITHUB_TOKEN="ghp_your_personal_access_token" +# ----------------------------------------------------------------------------- +# External Service URLs (optional overrides, have production defaults) +# ----------------------------------------------------------------------------- +# These default to production URLs in the app. Set these only when: +# - Running tests (test-env sets these automatically) +# - Using custom/staging infrastructure -# ===== -# Prod Secrets -# these must be set in .env to run the production server and some scripts -# ===== +# GITHUB_API_URL="https://api.github.com" +# S3_API_URL="https://packages.registry.purescript.org" +# S3_BUCKET_URL="https://ams3.digitaloceanspaces.com" +# PURSUIT_API_URL="https://pursuit.purescript.org" +# REGISTRY_API_URL="https://registry.purescript.org/api" +# HEALTHCHECKS_URL="https://hc-ping.com/your-uuid" -# DigitalOcean Spaces credentials for S3-compatible storage -SPACES_KEY="digitalocean_spaces_key" -SPACES_SECRET="digitalocean_spaces_secret" -# Pacchettibotti bot account credentials -# Used for automated registry operations (commits, releases, etc.) +# ----------------------------------------------------------------------------- +# Secrets (required for production, use dummy values for local dev) +# ----------------------------------------------------------------------------- +# IMPORTANT: Never commit real secrets. The values below are dummies for testing. 
+ +# GitHub personal access token for pacchettibotti bot +# Used for: commits to registry repos, issue management PACCHETTIBOTTI_TOKEN="ghp_pacchettibotti_token" # Pacchettibotti SSH keys (base64-encoded) +# Used for: signing authenticated operations (unpublish, transfer) # Generate with: ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org" # Encode with: cat key | base64 | tr -d '\n' PACCHETTIBOTTI_ED25519_PUB="c3NoLWVkMjU1MTkgYWJjeHl6IHBhY2NoZXR0aWJvdHRpQHB1cmVzY3JpcHQub3Jn" PACCHETTIBOTTI_ED25519="YWJjeHl6" + +# DigitalOcean Spaces credentials for S3-compatible storage +# Used for: uploading/downloading package tarballs +SPACES_KEY="digitalocean_spaces_key" +SPACES_SECRET="digitalocean_spaces_secret" + + +# ----------------------------------------------------------------------------- +# Script-only Secrets (not used by server, used by scripts like legacy-importer) +# ----------------------------------------------------------------------------- + +# Personal GitHub token for API requests when running scripts +# This is YOUR token, not pacchettibotti's +GITHUB_TOKEN="ghp_your_personal_access_token" diff --git a/app-e2e/spago.yaml b/app-e2e/spago.yaml index 1fa902f14..c19e78c42 100644 --- a/app-e2e/spago.yaml +++ b/app-e2e/spago.yaml @@ -5,12 +5,20 @@ package: dependencies: - aff - arrays + - codec-json - console - datetime - effect - either + - foldable-traversable + - json - maybe + - node-fs + - node-path + - node-process - prelude + - registry-app + - registry-foreign - registry-lib - registry-test-utils - spec diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs new file mode 100644 index 000000000..b1931aaab --- /dev/null +++ b/app-e2e/src/Test/E2E/GitHubIssue.purs @@ -0,0 +1,215 @@ +-- | End-to-end tests for the GitHubIssue workflow. +-- | These tests exercise the full flow: parsing a GitHub event, submitting to +-- | the registry API, polling for completion, and posting comments. 
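+-- |
+-- | Each test follows the same rough shape (a sketch; these helpers are all
+-- | defined below in this module):
+-- |
+-- |   result <- runWorkflowWithEvent (mkGitHubPublishEvent Fixtures.effectPublishData)
+-- |   assertJobSucceeded result
+-- |   assertIssueClosed result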
+module Test.E2E.GitHubIssue (spec) where + +import Registry.App.Prelude + +import Data.Array as Array +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.String as String +import Effect.Aff (Milliseconds(..)) +import Effect.Aff as Aff +import JSON as JSON +import Node.FS.Aff as FS.Aff +import Node.Path as Path +import Node.Process as Process +import Registry.App.GitHubIssue as GitHubIssue +import Registry.Foreign.Tmp as Tmp +import Registry.Operation (AuthenticatedData) +import Registry.Operation as Operation +import Registry.Test.E2E.Client as Client +import Registry.Test.E2E.Fixtures as Fixtures +import Registry.Test.E2E.WireMock (WireMockRequest) +import Registry.Test.E2E.WireMock as WireMock +import Test.Spec (Spec) +import Test.Spec as Spec + +spec :: Spec Unit +spec = do + Spec.describe "GitHubIssue end-to-end" do + Spec.before clearWireMockJournal do + + Spec.it "handles a publish via GitHub issue, posts comments, and closes issue on success" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.effectPublishData + + assertJobSucceeded result + assertHasComment jobStartedText result + assertHasComment jobCompletedText result + assertIssueClosed result + + Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.failingPublishData + + assertJobFailed result + assertHasComment jobStartedText result + assertHasComment jobFailedText result + assertNoComment jobCompletedText result + assertIssueOpen result + + Spec.it "re-signs authenticated operation for trustee (job fails due to unpublish time limit)" \_ -> do + result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEvent Fixtures.trusteeAuthenticatedData + + assertHasComment jobStartedText result + assertTeamsApiCalled result + + where + clearWireMockJournal :: Aff Unit + clearWireMockJournal = do + wmConfig <- liftEffect WireMock.configFromEnv + WireMock.clearRequestsOrFail wmConfig + +testIssueNumber :: Int +testIssueNumber = 101 + +-- | Username configured as a packaging team member in test WireMock fixtures. +-- | See nix/test/config.nix for the GitHub Teams API stub. 
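+-- | That stub answers GET /orgs/purescript/teams/packaging/members with a
+-- | single member, [ { "login": "packaging-team-user", "id": 1 } ], which is
+-- | what marks this user as a trustee.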
+packagingTeamUsername :: String +packagingTeamUsername = "packaging-team-user" + +jobStartedText :: String +jobStartedText = "Job started" + +jobCompletedText :: String +jobCompletedText = "Job completed successfully" + +jobFailedText :: String +jobFailedText = "Job failed" + +packagingTeamMembersPath :: String +packagingTeamMembersPath = "/orgs/purescript/teams/packaging/members" + +testPollConfig :: GitHubIssue.PollConfig +testPollConfig = + { maxAttempts: 60 + , interval: Milliseconds 500.0 + } + +githubEventCodec :: CJ.Codec { sender :: { login :: String }, issue :: { number :: Int, body :: String } } +githubEventCodec = CJ.named "GitHubEvent" $ CJ.Record.object + { sender: CJ.Record.object { login: CJ.string } + , issue: CJ.Record.object { number: CJ.int, body: CJ.string } + } + +mkGitHubPublishEvent :: Operation.PublishData -> String +mkGitHubPublishEvent publishData = + let + publishJson = JSON.print $ CJ.encode Operation.publishCodec publishData + body = "```json\n" <> publishJson <> "\n```" + event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + in + JSON.print $ CJ.encode githubEventCodec event + +mkGitHubAuthenticatedEvent :: AuthenticatedData -> String +mkGitHubAuthenticatedEvent authData = + let + authJson = JSON.print $ CJ.encode Operation.authenticatedCodec authData + body = "```json\n" <> authJson <> "\n```" + event = { sender: { login: packagingTeamUsername }, issue: { number: testIssueNumber, body } } + in + JSON.print $ CJ.encode githubEventCodec event + +issuePath :: Int -> String +issuePath n = "/issues/" <> show n + +issueCommentsPath :: Int -> String +issueCommentsPath n = issuePath n <> "/comments" + +commentRequests :: Array WireMockRequest -> Array WireMockRequest +commentRequests = + WireMock.filterByMethod "POST" + >>> WireMock.filterByUrlContaining (issueCommentsPath testIssueNumber) + +closeRequests :: Array WireMockRequest -> Array WireMockRequest +closeRequests = + WireMock.filterByMethod "PATCH" + >>> WireMock.filterByUrlContaining (issuePath testIssueNumber) + +teamsRequests :: Array WireMockRequest -> Array WireMockRequest +teamsRequests = + WireMock.filterByMethod "GET" + >>> WireMock.filterByUrlContaining packagingTeamMembersPath + +bodyContains :: String -> WireMockRequest -> Boolean +bodyContains text r = fromMaybe false (String.contains (String.Pattern text) <$> r.body) + +hasComment :: String -> Array WireMockRequest -> Boolean +hasComment text = Array.any (bodyContains text) + +-- | Result of running the GitHubIssue workflow. +type RunResult = + { success :: Boolean + , requests :: Array WireMockRequest + } + +-- | Run the GitHub issue workflow with a given event JSON. +-- | Handles server check, temp file creation, env setup, and request capture. 
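+-- | The event JSON is written to a temp file and exposed via the
+-- | GITHUB_EVENT_PATH environment variable, mirroring how GitHub Actions
+-- | delivers webhook payloads and matching what initializeGitHub expects.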
+runWorkflowWithEvent :: String -> Aff RunResult +runWorkflowWithEvent eventJson = do + -- Verify server is reachable + config <- liftEffect Client.configFromEnv + statusResult <- Client.getStatus config + case statusResult of + Left err -> Aff.throwError $ Aff.error $ "Server not reachable: " <> Client.printClientError err + Right _ -> pure unit + + -- Write event to temp file + tmpDir <- Tmp.mkTmpDir + let eventPath = Path.concat [ tmpDir, "github-event.json" ] + FS.Aff.writeTextFile UTF8 eventPath eventJson + liftEffect $ Process.setEnv "GITHUB_EVENT_PATH" eventPath + + -- Initialize and run workflow + envResult <- GitHubIssue.initializeGitHub + case envResult of + Nothing -> + Aff.throwError $ Aff.error "initializeGitHub returned Nothing" + Just env -> do + let testEnv = env { pollConfig = testPollConfig, logVerbosity = Quiet } + result <- GitHubIssue.runGitHubIssue testEnv + + -- Capture WireMock requests + wmConfig <- liftEffect WireMock.configFromEnv + requests <- WireMock.getRequestsOrFail wmConfig + + case result of + Left err -> + WireMock.failWithRequests ("runGitHubIssue failed: " <> err) requests + Right success -> + pure { success, requests } + +assertJobSucceeded :: RunResult -> Aff Unit +assertJobSucceeded { success, requests } = + unless success do + WireMock.failWithRequests "Job did not succeed" requests + +assertJobFailed :: RunResult -> Aff Unit +assertJobFailed { success, requests } = + when success do + WireMock.failWithRequests "Expected job to fail but it succeeded" requests + +assertHasComment :: String -> RunResult -> Aff Unit +assertHasComment text { requests } = + unless (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Expected '" <> text <> "' comment but not found") requests + +assertNoComment :: String -> RunResult -> Aff Unit +assertNoComment text { requests } = + when (hasComment text (commentRequests requests)) do + WireMock.failWithRequests ("Did not expect '" <> text <> "' comment") requests + +assertIssueClosed :: RunResult -> Aff Unit +assertIssueClosed { requests } = + when (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to be closed, but no close request was made" requests + +assertIssueOpen :: RunResult -> Aff Unit +assertIssueOpen { requests } = + unless (Array.null (closeRequests requests)) do + WireMock.failWithRequests "Expected issue to remain open, but a close request was made" requests + +assertTeamsApiCalled :: RunResult -> Aff Unit +assertTeamsApiCalled { requests } = + when (Array.null (teamsRequests requests)) do + WireMock.failWithRequests "Expected GitHub Teams API to be called, but no such request was seen" requests diff --git a/app-e2e/src/Test/E2E/Main.purs b/app-e2e/src/Test/E2E/Main.purs index 7bc030d76..bbd7f3212 100644 --- a/app-e2e/src/Test/E2E/Main.purs +++ b/app-e2e/src/Test/E2E/Main.purs @@ -5,6 +5,7 @@ import Prelude import Data.Maybe (Maybe(..)) import Data.Time.Duration (Milliseconds(..)) import Effect (Effect) +import Test.E2E.GitHubIssue as Test.E2E.GitHubIssue import Test.E2E.Publish as Test.E2E.Publish import Test.Spec as Spec import Test.Spec.Reporter.Console (consoleReporter) @@ -15,6 +16,7 @@ main :: Effect Unit main = runSpecAndExitProcess' config [ consoleReporter ] do Spec.describe "E2E Tests" do Spec.describe "Publish" Test.E2E.Publish.spec + Spec.describe "GitHubIssue" Test.E2E.GitHubIssue.spec where config = { defaultConfig: Cfg.defaultConfig { timeout = Just $ Milliseconds 120_000.0 } diff --git a/app-e2e/src/Test/E2E/Publish.purs 
b/app-e2e/src/Test/E2E/Publish.purs index d06289340..4168e1610 100644 --- a/app-e2e/src/Test/E2E/Publish.purs +++ b/app-e2e/src/Test/E2E/Publish.purs @@ -6,16 +6,16 @@ import Prelude import Data.Array as Array import Data.Either (Either(..)) +import Data.Foldable (for_) import Data.Maybe (Maybe(..), isJust) import Data.String as String import Effect.Aff (Aff) import Effect.Class (liftEffect) import Effect.Class.Console as Console import Registry.API.V1 as V1 -import Registry.Location as Registry.Location import Registry.Test.Assert as Assert import Registry.Test.E2E.Client as Client -import Registry.Test.Utils as Utils +import Registry.Test.E2E.Fixtures as Fixtures import Test.Spec (Spec) import Test.Spec as Spec @@ -41,26 +41,11 @@ spec = do Right _ -> pure unit -- Jobs list may not be empty if other tests ran Spec.describe "Publish workflow" do - Spec.it "can publish effect@4.0.0" do + Spec.it "can publish effect@4.0.0 and filter logs" do config <- getConfig - let - -- Location must match what's in the fixture metadata - effectLocation = Registry.Location.GitHub - { owner: "purescript" - , repo: "purescript-effect" - , subdir: Nothing - } - publishData = - { name: Utils.unsafePackageName "effect" - , location: Just effectLocation - , ref: "v4.0.0" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - , version: Utils.unsafeVersion "4.0.0" - } -- Submit publish request - publishResult <- Client.publish config publishData + publishResult <- Client.publish config Fixtures.effectPublishData case publishResult of Left err -> Assert.fail $ "Failed to submit publish request: " <> Client.printClientError err Right { jobId } -> do @@ -80,6 +65,40 @@ spec = do Assert.fail $ "Job failed with errors:\n" <> String.joinWith "\n" errorMessages Assert.shouldSatisfy (V1.jobInfo job).finishedAt isJust --- Assert.shouldEqual job.jobType JobType.PublishJob --- Assert.shouldEqual job.packageName (Utils.unsafePackageName "effect") --- Assert.shouldEqual job.ref "v4.0.0" + + -- Test log level filtering + allLogsResult <- Client.getJob config jobId (Just V1.Debug) Nothing + case allLogsResult of + Left err -> Assert.fail $ "Failed to get job with DEBUG level: " <> Client.printClientError err + Right allLogsJob -> do + let allLogs = (V1.jobInfo allLogsJob).logs + + infoLogsResult <- Client.getJob config jobId (Just V1.Info) Nothing + case infoLogsResult of + Left err -> Assert.fail $ "Failed to get job with INFO level: " <> Client.printClientError err + Right infoLogsJob -> do + let infoLogs = (V1.jobInfo infoLogsJob).logs + let debugOnlyLogs = Array.filter (\l -> l.level == V1.Debug) allLogs + + -- INFO logs should not contain any DEBUG logs + let infoContainsDebug = Array.any (\l -> l.level == V1.Debug) infoLogs + when infoContainsDebug do + Assert.fail "INFO level filter returned DEBUG logs" + + -- If there were DEBUG logs, INFO result should be smaller + when (Array.length debugOnlyLogs > 0) do + Assert.shouldSatisfy (Array.length infoLogs) (_ < Array.length allLogs) + + -- Test timestamp filtering + let logs = (V1.jobInfo job).logs + when (Array.length logs >= 2) do + case Array.index logs 0 of + Nothing -> pure unit + Just firstLog -> do + sinceResult <- Client.getJob config jobId (Just V1.Debug) (Just firstLog.timestamp) + case sinceResult of + Left err -> Assert.fail $ "Failed to get job with since filter: " <> Client.printClientError err + Right sinceJob -> do + let sinceLogs = (V1.jobInfo sinceJob).logs + for_ sinceLogs \l -> + Assert.shouldSatisfy l.timestamp (_ >= firstLog.timestamp) 
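+
+-- Note: the `since` argument above corresponds to the `?since=<iso8601>` query
+-- parameter on GET /v1/jobs/:jobId, so the final assertion exercises the
+-- server-side log cutoff rather than any client-side filtering.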
diff --git a/app/src/App/Effect/Env.purs b/app/src/App/Effect/Env.purs index e832d4b84..cd7880602 100644 --- a/app/src/App/Effect/Env.purs +++ b/app/src/App/Effect/Env.purs @@ -30,6 +30,7 @@ type ResourceEnv = , s3BucketUrl :: URL , githubApiUrl :: URL , pursuitApiUrl :: URL + , registryApiUrl :: URL , healthchecksUrl :: Maybe URL } @@ -55,6 +56,7 @@ lookupResourceEnv = do s3BucketUrlEnv <- lookupWithDefault s3BucketUrl productionS3BucketUrl githubApiUrlEnv <- lookupWithDefault githubApiUrl productionGitHubApiUrl pursuitApiUrlEnv <- lookupWithDefault pursuitApiUrl productionPursuitApiUrl + registryApiUrlEnv <- lookupWithDefault registryApiUrl productionRegistryApiUrl -- Optional - if not set, healthcheck pinging is disabled healthchecksUrlEnv <- lookupOptional healthchecksUrl @@ -65,6 +67,7 @@ lookupResourceEnv = do , s3BucketUrl: s3BucketUrlEnv , githubApiUrl: githubApiUrlEnv , pursuitApiUrl: pursuitApiUrlEnv + , registryApiUrl: registryApiUrlEnv , healthchecksUrl: healthchecksUrlEnv } @@ -209,6 +212,12 @@ githubApiUrl = EnvKey { key: "GITHUB_API_URL", decode: pure } pursuitApiUrl :: EnvKey URL pursuitApiUrl = EnvKey { key: "PURSUIT_API_URL", decode: pure } +-- | Override for the Registry API URL. +-- | If not set, uses productionRegistryApiUrl. +-- | Set this to point to the local server during testing. +registryApiUrl :: EnvKey URL +registryApiUrl = EnvKey { key: "REGISTRY_API_URL", decode: pure } + -- Production URL defaults (only used by the app, not exposed to library users) -- | The URL of the package storage backend (S3-compatible) @@ -227,6 +236,10 @@ productionGitHubApiUrl = "https://api.github.com" productionPursuitApiUrl :: URL productionPursuitApiUrl = "https://pursuit.purescript.org" +-- | The Registry API base URL +productionRegistryApiUrl :: URL +productionRegistryApiUrl = "https://registry.purescript.org/api" + -- | The URL of the health checks endpoint. -- | Optional - if not set, healthcheck pinging is disabled. healthchecksUrl :: EnvKey URL diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 3764398cf..ced1add6a 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -1,3 +1,12 @@ +-- | A thin client that proxies GitHub issue operations to the registry API server. +-- | +-- | When a GitHub issue is created or commented on in the purescript/registry repo, +-- | this module: +-- | 1. Parses the issue body to determine the operation type +-- | 2. Re-signs authenticated operations with pacchettibotti keys if submitted by a trustee +-- | 3. POSTs the operation to the registry API server +-- | 4. Polls for job completion, posting logs as GitHub comments +-- | 5. 
Closes the issue on success module Registry.App.GitHubIssue where import Registry.App.Prelude @@ -5,121 +14,245 @@ import Registry.App.Prelude import Codec.JSON.DecodeError as CJ.DecodeError import Data.Array as Array import Data.Codec.JSON as CJ -import Data.Foldable (traverse_) +import Data.DateTime (DateTime) +import Data.Formatter.DateTime as DateTime import Data.String as String import Effect.Aff as Aff import Effect.Class.Console as Console -import Effect.Ref as Ref +import Fetch (Method(..)) +import Fetch as Fetch import JSON as JSON import JSON.Object as CJ.Object import Node.FS.Aff as FS.Aff import Node.Path as Path import Node.Process as Process +import Registry.API.V1 as V1 import Registry.App.API as API import Registry.App.Auth as Auth -import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment -import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV) +import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) import Registry.App.Effect.Log as Log -import Registry.App.Effect.PackageSets as PackageSets -import Registry.App.Effect.Pursuit as Pursuit -import Registry.App.Effect.Registry as Registry -import Registry.App.Effect.Source as Source -import Registry.App.Effect.Storage as Storage -import Registry.App.Legacy.Manifest as Legacy.Manifest import Registry.Constants as Constants -import Registry.Foreign.FSExtra as FS.Extra import Registry.Foreign.JsonRepair as JsonRepair import Registry.Foreign.Octokit (GitHubToken, IssueNumber(..), Octokit) import Registry.Foreign.Octokit as Octokit -import Registry.Foreign.S3 (SpaceKey) -import Registry.Operation (AuthenticatedData, PackageOperation(..), PackageSetOperation(..)) +import Registry.Internal.Format as Internal.Format +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PackageOperation(..), PackageSetOperation(..)) import Registry.Operation as Operation -import Run (Run) +import Run (AFF, EFFECT, Run) import Run as Run import Run.Except (EXCEPT) import Run.Except as Except main :: Effect Unit main = launchAff_ $ do - -- For now we only support GitHub events, and no formal API, so we'll jump - -- straight into the GitHub event workflow. - initializeGitHub >>= traverse_ \env -> do - let - run = case env.operation of - Left packageSetOperation -> case packageSetOperation of - PackageSetUpdate payload -> - API.packageSetUpdate payload - - Right packageOperation -> case packageOperation of - Publish payload -> - void $ API.publish Nothing payload - Authenticated payload -> do - -- If we receive an authenticated operation via GitHub, then we - -- re-sign it with pacchettibotti credentials if and only if the - -- operation was opened by a trustee. 
- signed <- signPacchettiBottiIfTrustee payload - API.authenticated signed - - -- Caching - let cache = Path.concat [ scratchDir, ".cache" ] - FS.Extra.ensureDirectory cache - githubCacheRef <- Cache.newCacheRef - legacyCacheRef <- Cache.newCacheRef - registryCacheRef <- Cache.newCacheRef - - -- Registry env - debouncer <- Registry.newDebouncer - let - registryEnv :: Registry.RegistryEnv - registryEnv = - { repos: Registry.defaultRepos - , pull: Git.ForceClean - , write: Registry.CommitAs (Git.pacchettibottiCommitter env.token) - , workdir: scratchDir - , debouncer - , cacheRef: registryCacheRef - } - - -- Package sets - let workdir = Path.concat [ scratchDir, "package-sets-work" ] - FS.Extra.ensureDirectory workdir + initializeGitHub >>= case _ of + Nothing -> pure unit + Just env -> do + result <- runGitHubIssue env + case result of + Left err -> do + -- Post error as comment and exit with failure + void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "❌ " <> err + } + liftEffect $ Process.exit' 1 + Right _ -> + -- Issue closing is handled inside runGitHubIssue + pure unit - thrownRef <- liftEffect $ Ref.new false +runGitHubIssue :: GitHubEventEnv -> Aff (Either String Boolean) +runGitHubIssue env = do + let cache = Path.concat [ scratchDir, ".cache" ] + githubCacheRef <- Cache.newCacheRef - run - -- App effects - # PackageSets.interpret (PackageSets.handle { workdir }) - # Registry.interpret (Registry.handle registryEnv) - # Storage.interpret (Storage.handleS3 { s3: env.spacesConfig, cache }) - # Pursuit.interpret (Pursuit.handleAff env.token) - # Source.interpret (Source.handle Source.Recent) + let + run :: forall a. Run (GITHUB + RESOURCE_ENV + PACCHETTIBOTTI_ENV + GITHUB_EVENT_ENV + LOG + EXCEPT String + AFF + EFFECT + ()) a -> Aff (Either String a) + run action = action # GitHub.interpret (GitHub.handle { octokit: env.octokit, cache, ref: githubCacheRef }) - -- Caching & logging - # Cache.interpret Legacy.Manifest._legacyCache (Cache.handleMemoryFs { cache, ref: legacyCacheRef }) - # Cache.interpret API._compilerCache (Cache.handleFs cache) - # Except.catch (\msg -> Log.error msg *> Comment.comment msg *> Run.liftEffect (Ref.write true thrownRef)) - # Comment.interpret (Comment.handleGitHub { octokit: env.octokit, issue: env.issue, registry: Registry.defaultRepos.registry }) - # Log.interpret (Log.handleTerminal Verbose) - -- Environments + # Except.runExcept # Env.runResourceEnv env.resourceEnv # Env.runGitHubEventEnv { username: env.username, issue: env.issue } # Env.runPacchettiBottiEnv { publicKey: env.publicKey, privateKey: env.privateKey } - -- Base effects + # Log.interpret (Log.handleTerminal env.logVerbosity) # Run.runBaseAff' - liftEffect (Ref.read thrownRef) >>= case _ of - true -> - liftEffect $ Process.exit' 1 - _ -> do - -- After the run, close the issue. If an exception was thrown then the issue will remain open. 
- _ <- Octokit.request env.octokit (Octokit.closeIssueRequest { address: Constants.registry, issue: env.issue }) - pure unit + run do + -- Determine endpoint and prepare the JSON payload + { endpoint, jsonBody } <- case env.operation of + Left (PackageSetUpdate payload) -> pure + { endpoint: "/v1/package-sets" + , jsonBody: JSON.print $ CJ.encode Operation.packageSetUpdateCodec payload + } + + Right (Publish payload) -> pure + { endpoint: "/v1/publish" + , jsonBody: JSON.print $ CJ.encode Operation.publishCodec payload + } + + Right (Authenticated auth) -> do + -- Re-sign with pacchettibotti if submitter is a trustee + signed <- signPacchettiBottiIfTrustee auth + let endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" + pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } + + -- Submit to the registry API + let registryApiUrl = env.resourceEnv.registryApiUrl + Log.debug $ "Submitting to " <> registryApiUrl <> endpoint + submitResult <- Run.liftAff $ submitJob (registryApiUrl <> endpoint) jsonBody + case submitResult of + Left err -> Except.throw $ "Failed to submit job: " <> err + Right { jobId } -> do + let jobIdStr = unwrap jobId + Log.debug $ "Job created: " <> jobIdStr + + -- Post initial comment with job ID + Run.liftAff $ void $ Octokit.request env.octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue: env.issue + , body: "Job started: `" <> jobIdStr <> "`\nLogs: " <> registryApiUrl <> "/v1/jobs/" <> jobIdStr + } + + -- Poll for completion, posting logs as comments + pollAndReport env.octokit env.issue env.pollConfig registryApiUrl jobId + +-- | Submit a job to the registry API +submitJob :: String -> String -> Aff (Either String V1.JobCreatedResponse) +submitJob url body = do + result <- Aff.attempt $ Fetch.fetch url + { method: POST + , headers: { "Content-Type": "application/json" } + , body + } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status >= 200 && response.status < 300 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCreatedResponseCodec json) of + Left err -> pure $ Left $ "Failed to parse response: " <> err + Right r -> pure $ Right r + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Poll a job until it completes, posting logs as GitHub comments. +-- | Returns true if the job succeeded, false otherwise. +pollAndReport + :: forall r + . 
Octokit + -> IssueNumber + -> PollConfig + -> URL + -> V1.JobId + -> Run (LOG + EXCEPT String + AFF + r) Boolean +pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 + where + maxConsecutiveErrors :: Int + maxConsecutiveErrors = 5 + + go :: Maybe DateTime -> Int -> Int -> Run (LOG + EXCEPT String + AFF + r) Boolean + go lastTimestamp attempt consecutiveErrors + | attempt >= pollConfig.maxAttempts = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "⏱️ Job timed out" + } + pure false + | consecutiveErrors >= maxConsecutiveErrors = do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "❌ Failed to poll job status after " <> show maxConsecutiveErrors <> " consecutive errors" + } + pure false + | otherwise = do + Run.liftAff $ Aff.delay pollConfig.interval + result <- Run.liftAff $ fetchJob registryApiUrl jobId lastTimestamp + case result of + Left err -> do + Log.error $ "Error polling job: " <> err + go lastTimestamp (attempt + 1) (consecutiveErrors + 1) + Right job -> do + let info = V1.jobInfo job + + -- Post any new logs (filtered to Info level and above, and after lastTimestamp) + let + newLogs = Array.filter isNewLog info.logs + isNewLog l = l.level >= V1.Info && case lastTimestamp of + Nothing -> true + Just ts -> l.timestamp > ts + unless (Array.null newLogs) do + let + formatLog l = "[" <> V1.printLogLevel l.level <> "] " <> l.message + logText = String.joinWith "\n" $ map formatLog newLogs + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: "```\n" <> logText <> "\n```" + } + + -- Check if job is done + case info.finishedAt of + Just _ -> do + let statusMsg = if info.success then "✅ Job completed successfully" else "❌ Job failed" + Run.liftAff $ void $ Octokit.request octokit $ Octokit.createCommentRequest + { address: Constants.registry + , issue + , body: statusMsg + } + -- Close the issue on success, leave open on failure + when info.success do + Run.liftAff $ void $ Octokit.request octokit $ Octokit.closeIssueRequest + { address: Constants.registry + , issue + } + pure info.success + Nothing -> do + -- Continue polling with updated timestamp, reset consecutive errors on success + let newTimestamp = Array.last newLogs <#> _.timestamp + go (newTimestamp <|> lastTimestamp) (attempt + 1) 0 + +-- | Fetch job status from the API +fetchJob :: String -> V1.JobId -> Maybe DateTime -> Aff (Either String V1.Job) +fetchJob registryApiUrl (V1.JobId jobId) since = do + let + baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId + url = case since of + Nothing -> baseUrl <> "?level=INFO" + Just ts -> baseUrl <> "?level=INFO&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + result <- Aff.attempt $ Fetch.fetch url { method: GET } + case result of + Left err -> pure $ Left $ "Network error: " <> Aff.message err + Right response -> do + responseBody <- response.text + if response.status == 200 then + case JSON.parse responseBody >>= \json -> lmap CJ.DecodeError.print (CJ.decode V1.jobCodec json) of + Left err -> pure $ Left $ "Failed to parse job: " <> err + Right job -> pure $ Right job + else + pure $ Left $ "HTTP " <> show response.status <> ": " <> responseBody + +-- | Configuration for polling job status +type PollConfig = + { maxAttempts :: Int + , interval :: Aff.Milliseconds + } + +-- | Default poll config: 30 minutes at 
5 second intervals +defaultPollConfig :: PollConfig +defaultPollConfig = + { maxAttempts: 360 + , interval: Aff.Milliseconds 5000.0 + } type GitHubEventEnv = { octokit :: Octokit @@ -127,10 +260,11 @@ type GitHubEventEnv = , issue :: IssueNumber , username :: String , operation :: Either PackageSetOperation PackageOperation - , spacesConfig :: SpaceKey , publicKey :: String , privateKey :: String , resourceEnv :: Env.ResourceEnv + , pollConfig :: PollConfig + , logVerbosity :: LogVerbosity } initializeGitHub :: Aff (Maybe GitHubEventEnv) @@ -138,17 +272,12 @@ initializeGitHub = do token <- Env.lookupRequired Env.pacchettibottiToken publicKey <- Env.lookupRequired Env.pacchettibottiED25519Pub privateKey <- Env.lookupRequired Env.pacchettibottiED25519 - spacesKey <- Env.lookupRequired Env.spacesKey - spacesSecret <- Env.lookupRequired Env.spacesSecret resourceEnv <- Env.lookupResourceEnv eventPath <- Env.lookupRequired Env.githubEventPath octokit <- Octokit.newOctokit token resourceEnv.githubApiUrl readOperation eventPath >>= case _ of - -- If the issue body is not just a JSON string, then we don't consider it - -- to be an attempted operation and it is presumably just an issue on the - -- registry repository. NotJson -> pure Nothing @@ -173,10 +302,11 @@ initializeGitHub = do , issue , username , operation - , spacesConfig: { key: spacesKey, secret: spacesSecret } , publicKey , privateKey , resourceEnv + , pollConfig: defaultPollConfig + , logVerbosity: Verbose } data OperationDecoding @@ -192,16 +322,11 @@ readOperation eventPath = do IssueEvent { issueNumber, body, username } <- case JSON.parse fileContents >>= decodeIssueEvent of Left err -> - -- If we don't receive a valid event path or the contents can't be decoded - -- then this is a catastrophic error and we exit the workflow. Aff.throwError $ Aff.error $ "Error while parsing json from " <> eventPath <> " : " <> err Right event -> pure event let - -- TODO: Right now we parse all operations from GitHub issues, but we should - -- in the future only parse out package set operations. The others should be - -- handled via a HTTP API. decodeOperation :: JSON -> Either CJ.DecodeError (Either PackageSetOperation PackageOperation) decodeOperation json = do object <- CJ.decode CJ.jobject json @@ -230,10 +355,6 @@ readOperation eventPath = do Right operation -> pure $ DecodedOperation issueNumber username operation --- | Users may submit issues with contents wrapped in code fences, perhaps with --- | a language specifier, trailing lines, and other issues. This rudimentary --- | cleanup pass retrieves all contents within an opening { and closing } --- | delimiter. firstObject :: String -> String firstObject input = fromMaybe input do before <- String.indexOf (String.Pattern "{") input @@ -241,9 +362,6 @@ firstObject input = fromMaybe input do after <- String.lastIndexOf (String.Pattern "}") start pure (String.take (after + 1) start) --- | An event triggered by a GitHub workflow, specifically via an issue comment --- | or issue creation. --- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment newtype IssueEvent = IssueEvent { issueNumber :: IssueNumber , body :: String @@ -260,22 +378,9 @@ decodeIssueEvent json = lmap CJ.DecodeError.print do issueObject <- Octokit.atKey "issue" CJ.jobject object issueNumber <- Octokit.atKey "number" CJ.int issueObject - -- We accept issue creation and issue comment events, but both contain an - -- 'issue' field. 
However, only comments contain a 'comment' field. For that - -- reason we first try to parse the comment and fall back to the issue if - -- that fails. body <- Octokit.atKey "body" CJ.string =<< Octokit.atKey "comment" CJ.jobject object <|> pure issueObject pure $ IssueEvent { body, username, issueNumber: IssueNumber issueNumber } --- | Re-sign a payload as pacchettibotti if the authenticated operation was --- | submitted by a registry trustee. --- --- @pacchettibotti is considered an 'owner' of all packages for authenticated --- operations. Registry trustees can ask pacchettibotti to perform an action on --- behalf of a package by submitting a payload with an empty signature. If the --- payload was submitted by a trustee (ie. a member of the packaging team) then --- pacchettibotti will re-sign it and add itself as an owner before continuing --- with the authenticated operation. signPacchettiBottiIfTrustee :: forall r . AuthenticatedData diff --git a/app/src/App/SQLite.js b/app/src/App/SQLite.js index bbad2ae78..8b0a1765e 100644 --- a/app/src/App/SQLite.js +++ b/app/src/App/SQLite.js @@ -94,6 +94,7 @@ export const insertPackageSetJobImpl = (db, job) => { }; const _selectJob = (db, { table, jobId }) => { + const params = []; let query = ` SELECT job.*, info.* FROM ${table} job diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index 814b2b82c..bf7bd3f69 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -42,6 +42,7 @@ module Registry.App.SQLite import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError +import Data.Array as Array import Control.Monad.Except (runExceptT) import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime @@ -191,15 +192,22 @@ selectJob db { level: maybeLogLevel, since, jobId: JobId jobId } = do let logLevel = fromMaybe Error maybeLogLevel { fail, success: logs } <- selectLogsByJob db (JobId jobId) logLevel since case fail of - [] -> runExceptT - ( selectPublishJob logs - <|> selectMatrixJob logs - <|> selectTransferJob logs - <|> selectPackageSetJob logs - <|> selectUnpublishJob logs - ) + [] -> runExceptT $ firstJust + [ selectPublishJob logs + , selectMatrixJob logs + , selectTransferJob logs + , selectPackageSetJob logs + , selectUnpublishJob logs + ] _ -> pure $ Left $ "Some logs are not readable: " <> String.joinWith "\n" fail where + firstJust :: Array (ExceptT String Effect (Maybe Job)) -> ExceptT String Effect (Maybe Job) + firstJust = Array.foldl go (pure Nothing) + where + go acc next = acc >>= case _ of + Just job -> pure (Just job) + Nothing -> next + selectPublishJob logs = ExceptT do maybeJobDetails <- map toMaybe $ Uncurried.runEffectFn2 selectPublishJobImpl db (Nullable.notNull jobId) pure $ traverse diff --git a/app/src/App/Server/JobExecutor.purs b/app/src/App/Server/JobExecutor.purs index 35e6a3991..2ede4307a 100644 --- a/app/src/App/Server/JobExecutor.purs +++ b/app/src/App/Server/JobExecutor.purs @@ -64,8 +64,10 @@ runJobExecutor env = runEffects env do -- We race the job execution against a timeout; if the timeout happens first, -- we kill the job and move on to the next one. + -- Note: we set env.jobId so that logs are written to the database. 
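+  -- The timeout arm yields Nothing while a completed run yields Just, so
+  -- whichever side wins the race tells us whether the job timed out.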
jobResult <- liftAff do - let execute = Just <$> (runEffects env $ executeJob now job) + let envWithJobId = env { jobId = Just jobId } + let execute = Just <$> (runEffects envWithJobId $ executeJob now job) let delay = 1000.0 * 60.0 * 5.0 -- 5 minutes let timeout = Aff.delay (Milliseconds delay) $> Nothing Parallel.sequential $ Parallel.parallel execute <|> Parallel.parallel timeout diff --git a/flake.nix b/flake.nix index 56a98f696..9aacbaa80 100644 --- a/flake.nix +++ b/flake.nix @@ -216,10 +216,13 @@ name = "registry-dev"; inherit GIT_LFS_SKIP_SMUDGE; - # Development defaults from .env.example SERVER_PORT = envDefaults.SERVER_PORT; DATABASE_URL = envDefaults.DATABASE_URL; + # NOTE: Test-specific env vars (REGISTRY_API_URL, GITHUB_API_URL, PACCHETTIBOTTI_*) + # are NOT set here to avoid conflicting with .env files used by production scripts + # like legacy-importer. Use `nix run .#test-env` to run E2E tests with mocked services. + packages = with pkgs; registry-runtime-deps diff --git a/nix/test/config.nix b/nix/test/config.nix index 454747b30..441d67765 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -30,6 +30,7 @@ let # Mock service URLs for test environment mockUrls = { + registry = "http://localhost:${toString ports.server}/api"; github = "http://localhost:${toString ports.github}"; s3 = "http://localhost:${toString ports.s3}"; bucket = "http://localhost:${toString ports.bucket}"; @@ -43,6 +44,7 @@ let # implemented in the script directly. testEnv = envDefaults // { # Mock service URLs (override production endpoints) + REGISTRY_API_URL = mockUrls.registry; GITHUB_API_URL = mockUrls.github; S3_API_URL = mockUrls.s3; S3_BUCKET_URL = mockUrls.bucket; @@ -54,6 +56,16 @@ let env: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: value: ''export ${name}="${value}"'') env); + # Pre-built shell exports for E2E test runners (used by test-env.nix and integration.nix) + testRunnerExports = '' + export SERVER_PORT="${toString ports.server}" + export REGISTRY_API_URL="${testEnv.REGISTRY_API_URL}" + export GITHUB_API_URL="${testEnv.GITHUB_API_URL}" + export PACCHETTIBOTTI_TOKEN="${testEnv.PACCHETTIBOTTI_TOKEN}" + export PACCHETTIBOTTI_ED25519_PUB="${testEnv.PACCHETTIBOTTI_ED25519_PUB}" + export PACCHETTIBOTTI_ED25519="${testEnv.PACCHETTIBOTTI_ED25519}" + ''; + # Git mock that redirects URLs to local fixtures; this is necessary because otherwise # commands would reach out to GitHub or the other package origins. 
gitMock = pkgs.writeShellScriptBin "git" '' @@ -153,6 +165,51 @@ let }; }; } + # Accept issue comment creation (used by GitHubIssue workflow) + { + request = { + method = "POST"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+/comments"; + }; + response = { + status = 201; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + body = "ok"; + }; + }; + } + # Accept issue closing (used by GitHubIssue workflow) + { + request = { + method = "PATCH"; + urlPattern = "/repos/purescript/registry/issues/[0-9]+"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + jsonBody = { + id = 1; + state = "closed"; + }; + }; + } + # GitHub Teams API for trustee verification (used by GitHubIssue workflow) + { + request = { + method = "GET"; + urlPattern = "/orgs/purescript/teams/packaging/members.*"; + }; + response = { + status = 200; + headers."Content-Type" = "application/json"; + # Return packaging-team-user as a packaging team member for trustee re-signing tests + jsonBody = [ + { login = "packaging-team-user"; id = 1; } + ]; + }; + } ]; # S3 API wiremock mappings (serves package tarballs) @@ -477,6 +534,7 @@ in defaultStateDir mockUrls testEnv + testRunnerExports envToExports gitMock gitMockOverlay diff --git a/nix/test/integration.nix b/nix/test/integration.nix index 5f323a3f8..b178bfdf4 100644 --- a/nix/test/integration.nix +++ b/nix/test/integration.nix @@ -57,7 +57,10 @@ else set -e export HOME=$TMPDIR export STATE_DIR=$TMPDIR/state - export SERVER_PORT=${toString ports.server} + + # Export test environment variables for E2E test runners + ${testEnv.testConfig.testRunnerExports} + mkdir -p $STATE_DIR # Start wiremock services diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index 424f71364..e27ef6376 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -95,7 +95,8 @@ let testEnvScript = pkgs.writeShellScriptBin "test-env" '' set -e - export SERVER_PORT="${toString ports.server}" + # Export test environment variables for E2E test runners + ${testConfig.testRunnerExports} if [ -z "''${STATE_DIR:-}" ]; then STATE_DIR="$(mktemp -d)" diff --git a/spago.lock b/spago.lock index 83d2afb8d..a6dbae907 100644 --- a/spago.lock +++ b/spago.lock @@ -313,12 +313,20 @@ "dependencies": [ "aff", "arrays", + "codec-json", "console", "datetime", "effect", "either", + "foldable-traversable", + "json", "maybe", + "node-fs", + "node-path", + "node-process", "prelude", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "spec", @@ -327,6 +335,7 @@ ], "build_plan": [ "aff", + "aff-promise", "ansi", "argonaut-codecs", "argonaut-core", @@ -334,6 +343,7 @@ "arrays", "assert", "avar", + "b64", "bifunctors", "catenable-lists", "codec", @@ -342,15 +352,21 @@ "const", "contravariant", "control", + "convertable-options", "datetime", + "debug", "distributive", + "dodo-printer", + "dotenv", "effect", "either", + "encoding", "enums", "exceptions", "exists", "exitcodes", "fetch", + "filterable", "fixed-points", "foldable-traversable", "foreign", @@ -362,7 +378,9 @@ "functors", "gen", "graphs", + "heterogeneous", "http-methods", + "httpurple", "identity", "integers", "invariant", @@ -370,27 +388,39 @@ "js-fetch", "js-promise", "js-promise-aff", + "js-timers", "js-uri", "json", + "justifill", "language-cst-parser", "lazy", "lcg", "lists", + "literals", "maybe", "media-types", "mmorph", "newtype", "node-buffer", + "node-child-process", "node-event-emitter", + "node-execa", "node-fs", + "node-http", + "node-human-signals", + 
"node-net", + "node-os", "node-path", "node-process", "node-streams", + "node-tls", + "node-url", "nonempty", "now", "nullable", "numbers", "open-memoize", + "options", "optparse", "ordered-collections", "orders", @@ -402,19 +432,26 @@ "prelude", "profunctor", "profunctor-lenses", + "psci-support", "quickcheck", + "quickcheck-laws", "random", "record", + "record-studio", "refs", + "registry-app", + "registry-foreign", "registry-lib", "registry-test-utils", "routing-duplex", + "run", "safe-coerce", "spec", "spec-node", "st", "strings", "tailrec", + "these", "transformers", "tuples", "type-equality", @@ -422,6 +459,9 @@ "unfoldable", "unicode", "unsafe-coerce", + "unsafe-reference", + "untagged-union", + "uuidv4", "variant", "web-dom", "web-events", diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs new file mode 100644 index 000000000..c69af3645 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/Fixtures.purs @@ -0,0 +1,70 @@ +-- | Test fixtures for E2E tests. +-- | Contains package operation data used across multiple test suites. +module Registry.Test.E2E.Fixtures + ( effectPublishData + , failingPublishData + , trusteeAuthenticatedData + ) where + +import Prelude + +import Data.Codec.JSON as CJ +import Data.Maybe (Maybe(..)) +import JSON as JSON +import Registry.Location as Location +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData, UnpublishData) +import Registry.Operation as Operation +import Registry.SSH (Signature(..)) +import Registry.Test.Utils as Utils + +-- | Standard publish data for effect@4.0.0, used by E2E tests. +-- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 +effectPublishData :: PublishData +effectPublishData = + { name: Utils.unsafePackageName "effect" + , location: Just $ Location.GitHub + { owner: "purescript" + , repo: "purescript-effect" + , subdir: Nothing + } + , ref: "v4.0.0" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: Utils.unsafeVersion "4.0.0" + } + +-- | Publish data for prelude@6.0.1, which already exists in metadata fixtures. +-- | Used to test failure scenarios (duplicate publish) in E2E tests. +failingPublishData :: PublishData +failingPublishData = + { name: Utils.unsafePackageName "prelude" + , location: Just $ Location.GitHub + { owner: "purescript" + , repo: "purescript-prelude" + , subdir: Nothing + } + , ref: "v6.0.1" + , compiler: Utils.unsafeVersion "0.15.9" + , resolutions: Nothing + , version: Utils.unsafeVersion "6.0.1" + } + +-- | Authenticated data with an intentionally invalid signature. +-- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it. +-- | If re-signing works, the job succeeds; if not, signature verification fails. +-- | Uses prelude@6.0.1 which exists in app/fixtures/registry/metadata/prelude.json. 
+trusteeAuthenticatedData :: AuthenticatedData +trusteeAuthenticatedData = + let + unpublishPayload :: UnpublishData + unpublishPayload = + { name: Utils.unsafePackageName "prelude" + , version: Utils.unsafeVersion "6.0.1" + , reason: "Testing trustee re-signing" + } + rawPayload = JSON.print $ CJ.encode Operation.unpublishCodec unpublishPayload + in + { payload: Unpublish unpublishPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } diff --git a/test-utils/src/Registry/Test/E2E/WireMock.purs b/test-utils/src/Registry/Test/E2E/WireMock.purs new file mode 100644 index 000000000..6895d9e44 --- /dev/null +++ b/test-utils/src/Registry/Test/E2E/WireMock.purs @@ -0,0 +1,164 @@ +-- | WireMock admin API client for verifying HTTP requests in E2E tests. +-- | +-- | This module provides helpers to query WireMock's request journal, allowing +-- | tests to assert on what HTTP requests were made to mock services. +module Registry.Test.E2E.WireMock + ( WireMockConfig + , WireMockRequest + , WireMockError(..) + , configFromEnv + , getRequests + , getRequestsOrFail + , clearRequests + , clearRequestsOrFail + , filterByMethod + , filterByUrlContaining + , printWireMockError + , formatRequests + , failWithRequests + ) where + +import Prelude + +import Control.Monad.Error.Class (class MonadThrow, throwError) +import Control.Monad.Except (runExceptT) +import Control.Monad.Trans.Class (lift) +import Data.Array as Array +import Data.Bifunctor (lmap) +import Data.Codec.JSON as CJ +import Data.Codec.JSON.Record as CJ.Record +import Data.Either (Either(..)) +import Data.Int as Int +import Data.Maybe (Maybe(..)) +import Data.String as String +import Effect (Effect) +import Effect.Aff (Aff) +import Effect.Aff as Aff +import Effect.Exception as Effect.Exception +import Fetch (Method(..)) +import Fetch as Fetch +import Effect.Exception (Error) +import JSON as JSON +import Node.Process as Process +import Codec.JSON.DecodeError as CJ.DecodeError + +-- | Configuration for connecting to WireMock admin API +type WireMockConfig = + { baseUrl :: String + } + +-- | A recorded request from WireMock's journal +type WireMockRequest = + { method :: String + , url :: String + , body :: Maybe String + } + +-- | Error type for WireMock operations +data WireMockError + = HttpError { status :: Int, body :: String } + | ParseError { msg :: String, raw :: String } + +printWireMockError :: WireMockError -> String +printWireMockError = case _ of + HttpError { status, body } -> "HTTP Error " <> Int.toStringAs Int.decimal status <> ": " <> body + ParseError { msg, raw } -> "Parse Error: " <> msg <> "\nOriginal: " <> raw + +-- | Create config from GITHUB_API_URL environment variable. +-- | Convenience for tests that need to inspect GitHub mock requests. +-- | Each WireMock instance has its own admin API on the same port. +configFromEnv :: Effect WireMockConfig +configFromEnv = do + maybeUrl <- Process.lookupEnv "GITHUB_API_URL" + case maybeUrl of + Nothing -> Effect.Exception.throw "GITHUB_API_URL environment variable is not set." 
+ Just baseUrl -> pure { baseUrl } + +-- | Codec for a single request entry in WireMock's response +requestCodec :: CJ.Codec WireMockRequest +requestCodec = CJ.named "WireMockRequest" $ CJ.Record.object + { method: CJ.string + , url: CJ.string + , body: CJ.Record.optional CJ.string + } + +-- | Codec for the nested request object in WireMock's journal response +journalEntryCodec :: CJ.Codec { request :: WireMockRequest } +journalEntryCodec = CJ.named "JournalEntry" $ CJ.Record.object + { request: requestCodec + } + +-- | Codec for the full journal response +journalCodec :: CJ.Codec { requests :: Array { request :: WireMockRequest } } +journalCodec = CJ.named "Journal" $ CJ.Record.object + { requests: CJ.array journalEntryCodec + } + +-- | Parse JSON response body using a codec +parseResponse :: forall a. CJ.Codec a -> String -> Either String a +parseResponse codec body = do + json <- lmap (append "JSON parse error: ") $ JSON.parse body + lmap CJ.DecodeError.print $ CJ.decode codec json + +-- | Get all recorded requests from WireMock's journal +getRequests :: WireMockConfig -> Aff (Either WireMockError (Array WireMockRequest)) +getRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: GET } + body <- lift response.text + if response.status == 200 then + case parseResponse journalCodec body of + Left err -> throwError $ ParseError { msg: err, raw: body } + Right journal -> pure $ map _.request journal.requests + else + throwError $ HttpError { status: response.status, body } + +-- | Clear all recorded requests from WireMock's journal +clearRequests :: WireMockConfig -> Aff (Either WireMockError Unit) +clearRequests config = runExceptT do + response <- lift $ Fetch.fetch (config.baseUrl <> "/__admin/requests") { method: DELETE } + if response.status == 200 then + pure unit + else do + body <- lift response.text + throwError $ HttpError { status: response.status, body } + +-- | Get requests, throwing on error. Useful in tests where failure should abort. +getRequestsOrFail :: WireMockConfig -> Aff (Array WireMockRequest) +getRequestsOrFail config = do + result <- getRequests config + case result of + Left err -> + throwError $ Aff.error $ "Failed to get WireMock requests: " <> printWireMockError err + Right rs -> + pure rs + +-- | Clear requests, throwing on error. Useful in test setup. +clearRequestsOrFail :: WireMockConfig -> Aff Unit +clearRequestsOrFail config = do + result <- clearRequests config + case result of + Left err -> + Aff.throwError $ Aff.error $ "Failed to clear WireMock journal: " <> printWireMockError err + Right _ -> + pure unit + +-- | Filter requests by HTTP method +filterByMethod :: String -> Array WireMockRequest -> Array WireMockRequest +filterByMethod method = Array.filter (\r -> r.method == method) + +-- | Filter requests by URL substring +filterByUrlContaining :: String -> Array WireMockRequest -> Array WireMockRequest +filterByUrlContaining substring = Array.filter (\r -> String.contains (String.Pattern substring) r.url) + +-- | Format an array of requests for debugging output +formatRequests :: Array WireMockRequest -> String +formatRequests requests = String.joinWith "\n" $ map formatRequest requests + where + formatRequest r = r.method <> " " <> r.url <> case r.body of + Nothing -> "" + Just b -> "\n Body: " <> b + +-- | Fail a test with a message and debug info about captured requests. +failWithRequests :: forall m a. 
MonadThrow Error m => String -> Array WireMockRequest -> m a +failWithRequests msg requests = throwError $ Effect.Exception.error $ + msg <> "\n\nCaptured requests:\n" <> formatRequests requests From 5ae9449a9d5facd554590298f9e9dc897d87276e Mon Sep 17 00:00:00 2001 From: Thomas Honeyman Date: Mon, 22 Dec 2025 18:18:01 -0500 Subject: [PATCH 17/19] clean up test failures --- .env.example | 16 ---------------- app/src/App/GitHubIssue.purs | 7 ++++--- nix/test/config.nix | 27 +++++++++++++++------------ nix/test/integration.nix | 2 +- nix/test/test-env.nix | 2 +- 5 files changed, 21 insertions(+), 33 deletions(-) diff --git a/.env.example b/.env.example index 4873fe0a0..78a8fbebb 100644 --- a/.env.example +++ b/.env.example @@ -13,22 +13,6 @@ SERVER_PORT=9000 # - Prod: Set to production database path DATABASE_URL="sqlite:db/registry.sqlite3" - -# ----------------------------------------------------------------------------- -# External Service URLs (optional overrides, have production defaults) -# ----------------------------------------------------------------------------- -# These default to production URLs in the app. Set these only when: -# - Running tests (test-env sets these automatically) -# - Using custom/staging infrastructure - -# GITHUB_API_URL="https://api.github.com" -# S3_API_URL="https://packages.registry.purescript.org" -# S3_BUCKET_URL="https://ams3.digitaloceanspaces.com" -# PURSUIT_API_URL="https://pursuit.purescript.org" -# REGISTRY_API_URL="https://registry.purescript.org/api" -# HEALTHCHECKS_URL="https://hc-ping.com/your-uuid" - - # ----------------------------------------------------------------------------- # Secrets (required for production, use dummy values for local dev) # ----------------------------------------------------------------------------- diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index ced1add6a..812cc8131 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -99,9 +99,10 @@ runGitHubIssue env = do Right (Authenticated auth) -> do -- Re-sign with pacchettibotti if submitter is a trustee signed <- signPacchettiBottiIfTrustee auth - let endpoint = case signed.payload of - Unpublish _ -> "/v1/unpublish" - Transfer _ -> "/v1/transfer" + let + endpoint = case signed.payload of + Unpublish _ -> "/v1/unpublish" + Transfer _ -> "/v1/transfer" pure { endpoint, jsonBody: JSON.print $ CJ.encode Operation.authenticatedCodec signed } -- Submit to the registry API diff --git a/nix/test/config.nix b/nix/test/config.nix index 441d67765..3c06276e5 100644 --- a/nix/test/config.nix +++ b/nix/test/config.nix @@ -38,6 +38,15 @@ let healthchecks = "http://localhost:${toString ports.healthchecks}"; }; + # Valid ED25519 test keypair for pacchettibotti (used for signing authenticated operations). + # These are test-only keys, not used in production. 
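+  # A sketch of how a keypair in this shape can be regenerated, assuming
+  # OpenSSH's ssh-keygen and GNU coreutils' base64 (the key comment string is
+  # only a convention):
+  #   ssh-keygen -t ed25519 -C "pacchettibotti@purescript.org" -f pacchettibotti -N ""
+  #   base64 -w0 pacchettibotti.pub   # -> public
+  #   base64 -w0 pacchettibotti       # -> private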
+ testKeys = { + # ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHXE9ia5mQG5dPyS6pirU9PSWFP8hPglwChJERBpMoki pacchettibotti@purescript.org + public = "c3NoLWVkMjU1MTkgQUFBQUMzTnphQzFsWkRJMU5URTVBQUFBSUhYRTlpYTVtUUc1ZFB5UzZwaXJVOVBTV0ZQOGhQZ2x3Q2hKRVJCcE1va2kgcGFjY2hldHRpYm90dGlAcHVyZXNjcmlwdC5vcmcK"; + # OpenSSH format private key + private = "LS0tLS1CRUdJTiBPUEVOU1NIIFBSSVZBVEUgS0VZLS0tLS0KYjNCbGJuTnphQzFyWlhrdGRqRUFBQUFBQkc1dmJtVUFBQUFFYm05dVpRQUFBQUFBQUFBQkFBQUFNd0FBQUF0emMyZ3RaVwpReU5UVXhPUUFBQUNCMXhQWW11WmtCdVhUOGt1cVlxMVBUMGxoVC9JVDRKY0FvU1JFUWFUS0pJZ0FBQUtBMVFMT3NOVUN6CnJBQUFBQXR6YzJndFpXUXlOVFV4T1FBQUFDQjF4UFltdVprQnVYVDhrdXFZcTFQVDBsaFQvSVQ0SmNBb1NSRVFhVEtKSWcKQUFBRUJ1dUErV2NqODlTcjR2RUZnU043ZVF5SGFCWlYvc0F2YVhvVGRKa2lwanlYWEU5aWE1bVFHNWRQeVM2cGlyVTlQUwpXRlA4aFBnbHdDaEpFUkJwTW9raUFBQUFIWEJoWTJOb1pYUjBhV0p2ZEhScFFIQjFjbVZ6WTNKcGNIUXViM0puCi0tLS0tRU5EIE9QRU5TU0ggUFJJVkFURSBLRVktLS0tLQo="; + }; + # Complete test environment - starts with .env.example defaults which include # mock secrets, then overrides external services with mock URLs. The DATABASE_URL # and REPO_FIXTURES_DIR vars are derived from STATE_DIR at runtime so those are @@ -50,22 +59,14 @@ let S3_BUCKET_URL = mockUrls.bucket; PURSUIT_API_URL = mockUrls.pursuit; HEALTHCHECKS_URL = mockUrls.healthchecks; + PACCHETTIBOTTI_ED25519_PUB = testKeys.public; + PACCHETTIBOTTI_ED25519 = testKeys.private; }; envToExports = env: lib.concatStringsSep "\n" (lib.mapAttrsToList (name: value: ''export ${name}="${value}"'') env); - # Pre-built shell exports for E2E test runners (used by test-env.nix and integration.nix) - testRunnerExports = '' - export SERVER_PORT="${toString ports.server}" - export REGISTRY_API_URL="${testEnv.REGISTRY_API_URL}" - export GITHUB_API_URL="${testEnv.GITHUB_API_URL}" - export PACCHETTIBOTTI_TOKEN="${testEnv.PACCHETTIBOTTI_TOKEN}" - export PACCHETTIBOTTI_ED25519_PUB="${testEnv.PACCHETTIBOTTI_ED25519_PUB}" - export PACCHETTIBOTTI_ED25519="${testEnv.PACCHETTIBOTTI_ED25519}" - ''; - # Git mock that redirects URLs to local fixtures; this is necessary because otherwise # commands would reach out to GitHub or the other package origins. 
  gitMock = pkgs.writeShellScriptBin "git" ''
@@ -206,7 +207,10 @@ let
         headers."Content-Type" = "application/json";
         # Return packaging-team-user as a packaging team member for trustee re-signing tests
         jsonBody = [
-          { login = "packaging-team-user"; id = 1; }
+          {
+            login = "packaging-team-user";
+            id = 1;
+          }
         ];
       };
     }
@@ -534,7 +538,6 @@ in
     defaultStateDir
     mockUrls
     testEnv
-    testRunnerExports
     envToExports
     gitMock
     gitMockOverlay
diff --git a/nix/test/integration.nix b/nix/test/integration.nix
index b178bfdf4..bc4f333e0 100644
--- a/nix/test/integration.nix
+++ b/nix/test/integration.nix
@@ -59,7 +59,7 @@ else
       export STATE_DIR=$TMPDIR/state
 
       # Export test environment variables for E2E test runners
-      ${testEnv.testConfig.testRunnerExports}
+      ${testEnv.testConfig.envToExports testEnv.testConfig.testEnv}
 
       mkdir -p $STATE_DIR
 
diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix
index e27ef6376..ff5c4d57b 100644
--- a/nix/test/test-env.nix
+++ b/nix/test/test-env.nix
@@ -96,7 +96,7 @@ let
       set -e
 
       # Export test environment variables for E2E test runners
-      ${testConfig.testRunnerExports}
+      ${testConfig.envToExports testConfig.testEnv}
 
       if [ -z "''${STATE_DIR:-}" ]; then
         STATE_DIR="$(mktemp -d)"
 
From ad6c3284686c593531f0ce890c48830b47a069d0 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Mon, 22 Dec 2025 18:41:08 -0500
Subject: [PATCH 18/19] reinstate missing comments

---
 app/src/App/GitHubIssue.purs | 22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+)

diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs
index 812cc8131..527027607 100644
--- a/app/src/App/GitHubIssue.purs
+++ b/app/src/App/GitHubIssue.purs
@@ -323,6 +323,8 @@ readOperation eventPath = do
 
   IssueEvent { issueNumber, body, username } <- case JSON.parse fileContents >>= decodeIssueEvent of
     Left err ->
+      -- If we don't receive a valid event path or the contents can't be decoded
+      -- then this is a catastrophic error and we exit the workflow.
       Aff.throwError $ Aff.error $ "Error while parsing json from " <> eventPath <> " : " <> err
     Right event ->
       pure event
@@ -356,6 +358,10 @@ readOperation eventPath = do
     Right operation ->
       pure $ DecodedOperation issueNumber username operation
 
+-- | Users may submit issues with contents wrapped in code fences, perhaps with
+-- | a language specifier, trailing lines, and other issues. This rudimentary
+-- | cleanup pass retrieves all contents within an opening { and closing }
+-- | delimiter.
 firstObject :: String -> String
 firstObject input = fromMaybe input do
   before <- String.indexOf (String.Pattern "{") input
@@ -363,6 +369,9 @@ firstObject input = fromMaybe input do
   after <- String.lastIndexOf (String.Pattern "}") start
   pure (String.take (after + 1) start)
 
+-- | An event triggered by a GitHub workflow, specifically via an issue comment
+-- | or issue creation.
+-- | https://docs.github.com/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#issue_comment
 newtype IssueEvent = IssueEvent
   { issueNumber :: IssueNumber
   , body :: String
@@ -379,9 +388,22 @@ decodeIssueEvent json = lmap CJ.DecodeError.print do
   issueObject <- Octokit.atKey "issue" CJ.jobject object
   issueNumber <- Octokit.atKey "number" CJ.int issueObject
 
+  -- We accept issue creation and issue comment events, but both contain an
+  -- 'issue' field. However, only comments contain a 'comment' field. For that
+  -- reason we first try to parse the comment and fall back to the issue if
+  -- that fails.
   body <- Octokit.atKey "body" CJ.string =<< Octokit.atKey "comment" CJ.jobject object <|> pure issueObject
 
   pure $ IssueEvent { body, username, issueNumber: IssueNumber issueNumber }
 
+-- | Re-sign a payload as pacchettibotti if the authenticated operation was
+-- | submitted by a registry trustee.
+--
+-- @pacchettibotti is considered an 'owner' of all packages for authenticated
+-- operations. Registry trustees can ask pacchettibotti to perform an action on
+-- behalf of a package by submitting a payload with an empty signature. If the
+-- payload was submitted by a trustee (i.e. a member of the packaging team) then
+-- pacchettibotti will re-sign it and add itself as an owner before continuing
+-- with the authenticated operation.
 signPacchettiBottiIfTrustee
   :: forall r
    . AuthenticatedData

From 6c023cfd86e383b4b1de7e3825d552821142aa92 Mon Sep 17 00:00:00 2001
From: Thomas Honeyman
Date: Mon, 22 Dec 2025 20:22:31 -0500
Subject: [PATCH 19/19] Remove COMMENT effect, add NOTICE log level

---
 app-e2e/src/Test/E2E/GitHubIssue.purs | 9 ++-
 app/src/App/API.purs | 68 +++++++++----------
 app/src/App/Effect/Comment.purs | 68 ------------------
 app/src/App/Effect/Log.purs | 8 ++-
 app/src/App/GitHubIssue.purs | 8 +--
 app/src/App/SQLite.purs | 2 +-
 app/src/App/Server/Env.purs | 5 +-
 app/test/Test/Assert/Run.purs | 4 --
 lib/src/API/V1.purs | 6 +-
 nix/test/test-env.nix | 4 +-
 scripts/src/LegacyImporter.purs | 2 -
 scripts/src/PackageDeleter.purs | 2 -
 scripts/src/PackageSetUpdater.purs | 2 -
 scripts/src/PackageTransferrer.purs | 2 -
 scripts/src/Solver.purs | 2 -
 .../src/Registry/Test/E2E/Fixtures.purs       | 40 ++++++-----
 16 files changed, 81 insertions(+), 151 deletions(-)
 delete mode 100644 app/src/App/Effect/Comment.purs

diff --git a/app-e2e/src/Test/E2E/GitHubIssue.purs b/app-e2e/src/Test/E2E/GitHubIssue.purs
index b1931aaab..be9f3ba8f 100644
--- a/app-e2e/src/Test/E2E/GitHubIssue.purs
+++ b/app-e2e/src/Test/E2E/GitHubIssue.purs
@@ -40,7 +40,7 @@ spec = do
       assertIssueClosed result
 
     Spec.it "posts failure comment and leaves issue open when job fails" \_ -> do
-      result <- runWorkflowWithEvent $ mkGitHubPublishEvent Fixtures.failingPublishData
+      result <- runWorkflowWithEvent $ mkGitHubAuthenticatedEventFrom "random-user" Fixtures.failingTransferData
       assertJobFailed result
       assertHasComment jobStartedText result
 
@@ -102,11 +102,14 @@ mkGitHubPublishEvent publishData =
   JSON.print $ CJ.encode githubEventCodec event
 
 mkGitHubAuthenticatedEvent :: AuthenticatedData -> String
-mkGitHubAuthenticatedEvent authData =
+mkGitHubAuthenticatedEvent = mkGitHubAuthenticatedEventFrom packagingTeamUsername
+
+mkGitHubAuthenticatedEventFrom :: String -> AuthenticatedData -> String
+mkGitHubAuthenticatedEventFrom username authData =
   let
     authJson = JSON.print $ CJ.encode Operation.authenticatedCodec authData
     body = "```json\n" <> authJson <> "\n```"
-    event = { sender: { login: packagingTeamUsername }, issue: { number: 
testIssueNumber, body } } + event = { sender: { login: username }, issue: { number: testIssueNumber, body } } in JSON.print $ CJ.encode githubEventCodec event diff --git a/app/src/App/API.purs b/app/src/App/API.purs index 06d1ed943..c5d174e94 100644 --- a/app/src/App/API.purs +++ b/app/src/App/API.purs @@ -58,8 +58,6 @@ import Registry.App.CLI.PursVersions as PursVersions import Registry.App.CLI.Tar as Tar import Registry.App.Effect.Cache (class FsEncodable, Cache) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) @@ -115,7 +113,7 @@ import Run.Except (EXCEPT) import Run.Except as Except import Safe.Coerce as Safe.Coerce -type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + COMMENT + LOG + EXCEPT String + r) +type PackageSetUpdateEffects r = (REGISTRY + PACKAGE_SETS + GITHUB + GITHUB_EVENT_ENV + LOG + EXCEPT String + r) packageSetUpdate2 :: forall r. PackageSetJobDetails -> Run (PackageSetUpdateEffects + r) Unit packageSetUpdate2 {} = do @@ -228,18 +226,18 @@ packageSetUpdate payload = do Except.throw "No packages in the suggested batch can be processed (all failed validation checks) and the compiler version was not upgraded, so there is no upgrade to perform." let changeSet = candidates.accepted <#> maybe Remove Update - Comment.comment "Attempting to build package set update." + Log.notice "Attempting to build package set update." PackageSets.upgradeAtomic latestPackageSet (fromMaybe prevCompiler payload.compiler) changeSet >>= case _ of Left error -> Except.throw $ "The package set produced from this suggested update does not compile:\n\n" <> error Right packageSet -> do let commitMessage = PackageSets.commitMessage latestPackageSet changeSet (un PackageSet packageSet).version Registry.writePackageSet packageSet commitMessage - Comment.comment "Built and released a new package set! Now mirroring to the package-sets repo..." + Log.notice "Built and released a new package set! Now mirroring to the package-sets repo..." Registry.mirrorPackageSet packageSet - Comment.comment "Mirrored a new legacy package set." + Log.notice "Mirrored a new legacy package set." -type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type AuthenticatedEffects r = (REGISTRY + STORAGE + GITHUB + PACCHETTIBOTTI_ENV + LOG + EXCEPT String + AFF + EFFECT + r) -- | Run an authenticated package operation, ie. an unpublish or a transfer. authenticated :: forall r. AuthenticatedData -> Run (AuthenticatedEffects + r) Unit @@ -299,7 +297,7 @@ authenticated auth = case auth.payload of Storage.delete payload.name payload.version Registry.writeMetadata payload.name updated Registry.deleteManifest payload.name payload.version - Comment.comment $ "Unpublished " <> formatted <> "!" + Log.notice $ "Unpublished " <> formatted <> "!" Transfer payload -> do Log.debug $ "Processing authorized transfer operation with payload: " <> stringifyJson Operation.authenticatedCodec auth @@ -330,11 +328,11 @@ authenticated auth = case auth.payload of Log.debug $ "Successfully authenticated ownership of " <> PackageName.print payload.name <> ", transferring..." 
let updated = metadata # over Metadata _ { location = payload.newLocation } Registry.writeMetadata payload.name updated - Comment.comment "Successfully transferred your package!" + Log.notice "Successfully transferred your package!" Registry.mirrorLegacyRegistry payload.name payload.newLocation - Comment.comment "Mirrored registry operation to the legacy registry." + Log.notice "Mirrored registry operation to the legacy registry." -type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT + r) +type PublishEffects r = (RESOURCE_ENV + PURSUIT + REGISTRY + STORAGE + SOURCE + GITHUB + COMPILER_CACHE + LEGACY_CACHE + LOG + EXCEPT String + AFF + EFFECT + r) -- | Publish a package via the 'publish' operation. If the package has not been -- | published before then it will be registered and the given version will be @@ -450,13 +448,13 @@ publish maybeLegacyIndex payload = do pure manifest else if hasSpagoYaml then do - Comment.comment $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." + Log.notice $ "Package source does not have a purs.json file, creating one from your spago.yaml file..." SpagoYaml.readSpagoYaml packageSpagoYaml >>= case _ of Left readErr -> Except.throw $ "Could not publish your package - a spago.yaml was present, but it was not possible to read it:\n" <> readErr Right config -> case SpagoYaml.spagoYamlToManifest config of Left err -> Except.throw $ "Could not publish your package - there was an error while converting your spago.yaml into a purs.json manifest:\n" <> err Right manifest -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your spago.yaml into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -465,7 +463,7 @@ publish maybeLegacyIndex payload = do pure manifest else do - Comment.comment $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." + Log.notice $ "Package source does not have a purs.json file. Creating one from your bower.json and/or spago.dhall files..." version <- case LenientVersion.parse payload.ref of Left _ -> Except.throw $ "The provided ref " <> payload.ref <> " is not a version of the form X.Y.Z or vX.Y.Z, so it cannot be used." @@ -481,7 +479,7 @@ publish maybeLegacyIndex payload = do Right legacyManifest -> do Log.debug $ "Successfully produced a legacy manifest from the package source." let manifest = Legacy.Manifest.toManifest payload.name version existingMetadata.location legacyManifest - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Converted your legacy manifest(s) into a purs.json manifest to use for publishing:" , "\n```json\n" , printJson Manifest.codec manifest @@ -556,7 +554,7 @@ publish maybeLegacyIndex payload = do ] Nothing | payload.compiler < Purs.minPursuitPublish -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. Unfortunately, it is not possible to publish to Pursuit via the " , "registry using compiler versions prior to " <> Version.print Purs.minPursuitPublish @@ -565,7 +563,7 @@ publish maybeLegacyIndex payload = do pure Nothing Nothing -> do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "This version has already been published to the registry, but the docs have not been " , "uploaded to Pursuit. 
Skipping registry publishing and retrying Pursuit publishing..."
          ]
@@ -596,7 +594,7 @@ publish maybeLegacyIndex payload = do
             Left publishErr -> Except.throw publishErr
             Right _ -> do
               FS.Extra.remove tmp
-              Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀"
+              Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀"
               pure Nothing
 
     -- In this case the package version has not been published, so we proceed
@@ -606,7 +604,7 @@ publish maybeLegacyIndex payload = do
       compilerIndex <- MatrixBuilder.readCompilerIndex
       validatedResolutions <- verifyResolutions compilerIndex payload.compiler (Manifest receivedManifest) payload.resolutions
 
-      Comment.comment "Verifying unused and/or missing dependencies..."
+      Log.notice "Verifying unused and/or missing dependencies..."
 
       -- First we install the resolutions and call 'purs graph' to adjust the
       -- manifest as needed, but we defer compilation until after this check
@@ -695,7 +693,7 @@ publish maybeLegacyIndex payload = do
 
       -- Now that we have the package source contents we can verify we can compile
       -- the package with exactly what is going to be uploaded.
-      Comment.comment $ Array.fold
+      Log.notice $ Array.fold
        [ "Verifying package compiles using compiler "
        , Version.print payload.compiler
        , " and resolutions:\n"
@@ -720,7 +718,7 @@ publish maybeLegacyIndex payload = do
           Except.throw $ "Publishing failed due to a compiler error:\n\n" <> error
         Right _ -> pure unit
 
-      Comment.comment "Package source is verified! Packaging tarball and uploading to the storage backend..."
+      Log.notice "Package source is verified! Packaging tarball and uploading to the storage backend..."
       let tarballName = packageDirname <> ".tar.gz"
       let tarballPath = Path.concat [ tmp, tarballName ]
       Tar.create { cwd: tmp, folderName: packageDirname }
@@ -731,7 +729,7 @@ publish maybeLegacyIndex payload = do
         Operation.Validation.ExceedsMaximum maxPackageBytes ->
           Except.throw $ "Package tarball is " <> show bytes <> " bytes, which exceeds the maximum size of " <> show maxPackageBytes <> " bytes."
         Operation.Validation.WarnPackageSize maxWarnBytes ->
-          Comment.comment $ "WARNING: Package tarball is " <> show bytes <> "bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes."
+          Log.notice $ "WARNING: Package tarball is " <> show bytes <> " bytes, which exceeds the warning threshold of " <> show maxWarnBytes <> " bytes."
 
      -- If a package has under ~30 bytes it's about guaranteed that packaging the
      -- tarball failed. This can happen if the system running the API has a non-
@@ -750,7 +748,7 @@ publish maybeLegacyIndex payload = do
       let newMetadata = metadata { published = Map.insert (un Manifest manifest).version newPublishedVersion metadata.published }
 
       Registry.writeMetadata (un Manifest manifest).name (Metadata newMetadata)
-      Comment.comment "Successfully uploaded package to the registry! 🎉 🚀"
+      Log.notice "Successfully uploaded package to the registry! 🎉 🚀"
 
       -- We write to the registry index if possible. If this fails, the packaging
       -- team should manually insert the entry.
@@ -758,7 +756,7 @@ publish maybeLegacyIndex payload = do
         Registry.writeManifest manifest
 
       Registry.mirrorLegacyRegistry payload.name newMetadata.location
-      Comment.comment "Mirrored registry operation to the legacy registry!"
+      Log.notice "Mirrored registry operation to the legacy registry!"
Log.debug "Uploading package documentation to Pursuit" if payload.compiler >= Purs.minPursuitPublish then @@ -768,11 +766,11 @@ publish maybeLegacyIndex payload = do publishToPursuit { source: downloadedPackage, compiler: payload.compiler, resolutions, installedResolutions } >>= case _ of Left publishErr -> do Log.error publishErr - Comment.comment $ "Failed to publish package docs to Pursuit: " <> publishErr + Log.notice $ "Failed to publish package docs to Pursuit: " <> publishErr Right _ -> - Comment.comment "Successfully uploaded package docs to Pursuit! 🎉 🚀" + Log.notice "Successfully uploaded package docs to Pursuit! 🎉 🚀" else do - Comment.comment $ Array.fold + Log.notice $ Array.fold [ "Skipping Pursuit publishing because this package was published with a pre-0.14.7 compiler (" , Version.print payload.compiler , "). If you want to publish documentation, please try again with a later compiler." @@ -782,7 +780,7 @@ publish maybeLegacyIndex payload = do -- when running the server) this will be taken care of by followup jobs invoking -- the MatrixBuilder for each compiler version for_ maybeLegacyIndex \_idx -> do - Comment.comment "Determining all valid compiler versions for this package..." + Log.notice "Determining all valid compiler versions for this package..." allCompilers <- PursVersions.pursVersions { failed: invalidCompilers, succeeded: validCompilers } <- case NonEmptyArray.fromFoldable $ NonEmptyArray.delete payload.compiler allCompilers of Nothing -> pure { failed: Map.empty, succeeded: NonEmptySet.singleton payload.compiler } @@ -797,13 +795,13 @@ publish maybeLegacyIndex payload = do unless (Map.isEmpty invalidCompilers) do Log.debug $ "Some compilers failed: " <> String.joinWith ", " (map Version.print (Set.toUnfoldable (Map.keys invalidCompilers))) - Comment.comment $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) + Log.notice $ "Found compatible compilers: " <> String.joinWith ", " (map (\v -> "`" <> Version.print v <> "`") (NonEmptySet.toUnfoldable validCompilers)) let metadataWithCompilers = newMetadata { published = Map.update (Just <<< (_ { compilers = NonEmptySet.toUnfoldable1 validCompilers })) (un Manifest manifest).version newMetadata.published } Registry.writeMetadata (un Manifest manifest).name (Metadata metadataWithCompilers) Log.debug $ "Wrote new metadata " <> printJson Metadata.codec (Metadata metadataWithCompilers) - Comment.comment "Wrote completed metadata to the registry!" + Log.notice "Wrote completed metadata to the registry!" FS.Extra.remove tmp pure $ Just { dependencies: (un Manifest manifest).dependencies, version: (un Manifest manifest).version } @@ -969,7 +967,7 @@ type PublishToPursuit = publishToPursuit :: forall r . 
PublishToPursuit - -> Run (PURSUIT + COMMENT + LOG + AFF + EFFECT + r) (Either String Unit) + -> Run (PURSUIT + LOG + AFF + EFFECT + r) (Either String Unit) publishToPursuit { source, compiler, resolutions, installedResolutions } = Except.runExcept do Log.debug "Generating a resolutions file" tmp <- Tmp.mkTmpDir @@ -1170,7 +1168,7 @@ conformLegacyManifest -> CompilerIndex -> Solver.TransitivizedRegistry -> ValidateDepsError - -> Run (COMMENT + LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) + -> Run (LOG + EXCEPT String + r) (Tuple Manifest (Map PackageName Version)) conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry problem = do let manifestRequired :: SemigroupMap PackageName Intersection @@ -1267,7 +1265,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p UnusedDependencies names -> do Tuple deps resolutions <- fixUnused names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1276,7 +1274,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p MissingDependencies names -> do Tuple deps resolutions <- fixMissing names (Manifest manifest) let newManifest = Manifest (manifest { dependencies = deps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable names)) <> "\n" , newDepsMessage newManifest @@ -1287,7 +1285,7 @@ conformLegacyManifest (Manifest manifest) compiler currentIndex legacyRegistry p let trimmed = Map.difference manifest.dependencies unused' Tuple newDeps newResolutions <- fixMissing missing (Manifest (manifest { dependencies = trimmed })) let newManifest = Manifest (manifest { dependencies = newDeps }) - Comment.comment $ Array.fold + Log.notice $ Array.fold [ previousDepsMessage , "\nWe have removed the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable unused)) <> "\n" , "We have added the following packages: " <> String.joinWith ", " (map PackageName.print (NonEmptySet.toUnfoldable missing)) <> "\n" diff --git a/app/src/App/Effect/Comment.purs b/app/src/App/Effect/Comment.purs deleted file mode 100644 index 848a1b3ae..000000000 --- a/app/src/App/Effect/Comment.purs +++ /dev/null @@ -1,68 +0,0 @@ --- | An effect for notifying users of important events in the application, such --- | as failures that prevent their package from being uploaded, or successful --- | events that indicate progress. --- | --- | This is not a general logging effect. For that, you should use the Log --- | effect. This effect should be used sparingly to notify registry users of --- | events with formatted, human-readable messages providing context. 
-module Registry.App.Effect.Comment where - -import Registry.App.Prelude - -import Ansi.Codes (GraphicsParam) -import Data.Int as Int -import Dodo (Doc) -import Dodo as Dodo -import Dodo.Ansi as Ansi -import Registry.App.Effect.Log (LOG) -import Registry.App.Effect.Log as Log -import Registry.Foreign.Octokit (Address, IssueNumber(..), Octokit) -import Registry.Foreign.Octokit as Octokit -import Run (AFF, EFFECT, Run) -import Run as Run - -data Comment a = Comment (Doc GraphicsParam) a - -derive instance Functor Comment - --- | An effect for notifying consumers of important events in the application -type COMMENT r = (comment :: Comment | r) - -_comment :: Proxy "comment" -_comment = Proxy - -comment :: forall a r. Log.Loggable a => a -> Run (COMMENT + r) Unit -comment message = Run.lift _comment (Comment (Log.toLog message) unit) - -interpret :: forall r a. (Comment ~> Run r) -> Run (COMMENT + r) a -> Run r a -interpret handler = Run.interpret (Run.on _comment handler Run.send) - --- | Handle a notification by converting it to an info-level LOG -handleLog :: forall a r. Comment a -> Run (LOG + r) a -handleLog = case _ of - Comment message next -> do - Log.info $ Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTIFY] ") <> message - pure next - -type CommentGitHubEnv = - { octokit :: Octokit - , issue :: IssueNumber - , registry :: Address - } - --- | Handle a notification by commenting on the relevant GitHub issue. -handleGitHub :: forall a r. CommentGitHubEnv -> Comment a -> Run (LOG + AFF + EFFECT + r) a -handleGitHub env = case _ of - Comment message next -> do - let issueNumber = Int.toStringAs Int.decimal $ un IssueNumber env.issue - Log.debug $ "Commenting via a GitHub comment on issue " <> issueNumber - handleLog (Comment message unit) - let body = Dodo.print Dodo.plainText Dodo.twoSpaces (Log.toLog message) - let request = Octokit.createCommentRequest { address: env.registry, issue: env.issue, body } - Octokit.request env.octokit request >>= case _ of - Left error -> do - Log.error $ "Could not send comment to GitHub due to an unexpected error." - Log.debug $ Octokit.printGitHubError error - Right _ -> - Log.debug $ "Created GitHub comment on issue " <> issueNumber - pure next diff --git a/app/src/App/Effect/Log.purs b/app/src/App/Effect/Log.purs index a1cb72c0a..b99af947d 100644 --- a/app/src/App/Effect/Log.purs +++ b/app/src/App/Effect/Log.purs @@ -1,6 +1,6 @@ -- | A general logging effect suitable for recording events as they happen in --- | the application, including debugging logs. Should not be used to report --- | important events to registry users; for that, use the Comment effect. +-- | the application, including debugging logs. Use the `notice` level to report +-- | important events to registry users (these are posted as GitHub comments). module Registry.App.Effect.Log where import Registry.App.Prelude @@ -65,6 +65,9 @@ info = log Info <<< toLog warn :: forall a r. Loggable a => a -> Run (LOG + r) Unit warn = log Warn <<< toLog +notice :: forall a r. Loggable a => a -> Run (LOG + r) Unit +notice = log Notice <<< toLog + error :: forall a r. 
Loggable a => a -> Run (LOG + r) Unit error = log Error <<< toLog @@ -80,6 +83,7 @@ handleTerminal verbosity = case _ of Debug -> Ansi.foreground Ansi.Blue message Info -> message Warn -> Ansi.foreground Ansi.Yellow (Dodo.text "[WARNING] ") <> message + Notice -> Ansi.foreground Ansi.BrightBlue (Dodo.text "[NOTICE] ") <> message Error -> Ansi.foreground Ansi.Red (Dodo.text "[ERROR] ") <> message Run.liftEffect case verbosity of diff --git a/app/src/App/GitHubIssue.purs b/app/src/App/GitHubIssue.purs index 527027607..e3eb353aa 100644 --- a/app/src/App/GitHubIssue.purs +++ b/app/src/App/GitHubIssue.purs @@ -185,10 +185,10 @@ pollAndReport octokit issue pollConfig registryApiUrl jobId = go Nothing 0 0 Right job -> do let info = V1.jobInfo job - -- Post any new logs (filtered to Info level and above, and after lastTimestamp) + -- Post any new logs (filtered to Notice level and above, and after lastTimestamp) let newLogs = Array.filter isNewLog info.logs - isNewLog l = l.level >= V1.Info && case lastTimestamp of + isNewLog l = l.level >= V1.Notice && case lastTimestamp of Nothing -> true Just ts -> l.timestamp > ts unless (Array.null newLogs) do @@ -228,8 +228,8 @@ fetchJob registryApiUrl (V1.JobId jobId) since = do let baseUrl = registryApiUrl <> "/v1/jobs/" <> jobId url = case since of - Nothing -> baseUrl <> "?level=INFO" - Just ts -> baseUrl <> "?level=INFO&since=" <> DateTime.format Internal.Format.iso8601DateTime ts + Nothing -> baseUrl <> "?level=NOTICE" + Just ts -> baseUrl <> "?level=NOTICE&since=" <> DateTime.format Internal.Format.iso8601DateTime ts result <- Aff.attempt $ Fetch.fetch url { method: GET } case result of Left err -> pure $ Left $ "Network error: " <> Aff.message err diff --git a/app/src/App/SQLite.purs b/app/src/App/SQLite.purs index bf7bd3f69..783b5f756 100644 --- a/app/src/App/SQLite.purs +++ b/app/src/App/SQLite.purs @@ -42,8 +42,8 @@ module Registry.App.SQLite import Registry.App.Prelude import Codec.JSON.DecodeError as JSON.DecodeError -import Data.Array as Array import Control.Monad.Except (runExceptT) +import Data.Array as Array import Data.DateTime (DateTime) import Data.Formatter.DateTime as DateTime import Data.Nullable as Nullable diff --git a/app/src/App/Server/Env.purs b/app/src/App/Server/Env.purs index 07baa935c..335764eef 100644 --- a/app/src/App/Server/Env.purs +++ b/app/src/App/Server/Env.purs @@ -14,8 +14,6 @@ import Registry.App.API (COMPILER_CACHE, _compilerCache) import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Db (DB) import Registry.App.Effect.Db as Db import Registry.App.Effect.Env (PACCHETTIBOTTI_ENV, RESOURCE_ENV, ResourceEnv) @@ -120,7 +118,7 @@ createServerEnv = do , jobId: Nothing } -type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + COMMENT + LOG + EXCEPT String + AFF + EFFECT ()) +type ServerEffects = (RESOURCE_ENV + PACCHETTIBOTTI_ENV + REGISTRY + STORAGE + PURSUIT + SOURCE + DB + GITHUB + LEGACY_CACHE + COMPILER_CACHE + LOG + EXCEPT String + AFF + EFFECT ()) runServer :: ServerEnv @@ -174,7 +172,6 @@ runEffects env operation = Aff.attempt do Log.error msg *> Run.liftAff (Aff.throwError (Aff.error msg)) ) # Db.interpret (Db.handleSQLite { db: env.db }) - # Comment.interpret Comment.handleLog # Log.interpret ( \log -> case env.jobId of Nothing -> 
Log.handleTerminal Verbose log *> Log.handleFs Verbose logPath log diff --git a/app/test/Test/Assert/Run.purs b/app/test/Test/Assert/Run.purs index 42cc7d6ab..8c3e24195 100644 --- a/app/test/Test/Assert/Run.purs +++ b/app/test/Test/Assert/Run.purs @@ -30,8 +30,6 @@ import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache (CacheRef) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment (COMMENT) -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env (GITHUB_EVENT_ENV, PACCHETTIBOTTI_ENV, RESOURCE_ENV) import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB, GITHUB_CACHE, GitHub(..)) @@ -89,7 +87,6 @@ type TEST_EFFECTS = + GITHUB_CACHE + LEGACY_CACHE + COMPILER_CACHE - + COMMENT + LOG + EXCEPT String + AFF @@ -129,7 +126,6 @@ runTestEffects env operation = Aff.attempt do # runGitHubCacheMemory githubCache # runLegacyCacheMemory legacyCache -- Other effects - # Comment.interpret Comment.handleLog # Log.interpret (\(Log level msg next) -> Run.liftEffect (Ref.modify_ (_ <> [ Tuple level (Dodo.print Dodo.plainText Dodo.twoSpaces msg) ]) env.logs) *> pure next) -- Base effects # Except.catch (\err -> Run.liftAff (Aff.throwError (Aff.error err))) diff --git a/lib/src/API/V1.purs b/lib/src/API/V1.purs index 8c08d181d..fb4bd3b54 100644 --- a/lib/src/API/V1.purs +++ b/lib/src/API/V1.purs @@ -301,7 +301,7 @@ logLineCodec = CJ.named "LogLine" $ CJ.Record.object , timestamp: Internal.Codec.iso8601DateTime } -data LogLevel = Debug | Info | Warn | Error +data LogLevel = Debug | Info | Warn | Notice | Error derive instance Eq LogLevel derive instance Ord LogLevel @@ -311,6 +311,7 @@ printLogLevel = case _ of Debug -> "DEBUG" Info -> "INFO" Warn -> "WARN" + Notice -> "NOTICE" Error -> "ERROR" -- These numbers are not consecutive so that we can insert new log levels if need be @@ -319,6 +320,7 @@ logLevelToPriority = case _ of Debug -> 0 Info -> 10 Warn -> 20 + Notice -> 25 Error -> 30 logLevelFromPriority :: Int -> Either String LogLevel @@ -326,6 +328,7 @@ logLevelFromPriority = case _ of 0 -> Right Debug 10 -> Right Info 20 -> Right Warn + 25 -> Right Notice 30 -> Right Error other -> Left $ "Invalid log level priority: " <> show other @@ -334,5 +337,6 @@ parseLogLevel = case _ of "DEBUG" -> Right Debug "INFO" -> Right Info "WARN" -> Right Warn + "NOTICE" -> Right Notice "ERROR" -> Right Error other -> Left $ "Invalid log level: " <> other diff --git a/nix/test/test-env.nix b/nix/test/test-env.nix index ff5c4d57b..f7d7fb058 100644 --- a/nix/test/test-env.nix +++ b/nix/test/test-env.nix @@ -131,8 +131,8 @@ in wiremockStartScript serverStartScript setupGitFixtures - envVars - envFile + testEnv + envToExports ; # Full testConfig still available for less common access patterns diff --git a/scripts/src/LegacyImporter.purs b/scripts/src/LegacyImporter.purs index 783ee353c..910233047 100644 --- a/scripts/src/LegacyImporter.purs +++ b/scripts/src/LegacyImporter.purs @@ -59,7 +59,6 @@ import Registry.App.CLI.Purs as Purs import Registry.App.CLI.PursVersions as PursVersions import Registry.App.Effect.Cache (class FsEncodable, class MemoryEncodable, Cache, FsEncoding(..), MemoryEncoding(..)) import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -189,7 +188,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, 
ref: importCacheRef }) # Cache.interpret API._compilerCache (Cache.handleFs cache) # Run.Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageDeleter.purs b/scripts/src/PackageDeleter.purs index 925361fb2..399af2e93 100644 --- a/scripts/src/PackageDeleter.purs +++ b/scripts/src/PackageDeleter.purs @@ -20,7 +20,6 @@ import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -158,7 +157,6 @@ main = launchAff_ do >>> Pursuit.interpret Pursuit.handlePure >>> Cache.interpret _legacyCache (Cache.handleMemoryFs { ref: legacyCacheRef, cache }) >>> Cache.interpret _compilerCache (Cache.handleFs cache) - >>> Comment.interpret Comment.handleLog >>> Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) >>> Env.runResourceEnv resourceEnv >>> Run.runBaseAff' diff --git a/scripts/src/PackageSetUpdater.purs b/scripts/src/PackageSetUpdater.purs index 95053eed1..29423cf7b 100644 --- a/scripts/src/PackageSetUpdater.purs +++ b/scripts/src/PackageSetUpdater.purs @@ -19,7 +19,6 @@ import Node.Path as Path import Node.Process as Process import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log (LOG) @@ -114,7 +113,6 @@ main = Aff.launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runResourceEnv resourceEnv # Run.runBaseAff' diff --git a/scripts/src/PackageTransferrer.purs b/scripts/src/PackageTransferrer.purs index d203c66de..31e859197 100644 --- a/scripts/src/PackageTransferrer.purs +++ b/scripts/src/PackageTransferrer.purs @@ -16,7 +16,6 @@ import Registry.App.API as API import Registry.App.Auth as Auth import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache -import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub (GITHUB) import Registry.App.Effect.GitHub as GitHub @@ -87,7 +86,6 @@ main = launchAff_ do # Storage.interpret (Storage.handleReadOnly cache) # GitHub.interpret (GitHub.handle { octokit, cache, ref: githubCacheRef }) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Env.runPacchettiBottiEnv { privateKey, publicKey } # Env.runResourceEnv resourceEnv diff --git a/scripts/src/Solver.purs b/scripts/src/Solver.purs index aa2820e16..51f2ef993 100644 --- a/scripts/src/Solver.purs +++ b/scripts/src/Solver.purs @@ -31,7 +31,6 @@ import Registry.App.API (_compilerCache) import Registry.App.API as API import Registry.App.CLI.Git as Git import Registry.App.Effect.Cache as Cache 
-import Registry.App.Effect.Comment as Comment import Registry.App.Effect.Env as Env import Registry.App.Effect.GitHub as GitHub import Registry.App.Effect.Log as Log @@ -150,7 +149,6 @@ main = launchAff_ do # Cache.interpret _importCache (Cache.handleMemoryFs { cache, ref: importCacheRef }) # Cache.interpret _compilerCache (Cache.handleFs cache) # Except.catch (\msg -> Log.error msg *> Run.liftEffect (Process.exit' 1)) - # Comment.interpret Comment.handleLog # Env.runResourceEnv resourceEnv # Log.interpret (\log -> Log.handleTerminal Normal log *> Log.handleFs Verbose logPath log) # Run.runBaseAff' diff --git a/test-utils/src/Registry/Test/E2E/Fixtures.purs b/test-utils/src/Registry/Test/E2E/Fixtures.purs index c69af3645..70f1242b0 100644 --- a/test-utils/src/Registry/Test/E2E/Fixtures.purs +++ b/test-utils/src/Registry/Test/E2E/Fixtures.purs @@ -2,7 +2,7 @@ -- | Contains package operation data used across multiple test suites. module Registry.Test.E2E.Fixtures ( effectPublishData - , failingPublishData + , failingTransferData , trusteeAuthenticatedData ) where @@ -12,14 +12,14 @@ import Data.Codec.JSON as CJ import Data.Maybe (Maybe(..)) import JSON as JSON import Registry.Location as Location -import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), PublishData, UnpublishData) +import Registry.Operation (AuthenticatedData, AuthenticatedPackageOperation(..), TransferData, UnpublishData) import Registry.Operation as Operation import Registry.SSH (Signature(..)) import Registry.Test.Utils as Utils -- | Standard publish data for effect@4.0.0, used by E2E tests. -- | This matches the fixtures in app/fixtures/github-packages/effect-4.0.0 -effectPublishData :: PublishData +effectPublishData :: Operation.PublishData effectPublishData = { name: Utils.unsafePackageName "effect" , location: Just $ Location.GitHub @@ -33,21 +33,27 @@ effectPublishData = , version: Utils.unsafeVersion "4.0.0" } --- | Publish data for prelude@6.0.1, which already exists in metadata fixtures. --- | Used to test failure scenarios (duplicate publish) in E2E tests. -failingPublishData :: PublishData -failingPublishData = - { name: Utils.unsafePackageName "prelude" - , location: Just $ Location.GitHub - { owner: "purescript" - , repo: "purescript-prelude" - , subdir: Nothing +-- | Authenticated transfer data for prelude, which has no owners in fixtures. +-- | Used to test failure scenarios in E2E tests - will fail because no owners +-- | are listed to verify the signature against. +failingTransferData :: AuthenticatedData +failingTransferData = + let + transferPayload :: TransferData + transferPayload = + { name: Utils.unsafePackageName "prelude" + , newLocation: Location.GitHub + { owner: "someone-else" + , repo: "purescript-prelude" + , subdir: Nothing + } } - , ref: "v6.0.1" - , compiler: Utils.unsafeVersion "0.15.9" - , resolutions: Nothing - , version: Utils.unsafeVersion "6.0.1" - } + rawPayload = JSON.print $ CJ.encode Operation.transferCodec transferPayload + in + { payload: Transfer transferPayload + , rawPayload + , signature: Signature "invalid-signature-for-testing" + } -- | Authenticated data with an intentionally invalid signature. -- | When submitted by a trustee (packaging-team-user), pacchettibotti will re-sign it.