diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 81de1875..0332d94c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,10 +10,10 @@ name: CI jobs: container-job: runs-on: ubuntu-latest - container: node:18 + container: node:20 strategy: matrix: - node: [ 16, 18 ] + node: [ 20 ] services: postgres: image: postgres @@ -27,10 +27,10 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: node-version: ${{ matrix.node }} diff --git a/README.md b/README.md index 51506c6a..c80b8ca7 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ -Queueing jobs in Node.js using PostgreSQL like a boss. +Queueing jobs in Postgres from Node.js like a boss. -[![PostgreSql Version](https://img.shields.io/badge/PostgreSQL-11+-blue.svg?maxAge=2592000)](http://www.postgresql.org) [![npm version](https://badge.fury.io/js/pg-boss.svg)](https://badge.fury.io/js/pg-boss) [![Build](https://github.com/timgit/pg-boss/actions/workflows/ci.yml/badge.svg?branch=master)](https://github.com/timgit/pg-boss/actions/workflows/ci.yml) [![Coverage Status](https://coveralls.io/repos/github/timgit/pg-boss/badge.svg?branch=master)](https://coveralls.io/github/timgit/pg-boss?branch=master) @@ -10,52 +9,47 @@ async function readme() { const PgBoss = require('pg-boss'); const boss = new PgBoss('postgres://user:pass@host/database'); - boss.on('error', error => console.error(error)); + boss.on('error', console.error) - await boss.start(); + await boss.start() - const queue = 'some-queue'; + const queue = 'readme-queue' - let jobId = await boss.send(queue, { param1: 'foo' }) + const id = await boss.send(queue, { arg1: 'read me' }) - console.log(`created job in queue ${queue}: ${jobId}`); + console.log(`created job ${id} in queue ${queue}`) - await boss.work(queue, someAsyncJobHandler); -} - -async function someAsyncJobHandler(job) { - 
console.log(`job ${job.id} received with data:`); - console.log(JSON.stringify(job.data)); - - await doSomethingAsyncWithThis(job.data); + await boss.work(queue, async ([ job ]) => { + console.log(`received job ${job.id} with data ${JSON.stringify(job.data)}`) + }) } ``` pg-boss is a job queue built in Node.js on top of PostgreSQL in order to provide background processing and reliable asynchronous execution to Node.js applications. -pg-boss relies on [SKIP LOCKED](https://www.2ndquadrant.com/en/blog/what-is-select-skip-locked-for-in-postgresql-9-5/), a feature added to postgres specifically for message queues, in order to resolve record locking challenges inherent with relational databases. This brings the safety of guaranteed atomic commits of a relational database to your asynchronous job processing. +pg-boss relies on [SKIP LOCKED](https://www.2ndquadrant.com/en/blog/what-is-select-skip-locked-for-in-postgresql-9-5/), a feature built specifically for message queues to resolve record locking challenges inherent with relational databases. This provides exactly-once delivery and the safety of guaranteed atomic commits to asynchronous job processing. This will likely cater the most to teams already familiar with the simplicity of relational database semantics and operations (SQL, querying, and backups). It will be especially useful to those already relying on PostgreSQL that want to limit how many systems are required to monitor and support in their architecture. 
-## Features + +## Summary * Exactly-once job delivery * Backpressure-compatible polling workers * Cron scheduling +* Queue storage policies to support a variety of rate limiting, debouncing, and concurrency use cases +* Priority queues, dead letter queues, job deferral, automatic retries with exponential backoff * Pub/sub API for fan-out queue relationships -* Deferral, retries (with exponential backoff), rate limiting, debouncing -* Completion jobs for orchestrations/sagas -* Direct table access for bulk loads via COPY or INSERT +* Raw SQL support for non-Node.js runtimes via INSERT or COPY +* Serverless function compatible * Multi-master compatible (for example, in a Kubernetes ReplicaSet) -* Automatic creation and migration of storage tables -* Automatic maintenance operations to manage table growth ## Requirements -* Node 16 or higher -* PostgreSQL 11 or higher +* Node 20 or higher +* PostgreSQL 13 or higher ## Installation -``` bash +```bash # npm npm install pg-boss @@ -67,7 +61,6 @@ yarn add pg-boss * [Docs](docs/readme.md) ## Contributing - To setup a development environment for this library: ```bash diff --git a/docs/readme.md b/docs/readme.md index 94625a25..8d10928c 100644 --- a/docs/readme.md +++ b/docs/readme.md @@ -25,21 +25,16 @@ - [`send()`](#send) - [`send(name, data, options)`](#sendname-data-options) - [`send(request)`](#sendrequest) - - [`sendAfter(name, data, options, seconds | ISO date string | Date)`](#sendaftername-data-options-seconds--iso-date-string--date) - - [`sendOnce(name, data, options, key)`](#sendoncename-data-options-key) - - [`sendSingleton(name, data, options)`](#sendsingletonname-data-options) - - [`sendThrottled(name, data, options, seconds [, key])`](#sendthrottledname-data-options-seconds--key) - - [`sendDebounced(name, data, options, seconds [, key])`](#senddebouncedname-data-options-seconds--key) - - [`insert([jobs])`](#insertjobs) - - [`fetch()`](#fetch) - - [`fetch(name)`](#fetchname) - - [`fetch(name, batchSize, [, 
options])`](#fetchname-batchsize--options) - - [`fetchCompleted(name [, batchSize] [, options])`](#fetchcompletedname--batchsize--options) + - [`sendAfter(name, data, options, value)`](#sendaftername-data-options-value) + - [`sendThrottled(name, data, options, seconds, key)`](#sendthrottledname-data-options-seconds-key) + - [`sendDebounced(name, data, options, seconds, key)`](#senddebouncedname-data-options-seconds-key) + - [`insert(Job[])`](#insertjob) + - [`fetch(name, options)`](#fetchname-options) - [`work()`](#work) - - [`work(name [, options], handler)`](#workname--options-handler) - - [`onComplete(name [, options], handler)`](#oncompletename--options-handler) + - [`work(name, options, handler)`](#workname-options-handler) + - [`work(name, handler)`](#workname-handler) + - [`notifyWorker(id)`](#notifyworkerid) - [`offWork(value)`](#offworkvalue) - - [`offComplete(value)`](#offcompletevalue) - [`publish(event, data, options)`](#publishevent-data-options) - [`subscribe(event, name)`](#subscribeevent-name) - [`unsubscribe(event, name)`](#unsubscribeevent-name) @@ -47,51 +42,54 @@ - [`schedule(name, cron, data, options)`](#schedulename-cron-data-options) - [`unschedule(name)`](#unschedulename) - [`getSchedules()`](#getschedules) - - [`cancel(id, options)`](#cancelid-options) - - [`cancel([ids], options)`](#cancelids-options) - - [`resume(id, options)`](#resumeid-options) - - [`resume([ids], options)`](#resumeids-options) - - [`complete(id [, data, options])`](#completeid--data-options) - - [`complete([ids], options)`](#completeids-options) - - [`fail(id [, data, options])`](#failid--data-options) - - [`fail([ids], options)`](#failids-options) - - [`notifyWorker(id)`](#notifyworkerid) - - [`getQueueSize(name [, options])`](#getqueuesizename--options) - - [`getJobById(id, options)`](#getjobbyidid-options) + - [`deleteJob(name, id, options)`](#deletejobname-id-options) + - [`deleteJob(name, [ids], options)`](#deletejobname-ids-options) + - [`cancel(name, id, 
options)`](#cancelname-id-options) + - [`cancel(name, [ids], options)`](#cancelname-ids-options) + - [`resume(name, id, options)`](#resumename-id-options) + - [`resume(name, [ids], options)`](#resumename-ids-options) + - [`complete(name, id, data, options)`](#completename-id-data-options) + - [`complete(name, [ids], options)`](#completename-ids-options) + - [`fail(name, id, data, options)`](#failname-id-data-options) + - [`fail(name, [ids], options)`](#failname-ids-options) + - [`getJobById(name, id, options)`](#getjobbyidname-id-options) + - [`createQueue(name, Queue)`](#createqueuename-queue) + - [`updateQueue(name, options)`](#updatequeuename-options) + - [`purgeQueue(name)`](#purgequeuename) - [`deleteQueue(name)`](#deletequeuename) - - [`deleteAllQueues()`](#deleteallqueues) + - [`getQueues()`](#getqueues) + - [`getQueue(name)`](#getqueuename) + - [`getQueueSize(name, options)`](#getqueuesizename-options) - [`clearStorage()`](#clearstorage) + - [`isInstalled()`](#isinstalled) + - [`schemaVersion()`](#schemaversion) # Intro -pg-boss is a job queue written in Node.js and backed by the reliability of Postgres. +pg-boss is a job queue powered by Postgres, operated by 1 or more Node.js instances. -You may use as many instances as needed to connect to the same Postgres database. Each instance maintains a connection pool or you can bring your own, limited to the maximum number of connections your database server can accept. If you need a larger number of workers, consider using a centralized connection pool such as pgBouncer. [Creating your own web API or UI](https://github.com/timgit/pg-boss/issues/266) is another option if direct database access is not available. +pg-boss relies on [SKIP LOCKED](https://www.2ndquadrant.com/en/blog/what-is-select-skip-locked-for-in-postgresql-9-5/), a feature built specifically for message queues to resolve record locking challenges inherent with relational databases. 
This provides exactly-once delivery and the safety of guaranteed atomic commits to asynchronous job processing. -If you require multiple installations in the same database, such as for large volume queues, you may wish to specify a separate schema per install to achieve partitioning. +This will likely cater the most to teams already familiar with the simplicity of relational database semantics and operations (SQL, querying, and backups). It will be especially useful to those already relying on PostgreSQL that want to limit how many systems are required to monitor and support in their architecture. -Architecturally, pg-boss is somewhat similar to queue products such as AWS SQS, which primarily acts as a store of jobs that are "pulled", not "pushed" from the server. If at least one pg-boss instance is running, internal maintenance jobs will be periodically run to make sure fetched jobs that are never completed are marked as expired or retried (if configured). If and when this happens, think of a job with a retry configuration to act just like the SQS message visibility timeout. In regards to job delivery, Postgres [SKIP LOCKED](http://blog.2ndquadrant.com/what-is-select-skip-locked-for-in-postgresql-9-5) will guarantee exactly-once, which is only available in SQS via FIFO queues (and its throughput limitations). However, even if you have exactly-once delivery, this is not a guarantee that a job will never be processed more than once if you opt into retries, so keep the general recommendation for idempotency with queueing systems in mind. +Internally, pg-boss uses declarative list-based partitioning to create a physical table per queue within 1 logical job table. This partitioning strategy is a balance between global maintenance operations, queue storage isolation, and query plan optimization. According to [the docs](https://www.postgresql.org/docs/13/ddl-partitioning.html#DDL-PARTITIONING-DECLARATIVE-BEST-PRACTICES), this strategy should scale to thousands of queues. 
If your usage exceeds this and you experience performance issues, consider grouping queues into separate schemas in the target database. + +You may use as many Node.js instances as desired to connect to the same Postgres database, even running it inside serverless functions if needed. Each instance maintains a client-side connection pool or you can substitute your own database client, limited to the maximum number of connections your database server (or server-side connection pooler) can accept. If you find yourself needing even more connections, pg-boss can easily be used behind your custom web API. ## Job states -All jobs start out in the `created` state and become `active` when picked up for work. If job processing completes successfully, jobs will go to `completed`. If a job fails, it will typically enter the `failed` state. However, if a job has retry options configured, it will enter the `retry` state on failure instead and have a chance to re-enter `active` state. It's also possible for `active` jobs to become `expired`, which happens when job processing takes too long. Jobs can also enter `cancelled` state via [`cancel(id)`](#cancelid) or [`cancel([ids])`](#cancelids). +All jobs start out in the `created` state and become `active` via [`fetch(name, options)`](#fetchname-options) or in a polling worker via [`work()`](#work). -All jobs that are `completed`, `expired`, `cancelled` or `failed` become eligible for archiving (i.e. they will transition into the `archive` state) after the configured `archiveCompletedAfterSeconds` time. Once `archive`d, jobs will be automatically deleted by pg-boss after the configured deletion period. +In a worker, when your handler function completes, jobs will be marked `completed` automatically unless previously deleted via [`deleteJob(name, id)`](#deletejobname-id-options). 
If an unhandled error is thrown in your handler, the job will usually enter the `retry` state, and then the `failed` state once all retries have been attempted. -Here's a state diagram that shows the possible states and their transitions: +Uncompleted jobs may also be assigned to `cancelled` state via [`cancel(name, id)`](#cancelname-id-options), where they can be moved back into `created` via [`resume(name, id)`](#resumename-id-options). -![job state diagram](./images/job-states.png) +All jobs that are `completed`, `cancelled` or `failed` become eligible for archiving according to your configuration. Once archived, jobs will be automatically deleted after the configured retention period. # Database install -pg-boss can be installed into any database. When started, it will detect if it is installed and automatically create the required schema for all queue operations if needed. If the database doesn't already have the pgcrypto extension installed, you will need to have a superuser add it before pg-boss can create its schema. - -```sql -CREATE EXTENSION pgcrypto; -``` - -Once this is completed, pg-boss requires the [CREATE](http://www.postgresql.org/docs/9.5/static/sql-grant.html) privilege in order to create and maintain its schema. +pg-boss is usually installed into a dedicated schema in the target database. When started, it will automatically create this schema and all required storage objects (requires the [CREATE](http://www.postgresql.org/docs/13/static/sql-grant.html) privilege). ```sql GRANT CREATE ON DATABASE db1 TO leastprivuser; @@ -114,12 +112,15 @@ Where `$1` is the name of your schema if you've customized it. Otherwise, the d NOTE: If an existing schema was used during installation, created objects will need to be removed manually using the following commands. 
```sql -DROP TABLE ${schema}.archive; -DROP TABLE ${schema}.job; -DROP TABLE ${schema}.schedule; -DROP TABLE ${schema}.subscription; -DROP TABLE ${schema}.version; -DROP TYPE ${schema}.job_state; +DROP TABLE pgboss.version; +DROP TABLE pgboss.job; +DROP TABLE pgboss.archive; +DROP TYPE pgboss.job_state; +DROP TABLE pgboss.subscription; +DROP TABLE pgboss.schedule; +DROP FUNCTION pgboss.create_queue; +DROP FUNCTION pgboss.delete_queue; +DROP TABLE pgboss.queue; ``` # Direct database interactions @@ -128,40 +129,40 @@ If you need to interact with pg-boss outside of Node.js, such as other clients o ## Job table -The following command is the definition of the primary job table. For manual job creation, the only required column is `name`. All other columns are nullable or have sensible defaults. +The following command is the definition of the primary job table. For manual job creation, the only required column is `name`. All other columns are nullable or have defaults. ```sql - CREATE TABLE ${schema}.job ( - id uuid primary key not null default gen_random_uuid(), - name text not null, - priority integer not null default(0), - data jsonb, - state ${schema}.job_state not null default('${states.created}'), - retryLimit integer not null default(0), - retryCount integer not null default(0), - retryDelay integer not null default(0), - retryBackoff boolean not null default false, - startAfter timestamp with time zone not null default now(), - startedOn timestamp with time zone, - singletonKey text, - singletonOn timestamp without time zone, - expireIn interval not null default interval '15 minutes', - createdOn timestamp with time zone not null default now(), - completedOn timestamp with time zone, - keepUntil timestamp with time zone NOT NULL default now() + interval '14 days', - on_complete boolean not null default true, - output jsonb - ) +CREATE TABLE pgboss.job ( + id uuid not null default gen_random_uuid(), + name text not null, + priority integer not null default(0), + 
data jsonb, + state pgboss.job_state not null default('created'), + retry_limit integer not null default(0), + retry_count integer not null default(0), + retry_delay integer not null default(0), + retry_backoff boolean not null default false, + start_after timestamp with time zone not null default now(), + started_on timestamp with time zone, + singleton_key text, + singleton_on timestamp without time zone, + expire_in interval not null default interval '15 minutes', + created_on timestamp with time zone not null default now(), + completed_on timestamp with time zone, + keep_until timestamp with time zone NOT NULL default now() + interval '14 days', + output jsonb, + dead_letter text, + policy text, + CONSTRAINT job_pkey PRIMARY KEY (name, id) +) PARTITION BY LIST (name) ``` # Events -Each instance of pg-boss is an EventEmitter. You can run multiple instances of pg-boss for a variety of use cases including distribution and load balancing. Each instance has the freedom to process to whichever jobs you need. Because of this diversity, the job activity of one instance could be drastically different from another. - -> For example, if you were to process to `error` in instance A, it will not receive an `error` event from instance B. +The pg-boss class inherits from EventEmitter. ## `error` -The error event is raised from any errors that may occur during internal job fetching, monitoring and archiving activities. While not required, adding a listener to the error event is strongly encouraged: +The error event is raised from any errors that may occur during internal processing, such as scheduling and maintenance. While not required, adding a listener to the error event is strongly encouraged: > If an EventEmitter does not have at least one listener registered for the 'error' event, and an 'error' event is emitted, the error is thrown, a stack trace is printed, and the Node.js process exits. 
> @@ -173,8 +174,6 @@ Ideally, code similar to the following example would be used after creating your boss.on('error', error => logger.error(error)); ``` -> **Note: Since error events are only raised during internal housekeeping activities, they are not raised for direct API calls.** - ## `monitor-states` The `monitor-states` event is conditionally raised based on the `monitorStateInterval` configuration setting and only emitted from `start()`. If passed during instance creation, it will provide a count of jobs in each state per interval. This could be useful for logging or even determining if the job system is handling its load. @@ -189,7 +188,6 @@ The payload of the event is an object with a key per queue and state, such as th "retry": 40, "active": 26, "completed": 3400, - "expired": 4, "cancelled": 0, "failed": 49, "all": 4049 @@ -199,7 +197,6 @@ The payload of the event is an object with a key per queue and state, such as th "retry": 0, "active": 0, "completed": 645, - "expired": 0, "cancelled": 0, "failed": 0, "all": 645 @@ -209,7 +206,6 @@ The payload of the event is an object with a key per queue and state, such as th "retry": 40, "active": 26, "completed": 4045, - "expired": 4, "cancelled": 0, "failed": 4, "all": 4694 @@ -224,7 +220,7 @@ Emitted at most once every 2 seconds when workers are active and jobs are enteri { id: 'fc738fb0-1de5-4947-b138-40d6a790749e', name: 'my-queue', - options: { newJobCheckInterval: 2000 }, + options: { pollingInterval: 2000 }, state: 'active', count: 1, createdOn: 1620149137015, @@ -315,13 +311,12 @@ The following options can be set as properties in an object for additional confi ```js - const text = "select 1 as value1 from table1 where bar = $1" - const values = ['foo'] + const text = "select $1 as input" + const values = ['arg1'] - const { rows, rowCount } = await executeSql(text, values) + const { rows } = await executeSql(text, values) - assert(rows[0].value1 === 1) - assert(rowCount === 1) + assert(rows[0].input === 
'arg1') ``` * **schema** - string, defaults to "pgboss" @@ -333,10 +328,6 @@ The following options can be set as properties in an object for additional confi Queue options contain the following constructor-only settings. -* **uuid** - string, defaults to "v4" - - job uuid format used, "v1" or "v4" - * **archiveCompletedAfterSeconds** Specifies how long in seconds completed jobs get archived. Note: a warning will be emitted if set to lower than 60s and cron processing will be disabled. @@ -366,13 +357,17 @@ Queue options contain the following constructor-only settings. Maintenance operations include checking active jobs for expiration, archiving completed jobs from the primary job table, and deleting archived jobs from the archive table. -* **noSupervisor**, bool, default false +* **supervise**, bool, default true + + If this is set to false, maintenance and monitoring operations will be disabled on this instance. This is an advanced use case, as bypassing maintenance operations is not something you would want to do under normal circumstances. - If this is set to true, maintenance and monitoring operations will not be started during a `start()` after the schema is created. This is an advanced use case, as bypassing maintenance operations is not something you would want to do under normal circumstances. +* **schedule**, bool, default true -* **noScheduling**, bool, default false + If this is set to false, this instance will not monitor or created scheduled jobs during. This is an advanced use case you may want to do for testing or if the clock of the server is skewed and you would like to disable the skew warnings. - If this is set to true, this instance will not monitor scheduled jobs during `start()`. However, this instance can still use the scheduling api. This is an advanced use case you may want to do for testing or if the clock of the server is skewed and you would like to disable the skew warnings. 
+* **migrate**, bool, default true + + If this is set to false, this instance will skip attempts to run schema migratations during `start()`. If schema migrations exist, `start()` will throw and error and block usage. This is an advanced use case when the configured user account does not have schema mutation privileges. **Archive options** @@ -430,27 +425,30 @@ If the required database objects do not exist in the specified database, **`star > While this is most likely a welcome feature, be aware of this during upgrades since this could delay the promise resolution by however long the migration script takes to run against your data. For example, if you happened to have millions of jobs in the job table just hanging around for archiving and the next version of the schema had a couple of new indexes, it may take a few seconds before `start()` resolves. Most migrations are very quick, however, and are designed with performance in mind. -Additionally, all schema operations, both first-time provisioning and migrations, are nested within advisory locks to prevent race conditions during `start()`. Internally, these locks are created using `pg_advisory_xact_lock()` which auto-unlock at the end of the transaction and don't require a persistent session or the need to issue an unlock. This should make it compatible with most connection poolers, such as pgBouncer in transactional pooling mode. +Additionally, all schema operations, both first-time provisioning and migrations, are nested within advisory locks to prevent race conditions during `start()`. Internally, these locks are created using `pg_advisory_xact_lock()` which auto-unlock at the end of the transaction and don't require a persistent session or the need to issue an unlock. -One example of how this is useful would be including `start()` inside the bootstrapping of a pod in a ReplicaSet in Kubernetes. 
Being able to scale up your job processing using a container orchestration tool like k8s is becoming more and more popular, and pg-boss can be dropped into this system with no additional logic, fear, or special configuration. +One example of how this is useful would be including `start()` inside the bootstrapping of a pod in a ReplicaSet in Kubernetes. Being able to scale up your job processing using a container orchestration tool like k8s is becoming more and more popular, and pg-boss can be dropped into this system without any special startup handling. ## `stop(options)` -All job monitoring will be stopped and all workers on this instance will be removed. Basically, it's the opposite of `start()`. Even though `start()` may create new database objects during initialization, `stop()` will never remove anything from the database. +Stops all background processing, such as maintenance and scheduling, as well as all polling workers started with `work()`. -By default, calling `stop()` without any arguments will gracefully wait for all workers to finish processing active jobs before closing the internal connection pool and stopping maintenance operations. This behaviour can be configured using the stop options object. In graceful stop mode, the promise returned by `stop()` will still be resolved immediately. If monitoring for the end of the stop is needed, add a listener to the `stopped` event. +By default, calling `stop()` without any arguments will gracefully wait for all workers to finish processing active jobs before resolving. Emits a `stopped` event if needed. **Arguments** * `options`: object - * `destroy`, bool - Default: `false`. If `true` and the database connection is managed by pg-boss, it will destroy the connection pool. + * `wait`, bool + Default: `true`. If `true`, the promise won't be resolved until all workers and maintenance jobs are finished. * `graceful`, bool Default: `true`. 
If `true`, the PgBoss instance will wait for any workers that are currently processing jobs to finish, up to the specified timeout. During this period, new jobs will not be processed, but active jobs will be allowed to finish. + * `close`, bool + Default: `true`. If the database connection is managed by pg-boss, it will close the connection pool. Use `false` if needed to continue allowing operations such as `send()` and `fetch()`. + * `timeout`, int Default: 30000. Maximum time (in milliseconds) to wait for workers to finish job processing before shutting down the PgBoss instance. @@ -458,7 +456,7 @@ By default, calling `stop()` without any arguments will gracefully wait for all ## `send()` -Creates a new job and resolves the job's unique identifier (uuid). +Creates a new job and returns the job id. > `send()` will resolve a `null` for job id under some use cases when using unique jobs or throttling (see below). These options are always opt-in on the send side and therefore don't result in a promise rejection. @@ -536,14 +534,13 @@ Available in constructor as a default, or overridden in send. **Connection options** * **db**, object - A wrapper object containing an async method called `executeSql` that performs the query to the db. Can be used to manage jobs inside a transaction. Example: + + Instead of using pg-boss's default adapter, you can use your own, as long as it implements the following interface (the same as the pg module). - ``` - const db = { - async executeSql (sql, values) { - return trx.query(sql, values) - } - } + ```ts + interface Db { + executeSql(text: string, values: any[]): Promise<{ rows: any[] }>; + } ``` **Deferred jobs** @@ -555,51 +552,29 @@ Available in constructor as a default, or overridden in send. Default: 0 -**Unique jobs** - -* **singletonKey** string - - Allows a max of 1 job (with the same name and singletonKey) to be queued or active. 
- - ```js - boss.send('my-job', {}, {singletonKey: '123'}) // resolves a jobId - boss.send('my-job', {}, {singletonKey: '123'}) // resolves a null jobId until first job completed - ``` - - This can be used in conjunction with throttling explained below. - - * **useSingletonQueue** boolean - - When used in conjunction with singletonKey, allows a max of 1 job to be queued. - - >By default, there is no limit on the number of these jobs that may be active. However, this behavior may be modified by passing the [enforceSingletonQueueActiveLimit](#fetch) option. - - ```js - boss.send('my-job', {}, {singletonKey: '123', useSingletonQueue: true}) // resolves a jobId - boss.send('my-job', {}, {singletonKey: '123', useSingletonQueue: true}) // resolves a null jobId until first job becomes active - ``` - -**Throttled jobs** +**Throttle or debounce jobs** * **singletonSeconds**, int * **singletonMinutes**, int * **singletonHours**, int * **singletonNextSlot**, bool +* **singletonKey** string -Throttling jobs to 'once every n units', where units could be seconds, minutes, or hours. This option is set on the send side of the API since jobs may or may not be created based on the existence of other jobs. +Throttling jobs to 'one per time slot', where units could be seconds, minutes, or hours. This option is set on the send side of the API since jobs may or may not be created based on the existence of other jobs. -For example, if you set the `singletonMinutes` to 1, then submit 2 jobs within a minute, only the first job will be accepted and resolve a job id. The second request will be discarded, but resolve a null instead of an id. +For example, if you set the `singletonMinutes` to 1, then submit 2 jobs within the same minute, only the first job will be accepted and resolve a job id. The second request will resolve a null instead of a job id. > When a higher unit is is specified, lower unit configuration settings are ignored. 
Setting `singletonNextSlot` to true will cause the job to be scheduled to run after the current time slot if and when a job is throttled. This option is set to true, for example, when calling the convenience function `sendDebounced()`. -**Completion jobs** +As with queue policies, using `singletonKey` will extend throttling to allow one job per key within the time slot. -* **onComplete**, bool (Default: false) +**Dead Letter Queues** -When a job completes, a completion job will be created in the queue, copying the same retention policy as the job, for the purpose of `onComplete()` or `fetchCompleted()`. If completion jobs are not used, they will be archived according to the retention policy. If the queue in question has a very high volume, this can be set to `false` to bypass creating the completion job. This can also be set in the constructor as a default for all calls to `send()`. +* **deadLetter**, string +When a job fails after all retries, if a `deadLetter` property exists, the job's payload will be copied into that queue, copying the same retention and retry configuration as the original job. ```js @@ -643,37 +618,28 @@ const jobId = await boss.send({ console.log(`job ${jobId} submitted`) ``` -### `sendAfter(name, data, options, seconds | ISO date string | Date)` +### `sendAfter(name, data, options, value)` Send a job that should start after a number of seconds from now, or after a specific date time. This is a convenience version of `send()` with the `startAfter` option assigned. -### `sendOnce(name, data, options, key)` +`value`: int: seconds | string: ISO date string | Date -Send a job with a unique key to only allow 1 job to be in created, retry, or active state at a time. -This is a convenience version of `send()` with the `singletonKey` option assigned. - -### `sendSingleton(name, data, options)` - -Send a job but only allow 1 job to be in created or retry state at at time. 
- -This is a convenience version of `send()` with the `singletonKey` option assigned. - -### `sendThrottled(name, data, options, seconds [, key])` +### `sendThrottled(name, data, options, seconds, key)` Only allows one job to be sent to the same queue within a number of seconds. In this case, the first job within the interval is allowed, and all other jobs within the same interval are rejected. This is a convenience version of `send()` with the `singletonSeconds` and `singletonKey` option assigned. The `key` argument is optional. -### `sendDebounced(name, data, options, seconds [, key])` +### `sendDebounced(name, data, options, seconds, key)` Like, `sendThrottled()`, but instead of rejecting if a job is already sent in the current interval, it will try to add the job to the next interval if one hasn't already been sent. This is a convenience version of `send()` with the `singletonSeconds`, `singletonKey` and `singletonNextSlot` option assigned. The `key` argument is optional. -## `insert([jobs])` +## `insert(Job[])` Create multiple jobs in one request with an array of objects. @@ -694,166 +660,115 @@ interface JobInsert { singletonKey?: string; expireInSeconds?: number; keepUntil?: Date | string; - onComplete?: boolean + deadLetter?: string; } ``` +## `fetch(name, options)` -## `fetch()` - -Typically one would use `work()` for automated polling for new jobs based upon a reasonable interval to finish the most jobs with the lowest latency. While `work()` is a yet another free service we offer and it can be awfully convenient, sometimes you may have a special use case around when a job can be retrieved. Or, perhaps like me, you need to provide jobs via other entry points such as a web API. - -`fetch()` allows you to skip all that polling nonsense that `work()` does and puts you back in control of database traffic. Once you have your shiny job, you'll use either `complete()` or `fail()` to mark it as finished. 
- -### `fetch(name)` +Returns an array of jobs from a queue **Arguments** -- `name`: string, queue name or pattern +- `name`: string +- `options`: object -**Resolves** -- `job`: job object, `null` if none found + * `batchSize`, int, *default: 1* -### `fetch(name, batchSize, [, options])` + Number of jobs to return -**Arguments** -- `name`: string, queue name or pattern -- `batchSize`: number, # of jobs to fetch -- `options`: object + * `priority`, bool, *default: true* - * `includeMetadata`, bool - - If `true`, all job metadata will be returned on the job object. The following table shows each property and its type, which is basically all columns from the job table. - - | Prop | Type | | - | - | - | -| - | id | string, uuid | - | name| string | - | data | object | - | priority | number | - | state | string | - | retrylimit | number | - | retrycount | number | - | retrydelay | number | - | retrybackoff | bool | - | startafter | string, timestamp | - | startedon | string, timestamp | - | singletonkey | string | - | singletonon | string, timestamp | - | expirein | object, pg interval | - | createdon | string, timestamp | - | completedon | string, timestamp | - | keepuntil | string, timestamp | - | oncomplete | bool | - | output | object | - - * `enforceSingletonQueueActiveLimit`, bool - - If `true`, modifies the behavior of the `useSingletonQueue` flag to allow a max of 1 job to be queued plus a max of 1 job to be active. - >Note that use of this option can impact performance on instances with large numbers of jobs. - - -**Resolves** -- `[job]`: array of job objects, `null` if none found + If true, allow jobs with a higher priority to be fetched before jobs with lower or no priority -**Notes** + * `includeMetadata`, bool, *default: false* -If you pass a batchSize, `fetch()` will always resolve an array response, even if only 1 job is returned. This seemed like a great idea at the time. + If `true`, all job metadata will be returned on the job object. 
-The following code shows how to utilize batching via `fetch()` to get and complete 20 jobs at once on-demand. + ```js + interface JobWithMetadata { + id: string; + name: string; + data: T; + priority: number; + state: 'created' | 'retry' | 'active' | 'completed' | 'cancelled' | 'failed'; + retryLimit: number; + retryCount: number; + retryDelay: number; + retryBackoff: boolean; + startAfter: Date; + startedOn: Date; + singletonKey: string | null; + singletonOn: Date | null; + expireIn: PostgresInterval; + createdOn: Date; + completedOn: Date | null; + keepUntil: Date; + deadLetter: string, + policy: string, + output: object + } + ``` -```js -const queue = 'email-daily-digest' -const batchSize = 20 +**Notes** -const jobs = await boss.fetch(queue, batchSize) +The following example shows how to fetch and delete up to 20 jobs. -if(!jobs) { - return -} +```js +const QUEUE = 'email-daily-digest' +const emailer = require('./emailer.js') -for (let i = 0; i < jobs.length; i++) { - const job = jobs[i] +const jobs = await boss.fetch(QUEUE, { batchSize: 20 }) - try { - await emailer.send(job.data) - await boss.complete(job.id) - } catch(err) { - await boss.fail(job.id, err) - } -} +await Promise.allSettled(jobs.map(async job => { + try { + await emailer.send(job.data) + await boss.deleteJob(QUEUE, job.id) + } catch(err) { + await boss.fail(QUEUE, job.id, err) + } +})) ``` -### `fetchCompleted(name [, batchSize] [, options])` - -Same as `fetch()`, but retrieves any completed jobs. See [`onComplete()`](#oncompletename--options-handler) for more information. ## `work()` -Adds a new polling worker for a queue and executes the provided callback function when jobs are found. Multiple workers can be added if needed. - -Workers can be stopped via `offWork()` all at once by queue name or individually by using the unique id resolved by `work()`. Workers may be monitored by listening to the `wip` event. 
+Adds a new polling worker for a queue and executes the provided callback function when jobs are found. Each call to work() will add a new worker and resolve a unique worker id.
 
-Queue patterns use the `*` character to match 0 or more characters. For example, a job from queue `status-report-12345` would be fetched with pattern `status-report-*` or even `stat*5`.
+Workers can be stopped via `offWork()` all at once by queue name or individually by using the worker id. Worker activity may be monitored by listening to the `wip` event.
 
-The default concurrency for `work()` is 1 job every 2 seconds. Both the interval and the number of jobs per interval can be changed globally or per-queue with configuration options.
+The default options for `work()` is 1 job every 2 seconds.
 
-### `work(name [, options], handler)`
+### `work(name, options, handler)`
 
 **Arguments**
 - `name`: string, *required*
 - `options`: object
-- `handler`: function(job), *required*
+- `handler`: function(jobs), *required*
 
 **Options**
 
-* **teamSize**, int
-
-  Default: 1. How many jobs can be fetched per polling interval. Callback will be executed once per job.
-
-* **teamConcurrency**, int
-
-  Default: 1. How many callbacks will be called concurrently if promises are used for polling backpressure. Intended to be used along with `teamSize`.
-
-* **teamRefill**, bool
-
-  Default: false. If true, worker will refill the queue based on the number of completed jobs from the last batch (if `teamSize` > 1) in order to keep the active job count as close to `teamSize` as possible. This could be helpful if one of the fetched jobs is taking longer than expected.
+* **batchSize**, int, *(default=1)*
 
-* **batchSize**, int
+  Same as in [`fetch()`](#fetch)
 
-  How many jobs can be fetched per polling interval. Callback will be executed once per batch. 
+* **includeMetadata**, bool, *(default=true)* -* **includeMetadata**, bool + Same as in [`fetch()`](#fetch) - Same as in [`fetch()`](#fetch) +* **priority**, bool, *(default=true)* -* **enforceSingletonQueueActiveLimit**, bool + Same as in [`fetch()`](#fetch) - Same as in [`fetch()`](#fetch) +* **pollingIntervalSeconds**, int, *(default=2)* -**Polling options** - -How often workers will poll the queue table for jobs. Available in the constructor as a default or per worker in `work()` and `onComplete()`. - -* **newJobCheckInterval**, int - - Interval to check for new jobs in milliseconds, must be >=100 - -* **newJobCheckIntervalSeconds**, int - - Interval to check for new jobs in seconds, must be >=1 - -* Default: 2 seconds - - > When a higher unit is is specified, lower unit configuration settings are ignored. + Interval to check for new jobs in seconds, must be >=0.5 (500ms) **Handler function** -`handler` should either be an `async` function or return a promise. If an error occurs in the handler, it will be caught and stored into an output storage column in addition to marking the job as failed. +`handler` should return a promise (Usually this is an `async` function). If an unhandled error occurs in a handler, `fail()` will automatically be called for the jobs, storing the error in the `output` property, making the job or jobs available for retry. -Enforcing promise-returning handlers that are awaited in the workers defers polling for new jobs until the existing jobs are completed, providing backpressure. - -The job object has the following properties. +The jobs argument is an array of jobs with the following properties. | Prop | Type | | | - | - | -| @@ -861,73 +776,42 @@ The job object has the following properties. |`name`| string | |`data`| object | -> If the job is not completed, it will expire after the configured expiration period. 
-Following is an example of a worker that returns a promise (`sendWelcomeEmail()`) for completion with the teamSize option set for increased job concurrency between polling intervals. +An example of a worker that checks for a job every 10 seconds. ```js -const options = { teamSize: 5, teamConcurrency: 5 } -await boss.work('email-welcome', options, job => myEmailService.sendWelcomeEmail(job.data)) +await boss.work('email-welcome', { pollingIntervalSeconds: 10 }, ([ job ]) => myEmailService.sendWelcomeEmail(job.data)) ``` -Similar to the first example, but with a batch of jobs at once. +An example of a worker that returns a maximum of 5 jobs in a batch. ```js -await boss.work('email-welcome', { batchSize: 5 }, - jobs => myEmailService.sendWelcomeEmails(jobs.map(job => job.data)) -) +await boss.work('email-welcome', { batchSize: 5 }, (jobs) => myEmailService.sendWelcomeEmails(jobs.map(job => job.data))) ``` -### `onComplete(name [, options], handler)` +### `work(name, handler)` -Sometimes when a job completes, expires or fails, it's important enough to trigger other things that should react to it. `onComplete` works identically to `work()` and was created to facilitate the creation of orchestrations or sagas between jobs that may or may not know about each other. This common messaging pattern allows you to keep multi-job flow logic out of the individual job handlers so you can manage things in a more centralized fashion while not losing your mind. As you most likely already know, asynchronous jobs are complicated enough already. Internally, these jobs have a special prefix of `__state__completed__`. +Simplified work() without an options argument -The callback for `onComplete()` returns a job containing the original job and completion details. `request` will be the original job as submitted with `id`, `name` and `data`. `response` may or may not have a value based on arguments in [complete()](#completeid--data) or [fail()](#failid--data). 
+```js +await boss.work('email-welcome', ([ job ]) => emailer.sendWelcomeEmail(job.data)) +``` -Here's an example from the test suite showing this in action. +work() with active job deletion ```js -const jobName = 'onCompleteFtw' -const requestPayload = { token:'trivial' } -const responsePayload = { message: 'so verbose', code: '1234' } - -boss.onComplete(jobName, job => { - assert.strictEqual(jobId, job.data.request.id) - assert.strictEqual(job.data.request.data.token, requestPayload.token) - assert.strictEqual(job.data.response.message, responsePayload.message) - assert.strictEqual(job.data.response.code, responsePayload.code) +const queue = 'email-welcome' - finished() // test suite completion callback +await boss.work(queue, async ([ job ]) => { + await emailer.sendWelcomeEmail(job.data) + await boss.deleteJob(queue, job.id) }) - -const jobId = await boss.send(jobName, requestPayload) -const job = await boss.fetch(jobName) -await boss.complete(job.id, responsePayload) ``` -The following is an example data object from the job retrieved in onComplete() above. +## `notifyWorker(id)` + +Notifies a worker by id to bypass the job polling interval (see `pollingIntervalSeconds`) for this iteration in the loop. -```js -{ - "request": { - "id": "26a608d0-79bf-11e8-8391-653981c16efd", - "name": "onCompleteFtw", - "data": { - "token": "trivial" - } - }, - "response": { - "message": "so verbose", - "code": "1234" - }, - "failed": false, - "state": "completed", - "createdOn": "2018-06-26T23:04:12.9392-05:00", - "startedOn": "2018-06-26T23:04:12.945533-05:00", - "completedOn": "2018-06-26T23:04:12.949092-05:00", - "retryCount": 0 -} -``` ## `offWork(value)` @@ -938,13 +822,9 @@ Removes a worker by name or id and stops polling. If a string, removes all workers found matching the name. If an object, only the worker with a matching `id` will be removed. -### `offComplete(value)` - -Similar to `offWork()`, but removes an `onComplete()` worker. 
- ## `publish(event, data, options)` -Publish an event with optional data and options (Same as `send()` args). Looks up all subscriptions for the event and sends jobs to all those queues. Returns an array of job ids. +Publish an event with optional data and options (Same as `send()` args). Looks up all subscriptions for the event and sends to each queue. ## `subscribe(event, name)` @@ -956,7 +836,7 @@ Remove the subscription of queue `name` to `event`. ## Scheduling -Jobs may be sent automatically based on a cron expression. As with other cron-based systems, at least one instance needs to be running for scheduling to work. In order to reduce the amount of evaluations, schedules are checked every 30 seconds, which means the 6-placeholder format should be discouraged in favor of the minute-level precision 5-placeholder format. +Jobs may be created automatically based on a cron expression. As with other cron-based systems, at least one instance needs to be running for scheduling to work. In order to reduce the amount of evaluations, schedules are checked every 30 seconds, which means the 6-placeholder format should be discouraged in favor of the minute-level precision 5-placeholder format. For example, use this format, which implies "any second during 3:30 am every day" @@ -976,7 +856,7 @@ If needed, the default clock monitoring interval can be adjusted using `clockMon ```js { - noScheduling: true + schedule: false } ``` @@ -1009,35 +889,41 @@ Removes a schedule by queue name. Retrieves an array of all scheduled jobs currently being monitored. -## `cancel(id, options)` +## `deleteJob(name, id, options)` -Cancels a pending or active job. +Deletes a job by id. -The promise will resolve on a successful cancel, or reject if the job could not be cancelled. +> Job deletion is offered if desired for a "fetch then delete" workflow similar to SQS. 
This is not the default behavior for workers so "everything just works" by default, including job throttling and debouncing, which requires jobs to exist to enforce a unique constraint. For example, if you are debouncing a queue to "only allow 1 job per hour", deleting jobs after processing would re-open that time slot, breaking your throttling policy. -## `cancel([ids], options)` +## `deleteJob(name, [ids], options)` -Cancels a set of pending or active jobs. +Deletes a set of jobs by id. + +## `cancel(name, id, options)` -The promise will resolve on a successful cancel, or reject if not all of the requested jobs could not be cancelled. +Cancels a pending or active job. -> Due to the nature of the use case of attempting a batch job cancellation, it may be likely that some jobs were in flight and even completed during the cancellation request. Because of this, cancellation will cancel as many as possible and reject with a message showing the number of jobs that could not be cancelled because they were no longer active. +## `cancel(name, [ids], options)` -## `resume(id, options)` +Cancels a set of pending or active jobs. + +When passing an array of ids, it's possible that the operation may partially succeed based on the state of individual jobs requested. Consider this a best-effort attempt. + +## `resume(name, id, options)` Resumes a cancelled job. -## `resume([ids], options)` +## `resume(name, [ids], options)` Resumes a set of cancelled jobs. -## `complete(id [, data, options])` +## `complete(name, id, data, options)` -Completes an active job. This would likely only be used with `fetch()`. Accepts an optional `data` argument for usage with [`onComplete()`](#oncompletename--options-handler) state-based workers or `fetchCompleted()`. +Completes an active job. This would likely only be used with `fetch()`. Accepts an optional `data` argument. The promise will resolve on a successful completion, or reject if the job could not be completed. 
-## `complete([ids], options)` +## `complete(name, [ids], options)` Completes a set of active jobs. @@ -1045,13 +931,13 @@ The promise will resolve on a successful completion, or reject if not all of the > See comments above on `cancel([ids])` regarding when the promise will resolve or reject because of a batch operation. -## `fail(id [, data, options])` +## `fail(name, id, data, options)` -Marks an active job as failed. This would likely only be used with `fetch()`. Accepts an optional `data` argument for usage with [`onFail()`](#onfailname--options-handler) state-based workers or `fetchFailed()`. +Marks an active job as failed. The promise will resolve on a successful assignment of failure, or reject if the job could not be marked as failed. -## `fail([ids], options)` +## `fail(name, [ids], options)` Fails a set of active jobs. @@ -1059,11 +945,64 @@ The promise will resolve on a successful failure state assignment, or reject if > See comments above on `cancel([ids])` regarding when the promise will resolve or reject because of a batch operation. -## `notifyWorker(id)` -Notifies a worker by id to bypass the job polling interval (see `newJobCheckInterval`) for this iteration in the loop. +## `getJobById(name, id, options)` -## `getQueueSize(name [, options])` +Retrieves a job with all metadata by name and id + +**options** + +* `includeArchive`: bool, default: false + + If `true`, it will search for the job in the archive if not found in the primary job storage. + +## `createQueue(name, Queue)` + +Creates a queue. + +Options: Same retry, expiration and retention as documented above. 
+ +```ts +type Queue = RetryOptions & + ExpirationOptions & + RetentionOptions & + { + name: string, + policy: QueuePolicy, + deadLetter?: string + } +``` + +Allowed policy values: + +| Policy | Description | +| - | - | +| standard | (Default) Supports all standard features such as deferral, priority, and throttling | +| short | All standard features, but only allows 1 job to be queued, unlimited active. Can be extended with `singletonKey` | +| singleton | All standard features, but only allows 1 job to be active, unlimited queued. Can be extended with `singletonKey` | +| stately | Combination of short and singleton: Only allows 1 job per state, queued and/or active. Can be extended with `singletonKey` | + +## `updateQueue(name, options)` + +Updates options on an existing queue. The policy can be changed, but understand this won't impact existing jobs in flight and will only apply the new policy on new incoming jobs. + +## `purgeQueue(name)` + +Deletes all queued jobs in a queue. + +## `deleteQueue(name)` + +Deletes a queue and all jobs from the active job table. Any jobs in the archive table are retained. + +## `getQueues()` + +Returns all queues + +## `getQueue(name)` + +Returns a queue by name + +## `getQueueSize(name, options)` Returns the number of pending jobs in a queue by name. @@ -1081,18 +1020,14 @@ As an example, the following options object include active jobs along with creat } ``` -## `getJobById(id, options)` - -Retrieves a job with all metadata by id in either the primary or archive storage. - -## `deleteQueue(name)` +## `clearStorage()` -Deletes all pending jobs in the specified queue from the active job table. All jobs in the archive table are retained. +Utility function if and when needed to clear all job and archive storage tables. Internally, this issues a `TRUNCATE` command. -## `deleteAllQueues()` +## `isInstalled()` -Deletes all pending jobs from all queues in the active job table. All jobs in the archive table are retained. 
+Utility function to see if pg-boss is installed in the configured database. -## `clearStorage()` +## `schemaVersion()` -Utility function if and when needed to empty all job storage. Internally, this issues a `TRUNCATE` command against all jobs tables, archive included. +Utility function to get the database schema version. \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 878a73fd..6077827a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,97 +1,85 @@ { "name": "pg-boss", - "version": "9.0.3", + "version": "10.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "pg-boss", - "version": "9.0.3", + "version": "10.0.0", "license": "MIT", "dependencies": { "cron-parser": "^4.0.0", - "delay": "^5.0.0", - "lodash.debounce": "^4.0.8", - "p-map": "^4.0.0", "pg": "^8.5.1", - "serialize-error": "^8.1.0", - "uuid": "^9.0.0" + "serialize-error": "^8.1.0" }, "devDependencies": { "@types/node": "^20.3.3", "luxon": "^3.0.1", "mocha": "^10.0.0", - "nyc": "^15.1.0", + "nyc": "^17.0.0", "standard": "^17.0.0" }, "engines": { - "node": ">=16" - } - }, - "node_modules/@aashutoshrathi/word-wrap": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", - "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", - "dev": true, - "engines": { - "node": ">=0.10.0" + "node": ">=20" } }, "node_modules/@ampproject/remapping": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.1.tgz", - "integrity": "sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", "dev": true, "dependencies": { - 
"@jridgewell/gen-mapping": "^0.3.0", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@babel/code-frame": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.5.tgz", - "integrity": "sha512-Xmwn266vad+6DAqEB2A6V/CcZVp62BbwVmcOJc2RPuwih1kw02TjQvWVWlcKGbBPd+8/0V5DEkOcizRGYsspYQ==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", "dev": true, "dependencies": { - "@babel/highlight": "^7.22.5" + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/compat-data": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.5.tgz", - "integrity": "sha512-4Jc/YuIaYqKnDDz892kPIledykKg12Aw1PYX5i/TY28anJtacvM1Rrr8wbieB9GfEJwlzqT0hUEao0CxEebiDA==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.2.tgz", + "integrity": "sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.22.5.tgz", - "integrity": "sha512-SBuTAjg91A3eKOvD+bPEz3LlhHZRNu1nFOVts9lzDJTXshHTjII0BAtDS3Y2DAkdZdDKWVZGVwkDfc4Clxn1dg==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.22.5", - "@babel/generator": "^7.22.5", - "@babel/helper-compilation-targets": 
"^7.22.5", - "@babel/helper-module-transforms": "^7.22.5", - "@babel/helpers": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.5", - "@babel/types": "^7.22.5", - "convert-source-map": "^1.7.0", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", + "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", - "json5": "^2.2.2", - "semver": "^6.3.0" + "json5": "^2.2.3", + "semver": "^6.3.1" }, "engines": { "node": ">=6.9.0" @@ -101,15 +89,30 @@ "url": "https://opencollective.com/babel" } }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/@babel/generator": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.22.5.tgz", - "integrity": "sha512-+lcUbnTRhd0jOewtFSedLyiPsD5tswKkbgcezOqqWFUVNEwoUTlpPOBmvhG7OXWLR4jMdv0czPGH5XbflnD1EA==", + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.0.tgz", + "integrity": "sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==", "dev": true, "dependencies": { - "@babel/types": 
"^7.22.5", - "@jridgewell/gen-mapping": "^0.3.2", - "@jridgewell/trace-mapping": "^0.3.17", + "@babel/types": "^7.25.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" }, "engines": { @@ -117,163 +120,124 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.5.tgz", - "integrity": "sha512-Ji+ywpHeuqxB8WDxraCiqR0xfhYjiDE/e6k7FuIaANnoOFxAHskHChz4vA1mJC9Lbm01s1PVAGhQY4FUKSkGZw==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", "dev": true, "dependencies": { - "@babel/compat-data": "^7.22.5", - "@babel/helper-validator-option": "^7.22.5", - "browserslist": "^4.21.3", + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", "lru-cache": "^5.1.1", - "semver": "^6.3.0" + "semver": "^6.3.1" }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz", - "integrity": "sha512-XGmhECfVA/5sAt+H+xpSg0mfrHq6FzNr9Oxh7PSEBBRUb/mL7Kz3NICXb194rCqAEdxkhPT1a88teizAFyvk8Q==", - "dev": true, "engines": { "node": ">=6.9.0" } }, - "node_modules/@babel/helper-function-name": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.22.5.tgz", - "integrity": "sha512-wtHSq6jMRE3uF2otvfuD3DIvVhOsSNshQl0Qrd7qC9oQJzHvOL4qQXlQn2916+CXGywIjpGuIkoyZRRxHPiNQQ==", + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + 
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, - "dependencies": { - "@babel/template": "^7.22.5", - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz", - "integrity": "sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw==", - "dev": true, - "dependencies": { - "@babel/types": "^7.22.5" - }, - "engines": { - "node": ">=6.9.0" + "bin": { + "semver": "bin/semver.js" } }, "node_modules/@babel/helper-module-imports": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.22.5.tgz", - "integrity": "sha512-8Dl6+HD/cKifutF5qGd/8ZJi84QeAKh+CEe1sBzz8UayBBGg1dAIJrdHOcOM5b2MpzWL2yuotJTtGjETq0qjXg==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.22.5.tgz", - "integrity": "sha512-+hGKDt/Ze8GFExiVHno/2dvG5IdstpzCq0y4Qc9OJ25D4q3pKfiIP/4Vp3/JvhDkLKsDK2api3q3fpIgiIF5bw==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + "integrity": 
"sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-module-imports": "^7.22.5", - "@babel/helper-simple-access": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" }, "engines": { "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-simple-access": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz", - "integrity": "sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w==", - "dev": true, - "dependencies": { - "@babel/types": "^7.22.5" }, - "engines": { - "node": ">=6.9.0" + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.5.tgz", - "integrity": "sha512-thqK5QFghPKWLhAV321lxF95yCg2K3Ob5yw+M3VHWfdia0IkPXUtoLH8x/6Fh486QUvzhb8YOWHChTVen2/PoQ==", + "node_modules/@babel/helper-simple-access": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", "dev": true, "dependencies": { - "@babel/types": "^7.22.5" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.22.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz", - "integrity": "sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz", - "integrity": "sha512-aJXu+6lErq8ltp+JhkJUfk1MTGyuA4v7f3pA+BJ5HLfNC6nAQ0Cpi9uOquUj8Hehg0aUiHzWQbOVJGao6ztBAQ==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-option": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.22.5.tgz", - "integrity": "sha512-R3oB6xlIVKUnxNUxbmgq7pKjxpru24zlimpE8WK47fACIlM0II/Hm1RS8IaOI7NgCr6LNS+jl5l75m20npAziw==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.22.5.tgz", - "integrity": "sha512-pSXRmfE1vzcUIDFQcSGA5Mr+GxBV9oiRKDuDxXvWQQBCh8HoIjs/2DlDB7H8smac1IVrB9/xdXj2N3Wol9Cr+Q==", + "version": "7.25.0", + "resolved": 
"https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.0.tgz", + "integrity": "sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==", "dev": true, "dependencies": { - "@babel/template": "^7.22.5", - "@babel/traverse": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.22.5.tgz", - "integrity": "sha512-BSKlD1hgnedS5XRnGOljZawtag7H1yPfQp0tdNJCHoH6AZ+Pcm9VvkrK59/Yy593Ypg0zMxH2BxD1VPYUQ7UIw==", + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", "dev": true, "dependencies": { - "@babel/helper-validator-identifier": "^7.22.5", - "chalk": "^2.0.0", - "js-tokens": "^4.0.0" + "@babel/helper-validator-identifier": "^7.24.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" }, "engines": { "node": ">=6.9.0" @@ -351,10 +315,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.22.5.tgz", - "integrity": "sha512-DFZMC9LJUG9PLOclRC32G63UXwzqS2koQC8dkx+PLdmt1xSePYpbT/NbsrJy8Q/muXz7o/h/d4A7Fuyixm559Q==", + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.3.tgz", + "integrity": "sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==", "dev": true, + "dependencies": { + "@babel/types": "^7.25.2" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -363,34 +330,31 @@ } }, "node_modules/@babel/template": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.22.5.tgz", - "integrity": 
"sha512-X7yV7eiwAxdj9k94NEylvbVHLiVG1nvzCV2EAowhxLTwODV1jl9UzZ48leOC0sH7OnuHrIkllaBgneUykIcZaw==", + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", "dev": true, "dependencies": { - "@babel/code-frame": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5" + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.22.5.tgz", - "integrity": "sha512-7DuIjPgERaNo6r+PZwItpjCZEa5vyw4eJGufeLxrPdBXBoLcCJCIasvK6pK/9DVNrLZTLFhUGqaC6X/PA007TQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.22.5", - "@babel/generator": "^7.22.5", - "@babel/helper-environment-visitor": "^7.22.5", - "@babel/helper-function-name": "^7.22.5", - "@babel/helper-hoist-variables": "^7.22.5", - "@babel/helper-split-export-declaration": "^7.22.5", - "@babel/parser": "^7.22.5", - "@babel/types": "^7.22.5", - "debug": "^4.1.0", + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.3.tgz", + "integrity": "sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/parser": "^7.25.3", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.2", + "debug": "^4.3.1", "globals": "^11.1.0" }, "engines": { @@ -398,13 +362,13 @@ } }, "node_modules/@babel/types": { - "version": "7.22.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.22.5.tgz", - "integrity": "sha512-zo3MIHGOkPOfoRXitsgHLjEXmlDaD/5KU1Uzuc9GNiZPhSqVxVRtxuPaSBZDsYZ9qV88AjtMtWW7ww98loJ9KA==", + "version": "7.25.2", + "resolved": 
"https://registry.npmjs.org/@babel/types/-/types-7.25.2.tgz", + "integrity": "sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.22.5", - "@babel/helper-validator-identifier": "^7.22.5", + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", "to-fast-properties": "^2.0.0" }, "engines": { @@ -427,18 +391,18 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.5.1.tgz", - "integrity": "sha512-Z5ba73P98O1KUYCCJTUeVpja9RcGoMdncZ6T49FCUl2lN38JtCJ+3WgIDBv0AuY4WChU5PmtJmOCTlN6FZTFKQ==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz", + "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==", "dev": true, "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", - "integrity": "sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -469,9 +433,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": 
"sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -508,22 +472,23 @@ } }, "node_modules/@eslint/js": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", - "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", + "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", - "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", + "version": "0.11.14", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", + "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "deprecated": "Use @eslint/config-array instead", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^1.2.1", - "debug": "^4.1.1", + "@humanwhocodes/object-schema": "^2.0.2", + "debug": "^4.3.1", "minimatch": "^3.0.5" }, "engines": { @@ -566,9 +531,10 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", - "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": 
"sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", "dev": true }, "node_modules/@istanbuljs/load-nyc-config": { @@ -671,59 +637,53 @@ } }, "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz", - "integrity": "sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", "dev": true, "dependencies": { - "@jridgewell/set-array": "^1.0.1", + "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", - "@jridgewell/trace-mapping": "^0.3.9" + "@jridgewell/trace-mapping": "^0.3.24" }, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz", - "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", "dev": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/set-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz", - "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": 
"sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", "dev": true, "engines": { "node": ">=6.0.0" } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", "dev": true }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.18", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz", - "integrity": "sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA==", + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", "dev": true, "dependencies": { - "@jridgewell/resolve-uri": "3.1.0", - "@jridgewell/sourcemap-codec": "1.4.14" + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@jridgewell/trace-mapping/node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.14", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz", - "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==", - "dev": true - }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -766,15 +726,24 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.3.3", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.3.3.tgz", - "integrity": "sha512-wheIYdr4NYML61AjC8MKj/2jrR/kDQri/CIpVoZwldwhnIrD/j9jIU5bJ8yBKuB2VhpFV7Ab6G2XkBjv9r9Zzw==", + "version": "20.14.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.14.tgz", + "integrity": "sha512-d64f00982fS9YoOgJkAMolK7MN8Iq3TDdVjchbYHdEmjth/DHowx82GnoA+tVUAN+7vxfYUgAzi+JXbKNd2SDQ==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", + "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", "dev": true }, "node_modules/acorn": { - "version": "8.9.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.9.0.tgz", - "integrity": "sha512-jaVNAFBHNLXspO543WnNNPZFRtavh3skAkITqD0/2aeMkKZTN+254PyhwxFYrk3vQ1xfY+2wbesJMs/JC8/PwQ==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -796,6 +765,7 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" @@ -821,9 +791,9 @@ } }, "node_modules/ansi-colors": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", - "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.3.tgz", + "integrity": 
"sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==", "dev": true, "engines": { "node": ">=6" @@ -891,28 +861,32 @@ "dev": true }, "node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array-includes": { - "version": "3.1.6", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.6.tgz", - "integrity": "sha512-sgTbLvL6cNnw24FnbaDyjmvddQ2ML8arZsgaJhoABMoplz/4QRhtrYS+alr1BUM1Bwp6dhx8vVCBSLG+StwOFw==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", "is-string": "^1.0.7" }, "engines": { @@ -922,15 +896,55 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/array.prototype.findlast": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlast/-/array.prototype.findlast-1.2.5.tgz", 
+ "integrity": "sha512-CVvd6FHg1Z3POpBLxO6E6zr+rSKEQ9L6rZHAaY7lLfhKsWYUBBOuMs0e9o24oopj6H+geRCX0YJ+TJLBK2eHyQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/array.prototype.findlastindex": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/array.prototype.flat": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.1.tgz", - "integrity": "sha512-roTU0KWIOmJ4DRLmwKd19Otg0/mT3qPNt0Qb3GWW8iObuZXxrjB/pzn0R3hqpRSWg4HCwqx+0vwOnWnvlOyeIA==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", + "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" }, "engines": { @@ -941,14 +955,14 @@ } }, "node_modules/array.prototype.flatmap": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.1.tgz", - 
"integrity": "sha512-8UGn9O1FDVvMNB0UlLv4voxRMze7+FpHyF5mSMRjWHUMlpoDViniy05870VlxhfgTnLbpuwTzvD76MTtWxB/mQ==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", + "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", "es-shim-unscopables": "^1.0.0" }, "engines": { @@ -959,23 +973,51 @@ } }, "node_modules/array.prototype.tosorted": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.1.tgz", - "integrity": "sha512-pZYPXPRl2PqWcsUs6LOMn+1f1532nEoPTYowBtqLwAW+W8vSVhkIGnmOX1t/UQjD6YGI0vcD2B1U7ZFGQH9jnQ==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", + "integrity": "sha512-p6Fx8B7b7ZhL/gmUsAy0D15WhvDccw3mnGNbZpi3pmeJdxtWsj2jEaI4Y6oo3XiHfzuSgPwKc04MYt6KgvC/wA==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.1.3" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", + "es-shim-unscopables": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/arraybuffer.prototype.slice": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + 
"get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", + "is-shared-array-buffer": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -990,12 +1032,15 @@ "dev": true }, "node_modules/binary-extensions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", - "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", "dev": true, "engines": { "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/brace-expansion": { @@ -1008,12 +1053,12 @@ } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", "dev": true, 
"dependencies": { - "fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" @@ -1026,9 +1071,9 @@ "dev": true }, "node_modules/browserslist": { - "version": "4.21.9", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.9.tgz", - "integrity": "sha512-M0MFoZzbUrRU4KNfCrDLnvyE7gub+peetoTid3TBIqtunaDJyXlwhakT+/VkvSXcfIzFfK/nkCs4nmyTmxdNSg==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", + "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", "dev": true, "funding": [ { @@ -1045,10 +1090,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001503", - "electron-to-chromium": "^1.4.431", - "node-releases": "^2.0.12", - "update-browserslist-db": "^1.0.11" + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -1057,56 +1102,15 @@ "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" } }, - "node_modules/buffer-writer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", - "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", - "engines": { - "node": ">=4" - } - }, "node_modules/builtins": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", - "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.1.0.tgz", + "integrity": "sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==", "dev": true, "dependencies": { "semver": "^7.0.0" } }, - "node_modules/builtins/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/builtins/node_modules/semver": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", - "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/builtins/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/caching-transform": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", @@ -1123,13 +1127,19 @@ } }, "node_modules/call-bind": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", - "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", + "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "get-intrinsic": "^1.0.2" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -1154,9 +1164,9 @@ } }, "node_modules/caniuse-lite": { - "version": 
"1.0.30001510", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001510.tgz", - "integrity": "sha512-z35lD6xjHklPNgjW4P68R30Er5OCIZE0C1bUf8IMXSh34WJYXfIy+GxIEraQXYJ2dvTU8TumjYAeLrPhpMlsuw==", + "version": "1.0.30001649", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001649.tgz", + "integrity": "sha512-fJegqZZ0ZX8HOWr6rcafGr72+xcgJKI9oWfDW5DrD7ExUtgZC7a7R7ZYmZqplh7XDocFdGeIFn7roAxhOeYrPQ==", "dev": true, "funding": [ { @@ -1202,16 +1212,10 @@ } }, "node_modules/chokidar": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", - "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://paulmillr.com/funding/" - } - ], "dependencies": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -1224,6 +1228,9 @@ "engines": { "node": ">= 8.10.0" }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, "optionalDependencies": { "fsevents": "~2.3.2" } @@ -1232,6 +1239,7 @@ "version": "2.2.0", "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, "engines": { "node": ">=6" } @@ -1284,9 +1292,9 @@ "dev": true }, "node_modules/cron-parser": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.8.1.tgz", - "integrity": "sha512-jbokKWGcyU4gl6jAfX97E1gDpY12DJ1cLJZmoDzaAln/shZ+S3KBFBuA2Q6WeUN4gJf/8klnV1EfvhA2lK5IRQ==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/cron-parser/-/cron-parser-4.9.0.tgz", + "integrity": 
"sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==", "dependencies": { "luxon": "^3.2.1" }, @@ -1308,10 +1316,61 @@ "node": ">= 8" } }, + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + 
"integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -1361,14 +1420,15 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/define-properties": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz", - "integrity": "sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==", + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dev": true, "dependencies": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -1377,21 +1437,27 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delay": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz", - "integrity": "sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==", + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, "engines": { - "node": ">=10" + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sponsors/ljharb" } }, "node_modules/diff": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", - "integrity": 
"sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", "dev": true, "engines": { "node": ">=0.3.1" @@ -1410,9 +1476,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.447", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.447.tgz", - "integrity": "sha512-sxX0LXh+uL41hSJsujAN86PjhrV/6c79XmpY0TvjZStV6VxIgarf8SRkUoUTuYmFcZQTemsoqo8qXOGw5npWfw==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.4.tgz", + "integrity": "sha512-orzA81VqLyIGUEA77YkVA1D+N+nNfl2isJVjjmOyrlxuooZ19ynb+dOlaDTqd/idKRS9lDCSBmtzM+kyCsMnkA==", "dev": true }, "node_modules/emoji-regex": { @@ -1431,45 +1497,57 @@ } }, "node_modules/es-abstract": { - "version": "1.21.2", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.21.2.tgz", - "integrity": "sha512-y/B5POM2iBnIxCiernH1G7rC9qQoM77lLIMQLuob0zhp8C56Po81+2Nj0WFKnd0pNReDTnkYryc+zhOzpEIROg==", - "dev": true, - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": 
"^2.0.3", "es-to-primitive": "^1.2.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.2.0", - "get-symbol-description": "^1.0.0", + "function.prototype.name": "^1.1.6", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + "is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.10", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", - "object-inspect": "^1.12.3", + "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.7", - "string.prototype.trimend": "^1.0.6", - "string.prototype.trimstart": "^1.0.6", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.9" + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -1478,56 +1556,114 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es-set-tostringtag": { - "version": "2.0.1", - "resolved": 
"https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz", - "integrity": "sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==", + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.3", - "has": "^1.0.3", - "has-tostringtag": "^1.0.0" + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" } }, - "node_modules/es-shim-unscopables": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz", - "integrity": "sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==", + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", "dev": true, - "dependencies": { - "has": "^1.0.3" + "engines": { + "node": ">= 0.4" } }, - "node_modules/es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "node_modules/es-iterator-helpers": { + "version": "1.0.19", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", + "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", "dev": true, "dependencies": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.3", + "es-errors": "^1.3.0", 
+ "es-set-tostringtag": "^2.0.3", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", + "has-symbols": "^1.0.3", + "internal-slot": "^1.0.7", + "iterator.prototype": "^1.1.2", + "safe-array-concat": "^1.1.2" }, "engines": { "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/es6-error": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", + "dev": true, + "dependencies": { + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-shim-unscopables": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", + "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", + "dev": true, + "dependencies": { + "hasown": "^2.0.0" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + 
"is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", "dev": true }, "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", "dev": true, "engines": { "node": ">=6" @@ -1546,27 +1682,28 @@ } }, "node_modules/eslint": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", - "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", + "version": "8.57.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", + "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", - "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.1.0", - "@eslint/js": "8.44.0", - "@humanwhocodes/config-array": "^0.11.10", + "@eslint-community/regexpp": "^4.6.1", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.57.0", + "@humanwhocodes/config-array": "^0.11.14", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", - "ajv": "^6.10.0", + "@ungap/structured-clone": "^1.2.0", + "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", 
"escape-string-regexp": "^4.0.0", - "eslint-scope": "^7.2.0", - "eslint-visitor-keys": "^3.4.1", - "espree": "^9.6.0", + "eslint-scope": "^7.2.2", + "eslint-visitor-keys": "^3.4.3", + "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -1576,7 +1713,6 @@ "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", - "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", @@ -1588,7 +1724,6 @@ "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" }, "bin": { @@ -1655,14 +1790,14 @@ } }, "node_modules/eslint-import-resolver-node": { - "version": "0.3.7", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.7.tgz", - "integrity": "sha512-gozW2blMLJCeFpBwugLTGyvVjNoeo1knonXAcatC6bjPBZitotxdWf7Gimr25N4c0AAOo4eOUfaG82IJPDpqCA==", + "version": "0.3.9", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", + "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, "dependencies": { "debug": "^3.2.7", - "is-core-module": "^2.11.0", - "resolve": "^1.22.1" + "is-core-module": "^2.13.0", + "resolve": "^1.22.4" } }, "node_modules/eslint-import-resolver-node/node_modules/debug": { @@ -1675,9 +1810,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": "sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", + "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", "dev": true, "dependencies": { 
"debug": "^3.2.7" @@ -1744,26 +1879,28 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.27.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.27.5.tgz", - "integrity": "sha512-LmEt3GVofgiGuiE+ORpnvP+kAm3h6MLZJ4Q5HCyHADofsb4VzXFsRiWj3c0OFiV+3DWFh0qg3v9gcPlfc3zRow==", + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", + "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", "dev": true, "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "array.prototype.flatmap": "^1.3.1", + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.7", - "eslint-module-utils": "^2.7.4", - "has": "^1.0.3", - "is-core-module": "^2.11.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.8.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.values": "^1.1.6", - "resolve": "^1.22.1", - "semver": "^6.3.0", - "tsconfig-paths": "^3.14.1" + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", + "semver": "^6.3.1", + "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" @@ -1815,6 +1952,15 @@ "node": "*" } }, + "node_modules/eslint-plugin-import/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/eslint-plugin-n": { "version": "15.7.0", "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz", @@ -1850,18 +1996,6 
@@ "concat-map": "0.0.1" } }, - "node_modules/eslint-plugin-n/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/eslint-plugin-n/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -1874,66 +2008,51 @@ "node": "*" } }, - "node_modules/eslint-plugin-n/node_modules/semver": { - "version": "7.5.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", - "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/eslint-plugin-n/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/eslint-plugin-promise": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", - "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" }, + "funding": { + "url": "https://opencollective.com/eslint" + }, "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" + 
"eslint": "^7.0.0 || ^8.0.0 || ^9.0.0" } }, "node_modules/eslint-plugin-react": { - "version": "7.32.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.32.2.tgz", - "integrity": "sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg==", + "version": "7.35.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.35.0.tgz", + "integrity": "sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==", "dev": true, "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flatmap": "^1.3.1", - "array.prototype.tosorted": "^1.1.1", + "array-includes": "^3.1.8", + "array.prototype.findlast": "^1.2.5", + "array.prototype.flatmap": "^1.3.2", + "array.prototype.tosorted": "^1.1.4", "doctrine": "^2.1.0", + "es-iterator-helpers": "^1.0.19", "estraverse": "^5.3.0", + "hasown": "^2.0.2", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", - "object.entries": "^1.1.6", - "object.fromentries": "^2.0.6", - "object.hasown": "^1.1.2", - "object.values": "^1.1.6", + "object.entries": "^1.1.8", + "object.fromentries": "^2.0.8", + "object.values": "^1.2.0", "prop-types": "^15.8.1", - "resolve": "^2.0.0-next.4", - "semver": "^6.3.0", - "string.prototype.matchall": "^4.0.8" + "resolve": "^2.0.0-next.5", + "semver": "^6.3.1", + "string.prototype.matchall": "^4.0.11", + "string.prototype.repeat": "^1.0.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, "node_modules/eslint-plugin-react/node_modules/brace-expansion": { @@ -1971,12 +2090,12 @@ } }, "node_modules/eslint-plugin-react/node_modules/resolve": { - "version": "2.0.0-next.4", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz", - "integrity": 
"sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==", + "version": "2.0.0-next.5", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.5.tgz", + "integrity": "sha512-U7WjGVG9sH8tvjW5SmGbQuui75FiyjAX72HX15DwBBwF9dNiQZRQAg9nnPhYy+TUnE0+VcrttuvNI8oSxZcocA==", "dev": true, "dependencies": { - "is-core-module": "^2.9.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -1987,10 +2106,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/eslint-plugin-react/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/eslint-scope": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.0.tgz", - "integrity": "sha512-DYj5deGlHBfMt15J7rdtyKNq/Nqlv5KfU4iodrQ019XESsRnwXH9KAE0y3cwtUHDo2ob7CypAnCqefh6vioWRw==", + "version": "7.2.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", + "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, "dependencies": { "esrecurse": "^4.3.0", @@ -2031,9 +2159,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.1.tgz", - "integrity": "sha512-pZnmmLwYzf+kWaM/Qgrvpen51upAktaaiI01nsJD/Yr3lMOdNtq0cxkrrg16w64VtisN6okbs7Q8AfGqj4c9fA==", + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || 
>=16.0.0" @@ -2065,9 +2193,9 @@ } }, "node_modules/eslint/node_modules/globals": { - "version": "13.20.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", - "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", + "version": "13.24.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", + "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -2104,9 +2232,9 @@ } }, "node_modules/espree": { - "version": "9.6.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.0.tgz", - "integrity": "sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==", + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", + "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, "dependencies": { "acorn": "^8.9.0", @@ -2134,9 +2262,9 @@ } }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -2194,9 +2322,9 @@ "dev": true }, "node_modules/fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": 
"sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", "dev": true, "dependencies": { "reusify": "^1.0.4" @@ -2215,9 +2343,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dev": true, "dependencies": { "to-regex-range": "^5.0.1" @@ -2269,12 +2397,13 @@ } }, "node_modules/flat-cache": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", - "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", + "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", "dev": true, "dependencies": { - "flatted": "^3.1.0", + "flatted": "^3.2.9", + "keyv": "^4.5.3", "rimraf": "^3.0.2" }, "engines": { @@ -2282,9 +2411,9 @@ } }, "node_modules/flatted": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz", - "integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, "node_modules/for-each": { @@ -2336,9 +2465,9 @@ "dev": true }, "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "optional": true, @@ -2350,21 +2479,24 @@ } }, "node_modules/function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", - "dev": true + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": "sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", + "integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" + "define-properties": "^1.2.0", + "es-abstract": "^1.22.1", + "functions-have-names": "^1.2.3" }, "engines": { "node": ">= 0.4" @@ -2401,15 +2533,19 @@ } }, "node_modules/get-intrinsic": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz", - "integrity": 
"sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", "dev": true, "dependencies": { - "function-bind": "^1.1.1", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", "has-proto": "^1.0.1", - "has-symbols": "^1.0.3" + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2437,13 +2573,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -2453,20 +2590,20 @@ } }, "node_modules/glob": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", - "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-8.1.0.tgz", + "integrity": "sha512-r8hpEjiQEYlF2QU0df3dS+nxxSIreXQS1qRhMJM0Q5NDdR386C7jb7Hwwod8Fgiuex+k0GFjgft18yvxm5XoCQ==", + "deprecated": "Glob versions prior to v9 are no longer supported", "dev": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", 
"inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "minimatch": "^5.0.1", + "once": "^1.3.0" }, "engines": { - "node": "*" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -2484,28 +2621,6 @@ "node": ">= 6" } }, - "node_modules/glob/node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/glob/node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "dev": true, - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, "node_modules/globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -2516,12 +2631,13 @@ } }, "node_modules/globalthis": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", - "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", + "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==", "dev": true, "dependencies": { - "define-properties": "^1.1.3" + "define-properties": "^1.2.1", + "gopd": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -2554,18 +2670,6 @@ "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==", "dev": true }, - "node_modules/has": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, - "dependencies": { - "function-bind": "^1.1.1" - }, - "engines": { - "node": ">= 0.4.0" - } - }, "node_modules/has-bigints": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", @@ -2585,21 +2689,21 @@ } }, "node_modules/has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", "dev": true, "dependencies": { - "get-intrinsic": "^1.1.1" + "es-define-property": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true, "engines": { "node": ">= 0.4" @@ -2621,12 +2725,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -2651,6 +2755,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/he": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", @@ -2667,9 +2783,9 @@ "dev": true }, "node_modules/ignore": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", - "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", "dev": true, "engines": { "node": ">= 4" @@ -2713,6 +2829,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, "engines": { "node": ">=8" } @@ -2721,6 +2838,7 @@ "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dev": true, "dependencies": { "once": "^1.3.0", @@ -2734,13 +2852,13 @@ "dev": true }, "node_modules/internal-slot": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz", - "integrity": "sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.0", - "has": "^1.0.3", + "es-errors": "^1.3.0", + "hasown": "^2.0.0", "side-channel": "^1.0.4" }, "engines": { @@ -2748,14 +2866,16 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2767,6 +2887,21 @@ "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, + "node_modules/is-async-function": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz", + "integrity": 
"sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-bigint": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", @@ -2820,12 +2955,30 @@ } }, "node_modules/is-core-module": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.1.tgz", - "integrity": "sha512-Q4ZuBAe2FUsKtyQJoQHlvP8OvBERxO3jEmy1I7hcRXcJBGGHFh/aJBswbXuS9sgrDH2QUO8ilkwNPHvHMd8clg==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", + "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", "dev": true, "dependencies": { - "has": "^1.0.3" + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -2855,6 +3008,18 @@ "node": ">=0.10.0" } }, + "node_modules/is-finalizationregistry": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz", + "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, 
"node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -2864,6 +3029,21 @@ "node": ">=8" } }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "dev": true, + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -2876,10 +3056,22 @@ "node": ">=0.10.0" } }, + "node_modules/is-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", + "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, "engines": { "node": ">= 0.4" @@ -2946,13 +3138,28 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-set": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", + "integrity": 
"sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -3001,16 +3208,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", - "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", - "for-each": "^0.3.3", - "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -3037,6 +3240,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-weakmap": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", + "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", + "dev": true, 
+ "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-weakref": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", @@ -3049,6 +3264,22 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-weakset": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", + "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-windows": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", @@ -3058,6 +3289,12 @@ "node": ">=0.10.0" } }, + "node_modules/isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", + "dev": true + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -3065,9 +3302,9 @@ "dev": true }, "node_modules/istanbul-lib-coverage": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", - "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", "dev": true, "engines": { "node": ">=8" @@ -3086,18 +3323,19 @@ } }, "node_modules/istanbul-lib-instrument": { - "version": "4.0.3", - "resolved": 
"https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", - "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", "dev": true, "dependencies": { - "@babel/core": "^7.7.5", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.0.0", - "semver": "^6.3.0" + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" }, "engines": { - "node": ">=8" + "node": ">=10" } }, "node_modules/istanbul-lib-processinfo": { @@ -3117,39 +3355,33 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/p-map": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", "dev": true, "dependencies": { - "aggregate-error": "^3.0.0" + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-processinfo/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "bin": { - "uuid": "dist/bin/uuid" + "node": ">=10" } }, - "node_modules/istanbul-lib-report": { - 
"version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", - "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "node_modules/istanbul-lib-report/node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", "dev": true, "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^3.0.0", - "supports-color": "^7.1.0" + "semver": "^7.5.3" }, "engines": { - "node": ">=8" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/istanbul-lib-report/node_modules/supports-color": { @@ -3179,9 +3411,9 @@ } }, "node_modules/istanbul-reports": { - "version": "3.1.5", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz", - "integrity": "sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w==", + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", "dev": true, "dependencies": { "html-escaper": "^2.0.0", @@ -3191,6 +3423,19 @@ "node": ">=8" } }, + "node_modules/iterator.prototype": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", + "integrity": "sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==", + "dev": true, + "dependencies": { + "define-properties": "^1.2.1", + "get-intrinsic": "^1.2.1", + "has-symbols": "^1.0.3", + "reflect.getprototypeof": "^1.0.4", + "set-function-name": "^2.0.1" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -3221,6 +3466,12 @@ "node": ">=4" } }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, "node_modules/json-parse-better-errors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", @@ -3252,9 +3503,9 @@ } }, "node_modules/jsx-ast-utils": { - "version": "3.3.4", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.4.tgz", - "integrity": "sha512-fX2TVdCViod6HwKEtSWGHs57oFhVfCMwieb9PuRDgjDPh5XeqJiHFFFJCHxU5cnTc3Bu/GRL+kPiFmw8XWOfKw==", + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", + "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", "dev": true, "dependencies": { "array-includes": "^3.1.6", @@ -3266,6 +3517,15 @@ "node": ">=4.0" } }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -3328,11 +3588,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" - }, "node_modules/lodash.flattendeep": { "version": "4.4.0", "resolved": 
"https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", @@ -3383,9 +3638,9 @@ } }, "node_modules/luxon": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.3.0.tgz", - "integrity": "sha512-An0UCfG/rSiqtAIiBPO0Y9/zAnHUZxAMiCpTd5h2smgsj7GGmcenvrvww2cqNA8/4A5ZrD1gJpHN2mIHZQF+Mg==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/luxon/-/luxon-3.5.0.tgz", + "integrity": "sha512-rh+Zjr6DNfUYR3bPwJEnuwDdqMbxZW7LOQfUN4B54+Cl+0o5zaU9RJ6bcidfDtC1cWCZXQ+nvX8bf6bAji37QQ==", "engines": { "node": ">=12" } @@ -3405,10 +3660,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/minimatch": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.0.1.tgz", - "integrity": "sha512-nLDxIFRyhDblz3qMuq+SoRZED4+miJ/G+tdDrjkkkRnjAsBexeGpgjLEQ0blJy7rHhR2b93rhQY4SvyWu9v03g==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -3427,32 +3691,31 @@ } }, "node_modules/mocha": { - "version": "10.2.0", - "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.2.0.tgz", - "integrity": "sha512-IDY7fl/BecMwFHzoqF2sg/SHHANeBoMMXFlS9r0OXKDssYE1M5O43wUY/9BVPeIvfH2zmEbBfseqN9gBQZzXkg==", - "dev": true, - "dependencies": { - "ansi-colors": "4.1.1", - "browser-stdout": "1.3.1", - "chokidar": "3.5.3", - "debug": "4.3.4", - "diff": "5.0.0", - "escape-string-regexp": "4.0.0", - "find-up": "5.0.0", - "glob": "7.2.0", - "he": "1.2.0", - "js-yaml": 
"4.1.0", - "log-symbols": "4.1.0", - "minimatch": "5.0.1", - "ms": "2.1.3", - "nanoid": "3.3.3", - "serialize-javascript": "6.0.0", - "strip-json-comments": "3.1.1", - "supports-color": "8.1.1", - "workerpool": "6.2.1", - "yargs": "16.2.0", - "yargs-parser": "20.2.4", - "yargs-unparser": "2.0.0" + "version": "10.7.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-10.7.0.tgz", + "integrity": "sha512-v8/rBWr2VO5YkspYINnvu81inSz2y3ODJrhO175/Exzor1RcEZZkizgE2A+w/CAXXoESS8Kys5E62dOHGHzULA==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.3", + "browser-stdout": "^1.3.1", + "chokidar": "^3.5.3", + "debug": "^4.3.5", + "diff": "^5.2.0", + "escape-string-regexp": "^4.0.0", + "find-up": "^5.0.0", + "glob": "^8.1.0", + "he": "^1.2.0", + "js-yaml": "^4.1.0", + "log-symbols": "^4.1.0", + "minimatch": "^5.1.6", + "ms": "^2.1.3", + "serialize-javascript": "^6.0.2", + "strip-json-comments": "^3.1.1", + "supports-color": "^8.1.1", + "workerpool": "^6.5.1", + "yargs": "^16.2.0", + "yargs-parser": "^20.2.9", + "yargs-unparser": "^2.0.0" }, "bin": { "_mocha": "bin/_mocha", @@ -3460,10 +3723,6 @@ }, "engines": { "node": ">= 14.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mochajs" } }, "node_modules/ms": { @@ -3472,18 +3731,6 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true }, - "node_modules/nanoid": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.3.tgz", - "integrity": "sha512-p1sjXuopFs0xg+fPASzQ28agW1oHD7xDsd9Xkf3T15H3c/cifrFHVwrh74PdoklAPi+i7MdRsE47vm2r6JoB+w==", - "dev": true, - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -3503,9 +3750,9 @@ } }, "node_modules/node-releases": { - 
"version": "2.0.12", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.12.tgz", - "integrity": "sha512-QzsYKWhXTWx8h1kIvqfnC++o0pEmpRQA/aenALsL2F4pqNVr7YzcdMlDij5WBnwftRbJCNJL/O7zdKaxKPHqgQ==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "node_modules/normalize-path": { @@ -3518,9 +3765,9 @@ } }, "node_modules/nyc": { - "version": "15.1.0", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", - "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-17.0.0.tgz", + "integrity": "sha512-ISp44nqNCaPugLLGGfknzQwSwt10SSS5IMoPR7GLoMAyS18Iw5js8U7ga2VF9lYuMZ42gOHr3UddZw4WZltxKg==", "dev": true, "dependencies": { "@istanbuljs/load-nyc-config": "^1.0.0", @@ -3535,7 +3782,7 @@ "glob": "^7.1.6", "istanbul-lib-coverage": "^3.0.0", "istanbul-lib-hook": "^3.0.0", - "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-instrument": "^6.0.2", "istanbul-lib-processinfo": "^2.0.2", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", @@ -3555,7 +3802,17 @@ "nyc": "bin/nyc.js" }, "engines": { - "node": ">=8.9" + "node": ">=18" + } + }, + "node_modules/nyc/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" } }, "node_modules/nyc/node_modules/cliui": { @@ -3582,6 +3839,27 @@ "node": ">=8" } }, + "node_modules/nyc/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + 
"integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/nyc/node_modules/locate-path": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", @@ -3594,6 +3872,18 @@ "node": ">=8" } }, + "node_modules/nyc/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/nyc/node_modules/p-limit": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", @@ -3621,18 +3911,6 @@ "node": ">=8" } }, - "node_modules/nyc/node_modules/p-map": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", - "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/nyc/node_modules/wrap-ansi": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", @@ -3698,10 +3976,13 @@ } }, "node_modules/object-inspect": { - "version": "1.12.3", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz", - "integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==", + "version": 
"1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3716,13 +3997,13 @@ } }, "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", "has-symbols": "^1.0.3", "object-keys": "^1.1.1" }, @@ -3734,28 +4015,29 @@ } }, "node_modules/object.entries": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.6.tgz", - "integrity": "sha512-leTPzo4Zvg3pmbQ3rDK69Rl8GQvIqMWubrkxONG9/ojtFE2rD9fjMKfSI5BxW3osRH1m6VdzmqK8oAY9aT4x5w==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", + "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" } }, "node_modules/object.fromentries": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.6.tgz", - "integrity": 
"sha512-VciD13dswC4j1Xt5394WR4MzmAQmlgN72phd/riNp9vtD7tp4QQWJ0R4wvclXcafgcYK8veHRed2W6XeGBvcfg==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -3764,28 +4046,29 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.hasown": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.2.tgz", - "integrity": "sha512-B5UIT3J1W+WuWIU55h0mjlwaqxiE5vYENJXIXZ4VFe05pNYrkKuK0U/6aFcb0pKywYJh7IhfoqUfKVmrJJHZHw==", + "node_modules/object.groupby": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, "dependencies": { - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">= 0.4" } }, "node_modules/object.values": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.6.tgz", - "integrity": "sha512-FVVTkD1vENCsAcwNs9k6jea2uHC/X0+JcjG8YA60FN5CMaJmG95wT9jek/xX9nornqGRrBkKtzuAu2wuHpKqvw==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", + "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - 
"define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -3804,17 +4087,17 @@ } }, "node_modules/optionator": { - "version": "0.9.3", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", - "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", "dev": true, "dependencies": { - "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0" + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" }, "engines": { "node": ">= 0.8.0" @@ -3851,17 +4134,15 @@ } }, "node_modules/p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, "dependencies": { "aggregate-error": "^3.0.0" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=8" } }, "node_modules/p-try": { @@ -3888,11 +4169,6 @@ "node": ">=8" } }, - "node_modules/packet-reader": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", - "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" - }, "node_modules/parent-module": { "version": "1.0.1", "resolved": 
"https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", @@ -3952,15 +4228,13 @@ "dev": true }, "node_modules/pg": { - "version": "8.11.1", - "resolved": "https://registry.npmjs.org/pg/-/pg-8.11.1.tgz", - "integrity": "sha512-utdq2obft07MxaDg0zBJI+l/M3mBRfIpEN3iSemsz0G5F2/VXx+XzqF4oxrbIZXQxt2AZzIUzyVg/YM6xOP/WQ==", - "dependencies": { - "buffer-writer": "2.0.0", - "packet-reader": "1.0.0", - "pg-connection-string": "^2.6.1", - "pg-pool": "^3.6.1", - "pg-protocol": "^1.6.0", + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.12.0.tgz", + "integrity": "sha512-A+LHUSnwnxrnL/tZ+OLfqR1SxLN3c/pgDztZ47Rpbsd4jUytsTtwQo/TLPRzPJMp/1pbhYVhH9cuSZLAajNfjQ==", + "dependencies": { + "pg-connection-string": "^2.6.4", + "pg-pool": "^3.6.2", + "pg-protocol": "^1.6.1", "pg-types": "^2.1.0", "pgpass": "1.x" }, @@ -3986,9 +4260,9 @@ "optional": true }, "node_modules/pg-connection-string": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.1.tgz", - "integrity": "sha512-w6ZzNu6oMmIzEAYVw+RLK0+nqHPt8K3ZnknKi+g48Ak2pr3dtljJW3o+D/n2zzCG07Zoe9VOX3aiKpj+BN0pjg==" + "version": "2.6.4", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.6.4.tgz", + "integrity": "sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA==" }, "node_modules/pg-int8": { "version": "1.0.1", @@ -3999,17 +4273,17 @@ } }, "node_modules/pg-pool": { - "version": "3.6.1", - "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.1.tgz", - "integrity": "sha512-jizsIzhkIitxCGfPRzJn1ZdcosIt3pz9Sh3V01fm1vZnbnCMgmGl5wvGGdNN2EL9Rmb0EcFoCkixH4Pu+sP9Og==", + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.6.2.tgz", + "integrity": "sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg==", "peerDependencies": { "pg": ">=8.0" } }, "node_modules/pg-protocol": { - "version": "1.6.0", 
- "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.0.tgz", - "integrity": "sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q==" + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.6.1.tgz", + "integrity": "sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg==" }, "node_modules/pg-types": { "version": "2.2.0", @@ -4035,9 +4309,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", "dev": true }, "node_modules/picomatch": { @@ -4199,6 +4473,15 @@ "node": ">=8" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/postgres-array": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", @@ -4267,9 +4550,9 @@ } }, "node_modules/punycode": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", - "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", "dev": 
true, "engines": { "node": ">=6" @@ -4322,15 +4605,37 @@ "node": ">=8.10.0" } }, + "node_modules/reflect.getprototypeof": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", + "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.1", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "globalthis": "^1.0.3", + "which-builtin-type": "^1.1.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/regexp.prototype.flags": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz", - "integrity": "sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "functions-have-names": "^1.2.3" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -4379,12 +4684,12 @@ "dev": true }, "node_modules/resolve": { - "version": "1.22.2", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz", - "integrity": "sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g==", + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", "dev": true, 
"dependencies": { - "is-core-module": "^2.11.0", + "is-core-module": "^2.13.0", "path-parse": "^1.0.7", "supports-preserve-symlinks-flag": "^1.0.0" }, @@ -4418,6 +4723,7 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", "dev": true, "dependencies": { "glob": "^7.1.3" @@ -4429,6 +4735,49 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/rimraf/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/rimraf/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/rimraf/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, "node_modules/run-parallel": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", @@ -4452,6 +4801,24 @@ "queue-microtask": "^1.2.2" } }, + "node_modules/safe-array-concat": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", + "has-symbols": "^1.0.3", + "isarray": "^2.0.5" + }, + "engines": { + "node": ">=0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -4473,26 +4840,32 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/semver": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "dev": true, "bin": { 
"semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" } }, "node_modules/serialize-error": { @@ -4521,9 +4894,9 @@ } }, "node_modules/serialize-javascript": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", - "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz", + "integrity": "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==", "dev": true, "dependencies": { "randombytes": "^2.1.0" @@ -4535,6 +4908,38 @@ "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", "dev": true }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/set-function-name": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", + "dev": true, + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "functions-have-names": "^1.2.3", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", 
@@ -4557,14 +4962,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4697,33 +5106,51 @@ } }, "node_modules/string.prototype.matchall": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.8.tgz", - "integrity": "sha512-6zOCOcJ+RJAQshcTvXPHoxoQGONa3e/Lqx90wUA+wEzX78sg5Bo+1tQo4N0pohS0erG9qtCqJDjNCQBjeWVxyg==", + "version": "4.0.11", + "resolved": "https://registry.npmjs.org/string.prototype.matchall/-/string.prototype.matchall-4.0.11.tgz", + "integrity": "sha512-NUdh0aDavY2og7IbBPenWqR9exH+E26Sv8e0/eTe1tltDGZL+GtBkDAnnyBtmekfK6/Dq3MkcGtzXFEd1LQrtg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "regexp.prototype.flags": "^1.4.3", - "side-channel": "^1.0.4" + "internal-slot": "^1.0.7", + "regexp.prototype.flags": "^1.5.2", + "set-function-name": "^2.0.2", + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">= 
0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, "node_modules/string.prototype.trim": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz", - "integrity": "sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -4733,28 +5160,31 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz", - "integrity": "sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, 
"funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz", - "integrity": "sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.20.4" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4844,6 +5274,27 @@ "concat-map": "0.0.1" } }, + "node_modules/test-exclude/node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/test-exclude/node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", @@ -4884,9 +5335,9 @@ } }, "node_modules/tsconfig-paths": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz", - "integrity": "sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==", + 
"version": "3.15.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", + "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, "dependencies": { "@types/json5": "^0.0.29", @@ -4937,15 +5388,74 @@ "node": ">=8" } }, + "node_modules/typed-array-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/typed-array-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/typed-array-byte-offset": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", + "dev": true, + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "for-each": "^0.3.3", + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4975,10 +5485,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, "node_modules/update-browserslist-db": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.11.tgz", - "integrity": "sha512-dCwEFf0/oT85M1fHBg4F0jtLwJrutGoHSQXCh7u4o2t1drG+c0a9Flnqww6XUKSfQMPpJBRjU8d4RXB09qtvaA==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", "dev": true, "funding": [ { @@ -4995,8 +5511,8 @@ } ], "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.1.2", + "picocolors": "^1.0.1" }, "bin": { "update-browserslist-db": "cli.js" @@ -5015,17 +5531,18 @@ } }, "node_modules/uuid": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.0.tgz", - 
"integrity": "sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg==", + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, "bin": { "uuid": "dist/bin/uuid" } }, "node_modules/version-guard": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.1.tgz", - "integrity": "sha512-MGQLX89UxmYHgDvcXyjBI0cbmoW+t/dANDppNPrno64rYr8nH4SHSuElQuSYdXGEs0mUzdQe1BY+FhVPNsAmJQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.2.tgz", + "integrity": "sha512-D8d+YxCUpoqtCnQzDxm6SF7DLU3gr2535T4khAtMq4osBahsQnmSxuwXFdrbAdDGG8Uokzfis/jvyeFPdmlc7w==", "dev": true, "engines": { "node": ">=0.10.48" @@ -5062,6 +5579,50 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/which-builtin-type": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.4.tgz", + "integrity": "sha512-bppkmBSsHFmIMSl8BO9TbsyzsvGjVoppt8xUiGzwiu/bhDCGxnpOKCxgqj6GuyHE0mINMDecBFPlOm2hzY084w==", + "dev": true, + "dependencies": { + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", + "is-async-function": "^2.0.0", + "is-date-object": "^1.0.5", + "is-finalizationregistry": "^1.0.2", + "is-generator-function": "^1.0.10", + "is-regex": "^1.1.4", + "is-weakref": "^1.0.2", + "isarray": "^2.0.5", + "which-boxed-primitive": "^1.0.2", + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.15" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/which-collection": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", + "integrity": 
"sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", + "dev": true, + "dependencies": { + "is-map": "^2.0.3", + "is-set": "^2.0.3", + "is-weakmap": "^2.0.2", + "is-weakset": "^2.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/which-module": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz", @@ -5069,17 +5630,16 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", - "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.10" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -5088,10 +5648,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/workerpool": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.2.1.tgz", - "integrity": "sha512-ILEIE97kDZvF9Wb9f6h5aXK4swSlKGUcOEGiIYb2OOu/IrDU9iwj0fD//SsA6E5ibwJxpEvhullJY4Sl4GcpAw==", + "version": "6.5.1", + "resolved": 
"https://registry.npmjs.org/workerpool/-/workerpool-6.5.1.tgz", + "integrity": "sha512-Fs4dNYcsdpYSAfVxhnl1L5zTksjvOJxtC5hzMNl+1t9B8hTJTdKDyZ5ju7ztgPy+ft9tBFXoOlDNiOT9WUXZlA==", "dev": true }, "node_modules/wrap-ansi": { @@ -5180,9 +5749,9 @@ } }, "node_modules/yargs-parser": { - "version": "20.2.4", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", - "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", "dev": true, "engines": { "node": ">=10" diff --git a/package.json b/package.json index f75611ed..1703706f 100644 --- a/package.json +++ b/package.json @@ -1,35 +1,29 @@ { "name": "pg-boss", - "version": "9.0.3", - "description": "Queueing jobs in Node.js using PostgreSQL like a boss", + "version": "10.0.0", + "description": "Queueing jobs in Postgres from Node.js like a boss", "main": "./src/index.js", "engines": { - "node": ">=16" + "node": ">=20" }, "dependencies": { "cron-parser": "^4.0.0", - "delay": "^5.0.0", - "lodash.debounce": "^4.0.8", - "p-map": "^4.0.0", "pg": "^8.5.1", - "serialize-error": "^8.1.0", - "uuid": "^9.0.0" + "serialize-error": "^8.1.0" }, "devDependencies": { "@types/node": "^20.3.3", "luxon": "^3.0.1", "mocha": "^10.0.0", - "nyc": "^15.1.0", + "nyc": "^17.0.0", "standard": "^17.0.0" }, "scripts": { "test": "standard && mocha", "cover": "nyc npm test", - "export-schema": "node ./scripts/construct.js", - "export-migration": "node ./scripts/migrate.js", - "export-rollback": "node ./scripts/rollback.js", "tsc": "tsc --noEmit types.d.ts", - "readme": "node ./test/readme.js" + "readme": "node ./test/readme.js", + "migrate": "node -e 'console.log(require(\"./src\").getMigrationPlans())'" }, "mocha": { "timeout": 10000, diff --git 
a/src/attorney.js b/src/attorney.js index 2a6fae4d..2d1d1bcc 100644 --- a/src/attorney.js +++ b/src/attorney.js @@ -1,20 +1,20 @@ const assert = require('assert') -const { DEFAULT_SCHEMA, SINGLETON_QUEUE_KEY } = require('./plans') +const { DEFAULT_SCHEMA } = require('./plans') module.exports = { getConfig, checkSendArgs, - checkInsertArgs, + checkQueueArgs, checkWorkArgs, checkFetchArgs, - warnClockSkew + warnClockSkew, + assertPostgresObjectName, + assertQueueName } +const MAX_INTERVAL_HOURS = 24 + const WARNINGS = { - EXPIRE_IN_REMOVED: { - message: '\'expireIn\' option detected. This option has been removed. Use expireInSeconds, expireInMinutes or expireInHours.', - code: 'pg-boss-w01' - }, CLOCK_SKEW: { message: 'Timekeeper detected clock skew between this instance and the database server. This will not affect scheduling operations, but this warning is shown any time the skew exceeds 60 seconds.', code: 'pg-boss-w02' @@ -22,9 +22,23 @@ const WARNINGS = { CRON_DISABLED: { message: 'Archive interval is set less than 60s. Cron processing is disabled.', code: 'pg-boss-w03' + }, + ON_COMPLETE_REMOVED: { + message: '\'onComplete\' option detected. This option has been removed. 
Consider deadLetter if needed.', + code: 'pg-boss-w04' } } +function checkQueueArgs (name, options = {}) { + assert(!('deadLetter' in options) || (typeof options.deadLetter === 'string'), 'deadLetter must be a string') + + applyRetryConfig(options) + applyExpirationConfig(options) + applyRetentionConfig(options) + + return options +} + function checkSendArgs (args, defaults) { let name, data, options @@ -57,11 +71,11 @@ function checkSendArgs (args, defaults) { assert(!('priority' in options) || (Number.isInteger(options.priority)), 'priority must be an integer') options.priority = options.priority || 0 + assert(!('deadLetter' in options) || (typeof options.deadLetter === 'string'), 'deadLetter must be a string') + applyRetryConfig(options, defaults) applyExpirationConfig(options, defaults) applyRetentionConfig(options, defaults) - applyCompletionConfig(options, defaults) - applySingletonKeyConfig(options) const { startAfter, singletonSeconds, singletonMinutes, singletonHours } = options @@ -83,23 +97,11 @@ function checkSendArgs (args, defaults) { assert(!singletonSeconds || singletonSeconds <= defaults.archiveSeconds, `throttling interval ${singletonSeconds}s cannot exceed archive interval ${defaults.archiveSeconds}s`) - return { name, data, options } -} - -function checkInsertArgs (jobs) { - assert(Array.isArray(jobs), `jobs argument should be an array. 
Received '${typeof jobs}'`) - return jobs.map(job => { - job = { ...job } - applySingletonKeyConfig(job) - return job - }) -} - -function applySingletonKeyConfig (options) { - if (options.singletonKey && options.useSingletonQueue && options.singletonKey !== SINGLETON_QUEUE_KEY) { - options.singletonKey = SINGLETON_QUEUE_KEY + options.singletonKey + if (options.onComplete) { + emitWarning(WARNINGS.ON_COMPLETE_REMOVED) } - delete options.useSingletonQueue + + return { name, data, options } } function checkWorkArgs (name, args, defaults) { @@ -120,34 +122,25 @@ function checkWorkArgs (name, args, defaults) { options = { ...options } - applyNewJobCheckInterval(options, defaults) - - assert(!('teamConcurrency' in options) || - (Number.isInteger(options.teamConcurrency) && options.teamConcurrency >= 1 && options.teamConcurrency <= 1000), - 'teamConcurrency must be an integer between 1 and 1000') + applyPollingInterval(options, defaults) - assert(!('teamSize' in options) || (Number.isInteger(options.teamSize) && options.teamSize >= 1), 'teamSize must be an integer > 0') assert(!('batchSize' in options) || (Number.isInteger(options.batchSize) && options.batchSize >= 1), 'batchSize must be an integer > 0') assert(!('includeMetadata' in options) || typeof options.includeMetadata === 'boolean', 'includeMetadata must be a boolean') - assert(!('enforceSingletonQueueActiveLimit' in options) || typeof options.enforceSingletonQueueActiveLimit === 'boolean', 'enforceSingletonQueueActiveLimit must be a boolean') + assert(!('priority' in options) || typeof options.priority === 'boolean', 'priority must be a boolean') + + options.batchSize = options.batchSize || 1 return { options, callback } } -function checkFetchArgs (name, batchSize, options) { +function checkFetchArgs (name, options) { assert(name, 'missing queue name') - name = sanitizeQueueNameForFetch(name) - - assert(!batchSize || (Number.isInteger(batchSize) && batchSize >= 1), 'batchSize must be an integer > 0') + 
assert(!('batchSize' in options) || (Number.isInteger(options.batchSize) && options.batchSize >= 1), 'batchSize must be an integer > 0') assert(!('includeMetadata' in options) || typeof options.includeMetadata === 'boolean', 'includeMetadata must be a boolean') - assert(!('enforceSingletonQueueActiveLimit' in options) || typeof options.enforceSingletonQueueActiveLimit === 'boolean', 'enforceSingletonQueueActiveLimit must be a boolean') + assert(!('priority' in options) || typeof options.priority === 'boolean', 'priority must be a boolean') - return { name } -} - -function sanitizeQueueNameForFetch (name) { - return name.replace(/[%_*]/g, match => match === '*' ? '%' : '\\' + match) + options.batchSize = options.batchSize || 1 } function getConfig (value) { @@ -158,32 +151,45 @@ function getConfig (value) { ? { connectionString: value } : { ...value } - applyDatabaseConfig(config) + config.schedule = ('schedule' in config) ? config.schedule : true + config.supervise = ('supervise' in config) ? config.supervise : true + config.migrate = ('migrate' in config) ? config.migrate : true + + applySchemaConfig(config) applyMaintenanceConfig(config) applyArchiveConfig(config) applyArchiveFailedConfig(config) applyDeleteConfig(config) applyMonitoringConfig(config) - applyUuidConfig(config) - applyNewJobCheckInterval(config) + applyPollingInterval(config) applyExpirationConfig(config) applyRetentionConfig(config) - applyCompletionConfig(config) return config } -function applyDatabaseConfig (config) { +function applySchemaConfig (config) { if (config.schema) { - assert(typeof config.schema === 'string', 'configuration assert: schema must be a string') - assert(config.schema.length <= 50, 'configuration assert: schema name cannot exceed 50 characters') - assert(!/\W/.test(config.schema), `configuration assert: ${config.schema} cannot be used as a schema. 
Only alphanumeric characters and underscores are allowed`) + assertPostgresObjectName(config.schema) } config.schema = config.schema || DEFAULT_SCHEMA } +function assertPostgresObjectName (name) { + assert(typeof name === 'string', 'Name must be a string') + assert(name.length <= 50, 'Name cannot exceed 50 characters') + assert(!/\W/.test(name), 'Name can only contain alphanumeric characters or underscores') + assert(!/^\d/.test(name), 'Name cannot start with a number') +} + +function assertQueueName (name) { + assert(name, 'Name is required') + assert(typeof name === 'string', 'Name must be a string') + assert(/[\w-]/.test(name), 'Name can only contain alphanumeric characters, underscores, or hyphens') +} + function applyArchiveConfig (config) { const ARCHIVE_DEFAULT = 60 * 60 * 12 @@ -211,18 +217,7 @@ function applyArchiveFailedConfig (config) { } } -function applyCompletionConfig (config, defaults) { - assert(!('onComplete' in config) || config.onComplete === true || config.onComplete === false, - 'configuration assert: onComplete must be either true or false') - - if (!('onComplete' in config)) { - config.onComplete = defaults - ? defaults.onComplete - : false - } -} - -function applyRetentionConfig (config, defaults) { +function applyRetentionConfig (config, defaults = {}) { assert(!('retentionSeconds' in config) || config.retentionSeconds >= 1, 'configuration assert: retentionSeconds must be at least every second') @@ -243,18 +238,13 @@ function applyRetentionConfig (config, defaults) { ? `${config.retentionMinutes} minutes` : ('retentionSeconds' in config) ? `${config.retentionSeconds} seconds` - : defaults - ? 
defaults.keepUntil - : '14 days' + : null config.keepUntil = keepUntil + config.keepUntilDefault = defaults?.keepUntil } -function applyExpirationConfig (config, defaults) { - if ('expireIn' in config) { - emitWarning(WARNINGS.EXPIRE_IN_REMOVED) - } - +function applyExpirationConfig (config, defaults = {}) { assert(!('expireInSeconds' in config) || config.expireInSeconds >= 1, 'configuration assert: expireInSeconds must be at least every second') @@ -265,16 +255,17 @@ function applyExpirationConfig (config, defaults) { 'configuration assert: expireInHours must be at least every hour') const expireIn = ('expireInHours' in config) - ? `${config.expireInHours} hours` + ? config.expireInHours * 60 * 60 : ('expireInMinutes' in config) - ? `${config.expireInMinutes} minutes` + ? config.expireInMinutes * 60 : ('expireInSeconds' in config) - ? `${config.expireInSeconds} seconds` - : defaults - ? defaults.expireIn - : '15 minutes' + ? config.expireInSeconds + : null + + assert(!expireIn || expireIn / 60 / 60 < MAX_INTERVAL_HOURS, `configuration assert: expiration cannot exceed ${MAX_INTERVAL_HOURS} hours`) config.expireIn = expireIn + config.expireInDefault = defaults?.expireIn } function applyRetryConfig (config, defaults) { @@ -282,35 +273,18 @@ function applyRetryConfig (config, defaults) { assert(!('retryLimit' in config) || (Number.isInteger(config.retryLimit) && config.retryLimit >= 0), 'retryLimit must be an integer >= 0') assert(!('retryBackoff' in config) || (config.retryBackoff === true || config.retryBackoff === false), 'retryBackoff must be either true or false') - if (defaults) { - config.retryDelay = config.retryDelay || defaults.retryDelay - config.retryLimit = config.retryLimit || defaults.retryLimit - config.retryBackoff = config.retryBackoff || defaults.retryBackoff - } - - config.retryDelay = config.retryDelay || 0 - config.retryLimit = config.retryLimit || 0 - config.retryBackoff = !!config.retryBackoff - config.retryDelay = (config.retryBackoff && 
!config.retryDelay) ? 1 : config.retryDelay - config.retryLimit = (config.retryDelay && !config.retryLimit) ? 1 : config.retryLimit + config.retryDelayDefault = defaults?.retryDelay + config.retryLimitDefault = defaults?.retryLimit + config.retryBackoffDefault = defaults?.retryBackoff } -function applyNewJobCheckInterval (config, defaults) { - const second = 1000 - - assert(!('newJobCheckInterval' in config) || config.newJobCheckInterval >= 100, - 'configuration assert: newJobCheckInterval must be at least every 100ms') - - assert(!('newJobCheckIntervalSeconds' in config) || config.newJobCheckIntervalSeconds >= 1, - 'configuration assert: newJobCheckIntervalSeconds must be at least every second') +function applyPollingInterval (config, defaults) { + assert(!('pollingIntervalSeconds' in config) || config.pollingIntervalSeconds >= 0.5, + 'configuration assert: pollingIntervalSeconds must be at least every 500ms') - config.newJobCheckInterval = ('newJobCheckIntervalSeconds' in config) - ? config.newJobCheckIntervalSeconds * second - : ('newJobCheckInterval' in config) - ? config.newJobCheckInterval - : defaults - ? defaults.newJobCheckInterval - : second * 2 + config.pollingInterval = ('pollingIntervalSeconds' in config) + ? config.pollingIntervalSeconds * 1000 + : defaults?.pollingInterval || 2000 } function applyMaintenanceConfig (config) { @@ -325,6 +299,8 @@ function applyMaintenanceConfig (config) { : ('maintenanceIntervalSeconds' in config) ? config.maintenanceIntervalSeconds : 120 + + assert(config.maintenanceIntervalSeconds / 60 / 60 < MAX_INTERVAL_HOURS, `configuration assert: maintenance interval cannot exceed ${MAX_INTERVAL_HOURS} hours`) } function applyDeleteConfig (config) { @@ -367,6 +343,10 @@ function applyMonitoringConfig (config) { ? 
config.monitorStateIntervalSeconds : null + if (config.monitorStateIntervalSeconds) { + assert(config.monitorStateIntervalSeconds / 60 / 60 < MAX_INTERVAL_HOURS, `configuration assert: state monitoring interval cannot exceed ${MAX_INTERVAL_HOURS} hours`) + } + const TEN_MINUTES_IN_SECONDS = 600 assert(!('clockMonitorIntervalSeconds' in config) || (config.clockMonitorIntervalSeconds >= 1 && config.clockMonitorIntervalSeconds <= TEN_MINUTES_IN_SECONDS), @@ -382,26 +362,21 @@ function applyMonitoringConfig (config) { ? config.clockMonitorIntervalSeconds : TEN_MINUTES_IN_SECONDS - assert(!('cronMonitorIntervalSeconds' in config) || (config.cronMonitorIntervalSeconds >= 1 && config.cronMonitorIntervalSeconds <= 60), - 'configuration assert: cronMonitorIntervalSeconds must be between 1 and 60 seconds') + assert(!('cronMonitorIntervalSeconds' in config) || (config.cronMonitorIntervalSeconds >= 1 && config.cronMonitorIntervalSeconds <= 45), + 'configuration assert: cronMonitorIntervalSeconds must be between 1 and 45 seconds') config.cronMonitorIntervalSeconds = ('cronMonitorIntervalSeconds' in config) ? config.cronMonitorIntervalSeconds - : 60 + : 30 - assert(!('cronWorkerIntervalSeconds' in config) || (config.cronWorkerIntervalSeconds >= 1 && config.cronWorkerIntervalSeconds <= 60), - 'configuration assert: cronWorkerIntervalSeconds must be between 1 and 60 seconds') + assert(!('cronWorkerIntervalSeconds' in config) || (config.cronWorkerIntervalSeconds >= 1 && config.cronWorkerIntervalSeconds <= 45), + 'configuration assert: cronWorkerIntervalSeconds must be between 1 and 45 seconds') config.cronWorkerIntervalSeconds = ('cronWorkerIntervalSeconds' in config) ? 
config.cronWorkerIntervalSeconds - : 4 -} - -function applyUuidConfig (config) { - assert(!('uuid' in config) || config.uuid === 'v1' || config.uuid === 'v4', 'configuration assert: uuid option only supports v1 or v4') - config.uuid = config.uuid || 'v4' + : 5 } function warnClockSkew (message) { diff --git a/src/boss.js b/src/boss.js index 01a47c91..94c293f8 100644 --- a/src/boss.js +++ b/src/boss.js @@ -1,12 +1,6 @@ const EventEmitter = require('events') const plans = require('./plans') -const { states } = require('./plans') -const { COMPLETION_JOB_PREFIX } = plans - -const queues = { - MAINTENANCE: '__pgboss__maintenance', - MONITOR_STATES: '__pgboss__monitor-states' -} +const { delay } = require('./tools') const events = { error: 'error', @@ -23,177 +17,131 @@ class Boss extends EventEmitter { this.manager = config.manager this.maintenanceIntervalSeconds = config.maintenanceIntervalSeconds - - this.monitorStates = config.monitorStateIntervalSeconds !== null - - if (this.monitorStates) { - this.monitorIntervalSeconds = config.monitorStateIntervalSeconds - } + this.monitorStateIntervalSeconds = config.monitorStateIntervalSeconds this.events = events - this.expireCommand = plans.locked(config.schema, plans.expire(config.schema)) + this.failJobsByTimeoutCommand = plans.locked(config.schema, plans.failJobsByTimeout(config.schema)) this.archiveCommand = plans.locked(config.schema, plans.archive(config.schema, config.archiveInterval, config.archiveFailedInterval)) - this.purgeCommand = plans.locked(config.schema, plans.purge(config.schema, config.deleteAfter)) - this.getMaintenanceTimeCommand = plans.getMaintenanceTime(config.schema) - this.setMaintenanceTimeCommand = plans.setMaintenanceTime(config.schema) + this.dropCommand = plans.locked(config.schema, plans.drop(config.schema, config.deleteAfter)) + this.trySetMaintenanceTimeCommand = plans.trySetMaintenanceTime(config.schema) + this.trySetMonitorTimeCommand = plans.trySetMonitorTime(config.schema) 
this.countStatesCommand = plans.countStates(config.schema) this.functions = [ this.expire, this.archive, - this.purge, + this.drop, this.countStates, - this.getQueueNames + this.maintain ] } async supervise () { - this.metaMonitor() - - await this.manager.deleteQueue(COMPLETION_JOB_PREFIX + queues.MAINTENANCE) - await this.manager.deleteQueue(queues.MAINTENANCE) - - await this.maintenanceAsync() - - const maintenanceWorkOptions = { - newJobCheckIntervalSeconds: Math.max(1, this.maintenanceIntervalSeconds / 2) - } + this.maintenanceInterval = setInterval(() => this.onSupervise(), this.maintenanceIntervalSeconds * 1000) + } - await this.manager.work(queues.MAINTENANCE, maintenanceWorkOptions, (job) => this.onMaintenance(job)) + async monitor () { + this.monitorInterval = setInterval(() => this.onMonitor(), this.monitorStateIntervalSeconds * 1000) + } - if (this.monitorStates) { - await this.manager.deleteQueue(COMPLETION_JOB_PREFIX + queues.MONITOR_STATES) - await this.manager.deleteQueue(queues.MONITOR_STATES) + async onMonitor () { + try { + if (this.monitoring) { + return + } - await this.monitorStatesAsync() + this.monitoring = true - const monitorStatesWorkOptions = { - newJobCheckIntervalSeconds: Math.max(1, this.monitorIntervalSeconds / 2) + if (this.config.__test__delay_monitor) { + await delay(this.config.__test__delay_monitor) } - await this.manager.work(queues.MONITOR_STATES, monitorStatesWorkOptions, (job) => this.onMonitorStates(job)) - } - } + if (this.config.__test__throw_monitor) { + throw new Error(this.config.__test__throw_monitor) + } - metaMonitor () { - this.metaMonitorInterval = setInterval(async () => { - try { - if (this.config.__test__throw_meta_monitor) { - throw new Error(this.config.__test__throw_meta_monitor) - } - - const { secondsAgo } = await this.getMaintenanceTime() - - if (secondsAgo > this.maintenanceIntervalSeconds * 2) { - await this.manager.deleteQueue(queues.MAINTENANCE, { before: states.completed }) - await 
this.maintenanceAsync() - } - } catch (err) { - this.emit(events.error, err) + if (this.stopped) { + return } - }, this.maintenanceIntervalSeconds * 2 * 1000) - } - async maintenanceAsync (options = {}) { - const { startAfter } = options + const { rows } = await this.db.executeSql(this.trySetMonitorTimeCommand, [this.config.monitorStateIntervalSeconds]) - options = { - startAfter, - retentionSeconds: this.maintenanceIntervalSeconds * 4, - singletonKey: queues.MAINTENANCE, - onComplete: false + if (rows.length === 1 && !this.stopped) { + const states = await this.countStates() + this.emit(events.monitorStates, states) + } + } catch (err) { + this.emit(events.error, err) + } finally { + this.monitoring = false } - - await this.manager.send(queues.MAINTENANCE, null, options) } - async monitorStatesAsync (options = {}) { - const { startAfter } = options + async onSupervise () { + try { + if (this.maintaining) { + return + } - options = { - startAfter, - retentionSeconds: this.monitorIntervalSeconds * 4, - singletonKey: queues.MONITOR_STATES, - onComplete: false - } + this.maintaining = true - await this.manager.send(queues.MONITOR_STATES, null, options) - } + if (this.config.__test__delay_maintenance && !this.stopped) { + this.__testDelayPromise = delay(this.config.__test__delay_maintenance) + await this.__testDelayPromise + } - async onMaintenance (job) { - try { if (this.config.__test__throw_maint) { throw new Error(this.config.__test__throw_maint) } - const started = Date.now() - - await this.expire() - await this.archive() - await this.purge() - - const ended = Date.now() - - await this.setMaintenanceTime() + if (this.stopped) { + return + } - this.emit('maintenance', { ms: ended - started }) + const { rows } = await this.db.executeSql(this.trySetMaintenanceTimeCommand, [this.config.maintenanceIntervalSeconds]) - if (!this.stopped) { - await this.manager.complete(job.id) // pre-complete to bypass throttling - await this.maintenanceAsync({ startAfter: 
this.maintenanceIntervalSeconds }) + if (rows.length === 1 && !this.stopped) { + const result = await this.maintain() + this.emit(events.maintenance, result) } } catch (err) { this.emit(events.error, err) + } finally { + this.maintaining = false } } - async onMonitorStates (job) { - try { - if (this.config.__test__throw_monitor) { - throw new Error(this.config.__test__throw_monitor) - } + async maintain () { + const started = Date.now() - const states = await this.countStates() + !this.stopped && await this.expire() + !this.stopped && await this.archive() + !this.stopped && await this.drop() - this.emit(events.monitorStates, states) + const ended = Date.now() - if (!this.stopped && this.monitorStates) { - await this.manager.complete(job.id) // pre-complete to bypass throttling - await this.monitorStatesAsync({ startAfter: this.monitorIntervalSeconds }) - } - } catch (err) { - this.emit(events.error, err) - } + return { ms: ended - started } } async stop () { - if (this.config.__test__throw_stop) { - throw new Error(this.config.__test__throw_stop) - } - if (!this.stopped) { - if (this.metaMonitorInterval) { - clearInterval(this.metaMonitorInterval) - } - - await this.manager.offWork(queues.MAINTENANCE) - - if (this.monitorStates) { - await this.manager.offWork(queues.MONITOR_STATES) - } + if (this.__testDelayPromise) this.__testDelayPromise.abort() + if (this.maintenanceInterval) clearInterval(this.maintenanceInterval) + if (this.monitorInterval) clearInterval(this.monitorInterval) this.stopped = true } } async countStates () { - const stateCountDefault = { ...plans.states } + const stateCountDefault = { ...plans.JOB_STATES } - Object.keys(stateCountDefault) - .forEach(key => { stateCountDefault[key] = 0 }) + for (const key of Object.keys(stateCountDefault)) { + stateCountDefault[key] = 0 + } - const counts = await this.executeSql(this.countStatesCommand) + const counts = await this.db.executeSql(this.countStatesCommand) const states = counts.rows.reduce((acc, item) 
=> { if (item.name) { @@ -213,43 +161,16 @@ class Boss extends EventEmitter { } async expire () { - await this.executeSql(this.expireCommand) + await this.db.executeSql(this.failJobsByTimeoutCommand) } async archive () { - await this.executeSql(this.archiveCommand) + await this.db.executeSql(this.archiveCommand) } - async purge () { - await this.executeSql(this.purgeCommand) - } - - async setMaintenanceTime () { - await this.executeSql(this.setMaintenanceTimeCommand) - } - - async getMaintenanceTime () { - if (!this.stopped) { - const { rows } = await this.db.executeSql(this.getMaintenanceTimeCommand) - - let { maintained_on: maintainedOn, seconds_ago: secondsAgo } = rows[0] - - secondsAgo = secondsAgo !== null ? parseFloat(secondsAgo) : this.maintenanceIntervalSeconds * 10 - - return { maintainedOn, secondsAgo } - } - } - - getQueueNames () { - return queues - } - - async executeSql (sql, params) { - if (!this.stopped) { - return await this.db.executeSql(sql, params) - } + async drop () { + await this.db.executeSql(this.dropCommand) } } module.exports = Boss -module.exports.QUEUES = queues diff --git a/src/contractor.js b/src/contractor.js index a77bad9b..1ae7766e 100644 --- a/src/contractor.js +++ b/src/contractor.js @@ -21,32 +21,53 @@ class Contractor { this.config = config this.db = db this.migrations = this.config.migrations || migrationStore.getAll(this.config.schema) + + // exported api to index + this.functions = [ + this.schemaVersion, + this.isInstalled + ] } - async version () { + async schemaVersion () { const result = await this.db.executeSql(plans.getVersion(this.config.schema)) return result.rows.length ? parseInt(result.rows[0].version) : null } async isInstalled () { const result = await this.db.executeSql(plans.versionTableExists(this.config.schema)) - return result.rows.length ? 
result.rows[0].name : null + return !!result.rows[0].name } async start () { const installed = await this.isInstalled() if (installed) { - const version = await this.version() + const version = await this.schemaVersion() if (schemaVersion > version) { - await this.migrate(version) + throw new Error('Migrations are not supported to v10') + // await this.migrate(version) } } else { await this.create() } } + async check () { + const installed = await this.isInstalled() + + if (!installed) { + throw new Error('pg-boss is not installed') + } + + const version = await this.schemaVersion() + + if (schemaVersion !== version) { + throw new Error('pg-boss database requires migrations') + } + } + async create () { try { const commands = plans.create(this.config.schema, schemaVersion) @@ -65,15 +86,15 @@ class Contractor { } } - async next (version) { - const commands = migrationStore.next(this.config.schema, version, this.migrations) - await this.db.executeSql(commands) - } + // async next (version) { + // const commands = migrationStore.next(this.config.schema, version, this.migrations) + // await this.db.executeSql(commands) + // } - async rollback (version) { - const commands = migrationStore.rollback(this.config.schema, version, this.migrations) - await this.db.executeSql(commands) - } + // async rollback (version) { + // const commands = migrationStore.rollback(this.config.schema, version, this.migrations) + // await this.db.executeSql(commands) + // } } module.exports = Contractor diff --git a/src/db.js b/src/db.js index 97a6261f..39a52fee 100644 --- a/src/db.js +++ b/src/db.js @@ -10,6 +10,10 @@ class Db extends EventEmitter { this.config = config } + events = { + error: 'error' + } + async open () { this.pool = new pg.Pool(this.config) this.pool.on('error', error => this.emit('error', error)) @@ -25,16 +29,18 @@ class Db extends EventEmitter { async executeSql (text, values) { if (this.opened) { - return await this.pool.query(text, values) - } - } + // if 
(this.config.debug === true) { + // console.log(`${new Date().toISOString()}: DEBUG SQL`) + // console.log(text) + + // if (values) { + // console.log(`${new Date().toISOString()}: DEBUG VALUES`) + // console.log(values) + // } + // } - static quotePostgresStr (str) { - const delimeter = '$sanitize$' - if (str.includes(delimeter)) { - throw new Error(`Attempted to quote string that contains reserved Postgres delimeter: ${str}`) + return await this.pool.query(text, values) } - return `${delimeter}${str}${delimeter}` } } diff --git a/src/index.js b/src/index.js index 775bc345..23acfe7a 100644 --- a/src/index.js +++ b/src/index.js @@ -6,13 +6,24 @@ const Manager = require('./manager') const Timekeeper = require('./timekeeper') const Boss = require('./boss') const Db = require('./db') -const delay = require('delay') +const { delay } = require('./tools') const events = { error: 'error', stopped: 'stopped' } class PgBoss extends EventEmitter { + #stoppingOn + #stopped + #starting + #started + #config + #db + #boss + #contractor + #manager + #timekeeper + static getConstructionPlans (schema) { return Contractor.constructionPlans(schema) } @@ -25,158 +36,182 @@ class PgBoss extends EventEmitter { return Contractor.rollbackPlans(schema, version) } - constructor (value) { - const config = Attorney.getConfig(value) + static states = plans.JOB_STATES + static policies = plans.QUEUE_POLICIES + constructor (value) { super() - const db = getDb(config) + this.#stoppingOn = null + this.#stopped = true + + const config = Attorney.getConfig(value) + this.#config = config + + const db = this.getDb() + this.#db = db if (db.isOurs) { - promoteEvent.call(this, db, 'error') + this.#promoteEvents(db) } - const manager = new Manager(db, config) - Object.keys(manager.events).forEach(event => promoteEvent.call(this, manager, manager.events[event])) - manager.functions.forEach(func => promoteFunction.call(this, manager, func)) + const contractor = new Contractor(db, config) + const manager = 
new Manager(db, config) const bossConfig = { ...config, manager } const boss = new Boss(db, bossConfig) - Object.keys(boss.events).forEach(event => promoteEvent.call(this, boss, boss.events[event])) - boss.functions.forEach(func => promoteFunction.call(this, boss, func)) const timekeeper = new Timekeeper(db, bossConfig) - Object.keys(timekeeper.events).forEach(event => promoteEvent.call(this, timekeeper, timekeeper.events[event])) - timekeeper.functions.forEach(func => promoteFunction.call(this, timekeeper, func)) - manager.timekeeper = timekeeper - this.stoppingOn = null - this.stopped = true - this.config = config - this.db = db - this.boss = boss - this.contractor = new Contractor(db, config) - this.manager = manager - this.timekeeper = timekeeper - - function getDb (config) { - if (config.db) { - return config.db - } + this.#promoteEvents(manager) + this.#promoteEvents(boss) + this.#promoteEvents(timekeeper) - const db = new Db(config) - db.isOurs = true - return db - } + this.#promoteFunctions(boss) + this.#promoteFunctions(contractor) + this.#promoteFunctions(manager) + this.#promoteFunctions(timekeeper) - function promoteFunction (obj, func) { - this[func.name] = (...args) => { - const shouldRun = !this.started || !((func.name === 'work' || func.name === 'onComplete') && (this.stopped || this.stoppingOn)) + this.#boss = boss + this.#contractor = contractor + this.#manager = manager + this.#timekeeper = timekeeper + } - if (shouldRun) { - return func.apply(obj, args) - } else { - const state = this.stoppingOn ? 'stopping' : this.stopped ? 'stopped' : !this.started ? 
'not started' : 'started' - return Promise.reject(new Error(`pg-boss is ${state}.`)) - } - } + getDb () { + if (this.#db) { + return this.#db + } + + if (this.#config.db) { + return this.#config.db } - function promoteEvent (emitter, event) { + const db = new Db(this.#config) + db.isOurs = true + return db + } + + #promoteEvents (emitter) { + for (const event of Object.values(emitter?.events)) { emitter.on(event, arg => this.emit(event, arg)) } } + #promoteFunctions (obj) { + for (const func of obj?.functions) { + this[func.name] = (...args) => func.apply(obj, args) + } + } + async start () { - if (!this.stopped) { - return this + if (this.#starting || this.#started) { + return } - if (this.db.isOurs && !this.db.opened) { - await this.db.open() + this.#starting = true + + if (this.#db.isOurs && !this.#db.opened) { + await this.#db.open() } - await this.contractor.start() + if (this.#config.migrate) { + await this.#contractor.start() + } else { + await this.#contractor.check() + } - this.stopped = false - this.started = true + this.#manager.start() - this.manager.start() + if (this.#config.supervise) { + await this.#boss.supervise() + } - if (!this.config.noSupervisor) { - await this.boss.supervise() + if (this.#config.monitorStateIntervalSeconds) { + await this.#boss.monitor() } - if (!this.config.noScheduling) { - await this.timekeeper.start() + if (this.#config.schedule) { + await this.#timekeeper.start() } + this.#starting = false + this.#started = true + this.#stopped = false + return this } async stop (options = {}) { - if (this.stoppingOn) { + if (this.#stoppingOn || this.#stopped) { return } - if (this.stopped) { - this.emit(events.stopped) - } - - let { destroy = false, graceful = true, timeout = 30000 } = options + let { close = true, graceful = true, timeout = 30000, wait = true } = options timeout = Math.max(timeout, 1000) - this.stoppingOn = Date.now() + this.#stoppingOn = Date.now() - await this.manager.stop() - await this.timekeeper.stop() + await 
this.#manager.stop() + await this.#timekeeper.stop() + await this.#boss.stop() - const shutdown = async () => { - this.stopped = true - this.stoppingOn = null + await new Promise((resolve, reject) => { + const shutdown = async () => { + try { + if (this.#config.__test__throw_shutdown) { + throw new Error(this.#config.__test__throw_shutdown) + } - if (this.db.isOurs && this.db.opened && destroy) { - await this.db.close() - } + await this.#manager.failWip() - this.emit(events.stopped) - } + if (this.#db.isOurs && this.#db.opened && close) { + await this.#db.close() + } - if (!graceful) { - await this.boss.stop() - await shutdown() - return - } + this.#stopped = true + this.#stoppingOn = null + this.#started = false - setImmediate(async () => { - let closing = false + this.emit(events.stopped) + resolve() + } catch (err) { + this.emit(events.error, err) + reject(err) + } + } - try { - while (Date.now() - this.stoppingOn < timeout) { - if (this.manager.getWipData({ includeInternal: closing }).length === 0) { - if (closing) { - break - } + if (!graceful) { + return shutdown() + } - closing = true + if (!wait) { + resolve() + } - await this.boss.stop() + setImmediate(async () => { + try { + if (this.#config.__test__throw_stop_monitor) { + throw new Error(this.#config.__test__throw_stop_monitor) } - await delay(1000) - } + const isWip = () => this.#manager.getWipData({ includeInternal: false }).length > 0 - await this.boss.stop() - await shutdown() - } catch (err) { - this.emit(events.error, err) - } + while ((Date.now() - this.#stoppingOn) < timeout && isWip()) { + await delay(500) + } + + await shutdown() + } catch (err) { + reject(err) + this.emit(events.error, err) + } + }) }) } } module.exports = PgBoss -module.exports.states = plans.states diff --git a/src/manager.js b/src/manager.js index 29727f03..fddf46ea 100644 --- a/src/manager.js +++ b/src/manager.js @@ -1,24 +1,16 @@ const assert = require('assert') const EventEmitter = require('events') -const delay = 
require('delay') -const uuid = require('uuid') -const debounce = require('lodash.debounce') +const { randomUUID } = require('crypto') const { serializeError: stringify } = require('serialize-error') +const { delay } = require('./tools') const Attorney = require('./attorney') const Worker = require('./worker') -const Db = require('./db') -const pMap = require('p-map') +const plans = require('./plans') -const { QUEUES: BOSS_QUEUES } = require('./boss') const { QUEUES: TIMEKEEPER_QUEUES } = require('./timekeeper') +const { QUEUE_POLICIES } = plans -const INTERNAL_QUEUES = Object.values(BOSS_QUEUES).concat(Object.values(TIMEKEEPER_QUEUES)).reduce((acc, i) => ({ ...acc, [i]: i }), {}) - -const plans = require('./plans') -const { COMPLETION_JOB_PREFIX, SINGLETON_QUEUE_KEY } = plans - -const WIP_EVENT_INTERVAL = 2000 -const WIP_DEBOUNCE_OPTIONS = { leading: true, trailing: true, maxWait: WIP_EVENT_INTERVAL } +const INTERNAL_QUEUES = Object.values(TIMEKEEPER_QUEUES).reduce((acc, i) => ({ ...acc, [i]: i }), {}) const events = { error: 'error', @@ -27,16 +19,14 @@ const events = { const resolveWithinSeconds = async (promise, seconds) => { const timeout = Math.max(1, seconds) * 1000 - const reject = delay.reject(timeout, { value: new Error(`handler execution exceeded ${timeout}ms`) }) + const reject = delay(timeout, `handler execution exceeded ${timeout}ms`) let result try { result = await Promise.race([promise, reject]) } finally { - try { - reject.clear() - } catch {} + reject.abort() } return result @@ -50,6 +40,7 @@ class Manager extends EventEmitter { this.db = db this.events = events + this.wipTs = Date.now() this.workers = new Map() this.nextJobCommand = plans.fetchNextJob(config.schema) @@ -58,26 +49,32 @@ class Manager extends EventEmitter { this.completeJobsCommand = plans.completeJobs(config.schema) this.cancelJobsCommand = plans.cancelJobs(config.schema) this.resumeJobsCommand = plans.resumeJobs(config.schema) - this.failJobsCommand = plans.failJobs(config.schema) 
+ this.deleteJobsCommand = plans.deleteJobs(config.schema) + this.failJobsByIdCommand = plans.failJobsById(config.schema) this.getJobByIdCommand = plans.getJobById(config.schema) this.getArchivedJobByIdCommand = plans.getArchivedJobById(config.schema) this.subscribeCommand = plans.subscribe(config.schema) this.unsubscribeCommand = plans.unsubscribe(config.schema) + this.getQueuesCommand = plans.getQueues(config.schema) + this.getQueueByNameCommand = plans.getQueueByName(config.schema) this.getQueuesForEventCommand = plans.getQueuesForEvent(config.schema) + this.createQueueCommand = plans.createQueue(config.schema) + this.updateQueueCommand = plans.updateQueue(config.schema) + this.purgeQueueCommand = plans.purgeQueue(config.schema) + this.deleteQueueCommand = plans.deleteQueue(config.schema) + this.clearStorageCommand = plans.clearStorage(config.schema) // exported api to index this.functions = [ this.complete, this.cancel, this.resume, + this.deleteJob, this.fail, this.fetch, - this.fetchCompleted, this.work, this.offWork, this.notifyWorker, - this.onComplete, - this.offComplete, this.publish, this.subscribe, this.unsubscribe, @@ -85,41 +82,45 @@ class Manager extends EventEmitter { this.send, this.sendDebounced, this.sendThrottled, - this.sendOnce, this.sendAfter, - this.sendSingleton, + this.createQueue, + this.updateQueue, this.deleteQueue, - this.deleteAllQueues, - this.clearStorage, + this.purgeQueue, this.getQueueSize, + this.getQueue, + this.getQueues, + this.clearStorage, this.getJobById ] - - this.emitWipThrottled = debounce(() => this.emit(events.wip, this.getWipData()), WIP_EVENT_INTERVAL, WIP_DEBOUNCE_OPTIONS) } start () { - this.stopping = false + this.stopped = false } async stop () { - this.stopping = true + this.stopped = true - for (const sub of this.workers.values()) { - if (!INTERNAL_QUEUES[sub.name]) { - await this.offWork(sub.name) + for (const worker of this.workers.values()) { + if (!INTERNAL_QUEUES[worker.name]) { + await 
this.offWork(worker.name) } } } - async work (name, ...args) { - const { options, callback } = Attorney.checkWorkArgs(name, args, this.config) - return await this.watch(name, options, callback) + async failWip () { + for (const worker of this.workers.values()) { + const jobIds = worker.jobs.map(j => j.id) + if (jobIds.length) { + await this.fail(worker.name, jobIds, 'pg-boss shut down while active') + } + } } - async onComplete (name, ...args) { + async work (name, ...args) { const { options, callback } = Attorney.checkWorkArgs(name, args, this.config) - return await this.watch(COMPLETION_JOB_PREFIX + name, options, callback) + return await this.watch(name, options, callback) } addWorker (worker) { @@ -136,7 +137,12 @@ class Manager extends EventEmitter { emitWip (name) { if (!INTERNAL_QUEUES[name]) { - this.emitWipThrottled() + const now = Date.now() + + if (now - this.wipTs > 2000) { + this.emit(events.wip, this.getWipData()) + this.wipTs = now + } } } @@ -175,78 +181,40 @@ class Manager extends EventEmitter { } async watch (name, options, callback) { - if (this.stopping) { - throw new Error('Workers are disabled. pg-boss is stopping.') + if (this.stopped) { + throw new Error('Workers are disabled. 
pg-boss is stopped') } const { - newJobCheckInterval: interval = this.config.newJobCheckInterval, + pollingInterval: interval = this.config.pollingInterval, batchSize, - teamSize = 1, - teamConcurrency = 1, - teamRefill: refill = false, includeMetadata = false, - enforceSingletonQueueActiveLimit = false + priority = true } = options - const id = uuid.v4() - - let queueSize = 0 - - let refillTeamPromise - let resolveRefillTeam - - // Setup a promise that onFetch can await for when at least one - // job is finished and so the team is ready to be topped up - const createTeamRefillPromise = () => { - refillTeamPromise = new Promise((resolve) => { resolveRefillTeam = resolve }) - } - - createTeamRefillPromise() - - const onRefill = () => { - queueSize-- - resolveRefillTeam() - createTeamRefillPromise() - } + const id = randomUUID({ disableEntropyCache: true }) - const fetch = () => this.fetch(name, batchSize || (teamSize - queueSize), { includeMetadata, enforceSingletonQueueActiveLimit }) + const fetch = () => this.fetch(name, { batchSize, includeMetadata, priority }) const onFetch = async (jobs) => { + if (!jobs.length) { + return + } + if (this.config.__test__throw_worker) { throw new Error('__test__throw_worker') } this.emitWip(name) - if (batchSize) { - const maxExpiration = jobs.reduce((acc, i) => Math.max(acc, i.expire_in_seconds), 0) - - await resolveWithinSeconds(Promise.all([callback(jobs)]), maxExpiration) - .then(() => this.complete(jobs.map(job => job.id))) - .catch(err => this.fail(jobs.map(job => job.id), err)) - } else { - if (refill) { - queueSize += jobs.length || 1 - } - - const allTeamPromise = pMap(jobs, job => - resolveWithinSeconds(callback(job), job.expire_in_seconds) - .then(result => this.complete(job.id, result)) - .catch(err => this.fail(job.id, err)) - .then(() => refill ? 
onRefill() : null) - , { concurrency: teamConcurrency } - ).catch(() => {}) // allow promises & non-promises to live together in harmony - - if (refill) { - if (queueSize < teamSize) { - return - } else { - await refillTeamPromise - } - } else { - await allTeamPromise - } + const maxExpiration = jobs.reduce((acc, i) => Math.max(acc, i.expireInSeconds), 0) + const jobIds = jobs.map(job => job.id) + + try { + const result = await resolveWithinSeconds(callback(jobs), maxExpiration) + this.complete(name, jobIds, jobIds.length === 1 ? result : undefined) + } catch (err) { + this.fail(name, jobIds, err) } this.emitWip(name) @@ -320,21 +288,9 @@ class Manager extends EventEmitter { async publish (event, ...args) { assert(event, 'Missing required argument') - const result = await this.db.executeSql(this.getQueuesForEventCommand, [event]) + const { rows } = await this.db.executeSql(this.getQueuesForEventCommand, [event]) - if (!result || result.rowCount === 0) { - return [] - } - - return await Promise.all(result.rows.map(({ name }) => this.send(name, ...args))) - } - - async offComplete (value) { - if (typeof value === 'string') { - value = COMPLETION_JOB_PREFIX + value - } - - return await this.offWork(value) + await Promise.allSettled(rows.map(({ name }) => this.send(name, ...args))) } async send (...args) { @@ -342,26 +298,6 @@ class Manager extends EventEmitter { return await this.createJob(name, data, options) } - async sendOnce (name, data, options, key) { - options = options ? { ...options } : {} - - options.singletonKey = key || name - - const result = Attorney.checkSendArgs([name, data, options], this.config) - - return await this.createJob(result.name, result.data, result.options) - } - - async sendSingleton (name, data, options) { - options = options ? 
{ ...options } : {} - - options.singletonKey = SINGLETON_QUEUE_KEY - - const result = Attorney.checkSendArgs([name, data, options], this.config) - - return await this.createJob(result.name, result.data, result.options) - } - async sendAfter (name, data, options, after) { options = options ? { ...options } : {} options.startAfter = after @@ -395,42 +331,52 @@ class Manager extends EventEmitter { async createJob (name, data, options, singletonOffset = 0) { const { + id = null, db: wrapper, - expireIn, priority, startAfter, - keepUntil, singletonKey = null, singletonSeconds, - retryBackoff, + deadLetter = null, + expireIn, + expireInDefault, + keepUntil, + keepUntilDefault, retryLimit, + retryLimitDefault, retryDelay, - onComplete + retryDelayDefault, + retryBackoff, + retryBackoffDefault } = options - const id = uuid[this.config.uuid]() - const values = [ id, // 1 name, // 2 - priority, // 3 - retryLimit, // 4 + data, // 3 + priority, // 4 startAfter, // 5 - expireIn, // 6 - data, // 7 - singletonKey, // 8 - singletonSeconds, // 9 - singletonOffset, // 10 - retryDelay, // 11 - retryBackoff, // 12 - keepUntil, // 13 - onComplete // 14 + singletonKey, // 6 + singletonSeconds, // 7 + singletonOffset, // 8 + deadLetter, // 9 + expireIn, // 10 + expireInDefault, // 11 + keepUntil, // 12 + keepUntilDefault, // 13 + retryLimit, // 14 + retryLimitDefault, // 15 + retryDelay, // 16 + retryDelayDefault, // 17 + retryBackoff, // 18 + retryBackoffDefault // 19 ] + const db = wrapper || this.db - const result = await db.executeSql(this.insertJobCommand, values) + const { rows } = await db.executeSql(this.insertJobCommand, values) - if (result && result.rowCount === 1) { - return result.rows[0].id + if (rows.length === 1) { + return rows[0].id } if (!options.singletonNextSlot) { @@ -449,11 +395,20 @@ class Manager extends EventEmitter { } async insert (jobs, options = {}) { - const { db: wrapper } = options - const db = wrapper || this.db - const checkedJobs = 
Attorney.checkInsertArgs(jobs) - const data = JSON.stringify(checkedJobs) - return await db.executeSql(this.insertJobsCommand, [data]) + assert(Array.isArray(jobs), 'jobs argument should be an array') + + const db = options.db || this.db + + const params = [ + JSON.stringify(jobs), // 1 + this.config.expireIn, // 2 + this.config.keepUntil, // 3 + this.config.retryLimit, // 4 + this.config.retryDelay, // 5 + this.config.retryBackoff // 6 + ] + + return await db.executeSql(this.insertJobsCommand, params) } getDebounceStartAfter (singletonSeconds, clockOffset) { @@ -473,39 +428,20 @@ class Manager extends EventEmitter { return startAfter } - async fetch (name, batchSize, options = {}) { - const values = Attorney.checkFetchArgs(name, batchSize, options) + async fetch (name, options = {}) { + Attorney.checkFetchArgs(name, options) const db = options.db || this.db - const preparedStatement = this.nextJobCommand(options.includeMetadata || false, options.enforceSingletonQueueActiveLimit || false) - const statementValues = [values.name, batchSize || 1] + const nextJobSql = this.nextJobCommand({ ...options }) let result - if (options.enforceSingletonQueueActiveLimit && !options.db) { - // Prepare/format now and send multi-statement transaction - const fetchQuery = preparedStatement - .replace('$1', Db.quotePostgresStr(statementValues[0])) - .replace('$2', statementValues[1].toString()) - // eslint-disable-next-line no-unused-vars - const [_begin, _setLocal, fetchResult, _commit] = await db.executeSql([ - 'BEGIN', - 'SET LOCAL jit = OFF', // JIT can slow things down significantly - fetchQuery, - 'COMMIT' - ].join(';\n')) - result = fetchResult - } else { - result = await db.executeSql(preparedStatement, statementValues) - } - if (!result || result.rows.length === 0) { - return null + try { + result = await db.executeSql(nextJobSql, [name, options.batchSize]) + } catch (err) { + // errors from fetchquery should only be unique constraint violations } - return result.rows.length 
=== 1 && !batchSize ? result.rows[0] : result.rows - } - - async fetchCompleted (name, batchSize, options = {}) { - return await this.fetch(COMPLETION_JOB_PREFIX + name, batchSize, options) + return result?.rows || [] } mapCompletionIdArg (id, funcName) { @@ -530,85 +466,177 @@ class Manager extends EventEmitter { return stringify(result) } - mapCompletionResponse (ids, result) { + mapCommandResponse (ids, result) { return { jobs: ids, requested: ids.length, - updated: result && result.rows ? parseInt(result.rows[0].count) : 0 + affected: result && result.rows ? parseInt(result.rows[0].count) : 0 } } - async complete (id, data, options = {}) { + async complete (name, id, data, options = {}) { + Attorney.assertQueueName(name) const db = options.db || this.db const ids = this.mapCompletionIdArg(id, 'complete') - const result = await db.executeSql(this.completeJobsCommand, [ids, this.mapCompletionDataArg(data)]) - return this.mapCompletionResponse(ids, result) + const result = await db.executeSql(this.completeJobsCommand, [name, ids, this.mapCompletionDataArg(data)]) + return this.mapCommandResponse(ids, result) } - async fail (id, data, options = {}) { + async fail (name, id, data, options = {}) { + Attorney.assertQueueName(name) const db = options.db || this.db const ids = this.mapCompletionIdArg(id, 'fail') - const result = await db.executeSql(this.failJobsCommand, [ids, this.mapCompletionDataArg(data)]) - return this.mapCompletionResponse(ids, result) + const result = await db.executeSql(this.failJobsByIdCommand, [name, ids, this.mapCompletionDataArg(data)]) + return this.mapCommandResponse(ids, result) } - async cancel (id, options = {}) { + async cancel (name, id, options = {}) { + Attorney.assertQueueName(name) const db = options.db || this.db const ids = this.mapCompletionIdArg(id, 'cancel') - const result = await db.executeSql(this.cancelJobsCommand, [ids]) - return this.mapCompletionResponse(ids, result) + const result = await 
db.executeSql(this.cancelJobsCommand, [name, ids]) + return this.mapCommandResponse(ids, result) } - async resume (id, options = {}) { + async deleteJob (name, id, options = {}) { + Attorney.assertQueueName(name) + const db = options.db || this.db + const ids = this.mapCompletionIdArg(id, 'deleteJob') + const result = await db.executeSql(this.deleteJobsCommand, [name, ids]) + return this.mapCommandResponse(ids, result) + } + + async resume (name, id, options = {}) { + Attorney.assertQueueName(name) const db = options.db || this.db const ids = this.mapCompletionIdArg(id, 'resume') - const result = await db.executeSql(this.resumeJobsCommand, [ids]) - return this.mapCompletionResponse(ids, result) + const result = await db.executeSql(this.resumeJobsCommand, [name, ids]) + return this.mapCommandResponse(ids, result) + } + + async createQueue (name, options = {}) { + name = name || options.name + + Attorney.assertQueueName(name) + + const { policy = QUEUE_POLICIES.standard } = options + + assert(policy in QUEUE_POLICIES, `${policy} is not a valid queue policy`) + + const { + retryLimit, + retryDelay, + retryBackoff, + expireInSeconds, + retentionMinutes, + deadLetter + } = Attorney.checkQueueArgs(name, options) + + if (deadLetter) { + Attorney.assertQueueName(deadLetter) + } + + // todo: pull in defaults from constructor config + const data = { + policy, + retryLimit, + retryDelay, + retryBackoff, + expireInSeconds, + retentionMinutes, + deadLetter + } + + await this.db.executeSql(this.createQueueCommand, [name, data]) + } + + async getQueues () { + const { rows } = await this.db.executeSql(this.getQueuesCommand) + return rows + } + + async updateQueue (name, options = {}) { + Attorney.assertQueueName(name) + + const { policy = QUEUE_POLICIES.standard } = options + + assert(policy in QUEUE_POLICIES, `${policy} is not a valid queue policy`) + + const { + retryLimit, + retryDelay, + retryBackoff, + expireInSeconds, + retentionMinutes, + deadLetter + } = 
Attorney.checkQueueArgs(name, options) + + const params = [ + name, + policy, + retryLimit, + retryDelay, + retryBackoff, + expireInSeconds, + retentionMinutes, + deadLetter + ] + + await this.db.executeSql(this.updateQueueCommand, params) } - async deleteQueue (queue, options) { - assert(queue, 'Missing queue name argument') - const sql = plans.deleteQueue(this.config.schema, options) - const result = await this.db.executeSql(sql, [queue]) - return result ? result.rowCount : null + async getQueue (name) { + Attorney.assertQueueName(name) + + const { rows } = await this.db.executeSql(this.getQueueByNameCommand, [name]) + + return rows[0] || null + } + + async deleteQueue (name) { + Attorney.assertQueueName(name) + + const { rows } = await this.db.executeSql(this.getQueueByNameCommand, [name]) + + if (rows.length === 1) { + await this.db.executeSql(this.deleteQueueCommand, [name]) + } } - async deleteAllQueues (options) { - const sql = plans.deleteAllQueues(this.config.schema, options) - const result = await this.db.executeSql(sql) - return result ? result.rowCount : null + async purgeQueue (name) { + Attorney.assertQueueName(name) + await this.db.executeSql(this.purgeQueueCommand, [name]) } async clearStorage () { - const sql = plans.clearStorage(this.config.schema) - await this.db.executeSql(sql) + await this.db.executeSql(this.clearStorageCommand) } - async getQueueSize (queue, options) { - assert(queue, 'Missing queue name argument') + async getQueueSize (name, options) { + Attorney.assertQueueName(name) const sql = plans.getQueueSize(this.config.schema, options) - const result = await this.db.executeSql(sql, [queue]) + const result = await this.db.executeSql(sql, [name]) return result ? 
parseFloat(result.rows[0].count) : null } - async getJobById (id, options = {}) { - const db = options.db || this.db - const result1 = await db.executeSql(this.getJobByIdCommand, [id]) + async getJobById (name, id, options = {}) { + Attorney.assertQueueName(name) - if (result1 && result1.rows && result1.rows.length === 1) { - return result1.rows[0] - } + const db = options.db || this.db - const result2 = await db.executeSql(this.getArchivedJobByIdCommand, [id]) + const result1 = await db.executeSql(this.getJobByIdCommand, [name, id]) - if (result2 && result2.rows && result2.rows.length === 1) { - return result2.rows[0] + if (result1?.rows?.length === 1) { + return result1.rows[0] + } else if (options.includeArchive) { + const result2 = await db.executeSql(this.getArchivedJobByIdCommand, [name, id]) + return result2?.rows[0] || null + } else { + return null } - - return null } } diff --git a/src/migrationStore.js b/src/migrationStore.js index 08fa1b44..abe6cb25 100644 --- a/src/migrationStore.js +++ b/src/migrationStore.js @@ -64,110 +64,5 @@ function migrate (value, version, migrations) { function getAll (schema) { return [ - { - release: '7.4.0', - version: 20, - previous: 19, - install: [ - `DROP INDEX ${schema}.job_singletonKey`, - `DROP INDEX ${schema}.job_singleton_queue`, - `CREATE UNIQUE INDEX job_singletonKey ON ${schema}.job (name, singletonKey) WHERE state < 'completed' AND singletonOn IS NULL AND NOT singletonKey LIKE '\\_\\_pgboss\\_\\_singleton\\_queue%'`, - `CREATE UNIQUE INDEX job_singleton_queue ON ${schema}.job (name, singletonKey) WHERE state < 'active' AND singletonOn IS NULL AND singletonKey LIKE '\\_\\_pgboss\\_\\_singleton\\_queue%'` - ], - uninstall: [ - `DROP INDEX ${schema}.job_singletonKey`, - `DROP INDEX ${schema}.job_singleton_queue`, - `CREATE UNIQUE INDEX job_singletonKey ON ${schema}.job (name, singletonKey) WHERE state < 'completed' AND singletonOn IS NULL AND NOT singletonKey = '__pgboss__singleton_queue'`, - `CREATE UNIQUE INDEX 
job_singleton_queue ON ${schema}.job (name, singletonKey) WHERE state < 'active' AND singletonOn IS NULL AND singletonKey = '__pgboss__singleton_queue'` - ] - }, - { - release: '7.0.0', - version: 19, - previous: 18, - install: [ - `CREATE TABLE ${schema}.subscription ( - event text not null, - name text not null, - created_on timestamp with time zone not null default now(), - updated_on timestamp with time zone not null default now(), - PRIMARY KEY(event, name) - )` - ], - uninstall: [ - `DROP TABLE ${schema}.subscription` - ] - }, - { - release: '6.1.1', - version: 18, - previous: 17, - install: [ - `ALTER TABLE ${schema}.job ALTER COLUMN on_complete SET DEFAULT false` - ] - }, - { - release: '6.0.0', - version: 17, - previous: 16, - install: [ - `DROP INDEX ${schema}.job_singletonKey`, - `CREATE UNIQUE INDEX job_singletonKey ON ${schema}.job (name, singletonKey) WHERE state < 'completed' AND singletonOn IS NULL AND NOT singletonKey = '__pgboss__singleton_queue'`, - `CREATE UNIQUE INDEX job_singleton_queue ON ${schema}.job (name, singletonKey) WHERE state < 'active' AND singletonOn IS NULL AND singletonKey = '__pgboss__singleton_queue'`, - `CREATE INDEX IF NOT EXISTS job_fetch ON ${schema}.job (name text_pattern_ops, startAfter) WHERE state < 'active'`, - `ALTER TABLE ${schema}.job ADD output jsonb`, - `ALTER TABLE ${schema}.archive ADD output jsonb`, - `ALTER TABLE ${schema}.job ALTER COLUMN on_complete SET DEFAULT false`, - `ALTER TABLE ${schema}.job ALTER COLUMN keepuntil SET DEFAULT now() + interval '14 days'` - ], - uninstall: [ - `DROP INDEX ${schema}.job_fetch`, - `DROP INDEX ${schema}.job_singleton_queue`, - `DROP INDEX ${schema}.job_singletonKey`, - `CREATE UNIQUE INDEX job_singletonKey ON ${schema}.job (name, singletonKey) WHERE state < 'completed' AND singletonOn IS NULL`, - `ALTER TABLE ${schema}.job DROP COLUMN output`, - `ALTER TABLE ${schema}.archive DROP COLUMN output`, - `ALTER TABLE ${schema}.job ALTER COLUMN on_complete SET DEFAULT true`, - 
`ALTER TABLE ${schema}.job ALTER COLUMN keepuntil SET DEFAULT now() + interval '30 days'` - ] - }, - { - release: '5.2.0', - version: 16, - previous: 15, - install: [ - `ALTER TABLE ${schema}.job ADD on_complete boolean`, - `UPDATE ${schema}.job SET on_complete = true`, - `ALTER TABLE ${schema}.job ALTER COLUMN on_complete SET DEFAULT true`, - `ALTER TABLE ${schema}.job ALTER COLUMN on_complete SET NOT NULL`, - `ALTER TABLE ${schema}.archive ADD on_complete boolean` - ], - uninstall: [ - `ALTER TABLE ${schema}.job DROP COLUMN on_complete`, - `ALTER TABLE ${schema}.archive DROP COLUMN on_complete` - ] - }, - { - release: '5.0.6', - version: 15, - previous: 14, - install: [ - `ALTER TABLE ${schema}.version ADD cron_on timestamp with time zone` - ], - uninstall: [ - `ALTER TABLE ${schema}.version DROP COLUMN cron_on` - ] - }, - { - release: '5.0.0', - version: 14, - previous: 13, - install: [ - `ALTER TABLE ${schema}.version ADD maintained_on timestamp with time zone` - ], - uninstall: [ - `ALTER TABLE ${schema}.version DROP COLUMN maintained_on` - ] - } ] } diff --git a/src/plans.js b/src/plans.js index 73dc0014..548bc8dd 100644 --- a/src/plans.js +++ b/src/plans.js @@ -1,22 +1,22 @@ -const assert = require('assert') +const DEFAULT_SCHEMA = 'pgboss' +const MIGRATE_RACE_MESSAGE = 'division by zero' +const CREATE_RACE_MESSAGE = 'already exists' -const states = { +const JOB_STATES = Object.freeze({ created: 'created', retry: 'retry', active: 'active', completed: 'completed', - expired: 'expired', cancelled: 'cancelled', failed: 'failed' -} - -const DEFAULT_SCHEMA = 'pgboss' -const COMPLETION_JOB_PREFIX = `__state__${states.completed}__` -const SINGLETON_QUEUE_KEY = '__pgboss__singleton_queue' -const SINGLETON_QUEUE_KEY_ESCAPED = SINGLETON_QUEUE_KEY.replace(/_/g, '\\_') +}) -const MIGRATE_RACE_MESSAGE = 'division by zero' -const CREATE_RACE_MESSAGE = 'already exists' +const QUEUE_POLICIES = Object.freeze({ + standard: 'standard', + short: 'short', + singleton: 
'singleton', + stately: 'stately' +}) module.exports = { create, @@ -28,7 +28,9 @@ module.exports = { completeJobs, cancelJobs, resumeJobs, - failJobs, + deleteJobs, + failJobsById, + failJobsByTimeout, insertJob, insertJobs, getTime, @@ -38,62 +40,53 @@ module.exports = { subscribe, unsubscribe, getQueuesForEvent, - expire, archive, - purge, + drop, countStates, + updateQueue, + createQueue, deleteQueue, - deleteAllQueues, - clearStorage, + getQueues, + getQueueByName, getQueueSize, - getMaintenanceTime, - setMaintenanceTime, - getCronTime, - setCronTime, + purgeQueue, + clearStorage, + trySetMaintenanceTime, + trySetMonitorTime, + trySetCronTime, locked, assertMigration, getArchivedJobById, getJobById, - states: { ...states }, - COMPLETION_JOB_PREFIX, - SINGLETON_QUEUE_KEY, + QUEUE_POLICIES, + JOB_STATES, MIGRATE_RACE_MESSAGE, CREATE_RACE_MESSAGE, DEFAULT_SCHEMA } -function locked (schema, query) { - if (Array.isArray(query)) { - query = query.join(';\n') - } - - return ` - BEGIN; - SET LOCAL statement_timeout = '30s'; - ${advisoryLock(schema)}; - ${query}; - COMMIT; - ` -} +const assert = require('assert') function create (schema, version) { const commands = [ createSchema(schema), - createVersionTable(schema), - createJobStateEnum(schema), - createJobTable(schema), - cloneJobTableForArchive(schema), - createScheduleTable(schema), - createSubscriptionTable(schema), - addIdIndexToArchive(schema), - addArchivedOnToArchive(schema), - addArchivedOnIndexToArchive(schema), - createIndexJobName(schema), - createIndexJobFetch(schema), - createIndexSingletonOn(schema), - createIndexSingletonKeyOn(schema), - createIndexSingletonKey(schema), - createIndexSingletonQueue(schema), + createEnumJobState(schema), + + createTableVersion(schema), + createTableQueue(schema), + createTableSchedule(schema), + createTableSubscription(schema), + + createTableJob(schema), + + createTableArchive(schema), + createPrimaryKeyArchive(schema), + createColumnArchiveArchivedOn(schema), + 
createIndexArchiveArchivedOn(schema), + + createQueueFunction(schema), + deleteQueueFunction(schema), + insertVersion(schema, version) ] @@ -106,183 +99,331 @@ function createSchema (schema) { ` } -function createVersionTable (schema) { +function createEnumJobState (schema) { + // ENUM definition order is important + // base type is numeric and first values are less than last values + return ` + CREATE TYPE ${schema}.job_state AS ENUM ( + '${JOB_STATES.created}', + '${JOB_STATES.retry}', + '${JOB_STATES.active}', + '${JOB_STATES.completed}', + '${JOB_STATES.cancelled}', + '${JOB_STATES.failed}' + ) + ` +} + +function createTableVersion (schema) { return ` CREATE TABLE ${schema}.version ( version int primary key, maintained_on timestamp with time zone, - cron_on timestamp with time zone + cron_on timestamp with time zone, + monitored_on timestamp with time zone ) ` } -function createJobStateEnum (schema) { - // ENUM definition order is important - // base type is numeric and first values are less than last values +function createTableQueue (schema) { return ` - CREATE TYPE ${schema}.job_state AS ENUM ( - '${states.created}', - '${states.retry}', - '${states.active}', - '${states.completed}', - '${states.expired}', - '${states.cancelled}', - '${states.failed}' + CREATE TABLE ${schema}.queue ( + name text, + policy text, + retry_limit int, + retry_delay int, + retry_backoff bool, + expire_seconds int, + retention_minutes int, + dead_letter text REFERENCES ${schema}.queue (name), + partition_name text, + created_on timestamp with time zone not null default now(), + updated_on timestamp with time zone not null default now(), + PRIMARY KEY (name) + ) + ` +} + +function createTableSchedule (schema) { + return ` + CREATE TABLE ${schema}.schedule ( + name text REFERENCES ${schema}.queue ON DELETE CASCADE, + cron text not null, + timezone text, + data jsonb, + options jsonb, + created_on timestamp with time zone not null default now(), + updated_on timestamp with time zone 
not null default now(), + PRIMARY KEY (name) + ) + ` +} + +function createTableSubscription (schema) { + return ` + CREATE TABLE ${schema}.subscription ( + event text not null, + name text not null REFERENCES ${schema}.queue ON DELETE CASCADE, + created_on timestamp with time zone not null default now(), + updated_on timestamp with time zone not null default now(), + PRIMARY KEY(event, name) ) ` } -function createJobTable (schema) { +function createTableJob (schema) { return ` CREATE TABLE ${schema}.job ( - id uuid primary key not null default gen_random_uuid(), + id uuid not null default gen_random_uuid(), name text not null, priority integer not null default(0), data jsonb, - state ${schema}.job_state not null default('${states.created}'), - retryLimit integer not null default(0), - retryCount integer not null default(0), - retryDelay integer not null default(0), - retryBackoff boolean not null default false, - startAfter timestamp with time zone not null default now(), - startedOn timestamp with time zone, - singletonKey text, - singletonOn timestamp without time zone, - expireIn interval not null default interval '15 minutes', - createdOn timestamp with time zone not null default now(), - completedOn timestamp with time zone, - keepUntil timestamp with time zone NOT NULL default now() + interval '14 days', - on_complete boolean not null default false, - output jsonb - ) + state ${schema}.job_state not null default('${JOB_STATES.created}'), + retry_limit integer not null default(0), + retry_count integer not null default(0), + retry_delay integer not null default(0), + retry_backoff boolean not null default false, + start_after timestamp with time zone not null default now(), + started_on timestamp with time zone, + singleton_key text, + singleton_on timestamp without time zone, + expire_in interval not null default interval '15 minutes', + created_on timestamp with time zone not null default now(), + completed_on timestamp with time zone, + keep_until timestamp 
with time zone NOT NULL default now() + interval '14 days', + output jsonb, + dead_letter text, + policy text + ) PARTITION BY LIST (name) ` } -function cloneJobTableForArchive (schema) { - return `CREATE TABLE ${schema}.archive (LIKE ${schema}.job)` +const baseJobColumns = 'id, name, data, EXTRACT(epoch FROM expire_in) as "expireInSeconds"' +const allJobColumns = `${baseJobColumns}, + policy, + state, + priority, + retry_limit as "retryLimit", + retry_count as "retryCount", + retry_delay as "retryDelay", + retry_backoff as "retryBackoff", + start_after as "startAfter", + started_on as "startedOn", + singleton_key as "singletonKey", + singleton_on as "singletonOn", + expire_in as "expireIn", + created_on as "createdOn", + completed_on as "completedOn", + keep_until as "keepUntil", + dead_letter as "deadLetter", + output +` + +function createQueueFunction (schema) { + return ` + CREATE FUNCTION ${schema}.create_queue(queue_name text, options json) + RETURNS VOID AS + $$ + DECLARE + table_name varchar := 'j' || encode(sha224(queue_name::bytea), 'hex'); + BEGIN + + INSERT INTO ${schema}.queue ( + name, + policy, + retry_limit, + retry_delay, + retry_backoff, + expire_seconds, + retention_minutes, + dead_letter, + partition_name + ) + VALUES ( + queue_name, + options->>'policy', + (options->>'retryLimit')::int, + (options->>'retryDelay')::int, + (options->>'retryBackoff')::bool, + (options->>'expireInSeconds')::int, + (options->>'retentionMinutes')::int, + options->>'deadLetter', + table_name + ); + + EXECUTE format('CREATE TABLE ${schema}.%I (LIKE ${schema}.job INCLUDING DEFAULTS)', table_name); + + EXECUTE format('${formatPartitionCommand(createPrimaryKeyJob(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createQueueForeignKeyJob(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createQueueForeignKeyJobDeadLetter(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createIndexJobPolicyShort(schema))}', table_name); 
+ EXECUTE format('${formatPartitionCommand(createIndexJobPolicySingleton(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createIndexJobPolicyStately(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createIndexJobThrottle(schema))}', table_name); + EXECUTE format('${formatPartitionCommand(createIndexJobFetch(schema))}', table_name); + + EXECUTE format('ALTER TABLE ${schema}.%I ADD CONSTRAINT cjc CHECK (name=%L)', table_name, queue_name); + EXECUTE format('ALTER TABLE ${schema}.job ATTACH PARTITION ${schema}.%I FOR VALUES IN (%L)', table_name, queue_name); + END; + $$ + LANGUAGE plpgsql; + ` } -function addArchivedOnToArchive (schema) { - return `ALTER TABLE ${schema}.archive ADD archivedOn timestamptz NOT NULL DEFAULT now()` +function formatPartitionCommand (command) { + return command.replace('.job', '.%1$I').replace('job_i', '%1$s_i').replaceAll('\'', '\'\'') } -function addArchivedOnIndexToArchive (schema) { - return `CREATE INDEX archive_archivedon_idx ON ${schema}.archive(archivedon)` +function deleteQueueFunction (schema) { + return ` + CREATE FUNCTION ${schema}.delete_queue(queue_name text) + RETURNS VOID AS + $$ + DECLARE + table_name varchar; + BEGIN + WITH deleted as ( + DELETE FROM ${schema}.queue + WHERE name = queue_name + RETURNING partition_name + ) + SELECT partition_name from deleted INTO table_name; + + EXECUTE format('DROP TABLE IF EXISTS ${schema}.%I', table_name); + END; + $$ + LANGUAGE plpgsql; + ` } -function addIdIndexToArchive (schema) { - return `CREATE INDEX archive_id_idx ON ${schema}.archive(id)` +function createQueue (schema) { + return `SELECT ${schema}.create_queue($1, $2)` } -function setMaintenanceTime (schema) { - return `UPDATE ${schema}.version SET maintained_on = now()` +function deleteQueue (schema) { + return `SELECT ${schema}.delete_queue($1)` } -function getMaintenanceTime (schema) { - return `SELECT maintained_on, EXTRACT( EPOCH FROM (now() - maintained_on) ) seconds_ago FROM 
${schema}.version` +function createPrimaryKeyJob (schema) { + return `ALTER TABLE ${schema}.job ADD PRIMARY KEY (name, id)` } -function setCronTime (schema, time) { - time = time || 'now()' - return `UPDATE ${schema}.version SET cron_on = ${time}` +function createQueueForeignKeyJob (schema) { + return `ALTER TABLE ${schema}.job ADD CONSTRAINT q_fkey FOREIGN KEY (name) REFERENCES ${schema}.queue (name) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED` } -function getCronTime (schema) { - return `SELECT cron_on, EXTRACT( EPOCH FROM (now() - cron_on) ) seconds_ago FROM ${schema}.version` +function createQueueForeignKeyJobDeadLetter (schema) { + return `ALTER TABLE ${schema}.job ADD CONSTRAINT dlq_fkey FOREIGN KEY (dead_letter) REFERENCES ${schema}.queue (name) ON DELETE RESTRICT DEFERRABLE INITIALLY DEFERRED` } -function deleteQueue (schema, options = {}) { - options.before = options.before || states.active - assert(options.before in states, `${options.before} is not a valid state`) - return `DELETE FROM ${schema}.job WHERE name = $1 and state < '${options.before}'` +function createPrimaryKeyArchive (schema) { + return `ALTER TABLE ${schema}.archive ADD PRIMARY KEY (name, id)` } -function deleteAllQueues (schema, options = {}) { - options.before = options.before || states.active - assert(options.before in states, `${options.before} is not a valid state`) - return `DELETE FROM ${schema}.job WHERE state < '${options.before}'` +function createIndexJobPolicyShort (schema) { + return `CREATE UNIQUE INDEX job_i1 ON ${schema}.job (name, COALESCE(singleton_key, '')) WHERE state = '${JOB_STATES.created}' AND policy = '${QUEUE_POLICIES.short}';` } -function clearStorage (schema) { - return `TRUNCATE ${schema}.job, ${schema}.archive` +function createIndexJobPolicySingleton (schema) { + return `CREATE UNIQUE INDEX job_i2 ON ${schema}.job (name, COALESCE(singleton_key, '')) WHERE state = '${JOB_STATES.active}' AND policy = '${QUEUE_POLICIES.singleton}'` } -function getQueueSize 
(schema, options = {}) { - options.before = options.before || states.active - assert(options.before in states, `${options.before} is not a valid state`) - return `SELECT count(*) as count FROM ${schema}.job WHERE name = $1 AND state < '${options.before}'` +function createIndexJobPolicyStately (schema) { + return `CREATE UNIQUE INDEX job_i3 ON ${schema}.job (name, state, COALESCE(singleton_key, '')) WHERE state <= '${JOB_STATES.active}' AND policy = '${QUEUE_POLICIES.stately}'` } -function createIndexSingletonKey (schema) { - // anything with singletonKey means "only 1 job can be queued or active at a time" - return ` - CREATE UNIQUE INDEX job_singletonKey ON ${schema}.job (name, singletonKey) WHERE state < '${states.completed}' AND singletonOn IS NULL AND NOT singletonKey LIKE '${SINGLETON_QUEUE_KEY_ESCAPED}%' - ` +function createIndexJobThrottle (schema) { + return `CREATE UNIQUE INDEX job_i4 ON ${schema}.job (name, singleton_on, COALESCE(singleton_key, '')) WHERE state <> '${JOB_STATES.cancelled}' AND singleton_on IS NOT NULL` } -function createIndexSingletonQueue (schema) { - // "singleton queue" means "only 1 job can be queued at a time" - return ` - CREATE UNIQUE INDEX job_singleton_queue ON ${schema}.job (name, singletonKey) WHERE state < '${states.active}' AND singletonOn IS NULL AND singletonKey LIKE '${SINGLETON_QUEUE_KEY_ESCAPED}%' - ` +function createIndexJobFetch (schema) { + return `CREATE INDEX job_i5 ON ${schema}.job (name, start_after) INCLUDE (priority, created_on, id) WHERE state < '${JOB_STATES.active}'` } -function createIndexSingletonOn (schema) { - // anything with singletonOn means "only 1 job within this time period, queued, active or completed" - return ` - CREATE UNIQUE INDEX job_singletonOn ON ${schema}.job (name, singletonOn) WHERE state < '${states.expired}' AND singletonKey IS NULL - ` +function createTableArchive (schema) { + return `CREATE TABLE ${schema}.archive (LIKE ${schema}.job)` } -function createIndexSingletonKeyOn (schema) { 
- // anything with both singletonOn and singletonKey means "only 1 job within this time period with this key, queued, active or completed" - return ` - CREATE UNIQUE INDEX job_singletonKeyOn ON ${schema}.job (name, singletonOn, singletonKey) WHERE state < '${states.expired}' - ` +function createColumnArchiveArchivedOn (schema) { + return `ALTER TABLE ${schema}.archive ADD archived_on timestamptz NOT NULL DEFAULT now()` } -function createIndexJobName (schema) { - return ` - CREATE INDEX job_name ON ${schema}.job (name text_pattern_ops) - ` +function createIndexArchiveArchivedOn (schema) { + return `CREATE INDEX archive_i1 ON ${schema}.archive(archived_on)` } -function createIndexJobFetch (schema) { +function trySetMaintenanceTime (schema) { + return trySetTimestamp(schema, 'maintained_on') +} + +function trySetMonitorTime (schema) { + return trySetTimestamp(schema, 'monitored_on') +} + +function trySetCronTime (schema) { + return trySetTimestamp(schema, 'cron_on') +} + +function trySetTimestamp (schema, column) { return ` - CREATE INDEX job_fetch ON ${schema}.job (name text_pattern_ops, startAfter) WHERE state < '${states.active}' + UPDATE ${schema}.version SET ${column} = now() + WHERE EXTRACT( EPOCH FROM (now() - COALESCE(${column}, now() - interval '1 week') ) ) > $1 + RETURNING true ` } -function createScheduleTable (schema) { +function updateQueue (schema) { return ` - CREATE TABLE ${schema}.schedule ( - name text primary key, - cron text not null, - timezone text, - data jsonb, - options jsonb, - created_on timestamp with time zone not null default now(), - updated_on timestamp with time zone not null default now() - ) + UPDATE ${schema}.queue SET + policy = COALESCE($2, policy), + retry_limit = COALESCE($3, retry_limit), + retry_delay = COALESCE($4, retry_delay), + retry_backoff = COALESCE($5, retry_backoff), + expire_seconds = COALESCE($6, expire_seconds), + retention_minutes = COALESCE($7, retention_minutes), + dead_letter = COALESCE($8, dead_letter), + 
updated_on = now() + WHERE name = $1 ` } -function createSubscriptionTable (schema) { +function getQueues (schema) { return ` - CREATE TABLE ${schema}.subscription ( - event text not null, - name text not null, - created_on timestamp with time zone not null default now(), - updated_on timestamp with time zone not null default now(), - PRIMARY KEY(event, name) - ) - ` + SELECT + name, + policy, + retry_limit as "retryLimit", + retry_delay as "retryDelay", + retry_backoff as "retryBackoff", + expire_seconds as "expireInSeconds", + retention_minutes as "retentionMinutes", + dead_letter as "deadLetter", + created_on as "createdOn", + updated_on as "updatedOn" + FROM ${schema}.queue + ` +} + +function getQueueByName (schema) { + return `${getQueues(schema)} WHERE name = $1` +} + +function purgeQueue (schema) { + return `DELETE from ${schema}.job WHERE name = $1 and state < '${JOB_STATES.active}'` +} + +function clearStorage (schema) { + return `TRUNCATE ${schema}.job, ${schema}.archive` +} + +function getQueueSize (schema, options = {}) { + options.before = options.before || JOB_STATES.active + assert(options.before in JOB_STATES, `${options.before} is not a valid state`) + return `SELECT count(*) as count FROM ${schema}.job WHERE name = $1 AND state < '${options.before}'` } function getSchedules (schema) { - return ` - SELECT * FROM ${schema}.schedule - ` + return `SELECT * FROM ${schema}.schedule` } function schedule (schema) { @@ -351,180 +492,132 @@ function insertVersion (schema, version) { } function fetchNextJob (schema) { - return (includeMetadata, enforceSingletonQueueActiveLimit) => ` - WITH nextJob as ( + return ({ includeMetadata, priority = true } = {}) => ` + WITH next as ( SELECT id - FROM ${schema}.job j - WHERE state < '${states.active}' - AND name LIKE $1 - AND startAfter < now() - ${enforceSingletonQueueActiveLimit - ? 
`AND ( - CASE - WHEN singletonKey IS NOT NULL - AND singletonKey LIKE '${SINGLETON_QUEUE_KEY_ESCAPED}%' - THEN NOT EXISTS ( - SELECT 1 - FROM ${schema}.job active_job - WHERE active_job.state = '${states.active}' - AND active_job.name = j.name - AND active_job.singletonKey = j.singletonKey - LIMIT 1 - ) - ELSE - true - END - )` - : ''} - ORDER BY priority desc, createdOn, id + FROM ${schema}.job + WHERE name = $1 + AND state < '${JOB_STATES.active}' + AND start_after < now() + ORDER BY ${priority && 'priority desc, '} created_on, id LIMIT $2 FOR UPDATE SKIP LOCKED ) UPDATE ${schema}.job j SET - state = '${states.active}', - startedOn = now(), - retryCount = CASE WHEN state = '${states.retry}' THEN retryCount + 1 ELSE retryCount END - FROM nextJob - WHERE j.id = nextJob.id - RETURNING ${includeMetadata ? 'j.*' : 'j.id, name, data'}, EXTRACT(epoch FROM expireIn) as expire_in_seconds + state = '${JOB_STATES.active}', + started_on = now(), + retry_count = CASE WHEN started_on IS NOT NULL THEN retry_count + 1 ELSE retry_count END + FROM next + WHERE name = $1 AND j.id = next.id + RETURNING j.${includeMetadata ? allJobColumns : baseJobColumns} ` } -function buildJsonCompletionObject (withResponse) { - // job completion contract - return `jsonb_build_object( - 'request', jsonb_build_object('id', id, 'name', name, 'data', data), - 'response', ${withResponse ? 
'$2::jsonb' : 'null'}, - 'state', state, - 'retryCount', retryCount, - 'createdOn', createdOn, - 'startedOn', startedOn, - 'completedOn', completedOn, - 'failed', CASE WHEN state = '${states.completed}' THEN false ELSE true END - )` -} - -const retryCompletedOnCase = `CASE - WHEN retryCount < retryLimit - THEN NULL - ELSE now() - END` - -const retryStartAfterCase = `CASE - WHEN retryCount = retryLimit THEN startAfter - WHEN NOT retryBackoff THEN now() + retryDelay * interval '1' - ELSE now() + - ( - retryDelay * 2 ^ LEAST(16, retryCount + 1) / 2 - + - retryDelay * 2 ^ LEAST(16, retryCount + 1) / 2 * random() - ) - * interval '1' - END` - -const keepUntilInheritance = 'keepUntil + (keepUntil - startAfter)' - function completeJobs (schema) { return ` WITH results AS ( UPDATE ${schema}.job - SET completedOn = now(), - state = '${states.completed}', - output = $2::jsonb - WHERE id IN (SELECT UNNEST($1::uuid[])) - AND state = '${states.active}' + SET completed_on = now(), + state = '${JOB_STATES.completed}', + output = $3::jsonb + WHERE name = $1 + AND id IN (SELECT UNNEST($2::uuid[])) + AND state = '${JOB_STATES.active}' RETURNING * - ), completion_jobs as ( - INSERT INTO ${schema}.job (name, data, keepUntil) - SELECT - '${COMPLETION_JOB_PREFIX}' || name, - ${buildJsonCompletionObject(true)}, - ${keepUntilInheritance} - FROM results - WHERE NOT name LIKE '${COMPLETION_JOB_PREFIX}%' - AND on_complete ) SELECT COUNT(*) FROM results ` } -function failJobs (schema) { +function failJobsById (schema) { + const where = `name = $1 AND id IN (SELECT UNNEST($2::uuid[])) AND state < '${JOB_STATES.completed}'` + const output = '$3::jsonb' + + return failJobs(schema, where, output) +} + +function failJobsByTimeout (schema) { + const where = `state = '${JOB_STATES.active}' AND (started_on + expire_in) < now()` + const output = '\'{ "value": { "message": "job failed by timeout in active state" } }\'::jsonb' + return failJobs(schema, where, output) +} + +function failJobs (schema, 
where, output) { return ` WITH results AS ( - UPDATE ${schema}.job - SET state = CASE - WHEN retryCount < retryLimit - THEN '${states.retry}'::${schema}.job_state - ELSE '${states.failed}'::${schema}.job_state + UPDATE ${schema}.job SET + state = CASE + WHEN retry_count < retry_limit THEN '${JOB_STATES.retry}'::${schema}.job_state + ELSE '${JOB_STATES.failed}'::${schema}.job_state END, - completedOn = ${retryCompletedOnCase}, - startAfter = ${retryStartAfterCase}, - output = $2::jsonb - WHERE id IN (SELECT UNNEST($1::uuid[])) - AND state < '${states.completed}' + completed_on = CASE + WHEN retry_count < retry_limit THEN NULL + ELSE now() + END, + start_after = CASE + WHEN retry_count = retry_limit THEN start_after + WHEN NOT retry_backoff THEN now() + retry_delay * interval '1' + ELSE now() + ( + retry_delay * 2 ^ LEAST(16, retry_count + 1) / 2 + + retry_delay * 2 ^ LEAST(16, retry_count + 1) / 2 * random() + ) * interval '1' + END, + output = ${output} + WHERE ${where} RETURNING * - ), completion_jobs as ( - INSERT INTO ${schema}.job (name, data, keepUntil) + ), dlq_jobs as ( + INSERT INTO ${schema}.job (name, data, output, retry_limit, keep_until) SELECT - '${COMPLETION_JOB_PREFIX}' || name, - ${buildJsonCompletionObject(true)}, - ${keepUntilInheritance} + dead_letter, + data, + output, + retry_limit, + keep_until + (keep_until - start_after) FROM results - WHERE state = '${states.failed}' - AND NOT name LIKE '${COMPLETION_JOB_PREFIX}%' - AND on_complete + WHERE state = '${JOB_STATES.failed}' + AND dead_letter IS NOT NULL + AND NOT name = dead_letter ) SELECT COUNT(*) FROM results ` } -function expire (schema) { +function cancelJobs (schema) { return ` - WITH results AS ( + with results as ( UPDATE ${schema}.job - SET state = CASE - WHEN retryCount < retryLimit THEN '${states.retry}'::${schema}.job_state - ELSE '${states.expired}'::${schema}.job_state - END, - completedOn = ${retryCompletedOnCase}, - startAfter = ${retryStartAfterCase} - WHERE state = 
'${states.active}' - AND (startedOn + expireIn) < now() - RETURNING * + SET completed_on = now(), + state = '${JOB_STATES.cancelled}' + WHERE name = $1 + AND id IN (SELECT UNNEST($2::uuid[])) + AND state < '${JOB_STATES.completed}' + RETURNING 1 ) - INSERT INTO ${schema}.job (name, data, keepUntil) - SELECT - '${COMPLETION_JOB_PREFIX}' || name, - ${buildJsonCompletionObject()}, - ${keepUntilInheritance} - FROM results - WHERE state = '${states.expired}' - AND NOT name LIKE '${COMPLETION_JOB_PREFIX}%' - AND on_complete + SELECT COUNT(*) from results ` } -function cancelJobs (schema) { +function resumeJobs (schema) { return ` with results as ( UPDATE ${schema}.job - SET completedOn = now(), - state = '${states.cancelled}' - WHERE id IN (SELECT UNNEST($1::uuid[])) - AND state < '${states.completed}' + SET completed_on = NULL, + state = '${JOB_STATES.created}' + WHERE name = $1 + AND id IN (SELECT UNNEST($2::uuid[])) + AND state = '${JOB_STATES.cancelled}' RETURNING 1 ) SELECT COUNT(*) from results ` } -function resumeJobs (schema) { +function deleteJobs (schema) { return ` with results as ( - UPDATE ${schema}.job - SET completedOn = NULL, - state = '${states.created}' - WHERE id IN (SELECT UNNEST($1::uuid[])) + DELETE FROM ${schema}.job + WHERE name = $1 + AND id IN (SELECT UNNEST($2::uuid[])) RETURNING 1 ) SELECT COUNT(*) from results @@ -536,68 +629,73 @@ function insertJob (schema) { INSERT INTO ${schema}.job ( id, name, - priority, - state, - retryLimit, - startAfter, - expireIn, data, - singletonKey, - singletonOn, - retryDelay, - retryBackoff, - keepUntil, - on_complete + priority, + start_after, + singleton_key, + singleton_on, + dead_letter, + expire_in, + keep_until, + retry_limit, + retry_delay, + retry_backoff, + policy ) SELECT id, - name, - priority, - state, - retryLimit, - startAfter, - expireIn, + j.name, data, - singletonKey, - singletonOn, - retryDelay, - retryBackoff, - keepUntil, - on_complete + priority, + start_after, + singleton_key, + 
singleton_on, + COALESCE(j.dead_letter, q.dead_letter) as dead_letter, + CASE + WHEN expire_in IS NOT NULL THEN CAST(expire_in as interval) + WHEN q.expire_seconds IS NOT NULL THEN q.expire_seconds * interval '1s' + WHEN expire_in_default IS NOT NULL THEN CAST(expire_in_default as interval) + ELSE interval '15 minutes' + END as expire_in, + CASE + WHEN right(keep_until, 1) = 'Z' THEN CAST(keep_until as timestamp with time zone) + ELSE start_after + CAST(COALESCE(keep_until, (q.retention_minutes * 60)::text, keep_until_default, '14 days') as interval) + END as keep_until, + COALESCE(j.retry_limit, q.retry_limit, retry_limit_default, 2) as retry_limit, + CASE + WHEN COALESCE(j.retry_backoff, q.retry_backoff, retry_backoff_default, false) + THEN GREATEST(COALESCE(j.retry_delay, q.retry_delay, retry_delay_default), 1) + ELSE COALESCE(j.retry_delay, q.retry_delay, retry_delay_default, 0) + END as retry_delay, + COALESCE(j.retry_backoff, q.retry_backoff, retry_backoff_default, false) as retry_backoff, + q.policy FROM - ( SELECT *, - CASE - WHEN right(keepUntilValue, 1) = 'Z' THEN CAST(keepUntilValue as timestamp with time zone) - ELSE startAfter + CAST(COALESCE(keepUntilValue,'0') as interval) - END as keepUntil - FROM - ( SELECT *, + ( SELECT + COALESCE($1::uuid, gen_random_uuid()) as id, + $2 as name, + $3::jsonb as data, + COALESCE($4::int, 0) as priority, + CASE + WHEN right($5, 1) = 'Z' THEN CAST($5 as timestamp with time zone) + ELSE now() + CAST(COALESCE($5,'0') as interval) + END as start_after, + $6 as singleton_key, CASE - WHEN right(startAfterValue, 1) = 'Z' THEN CAST(startAfterValue as timestamp with time zone) - ELSE now() + CAST(COALESCE(startAfterValue,'0') as interval) - END as startAfter - FROM - ( SELECT - $1::uuid as id, - $2::text as name, - $3::int as priority, - '${states.created}'::${schema}.job_state as state, - $4::int as retryLimit, - $5::text as startAfterValue, - CAST($6 as interval) as expireIn, - $7::jsonb as data, - $8::text as 
singletonKey, - CASE - WHEN $9::integer IS NOT NULL THEN 'epoch'::timestamp + '1 second'::interval * ($9 * floor((date_part('epoch', now()) + $10) / $9)) - ELSE NULL - END as singletonOn, - $11::int as retryDelay, - $12::bool as retryBackoff, - $13::text as keepUntilValue, - $14::boolean as on_complete - ) j1 - ) j2 - ) j3 + WHEN $7::integer IS NOT NULL THEN 'epoch'::timestamp + '1 second'::interval * ($7 * floor((date_part('epoch', now()) + $8) / $7)) + ELSE NULL + END as singleton_on, + $9 as dead_letter, + $10 as expire_in, + $11 as expire_in_default, + $12 as keep_until, + $13 as keep_until_default, + $14::int as retry_limit, + $15::int as retry_limit_default, + $16::int as retry_delay, + $17::int as retry_delay_default, + $18::bool as retry_backoff, + $19::bool as retry_backoff_default + ) j JOIN ${schema}.queue q ON j.name = q.name ON CONFLICT DO NOTHING RETURNING id ` @@ -605,79 +703,105 @@ function insertJob (schema) { function insertJobs (schema) { return ` + WITH defaults as ( + SELECT + $2 as expire_in, + $3 as keep_until, + $4::int as retry_limit, + $5::int as retry_delay, + $6::bool as retry_backoff + ) INSERT INTO ${schema}.job ( id, name, data, priority, - startAfter, - expireIn, - retryLimit, - retryDelay, - retryBackoff, - singletonKey, - keepUntil, - on_complete + start_after, + singleton_key, + dead_letter, + expire_in, + keep_until, + retry_limit, + retry_delay, + retry_backoff, + policy ) SELECT COALESCE(id, gen_random_uuid()) as id, - name, + j.name, data, COALESCE(priority, 0) as priority, - COALESCE("startAfter", now()) as startAfter, - COALESCE("expireInSeconds", 15 * 60) * interval '1s' as expireIn, - COALESCE("retryLimit", 0) as retryLimit, - COALESCE("retryDelay", 0) as retryDelay, - COALESCE("retryBackoff", false) as retryBackoff, - "singletonKey", - COALESCE("keepUntil", now() + interval '14 days') as keepUntil, - COALESCE("onComplete", false) as onComplete - FROM json_to_recordset($1) as x( - id uuid, - name text, - priority integer, 
- data jsonb, - "retryLimit" integer, - "retryDelay" integer, - "retryBackoff" boolean, - "startAfter" timestamp with time zone, - "singletonKey" text, - "expireInSeconds" integer, - "keepUntil" timestamp with time zone, - "onComplete" boolean - ) + j.start_after, + "singletonKey" as singleton_key, + COALESCE("deadLetter", q.dead_letter) as dead_letter, + CASE + WHEN "expireInSeconds" IS NOT NULL THEN "expireInSeconds" * interval '1s' + WHEN q.expire_seconds IS NOT NULL THEN q.expire_seconds * interval '1s' + WHEN defaults.expire_in IS NOT NULL THEN CAST(defaults.expire_in as interval) + ELSE interval '15 minutes' + END as expire_in, + CASE + WHEN "keepUntil" IS NOT NULL THEN "keepUntil" + ELSE COALESCE(j.start_after, now()) + CAST(COALESCE((q.retention_minutes * 60)::text, defaults.keep_until, '14 days') as interval) + END as keep_until, + COALESCE("retryLimit", q.retry_limit, defaults.retry_limit, 2), + CASE + WHEN COALESCE("retryBackoff", q.retry_backoff, defaults.retry_backoff, false) + THEN GREATEST(COALESCE("retryDelay", q.retry_delay, defaults.retry_delay), 1) + ELSE COALESCE("retryDelay", q.retry_delay, defaults.retry_delay, 0) + END as retry_delay, + COALESCE("retryBackoff", q.retry_backoff, defaults.retry_backoff, false) as retry_backoff, + q.policy + FROM ( + SELECT *, + CASE + WHEN right("startAfter", 1) = 'Z' THEN CAST("startAfter" as timestamp with time zone) + ELSE now() + CAST(COALESCE("startAfter",'0') as interval) + END as start_after + FROM json_to_recordset($1) as x ( + id uuid, + name text, + priority integer, + data jsonb, + "startAfter" text, + "retryLimit" integer, + "retryDelay" integer, + "retryBackoff" boolean, + "singletonKey" text, + "singletonOn" text, + "expireInSeconds" integer, + "keepUntil" timestamp with time zone, + "deadLetter" text + ) + ) j + JOIN ${schema}.queue q ON j.name = q.name, + defaults ON CONFLICT DO NOTHING ` } -function purge (schema, interval) { +function drop (schema, interval) { return ` DELETE FROM 
${schema}.archive - WHERE archivedOn < (now() - interval '${interval}') + WHERE archived_on < (now() - interval '${interval}') ` } function archive (schema, completedInterval, failedInterval = completedInterval) { + const columns = 'id, name, priority, data, state, retry_limit, retry_count, retry_delay, retry_backoff, start_after, started_on, singleton_key, singleton_on, expire_in, created_on, completed_on, keep_until, dead_letter, policy, output' + return ` WITH archived_rows AS ( DELETE FROM ${schema}.job - WHERE ( - state <> '${states.failed}' AND completedOn < (now() - interval '${completedInterval}') - ) - OR ( - state = '${states.failed}' AND completedOn < (now() - interval '${failedInterval}') - ) - OR ( - state < '${states.active}' AND keepUntil < now() - ) + WHERE (state <> '${JOB_STATES.failed}' AND completed_on < (now() - interval '${completedInterval}')) + OR (state = '${JOB_STATES.failed}' AND completed_on < (now() - interval '${failedInterval}')) + OR (state < '${JOB_STATES.active}' AND keep_until < now()) RETURNING * ) - INSERT INTO ${schema}.archive ( - id, name, priority, data, state, retryLimit, retryCount, retryDelay, retryBackoff, startAfter, startedOn, singletonKey, singletonOn, expireIn, createdOn, completedOn, keepUntil, on_complete, output - ) - SELECT - id, name, priority, data, state, retryLimit, retryCount, retryDelay, retryBackoff, startAfter, startedOn, singletonKey, singletonOn, expireIn, createdOn, completedOn, keepUntil, on_complete, output + INSERT INTO ${schema}.archive (${columns}) + SELECT ${columns} FROM archived_rows + ON CONFLICT DO NOTHING ` } @@ -689,9 +813,24 @@ function countStates (schema) { ` } -function advisoryLock (schema) { +function locked (schema, query) { + if (Array.isArray(query)) { + query = query.join(';\n') + } + + return ` + BEGIN; + SET LOCAL lock_timeout = '30s'; + SET LOCAL idle_in_transaction_session_timeout = '30s'; + ${advisoryLock(schema)}; + ${query}; + COMMIT; + ` +} + +function advisoryLock 
(schema, key) { return `SELECT pg_advisory_xact_lock( - ('x' || md5(current_database() || '.pgboss.${schema}'))::bit(64)::bigint + ('x' || encode(sha224((current_database() || '.pgboss.${schema}${key || ''}')::bytea), 'hex'))::bit(64)::bigint )` } @@ -701,13 +840,13 @@ function assertMigration (schema, version) { } function getJobById (schema) { - return getJobByTableAndId(schema, 'job') + return getJobByTableQueueId(schema, 'job') } function getArchivedJobById (schema) { - return getJobByTableAndId(schema, 'archive') + return getJobByTableQueueId(schema, 'archive') } -function getJobByTableAndId (schema, table) { - return `SELECT * From ${schema}.${table} WHERE id = $1` +function getJobByTableQueueId (schema, table) { + return `SELECT ${allJobColumns} FROM ${schema}.${table} WHERE name = $1 AND id = $2` } diff --git a/src/timekeeper.js b/src/timekeeper.js index ef989035..2c69497b 100644 --- a/src/timekeeper.js +++ b/src/timekeeper.js @@ -2,14 +2,12 @@ const EventEmitter = require('events') const plans = require('./plans') const cronParser = require('cron-parser') const Attorney = require('./attorney') -const pMap = require('p-map') -const queues = { - CRON: '__pgboss__cron', +const QUEUES = { SEND_IT: '__pgboss__send-it' } -const events = { +const EVENTS = { error: 'error', schedule: 'schedule' } @@ -25,14 +23,14 @@ class Timekeeper extends EventEmitter { this.cronMonitorIntervalMs = config.cronMonitorIntervalSeconds * 1000 this.clockSkew = 0 - this.events = events + this.events = EVENTS this.getTimeCommand = plans.getTime(config.schema) + this.getQueueCommand = plans.getQueueByName(config.schema) this.getSchedulesCommand = plans.getSchedules(config.schema) this.scheduleCommand = plans.schedule(config.schema) this.unscheduleCommand = plans.unschedule(config.schema) - this.getCronTimeCommand = plans.getCronTime(config.schema) - this.setCronTimeCommand = plans.setCronTime(config.schema) + this.trySetCronTimeCommand = plans.trySetCronTime(config.schema) 
this.functions = [ this.schedule, @@ -49,21 +47,25 @@ class Timekeeper extends EventEmitter { return } - // cache the clock skew from the db server + this.stopped = false + await this.cacheClockSkew() - await this.manager.work(queues.CRON, { newJobCheckIntervalSeconds: this.config.cronWorkerIntervalSeconds }, (job) => this.onCron(job)) - await this.manager.work(queues.SEND_IT, { newJobCheckIntervalSeconds: this.config.cronWorkerIntervalSeconds, teamSize: 50, teamConcurrency: 5 }, (job) => this.onSendIt(job)) + try { + await this.manager.createQueue(QUEUES.SEND_IT) + } catch {} - // uses sendDebounced() to enqueue a cron check - await this.checkSchedulesAsync() + const options = { + pollingIntervalSeconds: this.config.cronWorkerIntervalSeconds, + batchSize: 50 + } - // create monitoring interval to make sure cron hasn't crashed - this.cronMonitorInterval = setInterval(async () => await this.monitorCron(), this.cronMonitorIntervalMs) - // create monitoring interval to measure and adjust for drift in clock skew - this.skewMonitorInterval = setInterval(async () => await this.cacheClockSkew(), this.skewMonitorIntervalMs) + await this.manager.work(QUEUES.SEND_IT, options, (jobs) => this.manager.insert(jobs.map(i => i.data))) - this.stopped = false + setImmediate(() => this.onCron()) + + this.cronMonitorInterval = setInterval(async () => await this.onCron(), this.cronMonitorIntervalMs) + this.skewMonitorInterval = setInterval(async () => await this.cacheClockSkew(), this.skewMonitorIntervalMs) } async stop () { @@ -73,8 +75,7 @@ class Timekeeper extends EventEmitter { this.stopped = true - await this.manager.offWork(queues.CRON) - await this.manager.offWork(queues.SEND_IT) + await this.manager.offWork(QUEUES.SEND_IT) if (this.skewMonitorInterval) { clearInterval(this.skewMonitorInterval) @@ -87,22 +88,6 @@ class Timekeeper extends EventEmitter { } } - async monitorCron () { - try { - if (this.config.__test__force_cron_monitoring_error) { - throw new 
Error(this.config.__test__force_cron_monitoring_error) - } - - const { secondsAgo } = await this.getCronTime() - - if (secondsAgo > 60) { - await this.checkSchedulesAsync() - } - } catch (err) { - this.emit(this.events.error, err) - } - } - async cacheClockSkew () { let skew = 0 @@ -131,44 +116,39 @@ class Timekeeper extends EventEmitter { } } - async checkSchedulesAsync () { - const opts = { - retryLimit: 2, - retentionSeconds: 60, - onComplete: false - } - - await this.manager.sendDebounced(queues.CRON, null, opts, 60) - } - async onCron () { - if (this.stopped) return - try { - if (this.config.__test__throw_clock_monitoring) { - throw new Error(this.config.__test__throw_clock_monitoring) + if (this.stopped || this.timekeeping) return + + if (this.config.__test__force_cron_monitoring_error) { + throw new Error(this.config.__test__force_cron_monitoring_error) } - const items = await this.getSchedules() + this.timekeeping = true - const sending = items.filter(i => this.shouldSendIt(i.cron, i.timezone)) + const { rows } = await this.db.executeSql(this.trySetCronTimeCommand, [this.config.cronMonitorIntervalSeconds]) - if (sending.length && !this.stopped) { - await pMap(sending, it => this.send(it), { concurrency: 5 }) + if (rows.length === 1 && !this.stopped) { + await this.cron() } - - if (this.stopped) return - - // set last time cron was evaluated for downstream usage in cron monitoring - await this.setCronTime() } catch (err) { this.emit(this.events.error, err) + } finally { + this.timekeeping = false } + } + + async cron () { + const schedules = await this.getSchedules() - if (this.stopped) return + const scheduled = schedules + .filter(i => this.shouldSendIt(i.cron, i.timezone)) + .map(({ name, data, options }) => + ({ name: QUEUES.SEND_IT, data: { name, data, options }, options: { singletonKey: name, singletonSeconds: 60 } })) - // uses sendDebounced() to enqueue a cron check - await this.checkSchedulesAsync() + if (scheduled.length > 0 && !this.stopped) { + 
await this.manager.insert(scheduled) + } } shouldSendIt (cron, tz) { @@ -183,22 +163,6 @@ class Timekeeper extends EventEmitter { return prevDiff < 60 } - async send (job) { - const options = { - singletonKey: job.name, - singletonSeconds: 60, - onComplete: false - } - - await this.manager.send(queues.SEND_IT, job, options) - } - - async onSendIt (job) { - if (this.stopped) return - const { name, data, options } = job.data - await this.manager.send(name, data, options) - } - async getSchedules () { const { rows } = await this.db.executeSql(this.getSchedulesCommand) return rows @@ -209,35 +173,25 @@ class Timekeeper extends EventEmitter { cronParser.parseExpression(cron, { tz }) - // validation pre-check Attorney.checkSendArgs([name, data, options], this.config) const values = [name, cron, tz, data, options] - const result = await this.db.executeSql(this.scheduleCommand, values) + try { + await this.db.executeSql(this.scheduleCommand, values) + } catch (err) { + if (err.message.includes('foreign key')) { + err.message = `Queue ${name} not found` + } - return result ? result.rowCount : null + throw err + } } async unschedule (name) { - const result = await this.db.executeSql(this.unscheduleCommand, [name]) - return result ? result.rowCount : null - } - - async setCronTime () { - await this.db.executeSql(this.setCronTimeCommand) - } - - async getCronTime () { - const { rows } = await this.db.executeSql(this.getCronTimeCommand) - - let { cron_on: cronOn, seconds_ago: secondsAgo } = rows[0] - - secondsAgo = secondsAgo !== null ? 
parseFloat(secondsAgo) : 61 - - return { cronOn, secondsAgo } + await this.db.executeSql(this.unscheduleCommand, [name]) } } module.exports = Timekeeper -module.exports.QUEUES = queues +module.exports.QUEUES = QUEUES diff --git a/src/tools.js b/src/tools.js new file mode 100644 index 00000000..7a04c3dc --- /dev/null +++ b/src/tools.js @@ -0,0 +1,28 @@ +module.exports = { + delay +} + +function delay (ms, error) { + const { setTimeout } = require('timers/promises') + const ac = new AbortController() + + const promise = new Promise((resolve, reject) => { + setTimeout(ms, null, { signal: ac.signal }) + .then(() => { + if (error) { + reject(new Error(error)) + } else { + resolve() + } + }) + .catch(resolve) + }) + + promise.abort = () => { + if (!ac.signal.aborted) { + ac.abort() + } + } + + return promise +} diff --git a/src/worker.js b/src/worker.js index 386dea41..d245c392 100644 --- a/src/worker.js +++ b/src/worker.js @@ -1,4 +1,4 @@ -const delay = require('delay') +const { delay } = require('./tools') const WORKER_STATES = { created: 'created', @@ -34,7 +34,7 @@ class Worker { this.beenNotified = true if (this.loopDelayPromise) { - this.loopDelayPromise.clear() + this.loopDelayPromise.abort() } } @@ -74,7 +74,7 @@ class Worker { this.lastJobDuration = duration - if (!this.stopping && !this.beenNotified && duration < this.interval) { + if (!this.stopping && !this.beenNotified && (this.interval - duration > 500)) { this.loopDelayPromise = delay(this.interval - duration) await this.loopDelayPromise this.loopDelayPromise = null @@ -91,7 +91,7 @@ class Worker { this.state = WORKER_STATES.stopping if (this.loopDelayPromise) { - this.loopDelayPromise.clear() + this.loopDelayPromise.abort() } } } diff --git a/test/archiveTest.js b/test/archiveTest.js index 85ff88db..c45f54b4 100644 --- a/test/archiveTest.js +++ b/test/archiveTest.js @@ -1,12 +1,12 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') -const { states } = 
require('../src/plans') +const { delay } = require('../src/tools') +const { JOB_STATES } = require('../src/plans') describe('archive', function () { const defaults = { archiveCompletedAfterSeconds: 1, - maintenanceIntervalSeconds: 1 + supervise: true } it('should archive a completed job', async function () { @@ -15,15 +15,17 @@ describe('archive', function () { const queue = this.test.bossConfig.schema const jobId = await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.strictEqual(job.id, jobId) - await boss.complete(jobId) + await boss.complete(queue, jobId) - await delay(4000) + await delay(1000) - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + await boss.maintain() + + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) assert.strictEqual(jobId, archivedJob.id) assert.strictEqual(queue, archivedJob.name) @@ -35,15 +37,17 @@ describe('archive', function () { const queue = this.test.bossConfig.schema const jobId = await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.strictEqual(job.id, jobId) - await boss.complete(jobId) + await boss.complete(queue, jobId) + + await delay(1000) - await delay(4000) + await boss.maintain() - const archivedJob = await boss.getJobById(jobId) + const archivedJob = await boss.getJobById(queue, jobId, { includeArchive: true }) assert.strictEqual(jobId, archivedJob.id) assert.strictEqual(queue, archivedJob.name) @@ -56,9 +60,11 @@ describe('archive', function () { const jobId = await boss.send(queue, null, { retentionSeconds: 1 }) - await delay(7000) + await delay(1000) - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + await boss.maintain() + + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) assert.strictEqual(jobId, archivedJob.id) assert.strictEqual(queue, archivedJob.name) @@ -71,9 +77,11 @@ describe('archive', 
function () { const jobId = await boss.send(queue) - await delay(7000) + await delay(1000) + + await boss.maintain() - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) assert.strictEqual(jobId, archivedJob.id) assert.strictEqual(queue, archivedJob.name) @@ -87,29 +95,35 @@ describe('archive', function () { const failPayload = { someReason: 'nuna' } const jobId = await boss.send(queue, null, { retentionSeconds: 1 }) - await boss.fail(jobId, failPayload) - await delay(7000) + await boss.fail(queue, jobId, failPayload) - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + await delay(1000) + + await boss.maintain() + + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) assert.strictEqual(archivedJob, null) }) it('should archive a failed job', async function () { - const config = { ...this.test.bossConfig, maintenanceIntervalSeconds: 1, archiveFailedAfterSeconds: 1 } + const config = { ...this.test.bossConfig, archiveFailedAfterSeconds: 1 } const boss = this.test.boss = await helper.start(config) const queue = this.test.bossConfig.schema const failPayload = { someReason: 'nuna' } const jobId = await boss.send(queue, null, { retentionSeconds: 1 }) - await boss.fail(jobId, failPayload) - await delay(7000) + await boss.fail(queue, jobId, failPayload) + + await delay(1000) + + await boss.maintain() - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) assert.strictEqual(jobId, archivedJob.id) assert.strictEqual(queue, archivedJob.name) - assert.strictEqual(states.failed, archivedJob.state) + assert.strictEqual(JOB_STATES.failed, archivedJob.state) }) }) diff --git a/test/backgroundErrorTest.js b/test/backgroundErrorTest.js index fd1a14ff..f17291f0 100644 --- a/test/backgroundErrorTest.js +++ 
b/test/backgroundErrorTest.js @@ -1,78 +1,136 @@ const assert = require('assert') const PgBoss = require('../') -const delay = require('delay') +const { delay } = require('../src/tools') describe('background processing error handling', function () { it('maintenance error handling works', async function () { const defaults = { - monitorStateIntervalMinutes: 1, maintenanceIntervalSeconds: 1, - noScheduling: true, - __test__throw_maint: true + supervise: true, + __test__throw_maint: 'my maintenance error' } const config = { ...this.test.bossConfig, ...defaults } const boss = this.test.boss = new PgBoss(config) - return new Promise((resolve) => { - let resolved = false - - boss.on('error', () => { - if (!resolved) { - resolved = true - resolve() - } - }) + let errorCount = 0 - boss.start().then(() => {}) + boss.once('error', (error) => { + assert.strictEqual(error.message, config.__test__throw_maint) + errorCount++ }) + + await boss.start() + + await delay(3000) + + assert.strictEqual(errorCount, 1) + }) + + it('slow maintenance will back off loop interval', async function () { + const config = { + ...this.test.bossConfig, + maintenanceIntervalSeconds: 1, + supervise: true, + __test__delay_maintenance: 2000 + } + + const boss = this.test.boss = new PgBoss(config) + + let eventCount = 0 + + boss.on('maintenance', () => eventCount++) + + await boss.start() + + await delay(5000) + + assert.strictEqual(eventCount, 1) + }) + + it('slow monitoring will back off loop interval', async function () { + const config = { + ...this.test.bossConfig, + monitorStateIntervalSeconds: 1, + __test__delay_monitor: 2000 + } + + const boss = this.test.boss = new PgBoss(config) + + let eventCount = 0 + + boss.on('monitor-states', () => eventCount++) + + await boss.start() + + await delay(4000) + + assert.strictEqual(eventCount, 1) }) it('state monitoring error handling works', async function () { const defaults = { - monitorStateIntervalSeconds: 2, - maintenanceIntervalMinutes: 1, - 
noScheduling: true, - __test__throw_monitor: true + monitorStateIntervalSeconds: 1, + supervise: true, + __test__throw_monitor: 'my monitor error' } const config = { ...this.test.bossConfig, ...defaults } const boss = this.test.boss = new PgBoss(config) - return new Promise((resolve) => { - let resolved = false - - boss.on('error', () => { - if (!resolved) { - resolved = true - resolve() - } - }) + let errorCount = 0 - boss.start().then(() => {}) + boss.once('error', (error) => { + assert.strictEqual(error.message, config.__test__throw_monitor) + errorCount++ }) + + await boss.start() + + await delay(3000) + + assert.strictEqual(errorCount, 1) }) - it('clock monitoring error handling works', async function () { + it('shutdown monitoring error handling works', async function () { const config = { ...this.test.bossConfig, - clockMonitorIntervalSeconds: 1, - __test__throw_clock_monitoring: 'pg-boss mock error: clock monitoring' + __test__throw_shutdown: 'shutdown error' } - let errorCount = 0 - const boss = this.test.boss = new PgBoss(config) + let errorCount = 0 + boss.once('error', (error) => { - assert.strictEqual(error.message, config.__test__throw_clock_monitoring) + assert.strictEqual(error.message, config.__test__throw_shutdown) errorCount++ }) await boss.start() - await delay(8000) + await boss.stop({ wait: false }) + + await delay(1000) assert.strictEqual(errorCount, 1) }) + + it('shutdown error handling works', async function () { + const config = { + ...this.test.bossConfig, + __test__throw_stop_monitor: 'monitor error' + } + + const boss = this.test.boss = new PgBoss(config) + + await boss.start() + + try { + await boss.stop({ wait: false }) + assert(false) + } catch (err) { + assert(true) + } + }) }) diff --git a/test/cancelTest.js b/test/cancelTest.js index 6bff1d6d..b4ae2e4e 100644 --- a/test/cancelTest.js +++ b/test/cancelTest.js @@ -2,7 +2,7 @@ const assert = require('assert') const helper = require('./testHelper') describe('cancel', function () { - 
it('should reject missing id argument', async function () { + it('should reject missing arguments', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) try { @@ -14,52 +14,51 @@ describe('cancel', function () { }) it('should cancel a pending job', async function () { - const config = this.test.bossConfig - const boss = this.test.boss = await helper.start(config) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const jobId = await boss.send('will_cancel', null, { startAfter: 1 }) + const jobId = await boss.send(queue, null, { startAfter: 1 }) - await boss.cancel(jobId) + await boss.cancel(queue, jobId) - const job = await boss.getJobById(jobId) + const job = await boss.getJobById(queue, jobId) assert(job && job.state === 'cancelled') }) it('should not cancel a completed job', async function () { - const config = this.test.bossConfig - - const boss = this.test.boss = await helper.start(config) - - const queue = 'will_not_cancel' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - await boss.complete(job.id) + const completeResult = await boss.complete(queue, job.id) - const response = await boss.cancel(job.id) + assert.strictEqual(completeResult.affected, 1) - assert.strictEqual(response.updated, 0) + const cancelResult = await boss.cancel(queue, job.id) + + assert.strictEqual(cancelResult.affected, 0) }) it('should cancel a batch of jobs', async function () { - const queue = 'cancel-batch' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const boss = this.test.boss = await helper.start(this.test.bossConfig) const jobs = await Promise.all([ boss.send(queue), boss.send(queue), boss.send(queue) ]) - await 
boss.cancel(jobs) + await boss.cancel(queue, jobs) }) it('should cancel a pending job with custom connection', async function () { - const config = this.test.bossConfig - const boss = this.test.boss = await helper.start(config) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema let called = false const _db = await helper.getDb() @@ -70,11 +69,11 @@ describe('cancel', function () { } } - const jobId = await boss.send('will_cancel', null, { startAfter: 1 }) + const jobId = await boss.send(queue, null, { startAfter: 1 }) - await boss.cancel(jobId, { db }) + await boss.cancel(queue, jobId, { db }) - const job = await boss.getJobById(jobId) + const job = await boss.getJobById(queue, jobId) assert(job && job.state === 'cancelled') assert.strictEqual(called, true) diff --git a/test/completeTest.js b/test/completeTest.js index dbe09f6a..e5969b03 100644 --- a/test/completeTest.js +++ b/test/completeTest.js @@ -1,4 +1,3 @@ -const delay = require('delay') const assert = require('assert') const helper = require('./testHelper') const PgBoss = require('../') @@ -16,9 +15,9 @@ describe('complete', function () { }) it('should complete a batch of jobs', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queue = 'complete-batch' const batchSize = 3 await Promise.all([ @@ -29,262 +28,59 @@ describe('complete', function () { const countJobs = (state) => helper.countJobs(this.test.bossConfig.schema, 'name = $1 AND state = $2', [queue, state]) - const jobs = await boss.fetch(queue, batchSize) + const jobs = await boss.fetch(queue, { batchSize }) const activeCount = await countJobs(PgBoss.states.active) assert.strictEqual(activeCount, batchSize) - await boss.complete(jobs.map(job => job.id)) - - const completed = await 
boss.fetchCompleted(queue, batchSize) - - assert.strictEqual(batchSize, completed.length) - }) - - it('onComplete should have the payload from complete() in the response object', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const jobName = 'part-of-something-important' - const responsePayload = { message: 'super-important-payload', arg2: '123' } - - await boss.send(jobName) - - const job = await boss.fetch(jobName) - - await boss.complete(job.id, responsePayload) - - return new Promise((resolve) => { - boss.onComplete(jobName, async job => { - assert.strictEqual(job.data.response.message, responsePayload.message) - assert.strictEqual(job.data.response.arg2, responsePayload.arg2) - - resolve() - }) - }) - }) - - it('onComplete should have the original payload in request object', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) + const result = await boss.complete(queue, jobs.map(job => job.id)) - const queueName = 'onCompleteRequestTest' - const requestPayload = { foo: 'bar' } - - const jobId = await boss.send(queueName, requestPayload) - - const job = await boss.fetch(queueName) - await boss.complete(job.id) - - return new Promise((resolve) => { - boss.onComplete(queueName, async job => { - assert.strictEqual(jobId, job.data.request.id) - assert.strictEqual(job.data.request.data.foo, requestPayload.foo) - - resolve() - }) - }) + assert.strictEqual(batchSize, result.jobs.length) }) - it('onComplete should have both request and response', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const jobName = 'onCompleteFtw' - const requestPayload = { token: 'trivial' } - const responsePayload = { message: 'so verbose', code: '1234' } - - const jobId = await boss.send(jobName, requestPayload) - const job = await boss.fetch(jobName) - - await 
boss.complete(job.id, responsePayload) - - return new Promise((resolve) => { - boss.onComplete(jobName, async job => { - assert.strictEqual(jobId, job.data.request.id) - assert.strictEqual(job.data.request.data.token, requestPayload.token) - assert.strictEqual(job.data.response.message, responsePayload.message) - assert.strictEqual(job.data.response.code, responsePayload.code) - - resolve() - }) - }) - }) - - it('should remove an onComplete worker', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const jobName = 'offComplete' - - let receivedCount = 0 - - boss.onComplete(jobName, { newJobCheckInterval: 500 }, async job => { - receivedCount++ - await boss.offComplete(jobName) - }) - - await boss.send(jobName) - const job1 = await boss.fetch(jobName) - await boss.complete(job1.id) - - await delay(2000) - - await boss.send(jobName) - const job2 = await boss.fetch(jobName) - await boss.complete(job2.id) - - await delay(2000) - - assert.strictEqual(receivedCount, 1) - }) - - it('should remove an onComplete worker by id', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) + it('should store job output in job.output from complete()', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) const queue = this.test.bossConfig.schema - let receivedCount = 0 - - await boss.send(queue) - const job1 = await boss.fetch(queue) - await boss.complete(job1.id) - - await boss.send(queue) - const job2 = await boss.fetch(queue) - await boss.complete(job2.id) - - const id = await boss.onComplete(queue, { newJobCheckInterval: 500 }, async () => { - receivedCount++ - await boss.offComplete({ id }) - }) - - await delay(2000) - - assert.strictEqual(receivedCount, 1) - }) - - it('should fetch a completed job', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, 
onComplete: true }) - - const queue = 'fetchCompleted' - const jobId = await boss.send(queue) - await boss.fetch(queue) - await boss.complete(jobId) - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job.data.request.id, jobId) - }) - - it('should not create an extra state job after completion', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const queue = 'noMoreExtraStateJobs' - const config = this.test.bossConfig - - const jobId = await boss.send(queue) - - await boss.fetch(queue) - - await boss.complete(jobId) - - const job = await boss.fetchCompleted(queue) - - await boss.complete(job.id) - - const stateJobCount = await helper.countJobs(config.schema, 'name = $1', [`${helper.COMPLETION_JOB_PREFIX}${queue}`]) - - assert.strictEqual(stateJobCount, 1) - }) - - it('should not create a completion job if opted out during send', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const queue = 'onCompleteOptOut' - - const jobId = await boss.send(queue, null, { onComplete: false }) - - await boss.fetch(queue) - - await boss.complete(jobId) - - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job, null) - }) - - it('should not create a completion job if opted out during constructor', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: false }) - - const queue = 'onCompleteOptOutGlobal' - const jobId = await boss.send(queue) - await boss.fetch(queue) - - await boss.complete(jobId) - - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job, null) - }) - - it('should create completion job if overriding the default from constructor', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: false }) - - const queue = 'onCompleteOptInOverride' - - const jobId = await 
boss.send(queue, null, { onComplete: true }) - - await boss.fetch(queue) - - await boss.complete(jobId) + let [job] = await boss.fetch(queue) - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job.data.request.id, jobId) - }) - - it('should store job output in job.output from complete()', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'completion-data-in-job-output' - - const jobId = await boss.send(queue, null, { onComplete: false }) - - const { id } = await boss.fetch(queue) - - assert.strictEqual(jobId, id) + assert.strictEqual(jobId, job.id) const completionData = { msg: 'i am complete' } - await boss.complete(jobId, completionData) + await boss.complete(queue, jobId, completionData) - const job = await boss.getJobById(jobId) + job = await boss.getJobById(queue, jobId) assert.strictEqual(job.output.msg, completionData.msg) }) it('should store job error in job.output from fail()', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'completion-data-in-job-output' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const jobId = await boss.send(queue, null, { onComplete: false }) + const jobId = await boss.send(queue) - const { id } = await boss.fetch(queue) + let [job] = await boss.fetch(queue) - assert.strictEqual(jobId, id) + assert.strictEqual(jobId, job.id) const completionError = new Error('i am complete') - await boss.fail(jobId, completionError) + await boss.fail(queue, jobId, completionError) - const job = await boss.getJobById(jobId) + job = await boss.getJobById(queue, jobId) assert.strictEqual(job.output.message, completionError.message) }) it('should complete a batch of jobs with custom connection', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) + const boss = 
this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queue = 'complete-batch' const batchSize = 3 await Promise.all([ @@ -295,7 +91,7 @@ describe('complete', function () { const countJobs = (state) => helper.countJobs(this.test.bossConfig.schema, 'name = $1 AND state = $2', [queue, state]) - const jobs = await boss.fetch(queue, batchSize) + const jobs = await boss.fetch(queue, { batchSize }) const activeCount = await countJobs(PgBoss.states.active) @@ -310,11 +106,32 @@ describe('complete', function () { } } - await boss.complete(jobs.map(job => job.id), null, { db }) - - const completed = await boss.fetchCompleted(queue, batchSize) + const result = await boss.complete(queue, jobs.map(job => job.id), null, { db }) - assert.strictEqual(batchSize, completed.length) + assert.strictEqual(batchSize, result.jobs.length) assert.strictEqual(called, true) }) + + it('should warn with an old onComplete option only once', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + + let warningCount = 0 + + const warningEvent = 'warning' + const onWarning = (warning) => { + assert(warning.message.includes('onComplete')) + warningCount++ + } + + process.on(warningEvent, onWarning) + + await boss.send({ name: queue, options: { onComplete: true } }) + await boss.send({ name: queue, options: { onComplete: true } }) + await boss.send({ name: queue, options: { onComplete: true } }) + + process.removeListener(warningEvent, onWarning) + + assert.strictEqual(warningCount, 1) + }) }) diff --git a/test/config.json b/test/config.json index 7a99f9cd..86edcffe 100644 --- a/test/config.json +++ b/test/config.json @@ -4,6 +4,6 @@ "database": "pgboss", "user": "postgres", "password": "postgres", - "uuid": "v4", - "max": 3 + "max": 3, + "debug": false } diff --git a/test/configTest.js b/test/configTest.js index 5668dc6b..cb1414c4 100644 --- 
a/test/configTest.js +++ b/test/configTest.js @@ -15,6 +15,8 @@ describe('config', function () { const boss = this.test.boss = new PgBoss(config) await boss.start() + + await helper.dropSchema(config.schema) }) it('should not allow more than 50 characters in schema name', async function () { diff --git a/test/databaseTest.js b/test/databaseTest.js index 5c8bbcf4..caf974d0 100644 --- a/test/databaseTest.js +++ b/test/databaseTest.js @@ -1,6 +1,5 @@ const assert = require('assert') const PgBoss = require('../') -const Db = require('../src/db') describe('database', function () { it('should fail on invalid database host', async function () { @@ -18,29 +17,12 @@ describe('database', function () { const query = 'SELECT something FROM somewhere' const mydb = { - executeSql: async (text, values) => ({ rows: [], text, rowCount: 0 }) + executeSql: async (text, values) => ({ rows: [], text }) } const boss = new PgBoss({ db: mydb }) - const response = await boss.db.executeSql(query) + const response = await boss.getDb().executeSql(query) assert(response.text === query) }) - - describe('Db.quotePostgresStr', function () { - it('should dollar-sign quote specified input', async function () { - const str = Db.quotePostgresStr('Here\'s my input') - assert(str === '$sanitize$Here\'s my input$sanitize$') - }) - - it('should error if input contains reserved quote delimiter', async function () { - const badInput = '$sanitize$; DROP TABLE job --' - try { - Db.quotePostgresStr(badInput) - assert(false, 'Error was expected but did not occur') - } catch (err) { - assert(err.message === `Attempted to quote string that contains reserved Postgres delimeter: ${badInput}`) - } - }) - }) }) diff --git a/test/delayTest.js b/test/delayTest.js index 918d7089..37a13983 100644 --- a/test/delayTest.js +++ b/test/delayTest.js @@ -1,6 +1,6 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') +const { delay } = require('../src/tools') 
describe('delayed jobs', function () { it('should wait until after an int (in seconds)', async function () { @@ -11,21 +11,20 @@ describe('delayed jobs', function () { await boss.send(queue, null, { startAfter }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) await delay(startAfter * 1000) - const job2 = await boss.fetch(queue) + const [job2] = await boss.fetch(queue) assert(job2) }) it('should wait until after a date time string', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'delay-date-string' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const date = new Date() @@ -35,9 +34,9 @@ describe('delayed jobs', function () { await boss.send(queue, null, { startAfter }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) await delay(5000) @@ -47,8 +46,8 @@ describe('delayed jobs', function () { }) it('should wait until after a date object', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'delay-date-object' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const date = new Date() date.setUTCSeconds(date.getUTCSeconds() + 2) @@ -57,20 +56,20 @@ describe('delayed jobs', function () { await boss.send(queue, null, { startAfter }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) await delay(2000) - const job2 = await boss.fetch(queue) + const [job2] = await boss.fetch(queue) assert(job2) }) it('should work with sendAfter() and a date object', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendAfter-date-object' + 
const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const date = new Date() date.setUTCSeconds(date.getUTCSeconds() + 2) @@ -79,13 +78,13 @@ describe('delayed jobs', function () { await boss.sendAfter(queue, { something: 1 }, { retryLimit: 0 }, startAfter) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) await delay(2000) - const job2 = await boss.fetch(queue) + const [job2] = await boss.fetch(queue) assert(job2) }) diff --git a/test/deleteQueueTest.js b/test/deleteQueueTest.js deleted file mode 100644 index 0f05dbbc..00000000 --- a/test/deleteQueueTest.js +++ /dev/null @@ -1,99 +0,0 @@ -const assert = require('assert') -const helper = require('./testHelper') -const delay = require('delay') - -describe('deleteQueue', function () { - it('should clear a specific queue', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue2 = 'delete-named-queue-2' - const queue1 = 'delete-named-queue-1' - - await boss.send(queue1) - await boss.send(queue2) - - const q1Count1 = await boss.getQueueSize(queue1) - const q2Count1 = await boss.getQueueSize(queue2) - - assert.strictEqual(1, q1Count1) - assert.strictEqual(1, q2Count1) - - await boss.deleteQueue(queue1) - - const q1Count2 = await boss.getQueueSize(queue1) - const q2Count2 = await boss.getQueueSize(queue2) - - assert.strictEqual(0, q1Count2) - assert.strictEqual(1, q2Count2) - - await boss.deleteQueue(queue2) - - const q2Count3 = await boss.getQueueSize(queue2) - - assert.strictEqual(0, q2Count3) - }) - - it('should clear all queues', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue1 = 'delete-named-queue-11' - const queue2 = 'delete-named-queue-22' - - await boss.send(queue1) - await boss.send(queue2) - - const q1Count1 = await boss.getQueueSize(queue1) - const 
q2Count1 = await boss.getQueueSize(queue2) - - assert.strictEqual(1, q1Count1) - assert.strictEqual(1, q2Count1) - - await boss.deleteAllQueues() - - const q1Count2 = await boss.getQueueSize(queue1) - const q2Count2 = await boss.getQueueSize(queue2) - - assert.strictEqual(0, q1Count2) - assert.strictEqual(0, q2Count2) - }) - - it('clearStorage() should empty both job storage tables', async function () { - const defaults = { - archiveCompletedAfterSeconds: 1, - maintenanceIntervalSeconds: 1 - } - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'clear-storage-works' - - const jobId = await boss.send(queue) - const job = await boss.fetch(queue) - - assert.strictEqual(job.id, jobId) - - await boss.complete(jobId) - - await delay(3000) - - const db = await helper.getDb() - - const getJobCount = async table => { - const jobCountResult = await db.executeSql(`SELECT count(*)::int as job_count FROM ${this.test.bossConfig.schema}.${table}`) - return jobCountResult.rows[0].job_count - } - - const preJobCount = await getJobCount('job') - const preArchiveCount = await getJobCount('archive') - - assert(preJobCount > 0) - assert(preArchiveCount > 0) - - await boss.clearStorage() - - const postJobCount = await getJobCount('job') - const postArchiveCount = await getJobCount('archive') - - assert(postJobCount === 0) - assert(postArchiveCount === 0) - }) -}) diff --git a/test/deleteTest.js b/test/deleteTest.js index 9a69080e..3ebf8b2c 100644 --- a/test/deleteTest.js +++ b/test/deleteTest.js @@ -1,29 +1,38 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') describe('delete', async function () { - const defaults = { - deleteAfterSeconds: 1, - maintenanceIntervalSeconds: 1 - } + it('should delete an archived via maintenance', async function () { + const config = { ...this.test.bossConfig, deleteAfterSeconds: 1 } + const boss = this.test.boss = await helper.start(config) + 
const queue = this.test.bossConfig.schema + + const jobId = await boss.send(queue) + + await boss.fetch(queue) + + await boss.complete(queue, jobId) - it('should delete an archived job', async function () { - const jobName = 'deleteMe' + await boss.maintain() + + const archivedJob = await helper.getArchivedJobById(config.schema, queue, jobId) + + assert(!archivedJob) + }) - const config = { ...this.test.bossConfig, ...defaults } + it('should delete a job via deleteJob()', async function () { + const config = { ...this.test.bossConfig } const boss = this.test.boss = await helper.start(config) - const jobId = await boss.send(jobName) - const job = await boss.fetch(jobName) + const queue = config.schema - assert.strictEqual(jobId, job.id) + const jobId = await boss.send(queue) - await boss.complete(jobId) + await boss.fetch(queue) - await delay(7000) + await boss.deleteJob(queue, jobId) - const archivedJob = await helper.getArchivedJobById(config.schema, jobId) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(archivedJob, null) + assert(!job) }) }) diff --git a/test/errorTest.js b/test/errorTest.js index 58564841..0fc18275 100644 --- a/test/errorTest.js +++ b/test/errorTest.js @@ -11,7 +11,7 @@ describe('error', function () { await boss.send(queue) await new Promise((resolve) => { - boss.work(queue, async job => { + boss.work(queue, async () => { processCount++ if (processCount === 1) { diff --git a/test/expireTest.js b/test/expireTest.js index c37f9628..533c01c2 100644 --- a/test/expireTest.js +++ b/test/expireTest.js @@ -1,70 +1,43 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') +const { delay } = require('../src/tools') describe('expire', function () { - const defaults = { maintenanceIntervalSeconds: 1 } - it('should expire a job', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults, onComplete: true }) + const boss = 
this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + const key = this.test.bossConfig.schema - const queue = 'expire' + const jobId = await boss.send({ name: queue, data: { key }, options: { retryLimit: 0, expireInSeconds: 1 } }) - const jobId = await boss.send({ name: queue, options: { expireInSeconds: 1 } }) + const [job1] = await boss.fetch(queue) - // fetch the job but don't complete it - await boss.fetch(queue) + assert(job1) + + await delay(1000) - // this should give it enough time to expire - await delay(8000) + await boss.maintain() - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(jobId, job.data.request.id) - assert.strictEqual('expired', job.data.state) + assert.strictEqual('failed', job.state) }) it('should expire a job - cascaded config', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults, expireInSeconds: 1 }) - - const queue = 'expire-cascade-config' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, expireInSeconds: 1, retryLimit: 0 }) + const queue = this.test.bossConfig.schema const jobId = await boss.send(queue) // fetch the job but don't complete it - const { id } = await boss.fetch(queue) - - assert.strictEqual(jobId, id) - - // this should give it enough time to expire - await delay(8000) - - const job = await boss.getJobById(jobId) - - assert.strictEqual('expired', job.state) - }) - - it('should warn with an old expireIn option only once', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noSupervisor: true }) - - const queue = 'expireIn-warning-only-once' - - let warningCount = 0 - - const warningEvent = 'warning' - const onWarning = (warning) => { - assert(warning.message.includes('expireIn')) - warningCount++ - } + await boss.fetch(queue) - process.on(warningEvent, onWarning) + await 
delay(1000) - await boss.send({ name: queue, options: { expireIn: '1 minute' } }) - await boss.send({ name: queue, options: { expireIn: '1 minute' } }) - await boss.send({ name: queue, options: { expireIn: '1 minute' } }) + await boss.maintain() - process.removeListener(warningEvent, onWarning) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(warningCount, 1) + assert.strictEqual('failed', job.state) }) }) diff --git a/test/exportTest.js b/test/exportTest.js index 6cd13194..433c4e46 100644 --- a/test/exportTest.js +++ b/test/exportTest.js @@ -11,14 +11,36 @@ describe('export', function () { assert(plans.includes(`${schema}.version`)) }) - it('should export commands to migrate', function () { + it('should fail to export migration using current version', function () { + const schema = 'custom' + + try { + PgBoss.getMigrationPlans(schema, currentSchemaVersion) + assert(false, 'migration plans should fail on current version') + } catch { + assert(true) + } + }) + + it.skip('should export commands to migrate', function () { const schema = 'custom' const plans = PgBoss.getMigrationPlans(schema, currentSchemaVersion - 1) assert(plans, 'migration plans not found') }) - it('should export commands to roll back', function () { + it('should fail to export commands to roll back from invalid version', function () { + const schema = 'custom' + + try { + PgBoss.getRollbackPlans(schema, -1) + assert(false, 'migration plans should fail on current version') + } catch { + assert(true) + } + }) + + it.skip('should export commands to roll back', function () { const schema = 'custom' const plans = PgBoss.getRollbackPlans(schema, currentSchemaVersion) diff --git a/test/failureTest.js b/test/failureTest.js index 6b66dd4f..be1c4d15 100644 --- a/test/failureTest.js +++ b/test/failureTest.js @@ -1,7 +1,6 @@ -const delay = require('delay') +const { delay } = require('../src/tools') const assert = require('assert') const helper = require('./testHelper') -const pMap = 
require('p-map') describe('failure', function () { it('should reject missing id argument', async function () { @@ -21,28 +20,9 @@ describe('failure', function () { await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - await boss.fail(job.id) - }) - - it('worker for job failure', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - - const jobId = await boss.send(queue, null, { onComplete: true }) - - const job = await boss.fetch(queue) - - await boss.fail(job.id) - - return new Promise((resolve, reject) => { - boss.onComplete(queue, async job => { - assert.strictEqual(jobId, job.data.request.id) - assert.strictEqual('failed', job.data.state) - resolve() - }).catch(reject) - }) + await boss.fail(queue, job.id) }) it('should fail a batch of jobs', async function () { @@ -55,9 +35,11 @@ describe('failure', function () { boss.send(queue) ]) - const jobs = await boss.fetch(queue, 3) + const jobs = await boss.fetch(queue, { batchSize: 3 }) - await boss.fail(jobs.map(job => job.id)) + const result = await boss.fail(queue, jobs.map(job => job.id)) + + assert.strictEqual(result.jobs.length, 3) }) it('should fail a batch of jobs with a data arg', async function () { @@ -71,31 +53,15 @@ describe('failure', function () { boss.send(queue) ]) - const jobs = await boss.fetch(queue, 3) + const jobs = await boss.fetch(queue, { batchSize: 3 }) - await boss.fail(jobs.map(job => job.id), new Error(message)) + await boss.fail(queue, jobs.map(job => job.id), new Error(message)) - const results = await pMap(jobs, job => boss.getJobById(job.id)) + const results = await Promise.all(jobs.map(job => boss.getJobById(queue, job.id))) assert(results.every(i => i.output.message === message)) }) - it('should accept a payload', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - 
- const failPayload = { someReason: 'nuna' } - - const jobId = await boss.send(queue, null, { onComplete: true }) - - await boss.fail(jobId, failPayload) - - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job.data.state, 'failed') - assert.strictEqual(job.data.response.someReason, failPayload.someReason) - }) - it('should preserve nested objects within a payload that is an instance of Error', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema @@ -103,14 +69,13 @@ describe('failure', function () { const failPayload = new Error('Something went wrong') failPayload.some = { deeply: { nested: { reason: 'nuna' } } } - const jobId = await boss.send(queue, null, { onComplete: true }) + const jobId = await boss.send(queue) - await boss.fail(jobId, failPayload) + await boss.fail(queue, jobId, failPayload) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(job.data.state, 'failed') - assert.strictEqual(job.data.response.some.deeply.nested.reason, failPayload.some.deeply.nested.reason) + assert.strictEqual(job.output.some.deeply.nested.reason, failPayload.some.deeply.nested.reason) }) it('failure via Promise reject() should pass string wrapped in value prop', async function () { @@ -118,15 +83,14 @@ describe('failure', function () { const queue = this.test.bossConfig.schema const failPayload = 'mah error' - await boss.work(queue, job => Promise.reject(failPayload)) - await boss.send(queue, null, { onComplete: true }) + const jobId = await boss.send(queue) + await boss.work(queue, () => Promise.reject(failPayload)) - await delay(7000) + await delay(1000) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(job.data.state, 'failed') - assert.strictEqual(job.data.response.value, failPayload) + assert.strictEqual(job.output.value, failPayload) }) 
it('failure via Promise reject() should pass object payload', async function () { @@ -137,31 +101,29 @@ describe('failure', function () { const errorResponse = new Error('custom error') errorResponse.something = something - await boss.work(queue, job => Promise.reject(errorResponse)) - await boss.send(queue, null, { onComplete: true }) + const jobId = await boss.send(queue) + await boss.work(queue, () => Promise.reject(errorResponse)) - await delay(7000) + await delay(1000) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(job.data.state, 'failed') - assert.strictEqual(job.data.response.something, something) + assert.strictEqual(job.output.something, something) }) - it('failure with Error object should get stored in the failure job', async function () { + it('failure with Error object should be saved in the job', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema const message = 'a real error!' 
- await boss.send(queue, null, { onComplete: true }) + const jobId = await boss.send(queue) await boss.work(queue, async () => { throw new Error(message) }) - await delay(2000) + await delay(1000) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(job.data.state, 'failed') - assert(job.data.response.message.includes(message)) + assert(job.output.message.includes(message)) }) it('should fail a job with custom connection', async function () { @@ -170,7 +132,7 @@ describe('failure', function () { await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) let called = false const _db = await helper.getDb() @@ -181,7 +143,7 @@ describe('failure', function () { } } - await boss.fail(job.id, null, { db }) + await boss.fail(queue, job.id, null, { db }) assert.strictEqual(called, true) }) @@ -190,22 +152,57 @@ describe('failure', function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema - await boss.send(queue, null, { onComplete: true }) - - await boss.work(queue, async job => { - const err = { - message: 'something' - } + const jobId = await boss.send(queue) + const message = 'mhmm' + await boss.work(queue, { pollingIntervalSeconds: 0.5 }, async () => { + const err = { message } err.myself = err - throw err }) await delay(2000) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) + + assert.strictEqual(job.output.message, message) + }) + + it('dead letter queues are working', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + + const queue = this.test.bossConfig.schema + const deadLetter = `${queue}_dlq` + + await boss.createQueue(queue) + await boss.createQueue(deadLetter) + + const jobId = await boss.send(queue, { key: queue }, { deadLetter }) + + await boss.fetch(queue) + await 
boss.fail(queue, jobId) + + const [job] = await boss.fetch(deadLetter) + + assert.strictEqual(job.data.key, queue) + }) + + it('should fail active jobs in a worker during shutdown', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + + const jobId = await boss.send(queue, null, { retryLimit: 1, expireInSeconds: 60 }) + + await boss.work(queue, async () => await delay(10000)) + + await delay(1000) + + await boss.stop({ wait: true, timeout: 2000 }) + + await boss.start() + + const [job] = await boss.fetch(queue) - assert(job) + assert.strictEqual(job?.id, jobId) }) }) diff --git a/test/fetchTest.js b/test/fetchTest.js index 092230c4..bc583580 100644 --- a/test/fetchTest.js +++ b/test/fetchTest.js @@ -18,10 +18,10 @@ describe('fetch', function () { const queue = this.test.bossConfig.schema await boss.send(queue) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert(queue === job.name) // Metadata should only be included when specifically requested - assert(job.startedon === undefined) + assert(job.startedOn === undefined) }) it('should get a batch of jobs as an array', async function () { @@ -36,11 +36,11 @@ describe('fetch', function () { boss.send(queue) ]) - const jobs = await boss.fetch(queue, batchSize) + const jobs = await boss.fetch(queue, { batchSize }) assert(jobs.length === batchSize) // Metadata should only be included when specifically requested - assert(jobs[0].startedon === undefined) + assert(jobs[0].startedOn === undefined) }) it('should fetch all metadata for a single job when requested', async function () { @@ -48,22 +48,24 @@ describe('fetch', function () { const queue = this.test.bossConfig.schema await boss.send(queue) - const job = await boss.fetch(queue, undefined, { includeMetadata: true }) + const [job] = await boss.fetch(queue, { includeMetadata: true }) + assert(queue === job.name) assert(job.priority === 0) 
assert(job.state === 'active') - assert(job.retrylimit === 0) - assert(job.retrycount === 0) - assert(job.retrydelay === 0) - assert(job.retrybackoff === false) - assert(job.startafter !== undefined) - assert(job.startedon !== undefined) - assert(job.singletonkey === null) - assert(job.singletonon === null) - assert(job.expirein.minutes === 15) - assert(job.createdon !== undefined) - assert(job.completedon === null) - assert(job.keepuntil !== undefined) + assert(job.policy !== undefined) + assert(job.retryLimit === 0) + assert(job.retryCount === 0) + assert(job.retryDelay === 0) + assert(job.retryBackoff === false) + assert(job.startAfter !== undefined) + assert(job.startedOn !== undefined) + assert(job.singletonKey === null) + assert(job.singletonOn === null) + assert(job.expireIn.minutes === 15) + assert(job.createdOn !== undefined) + assert(job.completedOn === null) + assert(job.keepUntil !== undefined) }) it('should fetch all metadata for a batch of jobs when requested', async function () { @@ -78,26 +80,27 @@ describe('fetch', function () { boss.send(queue) ]) - const jobs = await boss.fetch(queue, batchSize, { includeMetadata: true }) + const jobs = await boss.fetch(queue, { batchSize, includeMetadata: true }) assert(jobs.length === batchSize) - jobs.forEach(job => { + for (const job of jobs) { assert(queue === job.name) assert(job.priority === 0) assert(job.state === 'active') - assert(job.retrylimit === 0) - assert(job.retrycount === 0) - assert(job.retrydelay === 0) - assert(job.retrybackoff === false) - assert(job.startafter !== undefined) - assert(job.startedon !== undefined) - assert(job.singletonkey === null) - assert(job.singletonon === null) - assert(job.expirein.minutes === 15) - assert(job.createdon !== undefined) - assert(job.completedon === null) - assert(job.keepuntil !== undefined) - }) + assert(job.policy !== undefined) + assert(job.retryLimit === 0) + assert(job.retryCount === 0) + assert(job.retryDelay === 0) + assert(job.retryBackoff === 
false) + assert(job.startAfter !== undefined) + assert(job.startedOn !== undefined) + assert(job.singletonKey === null) + assert(job.singletonOn === null) + assert(job.expireIn.minutes === 15) + assert(job.createdOn !== undefined) + assert(job.completedOn === null) + assert(job.keepUntil !== undefined) + } }) it('should fetch a job with custom connection', async function () { @@ -116,54 +119,9 @@ describe('fetch', function () { } await boss.send(queue, {}, options) - const [job] = await boss.fetch(queue, 10, options) + const [job] = await boss.fetch(queue, { ...options, batchSize: 10 }) assert(queue === job.name) - assert(job.startedon === undefined) + assert(job.startedOn === undefined) assert.strictEqual(calledCounter, 2) }) - - describe('enforceSingletonQueueActiveLimit option', function () { - it('when enforceSingletonQueueActiveLimit=false, should fetch singleton queue job even if there is already an active one', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - const jobOptions = { singletonKey: 'singleton_queue_active_test', useSingletonQueue: true } - const sendArgs = [queue, {}, jobOptions] - const fetchArgs = [queue, undefined, { enforceSingletonQueueActiveLimit: false }] - - const publish1 = await boss.send(...sendArgs) - assert(publish1) - const fetch1 = await boss.fetch(...fetchArgs) - assert(fetch1) - - const publish2 = await boss.send(...sendArgs) - assert(publish2) - const fetch2 = await boss.fetch(...fetchArgs) - assert(fetch2) - }) - - it('when enforceSingletonQueueActiveLimit=true, should not fetch singleton queue job if there is already an active one', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - const jobOptions = { singletonKey: 'singleton_queue_active_test', useSingletonQueue: true } - const sendArgs = [queue, {}, jobOptions] - const fetchArgs = [queue, 
undefined, { enforceSingletonQueueActiveLimit: true }] - - const publish1 = await boss.send(...sendArgs) - assert(publish1) - const fetch1 = await boss.fetch(...fetchArgs) - assert(fetch1) - - const publish2 = await boss.send(...sendArgs) - assert(publish2) - // Job 1 still active, can't fetch job 2 - const fetch2 = await boss.fetch(...fetchArgs) - assert(fetch2 === null) - - await boss.complete(fetch1.id) - // Job 1 no longer active, should be able to fetch job 2 - const retryFetch2 = await boss.fetch(...fetchArgs) - assert(retryFetch2) - assert(retryFetch2.id === publish2) - }) - }) }) diff --git a/test/hooks.js b/test/hooks.js index eae6392e..a876dbec 100644 --- a/test/hooks.js +++ b/test/hooks.js @@ -21,18 +21,16 @@ async function beforeEach () { async function afterEach () { this.timeout(10000) - const config = helper.getConfig({ testKey: getTestKey(this.currentTest) }) - - const { boss } = this.currentTest + const { boss, state } = this.currentTest if (boss) { - await new Promise((resolve) => { - boss.on('stopped', resolve) - helper.stop(boss) - }) + await boss.stop({ timeout: 2000 }) } - await helper.dropSchema(config.schema) + if (state === 'passed') { + const config = helper.getConfig({ testKey: getTestKey(this.currentTest) }) + await helper.dropSchema(config.schema) + } } function getTestKey (ctx) { diff --git a/test/insertTest.js b/test/insertTest.js index 26f455c3..05bf5199 100644 --- a/test/insertTest.js +++ b/test/insertTest.js @@ -1,5 +1,5 @@ const assert = require('assert') -const { v4: uuid } = require('uuid') +const { randomUUID } = require('crypto') const helper = require('./testHelper') describe('insert', function () { @@ -20,8 +20,11 @@ describe('insert', function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema + const deadLetter = `${queue}_dlq` + await boss.createQueue(deadLetter) + const input = { - id: uuid(), + id: randomUUID(), name: queue, priority: 1, data: { some: 
'data' }, @@ -32,33 +35,36 @@ describe('insert', function () { expireInSeconds: 5, singletonKey: '123', keepUntil: new Date().toISOString(), - onComplete: true + deadLetter } await boss.insert([input]) - const job = await boss.getJobById(input.id) + const job = await boss.getJobById(queue, input.id) assert.strictEqual(job.id, input.id, `id input ${input.id} didn't match job ${job.id}`) assert.strictEqual(job.name, input.name, `name input ${input.name} didn't match job ${job.name}`) assert.strictEqual(job.priority, input.priority, `priority input ${input.priority} didn't match job ${job.priority}`) assert.strictEqual(JSON.stringify(job.data), JSON.stringify(input.data), `data input ${input.data} didn't match job ${job.data}`) - assert.strictEqual(job.retrylimit, input.retryLimit, `retryLimit input ${input.retryLimit} didn't match job ${job.retrylimit}`) - assert.strictEqual(job.retrydelay, input.retryDelay, `retryDelay input ${input.retryDelay} didn't match job ${job.retrydelay}`) - assert.strictEqual(job.retrybackoff, input.retryBackoff, `retryBackoff input ${input.retryBackoff} didn't match job ${job.retrybackoff}`) - assert.strictEqual(new Date(job.startafter).toISOString(), input.startAfter, `startAfter input ${input.startAfter} didn't match job ${job.startafter}`) - assert.strictEqual(job.expirein.seconds, input.expireInSeconds, `expireInSeconds input ${input.expireInSeconds} didn't match job ${job.expirein}`) - assert.strictEqual(job.singletonkey, input.singletonKey, `name input ${input.singletonKey} didn't match job ${job.singletonkey}`) - assert.strictEqual(new Date(job.keepuntil).toISOString(), input.keepUntil, `keepUntil input ${input.keepUntil} didn't match job ${job.keepuntil}`) - assert.strictEqual(job.on_complete, input.onComplete, `onComplete input ${input.onComplete} didn't match job ${job.on_complete}`) + assert.strictEqual(job.retryLimit, input.retryLimit, `retryLimit input ${input.retryLimit} didn't match job ${job.retryLimit}`) + 
assert.strictEqual(job.retryDelay, input.retryDelay, `retryDelay input ${input.retryDelay} didn't match job ${job.retryDelay}`) + assert.strictEqual(job.retryBackoff, input.retryBackoff, `retryBackoff input ${input.retryBackoff} didn't match job ${job.retryBackoff}`) + assert.strictEqual(new Date(job.startAfter).toISOString(), input.startAfter, `startAfter input ${input.startAfter} didn't match job ${job.startAfter}`) + assert.strictEqual(job.expireIn.seconds, input.expireInSeconds, `expireInSeconds input ${input.expireInSeconds} didn't match job ${job.expireIn}`) + assert.strictEqual(job.singletonKey, input.singletonKey, `name input ${input.singletonKey} didn't match job ${job.singletonKey}`) + assert.strictEqual(new Date(job.keepUntil).toISOString(), input.keepUntil, `keepUntil input ${input.keepUntil} didn't match job ${job.keepUntil}`) + assert.strictEqual(job.deadLetter, input.deadLetter, `deadLetter input ${input.deadLetter} didn't match job ${job.deadLetter}`) }) it('should create jobs from an array with all properties and custom connection', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema + const deadLetter = `${queue}_dlq` + await boss.createQueue(deadLetter) + const input = { - id: uuid(), + id: randomUUID(), name: queue, priority: 1, data: { some: 'data' }, @@ -69,7 +75,7 @@ describe('insert', function () { expireInSeconds: 5, singletonKey: '123', keepUntil: new Date().toISOString(), - onComplete: true + deadLetter } let called = false const db = await helper.getDb() @@ -84,20 +90,20 @@ describe('insert', function () { await boss.insert([input], options) - const job = await boss.getJobById(input.id) + const job = await boss.getJobById(queue, input.id) assert.strictEqual(job.id, input.id, `id input ${input.id} didn't match job ${job.id}`) assert.strictEqual(job.name, input.name, `name input ${input.name} didn't match job ${job.name}`) assert.strictEqual(job.priority, 
input.priority, `priority input ${input.priority} didn't match job ${job.priority}`) assert.strictEqual(JSON.stringify(job.data), JSON.stringify(input.data), `data input ${input.data} didn't match job ${job.data}`) - assert.strictEqual(job.retrylimit, input.retryLimit, `retryLimit input ${input.retryLimit} didn't match job ${job.retrylimit}`) - assert.strictEqual(job.retrydelay, input.retryDelay, `retryDelay input ${input.retryDelay} didn't match job ${job.retrydelay}`) - assert.strictEqual(job.retrybackoff, input.retryBackoff, `retryBackoff input ${input.retryBackoff} didn't match job ${job.retrybackoff}`) - assert.strictEqual(new Date(job.startafter).toISOString(), input.startAfter, `startAfter input ${input.startAfter} didn't match job ${job.startafter}`) - assert.strictEqual(job.expirein.seconds, input.expireInSeconds, `expireInSeconds input ${input.expireInSeconds} didn't match job ${job.expirein}`) - assert.strictEqual(job.singletonkey, input.singletonKey, `name input ${input.singletonKey} didn't match job ${job.singletonkey}`) - assert.strictEqual(new Date(job.keepuntil).toISOString(), input.keepUntil, `keepUntil input ${input.keepUntil} didn't match job ${job.keepuntil}`) - assert.strictEqual(job.on_complete, input.onComplete, `onComplete input ${input.onComplete} didn't match job ${job.on_complete}`) + assert.strictEqual(job.retryLimit, input.retryLimit, `retryLimit input ${input.retryLimit} didn't match job ${job.retryLimit}`) + assert.strictEqual(job.retryDelay, input.retryDelay, `retryDelay input ${input.retryDelay} didn't match job ${job.retryDelay}`) + assert.strictEqual(job.retryBackoff, input.retryBackoff, `retryBackoff input ${input.retryBackoff} didn't match job ${job.retryBackoff}`) + assert.strictEqual(new Date(job.startAfter).toISOString(), input.startAfter, `startAfter input ${input.startAfter} didn't match job ${job.startAfter}`) + assert.strictEqual(job.expireIn.seconds, input.expireInSeconds, `expireInSeconds input ${input.expireInSeconds} 
didn't match job ${job.expireIn}`) + assert.strictEqual(job.singletonKey, input.singletonKey, `name input ${input.singletonKey} didn't match job ${job.singletonKey}`) + assert.strictEqual(new Date(job.keepUntil).toISOString(), input.keepUntil, `keepUntil input ${input.keepUntil} didn't match job ${job.keepUntil}`) + assert.strictEqual(job.deadLetter, input.deadLetter, `deadLetter input ${input.deadLetter} didn't match job ${job.deadLetter}`) assert.strictEqual(called, true) }) }) diff --git a/test/maintenanceTest.js b/test/maintenanceTest.js index 5270d212..db965277 100644 --- a/test/maintenanceTest.js +++ b/test/maintenanceTest.js @@ -1,53 +1,40 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') -const PgBoss = require('../') +const { delay } = require('../src/tools') describe('maintenance', async function () { - it('should send maintenance job if missing during monitoring', async function () { - const config = { ...this.test.bossConfig, maintenanceIntervalSeconds: 1 } + it('clearStorage() should empty both job storage tables', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, archiveCompletedAfterSeconds: 1 }) + const queue = this.test.bossConfig.schema - const db = await helper.getDb() - - const boss = this.test.boss = new PgBoss(config) - - const queues = boss.boss.getQueueNames() - const countJobs = () => helper.countJobs(config.schema, 'name = $1', [queues.MAINTENANCE]) - - await boss.start() + const jobId = await boss.send(queue) + await boss.fetch(queue) + await boss.complete(queue, jobId) - boss.on('maintenance', async () => { - // force timestamp to an older date - await db.executeSql(`UPDATE ${config.schema}.version SET maintained_on = now() - interval '5 minutes'`) - }) + await delay(1000) + await boss.maintain() - // wait for monitoring to check timestamp - await delay(4000) + await boss.send(queue) - const count = await countJobs() - assert(count > 
1) - }) + const db = await helper.getDb() - it('meta monitoring error handling works', async function () { - const config = { - ...this.test.bossConfig, - maintenanceIntervalSeconds: 1, - __test__throw_meta_monitor: 'meta monitoring error' + const getJobCount = async table => { + const jobCountResult = await db.executeSql(`SELECT count(*)::int as job_count FROM ${this.test.bossConfig.schema}.${table}`) + return jobCountResult.rows[0].job_count } - let errorCount = 0 - - const boss = this.test.boss = new PgBoss(config) + const preJobCount = await getJobCount('job') + const preArchiveCount = await getJobCount('archive') - boss.once('error', (error) => { - assert.strictEqual(error.message, config.__test__throw_meta_monitor) - errorCount++ - }) + assert.strictEqual(preJobCount, 1) + assert.strictEqual(preArchiveCount, 1) - await boss.start() + await boss.clearStorage() - await delay(6000) + const postJobCount = await getJobCount('job') + const postArchiveCount = await getJobCount('archive') - assert.strictEqual(errorCount, 1) + assert.strictEqual(postJobCount, 0) + assert.strictEqual(postArchiveCount, 0) }) }) diff --git a/test/managerTest.js b/test/managerTest.js index 18569807..15007be6 100644 --- a/test/managerTest.js +++ b/test/managerTest.js @@ -1,4 +1,4 @@ -const delay = require('delay') +const { delay } = require('../src/tools') const assert = require('assert') const helper = require('./testHelper') diff --git a/test/migrationTest.js b/test/migrationTest.js index 8147af30..2d95f863 100644 --- a/test/migrationTest.js +++ b/test/migrationTest.js @@ -4,16 +4,37 @@ const helper = require('./testHelper') const Contractor = require('../src/contractor') const migrationStore = require('../src/migrationStore') const currentSchemaVersion = require('../version.json').schema +const plans = require('../src/plans') describe('migration', function () { - let contractor - beforeEach(async function () { + const db = await helper.getDb({ debug: false }) + 
this.currentTest.contractor = new Contractor(db, this.currentTest.bossConfig) + }) + + it('should not migrate when current version is not found in migration store', async function () { + const { contractor } = this.test + const config = { ...this.test.bossConfig } + + await contractor.create() + const db = await helper.getDb() - contractor = new Contractor(db, this.currentTest.bossConfig) + // version 20 was v9 and dropped from the migration store with v10 + await db.executeSql(plans.setVersion(config.schema, 20)) + + const boss = this.test.boss = new PgBoss(config) + + try { + await boss.start() + assert(false) + } catch { + assert(true) + } }) - it('should migrate to previous version and back again', async function () { + it.skip('should migrate to previous version and back again', async function () { + const { contractor } = this.test + await contractor.create() await contractor.rollback(currentSchemaVersion) @@ -27,12 +48,14 @@ describe('migration', function () { assert.strictEqual(newVersion, currentSchemaVersion) }) - it('should migrate to latest during start if on previous schema version', async function () { + it.skip('should migrate to latest during start if on previous schema version', async function () { + const { contractor } = this.test + await contractor.create() await contractor.rollback(currentSchemaVersion) - const config = { ...this.test.bossConfig, noSupervisor: true } + const config = { ...this.test.bossConfig } const boss = this.test.boss = new PgBoss(config) @@ -43,10 +66,12 @@ describe('migration', function () { assert.strictEqual(version, currentSchemaVersion) }) - it('should migrate through 2 versions back and forth', async function () { + it.skip('should migrate through 2 versions back and forth', async function () { + const { contractor } = this.test + const queue = 'migrate-back-2-and-forward' - const config = { ...this.test.bossConfig, noSupervisor: true } + const config = { ...this.test.bossConfig } const boss = this.test.boss = new 
PgBoss(config) @@ -56,12 +81,8 @@ describe('migration', function () { // completed job await boss.send(queue) - const job = await boss.fetch(queue) - await boss.complete(job.id) - - // active job - await boss.send(queue) - await boss.fetch(queue) + const [job] = await boss.fetch(queue) + await boss.complete(queue, job.id) // created job await boss.send(queue) @@ -85,9 +106,15 @@ describe('migration', function () { const version = await contractor.version() assert.strictEqual(version, currentSchemaVersion) + + await boss.send(queue) + const [job2] = await boss.fetch(queue) + await boss.complete(queue, job2.id) }) - it('should migrate to latest during start if on previous 2 schema versions', async function () { + it.skip('should migrate to latest during start if on previous 2 schema versions', async function () { + const { contractor } = this.test + await contractor.create() await contractor.rollback(currentSchemaVersion) @@ -98,7 +125,7 @@ describe('migration', function () { const twoVersionsAgo = await contractor.version() assert.strictEqual(twoVersionsAgo, currentSchemaVersion - 2) - const config = { ...this.test.bossConfig, noSupervisor: true } + const config = { ...this.test.bossConfig } const boss = this.test.boss = new PgBoss(config) await boss.start() @@ -108,6 +135,8 @@ describe('migration', function () { }) it('migrating to non-existent version fails gracefully', async function () { + const { contractor } = this.test + await contractor.create() try { @@ -117,8 +146,10 @@ describe('migration', function () { } }) - it('should roll back an error during a migration', async function () { - const config = { ...this.test.bossConfig, noSupervisor: true } + it.skip('should roll back an error during a migration', async function () { + const { contractor } = this.test + + const config = { ...this.test.bossConfig } config.migrations = migrationStore.getAll(config.schema) @@ -136,7 +167,7 @@ describe('migration', function () { } catch (error) { 
assert(error.message.includes('wat')) } finally { - await boss1.stop({ graceful: false }) + await boss1.stop({ graceful: false, wait: false }) } const version1 = await contractor.version() @@ -154,6 +185,57 @@ describe('migration', function () { assert.strictEqual(version2, currentSchemaVersion) - await boss2.stop({ graceful: false }) + await boss2.stop({ graceful: false, wait: false }) + }) + + it('should not install if migrate option is false', async function () { + const config = { ...this.test.bossConfig, migrate: false } + const boss = this.test.boss = new PgBoss(config) + try { + await boss.start() + assert(false) + } catch (err) { + assert(true) + } + }) + + it.skip('should not migrate if migrate option is false', async function () { + const { contractor } = this.test + + await contractor.create() + + await contractor.rollback(currentSchemaVersion) + + const config = { ...this.test.bossConfig, migrate: false } + const boss = this.test.boss = new PgBoss(config) + + try { + await boss.start() + assert(false) + } catch (err) { + assert(true) + } + }) + + it('should still work if migrate option is false', async function () { + const { contractor } = this.test + + await contractor.create() + + const config = { ...this.test.bossConfig, migrate: false } + const queue = this.test.bossConfig.schema + + const boss = this.test.boss = new PgBoss(config) + + try { + await boss.start() + await boss.send(queue) + const [job] = await boss.fetch(queue) + await boss.complete(queue, job.id) + + assert(false) + } catch (err) { + assert(true) + } }) }) diff --git a/test/moduleTest.js b/test/moduleTest.js index 94377e1e..244eb2b3 100644 --- a/test/moduleTest.js +++ b/test/moduleTest.js @@ -8,7 +8,6 @@ describe('module', function () { assert(states.retry) assert(states.active) assert(states.completed) - assert(states.expired) assert(states.cancelled) assert(states.failed) }) diff --git a/test/monitoringTest.js b/test/monitoringTest.js index 929209ff..ad9ef23d 100644 --- 
a/test/monitoringTest.js +++ b/test/monitoringTest.js @@ -3,14 +3,13 @@ const helper = require('./testHelper') describe('monitoring', function () { it('should emit state counts', async function () { - const defaults = { - monitorStateIntervalSeconds: 1, - maintenanceIntervalSeconds: 10 + const config = { + ...this.test.bossConfig, + monitorStateIntervalSeconds: 1 } - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'monitorMe' + const boss = this.test.boss = await helper.start(config) + const queue = this.test.bossConfig.schema await boss.send(queue) await boss.send(queue) @@ -34,8 +33,8 @@ describe('monitoring', function () { assert.strictEqual(1, states3.queues[queue].created, 'created count is wrong after 3 sendes and 2 fetches') assert.strictEqual(2, states3.queues[queue].active, 'active count is wrong after 3 sendes and 2 fetches') - const job = await boss.fetch(queue) - await boss.complete(job.id) + const [job] = await boss.fetch(queue) + await boss.complete(queue, job.id) const states4 = await boss.countStates() @@ -43,18 +42,13 @@ describe('monitoring', function () { assert.strictEqual(2, states4.queues[queue].active, 'active count is wrong after 3 sendes and 3 fetches and 1 complete') assert.strictEqual(1, states4.queues[queue].completed, 'completed count is wrong after 3 sendes and 3 fetches and 1 complete') - return new Promise((resolve) => { - let resolved = false - - boss.on('monitor-states', async states => { - if (!resolved) { - resolved = true - assert.strictEqual(states4.queues[queue].created, states.queues[queue].created, 'created count from monitor-states doesn\'t match') - assert.strictEqual(states4.queues[queue].active, states.queues[queue].active, 'active count from monitor-states doesn\'t match') - assert.strictEqual(states4.queues[queue].completed, states.queues[queue].completed, 'completed count from monitor-states doesn\'t match') + await new Promise((resolve) => { + 
boss.once('monitor-states', async states => { + assert.strictEqual(states4.queues[queue].created, states.queues[queue].created, 'created count from monitor-states doesn\'t match') + assert.strictEqual(states4.queues[queue].active, states.queues[queue].active, 'active count from monitor-states doesn\'t match') + assert.strictEqual(states4.queues[queue].completed, states.queues[queue].completed, 'completed count from monitor-states doesn\'t match') - resolve() - } + resolve() }) }) }) diff --git a/test/multiMasterTest.js b/test/multiMasterTest.js index 361d7c4d..6b63c2d4 100644 --- a/test/multiMasterTest.js +++ b/test/multiMasterTest.js @@ -1,16 +1,14 @@ const assert = require('assert') -const delay = require('delay') const helper = require('./testHelper') const PgBoss = require('../') const Contractor = require('../src/contractor') const migrationStore = require('../src/migrationStore') const currentSchemaVersion = require('../version.json').schema -const pMap = require('p-map') describe('multi-master', function () { it('should only allow 1 master to start at a time', async function () { const replicaCount = 20 - const config = { ...this.test.bossConfig, noSupervisor: true, max: 2 } + const config = { ...this.test.bossConfig, supervise: false, max: 2 } const instances = [] for (let i = 0; i < replicaCount; i++) { @@ -18,17 +16,21 @@ describe('multi-master', function () { } try { - await pMap(instances, i => i.start()) + await Promise.all(instances.map(i => i.start())) } catch (err) { assert(false, err.message) } finally { - await pMap(instances, i => i.stop({ graceful: false })) + await Promise.all(instances.map(i => i.stop({ graceful: false, wait: false }))) } }) - it('should only allow 1 master to migrate to latest at a time', async function () { - const replicaCount = 5 - const config = { ...this.test.bossConfig, noSupervisor: true, max: 2 } + it.skip('should only allow 1 master to migrate to latest at a time', async function () { + const config = { + 
...this.test.bossConfig, + supervise: true, + maintenanceIntervalSeconds: 1, + max: 2 + } const db = await helper.getDb() const contractor = new Contractor(db, config) @@ -46,58 +48,16 @@ describe('multi-master', function () { const instances = [] - for (let i = 0; i < replicaCount; i++) { + for (let i = 0; i < 5; i++) { instances.push(new PgBoss(config)) } try { - await pMap(instances, i => i.start()) + await Promise.all(instances.map(i => i.start())) } catch (err) { assert(false) } finally { - await pMap(instances, i => i.stop({ graceful: false })) - } - }) - - it('should clear maintenance queue before supervising', async function () { - const { states } = PgBoss - const jobCount = 5 - - const defaults = { - maintenanceIntervalSeconds: 1, - noSupervisor: true + await Promise.all(instances.map(i => i.stop({ graceful: false, wait: false }))) } - - const config = { ...this.test.bossConfig, ...defaults } - - let boss = new PgBoss(config) - - const queues = boss.boss.getQueueNames() - const countJobs = (state) => helper.countJobs(config.schema, 'name = $1 AND state = $2', [queues.MAINTENANCE, state]) - - await boss.start() - - // create extra maintenace jobs manually - for (let i = 0; i < jobCount; i++) { - await boss.send(queues.MAINTENANCE) - } - - const beforeCount = await countJobs(states.created) - - assert.strictEqual(beforeCount, jobCount) - - await boss.stop({ graceful: false }) - - boss = new PgBoss(this.test.bossConfig) - - await boss.start() - - await delay(3000) - - const completedCount = await countJobs(states.completed) - - assert.strictEqual(completedCount, 1) - - await boss.stop({ graceful: false }) }) }) diff --git a/test/opsTest.js b/test/opsTest.js index 0e1ff95b..c41f6a7a 100644 --- a/test/opsTest.js +++ b/test/opsTest.js @@ -1,47 +1,25 @@ const assert = require('assert') const helper = require('./testHelper') -const { v4: uuid } = require('uuid') -const delay = require('delay') +const { randomUUID } = require('crypto') describe('ops', function () 
{ - const defaults = { - noSupervisor: true, - noScheduling: true - } - it('should expire manually', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) await boss.expire() }) it('should archive manually', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) await boss.archive() }) it('should purge the archive manually', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - await boss.purge() - }) - - it('stop should re-emit stoppped if already stopped', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const stopPromise1 = new Promise(resolve => boss.once('stopped', resolve)) - - await boss.stop({ timeout: 1 }) - - await stopPromise1 - - const stopPromise2 = new Promise(resolve => boss.once('stopped', resolve)) - - await boss.stop({ timeout: 1 }) - - await stopPromise2 + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + await boss.drop() }) it('should emit error in worker', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults, __test__throw_worker: true }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, __test__throw_worker: true }) const queue = this.test.bossConfig.schema await boss.send(queue) @@ -51,57 +29,41 @@ describe('ops', function () { }) it('should return null from getJobById if not found', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const jobId 
= await boss.getJobById(uuid()) + const jobId = await boss.getJobById(queue, randomUUID()) - assert.strictEqual(jobId, null) + assert(!jobId) }) it('should force stop', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - await boss.stop({ graceful: false }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + await boss.stop({ graceful: false, wait: true }) }) - it('should destroy the connection pool', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - await boss.stop({ destroy: true, graceful: false }) + it('should close the connection pool', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + await boss.stop({ graceful: false, wait: true }) - assert(boss.db.pool.totalCount === 0) + assert(boss.getDb().pool.totalCount === 0) }) - it('should destroy the connection pool gracefully', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - await boss.stop({ destroy: true }) - await new Promise((resolve) => { - boss.on('stopped', () => resolve()) - }) + it('should close the connection pool gracefully', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + await boss.stop({ wait: true }) - assert(boss.db.pool.totalCount === 0) + assert(boss.getDb().pool.totalCount === 0) }) - it('should emit error during graceful stop if worker is busy', async function () { - const boss = await helper.start({ ...this.test.bossConfig, ...defaults, __test__throw_stop: true }) + it('should not close the connection pool after stop with close option', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) const queue = this.test.bossConfig.schema + await boss.stop({ close: false, wait: true }) - await boss.send(queue) - await 
boss.work(queue, () => delay(2000)) - - await delay(500) - - await boss.stop({ timeout: 5000 }) - - await new Promise(resolve => boss.on('error', resolve)) - }) - - it('should throw error during graceful stop if no workers are busy', async function () { - const boss = await helper.start({ ...this.test.bossConfig, ...defaults, __test__throw_stop: true }) + const jobId = await boss.send(queue) + const [job] = await boss.fetch(queue) - try { - await boss.stop({ timeout: 1 }) - assert(false) - } catch (err) { - assert(true) - } + assert.strictEqual(jobId, job.id) }) }) diff --git a/test/priorityTest.js b/test/priorityTest.js index 468fab72..6681d24a 100644 --- a/test/priorityTest.js +++ b/test/priorityTest.js @@ -4,30 +4,28 @@ const helper = require('./testHelper') describe('priority', function () { it('higher priority job', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) + const queue = this.test.bossConfig.schema - const jobName = 'priority-test' + await boss.send(queue) - await boss.send(jobName) + const high = await boss.send(queue, null, { priority: 1 }) - const high = await boss.send(jobName, null, { priority: 1 }) - - const job = await boss.fetch(jobName) + const [job] = await boss.fetch(queue) assert.strictEqual(job.id, high) }) it('descending priority order', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'multiple-priority-test' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const low = await boss.send(queue, null, { priority: 1 }) const medium = await boss.send(queue, null, { priority: 5 }) const high = await boss.send(queue, null, { priority: 10 }) - const job1 = await boss.fetch(queue) - const job2 = await boss.fetch(queue) - const job3 = await boss.fetch(queue) + const [job1] = await boss.fetch(queue) + const [job2] = await boss.fetch(queue) + const [job3] = await 
boss.fetch(queue) assert.strictEqual(job1.id, high) assert.strictEqual(job2.id, medium) diff --git a/test/publishTest.js b/test/publishTest.js index 69a48636..18ecbce1 100644 --- a/test/publishTest.js +++ b/test/publishTest.js @@ -3,7 +3,7 @@ const helper = require('./testHelper') describe('pubsub', function () { it('should fail with no arguments', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) try { await boss.publish() @@ -14,10 +14,11 @@ describe('pubsub', function () { }) it('should fail with a function for data', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema try { - await boss.publish('job', () => true) + await boss.publish(queue, () => true) assert(false) } catch (err) { assert(err) @@ -25,10 +26,11 @@ describe('pubsub', function () { }) it('should fail with a function for options', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema try { - await boss.publish('event', 'data', () => true) + await boss.publish(queue, 'data', () => true) assert(false) } catch (err) { assert(err) @@ -36,47 +38,54 @@ describe('pubsub', function () { }) it('should accept single string argument', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendNameOnly' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema await boss.publish(queue) }) it('should accept job object argument with only name', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 
'sendqueueOnly' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema await boss.publish(queue) }) it('should not send to the same named queue', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueAndData' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + const message = 'hi' await boss.publish(queue, { message }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) }) it('should use subscriptions to map to a single queue', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueAndData' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + const event = 'event' const message = 'hi' await boss.subscribe(event, queue) await boss.publish(event, { message }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.strictEqual(message, job.data.message) }) it('should use subscriptions to map to more than one queue', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue1 = 'queue1' - const queue2 = 'queue2' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + + const queue1 = 'subqueue1' + const queue2 = 'subqueue2' + + await boss.createQueue(queue1) + await boss.createQueue(queue2) + const event = 'event' const message = 'hi' @@ -84,8 +93,8 @@ describe('pubsub', function () { await boss.subscribe(event, queue2) await boss.publish(event, { message }) - const job1 = await boss.fetch(queue1) - const job2 = await boss.fetch(queue2) + const [job1] = await boss.fetch(queue1) + const [job2] = await boss.fetch(queue2) 
assert.strictEqual(message, job1.data.message) assert.strictEqual(message, job2.data.message) @@ -115,31 +124,45 @@ it('should fail if unsubscribe is called without both args', async function () { }) it('unsubscribe works', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) const event = 'foo' + const queue1 = 'queue1' const queue2 = 'queue2' + await boss.createQueue(queue1) + await boss.createQueue(queue2) + await boss.subscribe(event, queue1) await boss.subscribe(event, queue2) await boss.publish(event) - assert(await boss.fetch(queue1)) - assert(await boss.fetch(queue2)) + const [job1] = await boss.fetch(queue1) + + assert(job1) + + const [job2] = await boss.fetch(queue2) + + assert(job2) await boss.unsubscribe(event, queue2) await boss.publish(event) - assert(await boss.fetch(queue1)) + const [job3] = await boss.fetch(queue1) + + assert(job3) + + const [job4] = await boss.fetch(queue2) - assert.strictEqual(null, await boss.fetch(queue2)) + assert(!job4) await boss.unsubscribe(event, queue1) await boss.publish(event) - assert.strictEqual(null, await boss.fetch(queue1)) + const [job5] = await boss.fetch(queue1) + assert(!job5) }) diff --git a/test/queueTest.js b/test/queueTest.js new file mode 100644 index 00000000..e4e97c31 --- /dev/null +++ b/test/queueTest.js @@ -0,0 +1,426 @@ +const assert = require('assert') +const helper = require('./testHelper') + +describe('queues', function () { + it('should create a queue', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue) + }) + + it('should reject a queue with invalid characters', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = 
`*${this.test.bossConfig.schema}` + + try { + await boss.createQueue(queue) + assert(false) + } catch (err) { + assert(true) + } + }) + + it('should reject a queue that starts with a number', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = `4${this.test.bossConfig.schema}` + + try { + await boss.createQueue(queue) + assert(false) + } catch (err) { + assert(true) + } + }) + + it('should reject a queue with invalid policy', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + try { + await boss.createQueue(queue, { policy: 'something' }) + assert(false) + } catch (err) { + assert(true) + } + }) + + it('should create a queue with standard policy', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'standard' }) + }) + + it('should create a queue with stately policy', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'stately' }) + }) + + it('should create a queue with singleton policy', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'singleton' }) + }) + + it('should create a queue with short policy', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'short' }) + }) + + it('should delete and then create a queue', async function () { + const boss = this.test.boss = 
await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue) + assert(await boss.getQueue(queue)) + await boss.deleteQueue(queue) + await boss.createQueue(queue) + }) + + it('should purge a queue', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue) + await boss.purgeQueue(queue) + }) + + it('getQueue() returns null when missing', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = await boss.getQueue(this.test.bossConfig.schema) + assert.strictEqual(queue, null) + }) + + it('getQueues() returns queues array', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue1 = `${this.test.bossConfig.schema}_1` + const queue2 = `${this.test.bossConfig.schema}_2` + + await boss.createQueue(queue1) + await boss.createQueue(queue2) + + const queues = await boss.getQueues() + + assert.strictEqual(queues.length, 2) + + assert(queues.some(q => q.name === queue1)) + assert(queues.some(q => q.name === queue2)) + }) + + it('should update queue properties', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + let deadLetter = `${queue}_dlq1` + await boss.createQueue(deadLetter) + + const createProps = { + policy: 'standard', + retryLimit: 1, + retryBackoff: false, + retryDelay: 1, + expireInSeconds: 1, + retentionMinutes: 1, + deadLetter + } + + await boss.createQueue(queue, createProps) + + let queueObj = await boss.getQueue(queue) + + assert.strictEqual(queue, queueObj.name) + assert.strictEqual(createProps.policy, queueObj.policy) + assert.strictEqual(createProps.retryLimit, queueObj.retryLimit) + 
assert.strictEqual(createProps.retryBackoff, queueObj.retryBackoff) + assert.strictEqual(createProps.retryDelay, queueObj.retryDelay) + assert.strictEqual(createProps.expireInSeconds, queueObj.expireInSeconds) + assert.strictEqual(createProps.retentionMinutes, queueObj.retentionMinutes) + assert.strictEqual(createProps.deadLetter, queueObj.deadLetter) + assert(queueObj.createdOn) + assert(queueObj.updatedOn) + + deadLetter = `${queue}_dlq2` + await boss.createQueue(deadLetter) + + const updateProps = { + policy: 'short', + retryLimit: 2, + retryBackoff: true, + retryDelay: 2, + expireInSeconds: 2, + retentionMinutes: 2, + deadLetter + } + + await boss.updateQueue(queue, updateProps) + + queueObj = await boss.getQueue(queue) + + assert.strictEqual(updateProps.policy, queueObj.policy) + assert.strictEqual(updateProps.retryLimit, queueObj.retryLimit) + assert.strictEqual(updateProps.retryBackoff, queueObj.retryBackoff) + assert.strictEqual(updateProps.retryDelay, queueObj.retryDelay) + assert.strictEqual(updateProps.expireInSeconds, queueObj.expireInSeconds) + assert.strictEqual(updateProps.retentionMinutes, queueObj.retentionMinutes) + assert.strictEqual(updateProps.deadLetter, queueObj.deadLetter) + }) + + it('jobs should inherit properties from queue', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + const deadLetter = `${queue}_dlq` + await boss.createQueue(deadLetter) + + const createProps = { + retryLimit: 1, + retryBackoff: true, + retryDelay: 2, + expireInSeconds: 3, + retentionMinutes: 4, + deadLetter + } + + await boss.createQueue(queue, createProps) + + const jobId = await boss.send(queue) + + const job = await boss.getJobById(queue, jobId) + + const retentionMinutes = (new Date(job.keepUntil) - new Date(job.createdOn)) / 1000 / 60 + + assert.strictEqual(createProps.retryLimit, job.retryLimit) + assert.strictEqual(createProps.retryBackoff, 
job.retryBackoff) + assert.strictEqual(createProps.retryDelay, job.retryDelay) + assert.strictEqual(createProps.expireInSeconds, job.expireIn.seconds) + assert.strictEqual(createProps.retentionMinutes, retentionMinutes) + assert.strictEqual(createProps.deadLetter, job.deadLetter) + }) + + it('short policy only allows 1 job in queue', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'short' }) + + const jobId = await boss.send(queue) + + assert(jobId) + + const jobId2 = await boss.send(queue) + + assert.strictEqual(jobId2, null) + + const [job] = await boss.fetch(queue) + + assert.strictEqual(job.id, jobId) + + const jobId3 = await boss.send(queue) + + assert(jobId3) + }) + + it('short policy should be extended with singletonKey', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'short' }) + + const jobId = await boss.send(queue, null, { singletonKey: 'a' }) + + assert(jobId) + + const jobId2 = await boss.send(queue, null, { singletonKey: 'a' }) + + assert.strictEqual(jobId2, null) + + const jobId3 = await boss.send(queue, null, { singletonKey: 'b' }) + + assert(jobId3) + + const [job] = await boss.fetch(queue) + + assert.strictEqual(job.id, jobId) + + const jobId4 = await boss.send(queue, null, { singletonKey: 'a' }) + + assert(jobId4) + }) + + it('singleton policy only allows 1 active job', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'singleton' }) + + await boss.send(queue) + + await boss.send(queue) + + const [job1] = await boss.fetch(queue) + + const [job2] = await boss.fetch(queue) + + 
assert(!job2) + + await boss.complete(queue, job1.id) + + const [job3] = await boss.fetch(queue) + + assert(job3) + }) + + it('singleton policy should be extended with singletonKey', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'singleton' }) + + await boss.send(queue, null, { singletonKey: 'a' }) + + await boss.send(queue, null, { singletonKey: 'b' }) + + const [job1] = await boss.fetch(queue) + + assert(job1) + + const [job2] = await boss.fetch(queue) + + assert(job2) + + await boss.send(queue, null, { singletonKey: 'b' }) + + const [job3] = await boss.fetch(queue) + + assert(!job3) + + await boss.complete(queue, job2.id) + + const [job3b] = await boss.fetch(queue) + + assert(job3b) + }) + + it('stately policy only allows 1 job per state up to active', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + + await boss.createQueue(queue, { policy: 'stately' }) + + const jobId1 = await boss.send(queue, null, { retryLimit: 1 }) + + const blockedId = await boss.send(queue) + + assert.strictEqual(blockedId, null) + + let [job1] = await boss.fetch(queue) + + await boss.fail(queue, job1.id) + + job1 = await boss.getJobById(queue, jobId1) + + assert.strictEqual(job1.state, 'retry') + + const jobId2 = await boss.send(queue, null, { retryLimit: 1 }) + + assert(jobId2) + + await boss.fetch(queue) + + const job1a = await boss.getJobById(queue, jobId1) + + assert.strictEqual(job1a.state, 'active') + + const [blockedSecondActive] = await boss.fetch(queue) + + assert(!blockedSecondActive) + }) + + it('stately policy should be extended with singletonKey', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + const queue = this.test.bossConfig.schema + 
+ await boss.createQueue(queue, { policy: 'stately' }) + + const jobAId = await boss.send(queue, null, { singletonKey: 'a', retryLimit: 1 }) + + assert(jobAId) + + const jobBId = await boss.send(queue, null, { singletonKey: 'b', retryLimit: 1 }) + + assert(jobBId) + + const jobA2Id = await boss.send(queue, null, { singletonKey: 'a', retryLimit: 1 }) + + assert.strictEqual(jobA2Id, null) + + let [jobA] = await boss.fetch(queue) + + await boss.fail(queue, jobA.id) + + jobA = await boss.getJobById(queue, jobAId) + + assert.strictEqual(jobA.state, 'retry') + + await boss.fetch(queue) + + jobA = await boss.getJobById(queue, jobAId) + + assert.strictEqual(jobA.state, 'active') + + let [jobB] = await boss.fetch(queue) + + assert(jobB) + + jobB = await boss.getJobById(queue, jobBId) + + assert.strictEqual(jobB.state, 'active') + + const jobA3Id = await boss.send(queue, null, { singletonKey: 'a' }) + + assert(jobA3Id) + + const [jobA3] = await boss.fetch(queue) + + assert(!jobA3) + }) + + it('should clear a specific queue', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) + + const queue1 = `${this.test.bossConfig.schema}1` + const queue2 = `${this.test.bossConfig.schema}2` + + await boss.createQueue(queue1) + await boss.send(queue1) + + await boss.createQueue(queue2) + await boss.send(queue2) + + const q1Count1 = await boss.getQueueSize(queue1) + const q2Count1 = await boss.getQueueSize(queue2) + + assert.strictEqual(1, q1Count1) + assert.strictEqual(1, q2Count1) + + await boss.purgeQueue(queue1) + + const q1Count2 = await boss.getQueueSize(queue1) + const q2Count2 = await boss.getQueueSize(queue2) + + assert.strictEqual(0, q1Count2) + assert.strictEqual(1, q2Count2) + + await boss.purgeQueue(queue2) + + const q2Count3 = await boss.getQueueSize(queue2) + + assert.strictEqual(0, q2Count3) + }) +}) diff --git a/test/readme.js b/test/readme.js index cac01c37..d132b672 100644 --- a/test/readme.js +++ 
b/test/readme.js @@ -1,4 +1,5 @@ const helper = require('./testHelper') +const { delay } = require('../src/tools') async function readme () { const PgBoss = require('../src') @@ -8,17 +9,21 @@ async function readme () { await boss.start() - const queue = 'some-queue' + const queue = 'readme-queue' - await boss.schedule(queue, '* * * * *') + await boss.deleteQueue(queue) + await boss.createQueue(queue) - console.log(`created cronjob in queue ${queue}`) + const id = await boss.send(queue, { arg1: 'read me' }) - await boss.work(queue, someAsyncJobHandler) -} + console.log(`created job ${id} in queue ${queue}`) + + await boss.work(queue, async ([job]) => { + console.log(`received job ${job.id} with data ${JSON.stringify(job.data)}`) + }) -async function someAsyncJobHandler (job) { - console.log(`running job ${job.id}`) + await delay(2000) + await boss.stop() } readme() diff --git a/test/resumeTest.js b/test/resumeTest.js index a6d5797b..bedc55cc 100644 --- a/test/resumeTest.js +++ b/test/resumeTest.js @@ -14,29 +14,29 @@ describe('cancel', function () { }) it('should cancel and resume a pending job', async function () { - const config = this.test.bossConfig - const boss = this.test.boss = await helper.start(config) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const jobId = await boss.send('will_cancel', null, { startAfter: 1 }) + const jobId = await boss.send(queue, null, { startAfter: 1 }) - await boss.cancel(jobId) + await boss.cancel(queue, jobId) - const job = await boss.getJobById(jobId) + const job = await boss.getJobById(queue, jobId) assert(job && job.state === 'cancelled') - await boss.resume(jobId) + await boss.resume(queue, jobId) - const job2 = await boss.getJobById(jobId) + const job2 = await boss.getJobById(queue, jobId) assert(job2 && job2.state === 'created') }) it('should cancel and resume a pending job with custom connection', async function () { - const config = 
this.test.bossConfig - const boss = this.test.boss = await helper.start(config) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const jobId = await boss.send('will_cancel', null, { startAfter: 1 }) + const jobId = await boss.send(queue, null, { startAfter: 1 }) let callCount = 0 const _db = await helper.getDb() @@ -47,15 +47,15 @@ describe('cancel', function () { } } - await boss.cancel(jobId, { db }) + await boss.cancel(queue, jobId, { db }) - const job = await boss.getJobById(jobId, { db }) + const job = await boss.getJobById(queue, jobId, { db }) assert(job && job.state === 'cancelled') - await boss.resume(jobId, { db }) + await boss.resume(queue, jobId, { db }) - const job2 = await boss.getJobById(jobId, { db }) + const job2 = await boss.getJobById(queue, jobId, { db }) assert(job2 && job2.state === 'created') assert.strictEqual(callCount, 4) diff --git a/test/retryTest.js b/test/retryTest.js index 13f5dbb4..aaf6b647 100644 --- a/test/retryTest.js +++ b/test/retryTest.js @@ -1,118 +1,89 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') +const { delay } = require('../src/tools') describe('retries', function () { - const defaults = { maintenanceIntervalSeconds: 1 } - it('should retry a job that didn\'t complete', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'unreliable' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const jobId = await boss.send({ name: queue, options: { expireInSeconds: 1, retryLimit: 1 } }) - const try1 = await boss.fetch(queue) + const [try1] = await boss.fetch(queue) - await delay(5000) + await delay(1000) + await boss.maintain() - const try2 = await boss.fetch(queue) + const [try2] = await boss.fetch(queue) assert.strictEqual(try1.id, jobId) 
assert.strictEqual(try2.id, jobId) }) it('should retry a job that failed', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queueName = 'retryFailed' - const retryLimit = 1 + const jobId = await boss.send(queue, null, { retryLimit: 1 }) - const jobId = await boss.send(queueName, null, { retryLimit }) - - await boss.fetch(queueName) - await boss.fail(jobId) + await boss.fetch(queue) + await boss.fail(queue, jobId) - const job = await boss.fetch(queueName) + const [job] = await boss.fetch(queue) assert.strictEqual(job.id, jobId) }) it('should retry a job that failed with cascaded config', async function () { - const retryLimit = 1 - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults, retryLimit }) - - const queueName = 'retryFailed-config-cascade' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, retryLimit: 1 }) + const queue = this.test.bossConfig.schema - const jobId = await boss.send(queueName) + const jobId = await boss.send(queue) - await boss.fetch(queueName) - await boss.fail(jobId) + await boss.fetch(queue) + await boss.fail(queue, jobId) - const job = await boss.fetch(queueName) + const [job] = await boss.fetch(queue) assert.strictEqual(job.id, jobId) }) it('should retry with a fixed delay', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'retryDelayFixed' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const jobId = await boss.send(queue, null, { retryLimit: 1, retryDelay: 1 }) await boss.fetch(queue) - await boss.fail(jobId) + await boss.fail(queue, jobId) - const job1 = await boss.fetch(queue) + const [job1] = await boss.fetch(queue) - 
assert.strictEqual(job1, null) + assert(!job1) await delay(1000) - const job2 = await boss.fetch(queue) + const [job2] = await boss.fetch(queue) assert(job2) }) it('should retry with a exponential backoff', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'retryDelayBackoff' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema let processCount = 0 const retryLimit = 4 - await boss.work(queue, { newJobCheckInterval: 500 }, async () => { + await boss.work(queue, { pollingIntervalSeconds: 1 }, async () => { ++processCount throw new Error('retry') }) - await boss.send(queue, null, { retryLimit, retryBackoff: true }) + await boss.send(queue, null, { retryLimit, retryDelay: 2, retryBackoff: true }) - await delay(9000) + await delay(8000) assert(processCount < retryLimit) }) - - it('should set the default retry limit to 1 if missing', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, ...defaults }) - - const queue = 'retryLimitDefault' - - const jobId = await boss.send(queue, null, { retryDelay: 1 }) - - await boss.fetch(queue) - await boss.fail(jobId) - - const job1 = await boss.fetch(queue) - - assert.strictEqual(job1, null) - - await delay(1000) - - const job2 = await boss.fetch(queue) - - assert(job2) - }) }) diff --git a/test/scheduleTest.js b/test/scheduleTest.js index c3c3fde5..5ce049d6 100644 --- a/test/scheduleTest.js +++ b/test/scheduleTest.js @@ -1,108 +1,99 @@ -const delay = require('delay') +const { delay } = require('../src/tools') const assert = require('assert') const { DateTime } = require('luxon') const helper = require('./testHelper') const plans = require('../src/plans') const PgBoss = require('../') -const ASSERT_DELAY = 4000 +const ASSERT_DELAY = 3000 describe('schedule', function () { it('should send job based on every minute expression', async 
function () { const config = { ...this.test.bossConfig, - cronWorkerIntervalSeconds: 1 + cronMonitorIntervalSeconds: 1, + cronWorkerIntervalSeconds: 1, + noDefault: true } - const boss = this.test.boss = await helper.start(config) - + let boss = await helper.start(config) const queue = this.test.bossConfig.schema + await boss.createQueue(queue) await boss.schedule(queue, '* * * * *') + await boss.stop({ graceful: false }) - await delay(ASSERT_DELAY) + boss = await helper.start({ ...config, schedule: true }) + + await delay(2000) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert(job) + + await boss.stop({ graceful: false }) }) - it('should accept a custom clock monitoring interval in seconds', async function () { + it('should not enable scheduling if archive config is < 60s', async function () { const config = { ...this.test.bossConfig, - clockMonitorIntervalSeconds: 1, - cronWorkerIntervalSeconds: 1 + cronWorkerIntervalSeconds: 1, + cronMonitorIntervalSeconds: 1, + archiveCompletedAfterSeconds: 1, + schedule: true } const boss = this.test.boss = await helper.start(config) - const queue = this.test.bossConfig.schema await boss.schedule(queue, '* * * * *') await delay(ASSERT_DELAY) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert(job) + assert(!job) }) - it('cron monitoring should restart cron if paused', async function () { - const config = { - ...this.test.bossConfig, - cronMonitorIntervalSeconds: 1, - cronWorkerIntervalSeconds: 1 - } - - const boss = this.test.boss = await helper.start(config) - + it('should fail to schedule a queue that does not exist', async function () { + const boss = await helper.start({ ...this.test.bossConfig, noDefault: true }) const queue = this.test.bossConfig.schema - const { schema } = this.test.bossConfig - const db = await helper.getDb() - await db.executeSql(plans.clearStorage(schema)) - await db.executeSql(plans.setCronTime(schema, "now() - interval '1 
hour'")) - - await boss.schedule(queue, '* * * * *') - - await delay(ASSERT_DELAY) - - const job = await boss.fetch(queue) - - assert(job) + try { + await boss.schedule(queue, '* * * * *') + assert(false) + } catch (err) { + assert(true) + } }) it('should send job based on every minute expression after a restart', async function () { - const config = { - ...this.test.bossConfig, - cronMonitorIntervalSeconds: 1, - noScheduling: true, - noSupervisor: true - } - - let boss = await helper.start(config) + let boss = await helper.start({ ...this.test.bossConfig, schedule: false, noDefault: true }) const queue = this.test.bossConfig.schema + await boss.createQueue(queue) + await boss.schedule(queue, '* * * * *') - await boss.stop() + await delay(ASSERT_DELAY) + + await boss.stop({ graceful: false }) - boss = await helper.start({ ...this.test.bossConfig, cronWorkerIntervalSeconds: 1 }) + boss = await helper.start({ ...this.test.bossConfig, cronWorkerIntervalSeconds: 1, schedule: true, noDefault: true }) await delay(ASSERT_DELAY) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert(job) - await boss.stop() + await boss.stop({ graceful: false }) }) it('should remove previously scheduled job', async function () { const config = { ...this.test.bossConfig, - noSupervisor: true, cronWorkerIntervalSeconds: 1 } const boss = this.test.boss = await helper.start(config) @@ -122,15 +113,17 @@ describe('schedule', function () { await delay(ASSERT_DELAY) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert(job === null) + assert(!job) }) it('should send job based on current minute in UTC', async function () { const config = { ...this.test.bossConfig, - cronWorkerIntervalSeconds: 1 + cronMonitorIntervalSeconds: 1, + cronWorkerIntervalSeconds: 1, + schedule: true } const boss = this.test.boss = await helper.start(config) @@ -155,9 +148,9 @@ describe('schedule', function () { await boss.schedule(queue, cron) - await 
delay(ASSERT_DELAY) + await delay(6000) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert(job) }) @@ -165,7 +158,9 @@ describe('schedule', function () { it('should send job based on current minute in a specified time zone', async function () { const config = { ...this.test.bossConfig, - cronWorkerIntervalSeconds: 1 + cronMonitorIntervalSeconds: 1, + cronWorkerIntervalSeconds: 1, + schedule: true } const boss = this.test.boss = await helper.start(config) @@ -194,7 +189,7 @@ describe('schedule', function () { await delay(ASSERT_DELAY) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert(job) }) @@ -202,6 +197,7 @@ describe('schedule', function () { it('should force a clock skew warning', async function () { const config = { ...this.test.bossConfig, + schedule: true, __test__force_clock_skew_warning: true } @@ -229,6 +225,7 @@ describe('schedule', function () { const config = { ...this.test.bossConfig, clockMonitorIntervalSeconds: 1, + schedule: true, __test__force_clock_monitoring_error: 'pg-boss mock error: clock skew monitoring' } @@ -252,6 +249,7 @@ describe('schedule', function () { const config = { ...this.test.bossConfig, cronMonitorIntervalSeconds: 1, + schedule: true, __test__force_cron_monitoring_error: 'pg-boss mock error: cron monitoring' } @@ -270,4 +268,28 @@ describe('schedule', function () { assert.strictEqual(errorCount, 1) }) + + it('clock monitoring error handling works', async function () { + const config = { + ...this.test.bossConfig, + schedule: true, + clockMonitorIntervalSeconds: 1, + __test__force_clock_monitoring_error: 'pg-boss mock error: clock monitoring' + } + + let errorCount = 0 + + const boss = this.test.boss = new PgBoss(config) + + boss.once('error', (error) => { + assert.strictEqual(error.message, config.__test__force_clock_monitoring_error) + errorCount++ + }) + + await boss.start() + + await delay(4000) + + assert.strictEqual(errorCount, 1) + }) }) diff --git 
a/test/sendTest.js b/test/sendTest.js index 4ba69300..e80c6f3c 100644 --- a/test/sendTest.js +++ b/test/sendTest.js @@ -36,44 +36,49 @@ describe('send', function () { }) it('should accept single string argument', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendNameOnly' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + await boss.send(queue) }) it('should accept job object argument with only name', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueOnly' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + await boss.send({ name: queue }) }) it('should accept job object with name and data only', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueAndData' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + const message = 'hi' await boss.send({ name: queue, data: { message } }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.strictEqual(message, job.data.message) }) it('should accept job object with name and options only', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueAndOptions' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + const options = { someCrazyOption: 'whatever' } await boss.send({ name: queue, options }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.strictEqual(job.data, null) }) it('should accept job object with name and custom connection', async function () { - const boss = this.test.boss = await 
helper.start(this.test.bossConfig) - const queue = 'sendqueueAndOptions' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema + let called = false const db = await helper.getDb() const options = { @@ -88,7 +93,7 @@ describe('send', function () { await boss.send({ name: queue, options }) - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) assert.notEqual(job, null) assert.strictEqual(job.data, null) @@ -96,11 +101,13 @@ describe('send', function () { }) it('should not create job if transaction fails', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'sendqueueAndOptions' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const { schema } = this.test.bossConfig + const queue = schema + const db = await helper.getDb() const client = db.pool - await client.query('CREATE TABLE IF NOT EXISTS test (label VARCHAR(50))') + await client.query(`CREATE TABLE IF NOT EXISTS ${schema}.test (label VARCHAR(50))`) const throwError = () => { throw new Error('Error!!') } @@ -114,7 +121,7 @@ describe('send', function () { }, someCrazyOption: 'whatever' } - const queryText = 'INSERT INTO test(label) VALUES(\'Test\')' + const queryText = `INSERT INTO ${schema}.test(label) VALUES('Test')` await client.query(queryText) await boss.send({ name: queue, options }) @@ -125,8 +132,8 @@ describe('send', function () { await client.query('ROLLBACK') } - const job = await boss.fetch(queue) + const [job] = await boss.fetch(queue) - assert.strictEqual(job, null) + assert(!job) }) }) diff --git a/test/singletonTest.js b/test/singletonTest.js deleted file mode 100644 index 10e17271..00000000 --- a/test/singletonTest.js +++ /dev/null @@ -1,157 +0,0 @@ -const assert = require('assert') -const { v4: uuid } = require('uuid') -const helper = require('./testHelper') - -describe('singleton', function () { - it('should not 
allow more than 1 pending job at a time with the same key', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'singleton-1-pending' - const singletonKey = 'a' - - const jobId = await boss.send(queue, null, { singletonKey }) - - assert(jobId) - - const jobId2 = await boss.send(queue, null, { singletonKey }) - - assert.strictEqual(jobId2, null) - }) - - it('should not allow more than 1 complete job with the same key with an interval', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'singleton-1-complete' - const singletonKey = 'a' - const singletonMinutes = 1 - - await boss.send(queue, null, { singletonKey, singletonMinutes }) - const job = await boss.fetch(queue) - - await boss.complete(job.id) - - const jobId = await boss.send(queue, null, { singletonKey, singletonMinutes }) - - assert.strictEqual(jobId, null) - }) - - it('should allow more than 1 pending job at the same time with different keys', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'singleton' - const jobId = await boss.send(queue, null, { singletonKey: 'a' }) - - assert(jobId) - - const jobId2 = await boss.send(queue, null, { singletonKey: 'b' }) - - assert(jobId2) - }) - - it('sendOnce() should work', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'sendOnce' - const key = 'only-once-plz' - - const jobId = await boss.sendOnce(queue, null, null, key) - - assert(jobId) - - const jobId2 = await boss.sendOnce(queue, null, null, key) - - assert.strictEqual(jobId2, null) - - const job = await boss.fetch(queue) - - assert.strictEqual(job.id, jobId) - - const jobId3 = await boss.sendOnce(queue, null, null, key) - - assert.strictEqual(jobId3, null) - }) - - it('sendOnce() without a key should also work', async function () { - const boss = this.test.boss = await 
helper.start(this.test.bossConfig) - - const queue = 'sendOnceNoKey' - const jobId = await boss.sendOnce(queue) - - assert(jobId) - - const jobId2 = await boss.sendOnce(queue) - - assert.strictEqual(jobId2, null) - }) - - it('sendSingleton() works', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = this.test.bossConfig.schema - - const jobId = await boss.sendSingleton(queue) - - assert(jobId) - - const jobId2 = await boss.sendSingleton(queue) - - assert.strictEqual(jobId2, null) - - const job = await boss.fetch(queue) - - assert.strictEqual(job.id, jobId) - - const jobId3 = await boss.sendSingleton(queue) - - assert(jobId3) - }) - - it('useSingletonQueue allows a second singleton job if first has enetered active state', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'singleton-queue-check' - const singletonKey = 'myKey' - - const jobId = await boss.send(queue, null, { singletonKey, useSingletonQueue: true }) - - assert(jobId) - - const jobId2 = await boss.send(queue, null, { singletonKey, useSingletonQueue: true }) - - assert.strictEqual(jobId2, null) - - const job = await boss.fetch(queue) - - assert.strictEqual(job.id, jobId) - - const jobId3 = await boss.send(queue, null, { singletonKey, useSingletonQueue: true }) - - assert(jobId3) - }) - - it('useSingletonQueue works when using insert', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const name = 'singleton-queue-check' - const singletonKey = 'myKey' - - const jobId = uuid() - await boss.insert([{ id: jobId, name, singletonKey, useSingletonQueue: true }]) - - assert(await boss.getJobById(jobId)) - - const jobId2 = uuid() - await boss.insert([{ id: jobId2, name, singletonKey, useSingletonQueue: true }]) - - assert.strictEqual(await boss.getJobById(jobId2), null) - - const job = await boss.fetch(name) - - assert.strictEqual(job.id, jobId) - 
- const jobId3 = uuid() - await boss.insert([{ id: jobId3, name, singletonKey, useSingletonQueue: true }]) - - assert(await boss.getJobById(jobId3)) - }) -}) diff --git a/test/speedTest.js b/test/speedTest.js index f66bc933..5a28495c 100644 --- a/test/speedTest.js +++ b/test/speedTest.js @@ -1,31 +1,25 @@ const helper = require('./testHelper') -const pMap = require('p-map') +const assert = require('assert') describe('speed', function () { - const expectedSeconds = 2 - const jobCount = 10000 + const expectedSeconds = 9 + const jobCount = 10_000 const queue = 'speedTest' - - const jobs = new Array(jobCount).fill(null).map((item, index) => ({ name: queue, data: { index } })) - + const data = new Array(jobCount).fill(null).map((item, index) => ({ name: queue, data: { index } })) const testTitle = `should be able to fetch and complete ${jobCount} jobs in ${expectedSeconds} seconds` - let boss - - beforeEach(async function () { - const defaults = { noSupervisor: true, min: 10, max: 10 } - boss = await helper.start({ ...this.currentTest.bossConfig, ...defaults }) - await pMap(jobs, job => boss.send(job.name, job.data)) - }) - - afterEach(async function () { await helper.stop(boss) }) - it(testTitle, async function () { this.timeout(expectedSeconds * 1000) this.slow(0) - this.retries(1) - const jobs = await boss.fetch(queue, jobCount) - await boss.complete(jobs.map(job => job.id)) + const config = { ...this.test.bossConfig, min: 10, max: 10, noDefault: true } + const boss = this.test.boss = await helper.start(config) + await boss.createQueue(queue) + await boss.insert(data) + const jobs = await boss.fetch(queue, { batchSize: jobCount }) + + assert.strictEqual(jobCount, jobs.length) + + await boss.complete(queue, jobs.map(job => job.id)) }) }) diff --git a/test/testHelper.js b/test/testHelper.js index 16bf6b61..b6c1a56d 100644 --- a/test/testHelper.js +++ b/test/testHelper.js @@ -1,19 +1,15 @@ const Db = require('../src/db') const PgBoss = require('../') -const plans = 
require('../src/plans') -const { COMPLETION_JOB_PREFIX } = plans const crypto = require('crypto') const sha1 = (value) => crypto.createHash('sha1').update(value).digest('hex') module.exports = { dropSchema, start, - stop, getDb, getArchivedJobById, countJobs, findJobs, - COMPLETION_JOB_PREFIX, getConfig, getConnectionString, tryCreateDb, @@ -34,11 +30,15 @@ function getConfig (options = {}) { config.password = process.env.POSTGRES_PASSWORD || config.password if (options.testKey) { - config.schema = `pgboss${sha1(options.testKey).slice(-10)}` + config.schema = `pgboss${sha1(options.testKey)}` } config.schema = config.schema || 'pgboss' + config.supervise = false + config.schedule = false + config.retryLimit = 0 + const result = { ...config } return Object.assign(result, options) @@ -48,27 +48,20 @@ async function init () { const { database } = getConfig() await tryCreateDb(database) - await createPgCrypto(database) } -async function getDb (database) { +async function getDb ({ database, debug } = {}) { const config = getConfig() config.database = database || config.database - const db = new Db(config) + const db = new Db({ ...config, debug }) await db.open() return db } -async function createPgCrypto (database) { - const db = await getDb(database) - await db.executeSql('create extension if not exists pgcrypto') - await db.close() -} - async function dropSchema (schema) { const db = await getDb() await db.executeSql(`DROP SCHEMA IF EXISTS ${schema} CASCADE`) @@ -82,8 +75,8 @@ async function findJobs (schema, where, values) { return jobs } -async function getArchivedJobById (schema, id) { - const response = await findArchivedJobs(schema, 'id = $1', [id]) +async function getArchivedJobById (schema, name, id) { + const response = await findArchivedJobs(schema, 'name = $1 AND id = $2', [name, id]) return response.rows.length ? 
response.rows[0] : null } @@ -102,7 +95,7 @@ async function countJobs (schema, where, values) { } async function tryCreateDb (database) { - const db = await getDb('postgres') + const db = await getDb({ database: 'postgres' }) try { await db.executeSql(`CREATE DATABASE ${database}`) @@ -115,8 +108,12 @@ async function start (options) { try { options = getConfig(options) const boss = new PgBoss(options) - boss.on('error', err => console.log({ schema: options.schema, message: err.message })) + // boss.on('error', err => console.log({ schema: options.schema, message: err.message })) await boss.start() + // auto-create queue for tests + if (!options.noDefault) { + await boss.createQueue(options.schema) + } return boss } catch (err) { // this is nice for occaisional debugging, Mr. Linter @@ -125,7 +122,3 @@ async function start (options) { } } } - -async function stop (boss, timeout = 4000) { - await boss.stop({ timeout }) -} diff --git a/test/throttleTest.js b/test/throttleTest.js index ff77afe2..a3b46d97 100644 --- a/test/throttleTest.js +++ b/test/throttleTest.js @@ -1,35 +1,29 @@ const assert = require('assert') const helper = require('./testHelper') -const delay = require('delay') +const { delay } = require('../src/tools') describe('throttle', function () { - it('should only create 1 job for interval with a delay', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'delayThrottle' - const singletonSeconds = 4 - const startAfter = 2 - const sendInterval = 200 - const sendCount = 5 - - let processCount = 0 + it('should only create 1 job for interval', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - boss.work(queue, async () => processCount++) + const singletonSeconds = 2 + const sendCount = 4 for (let i = 0; i < sendCount; i++) { - await boss.send(queue, null, { startAfter, singletonSeconds }) - await 
delay(sendInterval) + await boss.send(queue, null, { singletonSeconds }) + await delay(1000) } - await delay(singletonSeconds * 1000) + const { length } = await boss.fetch(queue, { batchSize: sendCount }) - assert(processCount <= 2) + assert(length < sendCount) }) it('should process at most 1 job per second', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queue = 'throttle-1ps' const singletonSeconds = 1 const jobCount = 3 const sendInterval = 100 @@ -51,9 +45,8 @@ describe('throttle', function () { }) it('should debounce', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'debounce' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const jobId = await boss.send(queue, null, { singletonHours: 1 }) @@ -65,9 +58,9 @@ describe('throttle', function () { }) it('should debounce via sendDebounced()', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queue = 'sendDebounced' const seconds = 60 const jobId = await boss.sendDebounced(queue, null, null, seconds) @@ -84,9 +77,8 @@ describe('throttle', function () { }) it('should reject 2nd request in the same time slot', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'throttle-reject-2nd' + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema const jobId1 = await boss.send(queue, null, { singletonHours: 1 }) @@ -98,9 +90,9 @@ describe('throttle', function () { }) it('should throttle via sendThrottled()', async function 
() { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - const queue = 'throttle-reject-2nd-sendThrottled' const seconds = 60 const jobId1 = await boss.sendThrottled(queue, null, null, seconds) @@ -111,4 +103,21 @@ describe('throttle', function () { assert.strictEqual(jobId2, null) }) + + it('should not allow more than 1 complete job with the same key with an interval', async function () { + const boss = this.test.boss = await helper.start(this.test.bossConfig) + const queue = this.test.bossConfig.schema + + const singletonKey = 'a' + const singletonMinutes = 1 + + await boss.send(queue, null, { singletonKey, singletonMinutes }) + const [job] = await boss.fetch(queue) + + await boss.complete(queue, job.id) + + const jobId = await boss.send(queue, null, { singletonKey, singletonMinutes }) + + assert.strictEqual(jobId, null) + }) }) diff --git a/test/wildcardTest.js b/test/wildcardTest.js deleted file mode 100644 index 73cd1161..00000000 --- a/test/wildcardTest.js +++ /dev/null @@ -1,43 +0,0 @@ -const assert = require('assert') -const helper = require('./testHelper') - -describe('wildcard', function () { - it('fetch() should return all jobs using a wildcard pattern', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - - await boss.send(`${queue}_1234`) - await boss.send(`${queue}_5678`) - - const jobs = await boss.fetch(`${queue}_*`, 2) - - assert.strictEqual(jobs.length, 2) - }) - - it('work() should return all jobs using a wildcard pattern', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - - await boss.send(`${queue}_1234`) - await boss.send(`${queue}_5678`) - - return new Promise((resolve) => { - boss.work(`${queue}_*`, { batchSize: 2 }, jobs => 
{ - assert.strictEqual(jobs.length, 2) - resolve() - }) - }) - }) - - it('should not accidentally fetch state completion jobs from a pattern', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = this.test.bossConfig.schema - - await boss.send(`${queue}_1234`) - const job = await boss.fetch(`${queue}_*`) - await boss.complete(job.id) - const job2 = await boss.fetch(`${queue}_*`) - - assert.strictEqual(job2, null) - }) -}) diff --git a/test/workTest.js b/test/workTest.js index 3d5d4fe3..8ae97ccf 100644 --- a/test/workTest.js +++ b/test/workTest.js @@ -1,7 +1,6 @@ -const delay = require('delay') +const { delay } = require('../src/tools') const assert = require('assert') const helper = require('./testHelper') -const PgBoss = require('../') describe('work', function () { it('should fail with no arguments', async function () { @@ -48,11 +47,11 @@ describe('work', function () { } }) - it('should honor a custom new job check interval', async function () { + it('should honor a custom polling interval', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema - const newJobCheckIntervalSeconds = 1 + const pollingIntervalSeconds = 1 const timeout = 5000 let processCount = 0 const jobCount = 10 @@ -61,11 +60,11 @@ describe('work', function () { await boss.send(queue) } - await boss.work(queue, { newJobCheckIntervalSeconds }, () => processCount++) + await boss.work(queue, { pollingIntervalSeconds }, () => processCount++) await delay(timeout) - assert.strictEqual(processCount, timeout / 1000 / newJobCheckIntervalSeconds) + assert.strictEqual(processCount, timeout / 1000 / pollingIntervalSeconds) }) it('should honor when a worker is notified', async function () { @@ -73,18 +72,21 @@ describe('work', function () { const queue = this.test.bossConfig.schema let processCount = 0 - const newJobCheckIntervalSeconds = 5 await boss.send(queue) - const 
workerId = await boss.work(queue, { newJobCheckIntervalSeconds }, () => processCount++) - await delay(100) + const workerId = await boss.work(queue, { pollingIntervalSeconds: 5 }, () => processCount++) + + await delay(500) + assert.strictEqual(processCount, 1) + await boss.send(queue) boss.notifyWorker(workerId) - await delay(100) + await delay(500) + assert.strictEqual(processCount, 2) }) @@ -116,7 +118,7 @@ describe('work', function () { await boss.send(queue) await boss.send(queue) - const id = await boss.work(queue, { newJobCheckInterval: 500 }, async () => { + const id = await boss.work(queue, { pollingIntervalSeconds: 0.5 }, async () => { receivedCount++ await boss.offWork({ id }) }) @@ -126,61 +128,10 @@ describe('work', function () { assert.strictEqual(receivedCount, 1) }) - it('should handle a batch of jobs via teamSize', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'process-teamSize' - const teamSize = 4 - - let processCount = 0 - - for (let i = 0; i < teamSize; i++) { - await boss.send(queue) - } - - return new Promise((resolve, reject) => { - boss.work(queue, { teamSize }, async () => { - processCount++ - - // test would time out if it had to wait for 4 fetch intervals - if (processCount === teamSize) { - resolve() - } - }).catch(reject) - }) - }) - - it('should apply teamConcurrency option', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'process-teamConcurrency' - const teamSize = 4 - const teamConcurrency = 4 - - let processCount = 0 - - for (let i = 0; i < teamSize; i++) { - await boss.send(queue) - } - - return new Promise((resolve) => { - boss.work(queue, { teamSize, teamConcurrency }, async () => { - processCount++ - - if (processCount === teamSize) { - resolve() - } - - // test would time out if it had to wait for each handler to resolve - await delay(4000) - }) - }) - }) - it('should handle a batch of jobs via 
batchSize', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) + const queue = this.test.bossConfig.schema - const queue = 'process-batchSize' const batchSize = 4 for (let i = 0; i < batchSize; i++) { @@ -199,7 +150,7 @@ describe('work', function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema - await boss.send(queue, null, { onComplete: true }) + const jobId = await boss.send(queue) await new Promise((resolve) => { boss.work(queue, { batchSize: 1 }, async jobs => { @@ -208,16 +159,16 @@ describe('work', function () { }) }) - await delay(2000) + await delay(500) - const result = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert(result) + assert.strictEqual(job.state, 'completed') }) it('returning promise applies backpressure', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'backpressure' + const queue = this.test.bossConfig.schema const jobCount = 4 let processCount = 0 @@ -237,110 +188,57 @@ describe('work', function () { assert(processCount < jobCount) }) - it('top up jobs when at least one job in team is still running', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + it('completion should pass string wrapped in value prop', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) const queue = this.test.bossConfig.schema - this.timeout(1000) + const result = 'success' - const teamSize = 4 - const teamConcurrency = 2 + const jobId = await boss.send(queue) - let processCount = 0 + await boss.work(queue, async () => result) - for (let i = 0; i < 6; i++) { - await boss.send(queue) - } + await delay(1000) - const newJobCheckInterval = 100 + const job = await boss.getJobById(queue, jobId) - return new Promise((resolve) => { - boss.work(queue, { teamSize, teamConcurrency, 
newJobCheckInterval, teamRefill: true }, async () => { - processCount++ - if (processCount === 1) { - // Test would timeout if all were blocked on this first - // process - await new Promise(resolve => setTimeout(resolve, 500)) - return - } - - if (processCount === 6) { - resolve() - } - }) - }) + assert.strictEqual(job.state, 'completed') + assert.strictEqual(job.output.value, result) }) - it('does not fetch more than teamSize', async function () { - const boss = this.test.boss = await helper.start(this.test.bossConfig) + it('handler result should be stored in output', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) const queue = this.test.bossConfig.schema - const teamSize = 4 - const teamConcurrency = 2 - const newJobCheckInterval = 200 - let processCount = 0 - let remainCount = 0 - - for (let i = 0; i < 7; i++) { - await boss.send(queue) - } - - // This should consume 5 jobs, all will block after the first job - await boss.work(queue, { teamSize, teamConcurrency, newJobCheckInterval, teamRefill: true }, async () => { - processCount++ - if (processCount > 1) await new Promise(resolve => setTimeout(resolve, 1000)) - }) - - await new Promise(resolve => setTimeout(resolve, 400)) - - // this should pick up the last 2 jobs - await boss.work(queue, { teamSize, teamConcurrency, newJobCheckInterval, teamRefill: true }, async () => { - remainCount++ - }) - - await new Promise(resolve => setTimeout(resolve, 400)) - - assert(remainCount === 2) - }) - - it('completion should pass string wrapped in value prop', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const queue = 'processCompletionString' - const result = 'success' + const something = 'clever' - boss.work(queue, async job => result) + const jobId = await boss.send(queue) + await boss.work(queue, async () => ({ something })) - await boss.send(queue) + await delay(1000) - await delay(8000) + 
const job = await boss.getJobById(queue, jobId) - const job = await boss.fetchCompleted(queue) - - assert.strictEqual(job.data.state, 'completed') - assert.strictEqual(job.data.response.value, result) + assert.strictEqual(job.state, 'completed') + assert.strictEqual(job.output.something, something) }) - it('completion via Promise resolve() should pass object payload', async function () { - const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, onComplete: true }) - - const queue = 'processCompletionObject' - const something = 'clever' - - boss.work(queue, async job => ({ something })) + it('job cab be deleted in handler', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig }) + const queue = this.test.bossConfig.schema - await boss.send(queue) + const jobId = await boss.send(queue) + await boss.work(queue, async ([job]) => boss.deleteJob(queue, job.id)) - await delay(8000) + await delay(1000) - const job = await boss.fetchCompleted(queue) + const job = await boss.getJobById(queue, jobId) - assert.strictEqual(job.data.state, 'completed') - assert.strictEqual(job.data.response.something, something) + assert(!job) }) it('should allow multiple workers to the same queue per instance', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) - const queue = 'multiple-workers' + const queue = this.test.bossConfig.schema await boss.work(queue, () => {}) await boss.work(queue, () => {}) @@ -348,28 +246,20 @@ describe('work', function () { it('should honor the includeMetadata option', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) - - const queue = 'process-includeMetadata' + const queue = this.test.bossConfig.schema await boss.send(queue) return new Promise((resolve) => { - boss.work(queue, { includeMetadata: true }, async job => { - assert(job.startedon !== undefined) + boss.work(queue, { includeMetadata: true }, async ([job]) => { 
+ assert(job.startedOn !== undefined) resolve() }) }) }) - it('should fail job at expiration without maintenance', async function () { - const boss = this.test.boss = new PgBoss(this.test.bossConfig) - - const maintenanceTick = new Promise((resolve) => boss.on('maintenance', resolve)) - - await boss.start() - - await maintenanceTick - + it('should fail job at expiration in worker', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, supervise: false }) const queue = this.test.bossConfig.schema const jobId = await boss.send(queue, null, { expireInSeconds: 1 }) @@ -378,21 +268,14 @@ describe('work', function () { await delay(2000) - const job = await boss.getJobById(jobId) + const job = await boss.getJobById(queue, jobId) assert.strictEqual(job.state, 'failed') assert(job.output.message.includes('handler execution exceeded')) }) - it('should fail a batch of jobs at expiration without maintenance', async function () { - const boss = this.test.boss = new PgBoss(this.test.bossConfig) - - const maintenanceTick = new Promise((resolve) => boss.on('maintenance', resolve)) - - await boss.start() - - await maintenanceTick - + it('should fail a batch of jobs at expiration in worker', async function () { + const boss = this.test.boss = await helper.start({ ...this.test.bossConfig, supervise: false }) const queue = this.test.bossConfig.schema const jobId1 = await boss.send(queue, null, { expireInSeconds: 1 }) @@ -402,8 +285,8 @@ describe('work', function () { await delay(2000) - const job1 = await boss.getJobById(jobId1) - const job2 = await boss.getJobById(jobId2) + const job1 = await boss.getJobById(queue, jobId1) + const job2 = await boss.getJobById(queue, jobId2) assert.strictEqual(job1.state, 'failed') assert(job1.output.message.includes('handler execution exceeded')) @@ -419,32 +302,33 @@ describe('work', function () { const firstWipEvent = new Promise(resolve => boss.once('wip', resolve)) await boss.send(queue) - await 
boss.work(queue, () => delay(1000)) + + await boss.work(queue, { pollingIntervalSeconds: 1 }, () => delay(2000)) const wip1 = await firstWipEvent + await boss.send(queue) + assert.strictEqual(wip1.length, 1) const secondWipEvent = new Promise(resolve => boss.once('wip', resolve)) const wip2 = await secondWipEvent - assert.strictEqual(wip2.length, 0) + assert.strictEqual(wip2.length, 1) }) it('should reject work() after stopping', async function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema - boss.stop({ timeout: 1 }) - - await delay(500) + await boss.stop({ wait: true }) try { - await boss.work(queue) + await boss.work(queue, () => {}) assert(false) } catch (err) { - assert(err.message.includes('stopping')) + assert(true) } }) @@ -452,9 +336,7 @@ describe('work', function () { const boss = this.test.boss = await helper.start(this.test.bossConfig) const queue = this.test.bossConfig.schema - boss.stop({ timeout: 1 }) - - await delay(500) + boss.stop({ wait: true }) await boss.send(queue) }) diff --git a/types.d.ts b/types.d.ts index 2ffab407..9e8c43ea 100644 --- a/types.d.ts +++ b/types.d.ts @@ -1,8 +1,24 @@ import { EventEmitter } from 'events' declare namespace PgBoss { + + type JobStates = { + created : 'created', + retry: 'retry', + active: 'active', + completed: 'completed', + cancelled: 'cancelled', + failed: 'failed' + } + + type QueuePolicies = { + standard: 'standard' + short: 'short', + singleton: 'singleton', + stately: 'stately' + } interface Db { - executeSql(text: string, values: any[]): Promise<{ rows: any[]; rowCount: number }>; + executeSql(text: string, values: any[]): Promise<{ rows: any[] }>; } interface DatabaseOptions { @@ -20,20 +36,20 @@ declare namespace PgBoss { } interface QueueOptions { - uuid?: "v1" | "v4"; monitorStateIntervalSeconds?: number; monitorStateIntervalMinutes?: number; } interface SchedulingOptions { - noScheduling?: boolean; + schedule?: boolean; 
clockMonitorIntervalSeconds?: number; clockMonitorIntervalMinutes?: number; } interface MaintenanceOptions { - noSupervisor?: boolean; + supervise?: boolean; + migrate?: boolean; deleteAfterSeconds?: number; deleteAfterMinutes?: number; @@ -56,11 +72,6 @@ declare namespace PgBoss { & RetentionOptions & RetryOptions & JobPollingOptions - & CompletionOptions - - interface CompletionOptions { - onComplete?: boolean; - } interface ExpirationOptions { expireInSeconds?: number; @@ -82,67 +93,49 @@ declare namespace PgBoss { } interface JobOptions { + id?: string, priority?: number; startAfter?: number | string | Date; singletonKey?: string; - useSingletonQueue?: boolean; singletonSeconds?: number; singletonMinutes?: number; singletonHours?: number; singletonNextSlot?: boolean; + deadLetter?: string; } interface ConnectionOptions { db?: Db; } - + type InsertOptions = ConnectionOptions; - - type SendOptions = JobOptions & ExpirationOptions & RetentionOptions & RetryOptions & CompletionOptions & ConnectionOptions; - + + type SendOptions = JobOptions & ExpirationOptions & RetentionOptions & RetryOptions & ConnectionOptions; + + type QueuePolicy = 'standard' | 'short' | 'singleton' | 'stately' + + type Queue = RetryOptions & ExpirationOptions & RetentionOptions & { name: string, policy?: QueuePolicy, deadLetter?: string } + type QueueResult = Queue & { createdOn: Date, updatedOn: Date } type ScheduleOptions = SendOptions & { tz?: string } interface JobPollingOptions { - newJobCheckInterval?: number; - newJobCheckIntervalSeconds?: number; + pollingIntervalSeconds?: number; } - interface CommonJobFetchOptions { + interface JobFetchOptions { includeMetadata?: boolean; - enforceSingletonQueueActiveLimit?: boolean; - } - - type JobFetchOptions = CommonJobFetchOptions & { - teamSize?: number; - teamConcurrency?: number; - teamRefill?: boolean; - } - - type BatchJobFetchOptions = CommonJobFetchOptions & { - batchSize: number; + priority?: boolean; + batchSize?: number; } type 
WorkOptions = JobFetchOptions & JobPollingOptions - type BatchWorkOptions = BatchJobFetchOptions & JobPollingOptions - - type FetchOptions = { - includeMetadata?: boolean; - enforceSingletonQueueActiveLimit?: boolean; - } & ConnectionOptions; + type FetchOptions = JobFetchOptions & ConnectionOptions; interface WorkHandler { - (job: PgBoss.Job): Promise; - } - - interface BatchWorkHandler { (job: PgBoss.Job[]): Promise; } interface WorkWithMetadataHandler { - (job: PgBoss.JobWithMetadata): Promise; - } - - interface BatchWorkWithMetadataHandler { (job: PgBoss.JobWithMetadata[]): Promise; } @@ -181,24 +174,26 @@ declare namespace PgBoss { data: T; } - interface JobWithMetadata extends Job { + interface JobWithMetadata { + id: string; + name: string; + data: T; priority: number; - state: 'created' | 'retry' | 'active' | 'completed' | 'expired' | 'cancelled' | 'failed'; - retrylimit: number; - retrycount: number; - retrydelay: number; - retrybackoff: boolean; - startafter: Date; - // This is nullable in the schema, but by the time this type is reified, - // it will have been set. 
- startedon: Date; - singletonkey: string | null; - singletonon: Date | null; - expirein: PostgresInterval; - createdon: Date; - completedon: Date | null; - keepuntil: Date; - oncomplete: boolean, + state: 'created' | 'retry' | 'active' | 'completed' | 'cancelled' | 'failed'; + retryLimit: number; + retryCount: number; + retryDelay: number; + retryBackoff: boolean; + startAfter: Date; + startedOn: Date; + singletonKey: string | null; + singletonOn: Date | null; + expireIn: PostgresInterval; + createdOn: Date; + completedOn: Date | null; + keepUntil: Date; + deadLetter: string, + policy: QueuePolicy, output: object } @@ -214,7 +209,7 @@ declare namespace PgBoss { singletonKey?: string; expireInSeconds?: number; keepUntil?: Date | string; - onComplete?: boolean + deadLetter?: string; } interface MonitorState { @@ -223,7 +218,6 @@ declare namespace PgBoss { retry: number; active: number; completed: number; - expired: number; cancelled: number; failed: number; } @@ -236,7 +230,7 @@ declare namespace PgBoss { id: string, name: string, options: WorkOptions, - state: 'created' | 'retry' | 'active' | 'completed' | 'expired' | 'cancelled' | 'failed', + state: 'created' | 'active' | 'stopping' | 'stopped' count: number, createdOn: Date, lastFetchedOn: Date, @@ -248,9 +242,10 @@ declare namespace PgBoss { } interface StopOptions { - destroy?: boolean, + close?: boolean, graceful?: boolean, - timeout?: number + timeout?: number, + wait?: boolean } interface OffWorkOptions { @@ -274,6 +269,9 @@ declare class PgBoss extends EventEmitter { static getRollbackPlans(schema: string): string; static getRollbackPlans(): string; + static states: PgBoss.JobStates + static policies: PgBoss.QueuePolicies + on(event: "error", handler: (error: Error) => void): this; off(event: "error", handler: (error: Error) => void): this; @@ -300,10 +298,6 @@ declare class PgBoss extends EventEmitter { sendAfter(name: string, data: object, options: PgBoss.SendOptions, dateString: string): Promise; 
sendAfter(name: string, data: object, options: PgBoss.SendOptions, seconds: number): Promise; - sendOnce(name: string, data: object, options: PgBoss.SendOptions, key: string): Promise; - - sendSingleton(name: string, data: object, options: PgBoss.SendOptions): Promise; - sendThrottled(name: string, data: object, options: PgBoss.SendOptions, seconds: number): Promise; sendThrottled(name: string, data: object, options: PgBoss.SendOptions, seconds: number, key: string): Promise; @@ -313,68 +307,59 @@ declare class PgBoss extends EventEmitter { insert(jobs: PgBoss.JobInsert[]): Promise; insert(jobs: PgBoss.JobInsert[], options: PgBoss.InsertOptions): Promise; + fetch(name: string): Promise[]>; + fetch(name: string, options: PgBoss.FetchOptions & { includeMetadata: true }): Promise[]>; + fetch(name: string, options: PgBoss.FetchOptions): Promise[]>; + work(name: string, handler: PgBoss.WorkHandler): Promise; work(name: string, options: PgBoss.WorkOptions & { includeMetadata: true }, handler: PgBoss.WorkWithMetadataHandler): Promise; work(name: string, options: PgBoss.WorkOptions, handler: PgBoss.WorkHandler): Promise; - work(name: string, options: PgBoss.BatchWorkOptions & { includeMetadata: true }, handler: PgBoss.BatchWorkWithMetadataHandler): Promise; - work(name: string, options: PgBoss.BatchWorkOptions, handler: PgBoss.BatchWorkHandler): Promise; - - onComplete(name: string, handler: Function): Promise; - onComplete(name: string, options: PgBoss.WorkOptions, handler: Function): Promise; - offWork(name: string): Promise; offWork(options: PgBoss.OffWorkOptions): Promise; - /** - * Notify worker that something has changed - * @param workerId - */ notifyWorker(workerId: string): void; subscribe(event: string, name: string): Promise; unsubscribe(event: string, name: string): Promise; - publish(event: string): Promise; - publish(event: string, data: object): Promise; - publish(event: string, data: object, options: PgBoss.SendOptions): Promise; - - offComplete(name: 
string): Promise; - offComplete(options: PgBoss.OffWorkOptions): Promise; - - fetch(name: string): Promise | null>; - fetch(name: string, batchSize: number): Promise[] | null>; - fetch(name: string, batchSize: number, options: PgBoss.FetchOptions & { includeMetadata: true }): Promise[] | null>; - fetch(name: string, batchSize: number, options: PgBoss.FetchOptions): Promise[] | null>; - - fetchCompleted(name: string): Promise | null>; - fetchCompleted(name: string, batchSize: number): Promise[] | null>; - fetchCompleted(name: string, batchSize: number, options: PgBoss.FetchOptions & { includeMetadata: true }): Promise[] | null>; - fetchCompleted(name: string, batchSize: number, options: PgBoss.FetchOptions): Promise[] | null>; - - cancel(id: string, options?: PgBoss.ConnectionOptions): Promise; - cancel(ids: string[], options?: PgBoss.ConnectionOptions): Promise; - - resume(id: string, options?: PgBoss.ConnectionOptions): Promise; - resume(ids: string[], options?: PgBoss.ConnectionOptions): Promise; - - complete(id: string, options?: PgBoss.ConnectionOptions): Promise; - complete(id: string, data: object, options?: PgBoss.ConnectionOptions): Promise; - complete(ids: string[], options?: PgBoss.ConnectionOptions): Promise; - - fail(id: string, options?: PgBoss.ConnectionOptions): Promise; - fail(id: string, data: object, options?: PgBoss.ConnectionOptions): Promise; - fail(ids: string[], options?: PgBoss.ConnectionOptions): Promise; - - getQueueSize(name: string, options?: object): Promise; - getJobById(id: string, options?: PgBoss.ConnectionOptions): Promise; - + publish(event: string): Promise; + publish(event: string, data: object): Promise; + publish(event: string, data: object, options: PgBoss.SendOptions): Promise; + + cancel(name: string, id: string, options?: PgBoss.ConnectionOptions): Promise; + cancel(name: string, ids: string[], options?: PgBoss.ConnectionOptions): Promise; + + resume(name: string, id: string, options?: PgBoss.ConnectionOptions): Promise; + 
resume(name: string, ids: string[], options?: PgBoss.ConnectionOptions): Promise; + + deleteJob(name: string, id: string, options?: PgBoss.ConnectionOptions): Promise; + deleteJob(name: string, ids: string[], options?: PgBoss.ConnectionOptions): Promise; + + complete(name: string, id: string, options?: PgBoss.ConnectionOptions): Promise; + complete(name: string, id: string, data: object, options?: PgBoss.ConnectionOptions): Promise; + complete(name: string, ids: string[], options?: PgBoss.ConnectionOptions): Promise; + + fail(name: string, id: string, options?: PgBoss.ConnectionOptions): Promise; + fail(name: string, id: string, data: object, options?: PgBoss.ConnectionOptions): Promise; + fail(name: string, ids: string[], options?: PgBoss.ConnectionOptions): Promise; + + getJobById(name: string, id: string, options?: PgBoss.ConnectionOptions & { includeArchive: bool }): Promise; + + createQueue(name: string, options?: PgBoss.Queue): Promise; + updateQueue(name: string, options?: PgBoss.Queue): Promise; deleteQueue(name: string): Promise; - deleteAllQueues(): Promise; - clearStorage(): Promise; + purgeQueue(name: string): Promise; + getQueues(): Promise; + getQueue(name: string): Promise; + getQueueSize(name: string, options?: { before: 'retry' | 'active' | 'completed' | 'cancelled' | 'failed' }): Promise; + clearStorage(): Promise; archive(): Promise; purge(): Promise; expire(): Promise; + maintain(): Promise; + isInstalled(): Promise; + schemaVersion(): Promise; schedule(name: string, cron: string, data?: object, options?: PgBoss.ScheduleOptions): Promise; unschedule(name: string): Promise; diff --git a/version.json b/version.json index efaedb7f..a295918c 100644 --- a/version.json +++ b/version.json @@ -1,3 +1,3 @@ { - "schema": 20 + "schema": 21 }