diff --git a/README.md b/README.md index d2c54899..ed05982b 100644 --- a/README.md +++ b/README.md @@ -180,7 +180,11 @@ Migrations should be named according to the following pattern: `####-{action}-{t For example: `0001-create-users` or `0001-modify-users` -In `/src/databases/seeds` there is seed or starter data. The contents can be run manually to help developers get data in their databases. The scripts are not referenced by the software and are included for convenience. The migrations must run prior to using seed scripts. +The `CopyBaseFieldsTask` worker is designed to copy basefields from a remote to a local instance of the pdc service. An administrative user looking to seed their database can make a POST request to the `/tasks/baseFieldsCopy` route, specifying the remote url in the body of the form: + +```json +{ "pdcApiUrl": "https://remote.pdc.instance" } +``` #### Linting diff --git a/package-lock.json b/package-lock.json index 3885f05e..005b7f8a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -24,6 +24,7 @@ "language-tags": "^1.0.9", "monocle-ts": "^2.3.13", "newtype-ts": "^0.3.5", + "node-fetch": "^2.7.0", "phone": "^3.1.57", "pino": "^9.6.0", "pino-http": "^10.3.0", @@ -42,6 +43,7 @@ "@types/jest": "^28.1.2", "@types/language-tags": "^1.0.4", "@types/node": "^22.10.5", + "@types/node-fetch": "^2.6.12", "@types/pg": "^8.11.10", "@types/supertest": "^6.0.2", "@types/swagger-ui-express": "^4.1.7", @@ -3275,6 +3277,17 @@ "undici-types": "~6.20.0" } }, + "node_modules/@types/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, "node_modules/@types/pg": { "version": "8.11.10", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.10.tgz", @@ 
-7998,6 +8011,7 @@ "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", "dev": true, + "license": "MIT", "dependencies": { "debug": "^4.1.0", "json-stringify-safe": "^5.0.1", @@ -8007,6 +8021,26 @@ "node": ">= 10.13" } }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/node-forge": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", @@ -9942,6 +9976,12 @@ "node": ">=0.6" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, "node_modules/ts-api-utils": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", @@ -10356,6 +10396,22 @@ "makeerror": "1.0.12" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": 
"MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -13189,6 +13245,16 @@ "undici-types": "~6.20.0" } }, + "@types/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==", + "dev": true, + "requires": { + "@types/node": "*", + "form-data": "^4.0.0" + } + }, "@types/pg": { "version": "8.11.10", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.10.tgz", @@ -16688,6 +16754,14 @@ "propagate": "^2.0.0" } }, + "node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "requires": { + "whatwg-url": "^5.0.0" + } + }, "node-forge": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", @@ -18124,6 +18198,11 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + }, "ts-api-utils": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-1.3.0.tgz", @@ -18393,6 +18472,20 @@ "makeerror": "1.0.12" } }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": 
"sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, "which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/package.json b/package.json index ddbf8748..c640b469 100644 --- a/package.json +++ b/package.json @@ -53,6 +53,7 @@ "@types/jest": "^28.1.2", "@types/language-tags": "^1.0.4", "@types/node": "^22.10.5", + "@types/node-fetch": "^2.6.12", "@types/pg": "^8.11.10", "@types/supertest": "^6.0.2", "@types/swagger-ui-express": "^4.1.7", @@ -95,6 +96,7 @@ "language-tags": "^1.0.9", "monocle-ts": "^2.3.13", "newtype-ts": "^0.3.5", + "node-fetch": "^2.7.0", "phone": "^3.1.57", "pino": "^9.6.0", "pino-http": "^10.3.0", diff --git a/src/__tests__/baseFieldsCopyTasks.int.test.ts b/src/__tests__/baseFieldsCopyTasks.int.test.ts new file mode 100644 index 00000000..6b00229b --- /dev/null +++ b/src/__tests__/baseFieldsCopyTasks.int.test.ts @@ -0,0 +1,233 @@ +import request from 'supertest'; +import { app } from '../app'; +import { + createBaseFieldsCopyTask, + createUser, + loadTableMetrics, +} from '../database'; +import { expectTimestamp, loadTestUser } from '../test/utils'; +import { + mockJwt as authHeader, + mockJwtWithAdminRole as authHeaderWithAdminRole, +} from '../test/mockJwt'; +import { TaskStatus } from '../types'; + +const MOCK_API_URL = 'https://example.com'; +const agent = request.agent(app); + +describe('/tasks/baseFieldsCopy', () => { + describe('GET /', () => { + it('requires authentication', async () => { + await agent.get('/tasks/baseFieldsCopy').expect(401); + }); + + it('requires administrator role', async () => { + await 
agent.get('/tasks/baseFieldsCopy').set(authHeader).expect(401); + }); + + it('returns an empty Bundle when no data is present', async () => { + await request(app) + .get('/tasks/baseFieldsCopy') + .set(authHeaderWithAdminRole) + .expect(200, { + total: 0, + entries: [], + }); + }); + + it('returns all BaseFieldsCopy Tasks for administrative users', async () => { + const testUser = await loadTestUser(); + const anotherUser = await createUser({ + keycloakUserId: '123e4567-e89b-12d3-a456-426614174000', + }); + + await createBaseFieldsCopyTask({ + pdcApiUrl: MOCK_API_URL, + status: TaskStatus.PENDING, + createdBy: testUser.keycloakUserId, + }); + + await createBaseFieldsCopyTask({ + pdcApiUrl: MOCK_API_URL, + status: TaskStatus.COMPLETED, + createdBy: anotherUser.keycloakUserId, + }); + + await request(app) + .get('/tasks/baseFieldsCopy') + .set(authHeaderWithAdminRole) + .expect(200) + .expect((res) => + expect(res.body).toEqual({ + total: 2, + entries: [ + { + id: 2, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: anotherUser.keycloakUserId, + }, + { + id: 1, + status: TaskStatus.PENDING, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + ], + }), + ); + }); + + it('supports pagination', async () => { + const testUser = await loadTestUser(); + await Array.from(Array(20)).reduce(async (p) => { + await p; + await createBaseFieldsCopyTask({ + pdcApiUrl: MOCK_API_URL, + status: TaskStatus.COMPLETED, + createdBy: testUser.keycloakUserId, + }); + }, Promise.resolve()); + + await request(app) + .get('/tasks/baseFieldsCopy') + .query({ + _page: 2, + _count: 5, + }) + .set(authHeaderWithAdminRole) + .expect(200) + .expect((res) => + expect(res.body).toEqual({ + total: 20, + entries: [ + { + id: 15, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + 
createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + { + id: 14, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + { + id: 13, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + { + id: 12, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + { + id: 11, + status: TaskStatus.COMPLETED, + statusUpdatedAt: expectTimestamp, + pdcApiUrl: MOCK_API_URL, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }, + ], + }), + ); + }); + }); + + describe('POST /', () => { + it('requires authentication', async () => { + await request(app).post('/tasks/baseFieldsCopy').expect(401); + }); + + it('requires administrator role', async () => { + await request(app) + .post('/tasks/baseFieldsCopy') + .set(authHeader) + .expect(401); + }); + + it('throws an error if a synchzronizationUrl is not provided', async () => { + const result = await request(app) + .post('/tasks/baseFieldsCopy') + .type('application/json') + .set(authHeaderWithAdminRole) + .send({}) + .expect(400); + expect(result.body).toMatchObject({ + name: 'InputValidationError', + details: expect.any(Array) as unknown[], + }); + }); + + it('throws an error if a synchzronizationUrl is not provided', async () => { + const result = await request(app) + .post('/tasks/baseFieldsCopy') + .type('application/json') + .set(authHeaderWithAdminRole) + .send({}) + .expect(400); + expect(result.body).toMatchObject({ + name: 'InputValidationError', + details: expect.any(Array) as unknown[], + }); + }); + + it('creates exactly one BaseField copy task', async () => { + const before = await loadTableMetrics('base_fields_copy_tasks'); + const result = 
await request(app) + .post('/tasks/baseFieldsCopy') + .type('application/json') + .set(authHeaderWithAdminRole) + .send({ + pdcApiUrl: MOCK_API_URL, + }) + .expect(201); + const after = await loadTableMetrics('base_fields_copy_tasks'); + const testUser = await loadTestUser(); + + expect(before.count).toEqual(0); + expect(result.body).toEqual({ + id: expect.any(Number) as number, + status: 'pending', + pdcApiUrl: MOCK_API_URL, + statusUpdatedAt: expectTimestamp, + createdAt: expectTimestamp, + createdBy: testUser.keycloakUserId, + }); + expect(after.count).toEqual(1); + }); + + it('returns 400 bad request when no synchronization url is provided', async () => { + const result = await request(app) + .post('/tasks/baseFieldsCopy') + .type('application/json') + .set(authHeaderWithAdminRole) + .send({ + knockknock: 'whos there?', + orange: 'oh. weird.', + }) + .expect(400); + expect(result.body).toMatchObject({ + name: 'InputValidationError', + details: expect.any(Array) as unknown[], + }); + }); + }); +}); diff --git a/src/database/initialization/base_fields_copy_task_to_json.sql b/src/database/initialization/base_fields_copy_task_to_json.sql new file mode 100644 index 00000000..e849b165 --- /dev/null +++ b/src/database/initialization/base_fields_copy_task_to_json.sql @@ -0,0 +1,19 @@ +SELECT drop_function('base_fields_copy_task_to_json'); + +CREATE FUNCTION base_fields_copy_task_to_json( + base_fields_copy_task base_fields_copy_tasks +) +RETURNS jsonb AS $$ +DECLARE + source_json JSONB; +BEGIN + RETURN jsonb_build_object( + 'id', base_fields_copy_task.id, + 'status', base_fields_copy_task.status, + 'pdcApiUrl', base_fields_copy_task.pdc_api_url, + 'statusUpdatedAt', to_json(base_fields_copy_task.status_updated_at)::jsonb, + 'createdAt', to_json(base_fields_copy_task.created_at)::jsonb, + 'createdBy', base_fields_copy_task.created_by + ); +END; +$$ LANGUAGE plpgsql; diff --git a/src/database/migrations/0046-create-base_fields_copy_tasks.sql 
b/src/database/migrations/0046-create-base_fields_copy_tasks.sql new file mode 100644 index 00000000..256eb3db --- /dev/null +++ b/src/database/migrations/0046-create-base_fields_copy_tasks.sql @@ -0,0 +1,28 @@ +CREATE TABLE base_fields_copy_tasks ( + id integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + status task_status NOT NULL, + pdc_api_url varchar NOT NULL, + status_updated_at timestamp with time zone NOT NULL DEFAULT now(), + FOREIGN KEY (created_by) REFERENCES users (keycloak_user_id), + created_at timestamp with time zone NOT NULL DEFAULT now(), + created_by uuid NOT NULL DEFAULT system_keycloak_user_id() +); + +COMMENT ON TABLE base_fields_copy_tasks IS +'An entity representing a basefield copy graphile task' +'from a remote pdc instance to a local instance.'; + +CREATE OR REPLACE FUNCTION update_status_timestamp() +RETURNS trigger AS $$ +BEGIN + IF NEW.status IS DISTINCT FROM OLD.status THEN + NEW.status_updated_at := NOW(); + END IF; + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER update_status_updated_at +BEFORE UPDATE ON base_fields_copy_tasks +FOR EACH ROW +EXECUTE FUNCTION update_status_timestamp(); diff --git a/src/database/operations/baseFields/createOrUpdateBaseField.ts b/src/database/operations/baseFields/createOrUpdateBaseField.ts new file mode 100644 index 00000000..a4b7b580 --- /dev/null +++ b/src/database/operations/baseFields/createOrUpdateBaseField.ts @@ -0,0 +1,33 @@ +import { db } from '../../db'; +import { NotFoundError } from '../../../errors'; +import type { + BaseField, + WritableBaseField, + JsonResultSet, +} from '../../../types'; + +export const createOrUpdateBaseField = async ( + updateValues: WritableBaseField, +): Promise => { + const { scope, dataType, shortCode, label, description } = updateValues; + const result = await db.sql>( + 'baseFields.createOrUpdateByShortcode', + { + scope, + dataType, + shortCode, + label, + description, + }, + ); + const baseField = result.rows[0]?.object; + if (baseField === 
undefined) { + throw new NotFoundError('BaseField could not be created or updated', { + entityType: 'BaseField', + lookupValues: { + shortCode, + }, + }); + } + return baseField; +}; diff --git a/src/database/operations/baseFields/index.ts b/src/database/operations/baseFields/index.ts index dc3aa32e..80cb73d3 100644 --- a/src/database/operations/baseFields/index.ts +++ b/src/database/operations/baseFields/index.ts @@ -1,4 +1,5 @@ export * from './createBaseField'; +export * from './createOrUpdateBaseField'; export * from './loadBaseField'; export * from './loadBaseFields'; export * from './updateBaseField'; diff --git a/src/database/operations/baseFieldsCopyTasks/createBaseFieldsCopyTask.ts b/src/database/operations/baseFieldsCopyTasks/createBaseFieldsCopyTask.ts new file mode 100644 index 00000000..54c45493 --- /dev/null +++ b/src/database/operations/baseFieldsCopyTasks/createBaseFieldsCopyTask.ts @@ -0,0 +1,28 @@ +import { db } from '../../db'; +import type { + JsonResultSet, + BaseFieldsCopyTask, + InternallyWritableBaseFieldsCopyTask, +} from '../../../types'; + +export const createBaseFieldsCopyTask = async ( + createValues: InternallyWritableBaseFieldsCopyTask, +): Promise => { + const { pdcApiUrl, status, createdBy } = createValues; + + const result = await db.sql>( + 'baseFieldsCopyTasks.insertOne', + { + status, + pdcApiUrl, + createdBy, + }, + ); + const { object } = result.rows[0] ?? 
{}; + if (object === undefined) { + throw new Error( + 'The entity creation did not appear to fail, but no data was returned by the operation.', + ); + } + return object; +}; diff --git a/src/database/operations/baseFieldsCopyTasks/index.ts b/src/database/operations/baseFieldsCopyTasks/index.ts new file mode 100644 index 00000000..7439132a --- /dev/null +++ b/src/database/operations/baseFieldsCopyTasks/index.ts @@ -0,0 +1,4 @@ +export * from './createBaseFieldsCopyTask'; +export * from './loadBaseFieldsCopyTask'; +export * from './loadBaseFieldsCopyTaskBundle'; +export * from './updateBaseFieldsCopyTask'; diff --git a/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTask.ts b/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTask.ts new file mode 100644 index 00000000..f07bb9b5 --- /dev/null +++ b/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTask.ts @@ -0,0 +1,21 @@ +import { db } from '../../db'; +import { NotFoundError } from '../../../errors'; +import type { JsonResultSet, BaseFieldsCopyTask } from '../../../types'; + +export const loadBaseFieldsCopyTask = async ( + id: number, +): Promise => { + const baseFieldsCopyTaskQueryResult = await db.sql< + JsonResultSet + >('baseFieldsCopyTasks.selectById', { + id, + }); + const { object } = baseFieldsCopyTaskQueryResult.rows[0] ?? 
{}; + if (object === undefined) { + throw new NotFoundError(`Entity not found`, { + entityType: 'BaseFieldsCopyTask', + entityId: id, + }); + } + return object; +}; diff --git a/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTaskBundle.ts b/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTaskBundle.ts new file mode 100644 index 00000000..7275be42 --- /dev/null +++ b/src/database/operations/baseFieldsCopyTasks/loadBaseFieldsCopyTaskBundle.ts @@ -0,0 +1,35 @@ +import { loadBundle } from '../generic/loadBundle'; +import type { + JsonResultSet, + Bundle, + BaseFieldsCopyTask, + AuthContext, + KeycloakId, +} from '../../../types'; + +export const loadBaseFieldsCopyTaskBundle = async ( + authContext: AuthContext | undefined, + createdBy: KeycloakId | undefined, + limit: number | undefined, + offset: number, +): Promise> => { + const authContextKeycloakUserId = authContext?.user.keycloakUserId; + const authContextIsAdministrator = authContext?.role.isAdministrator; + + const bundle = await loadBundle>( + 'baseFieldsCopyTasks.selectWithPagination', + { + authContextIsAdministrator, + authContextKeycloakUserId, + createdBy, + limit, + offset, + }, + 'base_fields_copy_tasks', + ); + const entries = bundle.entries.map((entry) => entry.object); + return { + ...bundle, + entries, + }; +}; diff --git a/src/database/operations/baseFieldsCopyTasks/updateBaseFieldsCopyTask.ts b/src/database/operations/baseFieldsCopyTasks/updateBaseFieldsCopyTask.ts new file mode 100644 index 00000000..cc19c5e0 --- /dev/null +++ b/src/database/operations/baseFieldsCopyTasks/updateBaseFieldsCopyTask.ts @@ -0,0 +1,30 @@ +import { db } from '../../db'; +import { NotFoundError } from '../../../errors'; +import type { + JsonResultSet, + BaseFieldsCopyTask, + InternallyWritableBaseFieldsCopyTask, +} from '../../../types'; + +export const updateBaseFieldsCopyTask = async ( + id: number, + updateValues: Partial, +): Promise => { + const { status } = updateValues; + + const 
result = await db.sql>( + 'baseFieldsCopyTasks.updateById', + { + id, + status, + }, + ); + const { object } = result.rows[0] ?? {}; + if (object === undefined) { + throw new NotFoundError(`Entity not found`, { + entityType: 'BaseFieldsCopyTask', + entityId: id, + }); + } + return object; +}; diff --git a/src/database/operations/index.ts b/src/database/operations/index.ts index 920bef00..cb3634ec 100644 --- a/src/database/operations/index.ts +++ b/src/database/operations/index.ts @@ -2,6 +2,7 @@ export * from './applicationFormFields'; export * from './applicationForms'; export * from './baseFieldLocalization'; export * from './baseFields'; +export * from './baseFieldsCopyTasks'; export * from './bulkUploadTasks'; export * from './changemakerProposals'; export * from './changemakers'; diff --git a/src/database/queries/baseFields/createOrUpdateByShortcode.sql b/src/database/queries/baseFields/createOrUpdateByShortcode.sql new file mode 100644 index 00000000..93b6450a --- /dev/null +++ b/src/database/queries/baseFields/createOrUpdateByShortcode.sql @@ -0,0 +1,21 @@ +INSERT INTO base_fields ( + label, + description, + short_code, + data_type, + scope +) +VALUES ( + :label, + :description, + :shortCode, + :dataType, + :scope +) +ON CONFLICT (short_code) +DO UPDATE SET +label = excluded.label, +description = excluded.description, +data_type = excluded.data_type, +scope = excluded.scope +RETURNING base_field_to_json(base_fields) AS object; diff --git a/src/database/queries/baseFieldsCopyTasks/insertOne.sql b/src/database/queries/baseFieldsCopyTasks/insertOne.sql new file mode 100644 index 00000000..da45df8d --- /dev/null +++ b/src/database/queries/baseFieldsCopyTasks/insertOne.sql @@ -0,0 +1,11 @@ +INSERT INTO base_fields_copy_tasks ( + status, + pdc_api_url, + created_by +) +VALUES ( + :status, + :pdcApiUrl, + :createdBy +) +RETURNING base_fields_copy_task_to_json(base_fields_copy_tasks) AS object; diff --git a/src/database/queries/baseFieldsCopyTasks/selectById.sql 
b/src/database/queries/baseFieldsCopyTasks/selectById.sql new file mode 100644 index 00000000..2263a0df --- /dev/null +++ b/src/database/queries/baseFieldsCopyTasks/selectById.sql @@ -0,0 +1,3 @@ +SELECT base_fields_copy_task_to_json(base_fields_copy_tasks.*) AS object +FROM base_fields_copy_tasks +WHERE id = :id; diff --git a/src/database/queries/baseFieldsCopyTasks/selectWithPagination.sql b/src/database/queries/baseFieldsCopyTasks/selectWithPagination.sql new file mode 100644 index 00000000..f1a9c26a --- /dev/null +++ b/src/database/queries/baseFieldsCopyTasks/selectWithPagination.sql @@ -0,0 +1,23 @@ +SELECT base_fields_copy_task_to_json(base_fields_copy_tasks.*) AS object +FROM base_fields_copy_tasks +WHERE + CASE + WHEN :createdBy::uuid IS NULL THEN + TRUE + ELSE + created_by = :createdBy + END + AND + CASE + WHEN :authContextKeycloakUserId::uuid IS NULL THEN + TRUE + ELSE + ( + created_by = :authContextKeycloakUserId + OR :authContextIsAdministrator::boolean + ) + END +ORDER BY id DESC +LIMIT + :limit + OFFSET :offset; diff --git a/src/database/queries/baseFieldsCopyTasks/updateById.sql b/src/database/queries/baseFieldsCopyTasks/updateById.sql new file mode 100644 index 00000000..4136a860 --- /dev/null +++ b/src/database/queries/baseFieldsCopyTasks/updateById.sql @@ -0,0 +1,5 @@ +UPDATE base_fields_copy_tasks +SET + status = coalesce(:status, status) +WHERE id = :id +RETURNING base_fields_copy_task_to_json(base_fields_copy_tasks) AS object; diff --git a/src/database/seeds/0001-insert-base_fields.sql b/src/database/seeds/0001-insert-base_fields.sql index b8172bc2..69d17e2d 100644 --- a/src/database/seeds/0001-insert-base_fields.sql +++ b/src/database/seeds/0001-insert-base_fields.sql @@ -298,3 +298,4 @@ INSERT INTO base_fields (label, description, short_code, data_type, scope) VALUE ('Overall Score' , '', 'overall_score', 'number', 'proposal' ), ('Overall Rating' , '', 'overall_rating', 'number', 'proposal' ), ('Yes/No indicator: This rating is a Donor 
Advisory' , '', 'yes_no_indicator_this_rating_is_a_donor_advisory', 'boolean', 'proposal' ); + diff --git a/src/handlers/baseFieldsCopyTasksHandlers.ts b/src/handlers/baseFieldsCopyTasksHandlers.ts new file mode 100644 index 00000000..b463a1e6 --- /dev/null +++ b/src/handlers/baseFieldsCopyTasksHandlers.ts @@ -0,0 +1,101 @@ +import { + createBaseFieldsCopyTask, + loadBaseFieldsCopyTaskBundle, + getLimitValues, +} from '../database'; +import { + TaskStatus, + isAuthContext, + isTinyPgErrorWithQueryContext, + isWritableBaseFieldsCopyTask, +} from '../types'; +import { + DatabaseError, + FailedMiddlewareError, + InputValidationError, +} from '../errors'; +import { + extractCreatedByParameters, + extractPaginationParameters, +} from '../queryParameters'; +import { addCopyBaseFieldsJob } from '../jobQueue'; +import type { Request, Response, NextFunction } from 'express'; + +const postBaseFieldsCopyTask = ( + req: Request, + res: Response, + next: NextFunction, +) => { + if (!isAuthContext(req)) { + next(new FailedMiddlewareError('Unexpected lack of auth context.')); + return; + } + + if (!isWritableBaseFieldsCopyTask(req.body)) { + throw new InputValidationError( + 'Invalid request body.', + isWritableBaseFieldsCopyTask.errors ?? 
[], + ); + } + + const { pdcApiUrl } = req.body; + const createdBy = req.user.keycloakUserId; + (async () => { + const baseFieldsCopyTask = await createBaseFieldsCopyTask({ + pdcApiUrl, + status: TaskStatus.PENDING, + createdBy, + }); + + await addCopyBaseFieldsJob({ + baseFieldsCopyTaskId: baseFieldsCopyTask.id, + }); + + res.status(201).contentType('application/json').send(baseFieldsCopyTask); + })().catch((error: unknown) => { + if (isTinyPgErrorWithQueryContext(error)) { + next(new DatabaseError('Error creating basefield copy task.', error)); + } else { + next(error); + } + }); +}; + +const getBaseFieldsCopyTasks = ( + req: Request, + res: Response, + next: NextFunction, +): void => { + if (!isAuthContext(req)) { + next(new FailedMiddlewareError('Unexpected lack of auth context.')); + return; + } + + const paginationParameters = extractPaginationParameters(req); + const { offset, limit } = getLimitValues(paginationParameters); + const { createdBy } = extractCreatedByParameters(req); + (async () => { + const baseFieldsCopyTaskBundle = await loadBaseFieldsCopyTaskBundle( + req, + createdBy, + limit, + offset, + ); + + res + .status(200) + .contentType('application/json') + .send(baseFieldsCopyTaskBundle); + })().catch((error: unknown) => { + if (isTinyPgErrorWithQueryContext(error)) { + next(new DatabaseError('Error retrieving basefields copy tasks.', error)); + return; + } + next(error); + }); +}; + +export const baseFieldsCopyTasksHandlers = { + postBaseFieldsCopyTask, + getBaseFieldsCopyTasks, +}; diff --git a/src/jobQueue.ts b/src/jobQueue.ts index bacf818e..5f23530e 100644 --- a/src/jobQueue.ts +++ b/src/jobQueue.ts @@ -1,13 +1,17 @@ import { Logger, quickAddJob, run, runMigrations } from 'graphile-worker'; +import { copyBaseFields, processBulkUploadTask } from './tasks'; import { getLogger } from './logger'; import { db } from './database/db'; -import { processBulkUploadTask } from './tasks'; -import type { ProcessBulkUploadJobPayload } from './types'; 
+import type { + CopyBaseFieldsJobPayload, + ProcessBulkUploadJobPayload, +} from './types'; const logger = getLogger(__filename); enum JobType { PROCESS_BULK_UPLOAD = 'processBulkUploadTask', + COPY_BASE_FIELDS = 'copyBaseFields', } export const jobQueueLogger = new Logger((scope) => (level, message, meta) => { @@ -38,6 +42,7 @@ export const startJobQueue = async () => { pollInterval: 1000, taskList: { processBulkUploadTask, + copyBaseFields, }, }); runner.promise.catch((err) => { @@ -64,3 +69,6 @@ export const addJob = async (jobType: JobType, payload: unknown) => export const addProcessBulkUploadJob = async ( payload: ProcessBulkUploadJobPayload, ) => addJob(JobType.PROCESS_BULK_UPLOAD, payload); + +export const addCopyBaseFieldsJob = async (payload: CopyBaseFieldsJobPayload) => + addJob(JobType.COPY_BASE_FIELDS, payload); diff --git a/src/openapi.json b/src/openapi.json index ac082bc8..1f470e8d 100644 --- a/src/openapi.json +++ b/src/openapi.json @@ -199,6 +199,73 @@ "format": "uuid", "nullable": true }, + "BaseFieldsCopyTask": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "readOnly": true, + "example": 3407 + }, + "status": { + "type": "string", + "readOnly": true, + "enum": [ + "pending", + "in_progress", + "completed", + "failed", + "canceled" + ] + }, + "pdcApiUrl": { + "type": "string" + }, + "statusUpdatedAt": { + "type": "string", + "format": "date-time", + "readOnly": true + }, + "createdBy": { + "description": "The keycloak user id of the PDC user that created this BaseFieldCopyTask", + "type": "string", + "format": "uuid", + "readOnly": true + }, + "createdAt": { + "type": "string", + "format": "date-time", + "readOnly": true + } + }, + "required": [ + "id", + "pdcApiUrl", + "status", + "statusUpdatedAt", + "createdBy", + "createdAt" + ] + }, + "BaseFieldsCopyTaskBundle": { + "allOf": [ + { + "$ref": "#/components/schemas/Bundle" + }, + { + "type": "object", + "properties": { + "entries": { + "type": "array", + "items": { + 
"$ref": "#/components/schemas/BaseFieldsCopyTask" + } + } + }, + "required": ["entries"] + } + ] + }, "DataProvider": { "type": "object", "properties": { @@ -1465,6 +1532,87 @@ } } }, + "/tasks/baseFieldsCopy": { + "get": { + "operationId": "getBaseFieldsCopyTasks", + "summary": "Gets a list of basefieldcopytasks.", + "tags": ["Tasks"], + "security": [ + { + "auth": [] + } + ], + "parameters": [ + { "$ref": "#/components/parameters/pageParam" }, + { "$ref": "#/components/parameters/countParam" }, + { "$ref": "#/components/parameters/createdByParam" } + ], + "responses": { + "200": { + "description": "Requested BaseFieldCopyTasks registered in the PDC.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BaseFieldsCopyTaskBundle" + } + } + } + }, + "401": { + "description": "Authentication was not provided or was invalid.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PdcError" + } + } + } + } + } + }, + "post": { + "operationId": "addBaseFieldsCopyTask", + "summary": "Creates a BaseFieldsCopyTask entity, which is then used to enqueue a basefield copying job to the job queue. 
This pulls basefields from a remote PDC instance and upserts them into the local database", + "tags": ["Tasks"], + "security": [ + { + "auth": [] + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BaseFieldsCopyTask" + } + } + } + }, + "responses": { + "201": { + "description": "The Sync BaseField", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/BaseFieldsCopyTask" + } + } + } + }, + "401": { + "description": "Authentication was not provided or was invalid.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PdcError" + } + } + } + } + } + } + }, "/changemakerProposals": { "get": { "operationId": "getChangemakerProposals", diff --git a/src/routers/tasksRouter.ts b/src/routers/tasksRouter.ts index f8326a54..4c42c2b4 100644 --- a/src/routers/tasksRouter.ts +++ b/src/routers/tasksRouter.ts @@ -1,6 +1,7 @@ import express from 'express'; import { bulkUploadTasksHandlers } from '../handlers/bulkUploadTasksHandlers'; -import { requireAuthentication } from '../middleware'; +import { baseFieldsCopyTasksHandlers } from '../handlers/baseFieldsCopyTasksHandlers'; +import { requireAdministratorRole, requireAuthentication } from '../middleware'; const tasksRouter = express.Router(); @@ -16,4 +17,16 @@ tasksRouter.get( bulkUploadTasksHandlers.getBulkUploadTasks, ); +tasksRouter.post( + '/baseFieldsCopy', + requireAdministratorRole, + baseFieldsCopyTasksHandlers.postBaseFieldsCopyTask, +); + +tasksRouter.get( + '/baseFieldsCopy', + requireAdministratorRole, + baseFieldsCopyTasksHandlers.getBaseFieldsCopyTasks, +); + export { tasksRouter }; diff --git a/src/tasks/__tests__/copyBaseFields.int.test.ts b/src/tasks/__tests__/copyBaseFields.int.test.ts new file mode 100644 index 00000000..4bc565e4 --- /dev/null +++ b/src/tasks/__tests__/copyBaseFields.int.test.ts @@ -0,0 +1,728 @@ +import nock from 'nock'; +import { 
fetchBaseFieldsFromRemote, copyBaseFields } from '../index'; +import { getMockJobHelpers } from '../../test/mockGraphileWorker'; +import { + loadBaseFieldsCopyTask, + createBaseFieldsCopyTask, + loadSystemUser, + loadTableMetrics, + createOrUpdateBaseField, + loadBaseField, + createOrUpdateBaseFieldLocalization, +} from '../../database/operations'; +import { + InternallyWritableBaseFieldsCopyTask, + BaseFieldsCopyTask, + TaskStatus, + BaseFieldDataType, + BaseFieldScope, +} from '../../types'; +import { expectTimestamp } from '../../test/utils'; + +const MOCK_API_URL = 'https://example.com'; + +const createTestBaseFieldsCopyTask = async ( + overrideValues?: Partial, +): Promise => { + const systemUser = await loadSystemUser(); + const defaultValues = { + pdcApiUrl: MOCK_API_URL, + status: TaskStatus.PENDING, + statusUpdatedAt: new Date(Date.now()).toISOString(), + createdBy: systemUser.keycloakUserId, + }; + return createBaseFieldsCopyTask({ + ...defaultValues, + ...overrideValues, + }); +}; + +const mockFirstNameBaseField = { + id: 1, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: 'first_name', + localizations: { + en: { + baseFieldId: 1, + language: 'en', + label: 'First name', + description: 'The first name of the Applicant', + createdAt: '2024-10-17T01:46:58.494Z', + }, + fr: { + baseFieldId: 1, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: '2024-10-17T01:46:58.494Z', + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: '2024-10-17T01:46:58.494Z', +}; + +const mockFirstNameBaseFieldWithNoLocalizations = { + id: 1, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: 'first_name', + localizations: {}, + dataType: 'string', + scope: 'proposal', + createdAt: '2024-10-17T01:46:58.494Z', +}; + +const mockLastNameBaseField = { + id: 2, + label: 'Last Name', + description: 'The last name of the applicant', + shortCode: 'last_name', + 
localizations: { + en: { + baseFieldId: 2, + language: 'en', + label: 'Last name', + description: 'The Last name of the Applicant', + createdAt: '2024-10-17T01:46:58.494Z', + }, + fr: { + baseFieldId: 2, + language: 'fr', + label: 'Nom de famille', + description: 'Le nom de la famille de la applicant', + createdAt: '2024-10-17T01:46:58.494Z', + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: '2024-10-17T01:46:58.494Z', +}; + +const mockBaseFields = [mockFirstNameBaseField, mockLastNameBaseField]; + +describe('fetchBaseFieldsFromRemote', () => { + beforeEach(() => { + nock.cleanAll(); + }); + + it('should throw an error if the http request to the synchronization url fails', async () => { + const request = nock(MOCK_API_URL) + .get('/baseFields') + .replyWithError('nobody here but us chickens'); + + await expect( + fetchBaseFieldsFromRemote(MOCK_API_URL, getMockJobHelpers().logger), + ).rejects.toThrow(); + + expect(request.isDone()).toEqual(true); + }); + + it('should throw an error if the http request to the synchronization url succeeds, but the data recieved is not valid json', async () => { + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, 'nobody here but us chickens'); + + await expect( + fetchBaseFieldsFromRemote(MOCK_API_URL, getMockJobHelpers().logger), + ).rejects.toThrow(); + + expect(request.isDone()).toEqual(true); + }); + + it('should throw an error if the http request to the synchronization url succeeds, provides valid json, but the json is not an array of basefields', async () => { + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [ + { + hello: 'how ya doing', + may: 'i be inserted into your database?', + pretty: 'please?', + idont: 'have any escape sequences in me i swear', + }, + ]); + + await expect( + fetchBaseFieldsFromRemote(MOCK_API_URL, getMockJobHelpers().logger), + ).rejects.toThrow(); + + expect(request.isDone()).toEqual(true); + }); + + it('should resolve a valid response', 
async () => { + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, mockBaseFields); + + await expect( + fetchBaseFieldsFromRemote(MOCK_API_URL, getMockJobHelpers().logger), + ).resolves.toEqual(mockBaseFields); + + expect(request.isDone()).toEqual(true); + }); +}); + +describe('copyBaseFields', () => { + beforeEach(() => { + nock.cleanAll(); + }); + + it('should not process or modify processing status if the task is not PENDING', async () => { + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.IN_PROGRESS, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, mockBaseFields); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.IN_PROGRESS); + expect(request.isDone()).toEqual(false); + }); + + it('should fail if the remote url is unavailable', async () => { + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(404, 'page not found'); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.FAILED); + expect(request.isDone()).toEqual(true); + }); + + it('should fail if the remote url is available, but sends back invalid basefield data', async () => { + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [ + { + hello: 'how ya doing', + may: 'i be inserted into your database?', + pretty: 'please?', + imjust: 'a friendly piece of data, 
believe me', + }, + ]); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.FAILED); + expect(request.isDone()).toEqual(true); + }); + + it('should not insert any remote basefields if there are no basefields in the remote instance', async () => { + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL).get('/baseFields').reply(200, []); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + const after = await loadTableMetrics('base_fields'); + + expect(before.count).toEqual(0); + expect(after.count).toEqual(0); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should insert all remote basefields to an empty local database', async () => { + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockFirstNameBaseField]); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + const after = await loadTableMetrics('base_fields'); + + expect(before.count).toEqual(0); + expect(after.count).toEqual(1); + + const insertedRemoteBaseField = await loadBaseField(1); + + expect(insertedRemoteBaseField).toEqual({ + id: 1, + label: 'First Name', + description: 'The first name of the 
applicant', + shortCode: mockFirstNameBaseField.shortCode, + localizations: { + en: { + baseFieldId: 1, + language: 'en', + label: 'First name', + description: 'The first name of the Applicant', + createdAt: expectTimestamp, + }, + fr: { + baseFieldId: 1, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: expectTimestamp, + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: expectTimestamp, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should insert all remote basefields, without updating any local basefields, assuming there is no overlap on shortcode', async () => { + const localBaseField = await createOrUpdateBaseField({ + label: 'Local BaseField', + description: 'This basefield should not be updated on basefield copy', + shortCode: 'local', + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, mockBaseFields); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + const after = await loadTableMetrics('base_fields'); + + expect(before.count).toEqual(1); + expect(after.count).toEqual(3); + + const localBaseFieldAfterInsertion = await loadBaseField(localBaseField.id); + + expect(localBaseFieldAfterInsertion).toEqual(localBaseField); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should update local basefields when they match on remote basefield shortcodes, even if the basefields have identical data', async () => { + const 
localBaseField = await createOrUpdateBaseField({ + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + const mockRemoteBaseField = { + id: 1, + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + localizations: {}, + createdAt: '2024-10-17T01:46:58.494Z', + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }; + + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockRemoteBaseField]); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + const after = await loadTableMetrics('base_fields'); + + const updatedBaseField = await loadBaseField(localBaseField.id); + + expect(before.count).toEqual(1); + expect(after.count).toEqual(1); + + expect(updatedBaseField).toEqual({ + id: localBaseField.id, + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + dataType: 'string', + scope: 'proposal', + createdAt: localBaseField.createdAt, + localizations: {}, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should update local basefields when they match on remote basefield shortcodes, and insert all other remote basefields', async () => { + const localBaseField = await createOrUpdateBaseField({ + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + const mockRemoteBaseField = { + id: 1, + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + localizations: 
{}, + createdAt: '2024-10-17T01:46:58.494Z', + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }; + + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockRemoteBaseField, mockFirstNameBaseField]); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + const after = await loadTableMetrics('base_fields'); + + const updatedBaseField = await loadBaseField(localBaseField.id); + + expect(before.count).toEqual(1); + expect(after.count).toEqual(2); + + expect(updatedBaseField).toEqual({ + id: localBaseField.id, + label: 'Local Data', + description: 'This is local data', + shortCode: 'ld', + dataType: 'string', + scope: 'proposal', + createdAt: localBaseField.createdAt, + localizations: {}, + }); + + const insertedRemoteBaseField = await loadBaseField(3); + + expect(insertedRemoteBaseField).toEqual({ + id: 3, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: mockFirstNameBaseField.shortCode, + localizations: { + en: { + baseFieldId: 3, + language: 'en', + label: 'First name', + description: 'The first name of the Applicant', + createdAt: expectTimestamp, + }, + fr: { + baseFieldId: 3, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: expectTimestamp, + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: expectTimestamp, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should preserve localizations for a local basefield with localizations, when there is a remote basefield with no localizations that matches on shortcode', async () => { + 
const localBaseField = await createOrUpdateBaseField({ + label: 'Update me', + description: 'This is a field to be updated', + shortCode: mockFirstNameBaseField.shortCode, + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + await createOrUpdateBaseFieldLocalization({ + baseFieldId: localBaseField.id, + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + language: 'fr', + }); + + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockFirstNameBaseFieldWithNoLocalizations]); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + const updatedBaseField = await loadBaseField(localBaseField.id); + + expect(updatedBaseField).toEqual({ + id: localBaseField.id, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: mockFirstNameBaseField.shortCode, + localizations: { + fr: { + baseFieldId: localBaseField.id, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: expectTimestamp, + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: localBaseField.createdAt, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should add localizations to a local basefield from a remote basefield with matching shortcode', async () => { + const localBaseField = await createOrUpdateBaseField({ + label: 'Update me', + description: 'This is a field to be updated', + shortCode: mockFirstNameBaseField.shortCode, + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + await createOrUpdateBaseFieldLocalization({ + baseFieldId: localBaseField.id, + label: 'Nombre de Pila', + description: 
'Nombre de Pila', + language: 'sp', + }); + + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockFirstNameBaseField]); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + const updatedBaseField = await loadBaseField(localBaseField.id); + + expect(updatedBaseField).toEqual({ + id: localBaseField.id, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: mockFirstNameBaseField.shortCode, + localizations: { + en: { + baseFieldId: localBaseField.id, + language: 'en', + label: 'First name', + description: 'The first name of the Applicant', + createdAt: expectTimestamp, + }, + fr: { + baseFieldId: localBaseField.id, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: expectTimestamp, + }, + sp: { + baseFieldId: localBaseField.id, + label: 'Nombre de Pila', + description: 'Nombre de Pila', + language: 'sp', + createdAt: expectTimestamp, + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: localBaseField.createdAt, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should insert all valid remote basefields into the database, and have status set as completed', async () => { + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, mockBaseFields); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + 
const after = await loadTableMetrics('base_fields'); + + expect(before.count).toEqual(0); + expect(after.count).toEqual(2); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); + + it('should update any existing local basefields that match on shortcode, and have status set as completed', async () => { + const baseField = await createOrUpdateBaseField({ + label: 'Old First Name', + description: 'This should be replaced', + shortCode: mockFirstNameBaseField.shortCode, + dataType: BaseFieldDataType.STRING, + scope: BaseFieldScope.PROPOSAL, + }); + + const before = await loadTableMetrics('base_fields'); + + const baseFieldsCopyTask = await createTestBaseFieldsCopyTask({ + status: TaskStatus.PENDING, + }); + const request = nock(MOCK_API_URL) + .get('/baseFields') + .reply(200, [mockFirstNameBaseField]); + + await copyBaseFields( + { baseFieldsCopyTaskId: baseFieldsCopyTask.id }, + getMockJobHelpers(), + ); + + const updatedBaseFieldsCopyTask = await loadBaseFieldsCopyTask( + baseFieldsCopyTask.id, + ); + + const after = await loadTableMetrics('base_fields'); + + const updatedBaseField = await loadBaseField(baseField.id); + + expect(before.count).toEqual(1); + expect(after.count).toEqual(1); + + expect(updatedBaseField).toEqual({ + id: baseField.id, + label: 'First Name', + description: 'The first name of the applicant', + shortCode: 'first_name', + localizations: { + en: { + baseFieldId: baseField.id, + language: 'en', + label: 'First name', + description: 'The first name of the Applicant', + createdAt: expectTimestamp, + }, + fr: { + baseFieldId: baseField.id, + language: 'fr', + label: 'Le Prenom', + description: 'Le Prenom de la Applicant', + createdAt: expectTimestamp, + }, + }, + dataType: 'string', + scope: 'proposal', + createdAt: baseField.createdAt, + }); + + expect(updatedBaseFieldsCopyTask.status).toEqual(TaskStatus.COMPLETED); + expect(request.isDone()).toEqual(true); + }); +}); diff --git 
a/src/tasks/__tests__/copyBaseFields.unit.test.ts b/src/tasks/__tests__/copyBaseFields.unit.test.ts new file mode 100644 index 00000000..7077b99f --- /dev/null +++ b/src/tasks/__tests__/copyBaseFields.unit.test.ts @@ -0,0 +1,10 @@ +import { getMockJobHelpers } from '../../test/mockGraphileWorker'; +import { copyBaseFields } from '../copyBaseFields'; + +describe('copyBaseFields', () => { + it('should not error when passed an invalid payload', async () => { + await expect( + copyBaseFields({}, getMockJobHelpers()), + ).resolves.not.toThrow(); + }); +}); diff --git a/src/tasks/copyBaseFields.ts b/src/tasks/copyBaseFields.ts new file mode 100644 index 00000000..d78b43ea --- /dev/null +++ b/src/tasks/copyBaseFields.ts @@ -0,0 +1,144 @@ +import fetch from 'node-fetch'; +import { + isCopyBaseFieldsJobPayload, + TaskStatus, + isBaseField, + BaseField, +} from '../types'; +import { + createOrUpdateBaseField, + createOrUpdateBaseFieldLocalization, + loadBaseFieldsCopyTask, + updateBaseFieldsCopyTask, +} from '../database/operations'; +import type { JobHelpers, Logger } from 'graphile-worker'; +import type { Response } from 'node-fetch'; + +export const fetchBaseFieldsFromRemote = async ( + pdcApiUrl: string, + logger: Logger, +): Promise => { + try { + const response = (await fetch( + `${pdcApiUrl}/baseFields`, + )) as unknown as Response; + + if (!response.ok) { + logger.error('Failed to fetch base fields from remote PDC instance', { + status: response.status, + statusText: response.statusText, + }); + throw new Error( + `Failed to fetch base fields: ${response.status} ${response.statusText}`, + ); + } + + const data = (await response.json()) as unknown; + + if (!Array.isArray(data) || !data.every((item) => isBaseField(item))) { + logger.error('Invalid basefield data received from remote PDC instance', { + data, + }); + throw new Error('Invalid data received from remote PDC instance'); + } + + return data; + } catch (err) { + logger.error('Error fetching base fields from 
remote PDC instance', { + err, + }); + throw new Error('An error occurred while fetching base fields', { + cause: err, + }); + } +}; + +const copyBaseField = async (targetBaseField: BaseField) => { + const { scope, dataType, shortCode, label, description } = targetBaseField; + const copiedBaseField = await createOrUpdateBaseField({ + scope, + dataType, + shortCode, + label, + description, + }); + await Promise.all( + Object.entries(targetBaseField.localizations).map( + async ([language, baseFieldLocalization]) => { + await createOrUpdateBaseFieldLocalization({ + baseFieldId: copiedBaseField.id, + language, + label: baseFieldLocalization.label, + description: baseFieldLocalization.description, + }); + }, + ), + ); +}; + +export const copyBaseFields = async ( + payload: unknown, + helpers: JobHelpers, +): Promise => { + if (!isCopyBaseFieldsJobPayload(payload)) { + helpers.logger.error('Malformed basefields copy job payload', { + errors: isCopyBaseFieldsJobPayload.errors ?? [], + }); + return; + } + helpers.logger.debug( + `Started BasefieldsCopy Job for BaseFieldsCopyTask ID ${payload.baseFieldsCopyTaskId}`, + ); + const baseFieldsCopyTask = await loadBaseFieldsCopyTask( + payload.baseFieldsCopyTaskId, + ); + + if (baseFieldsCopyTask.status !== TaskStatus.PENDING) { + helpers.logger.warn( + 'Basefields Copy cannot be processed because it is not in a PENDING state', + { baseFieldsCopyTask }, + ); + return; + } + + let remoteBaseFields: BaseField[]; + let taskFailed = false; + + await updateBaseFieldsCopyTask(baseFieldsCopyTask.id, { + status: TaskStatus.IN_PROGRESS, + }); + + try { + remoteBaseFields = await fetchBaseFieldsFromRemote( + baseFieldsCopyTask.pdcApiUrl, + helpers.logger, + ); + } catch (err) { + helpers.logger.warn('Fetching data from remote instance failed', { err }); + await updateBaseFieldsCopyTask(baseFieldsCopyTask.id, { + status: TaskStatus.FAILED, + }); + return; + } + + try { + await Promise.all( + remoteBaseFields.map>(async (baseField) => { + 
await copyBaseField(baseField); + }), + ); + } catch (err) { + helpers.logger.info('Basefields copy has failed', { err }); + taskFailed = true; + } + + if (taskFailed) { + await updateBaseFieldsCopyTask(baseFieldsCopyTask.id, { + status: TaskStatus.FAILED, + }); + } else { + await updateBaseFieldsCopyTask(baseFieldsCopyTask.id, { + status: TaskStatus.COMPLETED, + }); + } +}; diff --git a/src/tasks/index.ts b/src/tasks/index.ts index c3d92c30..ce4eab54 100644 --- a/src/tasks/index.ts +++ b/src/tasks/index.ts @@ -1 +1,2 @@ +export * from './copyBaseFields'; export * from './processBulkUploadTask'; diff --git a/src/types/BaseField.ts b/src/types/BaseField.ts index f46caaf9..c69ce55a 100644 --- a/src/types/BaseField.ts +++ b/src/types/BaseField.ts @@ -1,5 +1,8 @@ import { ajv } from '../ajv'; -import type { BaseFieldLocalization } from './BaseFieldLocalization'; +import { + baseFieldLocalizationSchema, + BaseFieldLocalization, +} from './BaseFieldLocalization'; import type { JSONSchemaType } from 'ajv'; import type { Writable } from './Writable'; @@ -28,6 +31,53 @@ interface BaseField { readonly createdAt: string; } +const baseFieldSchema: JSONSchemaType = { + type: 'object', + properties: { + id: { + type: 'number', + }, + label: { + type: 'string', + }, + description: { + type: 'string', + }, + shortCode: { + type: 'string', + }, + dataType: { + type: 'string', + enum: Object.values(BaseFieldDataType), + }, + scope: { + type: 'string', + enum: Object.values(BaseFieldScope), + }, + localizations: { + type: 'object', + additionalProperties: baseFieldLocalizationSchema, + required: [], + }, + createdAt: { + type: 'string', + }, + }, + required: [ + 'id', + 'label', + 'description', + 'shortCode', + 'dataType', + 'scope', + 'localizations', + 'createdAt', + ], + additionalProperties: true, +}; + +const isBaseField = ajv.compile(baseFieldSchema); + type WritableBaseField = Writable; const writableBaseFieldSchema: JSONSchemaType = { @@ -58,7 +108,9 @@ const 
isWritableBaseField = ajv.compile(writableBaseFieldSchema); export { BaseField, - isWritableBaseField, + baseFieldSchema, + isBaseField, WritableBaseField, + isWritableBaseField, writableBaseFieldSchema, }; diff --git a/src/types/BaseFieldLocalization.ts b/src/types/BaseFieldLocalization.ts index 2ad549dd..a2a914e4 100644 --- a/src/types/BaseFieldLocalization.ts +++ b/src/types/BaseFieldLocalization.ts @@ -10,6 +10,29 @@ interface BaseFieldLocalization { readonly createdAt: string; } +const baseFieldLocalizationSchema: JSONSchemaType = { + type: 'object', + properties: { + language: { + type: 'string', + }, + baseFieldId: { + type: 'number', + }, + label: { + type: 'string', + }, + description: { + type: 'string', + }, + createdAt: { + type: 'string', + }, + }, + required: ['description', 'language', 'label', 'baseFieldId', 'createdAt'], + additionalProperties: true, +}; + type WritableBaseFieldLocalization = Writable; const writableBaseFieldLocalizationSchema: JSONSchemaType = @@ -47,13 +70,16 @@ const internallyWritableBaseFieldLocalizationSchema: JSONSchemaType; + +type InternallyWritableBaseFieldsCopyTask = WritableBaseFieldsCopyTask & + Pick; + +const writableBaseFieldsCopyTaskSchema: JSONSchemaType = + { + type: 'object', + properties: { + pdcApiUrl: { + type: 'string', + }, + }, + required: ['pdcApiUrl'], + }; + +const isWritableBaseFieldsCopyTask = ajv.compile( + writableBaseFieldsCopyTaskSchema, +); +export { + BaseFieldsCopyTask, + WritableBaseFieldsCopyTask, + isWritableBaseFieldsCopyTask, + InternallyWritableBaseFieldsCopyTask, +}; diff --git a/src/types/CopyBaseFieldsJobPayload.ts b/src/types/CopyBaseFieldsJobPayload.ts new file mode 100644 index 00000000..59738650 --- /dev/null +++ b/src/types/CopyBaseFieldsJobPayload.ts @@ -0,0 +1,25 @@ +import { ajv } from '../ajv'; +import type { JSONSchemaType } from 'ajv'; + +interface CopyBaseFieldsJobPayload { + baseFieldsCopyTaskId: number; +} + +const copyBaseFieldsJobPayloadSchema: JSONSchemaType = + { + 
type: 'object', + properties: { + baseFieldsCopyTaskId: { + type: 'integer', + }, + }, + required: ['baseFieldsCopyTaskId'], + }; + +const isCopyBaseFieldsJobPayload = ajv.compile(copyBaseFieldsJobPayloadSchema); + +export { + CopyBaseFieldsJobPayload, + copyBaseFieldsJobPayloadSchema, + isCopyBaseFieldsJobPayload, +}; diff --git a/src/types/index.ts b/src/types/index.ts index cd70ba5b..ef18bd1f 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -3,11 +3,13 @@ export * from './ApplicationFormField'; export * from './AuthContext'; export * from './BaseField'; export * from './BaseFieldLocalization'; +export * from './BaseFieldsCopyTask'; export * from './BulkUploadTask'; export * from './Bundle'; export * from './Changemaker'; export * from './ChangemakerProposal'; export * from './CheckResult'; +export * from './CopyBaseFieldsJobPayload'; export * from './DataProvider'; export * from './express/AuthenticatedRequest'; export * from './Funder';