diff --git a/Makefile b/Makefile
index 50154ebd18..3bdf9ff7d1 100644
--- a/Makefile
+++ b/Makefile
@@ -4,15 +4,17 @@
 # If using the same variables in recipes that need to use a dotenv file other
 # than .env, remember to check that no values from .env are being used
 # inadvertently.
-ENVFILE := $(if $(environment), .env-test-e2e, .env)
+ENVFILE := $(if $(environment), .env-$(environment), .env)
+
+ifneq (,$(wildcard $(ENVFILE)))
+    include $(ENVFILE)
+    export
+endif
+
 
 CIENV := $(if $(filter $(environment), ci), -f docker-compose-test-e2e.ci.yml , -f docker-compose-test-e2e.local.yml)
 API_DB_INSTANCE := $(if $(environment), test-e2e-postgresql-api, postgresql-api)
 GEO_DB_INSTANCE := $(if $(environment), test-e2e-postgresql-geo-api, postgresql-geo-api)
 REDIS_INSTANCE := $(if $(environment), test-e2e-redis, redis)
-_API_POSTGRES_USER := $(if $(filter $(environment), ci),${API_POSTGRES_USER},$(shell grep -e API_POSTGRES_USER ${ENVFILE} | sed 's/^.*=//'))
-_API_POSTGRES_DB := $(if $(filter $(environment), ci),${API_POSTGRES_DB},$(shell grep -e API_POSTGRES_DB ${ENVFILE} | sed 's/^.*=//'))
-_GEO_POSTGRES_USER := $(if $(filter $(environment), ci),${GEO_POSTGRES_USER},$(shell grep -e GEO_POSTGRES_USER ${ENVFILE} | sed 's/^.*=//'))
-_GEO_POSTGRES_DB := $(if $(filter $(environment), ci),${GEO_POSTGRES_DB},$(shell grep -e GEO_POSTGRES_DB ${ENVFILE} | sed 's/^.*=//'))
 DOCKER_COMPOSE_FILE := $(if $(environment), -f docker-compose-test-e2e.yml $(CIENV), -f docker-compose.yml )
 DOCKER_CLEAN_VOLUMES := $(if $(environment), , \
@@ -29,8 +31,8 @@ test-commands:
 	@echo $(ENVFILE)
 	@echo $(DOCKER_COMPOSE_FILE)
 	@echo $(CIENV)
-	@echo $(_API_POSTGRES_DB)
-	@echo $(_GEO_POSTGRES_USER)
+	@echo $(API_POSTGRES_DB)
+	@echo $(GEO_POSTGRES_USER)
 
 # Start only API and Geoprocessing services
 #
@@ -53,10 +55,10 @@ stop:
 	docker-compose $(DOCKER_COMPOSE_FILE) stop
 
 psql-api:
-	docker-compose $(DOCKER_COMPOSE_FILE) exec $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}"
+	docker-compose $(DOCKER_COMPOSE_FILE) exec $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}"
 
 psql-geo:
-	docker-compose $(DOCKER_COMPOSE_FILE) exec $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"
+	docker-compose $(DOCKER_COMPOSE_FILE) exec $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"
 
 redis-api:
 	docker-compose exec redis redis-cli
@@ -84,25 +86,25 @@ seed-dbs: seed-api-with-test-data
 
 seed-api-with-test-data: seed-api-init-data | seed-geoapi-init-data
 	@echo "$(RED)seeding db with testing project and scenarios:$(NC) $(API_DB_INSTANCE)"
-	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-data.sql
+	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-data.sql
 
 seed-api-init-data:
 	@echo "$(RED)seeding initial dbs:$(NC) $(API_DB_INSTANCE)"
-	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-init-apidb.sql
+	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-init-apidb.sql
 
 seed-geoapi-init-data:
 	@echo "$(RED)seeding dbs with initial geodata:$(NC) $(API_DB_INSTANCE), $(GEO_DB_INSTANCE)"
-	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"; \
-	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}";
-	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${_API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-features.sql
+	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"; \
+	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}";
+	docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -U "${API_POSTGRES_USER}" < api/apps/api/test/fixtures/test-features.sql
 	@for i in api/apps/api/test/fixtures/features/*.sql; do \
 		table_name=`basename -s .sql "$$i"`; \
-		featureid=`docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -X -A -t -U "${_API_POSTGRES_USER}" -c "select id from features where feature_class_name = '$$table_name'"`; \
+		featureid=`docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(API_DB_INSTANCE) psql -X -A -t -U "${API_POSTGRES_USER}" -c "select id from features where feature_class_name = '$$table_name'"`; \
 		echo "appending data for $${table_name} with id $${featureid}"; \
-		sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${_GEO_POSTGRES_USER}"; \
+		sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | docker-compose $(DOCKER_COMPOSE_FILE) exec -T $(GEO_DB_INSTANCE) psql -U "${GEO_POSTGRES_USER}"; \
 	done;
 
-# need notebook service to execute a expecific notebook. this requires a full geodb
+# need notebook service to execute a specific notebook. this requires a full geodb
 generate-geo-test-data: extract-geo-test-data
 	docker-compose --project-name ${COMPOSE_PROJECT_NAME} -f ./data/docker-compose.yml exec marxan-science-notebooks papermill --progress-bar --log-output work/notebooks/Lab/convert_csv_sql.ipynb /dev/null
 	mv -f -u -Z data/data/processed/test-wdpa-data.sql api/apps/api/test/fixtures/test-wdpa-data.sql
@@ -159,10 +161,10 @@ run-test-unit:
 	$(MAKE) --keep-going test-unit-backend
 
 dump-geodb-data:
-	docker-compose exec -T postgresql-geo-api pg_dump -T migrations -a -U "${_GEO_POSTGRES_USER}" -F t ${_GEO_POSTGRES_DB} | gzip > data/data/processed/db_dumps/geo_db-$$(date +%Y-%m-%d).tar.gz
+	docker-compose exec -T postgresql-geo-api pg_dump -T migrations -a -U "${GEO_POSTGRES_USER}" -F t ${GEO_POSTGRES_DB} | gzip > data/data/processed/db_dumps/geo_db-$$(date +%Y-%m-%d).tar.gz
 
 dump-api-data:
-	docker-compose exec -T postgresql-api pg_dump -T '(migrations|api_event_kinds|roles)' -a -U "${_API_POSTGRES_USER}" -F t ${_API_POSTGRES_DB} | gzip > data/data/processed/db_dumps/api_db-$$(date +%Y-%m-%d).tar.gz
+	docker-compose exec -T postgresql-api pg_dump -T '(migrations|api_event_kinds|roles)' -a -U "${API_POSTGRES_USER}" -F t ${API_POSTGRES_DB} | gzip > data/data/processed/db_dumps/api_db-$$(date +%Y-%m-%d).tar.gz
 
 upload-dump-data:
 	az storage blob upload-batch --account-name marxancloudtest --auth-mode login -d data-ingestion-test-00/dbs-dumps -s data/data/processed/db_dumps
@@ -186,18 +188,49 @@ restore-volumes-data:
 	docker run --rm --volumes-from marxan-postgresql-geo-api -v $$(pwd)/data/data/processed/db_volumes:/backup ubuntu bash -c "rm -rf /var/lib/postgresql/data/* && cd / && tar xvf /backup/psql-geo-data.tar"
 
 extract-geo-test-data:
 	#This location correspond with the Okavango delta touching partially Botswana, Angola Zambia and Namibia
-	TEST_GEOMETRY=$(shell cat api/apps/api/test/fixtures/test-geometry-subset.json | jq 'tostring'); \
-	docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM admin_regions WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_admin_regions_okavango.csv; \
-	docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM wdpa WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_wdpa_okavango.csv; \
-	docker-compose exec -T postgresql-geo-api psql -U "${_GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM features_data WHERE st_intersects(the_geom, st_geomfromgeojson('$${TEST_GEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_features_data_okavango.csv;
-	docker-compose exec -T postgresql-api psql -U "${_API_POSTGRES_USER}" -c "COPY (SELECT * FROM features) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/api_features_okavango.csv
+	TESTGEOMETRY=$(shell cat api/apps/api/test/fixtures/test-geometry-subset.json | jq 'tostring'); \
+	docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM admin_regions WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_admin_regions_okavango.csv; \
+	docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM wdpa WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_wdpa_okavango.csv; \
+	docker-compose exec -T postgresql-geo-api psql -U "${GEO_POSTGRES_USER}" -c "COPY (SELECT * FROM features_data WHERE st_intersects(the_geom, st_geomfromgeojson('$${TESTGEOMETRY}'))) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/geo_features_data_okavango.csv;
+	docker-compose exec -T postgresql-api psql -U "${API_POSTGRES_USER}" -c "COPY (SELECT * FROM features) TO STDOUT DELIMITER ',' CSV HEADER;" > data/data/processed/api_features_okavango.csv
 
 generate-content-dumps: dump-api-data | dump-geodb-data
 	jq -n --arg dateName $$(date +%Y-%m-%d) '{"metadata":{"latest":{"name":$$dateName}}}' > data/data/processed/db_dumps/content.json
 
 generate-export-shpfile:
 	-docker-compose exec -T postgresql-geo-api mkdir testdataoutput2
-	-docker-compose exec -T postgresql-geo-api pgsql2shp -f ./testdataoutput2/test.shp -h localhost -p 5432 -r -g the_geom -u ${_GEO_POSTGRES_USER} ${_GEO_POSTGRES_DB} "SELECT the_geom, pug.id as uid, 1 as cost FROM scenarios_pu_data spd inner join planning_units_geom pug on pug.id = spd.pu_geom_id ";
+	-docker-compose exec -T postgresql-geo-api pgsql2shp -f ./testdataoutput2/test.shp -h localhost -p 5432 -r -g the_geom -u ${GEO_POSTGRES_USER} ${GEO_POSTGRES_DB} "SELECT the_geom, pug.id as uid, 1 as cost FROM scenarios_pu_data spd inner join planning_units_geom pug on pug.id = spd.pu_geom_id ";
 	-mkdir data/data
 	-docker cp marxan-postgresql-geo-api:testdataoutput2 data/data
+
+# Native support tasks
+
+# Create the databases
+native-db-create:
+	@echo "SELECT 'CREATE DATABASE \"${API_POSTGRES_DB}\"' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${API_POSTGRES_DB}')\gexec" | psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}"
+
+# Apply migrations to the current database. Assumes the PostgreSQL server is up, the database already exists and is empty
+native-db-migrate: native-db-create
+	cd api; yarn geoprocessing:typeorm migration:run -t each
+	cd api; yarn api:typeorm migration:run -t each
+
+native-seed-api-init-data:
+	@echo "seeding initial dbs"
+	psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-init-apidb.sql
+
+native-seed-geoapi-init-data:
+	@echo "seeding dbs with initial geodata"
+	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-admin-data.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB}; \
+	sed -e "s/\$$user/00000000-0000-0000-0000-000000000000/g" api/apps/api/test/fixtures/test-wdpa-data.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB};
+	psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-features.sql
+	@for i in api/apps/api/test/fixtures/features/*.sql; do \
+		table_name=`basename -s .sql "$$i"`; \
+		featureid=`psql -X -A -t -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} -c "select id from features where feature_class_name = '$$table_name'"`; \
+		echo "appending data for $${table_name} with id $${featureid}"; \
+		sed -e "s/\$$feature_id/$$featureid/g" api/apps/api/test/fixtures/features/$${table_name}.sql | psql -U "${GEO_POSTGRES_USER}" -h "${GEO_POSTGRES_HOST}" ${GEO_POSTGRES_DB}; \
+	done;
+
+native-seed-api-with-test-data: native-db-migrate native-seed-api-init-data | native-seed-geoapi-init-data
+	@echo "seeding db with testing project and scenarios"
+	psql -U "${API_POSTGRES_USER}" -h "${API_POSTGRES_HOST}" ${API_POSTGRES_DB} < api/apps/api/test/fixtures/test-data.sql
diff --git a/api/apps/api/config/custom-environment-variables.json b/api/apps/api/config/custom-environment-variables.json
index b0a1816003..742a8b2676 100644
--- a/api/apps/api/config/custom-environment-variables.json
+++ b/api/apps/api/config/custom-environment-variables.json
@@ -4,12 +4,22 @@
   },
   "postgresApi": {
     "url": "API_POSTGRES_URL",
+    "host": "API_POSTGRES_HOST",
+    "username": "API_POSTGRES_USER",
+    "password": "API_POSTGRES_PASSWORD",
+    "database": "API_POSTGRES_DB",
+    "port": "API_POSTGRES_PORT",
     "runMigrationsOnStartup": "API_RUN_MIGRATIONS_ON_STARTUP"
-  },
-  "postgresGeoApi": {
+  },
+  "postgresGeoApi": {
     "url": "GEO_POSTGRES_URL",
+    "host": "GEO_POSTGRES_HOST",
+    "username": "GEO_POSTGRES_USER",
+    "password": "GEO_POSTGRES_PASSWORD",
+    "database": "GEO_POSTGRES_DB",
+    "port": "GEO_POSTGRES_PORT",
     "runMigrationsOnStartup": "GEOPROCESSING_RUN_MIGRATIONS_ON_STARTUP"
-  },
+  },
   "network": {
     "cors": {
       "origins_extra": "NETWORK_CORS_ORIGINS"
@@ -23,10 +33,13 @@
       "secret": "API_AUTH_X_API_KEY"
     }
   },
+  "geoprocessing": {
+    "url": "GEOPROCESSING_URL"
+  },
   "redisApi": {
-    "connection": {
-      "host": "REDIS_HOST"
-    }
+    "connection": {
+      "host": "REDIS_HOST"
+    }
   },
   "api": {
     "url": "API_SERVICE_URL",
diff --git a/api/apps/api/config/default.json b/api/apps/api/config/default.json
index 85a8fcbdf1..cb8a544a91 100644
--- a/api/apps/api/config/default.json
+++ b/api/apps/api/config/default.json
@@ -13,6 +13,22 @@
     },
     "concurrency": 50
   },
+  "postgresApi": {
+    "url": null,
+    "port": 5432,
+    "host": null,
+    "username": null,
+    "password": null,
+    "database": null
+  },
+  "postgresGeoApi": {
+    "url": null,
+    "port": 5432,
+    "host": null,
+    "username": null,
+    "password": null,
+    "database": null
+  },
   "api": {
     "url": "http://api:3000",
     "daemonListenPort": 3000
diff --git a/api/apps/api/src/migrations/api/1608149578000-EnablePostgis.ts b/api/apps/api/src/migrations/api/1608149578000-EnablePostgis.ts
new file mode 100644
index 0000000000..aa5117ea10
--- /dev/null
+++ b/api/apps/api/src/migrations/api/1608149578000-EnablePostgis.ts
@@ -0,0 +1,25 @@
+import { Logger } from '@nestjs/common';
+import { MigrationInterface, QueryRunner } from 'typeorm';
+import { PostgreSQLUtils } from '@marxan-api/utils/postgresql.utils';
+
+export class EnablePostgis1608149578000 implements MigrationInterface {
+  async up(queryRunner: QueryRunner): Promise<void> {
+    if (await PostgreSQLUtils.version13Plus()) {
+      await queryRunner.query(`
+CREATE EXTENSION IF NOT EXISTS postgis;
+      `);
+    } else {
+      Logger.warn(
+        'The PostgreSQL extension `postgis` is needed for the Marxan API but it was not possible to activate it. Please activate it manually (see setup documentation).',
+      );
+    }
+  }
+
+  async down(queryRunner: QueryRunner): Promise<void> {
+    if (await PostgreSQLUtils.version13Plus()) {
+      await queryRunner.query(`
+DROP EXTENSION IF EXISTS postgis;
+      `);
+    }
+  }
+}
diff --git a/api/apps/api/src/ormconfig.ts b/api/apps/api/src/ormconfig.ts
index b4d299d530..7c080c03c7 100644
--- a/api/apps/api/src/ormconfig.ts
+++ b/api/apps/api/src/ormconfig.ts
@@ -25,6 +25,11 @@ export const apiConnections: Record<
     name: DbConnections.default,
     synchronize: false,
     type: 'postgres',
+    username: AppConfig.get('postgresApi.username'),
+    password: AppConfig.get('postgresApi.password'),
+    port: AppConfig.get('postgresApi.port'),
+    host: AppConfig.get('postgresApi.host'),
+    database: AppConfig.get('postgresApi.database'),
     url: AppConfig.get('postgresApi.url'),
     ssl: false,
     entities: [
@@ -59,6 +64,11 @@ export const apiConnections: Record<
     name: DbConnections.geoprocessingDB,
     synchronize: false,
     type: 'postgres',
+    username: AppConfig.get('postgresGeoApi.username'),
+    password: AppConfig.get('postgresGeoApi.password'),
+    port: AppConfig.get('postgresGeoApi.port'),
+    host: AppConfig.get('postgresGeoApi.host'),
+    database: AppConfig.get('postgresGeoApi.database'),
     url: AppConfig.get('postgresGeoApi.url'),
     ssl: false,
     entities: [
diff --git a/api/apps/api/src/utils/config.utils.spec.ts b/api/apps/api/src/utils/config.utils.spec.ts
index ef194c0b95..a07de976a9 100644
--- a/api/apps/api/src/utils/config.utils.spec.ts
+++ b/api/apps/api/src/utils/config.utils.spec.ts
@@ -8,6 +8,14 @@ process.env.NETWORK_CORS_ORIGINS = extraCorsOrigin;
 import { AppConfig } from './config.utils';
 
 describe('AppConfig', () => {
+  beforeAll(() => {
+    if (process.env.NODE_CONFIG_DIR !== 'apps/api/config') {
+      throw Error(
+        `Running the test suite with NODE_CONFIG_DIR=${process.env.NODE_CONFIG_DIR}, which may cause this test to fail. Please use NODE_CONFIG_DIR=apps/api/config.`,
+      );
+    }
+  });
+
   describe('getFromArrayAndParsedString', () => {
     // Expected full result from `network.cors.origins`. If updating the default
     // list in `config`, relevant tests should break and this list should be
diff --git a/api/apps/geoprocessing/config/custom-environment-variables.json b/api/apps/geoprocessing/config/custom-environment-variables.json
index 514cc70460..aa969a724f 100644
--- a/api/apps/geoprocessing/config/custom-environment-variables.json
+++ b/api/apps/geoprocessing/config/custom-environment-variables.json
@@ -1,10 +1,20 @@
 {
   "postgresApi": {
     "url": "API_POSTGRES_URL",
+    "host": "API_POSTGRES_HOST",
+    "username": "API_POSTGRES_USER",
+    "password": "API_POSTGRES_PASSWORD",
+    "database": "API_POSTGRES_DB",
+    "port": "API_POSTGRES_PORT",
     "runMigrationsOnStartup": "API_RUN_MIGRATIONS_ON_STARTUP"
   },
   "postgresGeoApi": {
     "url": "GEO_POSTGRES_URL",
+    "host": "GEO_POSTGRES_HOST",
+    "username": "GEO_POSTGRES_USER",
+    "password": "GEO_POSTGRES_PASSWORD",
+    "database": "GEO_POSTGRES_DB",
+    "port": "GEO_POSTGRES_PORT",
     "runMigrationsOnStartup": "GEOPROCESSING_RUN_MIGRATIONS_ON_STARTUP"
   },
   "network": {
diff --git a/api/apps/geoprocessing/config/default.json b/api/apps/geoprocessing/config/default.json
index 310f4dbbc0..058935be3c 100644
--- a/api/apps/geoprocessing/config/default.json
+++ b/api/apps/geoprocessing/config/default.json
@@ -6,6 +6,22 @@
       "port": "6379"
     }
   },
+  "postgresApi": {
+    "url": null,
+    "port": 5432,
+    "host": null,
+    "username": null,
+    "password": null,
+    "database": null
+  },
+  "postgresGeoApi": {
+    "url": null,
+    "port": 5432,
+    "host": null,
+    "username": null,
+    "password": null,
+    "database": null
+  },
   "api": {
     "url": "http://api:3000"
   },
diff --git a/api/apps/geoprocessing/src/ormconfig.ts b/api/apps/geoprocessing/src/ormconfig.ts
index df1ae8139a..154a23002a 100644
--- a/api/apps/geoprocessing/src/ormconfig.ts
+++ b/api/apps/geoprocessing/src/ormconfig.ts
@@ -22,6 +22,11 @@ export const geoprocessingConnections: {
     synchronize: false,
     type: 'postgres',
     url: AppConfig.get('postgresGeoApi.url'),
+    username: AppConfig.get('postgresGeoApi.username'),
+    password: AppConfig.get('postgresGeoApi.password'),
+    port: AppConfig.get('postgresGeoApi.port'),
+    host: AppConfig.get('postgresGeoApi.host'),
+    database: AppConfig.get('postgresGeoApi.database'),
     ssl: false,
     entities: [
       path.join(__dirname, '/modules/**/*.geo.entity.{ts,js}'),
@@ -49,6 +54,11 @@ export const geoprocessingConnections: {
     synchronize: false,
     type: 'postgres',
     url: AppConfig.get('postgresApi.url'),
+    username: AppConfig.get('postgresApi.username'),
+    password: AppConfig.get('postgresApi.password'),
+    port: AppConfig.get('postgresApi.port'),
+    host: AppConfig.get('postgresApi.host'),
+    database: AppConfig.get('postgresApi.database'),
     ssl: false,
     entities: [
       __dirname + '/modules/**/*.api.entity.{ts,js}',