Feature: Add eligible donations to qfround entity #122

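The staging pipeline below restores the latest staging database backup, runs tslint, build, migrations and the test suite against it, publishes a Docker image to ghcr.io on pushes to the staging branch, and then redeploys impact-graph on the staging server: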
name: staging-pipeline
on:
  push:
    branches:
      - staging
  pull_request:
    branches:
      - staging
jobs:
  test:
    runs-on: ubuntu-latest
    services:
      # Label used to access the service container
      redis:
        # Docker Hub image
        image: redis
        # Set health checks to wait until redis has started
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 6379:6379
      postgres:
        # Use this postgres image https://github.com/Giveth/postgres-givethio
        image: ghcr.io/giveth/postgres-givethio:latest
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: givethio
          PGDATA: /var/lib/postgresql/data/pgdata
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5443:5432
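    # The postgres service maps container port 5432 to host port 5443, which is
    # why the pg_isready/psql commands in the steps below connect to localhost:5443.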
    steps:
      - name: Checkout
        uses: actions/checkout@v1
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v2
        with:
          aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_S3_REGION }}
      - name: Download latest DB backup from S3
        run: |
          FILENAME=$(aws s3 ls ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/ | sort | tail -n 1 | awk '{print $4}')
          aws s3 cp ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/$FILENAME /tmp/db_backup.zip
      - name: Unzip DB backup
        run: |
          unzip /tmp/db_backup.zip -d /tmp
          mv /tmp/backups/givethio-staging/*.sql /tmp/backups/givethio-staging/db_backup.sql
      - name: Wait for PostgreSQL to become ready
        run: |
          for i in {1..10}
          do
            pg_isready -h localhost -p 5443 -U postgres && echo Success && break
            echo -n .
            sleep 1
          done
      - name: Restore DB backup
        run: PGPASSWORD=postgres psql -h localhost -p 5443 -U postgres -d givethio < /tmp/backups/givethio-staging/db_backup.sql
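      # The backup is restored into the givethio database on the postgres service
      # so the migration and test steps below start from a realistic staging dataset.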
      - name: Use Node.js
        uses: actions/setup-node@v1
        with:
          node-version: 18.17.1
      - name: Install dependencies
        run: npm ci
      - name: Run tslint
        run: npm run tslint
      - name: Run build
        run: npm run build
      - name: Run migrations
        run: npm run db:migrate:run:test
      - name: Run tests
        run: npm run test
        env:
          ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }}
          XDAI_NODE_HTTP_URL: ${{ secrets.XDAI_NODE_HTTP_URL }}
          INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }}
          INFURA_ID: ${{ secrets.INFURA_ID }}
          POLYGON_SCAN_API_KEY: ${{ secrets.POLYGON_SCAN_API_KEY }}
          OPTIMISTIC_SCAN_API_KEY: ${{ secrets.OPTIMISTIC_SCAN_API_KEY }}
          CELO_SCAN_API_KEY: ${{ secrets.CELO_SCAN_API_KEY }}
          CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }}
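  # The publish job only runs for pushes to the staging branch; pull request
  # builds stop after the test job.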
  publish:
    needs: test
    runs-on: ubuntu-latest
    if: github.event_name == 'push'
    steps:
      - name: Check out the repo
        uses: actions/checkout@v2
      - name: Build image and push to GitHub Packages
        uses: docker/build-push-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ github.token }}
          registry: ghcr.io
          repository: giveth/impact-graph
          add_git_labels: true
          # Add branch name to docker image tag @see{@link https://github.com/docker/build-push-action/tree/releases/v1#tag_with_ref}
          tag_with_ref: true
          # Add commit hash to docker image tag @see{@link https://github.com/docker/build-push-action/tree/releases/v1#tag_with_sha}
          tag_with_sha: true
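  # Once the image is published, the deploy job SSHes into the staging server,
  # pulls the new impact-graph image and restarts the service via docker-compose.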
  deploy:
    needs: publish
    runs-on: ubuntu-latest
    steps:
      - name: SSH and Redeploy
        uses: appleboy/ssh-action@master
        with:
          host: ${{ secrets.STAGING_HOST_ALL }}
          username: ${{ secrets.STAGING_USERNAME_ALL }}
          key: ${{ secrets.STAGING_PRIVATE_KEY_ALL }}
          port: ${{ secrets.SSH_PORT }}
          script: |
            cd giveth-all
            docker-compose stop impact-graph
            docker-compose pull impact-graph
            docker-compose up -d impact-graph
            docker image prune -a --force