
Merge pull request #283 from geoadmin/develop
Release v1.4.0
ltshb authored May 12, 2021
2 parents 4487e48 + 70e886b commit 7717d67
Showing 73 changed files with 10,973 additions and 5,293 deletions.
8 changes: 5 additions & 3 deletions .pylintrc
@@ -33,6 +33,7 @@ limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=pylint_django
django-settings-module=stac_api.settings

# Pickle collected data for later comparisons.
persistent=yes
@@ -72,6 +73,7 @@ disable=missing-docstring,
# yapf correct indentation continuation issue
useless-object-inheritance,
no-self-use, # method signature often defined by django
similarities # currently similarities has a bug when running pylint with jobs>=2


# Enable the message, report, category or checker with the given id(s). You can
@@ -321,7 +323,7 @@ module-naming-style=snake_case

# Regular expression matching correct module names. Overrides module-naming-
# style.
module-rgx=^(?:(?P<snake>[a-z]+[a-z_]*)|(?P<migration>\d+_[_a-z0-9]+))$
module-rgx=^(?:(?P<snake>[a-z]+[a-z_\d]*)|(?P<migration>\d+_[_a-z0-9]+))$

# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
@@ -384,10 +386,10 @@ ignore-comments=yes
ignore-docstrings=yes

# Ignore imports when computing similarities.
ignore-imports=no
ignore-imports=yes

# Minimum lines number of a similarity.
min-similarity-lines=4
min-similarity-lines=8


[VARIABLES]
8 changes: 3 additions & 5 deletions Dockerfile
@@ -17,11 +17,9 @@ RUN groupadd -r geoadmin \
&& pip3 install pipenv \
&& pipenv --version

COPY Pipfile* multihash.patch /tmp/
COPY Pipfile* /tmp/
RUN cd /tmp && \
pipenv install --system --deploy --ignore-pipfile && \
# Patch multihash for md5 support
pipenv run pypatch apply ./multihash.patch multihash
pipenv install --system --deploy --ignore-pipfile

# Set the working dir and copy the app
WORKDIR /app
@@ -92,4 +90,4 @@ USER geoadmin
EXPOSE $HTTP_PORT

# Use a real WSGI server
ENTRYPOINT ["python3"]
ENTRYPOINT ["python3"]
5 changes: 0 additions & 5 deletions Makefile
@@ -5,7 +5,6 @@ SHELL = /bin/bash
SERVICE_NAME := service-stac

CURRENT_DIR := $(shell pwd)
VENV := $(CURRENT_DIR)/.venv

# Django specific
APP_SRC_DIR := app
@@ -104,8 +103,6 @@ $(SETTINGS_TIMESTAMP): $(TIMESTAMPS)
setup: $(SETTINGS_TIMESTAMP)
# Create virtual env with all packages for development
pipenv install --dev
# Patch multihash for md5 support
pipenv run pypatch apply ./multihash.patch multihash
# Create volume directories for postgres and minio
# Note that the '/service_stac_local' part is already the bucket name
mkdir -p .volumes/minio/service-stac-local
@@ -119,8 +116,6 @@ setup: $(SETTINGS_TIMESTAMP)
ci: $(SETTINGS_TIMESTAMP)
# Create virtual env with all packages for development using the Pipfile.lock
pipenv sync --dev
# Patch multihash for md5 support
pipenv run pypatch apply ./multihash.patch multihash


# call yapf to make sure your code is easier to read and respects some conventions.
15 changes: 8 additions & 7 deletions Pipfile
@@ -5,14 +5,14 @@ verify_ssl = true

[dev-packages]
yapf = "~=0.30.0"
pylint = "~=2.5.3"
isort = "~=4.3.21"
pylint-django = "~=2.3.0"
django-extensions = "~=3.0.9"
pylint = "*"
pylint-django = "*"
django-extensions = "*"
django-debug-toolbar = ">=3.2.1"
pip = "*"
django-debug-toolbar = "*"
mock = "==4.0.2"
tblib = "*" # needed for traceback when running tests in parallel
mock = "~=4.0.2"
responses = "!=0.12.1" # skip version 0.12.1 which has a bug see https://github.com/getsentry/responses/issues/358
moto = {extras = [ "s3",], version = "*"}
requests-mock = "*"
@@ -25,8 +25,8 @@ logging-utilities = "~=1.2.0"
numpy = "~=1.19.2"
python-dotenv = "~=0.14.0"
djangorestframework = "~=3.12.1"
Django = "~=3.1"
PyYAML = "~=5.3.1"
Django = "~=3.1.8"
PyYAML = "~=5.4"
whitenoise = "~=5.2.0"
djangorestframework-gis = "~=0.16"
python-dateutil = "~=2.8.1"
@@ -38,6 +38,7 @@ requests = "~=2.25.0"
py-multihash = "~=2.0.1"
pypatch = "*"
django-prometheus = "*"
django-admin-autocomplete-filter = "~=0.6.1"

[requires]
python_version = "3.7"
956 changes: 408 additions & 548 deletions Pipfile.lock

Large diffs are not rendered by default.

31 changes: 23 additions & 8 deletions README.md
@@ -7,20 +7,30 @@

## Table of Content

- [Table of Content](#table-of-content)
- [Summary of the project](#summary-of-the-project)
- [Specs](spec/README.md)
- [Local development](#local-development)
- [Dependencies](#dependencies)
- [Python3.7](#python37)
- [pipenv](#pipenv)
- [Using Postgres on local host](#using-postgres-on-local-host)
- [Creating the local environment](#creating-the-local-environment)
- [Setting up the local database](#setting-up-the-local-database)
- [Using a local PostGres database instead of a container](#using-a-local-postgres-database-instead-of-a-container)
- [Starting dev server](#starting-dev-server)
- [Running tests](#running-tests)
- [Unit test logging](#unit-test-logging)
- [Using Django shell](#using-django-shell)
- [Migrate DB with Django](#migrate-db-with-django)
- [Linting and formatting your work](#linting-and-formatting-your-work)
- [Initial Setup up the RDS database and the user](#initial-setup-up-the-rds-database-and-the-user)
- [Deploying the project and continuous integration](#deploying-the-project-and-continuous-integration)
- [Docker](#docker)
- [Configuration](#configuration)
- [Configuration](#configuration)
- [**General settings**](#general-settings)
- [**Database settings**](#database-settings)
- [**Asset Storage settings (AWS S3)**](#asset-storage-settings-aws-s3)
- [**Development settings (only for local environment and DEV staging)**](#development-settings-only-for-local-environment-and-dev-staging)

## Summary of the project

@@ -33,17 +43,20 @@
Prerequisites on host for development and build:

- python version 3.7
- libgdal-dev
- [pipenv](https://pipenv-fork.readthedocs.io/en/latest/install.html)
- `docker` and `docker-compose`

#### Python3.7

If your Ubuntu distribution is missing Python 3.7, you may use the `deadsnakes` PPA and install it:

sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install python3.7

```bash
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install python3.7
```

#### pipenv

Generally, all modern distributions already ship a [pipenv](https://pipenv-fork.readthedocs.io) package. If not, install it manually.
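A minimal sketch of a manual installation with pip (an assumption, not this repository's documented procedure):

```bash
# Hypothetical manual install of pipenv for the current user; adjust as needed.
pip3 install --user pipenv
pipenv --version
```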
Expand All @@ -52,7 +65,7 @@ The other services that are used (Postgres with PostGIS extension for metadata a

Starting postgres and MinIO is done with a simple

```
```bash
docker-compose up
```

@@ -101,7 +114,7 @@ These steps will ensure you have everything needed to start working locally.
```

An alternative to ```pipenv install``` is to use the ```make setup``` command, which will install the environment,
apply a patch to the multihash package to support md5, create the volumes needed by the Postgres and MinIO containers
create the volumes needed by the Postgres and MinIO containers
and run those containers. ```Make setup``` assumes a standard local installation with a dev environment.

### Setting up the local database
@@ -146,6 +159,7 @@ some default values to be able to start working with it. (From the root)
```

The ```pipenv shell``` command activates the virtual environment provided by pipenv.

### Using a local PostGres database instead of a container

To use a local postgres instance rather than a container, once you've ensured you have the needed dependencies, you should:
@@ -432,6 +446,7 @@ The service is configured by Environment Variable:
| AWS_S3_REGION_NAME | - | |
| AWS_S3_ENDPOINT_URL | `None` | |
| AWS_S3_CUSTOM_DOMAIN | `None` | |
| AWS_PRESIGNED_URL_EXPIRES | 3600 | AWS presigned url for asset upload expire time in seconds |
#### **Development settings (only for local environment and DEV staging)**
13 changes: 9 additions & 4 deletions app/config/settings_prod.py
@@ -76,6 +76,7 @@
'rest_framework.authtoken',
# Note: If you use TokenAuthentication in production you must ensure
# that your API is only available over https.
'admin_auto_filters',
'solo.apps.SoloAppConfig',
'storages',
'whitenoise.runserver_nostatic',
@@ -108,7 +109,7 @@
try:
CACHE_MIDDLEWARE_SECONDS = int(os.environ.get('HTTP_CACHE_SECONDS', '600'))
except ValueError as error:
raise ValueError('Invalid HTTP_CACHE_SECONDS environment value: must be an integer')
raise ValueError('Invalid HTTP_CACHE_SECONDS environment value: must be an integer') from error

ROOT_URLCONF = 'config.urls'
API_BASE = 'api'
@@ -194,7 +195,9 @@
try:
WHITENOISE_MAX_AGE = int(os.environ.get('HTTP_STATIC_CACHE_SECONDS', '3600'))
except ValueError as error:
raise ValueError('Invalid HTTP_STATIC_CACHE_SECONDS environment value: must be an integer')
raise ValueError(
'Invalid HTTP_STATIC_CACHE_SECONDS environment value: must be an integer'
) from error
WHITENOISE_MIMETYPES = {
# These sets the mime types for the api/stac/static/spec/v0.9/openapi.yaml static file
# otherwise a default application/octet-stream is used.
@@ -226,11 +229,13 @@
except KeyError as err:
raise KeyError(f'AWS configuration {err} missing') from err

AWS_PRESIGNED_URL_EXPIRES = int(os.environ.get('AWS_PRESIGNED_URL_EXPIRES', '3600'))

# Configure the admin upload caching
try:
STORAGE_ASSETS_CACHE_SECONDS = int(os.environ.get('HTTP_ASSETS_CACHE_SECONDS', '7200'))
except ValueError as err:
raise ValueError('Invalid HTTP_ASSETS_CACHE_SECONDS, must be an integer')
raise ValueError('Invalid HTTP_ASSETS_CACHE_SECONDS, must be an integer') from err

# Logging
# https://docs.djangoproject.com/en/3.1/topics/logging/
@@ -289,6 +294,6 @@ def get_logging_config():
# data.geo.admin.ch/collection/item/asset to check if asset exists.
EXTERNAL_SERVICE_TIMEOUT = 3

# By default django_promtheus tracks the number of migrations
# By default django_prometheus tracks the number of migrations
# This causes troubles in various places so we disable it
PROMETHEUS_EXPORT_MIGRATIONS = False
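
As an aside on the `AWS_PRESIGNED_URL_EXPIRES` setting added above: a minimal sketch of how such an expiry value is typically passed to boto3 when generating a presigned upload URL. The helper name and call site are assumptions for illustration, not code from this diff.

```python
import os

import boto3

# Hypothetical helper, not part of this repository: shows how an expiry setting
# like AWS_PRESIGNED_URL_EXPIRES is usually handed to boto3 for presigned uploads.
AWS_PRESIGNED_URL_EXPIRES = int(os.environ.get('AWS_PRESIGNED_URL_EXPIRES', '3600'))


def create_presigned_upload_url(bucket, key):
    s3 = boto3.client('s3')
    # The returned URL accepts a single PUT of the given key and becomes
    # invalid after AWS_PRESIGNED_URL_EXPIRES seconds.
    return s3.generate_presigned_url(
        ClientMethod='put_object',
        Params={'Bucket': bucket, 'Key': key},
        ExpiresIn=AWS_PRESIGNED_URL_EXPIRES,
    )
```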
20 changes: 19 additions & 1 deletion app/config/urls.py
@@ -37,9 +37,27 @@ def checker(request):

if settings.DEBUG:
import debug_toolbar
from stac_api.views import TestHttp500
from stac_api.views_test import TestHttp500
from stac_api.views_test import TestCollectionUpsertHttp500
from stac_api.views_test import TestItemUpsertHttp500
from stac_api.views_test import TestAssetUpsertHttp500

urlpatterns = [
path('__debug__/', include(debug_toolbar.urls)),
path('tests/test_http_500', TestHttp500.as_view()),
path(
'tests/test_collection_upsert_http_500/<collection_name>',
TestCollectionUpsertHttp500.as_view(),
name='test-collection-detail-http-500'
),
path(
'tests/test_item_upsert_http_500/<collection_name>/<item_name>',
TestItemUpsertHttp500.as_view(),
name='test-item-detail-http-500'
),
path(
'tests/test_asset_upsert_http_500/<collection_name>/<item_name>/<asset_name>',
TestAssetUpsertHttp500.as_view(),
name='test-asset-detail-http-500'
),
] + urlpatterns
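
The `stac_api/views_test.py` module referenced above is not shown in this diff. As a hedged sketch only, a debug view of this kind typically just raises an exception so that 500 error handling can be exercised in tests; the body below is an assumption, not the project's actual implementation.

```python
# Hypothetical sketch of a debug-only view such as TestHttp500; the real code
# lives in stac_api/views_test.py and may differ.
from rest_framework.views import APIView


class TestHttp500(APIView):

    def get(self, request, *args, **kwargs):
        # Fail on purpose so that tests can assert the 500 error response format.
        raise AttributeError('test exception for HTTP 500 handling')
```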
15 changes: 7 additions & 8 deletions app/config/version.py
@@ -11,18 +11,17 @@
# the tag is directly related to the commit or has an additional
# suffix 'v[0-9]+\.[0-9]+\.[0-9]+-beta.[0-9]-[0-9]+-gHASH' denoting
# the 'distance' to the latest tag
proc = subprocess.Popen(["git", "describe", "--tags"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
with subprocess.Popen(["git", "describe", "--tags"], stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
stdout, stderr = proc.communicate()
GIT_VERSION = stdout.decode('utf-8').strip()
if GIT_VERSION == '':
# If theres no git tag found in the history we simply use the short
# version of the latest git commit hash
proc = subprocess.Popen(["git", "rev-parse", "--short", "HEAD"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate()
with subprocess.Popen(["git", "rev-parse", "--short", "HEAD"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE) as proc:
stdout, stderr = proc.communicate()
APP_VERSION = f"v_{stdout.decode('utf-8').strip()}"
else:
APP_VERSION = GIT_VERSION