This repository has been archived by the owner on Mar 24, 2024. It is now read-only.

Commit

Merge pull request #268 from quantmind/master
2.3.1
lsbardel authored Apr 25, 2021
2 parents 101d195 + 8fbc786 commit 319cb68
Showing 13 changed files with 64 additions and 36 deletions.
2 changes: 1 addition & 1 deletion docs/env.rst
@@ -7,9 +7,9 @@

Several environment variables can be configured at application level

* **DATASTORE** Connection string for the PostgreSQL database
* **BAD_DATA_MESSAGE** (Invalid data format), message displayed when data is not in a valid format (for example, not JSON)
* **ERROR_500_MESSSAGE** (Internal Server Error), message displayed when things go wrong
* **DBPOOL_MIN_SIZE** (10), minimum number of connections in postgres connection pool
* **DBPOOL_MAX_SIZE** (10), maximum number of connections in postgres connection pool
* **DBECHO**, if set to `true` or `yes`, `echo=True` is used when setting up the SqlAlchemy engine
* **MICRO_SERVICE_PORT** (8080), default port when running the `serve` command
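
As a quick illustration of how these settings are typically consumed (the defaults shown are the documented ones; the reading code is a sketch, not the library's own implementation):

```python
import os

# Sketch only: read the documented settings with their documented defaults.
DATASTORE = os.environ.get("DATASTORE")  # e.g. postgresql+asyncpg://<db_user>:<db_password>@<db_host>:<db_port>/<db_name>
DBPOOL_MIN_SIZE = int(os.environ.get("DBPOOL_MIN_SIZE", "10"))
DBPOOL_MAX_SIZE = int(os.environ.get("DBPOOL_MAX_SIZE", "10"))
DBECHO = os.environ.get("DBECHO", "").lower() in ("true", "yes")
MICRO_SERVICE_PORT = int(os.environ.get("MICRO_SERVICE_PORT", "8080"))
```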
18 changes: 17 additions & 1 deletion docs/reference.rst
@@ -149,8 +149,15 @@ Allows adding redoc_ rendering to your API.
DB
==

This module provides integration with the SqlAlchemy_ asynchronous engine for PostgreSQL.
Only connection strings of the following form are supported::

postgresql+asyncpg://<db_user>:<db_password>@<db_host>:<db_port>/<db_name>
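
For orientation, a minimal sketch of what such a connection string feeds into, using SQLAlchemy's public async API (credentials and database name below are placeholders taken from the readme; the library is expected to set this up for you internally):

```python
from sqlalchemy.ext.asyncio import create_async_engine

# Placeholder credentials/host; only the postgresql+asyncpg scheme is supported.
DATASTORE = "postgresql+asyncpg://postgres:postgres@localhost:5432/openapi"
engine = create_async_engine(DATASTORE)  # DBECHO=true corresponds to echo=True here
```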


.. module:: openapi.db.container


Database
--------

@@ -172,6 +179,14 @@ Database container with CRUD operations. Used extensively by the :class:`.SqlApi
:members:


get_db
-------

.. module:: openapi.db

.. autofunction:: get_db


.. module:: openapi.testing

SingleConnDatabase
@@ -277,4 +292,5 @@ Publish
:member-order: bysource


.. _redoc: https://github.com/Redocly/redoc
.. _SqlAlchemy: https://www.sqlalchemy.org/
1 change: 1 addition & 0 deletions mypy.ini
@@ -1,4 +1,5 @@
[mypy]
# plugins = mypy.plugins.dataclasses
warn_return_any = False
warn_unused_configs = False

2 changes: 1 addition & 1 deletion openapi/__init__.py
@@ -1,3 +1,3 @@
"""Minimal OpenAPI asynchronous server application"""

__version__ = "2.3.0"
__version__ = "2.3.1"
2 changes: 1 addition & 1 deletion openapi/data/dump.py
@@ -20,7 +20,7 @@ def dump(schema: Any, data: Any) -> Any:
:param data: data to dump, if dataclasses are part of the schema,
the `dump` metadata function will be used if available (see :func:`.data_field`)
"""
type_info = TypingInfo.get(schema)
type_info = cast(TypingInfo, TypingInfo.get(schema))
if type_info.container is list:
return dump_list(type_info.element, cast(List, data))
elif type_info.container is dict:
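A hedged usage sketch of `dump` (the `Item` dataclass is invented for illustration; the behaviour described in the comments follows the docstring above):

```python
from dataclasses import dataclass

from openapi.data.dump import dump


@dataclass
class Item:  # hypothetical schema, for illustration only
    title: str


# Dump an instance against its dataclass schema; per the docstring, any
# per-field `dump` metadata function is applied when available.
payload = dump(Item, Item(title="hello"))
```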
9 changes: 5 additions & 4 deletions openapi/data/fields.py
@@ -156,11 +156,11 @@ def decimal_field(min_value=None, max_value=None, precision=None, **kw) -> Field
return data_field(**kw)


def email_field(min_length: int = 0, max_length: Optional[int] = None, **kw) -> Field:
def email_field(min_length: int = 0, max_length: int = 0, **kw) -> Field:
"""A specialized :func:`.data_field` for emails, validation via the
`email_validator` third party library
:param min_length: minim length of email
:param min_length: minimum length of email
:param max_length: maximum length of email
"""
kw.setdefault(
@@ -222,14 +222,15 @@ def field_ops(field: Field) -> Iterator[str]:


class Validator:
dump = None

def __call__(self, field: Field, value: Any) -> Any:
raise ValidationError(field.name, "invalid")

def openapi(self, prop: Dict) -> None:
pass

def dump(self, value: Any) -> Any:
return value


@dataclass
class StrValidator(Validator):
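Since `dump` is now a real method on `Validator` (returning the value unchanged by default), a custom validator can override both hooks. A sketch, assuming `Validator` and `ValidationError` are importable from `openapi.data.fields` as this hunk suggests:

```python
from dataclasses import Field
from typing import Any

from openapi.data.fields import ValidationError, Validator


class LowerCaseValidator(Validator):
    """Illustrative validator: accept strings only and lower-case them."""

    def __call__(self, field: Field, value: Any) -> Any:
        if not isinstance(value, str):
            raise ValidationError(field.name, "expected a string")
        return value.lower()

    def dump(self, value: Any) -> Any:
        # The base class now returns the value unchanged; override to customise.
        return str(value)
```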
4 changes: 2 additions & 2 deletions openapi/data/validate.py
@@ -1,5 +1,5 @@
from dataclasses import MISSING, Field, fields
from typing import Any, Dict, NamedTuple, Optional, Tuple, Union
from typing import Any, Dict, NamedTuple, Optional, Tuple, Union, cast

from multidict import MultiDict

@@ -79,7 +79,7 @@ def validate(
:param as_schema: return the schema object rather than simple data type
(dataclass rather than dict for example)
"""
type_info = TypingInfo.get(schema)
type_info = cast(TypingInfo, TypingInfo.get(schema))
try:
if type_info.container is list:
vdata = validate_list(
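A hedged sketch of calling `validate` (the `Task` schema is hypothetical; the result's `errors` attribute matches the view code changed later in this commit, while `data` is an assumption):

```python
from dataclasses import dataclass

from openapi.data.validate import validate


@dataclass
class Task:  # hypothetical schema
    title: str


validated = validate(Task, {"title": "write docs"})
if validated.errors:       # error mapping, as used by the views in this release
    print(validated.errors)
else:
    print(validated.data)  # assumed attribute holding the cleaned data
```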
19 changes: 14 additions & 5 deletions openapi/data/view.py
@@ -1,6 +1,6 @@
import os
from dataclasses import dataclass
from typing import Any, Dict, Optional, cast
from typing import Any, Dict, NoReturn, Optional, cast

from aiohttp import web

@@ -51,7 +51,7 @@ def cleaned(
raise Error
elif schema == "path_schema":
raise web.HTTPNotFound
self.raiseValidationError(errors=validated.errors)
self.raise_validation_error(errors=validated.errors)

# Hacky hacky hack hack
# Later we'll want to implement proper multicolumn search and so
@@ -100,8 +100,17 @@ def get_special_params(self, params: StrDict) -> StrDict:
search_fields=params.pop("search_fields", []),
)

def raiseValidationError(self, message: str = "", errors: ErrorType = None) -> None:
raise ValidationErrors(self.as_errors(message, errors))
def validation_error(
self, message: str = "", errors: Optional[ErrorType] = None
) -> Exception:
"""Create the validation exception used by :meth:`.raise_validation_error`"""
return ValidationErrors(self.as_errors(message, errors))

def raise_validation_error(
self, message: str = "", errors: Optional[ErrorType] = None
) -> NoReturn:
"""Raise an :class:`aiohttp.web.HTTPUnprocessableEntity`"""
raise self.validation_error(message, errors)

def raise_bad_data(
self, exc: Optional[Exception] = None, message: str = ""
@@ -110,7 +119,7 @@ def raise_bad_data(
raise exc from exc
raise TypeError(message or BAD_DATA_MESSAGE)

def as_errors(self, message: str = "", errors: ErrorType = None) -> Dict:
def as_errors(self, message: str = "", errors: Optional[ErrorType] = None) -> Dict:
if isinstance(errors, str):
message = cast(str, message or errors)
errors = None
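A short sketch of the renamed helper in a cleaning hook; `self` stands for any view class exposing the methods added in this hunk (the hook name and field are made up):

```python
# Sketch only: `self` is a view exposing raise_validation_error()/as_errors().
def clean_title(self, data: dict) -> str:
    title = str(data.get("title", "")).strip()
    if not title:
        # Raises the ValidationErrors exception built by validation_error();
        # the camelCase raiseValidationError() spelling is gone in 2.3.1.
        self.raise_validation_error(errors={"title": "required"})
    return title
```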
14 changes: 8 additions & 6 deletions openapi/db/__init__.py
@@ -10,14 +10,16 @@


def get_db(app: Application, store_url: Optional[str] = None) -> Optional[CrudDB]:
"""Create an Open API db handler
"""Create an Open API db handler and set it for use in an aiohttp application
This function
* add the database to the aiohttp application
* add the db command to the command line client (if command is True)
* add the close handler on shutdown
:param app: aiohttp Application
:param store_url: datastore connection string, if not provided the env
variable `DATASTORE` is used instead. If the env variable is not available
either, the method logs a warning and returns `None`
It returns the database object
This function 1) adds the database to the aiohttp application at key "db",
2) adds the db command to the command line client (if command is True)
and 3) adds the close handler on application shutdown
"""
store_url = store_url or os.environ.get("DATASTORE")
if not store_url: # pragma: no cover
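A hedged usage sketch of `get_db` (connection string copied from the readme; the `"db"` application key and the `None` fallback are the behaviours described in the new docstring):

```python
from aiohttp import web

from openapi.db import get_db

app = web.Application()
db = get_db(
    app, store_url="postgresql+asyncpg://postgres:postgres@localhost:5432/openapi"
)
# Per the docstring, the handler is also available as app["db"]; with no
# store_url and no DATASTORE env variable, get_db logs a warning and returns None.
assert db is app["db"]
```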
10 changes: 4 additions & 6 deletions openapi/spec/path.py
@@ -83,15 +83,13 @@ async def json_data(self) -> DataType:
except Exception:
self.raise_bad_data()

def raise_validation_error(
def validation_error(
self, message: str = "", errors: Optional[ErrorType] = None
) -> None:
) -> Exception:
"""Create an :class:`aiohttp.web.HTTPUnprocessableEntity`"""
raw = self.as_errors(message, errors)
data = self.dump(ValidationErrors, raw)
raise web.HTTPUnprocessableEntity(**self.api_response_data(data))

# backward compatibility
raiseValidationError = raise_validation_error
return web.HTTPUnprocessableEntity(**self.api_response_data(data))

def raise_bad_data(
self, exc: Optional[Exception] = None, message: str = ""
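Because `validation_error` now returns the `HTTPUnprocessableEntity` instead of raising it (and the camelCase alias is dropped), callers raise it explicitly. A sketch of a handler method; `self` stands for a path class from this module and the field name is invented:

```python
from aiohttp import web


# Sketch only: `self` is a path handler exposing json_data()/validation_error().
async def patch(self) -> web.Response:
    data = await self.json_data()
    if "id" in data:
        # validation_error() builds the 422 response; raising is now up to the caller.
        raise self.validation_error(errors={"id": "read-only field"})
    return web.json_response(data)
```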
15 changes: 8 additions & 7 deletions openapi/spec/spec.py
@@ -4,7 +4,7 @@
from dataclasses import fields as get_fields
from dataclasses import is_dataclass
from enum import Enum
from typing import Any, Dict, Iterable, List, Optional, Set, cast
from typing import Any, Dict, Iterable, List, Optional, Set, Type, cast

from aiohttp import hdrs, web

@@ -131,13 +131,13 @@ def field2json(self, field: Field, validate: bool = True) -> Dict[str, str]:
validator.openapi(json_property)
return json_property

def dataclass2json(self, schema: Any) -> Dict[str, str]:
def dataclass2json(self, schema: Any) -> Dict[str, Any]:
"""Extract the object representation of a dataclass schema"""
type_info = cast(TypingInfo, TypingInfo.get(schema))
if not type_info or not type_info.is_dataclass:
raise InvalidSpecException(
"Schema must be a dataclass, got "
f"{type_info.typing if type_info else None}"
f"{type_info.element if type_info else None}"
)
properties = {}
required = []
@@ -165,8 +165,8 @@ def dataclass2json(self, schema: Any) -> Dict[str, str]:

def get_schema_info(
self, schema: Any, items: Optional[Field] = None
) -> Dict[str, str]:
type_info = TypingInfo.get(schema)
) -> Dict[str, Any]:
type_info = cast(TypingInfo, TypingInfo.get(schema))
if type_info.container is list:
return {
"type": "array",
@@ -193,11 +193,12 @@ def get_schema_info(
else:
return self.get_primitive_info(type_info.element)

def get_primitive_info(self, schema: type) -> Dict[str, str]:
def get_primitive_info(self, schema: Type) -> Dict[str, Any]:
mapping = fields.PRIMITIVE_TYPES.get(schema)
if not mapping:
if is_subclass(schema, Enum):
return {"type": "string", "enum": [e.name for e in schema]}
enum_type = cast(Type[Enum], schema)
return {"type": "string", "enum": [e.name for e in enum_type]}
else:
raise InvalidTypeException(f"Cannot understand {schema} while parsing")
return dict(mapping)
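The Enum branch above maps a Python `Enum` to a string schema listing its member names; a small sketch with an illustrative enum:

```python
from enum import Enum


class Colour(Enum):  # illustrative enum, not part of the library
    red = 1
    green = 2


# Per the branch above, get_primitive_info(Colour) is expected to yield:
expected = {"type": "string", "enum": ["red", "green"]}
```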
2 changes: 1 addition & 1 deletion readme.md
@@ -50,7 +50,7 @@ To run tests
pytest --cov
```

By default tests are run against a database with the following connection string `postgresql://postgres+asyncpg:postgres@localhost:5432/openapi`. To use a different DB, create a `.env` file with
By default tests are run against a database with the following connection string `postgresql+asyncpg://postgres:postgres@localhost:5432/openapi`. To use a different DB, create a `.env` file with
a different connection string, for example:

```
2 changes: 1 addition & 1 deletion tests/data/test_view.py
@@ -16,4 +16,4 @@ def test_error() -> None:
dv.raise_bad_data(exc=RuntimeError)

with pytest.raises(ValidationErrors):
dv.raiseValidationError()
dv.raise_validation_error()
