From a4b5fe970bfb2a33658f3b28e82eeb04eed9ac08 Mon Sep 17 00:00:00 2001 From: Atticuszz <1831768457@qq.com> Date: Sat, 9 Nov 2024 15:36:21 +0800 Subject: [PATCH] back_up: pre-start --- README.md | 130 ++++--------------------- backend/alembic.ini | 115 ++++++++++++++++++++++ backend/app/alembic/README | 1 + backend/app/alembic/env.py | 87 +++++++++++++++++ backend/app/alembic/script.py.mako | 26 +++++ backend/app/core/auth.py | 4 +- backend/app/core/db.py | 13 ++- backend/app/core/events.py | 19 ---- backend/app/crud/base.py | 3 +- backend/app/models/__init__.py | 5 +- backend/app/tests/conftest.py | 2 +- backend/app/tests/pre_start/test_db.py | 1 - backend/app/utils/pre_start.py | 24 +++++ backend/scripts/pre-start.sh | 13 +++ 14 files changed, 300 insertions(+), 143 deletions(-) create mode 100644 backend/alembic.ini create mode 100644 backend/app/alembic/README create mode 100644 backend/app/alembic/env.py create mode 100644 backend/app/alembic/script.py.mako delete mode 100644 backend/app/core/events.py create mode 100644 backend/app/utils/pre_start.py create mode 100755 backend/scripts/pre-start.sh diff --git a/README.md b/README.md index aaf418a..de95cc8 100644 --- a/README.md +++ b/README.md @@ -32,51 +32,21 @@ ___ ___ -### FastAPI&supabase - -1. works of authorization all handled by supabase-py and fastapi **dependency** without any extra code -2. supabase-py crud integration with **pydantic** model validation - -### Pytest - -1. pytest integration with **pytest-cov** -2. pytest **fixtures** for fastapi client and supabase client -3. pytest **fixtures** for access_token and refresh_token -4. test for **CRUD** operations -5. test for **api** operations - -### CI/CD - -1. **codecov** for coverage report -2. **poetry** for dependency management and pytest integration -3. **pre-commit** for code quality -4. **latest_changes.yml** for auto update README.md -5. **Semantic Release** for auto release and changelog -6. **docker** for deployment - ## How to use it -___ -![](docs/assets/usage.gif) -1. create your github repo and config it - 1. allow ci to access your repo - ![img.png](docs/assets/img.png) - 2. config ci_tokens - 1. `CODECOV_TOKEN` for codecov in `.github/workflows/ci.yml` ,`semantic-release` is optional for auto release - 2. `ATTICUS_PAT`should replace with your GitHub token for latest_changes.yml in `.github/workflows/latest_changes.yml` - 3. `DOCKER_USERNAME` and `DOCKER_PASSWORD` for docker-image.yml in `.github/workflows/docker-image.yml` - 4. replace `tags: atticuszhou/supafast:latest` with your docker repo in `.github/workflows/docker-image.yml` - 3. config fastapi setting in `your_project\src\app\core\config.py` - 4. config `pyproject.toml` with your project name and description,etc -2. cd your repo and install dependencies with [uv](https://github.com/astral-sh/uv), which is an extremely fast Python package and project manager, written in Rust. +### install python dependencies + +cd your repo and install dependencies with [uv](https://github.com/astral-sh/uv), which is an extremely fast Python package and project manager, written in Rust. ```shell uv sync ``` -3. [start your supabase locally](https://supabase.com/docs/guides/local-development/cli/getting-started?queryGroups=platform&platform=linux&queryGroups=access-method&access-method=postgres) +### install supabase + +1. 
[start your supabase locally](https://supabase.com/docs/guides/local-development/cli/getting-started?queryGroups=platform&platform=linux&queryGroups=access-method&access-method=postgres) ```bash # brew in linux https://brew.sh/ @@ -85,34 +55,17 @@ supabase init supabase start ``` -4. set your supabase env +2. set your supabase env ```shell export SUPABASE_URL=your_supabase_url export SUPABASE_KEY=your_supabase_key -export SUPERUSER_EMAIL=your_superuser_email -export SUPERUSER_PASSWORD=your_superuser_password ``` -5. config fastapi settings - -```python -# src/app/core/config.py -class Settings(BaseSettings): - API_V1_STR: str = "/api/v1" - SUPABASE_URL: str = Field(default_factory=lambda: os.getenv("SUPABASE_URL")) - SUPABASE_KEY: str = Field(default_factory=lambda: os.getenv("SUPABASE_KEY")) - SUPERUSER_EMAIL: str = Field(default_factory=lambda: os.getenv("SUPERUSER_EMAIL")) - SUPERUSER_PASSWORD: str = Field(default=lambda: os.getenv("SUPERUSER_PASSWORD")) - # SERVER_NAME: str - SERVER_HOST: AnyHttpUrl = "https://localhost" - SERVER_PORT: int = 8000 - BACKEND_CORS_ORIGINS: list[AnyHttpUrl] = [] - PROJECT_NAME: str = "fastapi supabase template" - Config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) -``` -6. run server +### run + +1. run server ```shell uv run uvicorn app.main:app --reload @@ -123,63 +76,18 @@ uv run uvicorn app.main:app --reload ___ - [x] FastAPI backend - - [x] **standard** structure + - [ ] Layered Architecture for **FastAPI** project + - [ ] sqlmodel ORM supabase + - [x] test_pre_start.py + - [ ] pre_start.py + - [ ] crud test + - [ ] api test - ```text - ── src - │ └── app - │ ├── api - │ │ ├── api_v1 - │ │ │ ├── endpoints - │ │ │ │ ├── __init__.py - │ │ │ │ └── items.py - │ │ │ ├── __init__.py - │ │ │ └── api.py - │ │ ├── __init__.py - │ │ └── deps.py - │ ├── core - │ │ ├── __init__.py - │ │ ├── config.py - │ │ └── events.py - │ ├── crud - │ │ ├── __init__.py - │ │ ├── base.py - │ │ └── crud_item.py - │ ├── schemas - │ │ ├── __init__.py - │ │ ├── auth.py - │ │ ├── base.py - │ │ ├── item.py - │ │ └── msg.py - │ ├── services - │ │ └── __init__.py - │ ├── utils - │ │ └── __init__.py - │ ├── __init__.py - │ └── main.py - ... - ``` - - - [x] **auto-auth** by fastapi dependency with supabase-auth - - [x] **CRUD** operations pytest - - [x] **api** requests pytest - [ ] Supabase integration - - [x] crud supabase-postgresql + - [ ] crud supabase-postgresql - [ ] websocket with supabase-realtime - [ ] curd supabase-storage - [ ] supafunc integration -- [x] deployment - - [x] Full **Docker** integration (Docker based). -- [ ] clone - - [ ] cookiecutter - -## Release Notes 🥸 - -___ - -### Latest Changes - -## License - -This project is licensed under the terms of the MIT license. +- [ ] deployment + - [ ] Full **Docker** integration (Docker based). diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..66d43a6 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,115 @@ +# A generic, single database configuration. 
+ +[alembic] +# path to migration scripts +# Use forward slashes (/) also on windows to provide an os agnostic path +script_location = app/alembic + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file +# for all available tokens +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to alembic/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. +# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +# version_path_separator = newline +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. 
See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARNING +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/app/alembic/README b/backend/app/alembic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/backend/app/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/backend/app/alembic/env.py b/backend/app/alembic/env.py new file mode 100644 index 0000000..d4f82e3 --- /dev/null +++ b/backend/app/alembic/env.py @@ -0,0 +1,87 @@ +from logging.config import fileConfig + +from alembic import context +from app.core.config import settings +from app.models import * # noqa: F403 +from sqlalchemy import engine_from_config, pool +from sqlmodel import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support + + +target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def get_url()-> str: + return str(settings.SQLALCHEMY_DATABASE_URI) + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = get_url() + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + compare_type=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + configuration = config.get_section(config.config_ini_section) + if configuration is None: + raise FileNotFoundError("alembic config is None!") + configuration["sqlalchemy.url"] = get_url() + connectable = engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure( + connection=connection, target_metadata=target_metadata,compare_type=True,dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() + diff --git a/backend/app/alembic/script.py.mako b/backend/app/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/backend/app/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/app/core/auth.py b/backend/app/core/auth.py index ab1d440..b35004d 100644 --- a/backend/app/core/auth.py +++ b/backend/app/core/auth.py @@ -3,11 +3,11 @@ from fastapi import Depends, HTTPException from fastapi.security import OAuth2PasswordBearer +from supabase._async.client import AsyncClient, create_client +from supabase.lib.client_options import ClientOptions from app.core.config import settings from app.schemas.auth import UserIn -from supabase._async.client import AsyncClient, create_client -from supabase.lib.client_options import ClientOptions async def get_super_client() -> AsyncClient: diff --git a/backend/app/core/db.py b/backend/app/core/db.py index 99b49f2..2b7e781 100644 --- a/backend/app/core/db.py +++ b/backend/app/core/db.py @@ -1,8 +1,7 @@ -from collections.abc import Generator -from typing import AsyncGenerator +from collections.abc import AsyncGenerator from sqlalchemy.ext.asyncio import create_async_engine -from sqlmodel import Session, select +from sqlmodel import select from sqlmodel.ext.asyncio.session import AsyncSession from app.core.auth import get_super_client @@ -21,7 +20,7 @@ async def get_db() -> AsyncGenerator[AsyncSession, None]: yield session -async def init_db(session: Session) -> None: +async def init_db(session: AsyncSession) -> None: # Tables should be created with Alembic migrations # But if you don't want to use migrations, create # the tables un-commenting the next lines @@ -29,10 +28,10 @@ async def init_db(session: Session) -> None: # # This works because the models are already imported and registered from app.models # SQLModel.metadata.create_all(engine) - user = session.exec( + result = await session.exec( select(User).where(User.email == settings.FIRST_SUPERUSER) - ).first() - + ) + user = result.first() if not user: super_client = await get_super_client() response = await super_client.auth.sign_up( diff --git a/backend/app/core/events.py b/backend/app/core/events.py deleted file mode 100644 index 4f2c1d6..0000000 --- a/backend/app/core/events.py 
+++ /dev/null @@ -1,19 +0,0 @@ -""" -life span events -""" - -import logging -from collections.abc import AsyncGenerator -from contextlib import asynccontextmanager - -from fastapi import FastAPI - - -@asynccontextmanager -async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: # noqa ARG001 - """life span events""" - try: - logging.info("lifespan start") - yield - finally: - logging.info("lifespan shutdown") diff --git a/backend/app/crud/base.py b/backend/app/crud/base.py index 8670ed7..dd4f9ea 100644 --- a/backend/app/crud/base.py +++ b/backend/app/crud/base.py @@ -1,8 +1,9 @@ from typing import Generic, TypeVar +from supabase._async.client import AsyncClient + from app.schemas.auth import UserIn from app.schemas.base import CreateBase, ResponseBase, UpdateBase -from supabase._async.client import AsyncClient ModelType = TypeVar("ModelType", bound=ResponseBase) CreateSchemaType = TypeVar("CreateSchemaType", bound=CreateBase) diff --git a/backend/app/models/__init__.py b/backend/app/models/__init__.py index 10a1377..66e0f1e 100644 --- a/backend/app/models/__init__.py +++ b/backend/app/models/__init__.py @@ -1,9 +1,12 @@ +from sqlmodel import SQLModel + from .item import Item from .user import User -from sqlmodel import SQLModel + __all__ = ["User", "Item", "Message"] + # Generic message class Message(SQLModel): message: str diff --git a/backend/app/tests/conftest.py b/backend/app/tests/conftest.py index ab9e7c9..4642643 100644 --- a/backend/app/tests/conftest.py +++ b/backend/app/tests/conftest.py @@ -8,10 +8,10 @@ from faker import Faker from fastapi.testclient import TestClient from sqlmodel import Session +from supabase._async.client import create_client from app.main import app from app.schemas import Token -from supabase._async.client import create_client LOG_FILE = Path(__file__).parent / "scripts.log" diff --git a/backend/app/tests/pre_start/test_db.py b/backend/app/tests/pre_start/test_db.py index b1be969..18aa6ac 100644 --- a/backend/app/tests/pre_start/test_db.py +++ b/backend/app/tests/pre_start/test_db.py @@ -2,7 +2,6 @@ import pytest from sqlmodel import Session - from src.app.api.deps import engine from src.app.core.db import init_db diff --git a/backend/app/utils/pre_start.py b/backend/app/utils/pre_start.py new file mode 100644 index 0000000..80ba3db --- /dev/null +++ b/backend/app/utils/pre_start.py @@ -0,0 +1,24 @@ +import asyncio +import logging + +from sqlmodel.ext.asyncio.session import AsyncSession + +from app.core.db import engine, init_db + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +async def init() -> None: + async with AsyncSession(engine) as session: + await init_db(session) + + +def main() -> None: + logger.info("Creating initial data") + asyncio.run(init()) + logger.info("Initial data created") + + +if __name__ == "__main__": + main() diff --git a/backend/scripts/pre-start.sh b/backend/scripts/pre-start.sh new file mode 100755 index 0000000..f92788c --- /dev/null +++ b/backend/scripts/pre-start.sh @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +set -e +set -x + +# Let the DB start +python -m app.utils.pre_start + +# # Run migrations +# alembic upgrade head + +# # Create initial data in DB +# python app/initial_data.py
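
With `alembic.ini` pointing `script_location` at `app/alembic`, and `env.py` building the URL from `settings.SQLALCHEMY_DATABASE_URI` with `SQLModel.metadata` as the autogenerate target, a first revision could be generated and applied roughly as follows. This is a sketch only: the revision message is illustrative, the `versions/` directory is not created by this patch, and a reachable database plus a synced `uv` environment are assumed.

```bash
cd backend

# The patch does not add the versions/ directory Alembic writes revisions into
mkdir -p app/alembic/versions

# Autogenerate a revision from the SQLModel models imported in app/alembic/env.py
uv run alembic revision --autogenerate -m "create user and item tables"  # illustrative message

# Apply it; scripts/pre-start.sh still keeps `alembic upgrade head` commented out
uv run alembic upgrade head
```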
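
The pre-start flow itself can then be exercised in the spirit of the updated README. Again a sketch, not part of the patch: it assumes the local Supabase stack is running, `SUPABASE_URL`/`SUPABASE_KEY` are exported, the first-superuser credentials used by `init_db` are configured in `app.core.config.settings`, and commands are issued from `backend/`.

```bash
cd backend

# Seed the first superuser via app.utils.pre_start (wrapped by the new script);
# `uv run` puts the project virtual environment on PATH for the bare `python` call inside
uv run bash scripts/pre-start.sh

# Start the API with hot reload, as in the README
uv run uvicorn app.main:app --reload
```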