JTools v3.18.0
Feature changes:

- Use a connection pool to improve database query performance
- Polish the price trend and pending-order volume trend chart styles in the FTN market analysis tool
- Switch the FTN market analysis tool's pending-order price distribution to a stacked bar chart
- Simplify date conversion logic
- Fix the wrong type of the `user_slug` column in the bug-hunting program credits table
- Rename the `utils.db` module

Dependency changes:

- Bump the `uv` version used by the build image
- Update dependency libraries
FHU-yezi committed Nov 11, 2024
2 parents 4741262 + 7e68ed1 commit 086b903
Showing 23 changed files with 532 additions and 528 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.backend
@@ -6,7 +6,7 @@ WORKDIR /app

COPY backend/pyproject.toml backend/uv.lock .

-RUN --mount=from=ghcr.io/astral-sh/uv:0.4.0,source=/uv,target=/bin/uv \
+RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \
    uv sync --frozen --no-dev --no-cache

COPY backend .
8 changes: 2 additions & 6 deletions backend/api/v1/articles.py
@@ -10,6 +10,7 @@
from litestar.params import Parameter
from litestar.status_codes import HTTP_400_BAD_REQUEST
from msgspec import Struct, field
+from sshared.time import to_datetime
from sshared.word_split import WordSplitter
from sspeedup.api.code import Code
from sspeedup.api.litestar import (
@@ -193,12 +194,7 @@ async def get_LP_recommend_check_handler(  # noqa: N802
        article_title=article_title,
        can_recommend_now=can_recommend_now,
        FP_reward=article_fp_reward,
-        # TODO
-        next_can_recommend_date=datetime(
-            year=article_next_can_recommend_date.year,
-            month=article_next_can_recommend_date.month,
-            day=article_next_can_recommend_date.day,
-        )
+        next_can_recommend_date=to_datetime(article_next_can_recommend_date)
        if article_next_can_recommend_date
        else None,
    )
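
Note: the hunk above replaces a hand-rolled date-to-datetime conversion with `sshared.time.to_datetime`. A minimal sketch of what such a helper presumably does (the real implementation lives in `sshared` and may differ):

    from datetime import date, datetime

    def to_datetime(d: date) -> datetime:
        # Promote a date to a datetime at midnight, matching the
        # datetime(year=..., month=..., day=...) call it replaces.
        return datetime(d.year, d.month, d.day)
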
15 changes: 4 additions & 11 deletions backend/api/v1/jpep/ftn_macket.py
@@ -1,27 +1,20 @@
from asyncio import gather
-from datetime import datetime, timedelta
+from datetime import datetime

from typing import Annotated, Literal, Optional

from jkit.jpep.platform_settings import PlatformSettings
from litestar import Response, Router, get
from litestar.params import Parameter
from msgspec import Struct, field
+from sshared.time import get_datetime_before_now, parse_td_str
from sspeedup.api.litestar import (
    RESPONSE_STRUCT_CONFIG,
    generate_response_spec,
    success,
)
-from sspeedup.time_helper import get_start_time

from models.jpep.ftn_macket_record import FTNMacketRecord

-RANGE_TO_TIMEDELTA: dict[str, timedelta] = {
-    "24h": timedelta(hours=24),
-    "7d": timedelta(days=7),
-    "15d": timedelta(days=15),
-    "30d": timedelta(days=30),
-}
-
RESOLUTION_MAPPING: dict[str, Literal["max", "hour", "day"]] = {
    "5m": "max",  # TODO
    "1h": "hour",
@@ -135,7 +128,7 @@ async def get_price_history_handler(
) -> Response:
    history = await FTNMacketRecord.get_price_history(
        type=type_.upper(),  # type: ignore
-        start_time=get_start_time(RANGE_TO_TIMEDELTA[range]),
+        start_time=get_datetime_before_now(parse_td_str(range)),
        resolution=RESOLUTION_MAPPING[resolution],
    )

@@ -168,7 +161,7 @@ async def get_amount_history_handler(
) -> Response:
    history = await FTNMacketRecord.get_amount_history(
        type=type_.upper(),  # type: ignore
-        start_time=get_start_time(RANGE_TO_TIMEDELTA[range]),
+        start_time=get_datetime_before_now(parse_td_str(range)),
        resolution=RESOLUTION_MAPPING[resolution],
    )

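Note: here the static RANGE_TO_TIMEDELTA lookup table is dropped in favour of parsing the range string directly. A rough equivalent of the two `sshared.time` helpers, assuming they follow the value-plus-unit-suffix format the old table used ("24h", "7d", and so on):

    from datetime import datetime, timedelta

    _UNITS = {"m": "minutes", "h": "hours", "d": "days"}

    def parse_td_str(td_str: str) -> timedelta:
        # "24h" -> timedelta(hours=24); "7d" -> timedelta(days=7)
        value, unit = int(td_str[:-1]), td_str[-1]
        return timedelta(**{_UNITS[unit]: value})

    def get_datetime_before_now(td: timedelta) -> datetime:
        # Start of the query window: the current time minus the range.
        return datetime.now() - td
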
5 changes: 5 additions & 0 deletions backend/main.py
@@ -9,13 +9,18 @@
from models.tech_stack import TechStack
from models.tool import Tool
from utils.config import CONFIG
+from utils.db import jianshu_pool, jpep_pool, jtools_pool
from utils.log import logger

logging.getLogger("httpx").setLevel(logging.CRITICAL)
logging.getLogger("httpcore").setLevel(logging.CRITICAL)


async def init_db() -> None:
+    await jianshu_pool.prepare()
+    await jpep_pool.prepare()
+    await jtools_pool.prepare()
+
    await DebugProjectRecord.init()
    await LotteryWinRecord.init()
    await TechStack.init()
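
Note: `init_db` now opens the three connection pools before any table initialization runs. A minimal sketch of the pool objects exported by `utils.db`, assuming they wrap psycopg 3's AsyncConnectionPool (the real classes come from `sshared.postgres` and may differ):

    from contextlib import asynccontextmanager

    from psycopg_pool import AsyncConnectionPool

    class Pool:
        def __init__(self, conninfo: str) -> None:
            # Created closed; prepare() opens it during startup.
            self._pool = AsyncConnectionPool(conninfo, open=False)

        async def prepare(self) -> None:
            # Open the pool and wait for the initial connections.
            await self._pool.open(wait=True)

        @asynccontextmanager
        async def get_conn(self):
            # Borrow a connection; it returns to the pool on exit.
            async with self._pool.connection() as conn:
                yield conn

    jtools_pool = Pool("postgresql://localhost/jtools")  # hypothetical DSN
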
64 changes: 32 additions & 32 deletions backend/models/debug_project_record.py
@@ -4,7 +4,7 @@
from sshared.postgres import Table
from sshared.strict_struct import NonEmptyStr, PositiveInt

-from utils.postgres import get_jtools_conn
+from utils.db import jtools_pool


class DebugProjectRecord(Table, frozen=True):
@@ -19,38 +19,38 @@ class DebugProjectRecord(Table, frozen=True):

    @classmethod
    async def _create_table(cls) -> None:
-        conn = await get_jtools_conn()
-        await conn.execute(
-            """
-            CREATE TABLE IF NOT EXISTS debug_project_records (
-                id SMALLSERIAL CONSTRAINT pk_debug_project_records_id PRIMARY KEY,
-                date DATE NOT NULL,
-                type TEXT NOT NULL,
-                module TEXT NOT NULL,
-                description TEXT NOT NULL,
-                user_name TEXT NOT NULL,
-                user_slug CHAR(12) NOT NULL,
-                reward SMALLINT NOT NULL
-            );
-            """
-        )
+        async with jtools_pool.get_conn() as conn:
+            await conn.execute(
+                """
+                CREATE TABLE IF NOT EXISTS debug_project_records (
+                    id SMALLSERIAL CONSTRAINT pk_debug_project_records_id PRIMARY KEY,
+                    date DATE NOT NULL,
+                    type TEXT NOT NULL,
+                    module TEXT NOT NULL,
+                    description TEXT NOT NULL,
+                    user_name TEXT NOT NULL,
+                    user_slug VARCHAR(12) NOT NULL,
+                    reward SMALLINT NOT NULL
+                );
+                """
+            )

    @classmethod
    async def iter(cls) -> AsyncGenerator["DebugProjectRecord", None]:
-        conn = await get_jtools_conn()
-        cursor = await conn.execute(
-            "SELECT id, date, type, module, description, user_name, "
-            "user_slug, reward FROM debug_project_records ORDER BY date DESC;"
-        )
-
-        async for item in cursor:
-            yield cls(
-                id=item[0],
-                date=item[1],
-                type=item[2],
-                module=item[3],
-                description=item[4],
-                user_name=item[5],
-                user_slug=item[6],
-                reward=item[7],
-            )
+        async with jtools_pool.get_conn() as conn:
+            cursor = await conn.execute(
+                "SELECT id, date, type, module, description, user_name, "
+                "user_slug, reward FROM debug_project_records ORDER BY date DESC;"
+            )
+
+            async for item in cursor:
+                yield cls(
+                    id=item[0],
+                    date=item[1],
+                    type=item[2],
+                    module=item[3],
+                    description=item[4],
+                    user_name=item[5],
+                    user_slug=item[6],
+                    reward=item[7],
+                )
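
Note: the column fix matters because PostgreSQL blank-pads CHAR(12) values shorter than 12 characters to full width, so a shorter slug comes back from queries with trailing spaces; VARCHAR(12) stores and returns slugs as written. A hypothetical usage sketch of the rewritten model, assuming the project layout shown above:

    import asyncio

    from models.debug_project_record import DebugProjectRecord
    from utils.db import jtools_pool

    async def main() -> None:
        await jtools_pool.prepare()  # open the pool before the first query
        async for record in DebugProjectRecord.iter():
            print(record.date, record.user_name, record.reward)

    asyncio.run(main())
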
106 changes: 53 additions & 53 deletions backend/models/jianshu/article_earning_ranking_record.py
@@ -6,7 +6,7 @@
from sshared.postgres import Table
from sshared.strict_struct import NonEmptyStr, PositiveFloat, PositiveInt

-from utils.postgres import get_jianshu_conn
+from utils.db import jianshu_pool


class ArticleEarningRankingRecord(Table, frozen=True):
@@ -28,50 +28,50 @@ async def iter_by_author_slug(
        offset: int,
        limit: int,
    ) -> AsyncGenerator["ArticleEarningRankingRecord"]:
-        conn = await get_jianshu_conn()
-        if order_direction == "ASC":
-            cursor = await conn.execute(
-                sql.SQL(
-                    "SELECT date, ranking, slug, title, author_earning, voter_earning "
-                    "FROM article_earning_ranking_records WHERE author_slug = %s "
-                    "ORDER BY {} OFFSET %s LIMIT %s;"
-                ).format(sql.Identifier(order_by)),
-                (author_slug, offset, limit),
-            )
-        else:
-            cursor = await conn.execute(
-                sql.SQL(
-                    "SELECT date, ranking, slug, title, author_earning, voter_earning "
-                    "FROM article_earning_ranking_records WHERE author_slug = %s "
-                    "ORDER BY {} DESC OFFSET %s LIMIT %s;"
-                ).format(sql.Identifier(order_by)),
-                (author_slug, offset, limit),
-            )
+        async with jianshu_pool.get_conn() as conn:
+            if order_direction == "ASC":
+                cursor = await conn.execute(
+                    sql.SQL(
+                        "SELECT date, ranking, slug, title, author_earning, "
+                        "voter_earning FROM article_earning_ranking_records "
+                        "WHERE author_slug = %s ORDER BY {} OFFSET %s LIMIT %s;"
+                    ).format(sql.Identifier(order_by)),
+                    (author_slug, offset, limit),
+                )
+            else:
+                cursor = await conn.execute(
+                    sql.SQL(
+                        "SELECT date, ranking, slug, title, author_earning, "
+                        "voter_earning FROM article_earning_ranking_records "
+                        "WHERE author_slug = %s ORDER BY {} DESC OFFSET %s LIMIT %s;"
+                    ).format(sql.Identifier(order_by)),
+                    (author_slug, offset, limit),
+                )

-        async for item in cursor:
-            yield cls(
-                date=item[0],
-                ranking=item[1],
-                slug=item[2],
-                title=item[3],
-                author_slug=author_slug,
-                author_earning=item[4],
-                voter_earning=item[5],
-            )
+            async for item in cursor:
+                yield cls(
+                    date=item[0],
+                    ranking=item[1],
+                    slug=item[2],
+                    title=item[3],
+                    author_slug=author_slug,
+                    author_earning=item[4],
+                    voter_earning=item[5],
+                )

    @classmethod
    async def get_latest_record(
        cls, author_slug: str, minimum_ranking: Optional[int] = None
    ) -> Optional["ArticleEarningRankingRecord"]:
-        conn = await get_jianshu_conn()
-        cursor = await conn.execute(
-            "SELECT date, ranking, slug, title, author_earning, voter_earning "
-            "FROM article_earning_ranking_records WHERE author_slug = %s AND "
-            "ranking <= %s ORDER BY date DESC, ranking DESC LIMIT 1;",
-            (author_slug, minimum_ranking if minimum_ranking else 100),
-        )
+        async with jianshu_pool.get_conn() as conn:
+            cursor = await conn.execute(
+                "SELECT date, ranking, slug, title, author_earning, voter_earning "
+                "FROM article_earning_ranking_records WHERE author_slug = %s AND "
+                "ranking <= %s ORDER BY date DESC, ranking DESC LIMIT 1;",
+                (author_slug, minimum_ranking if minimum_ranking else 100),
+            )

-        data = await cursor.fetchone()
+            data = await cursor.fetchone()
        if not data:
            return None
@@ -91,22 +91,22 @@ async def get_pervious_record(
        base_record: "ArticleEarningRankingRecord",
        minimum_ranking: Optional[int] = None,
    ) -> Optional["ArticleEarningRankingRecord"]:
-        conn = await get_jianshu_conn()
-        cursor = await conn.execute(
-            "SELECT date, ranking, slug, title, author_earning, voter_earning "
-            "FROM article_earning_ranking_records WHERE ( date < %s OR "
-            "( date = %s AND ranking < %s ) ) AND author_slug = %s AND ranking <= %s "
-            "ORDER BY date DESC, ranking DESC LIMIT 1;",
-            (
-                base_record.date,
-                base_record.date,
-                base_record.ranking,
-                base_record.author_slug,
-                minimum_ranking if minimum_ranking else 100,
-            ),
-        )
+        async with jianshu_pool.get_conn() as conn:
+            cursor = await conn.execute(
+                "SELECT date, ranking, slug, title, author_earning, voter_earning "
+                "FROM article_earning_ranking_records WHERE ( date < %s OR "
+                "( date = %s AND ranking < %s ) ) AND author_slug = %s "
+                "AND ranking <= %s ORDER BY date DESC, ranking DESC LIMIT 1;",
+                (
+                    base_record.date,
+                    base_record.date,
+                    base_record.ranking,
+                    base_record.author_slug,
+                    minimum_ranking if minimum_ranking else 100,
+                ),
+            )

-        data = await cursor.fetchone()
+            data = await cursor.fetchone()
        if not data:
            return None
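
Note: the ORDER BY column in iter_by_author_slug is interpolated with psycopg 3's sql composition utilities rather than plain string formatting. The same pattern shown standalone (the column name here is illustrative):

    from psycopg import sql

    # sql.Identifier quotes the column name safely, so only the %s value
    # placeholders are filled from request parameters at execute() time.
    query = sql.SQL(
        "SELECT date, ranking FROM article_earning_ranking_records "
        "ORDER BY {} DESC OFFSET %s LIMIT %s;"
    ).format(sql.Identifier("ranking"))
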