[+] add ruff

1. add ruff recipes to Makefile;
2. reformat source code with ruff;
This commit is contained in:
Siarhei Siniak 2025-07-18 10:11:28 +03:00
parent 207a8737ba
commit f77399b1d2
10 changed files with 130 additions and 105 deletions

@ -37,6 +37,19 @@ pyright:
-p pyproject.toml \
--pythonpath $(PYTHON_PATH)
ruff_check:
$(ENV_PATH)/bin/python3 -m ruff \
check
ruff_format_check:
$(ENV_PATH)/bin/python3 -m ruff \
format --check
ruff_format:
$(ENV_PATH)/bin/python3 -m ruff \
format
ruff: ruff_format_check ruff_check
compose_env:
cat docker/postgresql/.env .env/postgresql.env > .env/postgresql.patched.env

@ -4,12 +4,9 @@ import sqlalchemy.ext.asyncio
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
def create_engine() -> 'async_sessionmaker[AsyncSession]':
engine = sqlalchemy.ext.asyncio.create_async_engine(
ModelsSettings.singleton().db_url
)
async_session = sqlalchemy.ext.asyncio.async_sessionmaker(
engine
)
engine = sqlalchemy.ext.asyncio.create_async_engine(ModelsSettings.singleton().db_url)
async_session = sqlalchemy.ext.asyncio.async_sessionmaker(engine)
return async_session

@ -11,7 +11,12 @@ from .settings import Settings as APISettings
from .db import create_engine
# from .websocket_api import WebsocketAPI
from typing import (Any, Optional, Literal, Annotated,)
from typing import (
Any,
Optional,
Literal,
Annotated,
)
logger = logging.getLogger(__name__)
@ -35,6 +40,7 @@ logger = logging.getLogger(__name__)
# finally:
# await websocket_api.disconnect(websocket)
def create_app() -> fastapi.FastAPI:
async_session = create_engine()
@ -55,6 +61,7 @@ def create_app() -> fastapi.FastAPI:
return app
def run(args: list[str]):
log_config = copy.deepcopy(uvicorn.config.LOGGING_CONFIG)
@ -67,5 +74,6 @@ def run(args: list[str]):
log_level=logging.INFO,
)
if __name__ == '__main__':
run(sys.argv[1:])

@ -1,7 +1,10 @@
import pydantic
import decimal
from typing import (Literal, Annotated,)
from typing import (
Literal,
Annotated,
)
# class SubscribeAction(pydantic.BaseModel):
# action: Literal['subscribe']

@ -1,14 +1,17 @@
import pydantic
import pydantic_settings
from typing import (ClassVar, Optional,)
from typing import (
ClassVar,
Optional,
)
class Settings(pydantic_settings.BaseSettings):
uvicorn_port : int = 80
uvicorn_host : str = '127.0.0.1'
uvicorn_port: int = 80
uvicorn_host: str = '127.0.0.1'
_singleton : ClassVar[Optional['Settings']] = None
_singleton: ClassVar[Optional['Settings']] = None
@classmethod
def singleton(cls) -> 'Settings':

@ -14,18 +14,15 @@ from alembic import context
from online.fxreader.pr34.test_task_2025_07_17_v2.payloads.settings import Settings
from online.fxreader.pr34.test_task_2025_07_17_v2.payloads.models import (
Base,
# Market,
Base,
# Market,
)
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option(
'sqlalchemy.url',
Settings.singleton().db_url
)
config.set_main_option('sqlalchemy.url', Settings.singleton().db_url)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
@ -33,7 +30,7 @@ config.set_main_option(
# fileConfig(config.config_file_name)
# else:
if True:
logging.basicConfig(level=logging.DEBUG)
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
@ -51,67 +48,67 @@ target_metadata = Base.metadata
def do_run_migrations(
connection: Connection,
connection: Connection,
):
context.configure(connection=connection, target_metadata=target_metadata)
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations():
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
"""
logger.info(dict(msg='started'))
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
logger.info(dict(msg='started'))
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix='sqlalchemy.',
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
await connectable.dispose()
logger.info(dict(msg='done'))
logger.info(dict(msg='done'))
def run_migrations_offline():
"""Run migrations in 'offline' mode.
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
"""
url = config.get_main_option('sqlalchemy.url')
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={'paramstyle': 'named'},
)
with context.begin_transaction():
context.run_migrations()
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode."""
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
asyncio.run(run_async_migrations())
if context.is_offline_mode():
raise NotImplementedError
# run_migrations_offline()
raise NotImplementedError
# run_migrations_offline()
else:
run_migrations_online()
run_migrations_online()

@ -1,10 +1,11 @@
"""add payloads models
Revision ID: f7fa90d3339d
Revises:
Revises:
Create Date: 2025-07-18 09:58:54.099010
"""
from typing import Sequence, Union
from alembic import op
@ -19,22 +20,23 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('payloads_payload',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('output', sa.JSON(), nullable=False),
sa.Column('list_1', sa.JSON(), nullable=False),
sa.Column('list_2', sa.JSON(), nullable=False),
sa.Column('input_hash', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('input_hash')
)
# ### end Alembic commands ###
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'payloads_payload',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('output', sa.JSON(), nullable=False),
sa.Column('list_1', sa.JSON(), nullable=False),
sa.Column('list_2', sa.JSON(), nullable=False),
sa.Column('input_hash', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('input_hash'),
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('payloads_payload')
# ### end Alembic commands ###
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('payloads_payload')
# ### end Alembic commands ###

@ -17,11 +17,15 @@ from sqlalchemy import (
JSON,
)
from typing import (Optional,)
from typing import (
Optional,
)
class Base(DeclarativeBase):
pass
class Payload(Base):
__tablename__ = 'payloads_payload'
@ -31,16 +35,16 @@ class Payload(Base):
list_2: Mapped[list[str]] = mapped_column(JSON())
input_hash: Mapped[str] = mapped_column()
__table_args__ = (
UniqueConstraint('input_hash'),
)
__table_args__ = (UniqueConstraint('input_hash'),)
def __repr__(self) -> str:
return json.dumps(dict(
model=str(type(self)),
id=self.id,
output=self.output,
list_1=self.list_1,
list_2=self.list_2,
input_hash=self.input_hash,
))
return json.dumps(
dict(
model=str(type(self)),
id=self.id,
output=self.output,
list_1=self.list_1,
list_2=self.list_2,
input_hash=self.input_hash,
)
)

@ -1,13 +1,16 @@
import pydantic
import pydantic_settings
from typing import (ClassVar, Optional,)
from typing import (
ClassVar,
Optional,
)
class Settings(pydantic_settings.BaseSettings):
db_url : str
db_url: str
_singleton : ClassVar[Optional['Settings']] = None
_singleton: ClassVar[Optional['Settings']] = None
@classmethod
def singleton(cls) -> 'Settings':

@ -1,4 +1,9 @@
from typing import (TypeVar, Optional, Any, cast,)
from typing import (
TypeVar,
Optional,
Any,
cast,
)
from sqlalchemy.ext.asyncio import AsyncSessionTransaction, AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm import DeclarativeBase
@ -6,22 +11,12 @@ from sqlalchemy.exc import NoResultFound, IntegrityError
M = TypeVar('M', bound='DeclarativeBase')
async def get_or_create(
session: AsyncSession,
model: type[M],
create_method: Optional[str] = None,
create_method_kwargs: Optional[dict[str, Any]] = None,
**kwargs: Any
session: AsyncSession, model: type[M], create_method: Optional[str] = None, create_method_kwargs: Optional[dict[str, Any]] = None, **kwargs: Any
) -> tuple[M, bool]:
async def select_row() -> M:
res = await session.execute(
select(model).where(
*[
getattr(model, k) == v
for k, v in kwargs.items()
]
)
)
res = await session.execute(select(model).where(*[getattr(model, k) == v for k, v in kwargs.items()]))
row = res.one()[0]