[+] use timezone for timestamp
This commit is contained in:
parent
83ac7c3a66
commit
3cde36d8a7
38
deps/test-task-2025-06-30-v1/python/online/fxreader/pr34/test_task_2025_06_30_v1/tickers/alembic/versions/729afc7194c9_add_timezone.py
vendored
Normal file
38
deps/test-task-2025-06-30-v1/python/online/fxreader/pr34/test_task_2025_06_30_v1/tickers/alembic/versions/729afc7194c9_add_timezone.py
vendored
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
"""add timezone
|
||||||
|
|
||||||
|
Revision ID: 729afc7194c9
|
||||||
|
Revises: eb63f793db3a
|
||||||
|
Create Date: 2025-07-11 11:30:06.246152
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from sqlalchemy.dialects import postgresql
|
||||||
|
|
||||||
|
# Revision identifiers used by Alembic to order migrations in the chain.
revision: str = '729afc7194c9'
# Parent revision this migration applies on top of.
down_revision: Union[str, Sequence[str], None] = 'eb63f793db3a'
# No branch labels or cross-migration dependencies for this revision.
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
def upgrade() -> None:
    """Upgrade schema: make ``tickers_ticker.timestamp`` timezone-aware.

    Converts the column from a naive PostgreSQL TIMESTAMP to
    TIMESTAMP WITH TIME ZONE so stored instants carry UTC offsets.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'tickers_ticker',
        'timestamp',
        existing_type=postgresql.TIMESTAMP(),
        type_=sa.DateTime(timezone=True),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
    """Downgrade schema: revert ``tickers_ticker.timestamp`` to naive TIMESTAMP.

    Mirror of :func:`upgrade` — drops the timezone from the column type.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column(
        'tickers_ticker',
        'timestamp',
        existing_type=sa.DateTime(timezone=True),
        type_=postgresql.TIMESTAMP(),
        existing_nullable=False,
    )
    # ### end Alembic commands ###
|
5
deps/test-task-2025-06-30-v1/python/online/fxreader/pr34/test_task_2025_06_30_v1/tickers/models.py
vendored
5
deps/test-task-2025-06-30-v1/python/online/fxreader/pr34/test_task_2025_06_30_v1/tickers/models.py
vendored
@@ -10,6 +10,7 @@ from sqlalchemy import (
|
|||||||
String,
|
String,
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
Numeric,
|
Numeric,
|
||||||
|
DateTime,
|
||||||
UniqueConstraint,
|
UniqueConstraint,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -34,7 +35,9 @@ class Ticker(Base):
|
|||||||
'tickers_market.id',
|
'tickers_market.id',
|
||||||
ondelete='CASCADE',
|
ondelete='CASCADE',
|
||||||
))
|
))
|
||||||
timestamp: Mapped[datetime.datetime] = mapped_column()
|
timestamp: Mapped[datetime.datetime] = mapped_column(
|
||||||
|
DateTime(timezone=True,)
|
||||||
|
)
|
||||||
value: Mapped[decimal.Decimal] = mapped_column(Numeric(
|
value: Mapped[decimal.Decimal] = mapped_column(Numeric(
|
||||||
precision=32, scale=6,
|
precision=32, scale=6,
|
||||||
))
|
))
|
||||||
|
@@ -3,6 +3,7 @@ import asyncio
|
|||||||
import decimal
|
import decimal
|
||||||
import logging
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
|
# import datetime.timezone
|
||||||
import pydantic
|
import pydantic
|
||||||
import json
|
import json
|
||||||
|
|
||||||
@@ -96,7 +97,9 @@ class Emcont:
|
|||||||
store_timeout: float | int = 0.5,
|
store_timeout: float | int = 0.5,
|
||||||
request_period: float | int = 1,
|
request_period: float | int = 1,
|
||||||
) -> None:
|
) -> None:
|
||||||
last_retrieval = datetime.datetime.now()
|
last_retrieval = datetime.datetime.now(
|
||||||
|
tz=datetime.timezone.utc,
|
||||||
|
)
|
||||||
|
|
||||||
assert request_timeout >= 0
|
assert request_timeout >= 0
|
||||||
assert store_timeout >= 0
|
assert store_timeout >= 0
|
||||||
@@ -138,7 +141,9 @@ class Emcont:
|
|||||||
nonlocal next_retrieval
|
nonlocal next_retrieval
|
||||||
|
|
||||||
return (
|
return (
|
||||||
next_retrieval - datetime.datetime.now()
|
next_retrieval - datetime.datetime.now(
|
||||||
|
tz=datetime.timezone.utc,
|
||||||
|
)
|
||||||
).total_seconds()
|
).total_seconds()
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
|
Loading…
Reference in New Issue
Block a user