Compare commits

..

56 Commits

Author SHA1 Message Date
b0dd2eb5cf [+] release .whl 2025-07-16 12:22:32 +03:00
f7abca1e1b [+] implement assets_index 2025-07-16 12:21:18 +03:00
e73f57670a [+] fix time serialization 2025-07-16 12:14:51 +03:00
f4831d5759 [+] improve asset_history 2025-07-16 11:04:17 +03:00
17bfb08e43 [+] fix typing 2025-07-16 10:52:37 +03:00
dda9c841fd [+] improve ticker orm logic 2025-07-15 14:05:45 +03:00
181a9a5ce9 [+] partially add assets history endpoint 2025-07-15 13:10:04 +03:00
7d6ce1eaee [+] improve websocket api 2025-07-14 11:41:02 +03:00
612d807bc4 [+] add websocket clients manager 2025-07-14 11:17:39 +03:00
25d5b34add [+] add log_level 2025-07-14 11:07:22 +03:00
0151e61cd6 [+] add websockets dependency 2025-07-14 11:05:26 +03:00
687dc4bb9b [+] fix typing 2025-07-14 11:03:10 +03:00
814fefd18b [+] add api settings 2025-07-14 10:59:19 +03:00
afdc7c17b6 [+] partially add fastapi 2025-07-14 10:53:57 +03:00
3cde36d8a7 [+] use timezone for timestamp 2025-07-11 11:34:11 +03:00
83ac7c3a66 [+] update env for emcont_worker 2025-07-11 11:21:14 +03:00
ad7bff67c4 [+] update docker for services 2025-07-11 11:17:48 +03:00
83a09207d6 [+] deploy .whl 2025-07-11 11:04:38 +03:00
7103f3a089 [+] fix periodic retrieval 2025-07-11 10:55:36 +03:00
10c012aba2 [+] improve periodic processing 2025-07-10 11:53:08 +03:00
52df4b54d5 [+] improve timeouts handling 2025-07-10 11:40:53 +03:00
92a9f36acd [+] fix not frozen Market rows 2025-07-10 11:27:58 +03:00
070a63222c [+] improve tickers storage in DB 2025-07-10 11:18:15 +03:00
027475e4b3 [+] remove alembic.ini 2025-07-09 12:08:08 +03:00
13e2bff324 [+] add get_or_create for sqlalchemy 2025-07-09 11:51:20 +03:00
731b9d384a [+] partially add storing of tickers 2025-07-09 11:12:56 +03:00
c8370f96ff [+] reuse settings from tickers 2025-07-09 10:55:10 +03:00
9aec75cdd7 [+] add basic entry point 2025-07-08 13:17:13 +03:00
38c0b9ba87 [+] add Emcont.worker 2025-07-08 13:08:37 +03:00
ac23cc9397 [+] improve requirements 2025-07-08 10:46:16 +03:00
acd34f2ca5 [+] improve emcont wrapper 2025-07-08 10:44:47 +03:00
eb32f27bad [+] partially add emcont
1. update dependencies;
2. partially add tickers retrieval and parsing;
2025-07-07 10:45:03 +03:00
60ef0e386d [+] add Ticker table 2025-07-07 10:33:44 +03:00
3f1e8c57ac [+] update alembic
1. fix async connection for alembic;
2. generate Market table migration;
3. migrate database;
2025-07-04 11:33:33 +03:00
18449382e1 [+] update alembic 2025-07-04 10:56:27 +03:00
c42e39a8d5 [+] update alembic 2025-07-04 10:49:50 +03:00
f8eb591b05 [+] improve alembic 2025-07-04 10:41:27 +03:00
d9f5c20557 [+] add models partially 2025-07-03 11:29:50 +03:00
4440e084b9 [+] improve typing 2025-07-03 11:22:33 +03:00
2dec1e33c2 [+] improve alembic 2025-07-03 11:18:35 +03:00
eb2d630dd0 [+] add alembic 2025-07-03 11:06:50 +03:00
e4a02726e7 [+] improve deployment 2025-07-02 12:43:40 +03:00
a0a2248306 [+] improve deployment 2025-07-02 12:39:40 +03:00
fadfd8711c [+] improve compose 2025-07-02 12:36:55 +03:00
d01386a4dc [+] improve compose 2025-07-02 12:29:45 +03:00
c82107bad1 [+] add postgresql 2025-07-02 12:21:55 +03:00
976576f8c6 [+] add redis 2025-07-01 11:08:51 +03:00
c2fecdd87c [+] improve compose 2025-07-01 11:05:50 +03:00
a0f1654cf5 [+] improve project layout 2025-07-01 11:03:55 +03:00
7fb9aae90e [+] update requirements 2025-07-01 10:49:40 +03:00
1e141ce6fa [+] add partial documentation 2025-06-30 18:43:09 +03:00
4da19bb053 [+] improve pyright 2025-06-30 18:40:03 +03:00
e7bc75f0d8 [+] add basic python module layout 2025-06-30 18:38:14 +03:00
d03154314c [+] update dependencies 2025-06-30 18:35:18 +03:00
1a668159c7 [+] add basic commands 2025-06-30 18:31:22 +03:00
a33711cc49 [+] add tmuxp 2025-06-30 18:21:22 +03:00
180 changed files with 4692 additions and 6920 deletions

@ -1,4 +1,3 @@
* *
.* .*
!d1/blank-app-nginx.conf !d1/blank-app-nginx.conf
!docker/checks

@ -1,5 +0,0 @@
[**/*.py]
vim_modeline = set noet ts=2 sts=2 sw=2 ai ci
[**/meson.build]
vim_modeline = set noet ts=2 sts=2 sw=2 ai ci

@ -1,3 +0,0 @@
NGINX_EXPORTER_PORTS=127.0.0.1:9113
CHECKS_PORTS=127.0.0.1:9097
SUBNET=172.31.0

3
.gitattributes vendored

@ -1,5 +1,2 @@
releases/tar/** filter=lfs diff=lfs merge=lfs -text releases/tar/** filter=lfs diff=lfs merge=lfs -text
releases/whl/** filter=lfs diff=lfs merge=lfs -text releases/whl/** filter=lfs diff=lfs merge=lfs -text
python/deps/whl/** filter=lfs diff=lfs merge=lfs -text
docker/*/deps/whl/** filter=lfs diff=lfs merge=lfs -text
**/*.gpg filter=lfs diff=lfs merge=lfs -text

7
.gitignore vendored

@ -12,13 +12,6 @@ d2/book1/books
*.tar.gz *.tar.gz
.vscode/* .vscode/*
!.vscode/launch.json !.vscode/launch.json
!python/deps/whl/**/*.whl
python/build python/build
.*.kate-swp .*.kate-swp
!releases/whl/*.whl !releases/whl/*.whl
.env
!docker/*/.env
.envs
!docker/*/deps/whl/**
!dotfiles/.vim

@ -51,15 +51,6 @@ python_put_dist:
done done
ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands
python_put_pr34:
$(INSTALL_ROOT)/env3/bin/python3 -m uv pip install $(UV_ARGS) \
-f releases/whl \
-U \
online.fxreader.pr34
ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands
ln -sf $(INSTALL_ROOT)/env3/bin/oom_firefox $(INSTALL_ROOT)/oom_firefox
PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34 PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34
python_whl: python_whl:
for f in $(PYTHON_PROJECTS_NAMES); do \ for f in $(PYTHON_PROJECTS_NAMES); do \
@ -78,6 +69,7 @@ dotfiles_put:
cp dotfiles/.vimrc ~/.vimrc cp dotfiles/.vimrc ~/.vimrc
cp dotfiles/.tmux.conf ~/.tmux.conf cp dotfiles/.tmux.conf ~/.tmux.conf
cp dotfiles/.py3.vimrc ~/.py3.vimrc cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.gitconfig ~/.gitconfig cp dotfiles/.gitconfig ~/.gitconfig
cp -rp \ cp -rp \
dotfiles/.ipython/profile_default/ipython_config.py \ dotfiles/.ipython/profile_default/ipython_config.py \
@ -90,20 +82,6 @@ dotfiles_put:
done done
#commands install -f -p dotfiles -s dotfiles/ -t ~/.config/ #commands install -f -p dotfiles -s dotfiles/ -t ~/.config/
dotfiles_vim_put:
@echo INSTALL_ROOT=$(INSTALL_ROOT)
mkdir -p $(INSTALL_ROOT)
mkdir -p $(INSTALL_ROOT)/.vim
cp dotfiles/.vimrc $(INSTALL_ROOT)/.vimrc
cp dotfiles/.py3.vimrc $(INSTALL_ROOT)/.py3.vimrc
cp -rp dotfiles/.vim/online_fxreader_pr34_vim $(INSTALL_ROOT)/.vim/
dotfiles_tmux_put:
mkdir -p $(INSTALL_ROOT)
cp dotfiles/.tmux.conf ~/.tmux.conf
PLATFORM ?= macbook_air_2012 PLATFORM ?= macbook_air_2012
PLATFORM_TMP ?= tmp/platform_dotfiles/$(PLATFORM) PLATFORM_TMP ?= tmp/platform_dotfiles/$(PLATFORM)
@ -118,42 +96,6 @@ dotfiles_put_platform:
sudo udevadm control --reload sudo udevadm control --reload
sudo systemctl daemon-reload sudo systemctl daemon-reload
GPG_RECIPIENTS_ARGS ?= -r 891382BEBFEFFC6729837400DA0B6C15FBB70FC9
dotfiles_fetch_platform:
mkdir -p platform_dotfiles/$(PLATFORM)
tar -cvf - \
/etc/udev/rules.d/ \
/etc/systemd/logind.conf \
~/.local/bin/systemd_gtk \
~/.local/bin/gnome-shortcuts-macbook-air \
/usr/local/bin \
| tar -xvf - -C platform_dotfiles/$(PLATFORM)
dotfiles_fetch_platform_gpg:
mkdir -p platform_dotfiles_gpg/$(PLATFORM)
yay -Q > /tmp/pacman-packages.txt;
tar -h -cvf - \
/tmp/pacman-packages.txt \
~/.sway/config.d \
~/.sway/config \
~/.config/commands-status.json \
/etc/fstab \
| gpg -e $(GPG_RECIPIENTS_ARGS) \
> platform_dotfiles_gpg/$(PLATFORM)/sensitive-configs-$$(date -Iseconds).gpg
rm /tmp/pacman-packages.txt;
dotfiles_fetch_platform_ideapad_slim_3_15arp10:
make dotfiles_fetch_platform \
PLATFORM=ideapad_slim_3_15arp10
dotfiles_fetch_platform_gpg_ideapad_slim_3_15arp10:
make dotfiles_fetch_platform_gpg \
PLATFORM=ideapad_slim_3_15arp10
dotfiles_sway_put:
mkdir -p ~/.sway
cp dotfiles/.sway/config ~/.sway/config
dotfiles_fetch: dotfiles_fetch:
commands install -f -p ~ -s ~/.config/katerc -t dotfiles commands install -f -p ~ -s ~/.config/katerc -t dotfiles
commands install -f -p ~ -s ~/.mime.types -t dotfiles commands install -f -p ~ -s ~/.mime.types -t dotfiles
@ -182,10 +124,14 @@ systemd:
done done
sudo systemctl daemon-reload sudo systemctl daemon-reload
compose_env: venv:
for s in checks; do \ uv venv
cat docker/$$s/.env .envs/$$s.env > .envs/$$s.patched.env; \ uv pip install -p .venv \
done -r requirements.txt
venv_compile:
uv pip compile --generate-hashes \
requirements.in > requirements.txt
MYPY_SOURCES ?= \ MYPY_SOURCES ?= \
d1/cpanel.py d1/cpanel.py
@ -193,28 +139,3 @@ mypy:
. .venv/bin/activate && \ . .venv/bin/activate && \
mypy --strict --follow-imports silent \ mypy --strict --follow-imports silent \
$(MYPY_SOURCES) $(MYPY_SOURCES)
COMPOSE ?= sudo docker-compose
nginx_config_http:
$(COMPOSE) exec app \
python3 \
d1/nginx_config.py \
tmp/cache/forward.nginx.json \
/etc/nginx/nginx.conf
nginx_config_https:
$(COMPOSE) exec ssl-app \
python3 \
d1/nginx_config.py ssl \
tmp/d1/ssl.nginx.json \
/etc/nginx/nginx.conf
nginx_config: nginx_config_https nginx_config_http
nginx_reload_common:
$(COMPOSE) exec $(NGINX_SERVICE) nginx -s reload
nginx_reload:
make nginx_reload_common NGINX_SERVICE=ssl-app
make nginx_reload_common NGINX_SERVICE=app

@ -1,8 +1,5 @@
[Unit] [Unit]
Description=fxreader.online-certbot Description=fxreader.online-certbot
Requires=fxreader.online-gateway
After=fxreader.online-gateway
PartOf=fxreader.online-gateway
[Service] [Service]
Type=oneshot Type=oneshot

@ -2,11 +2,10 @@
Description=fxreader.online-service Description=fxreader.online-service
Requires=docker.service Requires=docker.service
After=docker.service After=docker.service
PartOf=docker.service
[Service] [Service]
#Type=oneshot #Type=oneshot
ExecStart=/usr/bin/docker compose up ExecStart=/usr/bin/docker compose up --force-recreate --remove-orphans
ExecStop=/usr/bin/docker compose down ExecStop=/usr/bin/docker compose down
WorkingDirectory={{PROJECT_ROOT}} WorkingDirectory={{PROJECT_ROOT}}
StandardOutput=null StandardOutput=null

@ -1,6 +1,4 @@
import json import json
import re
import socket
import os import os
import io import io
import sys import sys
@ -86,7 +84,6 @@ def forward(
location_body_get = lambda target_endpoint: \ location_body_get = lambda target_endpoint: \
r''' r'''
proxy_set_header Host $http_host; proxy_set_header Host $http_host;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-For $t1; proxy_set_header X-Forwarded-For $t1;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade; proxy_set_header Upgrade $http_upgrade;
@ -220,23 +217,6 @@ def ssl(input_json, output_conf):
upstream_servers = [] upstream_servers = []
server_names = [] server_names = []
ssh_proxy_download_rate = ssl_nginx['stream_server'].get(
'ssh_proxy_download_rate',
128 * 1024,
)
ssh_proxy_upload_rate = ssl_nginx['stream_server'].get(
'ssh_proxy_upload_rate',
128 * 1024,
)
web_proxy_download_rate = ssl_nginx['stream_server'].get(
'web_proxy_download_rate',
128 * 1024 * 1024,
)
web_proxy_upload_rate = ssl_nginx['stream_server'].get(
'web_proxy_upload_rate',
128 * 1024 * 1024,
)
if 'by_server_name' in ssl_nginx['stream_server']: if 'by_server_name' in ssl_nginx['stream_server']:
for k, v in ssl_nginx['stream_server']['by_server_name'].items(): for k, v in ssl_nginx['stream_server']['by_server_name'].items():
upstream_servers.append( upstream_servers.append(
@ -277,15 +257,6 @@ stream {
"TLSv1.3" $upstream_server_name; "TLSv1.3" $upstream_server_name;
} }
map $upstream_protocol $proxy_download_rate {
web {web_proxy_download_rate};
ssh {ssh_proxy_download_rate};
}
map $upstream_protocol $proxy_upload_rate {
web {web_proxy_upload_rate};
ssh {ssh_proxy_upload_rate};
}
map $ssl_preread_server_name $upstream_server_name { map $ssl_preread_server_name $upstream_server_name {
default web; default web;
{server_names} {server_names}
@ -296,12 +267,7 @@ stream {
listen 443; listen 443;
ssl_preread on; ssl_preread on;
proxy_pass $upstream_protocol; proxy_pass $upstream_protocol;
proxy_download_rate $proxy_download_rate;
proxy_upload_rate $proxy_upload_rate;
# proxy_upload_rate 10k;
} }
} }
'''.replace( '''.replace(
@ -311,14 +277,6 @@ stream {
]), ]),
).replace( ).replace(
'{ssh_section}', ssh_section, '{ssh_section}', ssh_section,
).replace(
'{web_proxy_download_rate}', '%d' % web_proxy_download_rate,
).replace(
'{ssh_proxy_download_rate}', '%d' % ssh_proxy_download_rate,
).replace(
'{web_proxy_upload_rate}', '%d' % web_proxy_upload_rate,
).replace(
'{ssh_proxy_upload_rate}', '%d' % ssh_proxy_upload_rate,
).replace( ).replace(
'{server_names}', ''.join([ '{server_names}', ''.join([
' ' + o + '\n' ' ' + o + '\n'
@ -332,36 +290,8 @@ stream {
if 'default_server' in ssl_nginx: if 'default_server' in ssl_nginx:
server = ssl_nginx['default_server'] server = ssl_nginx['default_server']
if 'metrics_allowed' in server:
metrics_allowed_ip = socket.gethostbyname(server['metrics_allowed'])
else:
metrics_allowed_ip = '127.0.0.1'
servers.append( servers.append(
r''' r'''
server {
server_name _;
listen 80 default_server;
location = /_metrics {
stub_status;
access_log off;
# allow 172.0.0.0/8;
allow {metrics_allowed_ip};
# allow 127.0.0.1;
deny all;
}
location ~ ^/.well-known/acme-challenge/ {
alias /var/www/;
try_files $uri =404;
}
location ~ {
deny all;
}
}
server { server {
set $t1 $remote_addr; set $t1 $remote_addr;
if ($http_x_forwarded_for) if ($http_x_forwarded_for)
@ -387,57 +317,14 @@ server {
'{domain_key}', server['domain_key'], '{domain_key}', server['domain_key'],
).replace( ).replace(
'{ssl_port}', '%d' % ssl_port, '{ssl_port}', '%d' % ssl_port,
).replace(
'{metrics_allowed_ip}', metrics_allowed_ip
) )
) )
for server in ssl_nginx['servers']: for server in ssl_nginx['servers']:
location_proxy_app = r'''
location ^~ / {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_redirect off;
proxy_buffering off;
proxy_http_version 1.1;
proxy_pass http://app:80;
}
'''
location_forward_ssl = r'''
location ~ {
#return 444;
return 301 https://$host$request_uri;
}
'''
if server.get('allow_http') in [True]:
http_location = location_proxy_app
else:
http_location = location_forward_ssl
drop_by_user_agent = ''
if not server.get('drop_by_user_agent') is None:
r = re.compile('^([a-zA-Z0-9\s\.\,\(\)]+)$')
user_agent_list = [
r.match(o)[1]
for o in server.get('drop_by_user_agent')
]
drop_by_user_agent = r'''
if ( $http_user_agent ~ ({user_agent_list}) ) {
return 444;
}
'''.replace(
'{user_agent_list}',
'|'.join(user_agent_list)
)
servers.append( servers.append(
r''' r'''
server { server {
set $t1 $remote_addr; set $t1 $remote_addr;
if ($http_x_forwarded_for) if ($http_x_forwarded_for)
@ -454,12 +341,14 @@ server {
try_files $uri =404; try_files $uri =404;
} }
{http_location} location ~ {
#return 444;
return 301 https://$host$request_uri;
}
} }
server { server {
set $t1 $remote_addr; set $t1 $remote_addr;
if ($http_x_forwarded_for) if ($http_x_forwarded_for)
{ {
set $t1 $http_x_forwarded_for; set $t1 $http_x_forwarded_for;
@ -473,8 +362,6 @@ server {
ssl_certificate {signed_chain_cert}; ssl_certificate {signed_chain_cert};
ssl_certificate_key {domain_key}; ssl_certificate_key {domain_key};
{drop_by_user_agent}
location ^~ / { location ^~ / {
proxy_set_header Host $http_host; proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@ -483,7 +370,6 @@ server {
proxy_set_header Connection $connection_upgrade; proxy_set_header Connection $connection_upgrade;
proxy_redirect off; proxy_redirect off;
proxy_buffering off; proxy_buffering off;
proxy_http_version 1.1;
proxy_pass http://app:80; proxy_pass http://app:80;
} }
} }
@ -495,12 +381,8 @@ server {
'{client_max_body_size}', server['client_max_body_size'], '{client_max_body_size}', server['client_max_body_size'],
).replace( ).replace(
'{domain_key}', server['domain_key'], '{domain_key}', server['domain_key'],
).replace(
'{drop_by_user_agent}', drop_by_user_agent,
).replace( ).replace(
'{ssl_port}', '%d' % ssl_port, '{ssl_port}', '%d' % ssl_port,
).replace(
'{http_location}', http_location
) )
) )

@ -1 +1 @@
Subproject commit 4c187fc7dd17c52fb8e4f992d3985eb609eefe6a Subproject commit adef10a8c41f5c550622879370a40f8a9e545574

@ -1 +1 @@
Subproject commit f2366f328fb8129fa6ae26d00b421025d2f090c7 Subproject commit 3c691ef68d8899edf328d5b06135c0d3b02e7940

@ -0,0 +1,5 @@
.venv
tmp
.git
.env
build

@ -0,0 +1 @@
releases/whl/** filter=lfs diff=lfs merge=lfs -text

@ -0,0 +1,6 @@
!.tmuxp/
!python
.env/
releases/tar
build
!releases/whl/**

@ -0,0 +1,9 @@
session_name: test-task-2025-06-30-v1
start_directory: ${PWD}/deps/test-task-2025-06-30-v1
windows:
- focus: 'true'
layout: 5687,98x12,0,0,18
options: {}
panes:
- pane
window_name: zsh

62
deps/test-task-2025-06-30-v1/Makefile vendored Normal file

@ -0,0 +1,62 @@
ENV_PATH ?= .venv
PYTHON_PATH = $(ENV_PATH)/bin/python3
PYTHON_VERSION ?= 3.10
UV_ARGS ?= --offline
DOCKER ?= podman
COMPOSE ?= podman compose
venv_extract_requirements:
$(ENV_PATH)/bin/tomlq \
-r '.project.dependencies | join("\n")' \
pyproject.toml > requirements.in
venv_compile:
uv pip compile \
$(UV_ARGS) \
-p $(PYTHON_VERSION) \
--generate-hashes \
requirements.in > \
requirements.txt
venv:
uv \
venv \
-p 3.13 \
$(UV_ARGS) \
--seed \
$(ENV_PATH)
uv \
pip install \
$(UV_ARGS) \
-p $(ENV_PATH) \
-r requirements.txt
pyright:
$(ENV_PATH)/bin/python3 -m pyright \
-p pyproject.toml \
--pythonpath $(PYTHON_PATH)
compose_env:
cat docker/postgresql/.env .env/postgresql.env > .env/postgresql.patched.env
cat docker/web/.env .env/web.env > .env/web.patched.env
compose_build_web:
$(COMPOSE) build web
git-release:
git archive \
--format=tar \
-o "releases/tar/repo-$$(git describe --tags).tar" \
HEAD
ALEMBIC_CMD ?= --help
alembic:
$(ENV_PATH)/bin/alembic \
-c pyproject.toml \
$(ALEMBIC_CMD)
deploy_wheel:
make pyright
$(PYTHON_PATH) -m build -o releases/whl -w -n

@ -0,0 +1,76 @@
services:
redis:
image: docker.io/redis:latest-alpine@sha256:e71b4cb00ea461ac21114cff40ff12fb8396914238e1e9ec41520b2d5a4d3423
ports:
- 127.0.0.1:9004:6379
web: &web
image: online.fxreader.pr34.test_task_2025_06_30_v1:dev
build:
context: .
dockerfile: ./docker/web/Dockerfile
target: web
env_file: .env/web.env
logging:
driver: "json-file"
options:
max-size: 10m
max-file: "3"
deploy:
resources:
limits:
cpus: '0.5'
memory: 128M
web-dev:
<<: *web
volumes:
- .:/app:ro
- ./tmp/cache:/app/tmp/cache:rw
emcont_worker:
<<: *web
image: online.fxreader.pr34.test_task_2025_06_30_v1:dev
environment:
command:
- python3
- -m
- online.fxreader.pr34.test_task_2025_06_30_v1.async_api.app
postgresql:
image: docker.io/postgres:14.18-bookworm@sha256:c0aab7962b283cf24a0defa5d0d59777f5045a7be59905f21ba81a20b1a110c9
# restart: always
# set shared memory limit when using docker compose
shm_size: 128mb
volumes:
- postgresql_data:/var/lib/postgresql/data/:rw
# or set shared memory limit when deploy via swarm stack
#volumes:
# - type: tmpfs
# target: /dev/shm
# tmpfs:
# size: 134217728 # 128*2^20 bytes = 128Mb
env_file: .env/postgresql.patched.env
# environment:
# POSTGRES_PASSWORD: example
ports:
- 127.0.0.1:9002:5432
logging:
driver: "json-file"
options:
max-size: 10m
max-file: "3"
deploy:
resources:
limits:
cpus: '0.5'
memory: 128M
adminer:
image: docker.io/adminer:standalone@sha256:730215fe535daca9a2f378c48321bc615c8f0d88668721e0eff530fa35b6e8f6
ports:
- 127.0.0.1:9001:8080
volumes:
postgresql_data:

@ -0,0 +1,3 @@
PGDATA=/var/lib/postgresql/data/pgdata
POSTGRES_USER=tickers
POSTGRES_DB=tickers

@ -0,0 +1 @@
# DB_URL=

@ -0,0 +1,50 @@
FROM docker.io/library/python:3.12@sha256:6121c801703ec330726ebf542faab113efcfdf2236378c03df8f49d80e7b4180 AS base
ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /app
COPY docker/web/apt.requirements.txt docker/web/apt.requirements.txt
RUN apt-get update \
&& apt-get install -y $(cat docker/web/apt.requirements.txt)
RUN \
pip3 install \
--break-system-packages uv
COPY requirements.txt requirements.txt
RUN \
--mount=type=bind,source=releases/whl,target=/app/releases/whl \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/uv \
uv pip \
install \
--system \
--break-system-packages \
-f releases/whl \
-r requirements.txt
WORKDIR /app
RUN apt-get update -yy && apt-get install -yy tini
FROM base as web
RUN \
--mount=type=bind,source=releases/whl,target=/app/releases/whl \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/uv \
uv pip \
install \
--system \
--break-system-packages \
--no-index \
-f releases/whl \
'online.fxreader.pr34.test_task_2025_06_30_v1==0.1'
ENTRYPOINT ["tini", "--"]
CMD [ \
"python3", \
"-m", \
"online.fxreader.pr34.test_task_2025_06_30_v1.async_api.app" \
]

@ -0,0 +1 @@
wget tar git curl

@ -0,0 +1,89 @@
# Requirements
Tickers of interest:
- EURUSD
- USDJPY
- GBPUSD
- AUDUSD
- USDCAD
REST API: https://rates.emcont.com
Scrape every second;
Schema:
  Ticker:
    id: foreign_key market
    timestamp: datetime
    # (ask + bid) / 2
    value: decimal
Store up to 30 minutes of recent tickers;
Return via websocket up to 30 minutes of recent tickers;
# AsyncAPI
```yaml
AsyncAPI:
Endpoints:
subscribe:
Request: SubscribeAction
Response: AssetHistoryResponse | AssetTickerResponse
list:
Request: AssetsAction
Response: AssetsResponse
Schema:
SubscribeAction:
action: Literal['subscribe']
message:
assetId: 1
AssetHistoryResponse:
action: Literal['asset_history']
message:
points:
- assetName: EURUSD
time: 1455883484
assetId: 1
value: 1.110481
- assetName: EURUSD
time: 1455883485
assetId: 1
value: 1.110948
- assetName: EURUSD
time: 1455883486
assetId: 1
value: 1.111122
AssetTickerResponse:
action: Literal['point']
message:
assetName: EURUSD
time: 1455883484
assetId: 1
value: 1.110481
AssetsAction:
action: Literal['assets']
message: {}
AssetsResponse:
action: Literal['assets']
message:
assets:
- id: 1
name: EURUSD
- id: 2
name: USDJPY
- id: 3
name: GBPUSD
- id: 4
name: AUDUSD
- id: 5
name: USDCAD
```
# Services:
``` yaml
web:
ports:
- 8080:80
```
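For a quick smoke test of the protocol above, a minimal subscriber sketch (assumes the stack is up and the web service is reachable on localhost:8080 per the Services mapping; `websockets` is already a project dependency; asset ids follow the sample payloads):

```python
# Hedged sketch: talks to the /tickers/ websocket endpoint from
# async_api/app.py. Host/port and assetId=1 (EURUSD) are assumptions
# taken from the sample payloads above, not guaranteed values.
import asyncio
import json

import websockets


async def main() -> None:
    async with websockets.connect('ws://localhost:8080/tickers/') as ws:
        # list available assets
        await ws.send(json.dumps({'action': 'assets', 'message': {}}))
        print(json.loads(await ws.recv()))
        # subscribe; the first reply is asset_history, then point frames
        await ws.send(json.dumps({'action': 'subscribe', 'message': {'assetId': 1}}))
        while True:
            print(json.loads(await ws.recv()))


if __name__ == '__main__':
    asyncio.run(main())
```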

@ -1,5 +1,5 @@
[project] [project]
description = 'checks service' description = 'test task for websocket with crypto tickers'
requires-python = '>= 3.10' requires-python = '>= 3.10'
maintainers = [ maintainers = [
{ name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' }, { name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' },
@ -8,15 +8,64 @@ classifiers = [
'Programming Language :: Python', 'Programming Language :: Python',
] ]
name = 'online.fxreader.pr34.checks' name = 'online.fxreader.pr34.test_task_2025_06_30_v1'
version = '0.1.1'
dependencies = [
'alembic',
'fastapi',
'uvicorn',
'websockets',
'uvloop',
'tomlq',
'mypy',
'marisa-trie',
'pydantic',
'asyncpg',
'pydantic-settings',
'tomlkit',
'tomlq',
'numpy',
'cryptography',
'mypy',
'pyright',
'ruff',
'ipython',
'ipdb',
'requests',
'types-requests',
'aiohttp',
'build',
'wheel',
'setuptools',
'setuptools-scm',
]
[build-system]
requires = ['build', 'wheel', 'setuptools', 'setuptools-scm']
build-backend = 'setuptools.build_meta'
[tool.setuptools]
include-package-data = false
[tool.setuptools.package-dir]
'online.fxreader.pr34.test_task_2025_06_30_v1' = 'python/online/fxreader/pr34/test_task_2025_06_30_v1'
[tool.alembic]
script_location = 'python/online/fxreader/pr34/test_task_2025_06_30_v1/tickers/alembic'
prepend_sys_path = ['python']
# sqlalchemy.url = 'asdfasdf:/asdfasdfa'
[tool.ruff] [tool.ruff]
line-length = 160 line-length = 160
target-version = 'py310' target-version = 'py310'
# builtins = ['_', 'I', 'P']
include = [ include = [
'*.py', # 'follow_the_leader/**/*.py',
'*/**/*.py', #'*.py',
'*/**/*.pyi', # '*.recipe',
'python/**/*.py',
'python/**/*.pyi',
] ]
exclude = [ exclude = [
'.venv', '.venv',
@ -50,6 +99,9 @@ select = ['E', 'F', 'I', 'W', 'INT']
[tool.ruff.lint.isort] [tool.ruff.lint.isort]
detect-same-package = true detect-same-package = true
# extra-standard-library = ["aes", "elementmaker", "encodings"]
# known-first-party = ["calibre_extensions", "calibre_plugins", "polyglot"]
# known-third-party = ["odf", "qt", "templite", "tinycss", "css_selectors"]
relative-imports-order = "closest-to-furthest" relative-imports-order = "closest-to-furthest"
split-on-trailing-comma = true split-on-trailing-comma = true
section-order = [ section-order = [
@ -67,11 +119,16 @@ enabled = false
[tool.pyright] [tool.pyright]
include = [ include = [
'*/**/*.py', #'../../../../../follow_the_leader/views2/payments.py',
#'../../../../../follow_the_leader/logic/payments.py',
#'../../../../../follow_the_leader/logic/paypal.py',
'python/**/*.py',
'python/**/*.pyi',
] ]
# stubPath = '../mypy-stubs'
extraPaths = [ extraPaths = [
'.',
] ]
#strict = ["src"]
analyzeUnannotatedFunctions = true analyzeUnannotatedFunctions = true
disableBytesTypePromotions = true disableBytesTypePromotions = true
@ -170,4 +227,3 @@ reportShadowedImports = "none"
reportUninitializedInstanceVariable = "none" reportUninitializedInstanceVariable = "none"
reportUnnecessaryTypeIgnoreComment = "none" reportUnnecessaryTypeIgnoreComment = "none"
reportUnusedCallResult = "none" reportUnusedCallResult = "none"

@ -0,0 +1,68 @@
import asyncio
import datetime
import logging
# import os
from ..tickers_retrieval.emcont import Emcont
from ..tickers.models import Ticker
from ..tickers.logic import ticker_store_multiple, markets_get_by_symbol
from .db import create_engine
import sqlalchemy.ext.asyncio
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from typing import Any
logger = logging.getLogger(__name__)
async def run() -> None:
async_session = create_engine()
async def store_cb(
rates: list[Emcont.rates_get_t.data_t.rate_t],
timestamp: datetime.datetime,
session: 'async_sessionmaker[AsyncSession]',
) -> None:
logger.info(dict(
msg='before markets',
))
markets = await markets_get_by_symbol(
session,
set([
rate.symbol
for rate in rates
]),
)
logger.info(dict(
msg='after markets',
))
await ticker_store_multiple(
session,
[
Ticker(
id=markets[rate.symbol],
timestamp=timestamp,
value=rate.value,
)
for rate in rates
]
)
logger.info(dict(
rates=rates,
timestamp=timestamp.isoformat()
))
await Emcont.worker(
only_symbols={'EURUSD', 'USDJPY', 'GBPUSD', 'AUDUSD', 'USDCAD'},
session=async_session,
store_cb=store_cb,
request_timeout=2,
store_timeout=0.5,
)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
asyncio.run(run())

@ -0,0 +1,15 @@
from ..tickers.settings import Settings as ModelsSettings
import sqlalchemy.ext.asyncio
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
def create_engine() -> 'async_sessionmaker[AsyncSession]':
engine = sqlalchemy.ext.asyncio.create_async_engine(
ModelsSettings.singleton().db_url
)
async_session = sqlalchemy.ext.asyncio.async_sessionmaker(
engine
)
return async_session

@ -0,0 +1,71 @@
import fastapi
import pydantic
import functools
import logging
import copy
import uvicorn
import uvicorn.config
import sys
from .settings import Settings as APISettings
from .db import create_engine
from .websocket_api import WebsocketAPI
from typing import (Any, Optional, Literal, Annotated,)
logger = logging.getLogger(__name__)
async def websocket_tickers(
websocket: fastapi.WebSocket,
websocket_api: WebsocketAPI,
) -> None:
try:
await websocket_api.connect(websocket)
while True:
msg = await websocket.receive_text()
await websocket_api.on_message(websocket, msg)
except fastapi.WebSocketDisconnect:
pass
# websocket_api.disconnect(websocket)
except:
logger.exception('')
raise
finally:
await websocket_api.disconnect(websocket)
def create_app() -> fastapi.FastAPI:
async_session = create_engine()
websocket_api = WebsocketAPI(
session=async_session,
)
app = fastapi.FastAPI()
app.websocket(
'/tickers/',
)(
functools.partial(
websocket_tickers,
websocket_api=fastapi.Depends(lambda : websocket_api),
)
)
return app
def run(args: list[str]):
log_config = copy.deepcopy(uvicorn.config.LOGGING_CONFIG)
uvicorn.run(
create_app(),
host=APISettings.singleton().uvicorn_host,
port=APISettings.singleton().uvicorn_port,
loop='uvloop',
log_config=log_config,
log_level=logging.INFO,
)
if __name__ == '__main__':
run(sys.argv[1:])

@ -0,0 +1,67 @@
import pydantic
import decimal
from typing import (Literal, Annotated,)
class SubscribeAction(pydantic.BaseModel):
action: Literal['subscribe']
class message_t(pydantic.BaseModel):
asset_id: Annotated[
int,
pydantic.Field(alias='assetId')
]
message: message_t
class AssetsAction(pydantic.BaseModel):
action: Literal['assets']
class message_t(pydantic.BaseModel):
pass
message: Annotated[
message_t,
pydantic.Field(
default_factory=message_t,
)
]
Action = pydantic.RootModel[
AssetsAction | SubscribeAction
]
class AssetHistoryResponse(pydantic.BaseModel):
action: Literal['asset_history'] = 'asset_history'
class message_t(pydantic.BaseModel):
class point_t(pydantic.BaseModel):
asset_name : Annotated[
str,
pydantic.Field(
alias='assetName',
)
]
time: int
asset_id : Annotated[
int,
pydantic.Field(alias='assetId')
]
value: decimal.Decimal
points: list[point_t]
message: message_t
class AssetTickerResponse(pydantic.BaseModel):
action: Literal['point'] = 'point'
message: 'AssetHistoryResponse.message_t.point_t'
class AssetsResponse(pydantic.BaseModel):
action: Literal['assets'] = 'assets'
class message_t(pydantic.BaseModel):
class asset_t(pydantic.BaseModel):
id: int
name: str
assets: list[asset_t]
message: message_t
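A minimal dispatch sketch for the `Action` root model above; the raw payloads mirror the AsyncAPI examples from the README (module path assumed from websocket_api's `from . import schema`):

```python
# Validation round-trip for the tagged-union Action model; payload
# keys use the camelCase aliases (assetId) as in the spec.
from online.fxreader.pr34.test_task_2025_06_30_v1.async_api import schema

msg = schema.Action.model_validate_json(
    '{"action": "subscribe", "message": {"assetId": 1}}'
).root
assert isinstance(msg, schema.SubscribeAction)
assert msg.message.asset_id == 1

msg = schema.Action.model_validate_json(
    '{"action": "assets", "message": {}}'
).root
assert isinstance(msg, schema.AssetsAction)
```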

@ -0,0 +1,18 @@
import pydantic
import pydantic_settings
from typing import (ClassVar, Optional,)
class Settings(pydantic_settings.BaseSettings):
uvicorn_port : int = 80
uvicorn_host : str = '127.0.0.1'
_singleton : ClassVar[Optional['Settings']] = None
@classmethod
def singleton(cls) -> 'Settings':
if cls._singleton is None:
cls._singleton = Settings.model_validate({})
return cls._singleton

@ -0,0 +1,126 @@
import fastapi
import datetime
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from . import schema
from ..tickers.logic import tickers_get_by_period, markets_all
from typing import (Optional, Literal)
class WebsocketAPI:
def __init__(
self,
session: 'async_sessionmaker[AsyncSession]',
) -> None:
self.connections : set[
fastapi.WebSocket,
] = set()
self.subscriptions_by_asset_id : dict[
int, set[fastapi.WebSocket]
] = dict()
self.subscriptions_by_client : dict[
fastapi.WebSocket,
int,
] = dict()
self.session = session
async def connect(self, client: fastapi.WebSocket) -> None:
assert not client in self.connections
await client.accept()
self.connections.add(client)
async def subscribe(
self,
client: fastapi.WebSocket,
asset_id: int
) -> None:
if client in self.subscriptions_by_client:
last_asset_id = self.subscriptions_by_client[client]
del self.subscriptions_by_asset_id[last_asset_id]
del self.subscriptions_by_client[client]
if not asset_id in self.subscriptions_by_asset_id:
self.subscriptions_by_asset_id[asset_id] = set()
self.subscriptions_by_asset_id[asset_id].add(client)
self.subscriptions_by_client[client] = asset_id
await self.asset_last_period(client, asset_id)
async def asset_last_period(
self,
client: fastapi.WebSocket,
asset_id: int,
) -> None:
tickers = await tickers_get_by_period(
self.session,
period=datetime.timedelta(minutes=30),
market_id=asset_id,
)
await client.send_text(
schema.AssetHistoryResponse(
message=schema.AssetHistoryResponse.message_t(
points=[
schema.AssetHistoryResponse.message_t.point_t.model_construct(
asset_name=o.market.name,
asset_id=o.market.id,
time=int(o.timestamp.timestamp()),
value=o.value,
)
for o in tickers
]
)
).json(by_alias=True,),
)
async def assets_index(
self,
client: fastapi.WebSocket,
) -> None:
markets = await markets_all(
self.session,
)
await client.send_text(
schema.AssetsResponse(
message=schema.AssetsResponse.message_t(
assets=[
schema.AssetsResponse.message_t.asset_t.model_construct(
name=o.name,
id=o.id,
)
for o in markets
]
)
).json(by_alias=True,),
)
async def on_message(
self,
client: fastapi.WebSocket,
msg_raw: str
) -> None:
msg = schema.Action.model_validate_json(
msg_raw
).root
if isinstance(msg, schema.SubscribeAction):
await self.subscribe(
client,
msg.message.asset_id
)
elif isinstance(msg, schema.AssetsAction):
await self.assets_index(
client,
)
else:
raise NotImplementedError
async def disconnect(self, client: fastapi.WebSocket) -> None:
assert client in self.connections
self.connections.remove(client)

@ -0,0 +1,117 @@
import asyncio
import logging
from logging.config import fileConfig
from sqlalchemy.ext.asyncio import async_engine_from_config
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from sqlalchemy.engine.base import Connection
from alembic import context
from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.settings import Settings
from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.models import (
Base,
Market,
)
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
config.set_main_option(
'sqlalchemy.url',
Settings.singleton().db_url
)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
# if config.config_file_name is not None:
# fileConfig(config.config_file_name)
# else:
if True:
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# target_metadata = None
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def do_run_migrations(
connection: Connection,
):
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations():
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
logger.info(dict(msg='started'))
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
logger.info(dict(msg='done'))
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
raise NotImplementedError
# run_migrations_offline()
else:
run_migrations_online()

@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

@ -0,0 +1,36 @@
"""add Market table
Revision ID: 335b4c4f052c
Revises:
Create Date: 2025-07-04 11:31:10.983947
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '335b4c4f052c'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tickers_market',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=32), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tickers_market')
# ### end Alembic commands ###

@ -0,0 +1,38 @@
"""add timezone
Revision ID: 729afc7194c9
Revises: eb63f793db3a
Create Date: 2025-07-11 11:30:06.246152
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '729afc7194c9'
down_revision: Union[str, Sequence[str], None] = 'eb63f793db3a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('tickers_ticker', 'timestamp',
existing_type=postgresql.TIMESTAMP(),
type_=sa.DateTime(timezone=True),
existing_nullable=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('tickers_ticker', 'timestamp',
existing_type=sa.DateTime(timezone=True),
type_=postgresql.TIMESTAMP(),
existing_nullable=False)
# ### end Alembic commands ###

@ -0,0 +1,38 @@
"""add Ticker table
Revision ID: eb63f793db3a
Revises: 335b4c4f052c
Create Date: 2025-07-07 10:32:49.812738
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'eb63f793db3a'
down_revision: Union[str, Sequence[str], None] = '335b4c4f052c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tickers_ticker',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('timestamp', sa.DateTime(), nullable=False),
sa.Column('value', sa.Numeric(precision=32, scale=6), nullable=False),
sa.ForeignKeyConstraint(['id'], ['tickers_market.id'], ondelete='CASCADE'),
sa.UniqueConstraint('id', 'timestamp')
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tickers_ticker')
# ### end Alembic commands ###
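The migrations above are normally applied through the Makefile's `alembic` target; a sketch of the same invocation from Python, with `upgrade head` as the assumed ALEMBIC_CMD value:

```python
# Runs `alembic -c pyproject.toml upgrade head` via the project venv,
# mirroring the Makefile target. env.py reads the URL through
# tickers/settings.py, so DB_URL must be set in the environment.
import subprocess

subprocess.run(
    ['.venv/bin/alembic', '-c', 'pyproject.toml', 'upgrade', 'head'],
    check=True,
)
```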

@ -0,0 +1,83 @@
import datetime
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from sqlalchemy.orm import selectinload, make_transient
from sqlalchemy.future import select
from .models import Ticker, Market
from .utils import get_or_create
async def markets_get_by_symbol(
session: 'async_sessionmaker[AsyncSession]',
symbols: set[str],
) -> dict[str, int]:
res : dict[str, int] = dict()
async with session() as active_session:
async with active_session.begin() as transaction:
for o in symbols:
m = (await get_or_create(
active_session,
Market,
name=o,
))[0]
res[o] = m.id
return res
async def ticker_store_multiple(
session: 'async_sessionmaker[AsyncSession]',
tickers: list[Ticker],
) -> None:
async with session() as active_session:
async with active_session.begin() as transaction:
active_session.add_all(
tickers,
)
async def tickers_get_by_period(
session: 'async_sessionmaker[AsyncSession]',
market_id: int,
period: datetime.timedelta,
) -> list[Ticker]:
async with session() as active_session:
async with active_session.begin() as transaction:
q = select(
Ticker
).join(Ticker.market).where(
Market.id == market_id,
Ticker.timestamp >= datetime.datetime.now(
tz=datetime.timezone.utc
) - period
).order_by(Ticker.timestamp.desc()).options(
selectinload(Ticker.market)
)
res = await active_session.execute(q)
rows = [o[0] for o in res]
for o in rows:
active_session.expunge(o)
make_transient(o.market)
return rows
async def markets_all(
session: 'async_sessionmaker[AsyncSession]',
) -> list[Market]:
async with session() as active_session:
async with active_session.begin() as transaction:
q = select(
Market
)
res = await active_session.execute(q)
rows = [o[0] for o in res]
for o in rows:
active_session.expunge(o)
return rows
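Usage sketch for the query helpers above (module paths per the pyproject package-dir mapping; DB_URL must be set so create_engine can build the async engine):

```python
# Lists each market and how many tickers fall inside the 30-minute
# window that websocket_api serves as asset_history.
import asyncio
import datetime

from online.fxreader.pr34.test_task_2025_06_30_v1.async_api.db import create_engine
from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.logic import (
    markets_all,
    tickers_get_by_period,
)


async def demo() -> None:
    async_session = create_engine()
    for market in await markets_all(async_session):
        rows = await tickers_get_by_period(
            async_session,
            market_id=market.id,
            period=datetime.timedelta(minutes=30),
        )
        print(market.name, len(rows))


asyncio.run(demo())
```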

@ -0,0 +1,63 @@
import datetime
import decimal
from sqlalchemy.orm import (
mapped_column,
Mapped,
DeclarativeBase,
relationship,
)
from sqlalchemy import (
String,
ForeignKey,
Numeric,
DateTime,
UniqueConstraint,
)
from typing import (Optional,)
class Base(DeclarativeBase):
pass
class Market(Base):
__tablename__ = 'tickers_market'
id: Mapped[int] = mapped_column(primary_key=True)
name: Mapped[str] = mapped_column(String(32))
tickers: Mapped[list['Ticker']] = relationship(
back_populates='market',
)
def __repr__(self) -> str:
return f"Market(id={self.id!r}, name={self.name!r})"
class Ticker(Base):
__tablename__ = 'tickers_ticker'
id: Mapped[int] = mapped_column(ForeignKey(
'tickers_market.id',
ondelete='CASCADE',
))
market: Mapped['Market'] = relationship(
back_populates='tickers'
)
timestamp: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True,)
)
value: Mapped[decimal.Decimal] = mapped_column(Numeric(
precision=32, scale=6,
))
__table_args__ = (
UniqueConstraint('id', 'timestamp'),
)
__mapper_args__ = dict(
primary_key=('id', 'timestamp',)
)
def __repr__(self) -> str:
return f"Ticker(id={self.id!r}, timestamp={self.timestamp!r}, value={self.value!r})"

@ -0,0 +1,17 @@
import pydantic
import pydantic_settings
from typing import (ClassVar, Optional,)
class Settings(pydantic_settings.BaseSettings):
db_url : str
_singleton : ClassVar[Optional['Settings']] = None
@classmethod
def singleton(cls) -> 'Settings':
if cls._singleton is None:
cls._singleton = Settings.model_validate({})
return cls._singleton
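pydantic-settings matches field names to environment variables case-insensitively, so the `DB_URL` placeholder in docker/web/.env feeds this class; a sketch with a hypothetical URL (user/db names taken from docker/postgresql/.env, the host port from the compose mapping):

```python
# The URL below is illustrative only; substitute real credentials.
import os

os.environ.setdefault(
    'DB_URL',
    'postgresql+asyncpg://tickers:password@127.0.0.1:9002/tickers',
)

from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.settings import Settings

print(Settings.singleton().db_url)
```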

@ -0,0 +1,50 @@
from typing import (TypeVar, Optional, Any, cast,)
from sqlalchemy.ext.asyncio import AsyncSessionTransaction, AsyncSession
from sqlalchemy.future import select
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.exc import NoResultFound, IntegrityError
M = TypeVar('M', bound='DeclarativeBase')
async def get_or_create(
session: AsyncSession,
model: type[M],
create_method: Optional[str] = None,
create_method_kwargs: Optional[dict[str, Any]] = None,
**kwargs: Any
) -> tuple[M, bool]:
async def select_row() -> M:
res = await session.execute(
select(model).where(
*[
getattr(model, k) == v
for k, v in kwargs.items()
]
)
)
row = res.one()[0]
assert isinstance(row, model)
return row
try:
res = await select_row()
return res, False
except NoResultFound:
if create_method_kwargs:
kwargs.update(create_method_kwargs)
if not create_method:
created = model(**kwargs)
else:
created = getattr(model, create_method)(**kwargs)
try:
session.add(created)
await session.flush()
return created, True
except IntegrityError:
await session.rollback()
return await select_row(), False
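A hypothetical usage sketch for `get_or_create`; the IntegrityError branch above is what lets concurrent callers converge on the same row instead of failing:

```python
# Assumes DB_URL is set; Market comes from tickers/models.py and
# create_engine from async_api/db.py in this diff.
import asyncio

from online.fxreader.pr34.test_task_2025_06_30_v1.async_api.db import create_engine
from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.models import Market
from online.fxreader.pr34.test_task_2025_06_30_v1.tickers.utils import get_or_create


async def demo() -> None:
    async_session = create_engine()
    async with async_session() as session:
        async with session.begin():
            market, created = await get_or_create(session, Market, name='EURUSD')
            # created is False when the row already existed (or when a
            # concurrent insert won and the fallback re-selected it)
            print(market.id, created)


asyncio.run(demo())
```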

@ -0,0 +1,165 @@
import aiohttp
import asyncio
import decimal
import logging
import datetime
# import datetime.timezone
import pydantic
import json
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import async_sessionmaker
from typing import (
Any, Annotated, Optional, Awaitable, Callable,
Protocol,
)
logger = logging.getLogger(__name__)
class Emcont:
class rates_get_t:
class data_t(pydantic.BaseModel):
class rate_t(pydantic.BaseModel):
symbol: Annotated[
str,
pydantic.Field(
alias='Symbol',
)
]
bid: Annotated[
decimal.Decimal,
pydantic.Field(
alias='Bid',
)
]
ask: Annotated[
decimal.Decimal,
pydantic.Field(
alias='Ask',
)
]
@pydantic.computed_field
def value(self) -> decimal.Decimal:
return (self.ask + self.bid) / 2
product_type: Annotated[
str,
pydantic.Field(
alias='ProductType',
)
]
rates: Annotated[
list[rate_t],
pydantic.Field(
alias='Rates',
)
]
@classmethod
async def rates_get(
cls,
only_symbols: Optional[set[str]] = None,
) -> Any:
async with aiohttp.ClientSession() as session:
async with session.get('https://rates.emcont.com') as response:
data_json = await response.text()
data = cls.rates_get_t.data_t.model_validate_json(
data_json[5:-3],
)
if only_symbols:
data.rates = [
o
for o in data.rates
if o.symbol in only_symbols
]
return data
class store_cb_t(Protocol):
async def __call__(
self,
rates: list['Emcont.rates_get_t.data_t.rate_t'],
timestamp: datetime.datetime,
session: 'async_sessionmaker[AsyncSession]',
) -> None: ...
@classmethod
async def worker(
cls,
session: 'async_sessionmaker[AsyncSession]',
store_cb: 'Emcont.store_cb_t',
only_symbols: Optional[set[str]] = None,
request_timeout: float | int = 0.5,
store_timeout: float | int = 0.5,
request_period: float | int = 1,
) -> None:
last_retrieval = datetime.datetime.now(
tz=datetime.timezone.utc,
)
assert request_timeout >= 0
assert store_timeout >= 0
request_period_timedelta = datetime.timedelta(
seconds=request_period,
)
while True:
logger.info(dict(msg='started'))
entries : Optional['Emcont.rates_get_t.data_t'] = None
try:
try:
async with asyncio.timeout(request_timeout):
entries = await cls.rates_get(
only_symbols=only_symbols,
)
except TimeoutError:
logger.exception('request timeout')
try:
async with asyncio.timeout(store_timeout):
if entries:
await store_cb(
rates=entries.rates,
timestamp=last_retrieval,
session=session,
)
except TimeoutError:
logger.exception('store timeout')
except:
logger.exception('')
next_retrieval = last_retrieval
def wait_interval():
nonlocal next_retrieval
return (
next_retrieval - datetime.datetime.now(
tz=datetime.timezone.utc,
)
).total_seconds()
while True:
next_retrieval += request_period_timedelta
if (
wait_interval() > 0 or
wait_interval() > -request_period_timedelta.total_seconds() / 4
):
break
else:
logger.warning(dict(
msg='skip period due to huge lag',
))
if wait_interval() > 0:
await asyncio.sleep(wait_interval())
last_retrieval = next_retrieval
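A dry-run sketch of the retrieval path alone; the `data_json[5:-3]` slice in `rates_get` suggests the upstream feed wraps its JSON in a JSONP-style envelope, which is assumed here rather than verified:

```python
# Fetches one snapshot from https://rates.emcont.com and prints the
# midpoints; value is the (ask + bid) / 2 computed field.
import asyncio

from online.fxreader.pr34.test_task_2025_06_30_v1.tickers_retrieval.emcont import Emcont


async def demo() -> None:
    data = await Emcont.rates_get(only_symbols={'EURUSD'})
    for rate in data.rates:
        print(rate.symbol, rate.bid, rate.ask, rate.value)


asyncio.run(demo())
```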

@ -0,0 +1,27 @@
alembic
fastapi
uvicorn
websockets
uvloop
tomlq
mypy
marisa-trie
pydantic
asyncpg
pydantic-settings
tomlkit
tomlq
numpy
cryptography
mypy
pyright
ruff
ipython
ipdb
requests
types-requests
aiohttp
build
wheel
setuptools
setuptools-scm

File diff suppressed because it is too large

@ -8,24 +8,6 @@ services:
- ./d1/:/app/d1/:ro - ./d1/:/app/d1/:ro
- ./tmp/cache/:/app/tmp/cache/:ro - ./tmp/cache/:/app/tmp/cache/:ro
restart: on-failure restart: on-failure
networks:
network:
ipv4_address: ${SUBNET}.2
nginx-exporter:
image: docker.io/nginx/nginx-prometheus-exporter@sha256:6edfb73afd11f2d83ea4e8007f5068c3ffaa38078a6b0ad1339e5bd2f637aacd
#profiles:
# - podman
#env_file:
# .envs/nginx-exporter.env
environment:
SCRAPE_URI: http://ssl-app:80/_metrics
# LISTEN_ADDRESS: 0.0.0.0:9113
ports:
- ${NGINX_EXPORTER_PORTS:-"127.0.0.1:9113"}:9113
networks:
network:
ssl-app: ssl-app:
build: build:
context: . context: .
@ -35,30 +17,8 @@ services:
- ./tmp/d1/:/app/tmp/d1/:ro - ./tmp/d1/:/app/tmp/d1/:ro
- ./tmp/d1/letsencrypt:/etc/letsencrypt:rw - ./tmp/d1/letsencrypt:/etc/letsencrypt:rw
restart: on-failure restart: on-failure
ports:
- ${SSL_APP_PORTS:-"127.0.0.1:443"}:444
- ${APP_PORTS:-"127.0.0.1:80"}:80
networks:
network:
checks:
build:
context: ./docker/checks
dockerfile: ./Dockerfile
init: true
env_file:
.envs/checks.patched.env
command:
- python3
- -m
- online.fxreader.pr34.commands_typed.async_api.fastapi
ports:
- ${CHECKS_PORTS:-"127.0.0.1:80"}:80
networks:
network:
cpanel: cpanel:
image: online.fxreader.pr34.cpanel:dev
build: build:
context: . context: .
dockerfile: ./docker/cpanel/Dockerfile dockerfile: ./docker/cpanel/Dockerfile
@ -68,9 +28,6 @@ services:
- ./d1/:/app/d1:ro - ./d1/:/app/d1:ro
- ./tmp/d1/:/app/tmp/d1/:ro - ./tmp/d1/:/app/tmp/d1/:ro
restart: on-failure restart: on-failure
networks:
network:
dynu: dynu:
build: build:
context: . context: .
@ -83,8 +40,6 @@ services:
restart: on-failure restart: on-failure
# links: # links:
# - ngrok # - ngrok
networks:
network:
ngrok: ngrok:
image: wernight/ngrok image: wernight/ngrok
#links: #links:
@ -95,8 +50,6 @@ services:
volumes: volumes:
- ./tmp/cache/ngrok.yml:/home/ngrok/.ngrok2/ngrok.yml:ro - ./tmp/cache/ngrok.yml:/home/ngrok/.ngrok2/ngrok.yml:ro
restart: on-failure restart: on-failure
networks:
network:
#forward: #forward:
# build: # build:
# context: . # context: .
@ -105,13 +58,3 @@ services:
# - ./d1/forward.py:/app/d1/forward.py:ro # - ./d1/forward.py:/app/d1/forward.py:ro
# - ./tmp/cache/forward_data:/app/tmp/cache/forward_data:ro # - ./tmp/cache/forward_data:/app/tmp/cache/forward_data:ro
# restart: always # restart: always
networks:
network:
driver: bridge
# driver_opts:
# com.docker.network.bridge.name: br-mynet # stable bridge name (optional)
ipam:
config:
- subnet: ${SUBNET}.0/24
gateway: "${SUBNET}.1"
ip_range: "${SUBNET}.128/25" # optional: pool for containers

@ -1,5 +0,0 @@
# UVICORN_HOST=127.0.0.1
# UVICORN_PORT=80
# HTTP_AUTH_USERNAME=test
# HTTP_AUTH_PASSWORD=blah
APPS='["rest:get_router:"]'

@ -1,26 +0,0 @@
FROM alpine@sha256:56fa17d2a7e7f168a043a2712e63aed1f8543aeafdcee47c58dcffe38ed51099
RUN apk add --no-cache python3 py3-pip
RUN \
--mount=type=cache,target=/root/.cache/pip \
pip install \
--break-system-packages \
uv
WORKDIR /app
COPY requirements.txt requirements.txt
RUN \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=./deps/whl,target=/app/deps/whl \
uv pip install \
-f deps/whl \
-r requirements.txt \
--break-system-packages --system
COPY ./rest.py ./rest.py
# CMD ["python3", "rest.py"]

@ -1,28 +0,0 @@
venv_compile:
uv pip compile \
-p 3.12 \
--generate-hashes \
-f deps/whl \
requirements.in > \
requirements.txt
venv:
uv venv -p 3.12 .venv
uv pip install \
-p .venv/bin/python3 \
-f deps/whl \
-r requirements.txt
PYRIGHT_CMD ?= --threads 3
pyright:
.venv/bin/python3 \
-m pyright \
--pythonpath .venv/bin/python3 \
-p pyproject.toml \
$(PYRIGHT_CMD) \
.
RUFF_CMD ?= format
ruff:
.venv/bin/python3 -m ruff --config pyproject.toml $(RUFF_CMD) .

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -1,4 +0,0 @@
online.fxreader.pr34[django,fastapi,lint]>=0.1.5.24
fastapi
uvicorn
numpy

@ -1,582 +0,0 @@
# This file was autogenerated by uv via the following command:
# uv pip compile -p 3.12 --generate-hashes requirements.in
annotated-types==0.7.0 \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
--hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
# via pydantic
anyio==4.10.0 \
--hash=sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6 \
--hash=sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1
# via starlette
argcomplete==3.6.2 \
--hash=sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591 \
--hash=sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf
# via yq
asgiref==3.9.1 \
--hash=sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142 \
--hash=sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c
# via django
click==8.2.1 \
--hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \
--hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b
# via uvicorn
django==5.2.5 \
--hash=sha256:0745b25681b129a77aae3d4f6549b62d3913d74407831abaa0d9021a03954bae \
--hash=sha256:2b2ada0ee8a5ff743a40e2b9820d1f8e24c11bac9ae6469cd548f0057ea6ddcd
# via
# django-stubs
# django-stubs-ext
# online-fxreader-pr34
django-stubs==5.2.2 \
--hash=sha256:2a04b510c7a812f88223fd7e6d87fb4ea98717f19c8e5c8b59691d83ad40a8a6 \
--hash=sha256:79bd0fdbc78958a8f63e0b062bd9d03f1de539664476c0be62ade5f063c9e41e
# via online-fxreader-pr34
django-stubs-ext==5.2.2 \
--hash=sha256:8833bbe32405a2a0ce168d3f75a87168f61bd16939caf0e8bf173bccbd8a44c5 \
--hash=sha256:d9d151b919fe2438760f5bd938f03e1cb08c84d0651f9e5917f1313907e42683
# via django-stubs
fastapi==0.116.1 \
--hash=sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565 \
--hash=sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143
# via
# -r requirements.in
# online-fxreader-pr34
h11==0.16.0 \
--hash=sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1 \
--hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86
# via uvicorn
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
# via anyio
marisa-trie==1.3.1 \
--hash=sha256:076731f79f8603cb3216cb6e5bbbc56536c89f63f175ad47014219ecb01e5996 \
--hash=sha256:0b9816ab993001a7854b02a7daec228892f35bd5ab0ac493bacbd1b80baec9f1 \
--hash=sha256:0c2bc6bee737f4d47fce48c5b03a7bd3214ef2d83eb5c9f84210091370a5f195 \
--hash=sha256:0dcd42774e367ceb423c211a4fc8e7ce586acfaf0929c9c06d98002112075239 \
--hash=sha256:0e6f3b45def6ff23e254eeaa9079267004f0069d0a34eba30a620780caa4f2cb \
--hash=sha256:137010598d8cebc53dbfb7caf59bde96c33a6af555e3e1bdbf30269b6a157e1e \
--hash=sha256:2f7c10f69cbc3e6c7d715ec9cb0c270182ea2496063bebeda873f4aa83fd9910 \
--hash=sha256:3715d779561699471edde70975e07b1de7dddb2816735d40ed16be4b32054188 \
--hash=sha256:3834304fdeaa1c9b73596ad5a6c01a44fc19c13c115194704b85f7fbdf0a7b8e \
--hash=sha256:389721481c14a92fa042e4b91ae065bff13e2bc567c85a10aa9d9de80aaa8622 \
--hash=sha256:3a96ef3e461ecc85ec7d2233ddc449ff5a3fbdc520caea752bc5bc8faa975231 \
--hash=sha256:3e2a0e1be95237981bd375a388f44b33d69ea5669a2f79fea038e45fff326595 \
--hash=sha256:3e431f9c80ee1850b2a406770acf52c058b97a27968a0ed6aca45c2614d64c9f \
--hash=sha256:47631614c5243ed7d15ae0af8245fcc0599f5b7921fae2a4ae992afb27c9afbb \
--hash=sha256:52d1764906befef91886e3bff374d8090c9716822bd56b70e07aa697188090b7 \
--hash=sha256:5370f9ef6c008e502537cc1ff518c80ddf749367ce90179efa0e7f6275903a76 \
--hash=sha256:56043cf908ddf3d7364498085dbc2855d4ea8969aff3bf2439a79482a79e68e2 \
--hash=sha256:5a6abc9573a6a45d09548fde136dbcd4260b8c56f8dff443eaa565352d7cca59 \
--hash=sha256:5b7c1e7fa6c3b855e8cfbabf38454d7decbaba1c567d0cd58880d033c6b363bd \
--hash=sha256:5ef045f694ef66079b4e00c4c9063a00183d6af7d1ff643de6ea5c3b0d9af01b \
--hash=sha256:68678816818efcd4a1787b557af81f215b989ec88680a86c85c34c914d413690 \
--hash=sha256:6cac19952e0e258ded765737d1fb11704fe81bf4f27526638a5d44496f329235 \
--hash=sha256:70b4c96f9119cfeb4dc6a0cf4afc9f92f0b002cde225bcd910915d976c78e66a \
--hash=sha256:7e957aa4251a8e70b9fe02a16b2d190f18787902da563cb7ba865508b8e8fb04 \
--hash=sha256:82de2de90488d0fbbf74cf9f20e1afd62e320693b88f5e9565fc80b28f5bbad3 \
--hash=sha256:83a3748088d117a9b15d8981c947df9e4f56eb2e4b5456ae34fe1f83666c9185 \
--hash=sha256:83efc045fc58ca04c91a96c9b894d8a19ac6553677a76f96df01ff9f0405f53d \
--hash=sha256:8c8b2386d2d22c57880ed20a913ceca86363765623175671137484a7d223f07a \
--hash=sha256:8f81344d212cb41992340b0b8a67e375f44da90590b884204fd3fa5e02107df2 \
--hash=sha256:954fef9185f8a79441b4e433695116636bf66402945cfee404f8983bafa59788 \
--hash=sha256:9651daa1fdc471df5a5fa6a4833d3b01e76ac512eea141a5995681aebac5555f \
--hash=sha256:9688c7b45f744366a4ef661e399f24636ebe440d315ab35d768676c59c613186 \
--hash=sha256:97107fd12f30e4f8fea97790343a2d2d9a79d93697fe14e1b6f6363c984ff85b \
--hash=sha256:9868b7a8e0f648d09ffe25ac29511e6e208cc5fb0d156c295385f9d5dc2a138e \
--hash=sha256:986eaf35a7f63c878280609ecd37edf8a074f7601c199acfec81d03f1ee9a39a \
--hash=sha256:99a00cab4cf9643a87977c87a5c8961aa44fff8d5dd46e00250135f686e7dedf \
--hash=sha256:9c56001badaf1779afae5c24b7ab85938644ab8ef3c5fd438ab5d49621b84482 \
--hash=sha256:9dc61fb8f8993589544f6df268229c6cf0a56ad4ed3e8585a9cd23c5ad79527b \
--hash=sha256:9de573d933db4753a50af891bcb3ffbfe14e200406214c223aa5dfe2163f316d \
--hash=sha256:9e467e13971c64db6aed8afe4c2a131c3f73f048bec3f788a6141216acda598d \
--hash=sha256:9e6496bbad3068e3bbbb934b1e1307bf1a9cb4609f9ec47b57e8ea37f1b5ee40 \
--hash=sha256:9f92d3577c72d5a97af5c8e3d98247b79c8ccfb64ebf611311dcf631b11e5604 \
--hash=sha256:a1c6990961d1177f6d8fdf7b610fa2e7c0c02743a090d173f6dfa9dc9231c73c \
--hash=sha256:a5a0a58ffe2a7eb3f870214c6df8f9a43ce768bd8fed883e6ba8c77645666b63 \
--hash=sha256:a7416f1a084eb889c5792c57317875aeaa86abfe0bdc6f167712cebcec1d36ee \
--hash=sha256:a83f5f7ae3494e0cc25211296252b1b86901c788ed82c83adda19d0c98f828d6 \
--hash=sha256:a850b151bd1e3a5d9afef113adc22727d696603659d575d7e84f994bd8d04bf1 \
--hash=sha256:ad82ab8a58562cf69e6b786debcc7638b28df12f9f1c7bcffb07efb5c1f09cbd \
--hash=sha256:b173ec46d521308f7c97d96d6e05cf2088e0548f82544ec9a8656af65593304d \
--hash=sha256:bf9f2b97fcfd5e2dbb0090d0664023872dcde990df0b545eca8d0ce95795a409 \
--hash=sha256:c12b44c190deb0d67655021da1f2d0a7d61a257bf844101cf982e68ed344f28d \
--hash=sha256:c6571462417cda2239b1ade86ceaf3852da9b52c6286046e87d404afc6da20a7 \
--hash=sha256:c785fd6dae9daa6825734b7b494cdac972f958be1f9cb3fb1f32be8598d2b936 \
--hash=sha256:c7a33506d0451112911c69f38d55da3e0e050f2be0ea4e5176865cf03baf26a9 \
--hash=sha256:c89df75aefe1ad7e613340790130f1badc5926bcfa66a6b3c9471071002956a5 \
--hash=sha256:ca644534f15f85bba14c412afc17de07531e79a766ce85b8dbf3f8b6e7758f20 \
--hash=sha256:cbd28f95d5f30d9a7af6130869568e75bfd7ef2e0adfb1480f1f44480f5d3603 \
--hash=sha256:d0f87bdf660f01e88ab3a507955697b2e3284065afa0b94fc9e77d6ad153ed5e \
--hash=sha256:d4bd41a6e73c0d0adafe4de449b6d35530a4ce6a836a6ee839baf117785ecfd7 \
--hash=sha256:d8d5e686db0ae758837ed29b3b742afb994d1a01ce10977eabd3490f16b5c9f9 \
--hash=sha256:e5888b269e790356ce4525f3e8df1fe866d1497b7d7fb7548cfec883cb985288 \
--hash=sha256:ec633e108f277f2b7f4671d933a909f39bba549910bf103e2940b87a14da2783 \
--hash=sha256:ecdb19d33b26738a32602ef432b06cc6deeca4b498ce67ba8e5e39c8a7c19745 \
--hash=sha256:ee428575377e29c636f2b4b3b0488875dcea310c6c5b3412ec4ef997f7bb37cc \
--hash=sha256:f4bae4f920f2a1082eaf766c1883df7da84abdf333bafa15b8717c10416a615e
# via online-fxreader-pr34
mypy==1.17.1 \
--hash=sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341 \
--hash=sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5 \
--hash=sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849 \
--hash=sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733 \
--hash=sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81 \
--hash=sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403 \
--hash=sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6 \
--hash=sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01 \
--hash=sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91 \
--hash=sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972 \
--hash=sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8 \
--hash=sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd \
--hash=sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9 \
--hash=sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0 \
--hash=sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19 \
--hash=sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb \
--hash=sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd \
--hash=sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99 \
--hash=sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7 \
--hash=sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056 \
--hash=sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7 \
--hash=sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a \
--hash=sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed \
--hash=sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94 \
--hash=sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9 \
--hash=sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58 \
--hash=sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8 \
--hash=sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5 \
--hash=sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a \
--hash=sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df \
--hash=sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb \
--hash=sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d \
--hash=sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390 \
--hash=sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b \
--hash=sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b \
--hash=sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14 \
--hash=sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259 \
--hash=sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b
# via online-fxreader-pr34
mypy-extensions==1.1.0 \
--hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 \
--hash=sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558
# via mypy
nodeenv==1.9.1 \
--hash=sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f \
--hash=sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9
# via pyright
numpy==2.3.2 \
--hash=sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5 \
--hash=sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b \
--hash=sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631 \
--hash=sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58 \
--hash=sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b \
--hash=sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc \
--hash=sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089 \
--hash=sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf \
--hash=sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15 \
--hash=sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f \
--hash=sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3 \
--hash=sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170 \
--hash=sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910 \
--hash=sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91 \
--hash=sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45 \
--hash=sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c \
--hash=sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f \
--hash=sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b \
--hash=sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89 \
--hash=sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a \
--hash=sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220 \
--hash=sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e \
--hash=sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab \
--hash=sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2 \
--hash=sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b \
--hash=sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370 \
--hash=sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2 \
--hash=sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee \
--hash=sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619 \
--hash=sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712 \
--hash=sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1 \
--hash=sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec \
--hash=sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a \
--hash=sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450 \
--hash=sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a \
--hash=sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2 \
--hash=sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168 \
--hash=sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2 \
--hash=sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73 \
--hash=sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296 \
--hash=sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9 \
--hash=sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125 \
--hash=sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0 \
--hash=sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19 \
--hash=sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b \
--hash=sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f \
--hash=sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2 \
--hash=sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f \
--hash=sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a \
--hash=sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6 \
--hash=sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286 \
--hash=sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981 \
--hash=sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f \
--hash=sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2 \
--hash=sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0 \
--hash=sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b \
--hash=sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b \
--hash=sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56 \
--hash=sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5 \
--hash=sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3 \
--hash=sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8 \
--hash=sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0 \
--hash=sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036 \
--hash=sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6 \
--hash=sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8 \
--hash=sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48 \
--hash=sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07 \
--hash=sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b \
--hash=sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b \
--hash=sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d \
--hash=sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0 \
--hash=sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097 \
--hash=sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be \
--hash=sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5
# via -r requirements.in
online-fxreader-pr34==0.1.5.27 \
--hash=sha256:d081758fdb91fb460da5c55d3a38257122de096363a8d956ba2f91a234566010
# via -r requirements.in
pathspec==0.12.1 \
--hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \
--hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
# via mypy
pip==25.1 \
--hash=sha256:13b4aa0aaad055020a11bec8a1c2a70a2b2d080e12d89b962266029fff0a16ba \
--hash=sha256:272bdd1289f80165e9070a4f881e8f9e1001bbb50378561d1af20e49bf5a2200
# via online-fxreader-pr34
pydantic==2.11.7 \
--hash=sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db \
--hash=sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b
# via
# fastapi
# online-fxreader-pr34
# pydantic-settings
pydantic-core==2.33.2 \
--hash=sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d \
--hash=sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac \
--hash=sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02 \
--hash=sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56 \
--hash=sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4 \
--hash=sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22 \
--hash=sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef \
--hash=sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec \
--hash=sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d \
--hash=sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b \
--hash=sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a \
--hash=sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f \
--hash=sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052 \
--hash=sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab \
--hash=sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916 \
--hash=sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c \
--hash=sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf \
--hash=sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27 \
--hash=sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a \
--hash=sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8 \
--hash=sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7 \
--hash=sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612 \
--hash=sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1 \
--hash=sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039 \
--hash=sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca \
--hash=sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7 \
--hash=sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a \
--hash=sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6 \
--hash=sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782 \
--hash=sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b \
--hash=sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7 \
--hash=sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025 \
--hash=sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849 \
--hash=sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7 \
--hash=sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b \
--hash=sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa \
--hash=sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e \
--hash=sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea \
--hash=sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac \
--hash=sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51 \
--hash=sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e \
--hash=sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162 \
--hash=sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65 \
--hash=sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2 \
--hash=sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954 \
--hash=sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b \
--hash=sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de \
--hash=sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc \
--hash=sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64 \
--hash=sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb \
--hash=sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9 \
--hash=sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101 \
--hash=sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d \
--hash=sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef \
--hash=sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3 \
--hash=sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1 \
--hash=sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5 \
--hash=sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88 \
--hash=sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d \
--hash=sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290 \
--hash=sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e \
--hash=sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d \
--hash=sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808 \
--hash=sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc \
--hash=sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d \
--hash=sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc \
--hash=sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e \
--hash=sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640 \
--hash=sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30 \
--hash=sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e \
--hash=sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9 \
--hash=sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a \
--hash=sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9 \
--hash=sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f \
--hash=sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb \
--hash=sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5 \
--hash=sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab \
--hash=sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d \
--hash=sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572 \
--hash=sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593 \
--hash=sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29 \
--hash=sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535 \
--hash=sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1 \
--hash=sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f \
--hash=sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8 \
--hash=sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf \
--hash=sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246 \
--hash=sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9 \
--hash=sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011 \
--hash=sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9 \
--hash=sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a \
--hash=sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3 \
--hash=sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6 \
--hash=sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8 \
--hash=sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a \
--hash=sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2 \
--hash=sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c \
--hash=sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6 \
--hash=sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d
# via pydantic
pydantic-settings==2.10.1 \
--hash=sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee \
--hash=sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796
# via online-fxreader-pr34
pyright==1.1.404 \
--hash=sha256:455e881a558ca6be9ecca0b30ce08aa78343ecc031d37a198ffa9a7a1abeb63e \
--hash=sha256:c7b7ff1fdb7219c643079e4c3e7d4125f0dafcc19d253b47e898d130ea426419
# via online-fxreader-pr34
python-dotenv==1.1.1 \
--hash=sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc \
--hash=sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab
# via pydantic-settings
pyyaml==6.0.2 \
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
--hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \
--hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \
--hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \
--hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \
--hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \
--hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \
--hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \
--hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \
--hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \
--hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \
--hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \
--hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \
--hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \
--hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \
--hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \
--hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \
--hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \
--hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \
--hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \
--hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \
--hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \
--hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \
--hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \
--hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \
--hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \
--hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \
--hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \
--hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \
--hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \
--hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \
--hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \
--hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \
--hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \
--hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \
--hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \
--hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \
--hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \
--hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \
--hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \
--hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \
--hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \
--hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \
--hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \
--hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \
--hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \
--hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \
--hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \
--hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \
--hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \
--hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
# via yq
ruff==0.12.11 \
--hash=sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000 \
--hash=sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee \
--hash=sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211 \
--hash=sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee \
--hash=sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8 \
--hash=sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f \
--hash=sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793 \
--hash=sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0 \
--hash=sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644 \
--hash=sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2 \
--hash=sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065 \
--hash=sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd \
--hash=sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93 \
--hash=sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea \
--hash=sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d \
--hash=sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3 \
--hash=sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39 \
--hash=sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd \
--hash=sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9
# via online-fxreader-pr34
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
sqlparse==0.5.3 \
--hash=sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272 \
--hash=sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca
# via django
starlette==0.47.3 \
--hash=sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9 \
--hash=sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51
# via fastapi
tomli==2.2.1 \
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
--hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
--hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
--hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
--hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
--hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
--hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
--hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
--hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
--hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
--hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
--hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
--hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
--hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
--hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
--hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
--hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
--hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
--hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
--hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
--hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
--hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
--hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
--hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
--hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
--hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
--hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
--hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
--hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
--hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
--hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
# via online-fxreader-pr34
tomlkit==0.13.3 \
--hash=sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1 \
--hash=sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0
# via
# online-fxreader-pr34
# yq
tomlq==0.1.0 \
--hash=sha256:4b966fd999ed2bf69081b7c7f5caadbc4c9542d0ed5fcf2e9b7b4d8d7ada3c82 \
--hash=sha256:e775720e90da3e405142b9fe476145e71c0389f787b1ff9933f92a1704d8c6e7
# via online-fxreader-pr34
types-pyyaml==6.0.12.20250822 \
--hash=sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098 \
--hash=sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413
# via django-stubs
typing-extensions==4.15.0 \
--hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \
--hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
# via
# anyio
# django-stubs
# django-stubs-ext
# fastapi
# mypy
# pydantic
# pydantic-core
# pyright
# starlette
# typing-inspection
typing-inspection==0.4.1 \
--hash=sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51 \
--hash=sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28
# via
# pydantic
# pydantic-settings
uvicorn==0.35.0 \
--hash=sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a \
--hash=sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01
# via
# -r requirements.in
# online-fxreader-pr34
uvloop==0.21.0 \
--hash=sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0 \
--hash=sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f \
--hash=sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc \
--hash=sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414 \
--hash=sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f \
--hash=sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d \
--hash=sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd \
--hash=sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff \
--hash=sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c \
--hash=sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3 \
--hash=sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d \
--hash=sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a \
--hash=sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb \
--hash=sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2 \
--hash=sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0 \
--hash=sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6 \
--hash=sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c \
--hash=sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af \
--hash=sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc \
--hash=sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb \
--hash=sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75 \
--hash=sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb \
--hash=sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553 \
--hash=sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e \
--hash=sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6 \
--hash=sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d \
--hash=sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206 \
--hash=sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc \
--hash=sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281 \
--hash=sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b \
--hash=sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8 \
--hash=sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79 \
--hash=sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f \
--hash=sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe \
--hash=sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26 \
--hash=sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816 \
--hash=sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2
# via online-fxreader-pr34
xmltodict==0.14.2 \
--hash=sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553 \
--hash=sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac
# via yq
yq==3.4.3 \
--hash=sha256:547e34bc3caacce83665fd3429bf7c85f8e8b6b9aaee3f953db1ad716ff3434d \
--hash=sha256:ba586a1a6f30cf705b2f92206712df2281cd320280210e7b7b80adcb8f256e3b
# via tomlq
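
Every entry above carries `--hash` pins, so pip installs this file in hash-checking mode: any downloaded artifact whose sha256 digest is not in the pinned set aborts the install. A minimal sketch of the same check in plain Python — the wheel filename is a placeholder, the digest is the one pinned above for online-fxreader-pr34==0.1.5.27:

import hashlib
import pathlib

# Placeholder wheel path; any artifact from the list above works the same way.
wheel = pathlib.Path('online_fxreader_pr34-0.1.5.27-py3-none-any.whl')

# sha256 digest pinned above for online-fxreader-pr34==0.1.5.27.
pinned = 'd081758fdb91fb460da5c55d3a38257122de096363a8d956ba2f91a234566010'

# pip performs this comparison for every requirement before installing.
actual = hashlib.sha256(wheel.read_bytes()).hexdigest()
assert actual == pinned, 'hash mismatch: %s' % actual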

@@ -1,95 +0,0 @@
import fastapi
import re
import numpy
import subprocess
import fastapi.responses
import pydantic_settings
import logging

logger = logging.getLogger(__name__)

from online.fxreader.pr34.commands_typed import metrics as pr34_metrics

from typing import Optional, Any, ClassVar


class Settings(pydantic_settings.BaseSettings):
    checks_hosts: list[str]

    _singleton: ClassVar[Optional['Settings']] = None

    @classmethod
    def singleton(cls) -> 'Settings':
        if cls._singleton is None:
            cls._singleton = Settings.model_validate({})
        return cls._singleton


def ping_stats(host: str) -> Optional[float]:
    try:
        ping_output = subprocess.check_output(
            [
                'ping',
                '-i',
                '0.1',
                '-c',
                '3',
                '-w',
                '1',
                host,
            ]
        ).decode('utf-8')
    except:
        logger.exception('')
        ping_output = ''

    r1 = re.compile(r'time=(\d+\.\d+)\sms')
    spend_time = [float(o[1]) for o in r1.finditer(ping_output)]

    if len(spend_time) == 0:
        return None
    else:
        return float(numpy.mean(spend_time))


async def metrics_get() -> fastapi.responses.Response:
    ping_res = {h: ping_stats(h) for h in Settings.singleton().checks_hosts}

    metrics = [
        pr34_metrics.Metric.model_validate(
            dict(
                name='ping_mean',
                type='gauge',
                help='ping to host, 3 counts, up to 1 second',
                samples=[
                    dict(
                        value=str(v),
                        parameters=dict(
                            host=k,
                        ),
                    )
                ],
            )
        )
        for k, v in ping_res.items()
        if not v is None
    ]

    serialize_res = pr34_metrics.serialize(metrics)

    return fastapi.responses.Response(
        content=serialize_res.json2,
        headers={
            'Content-Type': serialize_res.content_type,
        },
    )


def get_router() -> fastapi.APIRouter:
    router = fastapi.APIRouter()
    router.get('/metrics')(metrics_get)
    return router
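
For context, a sketch of how this now-removed router would have been mounted. Assumptions: the `CHECKS_HOSTS` variable follows pydantic-settings' convention of reading `checks_hosts` from the environment (complex fields such as list[str] are parsed as JSON), and the app module name is hypothetical:

import os
import fastapi

# checks_hosts comes from the environment via pydantic-settings;
# list[str] fields are parsed from a JSON string.
os.environ.setdefault('CHECKS_HOSTS', '["8.8.8.8", "1.1.1.1"]')

app = fastapi.FastAPI()
app.include_router(get_router())  # exposes GET /metrics

# run with e.g.: uvicorn metrics_app:app  (module name is hypothetical)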

@@ -1,11 +1,9 @@
-# FROM alpine:latest
-FROM alpine@sha256:56fa17d2a7e7f168a043a2712e63aed1f8543aeafdcee47c58dcffe38ed51099
+FROM alpine:latest
 RUN apk add openssh
 RUN apk add python3
 RUN apk add tini
 RUN apk add bash curl
 RUN apk add py3-pip
-RUN apk add netcat-openbsd
 RUN pip3 install --break-system-packages requests
 WORKDIR /app

@@ -1,56 +1,132 @@
 py3 << EOF
-def load():
+def f1():
+    t1 = vim.current.window
+    t2 = t1.width
+    vim.command('vnew')
+    t3 = t2 // 3
+    vim.command('vertical resize %d' % t3)
+    vim.current.window = t1
+
+def f2():
+    context = {
+        k : vim.options['splitright']
+        for k in ['splitright']
+    }
+    try:
+        current_window = vim.current.window
+        vim.options['splitright'] = True
+        vim.command('vnew')
+        vim.command('r! tmux show-buffer')
+        vim.current.window = current_window
+    finally:
+        for k, v in context.items():
+            vim.options[k] = v
+
+def f5_1(pattern, flags, info):
+    import subprocess
+    import io
+    import re
+    import tempfile
+    import traceback
     import logging
-    import logging.handlers
-    import importlib
-    import json
-    import pathlib
-    import os
-    import sys
-    sys.path.append(
-        str(pathlib.Path('~/.vim').expanduser())
-    )
-    logging.basicConfig(
-        level=getattr(
-            logging,
-            os.environ.get('VIM_PY3_LEVEL', 'WARNING')
-        ),
-        # filename=pathlib.Path('~/.py3.vimrc.log').expanduser(),
-        handlers=[
-            logging.handlers.RotatingFileHandler(
-                pathlib.Path('~/.py3.vimrc.log').expanduser(),
-                maxBytes=128 * 1024,
-                backupCount=3,
-            )
-        ]
-    )
-    modules = [
-        str(o)
-        for o in json.loads(os.environ.get('VIM_PY3_MODULES', '["online_fxreader_pr34_vim.main"]'))
-    ]
-    for o in modules:
-        # if not o.exists():
-        #     raise RuntimeError('not found %s' % str(o))
-        m = importlib.import_module(o)
-        getattr(m, 'init')()
-        # vim.command('py3file {}'.format(str(o)))
+
+    #print([pattern, flags, info])
+    completed_process = None
+
+    options = dict(
+        recursive=False,
+        ext=[],
+    )
+
+    #print('fuck')
+    if b'r' in flags:
+        while True:
+            ext_m = re.compile(r'^.([^\,]+),(.*)$').match(pattern)
+
+            if pattern[:3] in [r'\r,']:
+                options['recursive'] = True
+                pattern = pattern[3:]
+            elif not ext_m is None:
+                options['ext'].append(
+                    ext_m[1]
+                )
+                pattern = ext_m[2]
+            else:
+                break
+
+    print([flags, pattern, options,])
+
+    try:
+        git_cmd = [
+            'git', 'grep',
+            '-n',
+        ]
+
+        if options['recursive']:
+            git_cmd.append('--recurse-submodules')
+
+        git_cmd.extend(['-P', pattern])
+
+        if len(options['ext']) > 0:
+            git_cmd.extend(['--', *[
+                '**/*%s' % o
+                for o in options['ext']
+            ]])
+
+        completed_process = subprocess.run(
+            git_cmd,
+            capture_output=True,
+        )
+        assert (
+            completed_process.returncode == 0 or
+            (
+                completed_process.stdout == b''
+                #completed_process.stdout == b'' and
+                #completed_process.stderr == b''
+            )
+        )
+        t1 = completed_process.stdout
+    except:
+        logging.error(''.join([
+            traceback.format_exc(),
+            getattr(completed_process, 'stdout', b'').decode('utf-8'),
+            getattr(completed_process, 'stderr', b'').decode('utf-8'),
+        ]))
+        t1 = b''
+
+    def watch(data):
+        with tempfile.NamedTemporaryFile(suffix='.txt') as f:
+            with io.open(f.name, 'wb') as f2:
+                f2.write(data)
+            vim.command('!less %s' % f.name)
+
+    #watch(t1)
+
+    t2 = []
+    for o in t1.splitlines():
+        try:
+            #watch(o.encode('utf-8'))
+            t3 = o.decode('utf-8')
+            t4 = re.compile(r'^([^\:\=]+)[\:\=](\d+)[\:\=](.*)$').match(t3)
+            if not t4 is None:
+                t2.append(
+                    dict(
+                        name=t4[3].strip(),
+                        filename=t4[1],
+                        cmd=t4[2],
+                    )
+                )
+        except:
+            pass
+    #print(t2)
+    #return [{'name': 'blah', 'filename': 'docker-compose.yml', 'cmd': '23'}]
+    return t2
 EOF
-" py3file ~/.module.vimrc.py
-python3 load()
 function! F5(pattern, flags, info)
-python3 import online_fxreader_pr34_vim.main;
 let res = py3eval(
-\'online_fxreader_pr34_vim.main.f5_1(
+\'f5_1(
 \vim.bindeval("a:pattern").decode("utf-8"),
 \vim.bindeval("a:flags"),
 \vim.bindeval("a:info")

@@ -79,7 +79,7 @@ bindgesture swipe:4:up exec $lock_cmd
 #
 # Basics:
 #
-bindsym Shift+$mod+q exec $lock_cmd
+bindsym Shift+$mod+l exec $lock_cmd
 bindsym --locked Shift+mod1+1 \
     exec ~/.local/bin/commands \
@@ -122,14 +122,12 @@ bindsym --locked XF86AudioMute exec zsh -c "commands media-toggle-volume"
 bindsym --locked XF86AudioNext exec zsh -c "commands media-next"
 bindsym --locked XF86AudioPrev exec zsh -c "commands media-prev"
-bindsym $mod+m exec zsh -c "commands color_scheme toggle"
 # Start a terminal
 bindsym $mod+t exec $term
-## Kill focused window
-#bindsym $mod+Shift+q kill
+# Kill focused window
+bindsym $mod+Shift+q kill
 # Start your launcher
 bindsym $mod+Return exec $menu

@@ -30,7 +30,6 @@ bind -n M-[ copy-mode
 bind -n M-m set -g mouse
 set -g default-terminal "screen-256color"
-set-option -g status-style "bg=#00aa00,fg=#ffffff"
 #set-option -ga terminal-overrides ",screen-256color:Tc"
 set-option -g pane-active-border-style "bg=#33dd44 fg=#ffffff"
@@ -38,8 +37,7 @@ set-option -g pane-active-border-style "bg=#33dd44 fg=#ffffff"
 bind space display "Fuck!"
 set-option -g set-titles on
 set-option -g set-titles-string "#S / #W"
-# set -g status-right "#H %H:%M:%S %Y-%m-%d %Z"
-set -g status-right "#{=-16:pane_current_path} #{pane_index} #H %H:%M:%S %Y-%m-%d %Z"
+set -g status-right "#H %H:%M:%S %Y-%m-%d %Z"
 set -g status-interval 1
-set -g status-right-length 64
+set -g status-right-length 60
 set -g mouse on

@@ -1,514 +0,0 @@
import functools
import configparser
import subprocess
import dataclasses
import json
import datetime
import collections
import asyncio
import threading
import re
import inspect
import pathlib
import logging
import fnmatch

import vim

from typing import (
    Optional,
    ClassVar,
    Self,
    Any,
    Callable,
)

from .utils import Vim

logger = logging.getLogger(__name__)

MODULE_NAME = 'online_fxreader_pr34_vim'


def future_dump_exception(future: Any) -> None:
    try:
        future.result()
    except:
        logger.exception('')


class FastSelect:
    _instance: ClassVar[Optional['FastSelect']] = None

    def __init__(self) -> None:
        self.loop = asyncio.new_event_loop()
        self.thread = threading.Thread(
            target=self.loop.run_forever,
        )
        self._buffer_frequency: dict[int, int] = dict()
        self._buffer_last_used: dict[int, int] = dict()
        self._filter_pattern: Optional[str] = None
        self._include_git: Optional[bool] = False
        self._filtered_ids: Optional[set[int]] = None
        self._items: Optional[list['self.entry_t']] = None
        self._buffers: Optional[list['self.entry_t']] = None
        self._tracked_files: Optional[list['self.entry_t']] = None
        self._queue: collections.deque[Callable[[], None]] = collections.deque()
        self._lock = threading.Lock()
        self.popup_id: Optional[int] = None
        self.thread.start()
        self._option_id: asyncio.Future[Optional[int]] = None
        self._options: list[str] = None

        auto_group = '{}_{}_{}'.format(
            MODULE_NAME,
            type(self).__name__.lower(),
            'close',
        ).capitalize()

        vim.command(r"""
            func! UIThread(timer_id)
                python3 online_fxreader_pr34_vim.beta.FastSelect.singleton().ui_thread()
            endfunc
        """)
        Vim.run_command(r"""
            call timer_start(100, 'UIThread', {'repeat': -1})
        """)
        Vim.run_command(
            r"""
            augroup {auto_group}
                autocmd!
                autocmd VimLeavePre * python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().close()
                autocmd BufEnter * python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().on_buf_enter()
            augroup END
            """.format(
                auto_group=auto_group,
            )
        )

    def __del__(self) -> None:
        self.close()

    def close(self) -> None:
        logger.info(dict(msg='close started'))
        self.loop.call_soon_threadsafe(self.loop.stop)
        self.thread.join()
        logger.info(dict(msg='close done'))

    @classmethod
    def singleton(cls) -> Self:
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    def pick_option_put_id(self, option_id: int) -> None:
        self.loop.call_soon_threadsafe(lambda: self._option_id.set_result(option_id))

    @dataclasses.dataclass
    class entry_t:
        path: pathlib.Path
        buf_number: Optional[int] = None

    def _get_buffers(
        self,
        res_future: Optional[asyncio.Future[
            list[entry_t]
        ]] = None,
    ) -> list[entry_t]:
        res = [
            self.entry_t(
                buf_number=o.number,
                path=pathlib.Path(o.name).absolute(),
            )
            for o in vim.buffers
        ]
        if res_future:
            self.loop.call_soon_threadsafe(lambda: res_future.set_result(res))
        return res

    async def _switch_buffer(self) -> None:
        with self._lock:
            self._reset_items()
        await self._sync_task(self._update_items)
        # self._items = buffers
        with self._lock:
            self._set_filter_pattern('')
        selected_id = await self._pick_option_from_popup(
            # [o[0] for o in buffers]
        )
        logger.info(dict(selected_id=selected_id))

        def ui_switch_buffer():
            nonlocal selected_id
            # nonlocal buffers
            logger.warning(dict(
                buffers=self._items[:3],
                id=selected_id,
            ))
            # print(vim.buffers, selected_id)
            if not selected_id is None:
                selected_item = self._items[selected_id]
                if selected_item.buf_number is None:
                    Vim.run_command('badd %s' % json.dumps(str(selected_item.path))[1:-1])
                    Vim.run_command('e %s' % json.dumps(str(selected_item.path))[1:-1])
                else:
                    vim.current.buffer = vim.buffers[selected_item.buf_number]

        with self._lock:
            self._queue.append(ui_switch_buffer)

    def switch_buffer(self) -> None:
        logger.info(dict(msg='before switch_buffer started'))
        result = asyncio.run_coroutine_threadsafe(self._switch_buffer(), self.loop)
        result.add_done_callback(future_dump_exception)
        logger.info(dict(msg='after switch_buffer started'))

    async def _pick_option_from_popup(
        self,
        # options: list[str],
    ) -> Optional[int]:
        logger.info(dict(msg='started'))
        self._filter_pattern = ''
        self._popup_id = None
        # self._options = options
        self._option_id = asyncio.Future[int]()
        await self._pick_option_start_popup()
        option_id = await self._option_id
        logger.info(dict(option_id=option_id))
        self._options = None
        self._option_id = None
        logger.info(dict(msg='done'))
        if option_id >= 0:
            return self._filtered_ids[option_id]
        else:
            return None

    def ui_thread(self):
        with self._lock:
            # Vim.run_command(r'''
            #     set laststatus=2
            #     set statusline={}
            # '''.format(datetime.datetime.now().isoformat()))
            while len(self._queue) > 0:
                cmd = self._queue.pop()
                try:
                    cmd_str = inspect.getsource(cmd)
                except:
                    cmd_str = str(cmd)
                try:
                    logger.warning(dict(msg='start command', cmd=cmd_str))
                    cmd()
                except:
                    logger.exception('')
                # self._result.append(
                #     vim.command(cmd)
                # )

    def on_buf_enter(self) -> None:
        result = asyncio.run_coroutine_threadsafe(
            self._on_buf_enter(
                buf_number=vim.current.buffer.number,
                buf_name=pathlib.Path(vim.current.buffer.name),
            ),
            self.loop,
        )
        result.add_done_callback(future_dump_exception)

    def on_filter_key(self, key: str) -> None:
        logger.info(dict(msg='got key', key=key))
        if key == bytes([27]):
            logger.info(dict(msg='closing popup'))
            vim.Function('popup_close')(self._popup_id)
            return 1
        if key == b'\x80kb':
            logger.info(dict(msg='backspace'))
            with self._lock:
                self._set_filter_pattern(self._filter_pattern[:-1])
        # C-g
        elif key == b'\x07':
            with self._lock:
                self._include_git = not self._include_git
                self._update_items()
                self._update_filtered()
                # self._update_popup()
        else:
            try:
                key_str = key.decode('utf-8')
            except:
                return vim.Function('popup_filter_menu')(
                    self._popup_id, key
                )
                # return 0
            if not key_str.isprintable():
                return vim.Function('popup_filter_menu')(
                    self._popup_id, key
                )
                # return 0
            else:
                with self._lock:
                    self._set_filter_pattern(self._filter_pattern + key_str)
        self._update_popup()
        return 1

    async def _sync_task(
        self,
        cb: Callable[[], None],
        # future: asyncio.Future[bool]
    ) -> None:
        res_future: asyncio.Future[bool] = asyncio.Future()

        def wrapper():
            res: bool = True
            try:
                cb()
            except:
                logger.exception('')
                res = False
            self.loop.call_soon_threadsafe(lambda: res_future.set_result(res))

        with self._lock:
            self._queue.append(wrapper)
        return await res_future

    def _cwd(cls) -> pathlib.Path:
        return pathlib.Path(
            vim.Function('getcwd')().decode('utf-8')
        )

    def _update_items(self) -> None:
        known_files: dict[str, int] = dict()
        if self._buffers is None:
            self._buffers = self._get_buffers()
            logger.info(dict(buffers=self._buffers[:3]))
        if self._include_git:
            if self._tracked_files is None:
                for o in self._buffers:
                    assert o.buf_number
                    known_files[str(o.path)] = o.buf_number
                ls_files_output = [
                    o.strip()
                    for o in subprocess.check_output(
                        ['git', 'ls-files', '--recurse-submodules',], cwd=self._cwd(),
                    ).decode('utf-8').splitlines()
                ]
                self._tracked_files = []
                for o in ls_files_output:
                    path = pathlib.Path(
                        o,
                    ).absolute()
                    entry = self.entry_t(
                        path=path,
                        buf_number=known_files.get(str(path)),
                    )
                    if entry.buf_number:
                        continue
                    self._tracked_files.append(entry)
                logger.info(dict(tracked_files=self._tracked_files[:3]))
            self._items = self._buffers + self._tracked_files
        else:
            self._items = self._buffers
        self._items = sorted(
            self._items,
            # key=lambda x: -self._buffer_frequency.get(x[1], 0)
            key=lambda x: -self._buffer_last_used.get(x.buf_number, 0),
        )

    def _reset_items(self) -> None:
        self._buffers = None
        self._tracked_files = None
        self._items = None

    def _update_filtered(self) -> None:
        pattern = re.compile(self._filter_pattern)
        self._filtered_ids = [
            i for i, o in enumerate(self._items) if not pattern.search(str(o.path)) is None
        ]
        self._options = [str(self._items[o].path) for o in self._filtered_ids]

    def _set_filter_pattern(self, filter_pattern: str) -> None:
        self._filter_pattern = filter_pattern
        self._update_filtered()

    def _update_popup(self) -> None:
        vim.Function('popup_settext')(
            self._popup_id,
            self._options,
        )
        vim.Function('popup_setoptions')(self._popup_id, {'title': 'Select a file, [%s]' % self._filter_pattern})

    async def _on_buf_enter(
        self,
        buf_number: int,
        buf_name: pathlib.Path,
    ) -> None:
        # logger.info(dict(msg='waiting'))
        with self._lock:
            # buf_number = vim.current.buffer.number
            if not buf_number in self._buffer_frequency:
                self._buffer_frequency[buf_number] = 0
            self._buffer_frequency[buf_number] += 1
            self._buffer_last_used[buf_number] = datetime.datetime.now().timestamp()
            logger.info(
                dict(
                    msg='updated',
                    buf_path=str(buf_name),
                    frequency=self._buffer_frequency[buf_number],
                    buf_number=buf_number,
                )
            )

    async def _pick_option_start_popup(
        self,
    ):
        callback_name = '{}_{}_{}'.format(
            MODULE_NAME,
            type(self).__name__.lower(),
            'popup_callback',
        ).capitalize()
        filter_name = '{}_{}_{}'.format(
            MODULE_NAME,
            type(self).__name__.lower(),
            'popup_filter',
        ).capitalize()
        if int(vim.eval('exists("{}")'.format(callback_name))) == 1:
            logger.warning(dict(msg='callback already defined, %s' % callback_name))
        vim.command(
            r"""
            function! {callback_name}(id, result)
                if a:result > 0
                    call py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().pick_option_put_id(' . (a:result - 1). ')')
                else
                    call py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().pick_option_put_id(-1)')
                endif
            endfunction
            """.format(
                callback_name=callback_name,
            )
        )
        vim.command(
            r"""
            function! {filter_name}(win_id, key)
                return py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().on_filter_key(key)', #{key: a:key})
            endfunction
            """.replace(
                '{filter_name}',
                filter_name,
            )
        )
        logger.info(dict(msg='before popup'))
        popup_menu = vim.Function('popup_menu')

        def create_popup():
            self._popup_id = popup_menu(
                self._options,
                {
                    'title': 'Select a file',
                    'callback': callback_name,
                    'filter': filter_name,
                    'wrap': 1,
                    'maxwidth': 80,
                    'close': 'button',
                    'resize': 1,
                    'drag': 1,
                    'maxheight': '16',
                },
            )

        with self._lock:
            self._queue.append(
                create_popup,
                # lambda : vim.command(
                #     "call popup_menu({options}, {'title': '{title}', 'callback': '{callback}'})".replace(
                #         '{options}', '[%s]' % ','.join([
                #             '\'%s\'' % o.replace('\'', '\\\'')
                #             for o in self._options
                #         ]),
                #     ).replace(
                #         '{title}', 'Select a file',
                #     ).replace(
                #         '{callback}',
                #         callback_name
                #     )
                # )
            )
        # logger.info(dict(popup_id=popup_id))
        # logger.info(dict(msg='after popup'))


def init():
    FastSelect.singleton()
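
FastSelect keeps a private asyncio loop on a worker thread, queues all Vim API calls back to the UI thread via `_queue` plus a timer, and hops results across threads with `call_soon_threadsafe`; `switch_buffer()` submits the coroutine with `run_coroutine_threadsafe`. The thread-hopping core, reduced to a standalone sketch:

import asyncio
import threading

loop = asyncio.new_event_loop()
thread = threading.Thread(target=loop.run_forever, daemon=True)
thread.start()

async def work() -> int:
    # Stand-in for _switch_buffer(): awaits a future resolved from another thread.
    fut: asyncio.Future[int] = asyncio.Future()
    loop.call_soon_threadsafe(fut.set_result, 42)
    return await fut

# Submit from the "UI" thread, as switch_buffer() does.
result = asyncio.run_coroutine_threadsafe(work(), loop).result()
print(result)  # 42

loop.call_soon_threadsafe(loop.stop)
thread.join()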

@ -1,265 +0,0 @@
import functools
import configparser
import collections
import asyncio
import threading
import re
import inspect
import pathlib
import logging
import fnmatch
import vim
from typing import (
Optional,
ClassVar,
Self,
Any,
Callable,
)
logger = logging.getLogger(__name__)
from .utils import Vim
# logging.basicConfig(level=logging.WARNING)
MODULE_NAME = 'online_fxreader_pr34_vim'
def f1():
t1 = vim.current.window
t2 = t1.width
vim.command('vnew')
t3 = t2 // 3
vim.command('vertical resize %d' % t3)
vim.current.window = t1
def f2():
context = {k: vim.options['splitright'] for k in ['splitright']}
try:
current_window = vim.current.window
vim.options['splitright'] = True
vim.command('vnew')
vim.command('r! tmux show-buffer')
vim.current.window = current_window
finally:
for k, v in context.items():
vim.options[k] = v
def f5_1(pattern, flags, info):
import subprocess
import io
import re
import tempfile
import traceback
import logging
# print([pattern, flags, info])
completed_process = None
options = dict(
recursive=False,
ext=[],
)
# print('fuck')
if b'r' in flags:
while True:
ext_m = re.compile(r'^.([^\,]+),(.*)$').match(pattern)
if pattern[:3] in [r'\r,']:
options['recursive'] = True
pattern = pattern[3:]
elif not ext_m is None:
options['ext'].append(ext_m[1])
pattern = ext_m[2]
else:
break
print(
[
flags,
pattern,
options,
]
)
try:
git_cmd = [
'git',
'grep',
'-n',
]
if options['recursive']:
git_cmd.append('--recurse-submodules')
git_cmd.extend(['-P', pattern])
if len(options['ext']) > 0:
git_cmd.extend(['--', *['**/*%s' % o for o in options['ext']]])
completed_process = subprocess.run(
git_cmd,
capture_output=True,
)
assert completed_process.returncode == 0 or (
completed_process.stdout == b''
# completed_process.stdout == b'' and
# completed_process.stderr == b''
)
t1 = completed_process.stdout
except:
logging.error(
''.join(
[
traceback.format_exc(),
getattr(completed_process, 'stdout', b'').decode('utf-8'),
getattr(completed_process, 'stderr', b'').decode('utf-8'),
]
)
)
t1 = b''
def watch(data):
with tempfile.NamedTemporaryFile(suffix='.txt') as f:
with io.open(f.name, 'wb') as f2:
f2.write(data)
vim.command('!less %s' % f.name)
# watch(t1)
t2 = []
for o in t1.splitlines():
try:
# watch(o.encode('utf-8'))
t3 = o.decode('utf-8')
t4 = re.compile(r'^([^\:\=]+)[\:\=](\d+)[\:\=](.*)$').match(t3)
if not t4 is None:
t2.append(
dict(
name=t4[3].strip(),
filename=t4[1],
cmd=t4[2],
)
)
except:
pass
# print(t2)
# return [{'name': 'blah', 'filename': 'docker-compose.yml', 'cmd': '23'}]
return t2
class EditorConfigModeline:
_instance: ClassVar[Optional['EditorConfigModeline']] = None
def __init__(self) -> None:
self.configs: dict[
pathlib.Path,
dict[str, str],
] = dict()
Vim.run_command(r"""
augroup EditorConfigModeline
autocmd!
" autocmd BufEnter * ++nested python3 import online_fxreader_pr34_vim.main; online_fxreader_pr34_vim.main.EditorConfigModeline.singleton().on_buffer()
autocmd BufWinEnter * ++nested python3 import online_fxreader_pr34_vim.main; online_fxreader_pr34_vim.main.EditorConfigModeline.singleton().on_buffer()
augroup END
""")
@classmethod
def singleton(cls) -> Self:
if cls._instance is None:
cls._instance = cls()
return cls._instance
def load_config(self) -> Optional[dict[str, str]]:
cwd = pathlib.Path.cwd()
if not cwd in self.configs:
config_path = cwd / '.editorconfig'
if not config_path.exists():
return None
parser = configparser.ConfigParser()
parser.optionxform = str # keep case
try:
parser.read(str(config_path))
except:
logger.exception('')
return None
config: dict[str, str] = dict()
for section in parser.sections():
logger.info(dict(section=section))
if len(section) > 0:
# pattern = section[1:-1]
pattern = section
if not parser[section].get('vim_modeline') is None:
config[pattern] = parser[section].get('vim_modeline')
self.validate_modeline(config[pattern])
self.configs[cwd] = config
return self.configs[cwd]
@classmethod
def validate_modeline(cls, modeline: str) -> None:
pattern = re.compile(r'^set(\s+(noet|sts|ts|et|ai|ci|noai|noci|sw)(\=\w)?)+$')
assert pattern.match(modeline), 'invalid modeline %s' % modeline
@classmethod
def find_entry(
cls,
file_path: pathlib.Path,
config: Optional[dict[str, str]] = None,
) -> Optional[str]:
if config is None:
return None
project_root = pathlib.Path.cwd()
if file_path.is_relative_to(project_root):
rel_path = file_path.relative_to(pathlib.Path.cwd())
else:
rel_path = file_path
for pattern, modeline in config.items():
if fnmatch.fnmatch(str(rel_path), pattern):
return modeline
return None
def on_buffer(self) -> None:
config = self.load_config()
logger.info(dict(config=config))
buf_name = vim.current.buffer.name
if not buf_name:
# unnamed buffers have no path to match against
return
file_path = pathlib.Path(buf_name).resolve()
entry = self.find_entry(file_path, config=config)
logger.info(dict(modeline=entry))
if entry is not None:
vim.command('silent! {}'.format(entry))
# vim.command("echo '{}'".format('applied %s' % entry))
# raise NotImplementedError
# EditorConfigModeline.singleton()
def init():
EditorConfigModeline.singleton()
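For reference, the git invocation assembled in the grep helper above corresponds to a shell command along these lines (pattern and extensions are illustrative):

git grep -n --recurse-submodules -P 'def \w+' -- '**/*.py' '**/*.vim'

And a hypothetical .editorconfig that EditorConfigModeline would pick up; note that vim_modeline is this plugin's own extension, not part of the EditorConfig standard, and every value must pass validate_modeline:

# .editorconfig (illustrative)
[*.py]
vim_modeline = set et sw=4 sts=4 ts=4
[Makefile]
vim_modeline = set noet ts=8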

@@ -1,17 +0,0 @@
import vim
import logging
logger = logging.getLogger(__name__)
class Vim:
@classmethod
def run_command(cls, cmd) -> list[str]:
logger.info(dict(cmd=cmd))
output: list[str] = []
for line in cmd.splitlines():
if line.strip() == '':
continue
output.append(vim.command(line))
return output

@@ -7,8 +7,7 @@ if has('python3')
 source $HOME/.py3.vimrc
 endif
-" filetype plugin indent on
-filetype plugin off
+filetype plugin indent on
 set number
 set noswapfile
@@ -31,14 +30,7 @@ colorscheme morning
 hi MatchParen guifg=white guibg=black gui=NONE ctermfg=1 ctermbg=0
 function! MakeSession()
-" let b:sessiondir = '.vim/'
-if exists('g:sessiondir')
-let b:sessiondir = g:sessiondir
-else
-let b:sessiondir = getcwd() . '/' . '.vim/'
-let g:sessiondir = b:sessiondir
-endif
+let b:sessiondir = '.vim/'
 if exists('g:session_name')
 let b:session_name = g:session_name
 else
@@ -51,17 +43,11 @@ function! MakeSession()
 endif
 let b:filename = b:sessiondir . '/' . b:session_name . '.vim'
 exe "mksession! " . b:filename
-echo 'saved ' . b:sessiondir . ' ' . b:session_name
+echo 'saved ' . b:session_name
 endfunction
 function! LoadSession()
-if exists('g:sessiondir')
-let b:sessiondir = g:sessiondir
-else
-let b:sessiondir = getcwd() . '/' . '.vim/'
-let g:sessiondir = b:sessiondir
-endif
+let b:sessiondir = '.vim/'
 if exists('g:session_name')
 let b:session_name = g:session_name
 else
@@ -101,5 +87,3 @@ map <Leader>i5 :set sw=2 sts=2 ts=2 noet ai ci<CR>
 set foldmethod=indent
 set nofoldenable
 map <Leader>e :e #<cR>
-map <C-p> :python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().switch_buffer()<CR>
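For context, both session helpers key off g:session_name; a minimal usage sketch (the session name is illustrative):

:let g:session_name = 'pr34'
:call MakeSession()
" later, in a fresh instance started from the same directory:
:call LoadSession()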

@@ -5,13 +5,9 @@ ar = 'emar'
 windres = '/usr/bin/false'
 ; exe_wrapper = '/usr/bin/false'
 exe_wrapper = 'node'
-pkg-config = 'pkg-config'
 [host_machine]
 system = 'linux'
 cpu_family = 'x86_64'
 cpu = 'x86_64'
 endian = 'little'
-[properties]
-needs_exe_wrapper = true
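A cross file like the one above is consumed by meson at setup time; a hypothetical invocation (build directory and file path are illustrative, not taken from this repository):

meson setup build-wasm --cross-file docker/emscripten/meson-cross.ini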

@@ -1,57 +0,0 @@
# This file is part of systemd.
#
# systemd is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Entries in this file show the compile time defaults. Local configuration
# should be created by either modifying this file (or a copy of it placed in
# /etc/ if the original file is shipped in /usr/), or by creating "drop-ins" in
# the /etc/systemd/logind.conf.d/ directory. The latter is generally
# recommended. Defaults can be restored by simply deleting the main
# configuration file and all drop-ins located in /etc/.
#
# Use 'systemd-analyze cat-config systemd/logind.conf' to display the full config.
#
# See logind.conf(5) for details.
[Login]
#NAutoVTs=6
#ReserveVT=6
#KillUserProcesses=no
#KillOnlyUsers=
#KillExcludeUsers=root
#InhibitDelayMaxSec=5
#UserStopDelaySec=10
#HandlePowerKey=hibernate
HandlePowerKey=suspend
#HandlePowerKeyLongPress=ignore
#HandleRebootKey=reboot
#HandleRebootKeyLongPress=poweroff
#HandleSuspendKey=suspend
#HandleSuspendKeyLongPress=hibernate
#HandleHibernateKey=hibernate
#HandleHibernateKeyLongPress=ignore
#HandleLidSwitchExternalPower=suspend
#HandleLidSwitchDocked=ignore
#PowerKeyIgnoreInhibited=no
#SuspendKeyIgnoreInhibited=no
#HibernateKeyIgnoreInhibited=no
#LidSwitchIgnoreInhibited=yes
#RebootKeyIgnoreInhibited=no
#HoldoffTimeoutSec=30s
#IdleAction=ignore
#IdleActionSec=30min
#RuntimeDirectorySize=10%
#RuntimeDirectoryInodesMax=
#RemoveIPC=yes
#InhibitorsMax=8192
#SessionsMax=8192
#StopIdleSessionSec=infinity
HandleLidSwitch=suspend
# for sway
#HandleLidSwitch=none
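The removed file only overrode HandlePowerKey and HandleLidSwitch; the drop-in route its own header comments recommend would be, hypothetically:

# /etc/systemd/logind.conf.d/10-suspend.conf (illustrative drop-in)
[Login]
HandlePowerKey=suspend
HandleLidSwitch=suspend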

@@ -1 +0,0 @@
ACTION=="add", SUBSYSTEM=="net", KERNEL=="tun0", TAG+="systemd"

@@ -1,11 +0,0 @@
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/pci0000:00/0000:00:1b.0/hdaudioC0D0/leds/hda::mute", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/platform/applesmc.768/leds/smc::kbd_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
# udevadm info --attribute-walk --path=/sys/devices/platform/applesmc.768/
# udevadm trigger --action=add --verbose --parent-match /devices/platform/applesmc.768/
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="ls -allh /sys$devpath/", OPTIONS="log_level=debug"
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="/usr/bin/ls -allh /sys$devpath/", OPTIONS="log_level=debug"
ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
#KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", MODE="0660", TAG+="uaccess", OPTIONS="log_level=debug", OPTIONS+="watch"
ACTION=="add|change", DEVPATH=="/class/backlight/intel_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
ACTION=="add|change", KERNEL=="cpu1", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
#ACTION=="add|change", KERNEL=="cpu[0-9]", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"

@@ -1 +0,0 @@
ACTION=="add", SUBSYSTEM=="scsi_host", KERNEL=="host*", ATTR{link_power_management_policy}="max_performance", OPTIONS="log_level=debug"

@@ -1,13 +0,0 @@
#!/usr/bin/bash
commands gnome-shortcuts \
-a \
'powersave' \
'commands desktop-services --cpufreq-action powersave' \
'<Shift><Alt>1'
commands gnome-shortcuts \
-a \
'performance' \
'commands desktop-services --cpufreq-action performance' \
'<Shift><Alt>2'

@@ -1,330 +0,0 @@
#!/usr/bin/env python3
# vi: filetype=python
import gi
gi.require_version("Gtk", "4.0")
from gi.repository import Gtk, Gio, GLib, GObject
import subprocess
import shlex
import threading
import uuid
import argparse
import logging
# CLI / Logging
parser = argparse.ArgumentParser(description="Systemd Scope Manager (GTK4 ColumnView)")
parser.add_argument("--log-level", "-l", choices=["DEBUG","INFO","WARNING","ERROR","CRITICAL"], default="INFO")
parser.add_argument("--values-mode", "-m", choices=["raw","human"], default="human",
help="Display memory / CPU values raw or humanreadable")
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log_level.upper()),
format="%(asctime)s %(name)s %(levelname)s: %(message)s")
logger = logging.getLogger(__name__)
# Helpers
def human_bytes(value: int) -> str:
for unit in ("B","KB","MB","GB","TB"):
if value < 1024:
return f"{value:.1f}{unit}"
value /= 1024
return f"{value:.1f}PB"
def human_time(nsec: int) -> str:
sec = nsec / 1_000_000_000
if sec < 60:
return f"{sec:.1f}s"
minutes = sec / 60
if minutes < 60:
return f"{minutes:.1f}m"
return f"{minutes/60:.1f}h"
def run_systemctl_show(unit: str) -> dict:
cmd = [
"systemctl", "--user", "show", unit,
"--property=MemoryCurrent,MemorySwapCurrent,CPUUsageNSec,ActiveState,Restart"
]
try:
res = subprocess.run(cmd, capture_output=True, text=True, check=True)
props = {}
for line in res.stdout.splitlines():
if "=" in line:
k, v = line.split("=", 1)
props[k.strip()] = v.strip()
return props
except Exception:
logger.exception("systemctl show failed for %s", unit)
return {}
# Data row
class ScopeRow(GObject.Object):
__gtype_name__ = "ScopeRow"
unit = GObject.Property(type=str)
cli = GObject.Property(type=str)
mem = GObject.Property(type=str)
swap = GObject.Property(type=str)
cpu = GObject.Property(type=str)
state= GObject.Property(type=str)
def __init__(self, unit, cli, mem, swap, cpu, state):
super().__init__()
self.unit = unit
self.cli = cli
self.mem = mem
self.swap = swap
self.cpu = cpu
self.state = state
# Main Window
class ScopeManagerWindow(Gtk.ApplicationWindow):
def __init__(self, app):
super().__init__(application=app, title="Systemd Scope Manager")
self.set_default_size(1000, 500)
self.values_mode = args.values_mode
self.scopes = {}
self.model = Gio.ListStore(item_type=ScopeRow)
self.sel = Gtk.SingleSelection.new(self.model)
self.view = Gtk.ColumnView.new(self.sel)
self.view.set_reorderable(True)
self.view.set_show_row_separators(True)
cols = [
("Unit", "unit"),
("CLI", "cli"),
("Memory", "mem"),
("Swap", "swap"),
("CPU", "cpu"),
("State", "state"),
]
for title, prop_name in cols:
factory = Gtk.SignalListItemFactory()
factory.connect("setup", self._factory_setup_label)
factory.connect("bind", self._make_factory_bind(prop_name))
col = Gtk.ColumnViewColumn()
col.set_title(title)
col.set_factory(factory)
col.set_resizable(True)
col.set_expand(True)
self.view.append_column(col)
# Actions column
action_factory = Gtk.SignalListItemFactory()
action_factory.connect("setup", self._factory_setup_actions)
action_factory.connect("bind", self._factory_bind_actions)
act_col = Gtk.ColumnViewColumn()
act_col.set_title("⋮")
act_col.set_factory(action_factory)
act_col.set_resizable(False)
act_col.set_expand(False)
self.view.append_column(act_col)
# Input area
self.cmd_entry = Gtk.Entry()
self.cmd_entry.set_placeholder_text("Command to run in new scope")
run_btn = Gtk.Button(label="Run")
run_btn.connect("clicked", self.on_run_clicked)
prop_btn = Gtk.Button(label="Edit Property")
prop_btn.connect("clicked", self.on_edit_property)
input_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=6)
input_box.append(self.cmd_entry)
input_box.append(run_btn)
input_box.append(prop_btn)
scrolled = Gtk.ScrolledWindow()
scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
scrolled.set_propagate_natural_width(True)
scrolled.set_propagate_natural_height(True)
scrolled.set_child(self.view)
vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
vbox.set_vexpand(True)
vbox.append(input_box)
vbox.append(scrolled)
self.set_child(vbox)
GLib.timeout_add_seconds(2, self.refresh_scopes)
def _factory_setup_label(self, factory, list_item):
lbl = Gtk.Label()
list_item.set_child(lbl)
def _make_factory_bind(self, prop_name):
def bind(factory, list_item):
row = list_item.get_item()
lbl = list_item.get_child()
lbl.set_text(getattr(row, prop_name))
row.connect(f"notify::{prop_name}", lambda obj, pspec: lbl.set_text(getattr(obj, prop_name)))
return bind
def _factory_setup_actions(self, factory, list_item):
btn = Gtk.MenuButton()
btn.set_icon_name("open-menu-symbolic")
# style class if needed:
btn.get_style_context().add_class("flat")
list_item.set_child(btn)
def _factory_bind_actions(self, factory, list_item):
row = list_item.get_item()
btn = list_item.get_child()
menu = Gio.Menu()
# the actions below are registered on the window, so use the 'win.' prefix, not 'app.'
menu.append("Stop", f"win.stop_{row.unit}")
menu.append("Restart", f"win.restart_{row.unit}")
menu.append("Toggle AutoRestart", f"win.toggle_{row.unit}")
btn.set_menu_model(menu)
# create actions
self._ensure_row_actions(row)
def _ensure_row_actions(self, row):
unit = row.unit
# Stop
act_stop = Gio.SimpleAction.new(f"stop_{unit}", None)
act_stop.connect("activate", lambda a, v, r=row: self.menu_action("Stop", r))
self.add_action(act_stop)
# Restart
act_restart = Gio.SimpleAction.new(f"restart_{unit}", None)
act_restart.connect("activate", lambda a, v, r=row: self.menu_action("Restart", r))
self.add_action(act_restart)
# Toggle
act_toggle = Gio.SimpleAction.new(f"toggle_{unit}", None)
act_toggle.connect("activate", lambda a, v, r=row: self.menu_action("Toggle AutoRestart", r))
self.add_action(act_toggle)
def on_run_clicked(self, button):
cmd = self.cmd_entry.get_text().strip()
if not cmd:
return
unit = f"app-{uuid.uuid4().int >> 64}.scope"
argv = ["systemd-run", "--user", "--scope", "--unit", unit,
"-p", "MemoryAccounting=yes", "-p", "CPUAccounting=yes"] + shlex.split(cmd)
logger.info("Starting scope: %s", " ".join(argv))
def worker():
try:
p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
out, err = p.communicate()
if out:
logger.debug("systemd-run stdout [%s]: %s", unit, out.strip())
if err:
logger.debug("systemd-run stderr [%s]: %s", unit, err.strip())
except Exception:
logger.exception("Failed to run systemd-run for %s", unit)
threading.Thread(target=worker, daemon=True).start()
self.scopes[unit] = cmd
self.cmd_entry.set_text("")
def on_edit_property(self, button):
idx = self.sel.get_selected()
# Gtk.SingleSelection.get_selected() returns GTK_INVALID_LIST_POSITION, not -1
if idx == Gtk.INVALID_LIST_POSITION:
return
row = self.model.get_item(idx)
dialog = Gtk.Dialog(transient_for=self, modal=True, title="Set systemd property")
content = dialog.get_content_area()
# GTK4 split the old 'margin' property into per-side margins
grid = Gtk.Grid(row_spacing=6, column_spacing=6, margin_top=10, margin_bottom=10, margin_start=10, margin_end=10)
content.append(grid)
lbl1 = Gtk.Label(label="Property (e.g. MemoryMax):")
entry1 = Gtk.Entry()
lbl2 = Gtk.Label(label="Value (e.g. 512M):")
entry2 = Gtk.Entry()
runtime_chk = Gtk.CheckButton(label="Runtime only")
grid.attach(lbl1, 0,0,1,1)
grid.attach(entry1, 1,0,1,1)
grid.attach(lbl2, 0,1,1,1)
grid.attach(entry2, 1,1,1,1)
grid.attach(runtime_chk,0,2,2,1)
dialog.add_button("OK", Gtk.ResponseType.OK)
dialog.add_button("Cancel", Gtk.ResponseType.CANCEL)
dialog.show()
resp = dialog.run()
if resp == Gtk.ResponseType.OK:
prop = entry1.get_text().strip()
val = entry2.get_text().strip()
rt = runtime_chk.get_active()
dialog.destroy()
self.set_property(row.unit, prop, val, rt)
else:
dialog.destroy()
# renamed from set_property to avoid clobbering GObject.Object.set_property
def set_unit_property(self, unit, prop, val, runtime_flag):
cmd = ["systemctl", "--user", "set-property"]
if runtime_flag:
cmd.append("--runtime")
cmd += [unit, f"{prop}={val}"]
logger.info("Setting %s=%s on %s (runtime=%s)", prop, val, unit, runtime_flag)
try:
res = subprocess.run(cmd, capture_output=True, text=True)
if res.stdout:
logger.debug("set-property stdout: %s", res.stdout.strip())
if res.stderr:
logger.debug("set-property stderr: %s", res.stderr.strip())
except Exception:
logger.exception("Failed set-property for %s", unit)
def menu_action(self, label, row):
unit = row.unit
try:
if label == "Stop":
subprocess.run(["systemctl","--user","stop",unit])
elif label == "Restart":
subprocess.run(["systemctl","--user","restart",unit])
elif label == "Toggle AutoRestart":
props = run_systemctl_show(unit)
current = props.get("Restart","no")
new = "no" if current != "no" else "always"
subprocess.run(["systemctl","--user","set-property",unit,f"Restart={new}"])
except Exception:
logger.exception("Action %s failed on %s", label, unit)
def refresh_scopes(self):
self.model.remove_all()
for unit, cli in self.scopes.items():
props = run_systemctl_show(unit)
mem = int(props.get("MemoryCurrent", "0"))
swap = int(props.get("MemorySwapCurrent","0"))
cpu = int(props.get("CPUUsageNSec", "0"))
state = props.get("ActiveState","unknown")
if self.values_mode == "human":
mem_s = human_bytes(mem)
swap_s = human_bytes(swap)
cpu_s = human_time(cpu)
else:
mem_s = str(mem)
swap_s = str(swap)
cpu_s = str(cpu)
row = ScopeRow(unit, cli, mem_s, swap_s, cpu_s, state)
self.model.append(row)
logger.debug("Refreshed %d scopes", self.model.get_n_items())
return True
# Application
class ScopeManagerApp(Gtk.Application):
def __init__(self):
super().__init__(application_id="org.systemd.ScopeManager")
self.connect("activate", self.on_activate)
def on_activate(self, app):
win = ScopeManagerWindow(self)
win.present()
def main():
app = ScopeManagerApp()
return app.run()
if __name__ == "__main__":
import sys
sys.exit(main())
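The removed tool is essentially a GUI over transient systemd scopes; the same lifecycle from a shell, for reference (unit name illustrative; systemd-run --scope stays in the foreground, which is why the code launches it from a worker thread):

systemd-run --user --scope --unit app-demo.scope -p MemoryAccounting=yes -p CPUAccounting=yes sleep 300 &
systemctl --user show app-demo.scope --property=MemoryCurrent,CPUUsageNSec,ActiveState
systemctl --user set-property --runtime app-demo.scope MemoryMax=512M
systemctl --user stop app-demo.scope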

@@ -1,116 +0,0 @@
#!/usr/bin/python3
# vi: filetype=python
import re
import sys
import os
import time
import subprocess
import argparse
import logging
from typing import (Any,)
logger = logging.getLogger(__name__)
def run() -> None:
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument(
'--device',
)
options = parser.parse_args()
DEVICES: dict[str, Any] = dict(
applesmc=dict(
devpath='sys/devices/platform/applesmc.768',
node='/sys/devices/platform/applesmc.768/fan1_manual',
cmd=r'''
chown root:fan /sys/devices/platform/applesmc.768/fan1_*
chmod g+w /sys/devices/platform/applesmc.768/fan1_*
''',
),
intel_pstate=dict(
devpath=r'/?sys/devices/system/cpu/cpu0',
node='/sys/devices/system/cpu/intel_pstate/no_turbo',
cmd=r'''
chown root:fan /sys/devices/system/cpu/intel_pstate/no_turbo
chown root:fan /sys/devices/system/cpu/intel_pstate/max_perf_pct
#chown root:fan /sys/devices/system/cpu/intel_pstate/status
chmod g+w /sys/devices/system/cpu/intel_pstate/no_turbo
chmod g+w /sys/devices/system/cpu/intel_pstate/max_perf_pct
#chmod g+w /sys/devices/system/cpu/intel_pstate/status
echo passive > /sys/devices/system/cpu/intel_pstate/status
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
''',
),
amd_pstate=dict(
devpath=r'/?sys/devices/system/cpu/cpu1',
node='/sys/devices/system/cpu/amd_pstate/status',
cmd=r'''
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/boost
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/boost
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
''',
),
#governor=dict(
# devpath=r'/?sys/devices/system/cpu/cpu(\d+)',
# node=r'/sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor',
# cmd=r'''
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
# ''',
#),
)
processed : int = 0
logger.info(dict(device=options.device))
for k, v in DEVICES.items():
devpath = re.compile(v['devpath'])
devpath_m = devpath.match(options.device)
if devpath_m is None:
continue
node_2 = v['node'].format(*devpath_m.groups())
# logger.info(dict(devpath_m=devpath_m, node=node_2))
while not os.path.exists(node_2):
#continue
time.sleep(1)
cmd_2 = v['cmd'].format(*devpath_m.groups())
subprocess.check_call(cmd_2, shell=True)
logger.info(dict(
devpath_m=devpath_m,
node_2=node_2,
cmd_2=cmd_2,
msg='processed',
label=k,
))
processed += 1
if processed == 0:
raise NotImplementedError
if __name__ == '__main__':
run()
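The udev rules earlier tag devices with online.fxreader.pr34.udev@$devnode.service; a hypothetical template unit wiring that to this script (the installed script path is an assumption):

# online.fxreader.pr34.udev@.service (sketch)
[Unit]
Description=Adjust sysfs permissions for %I

[Service]
Type=oneshot
ExecStart=/usr/local/bin/pr34-udev --device %I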

@@ -1,73 +0,0 @@
#!/usr/bin/python3
#vi syntax=python
import subprocess
import sys
import logging
from typing import (Optional,)
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
if sys.argv[1] == 'before-suspend':
logger.info('before-suspend started')
subprocess.check_call(['nmcli', 'radio', 'wifi', 'off'])
#subprocess.check_call(['modprobe', '-r', 'atkbd',])
subprocess.check_call(['modprobe', '-r', 'ideapad_laptop',])
subprocess.check_call(['modprobe', '-r', 'i8042',])
logger.info('before-suspend done')
elif sys.argv[1] == 'after-suspend':
logger.info('after-suspend started')
subprocess.check_call(['modprobe', 'i8042',])
subprocess.check_call(['modprobe', 'ideapad_laptop',])
#subprocess.check_call(['modprobe', 'atkbd',])
subprocess.check_call(['nmcli', 'radio', 'wifi', 'on'])
#subprocess.check_call(['rfkill', 'unblock', '109'])
#subprocess.check_call(r'''
# # systemctl restart wg-quick@siarhei-hp.service
#''', shell=True,)
logger.info('after-suspend done')
elif sys.argv[1] == 'lid-switch':
import evdev
import time
import io
lid = evdev.UInput({5 : [0]}, name="virtual-lid-switch")
last_state : Optional[bool] = None
try:
while True:
try:
with io.open('/proc/acpi/button/lid/LID0/state', 'r') as f:
value = f.read()
except Exception:
logger.exception('')
value = None
time.sleep(1)
continue
if value is not None:
if 'open' in value:
is_opened = True
else:
is_opened = False
if last_state != is_opened:
if is_opened:
logger.info(dict(msg='lid opened'))
lid.write(evdev.ecodes.EV_SW, evdev.ecodes.SW_LID, 0)
lid.syn()  # emit EV_SYN so consumers see the switch change
else:
logger.info(dict(msg='lid closed'))
lid.write(evdev.ecodes.EV_SW, evdev.ecodes.SW_LID, 1)
lid.syn()
last_state = is_opened
time.sleep(0.1)
finally:
lid.close()
else:
raise NotImplementedError
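A hypothetical unit that would drive the before/after handlers around suspend (script path assumed; this is the standard sleep.target hook pattern, not something shown in this diff):

# pr34-suspend-hook.service (sketch)
[Unit]
Description=Reload input modules around suspend
Before=sleep.target
StopWhenUnneeded=yes

[Service]
Type=oneshot
RemainAfterExit=yes
ExecStart=/usr/local/bin/pr34-suspend before-suspend
ExecStop=/usr/local/bin/pr34-suspend after-suspend

[Install]
WantedBy=sleep.target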

@@ -53,10 +53,7 @@ def js(argv: list[str]) -> int:
 '--project-directory',
 Settings.settings().project_root,
 '-f',
-Settings.settings().project_root
-/ 'docker'
-/ 'js'
-/ 'docker-compose.yml',
+Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml',
 *argv,
 ]
 )
@@ -70,15 +67,7 @@ def env(
 env_path = Settings.settings().env_path
 if not env_path.exists():
-subprocess.check_call(
-[
-sys.executable,
-'-m',
-'venv',
-'--system-site-packages',
-str(env_path),
-]
-)
+subprocess.check_call([sys.executable, '-m', 'venv', '--system-site-packages', str(env_path)])
 subprocess.check_call(
 [
@@ -244,9 +233,7 @@ Command: TypeAlias = Literal[
 def run(argv: Optional[list[str]] = None) -> None:
 logging.basicConfig(
 level=logging.INFO,
-format=(
-'%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
-),
+format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
 )
 if argv is None:

@@ -12,10 +12,6 @@ import dataclasses
 from typing import (
 Optional,
-# override,
-)
-from typing_extensions import (
 override,
 )
@@ -30,7 +26,7 @@ logging_setup()
 logger = logging.getLogger(__name__)
-class Command(enum.Enum):
+class Command(enum.StrEnum):
 mypy = 'mypy'
 pyright = 'pyright'
 ruff = 'ruff'
@@ -56,18 +52,8 @@ class CLI(_cli.CLI):
 self._projects: dict[str, _cli.Project] = {
 'online.fxreader.pr34': _cli.Project(
 source_dir=self.settings.base_dir / 'python',
-build_dir=self.settings.base_dir
-/ 'tmp'
-/ 'online'
-/ 'fxreader'
-/ 'pr34'
-/ 'build',
-dest_dir=self.settings.base_dir
-/ 'tmp'
-/ 'online'
-/ 'fxreader'
-/ 'pr34'
-/ 'install',
+build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build',
+dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install',
 meson_path=self.settings.base_dir / 'python' / 'meson.build',
 )
 }
@@ -127,9 +113,7 @@ class CLI(abc.ABC):
 parser = argparse.ArgumentParser()
 parser.add_argument('command', choices=[o.value for o in Command])
-parser.add_argument(
-'-p', '--project', choices=[o for o in self.projects]
-)
+parser.add_argument('-p', '--project', choices=[o for o in self.projects])
 parser.add_argument(
 '-o',
 '--output_dir',

Binary file not shown.

@@ -1,7 +1,5 @@
 #!/usr/bin/env python3
 import glob
-import importlib
-import json
 import io
 import tempfile
 import dataclasses
@@ -10,21 +8,15 @@ import sys
 import subprocess
 import os
 import logging
-import typing
 from typing import (
 Optional,
 Any,
-cast,
-Type,
-TypeVar,
-Callable,
 )
 from typing_extensions import (
 Self,
 BinaryIO,
-overload,
 )
 logger = logging.getLogger(__name__)
@@ -32,23 +24,17 @@ logger = logging.getLogger(__name__)
 def toml_load(f: BinaryIO) -> Any:
 try:
-tomllib = importlib.import_module('tomllib')
-return cast(
-Callable[[Any], Any],
-getattr(
-tomllib,
-'load',
-),
-)(f)
-except ModuleNotFoundError:
+import tomllib
+return tomllib.load(f)
+except:
 pass
 try:
 import tomli
 return tomli.load(f)
-except ModuleNotFoundError:
+except:
 pass
 raise NotImplementedError
@@ -56,142 +42,13 @@ def toml_load(f: BinaryIO) -> Any:
 @dataclasses.dataclass
 class PyProject:
-@dataclasses.dataclass
-class Module:
-name: str
-meson: Optional[pathlib.Path] = None
-tool: dict[str, Any] = dataclasses.field(default_factory=lambda: dict())
 path: pathlib.Path
 dependencies: dict[str, list[str]]
 early_features: Optional[list[str]] = None
 pip_find_links: Optional[list[pathlib.Path]] = None
 runtime_libdirs: Optional[list[pathlib.Path]] = None
 runtime_preload: Optional[list[pathlib.Path]] = None
-@dataclasses.dataclass
-class ThirdPartyRoot:
-package: Optional[str] = None
-module_root: Optional[str] = None
-path: Optional[str] = None
-third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
-default_factory=lambda: [],
-)
-requirements: dict[str, pathlib.Path] = dataclasses.field(
-default_factory=lambda: dict()
-)
-modules: list[Module] = dataclasses.field(
-default_factory=lambda: [],
-)
-tool: dict[str, Any] = dataclasses.field(
-default_factory=lambda: dict(),
-)
-Key = TypeVar('Key')
-Value = TypeVar('Value')
-@overload
-def check_dict(
-value: Any,
-KT: Type[Key],
-VT: Type[Value],
-) -> dict[Key, Value]: ...
-@overload
-def check_dict(
-value: Any,
-KT: Type[Key],
-) -> dict[Key, Any]: ...
-def check_dict(
-value: Any,
-KT: Type[Key],
-VT: Optional[Type[Value]] = None,
-) -> dict[Key, Value]:
-assert isinstance(value, dict)
-value2 = cast(dict[Any, Any], value)
-VT_class: Optional[type[Any]] = None
-if not VT is None:
-if not typing.get_origin(VT) is None:
-VT_class = cast(type[Any], typing.get_origin(VT))
-else:
-VT_class = VT
-assert all(
-[
-isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
-for k, v in value2.items()
-]
-)
-if VT is None:
-return cast(
-dict[Key, Any],
-value,
-)
-else:
-return cast(
-dict[Key, Value],
-value,
-)
-@overload
-def check_list(
-value: Any,
-VT: Type[Value],
-) -> list[Value]: ...
-@overload
-def check_list(
-value: Any,
-) -> list[Any]: ...
-def check_list(
-value: Any,
-VT: Optional[Type[Value]] = None,
-) -> list[Value] | list[Any]:
-assert isinstance(value, list)
-value2 = cast(list[Any], value)
-assert all([(VT is None or isinstance(o, VT)) for o in value2])
-if VT is None:
-return cast(
-list[Any],
-value,
-)
-else:
-return cast(
-list[Value],
-value,
-)
-def check_type(
-value: Any,
-VT: Type[Value],
-attribute_name: Optional[str] = None,
-) -> Value:
-if attribute_name:
-attribute_value = getattr(value, attribute_name)
-assert isinstance(attribute_value, VT)
-return attribute_value
-else:
-assert isinstance(value, VT)
-return value
+requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
 def pyproject_load(
@@ -209,21 +66,9 @@ def pyproject_load(
 if 'optional-dependencies' in content['project']:
 assert isinstance(content['project']['optional-dependencies'], dict)
-for k, v in check_dict(
-check_dict(
-check_dict(
-content,
-str,
-# Any,
-)['project'],
-str,
-# Any,
-)['optional-dependencies'],
-str,
-list[Any],
-).items():
-# assert isinstance(v, list)
-# assert isinstance(k, str)
+for k, v in content['project']['optional-dependencies'].items():
+assert isinstance(v, list)
+assert isinstance(k, str)
 dependencies[k] = v
@@ -234,99 +79,36 @@ def pyproject_load(
 tool_name = 'online.fxreader.pr34'.replace('.', '-')
-if 'tool' in content:
-res.tool = check_dict(
-content['tool'],
-str,
-)
-if (
-'tool' in content
-and isinstance(content['tool'], dict)
-and tool_name in content['tool']
-and isinstance(content['tool'][tool_name], dict)
-):
-pr34_tool = check_dict(
-check_dict(
-content['tool'],
-str,
-)[tool_name],
-str,
-)
-if 'early_features' in pr34_tool:
-res.early_features = pr34_tool['early_features']
-if 'pip_find_links' in pr34_tool:
-res.pip_find_links = [
-d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
-]
-if 'runtime_libdirs' in pr34_tool:
+if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
+if 'early_features' in content['tool'][tool_name]:
+res.early_features = content['tool'][tool_name]['early_features']
+if 'pip_find_links' in content['tool'][tool_name]:
+res.pip_find_links = [d.parent / pathlib.Path(o) for o in content['tool'][tool_name]['pip_find_links']]
+if 'runtime_libdirs' in content['tool'][tool_name]:
 res.runtime_libdirs = [
 d.parent / pathlib.Path(o)
 # pathlib.Path(o)
-for o in check_list(pr34_tool['runtime_libdirs'], str)
+for o in content['tool'][tool_name]['runtime_libdirs']
 ]
-if 'runtime_preload' in pr34_tool:
+if 'runtime_preload' in content['tool'][tool_name]:
 res.runtime_preload = [
 d.parent / pathlib.Path(o)
 # pathlib.Path(o)
-for o in check_list(pr34_tool['runtime_preload'], str)
+for o in content['tool'][tool_name]['runtime_preload']
 ]
-if 'third_party_roots' in pr34_tool:
-for o in check_list(pr34_tool['third_party_roots']):
-o2 = check_dict(o, str, str)
-assert all(
-[k in {'package', 'module_root', 'path'} for k in o2]
-)
-res.third_party_roots.append(
-PyProject.ThirdPartyRoot(
-package=o2.get('package'),
-module_root=o2.get('module_root'),
-path=o2.get('path'),
-)
-)
-if 'requirements' in pr34_tool:
+if 'requirements' in content['tool'][tool_name]:
+assert isinstance(content['tool'][tool_name]['requirements'], dict)
 res.requirements = {
 k: d.parent / pathlib.Path(v)
 # pathlib.Path(o)
-for k, v in check_dict(
-pr34_tool['requirements'], str, str
-).items()
+for k, v in content['tool'][tool_name]['requirements'].items()
 }
-if 'modules' in pr34_tool:
-modules = check_list(pr34_tool['modules'])
-# res.modules = []
-for o in modules:
-assert isinstance(o, dict)
-assert 'name' in o and isinstance(o['name'], str)
-module = PyProject.Module(
-name=o['name'],
-)
-if 'meson' in o:
-assert 'meson' in o and isinstance(o['meson'], str)
-module.meson = pathlib.Path(o['meson'])
-if 'tool' in o:
-module.tool.update(
-check_dict(
-o['tool'],
-str,
-)
-)
-res.modules.append(module)
 return res
@@ -345,16 +127,10 @@ class BootstrapSettings:
 ),
 ).strip()
 )
-pip_check_conflicts: Optional[bool] = dataclasses.field(
-default_factory=lambda: os.environ.get(
-'PIP_CHECK_CONFLICTS', json.dumps(True)
-)
-in [json.dumps(True)],
-)
 uv_args: list[str] = dataclasses.field(
 default_factory=lambda: os.environ.get(
 'UV_ARGS',
-'--offline -U',
+'--offline',
 ).split(),
 )
@@ -366,12 +142,7 @@ class BootstrapSettings:
 if base_dir is None:
 base_dir = pathlib.Path.cwd()
-env_path: Optional[pathlib.Path] = None
-if 'ENV_PATH' in os.environ:
-env_path = pathlib.Path(os.environ['ENV_PATH'])
-else:
 env_path = base_dir / '.venv'
 python_path = env_path / 'bin' / 'python3'
 return cls(
@@ -381,49 +152,6 @@ class BootstrapSettings:
 )
-class requirements_name_get_t:
-@dataclasses.dataclass
-class res_t:
-not_compiled: pathlib.Path
-compiled: pathlib.Path
-name: str
-def requirements_name_get(
-source_dir: pathlib.Path,
-python_version: Optional[str],
-features: list[str],
-requirements: dict[str, pathlib.Path],
-) -> requirements_name_get_t.res_t:
-requirements_python_version: Optional[str] = None
-if not python_version is None:
-requirements_python_version = python_version.replace('.', '_')
-requirements_name = '_'.join(sorted(features))
-if requirements_python_version:
-requirements_name += '_' + requirements_python_version
-requirements_path: Optional[pathlib.Path] = None
-if requirements_name in requirements:
-requirements_path = requirements[requirements_name]
-else:
-requirements_path = source_dir / 'requirements.txt'
-requirements_path_in = requirements_path.parent / (
-requirements_path.stem + '.in'
-)
-requirements_in: list[str] = []
-return requirements_name_get_t.res_t(
-not_compiled=requirements_path_in,
-compiled=requirements_path,
-name=requirements_name,
-)
 def env_bootstrap(
 bootstrap_settings: BootstrapSettings,
 pyproject: PyProject,
@@ -441,7 +169,7 @@ def env_bootstrap(
 ]
 for o in pip_find_links
 ],
-cast(list[str], []),
+[],
 )
 features: list[str] = []
@@ -449,29 +177,31 @@ def env_bootstrap(
 if pyproject.early_features:
 features.extend(pyproject.early_features)
-requirements_name_get_res = requirements_name_get(
-python_version=bootstrap_settings.python_version,
-features=features,
-requirements=pyproject.requirements,
-source_dir=pyproject.path.parent,
-)
-requirements_path = requirements_name_get_res.compiled
+requirements_python_version: Optional[str] = None
+if not bootstrap_settings.python_version is None:
+requirements_python_version = bootstrap_settings.python_version.replace('.', '_')
+requirements_name = '_'.join(sorted(features))
+if requirements_python_version:
+requirements_name += '_' + requirements_python_version
+requirements_path: Optional[pathlib.Path] = None
+if requirements_name in pyproject.requirements:
+requirements_path = pyproject.requirements[requirements_name]
+else:
+requirements_path = pyproject.path.parent / 'requirements.txt'
 requirements_in: list[str] = []
-requirements_in.extend(
-['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
-)
+requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
 if pyproject.early_features:
-early_dependencies = sum(
-[pyproject.dependencies[o] for o in pyproject.early_features],
-cast(list[str], []),
-)
+early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], [])
 logger.info(
 dict(
-requirements_name_get_res=requirements_name_get_res,
 early_dependencies=early_dependencies,
 )
 )
@@ -488,25 +218,6 @@ def env_bootstrap(
 # *early_dependencies,
 # ])
-uv_python_version: list[str] = []
-venv_python_version: list[str] = []
-if not bootstrap_settings.python_version is None:
-uv_python_version.extend(
-[
-# '-p',
-'--python-version',
-bootstrap_settings.python_version,
-]
-)
-venv_python_version.extend(
-[
-'-p',
-# '--python-version',
-bootstrap_settings.python_version,
-]
-)
 if not requirements_path.exists():
 with tempfile.NamedTemporaryFile(
 mode='w',
@@ -521,7 +232,6 @@ def env_bootstrap(
 'uv',
 'pip',
 'compile',
-*uv_python_version,
 '--generate-hashes',
 *pip_find_links_args,
 # '-p',
@@ -533,18 +243,24 @@ def env_bootstrap(
 ]
 )
+uv_python_version: list[str] = []
+if not bootstrap_settings.python_version is None:
+uv_python_version.extend(
+[
+'-p',
+bootstrap_settings.python_version,
+]
+)
 subprocess.check_call(
 [
 'uv',
-*[
-o
-for o in bootstrap_settings.uv_args
-if not o in ['-U', '--upgrade']
-],
 'venv',
-*venv_python_version,
+*uv_python_version,
 *pip_find_links_args,
 # '--seed',
+*bootstrap_settings.uv_args,
 str(bootstrap_settings.env_path),
 ]
 )
@@ -554,7 +270,6 @@ def env_bootstrap(
 'uv',
 'pip',
 'install',
-*uv_python_version,
 *pip_find_links_args,
 '-p',
 bootstrap_settings.python_path,
@@ -565,16 +280,6 @@ def env_bootstrap(
 ]
 )
-if bootstrap_settings.pip_check_conflicts:
-subprocess.check_call(
-[
-bootstrap_settings.python_path,
-'-m',
-'online.fxreader.pr34.commands',
-'pip_check_conflicts',
-]
-)
 def paths_equal(a: pathlib.Path | str, b: pathlib.Path | str) -> bool:
 return os.path.abspath(str(a)) == os.path.abspath(str(b))
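To make the named-requirements convention in the hunks above concrete, a minimal sketch of the lookup-key computation (values illustrative):

features = ['early', 'lint']
python_version = '3.12'
requirements_name = '_'.join(sorted(features))
if python_version is not None:
    requirements_name += '_' + python_version.replace('.', '_')
# -> 'early_lint_3_12'; pyproject.requirements[requirements_name] is used when present,
# otherwise the loader falls back to requirements.txt next to pyproject.toml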

@@ -1,11 +1,11 @@
 project(
 run_command(
-'.venv/bin/tomlq', '-r', '.project.name', 'pyproject.toml',
+'tomlq', '-r', '.project.name', 'pyproject.toml',
 check: true
 ).stdout().strip('\n'),
 # 'online.fxreader.uv',
 # ['c', 'cpp'],
-version: '0.1.5.65',
+version: '0.1.5.17',
 # default_options: [
 # 'cpp_std=c++23',
 # # 'prefer_static=true',

File diff suppressed because it is too large.

@@ -1,68 +0,0 @@
import fastapi
import importlib
import pydantic
import functools
import logging
import copy
import uvicorn
import uvicorn.config
import sys
from .settings import Settings as APISettings
from typing import (
Any,
Optional,
Literal,
Annotated,
cast,
Callable,
)
logger = logging.getLogger(__name__)
def create_app() -> fastapi.FastAPI:
app = fastapi.FastAPI()
logger.info(dict(msg='started loading apps'))
for app_config in APISettings.singleton().apps:
logger.info(dict(msg='start loading app = {}'.format(app_config)))
app_module, app_method, app_prefix = app_config.split(':')
app_router = cast(
Callable[[], Any],
getattr(importlib.import_module(app_module), app_method),
)()
assert isinstance(app_router, fastapi.APIRouter)
app.include_router(
app_router,
prefix=app_prefix,
# prefix='/',
)
logger.info(dict(msg='done loading app = {}'.format(app_config)))
logger.info(dict(msg='done loading apps'))
return app
def run(args: list[str]) -> None:
logging.basicConfig(level=logging.INFO)
log_config = copy.deepcopy(uvicorn.config.LOGGING_CONFIG)
uvicorn.run(
create_app,
factory=True,  # create_app is an app factory, not an ASGI app instance
host=APISettings.singleton().uvicorn_host,
port=APISettings.singleton().uvicorn_port,
loop='uvloop',
log_config=log_config,
log_level=logging.INFO,
)
if __name__ == '__main__':
run(sys.argv[1:])

@@ -1,29 +0,0 @@
import pydantic
import pydantic_settings
from typing import (
ClassVar,
Optional,
Annotated,
)
class Settings(pydantic_settings.BaseSettings):
apps: Annotated[
list[str],
pydantic.Field(
default_factory=list,
),
]
uvicorn_port: int = 80
uvicorn_host: str = '127.0.0.1'
_singleton: ClassVar[Optional['Settings']] = None
@classmethod
def singleton(cls) -> 'Settings':
if cls._singleton is None:
cls._singleton = Settings.model_validate({})
return cls._singleton
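Both removed files are driven by environment variables via pydantic-settings; a hypothetical configuration, with each apps entry in the 'module:factory:prefix' form that create_app splits on (module and prefix are illustrative):

APPS='["myservice.api:create_router:/api/v1"]'
UVICORN_HOST=127.0.0.1
UVICORN_PORT=8080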

@ -1,8 +1,5 @@
import dataclasses import dataclasses
import io import io
import json
import importlib
import configparser
import glob import glob
import os import os
import pathlib import pathlib
@ -49,8 +46,7 @@ class PyProject:
class Meson: class Meson:
@dataclasses.dataclass @dataclasses.dataclass
class Args: class Args:
install: Optional[list[str]] = None install: list[str]
setup: Optional[list[str]] = None
args: Args args: Args
@ -172,13 +168,9 @@ class CLI(abc.ABC):
) -> None: ) -> None:
from . import cli_bootstrap from . import cli_bootstrap
pyproject = cli_bootstrap.pyproject_load( pyproject = cli_bootstrap.pyproject_load(self.projects[project].source_dir / 'pyproject.toml')
self.projects[project].source_dir / 'pyproject.toml'
)
dependencies = sum( dependencies = sum([pyproject.dependencies[o] for o in features], cast(list[str], []))
[pyproject.dependencies[o] for o in features], cast(list[str], [])
)
pip_find_links: list[pathlib.Path] = [] pip_find_links: list[pathlib.Path] = []
@ -220,9 +212,7 @@ class CLI(abc.ABC):
force: bool, force: bool,
) -> None: ) -> None:
for k, d in self.dependencies.items(): for k, d in self.dependencies.items():
whl_glob = self.dist_settings.wheel_dir / ( whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_'))
'*%s*.whl' % d.name.replace('.', '_')
)
if len(glob.glob(str(whl_glob))) == 0 or force: if len(glob.glob(str(whl_glob))) == 0 or force:
if d.source_path.exists(): if d.source_path.exists():
@ -262,9 +252,7 @@ class CLI(abc.ABC):
def index_get(o: dict[str, Any]) -> tuple[Any, ...]: def index_get(o: dict[str, Any]) -> tuple[Any, ...]:
return (o['path'], o['stat']) return (o['path'], o['stat'])
present_files_index = { present_files_index = {index_get(o): o for o in present_files}
index_get(o): o for o in present_files
}
new_files: list[dict[str, Any]] = [] new_files: list[dict[str, Any]] = []
@ -289,33 +277,17 @@ class CLI(abc.ABC):
] ]
) )
# @property @property
def pkg_config_path( def pkg_config_path(
self, self,
project: Optional[str] = None, ) -> set[pathlib.Path]:
) -> list[pathlib.Path]: return {
res: list[pathlib.Path] = []
if project:
res.append(self.projects[project].dest_dir / 'lib' / 'pkgconfig')
res.extend(
[
pathlib.Path(o) pathlib.Path(o)
for o in glob.glob( for o in glob.glob(
str( str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'),
self.dist_settings.env_path
/ 'lib'
/ 'python*'
/ '**'
/ 'pkgconfig'
),
recursive=True, recursive=True,
) )
] }
)
return res
def deploy_wheel( def deploy_wheel(
self, self,
@ -380,40 +352,10 @@ class CLI(abc.ABC):
if env is None: if env is None:
env = dict() env = dict()
env = (
dict(
# to generate zip for .whl with a reproducible checksum
SOURCE_DATE_EPOCH='0',
)
| env
)
extra_args: list[str] = [] extra_args: list[str] = []
pyproject_build_dir = project.build_dir / 'pyproject' if len(self.third_party_roots) > 0:
extra_args.extend(['-Csetup-args=%s' % ('-Dthird_party_roots=%s' % str(o.absolute())) for o in self.third_party_roots])
if force and pyproject_build_dir.exists():
logger.info(
dict(
pyproject_build_dir=pyproject_build_dir,
msg='remove build dir',
)
)
shutil.rmtree(pyproject_build_dir)
if len(self.third_party_roots(project_name)) > 0:
extra_args.append(
'-Csetup-args=%s'
% (
'-Dthird_party_roots=%s'
% json.dumps(
[
str(o.absolute())
for o in self.third_party_roots(project_name)
]
)
)
)
cmd = [ cmd = [
sys.executable, sys.executable,
@ -423,7 +365,7 @@ class CLI(abc.ABC):
'-n', '-n',
*extra_args, *extra_args,
'-Csetup-args=-Dmodes=pyproject', '-Csetup-args=-Dmodes=pyproject',
'-Cbuild-dir=%s' % str(pyproject_build_dir), '-Cbuild-dir=%s' % str(project.build_dir / 'pyproject'),
'-Csetup-args=-Dinstall_path=%s' % str(project.dest_dir), '-Csetup-args=-Dinstall_path=%s' % str(project.dest_dir),
# '-Cbuild-dir=%s' % str(project.build_dir), # '-Cbuild-dir=%s' % str(project.build_dir),
str(project.source_dir), str(project.source_dir),
@ -433,7 +375,7 @@ class CLI(abc.ABC):
if not output_dir is None: if not output_dir is None:
cmd.extend(['-o', str(output_dir)]) cmd.extend(['-o', str(output_dir)])
logger.info(dict(env=env, cmd=cmd)) logger.info(dict(env=env))
subprocess.check_call( subprocess.check_call(
cmd, cmd,
@ -458,7 +400,6 @@ class CLI(abc.ABC):
argv: Optional[list[str]] = None, argv: Optional[list[str]] = None,
) -> None: ) -> None:
from . import cli_bootstrap from . import cli_bootstrap
from .os import shutil_rmtree
project = self.projects[project_name] project = self.projects[project_name]
@ -469,36 +410,20 @@ class CLI(abc.ABC):
argv = [] argv = []
if force and project.dest_dir.exists(): if force and project.dest_dir.exists():
shutil_rmtree( shutil.rmtree(project.dest_dir)
project.dest_dir,
preserve_top_path=True,
)
pyproject = cli_bootstrap.pyproject_load( pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
project.source_dir / 'pyproject.toml'
)
pyproject_tool = ( pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root
pydantic.RootModel[PyProject.Tool]
.model_validate(pyproject.tool)
.root
)
if ( if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.install:
pyproject_tool.meson
and pyproject_tool.meson.args
and pyproject_tool.meson.args.install
):
argv = pyproject_tool.meson.args.install + argv argv = pyproject_tool.meson.args.install + argv
cmd = [ cmd = [
str(self.dist_settings.python_path), shutil_which(
'-m', 'meson',
'mesonbuild.mesonmain', True,
# shutil_which( ),
# 'meson',
# True,
# ),
'install', 'install',
'-C', '-C',
str(project.build_dir / 'meson'), str(project.build_dir / 'meson'),
@ -530,22 +455,16 @@ class CLI(abc.ABC):
content = f.read() content = f.read()
with io.open(o, 'w') as f: with io.open(o, 'w') as f:
f.write( f.write(content.replace('prefix=/', 'prefix=${pcfiledir}/../../'))
content.replace('prefix=/', 'prefix=${pcfiledir}/../../')
)
def ninja( def ninja(
self, self,
project_name: str, project_name: str,
argv: Optional[list[str]] = None, argv: Optional[list[str]] = None,
env: Optional[dict[str, str]] = None, env: Optional[dict[str, str]] = None,
mode: Optional[Literal['meson', 'pyproject']] = None,
) -> None: ) -> None:
project = self.projects[project_name] project = self.projects[project_name]
if mode is None:
mode = 'meson'
if argv is None: if argv is None:
argv = [] argv = []
@ -558,7 +477,7 @@ class CLI(abc.ABC):
[ [
shutil_which('ninja', True), shutil_which('ninja', True),
'-C', '-C',
str(project.build_dir / mode), str(project.build_dir / 'meson'),
*argv, *argv,
], ],
env=dict(list(os.environ.items())) | env, env=dict(list(os.environ.items())) | env,
@ -576,13 +495,10 @@ class CLI(abc.ABC):
subprocess.check_call( subprocess.check_call(
[ [
str(self.dist_settings.python_path), shutil_which(
'-m', 'meson',
'mesonbuild.mesonmain', True,
# shutil_which( ),
# 'meson',
# True,
# ),
'test', 'test',
'-C', '-C',
project.build_dir / 'meson', project.build_dir / 'meson',
@ -602,13 +518,10 @@ class CLI(abc.ABC):
subprocess.check_call( subprocess.check_call(
[ [
str(self.dist_settings.python_path), shutil_which(
'-m', 'meson',
'mesonbuild.mesonmain', True,
# shutil_which( ),
# 'meson',
# True,
# ),
'compile', 'compile',
'-C', '-C',
project.build_dir / 'meson', project.build_dir / 'meson',
@ -616,115 +529,9 @@ class CLI(abc.ABC):
] ]
) )
def third_party_roots(
self,
project_name: Optional[str] = None,
) -> list[pathlib.Path]:
from . import cli_bootstrap
from .pip import pip_show
res: list[pathlib.Path] = []
if not project_name is None:
pyproject = cli_bootstrap.pyproject_load(
self.projects[project_name].source_dir / 'pyproject.toml'
)
for third_party_root in pyproject.third_party_roots:
if third_party_root.package:
if not third_party_root.module_root:
third_party_root.module_root = (
third_party_root.package.replace('.', os.path.sep)
)
if not third_party_root.path:
packages = pip_show([third_party_root.package])
assert len(packages) == 1
third_party_root.path = str(
pathlib.Path(packages[0].location)
/ third_party_root.module_root
/ 'lib'
)
else:
assert (
not third_party_root.package
and not third_party_root.module_root
and third_party_root.path
)
res.append(pathlib.Path(third_party_root.path))
# res.append(self.projects[project_name].dest_dir / 'lib')
return res
class meson_toolchains_t:
class res_t:
@dataclasses.dataclass
class toolchain_t:
name: str
path: Optional[pathlib.Path] = None
@property @property
def meson_toolchains( def third_party_roots(self) -> list[pathlib.Path]:
self, return []
) -> dict[str, meson_toolchains_t.res_t.toolchain_t]:
t1 = pathlib.Path(
importlib.import_module('online.fxreader.pr34').__path__[0]
)
toolchains = glob.glob(str(t1 / 'meson' / 'toolchains' / '*'))
res: dict[str, CLI.meson_toolchains_t.res_t.toolchain_t] = dict()
for o in toolchains:
entry = self.meson_toolchains_t.res_t.toolchain_t(
name=pathlib.Path(o).stem,
path=pathlib.Path(o),
)
config = configparser.ConfigParser()
config.read(str(entry.path))
res[entry.name] = entry
return res
def _cross_file(
self,
extra_args: list[str],
pyproject_tool: PyProject.Tool,
) -> list[str]:
from . import argparse as pr34_argparse
if (
pyproject_tool.meson
and pyproject_tool.meson.args
and pyproject_tool.meson.args.setup
):
extra_args = pyproject_tool.meson.args.setup + extra_args
parser = argparse.ArgumentParser()
parser.add_argument(
'--cross-file',
dest='cross_file',
default=None,
# type=pathlib.Path,
type=pathlib.Path,
)
options, args = pr34_argparse.parse_args(parser, extra_args)
if not options.cross_file is None:
if not options.cross_file.exists() and (
not options.cross_file.is_absolute()
and options.cross_file.stem in self.meson_toolchains
):
options.cross_file = self.meson_toolchains[
options.cross_file.stem
].path
extra_args = ['--cross-file', str(options.cross_file)] + args
return extra_args
def meson_setup( def meson_setup(
self, self,
@ -732,15 +539,8 @@ class CLI(abc.ABC):
force: bool, force: bool,
argv: Optional[list[str]] = None, argv: Optional[list[str]] = None,
env: Optional[dict[str, str]] = None, env: Optional[dict[str, str]] = None,
mode: Optional[Literal['meson', 'pyproject']] = None,
# third_party_roots: Optional[list[pathlib.Path]] = None, # third_party_roots: Optional[list[pathlib.Path]] = None,
) -> None: ) -> None:
from . import cli_bootstrap
from .os import shutil_rmtree
if mode is None:
mode = 'meson'
project = self.projects[project_name] project = self.projects[project_name]
if argv is None: if argv is None:
@ -749,61 +549,27 @@ class CLI(abc.ABC):
if env is None: if env is None:
env = dict() env = dict()
pyproject = cli_bootstrap.pyproject_load(
project.source_dir / 'pyproject.toml'
)
pyproject_tool = (
pydantic.RootModel[PyProject.Tool]
.model_validate(pyproject.tool)
.root
)
logger.info(dict(env=env)) logger.info(dict(env=env))
if force: if force:
if (project.build_dir / mode).exists(): if (project.build_dir / 'meson').exists():
logger.info( logger.info(dict(action='removing build dir', path=project.build_dir / 'meson'))
dict( shutil.rmtree(project.build_dir / 'meson')
action='removing build dir',
path=project.build_dir / mode,
)
)
shutil.rmtree(project.build_dir / mode)
extra_args: list[str] = [] extra_args: list[str] = []
extra_args = self._cross_file( if len(self.third_party_roots) > 0:
extra_args=extra_args, extra_args.extend(['-Dthird_party_roots=%s' % str(o.absolute()) for o in self.third_party_roots])
pyproject_tool=pyproject_tool,
)
if len(self.third_party_roots(project_name)) > 0:
extra_args.append(
'-Dthird_party_roots=%s'
% json.dumps(
[
str(o.absolute())
for o in self.third_party_roots(project_name)
]
)
)
cmd = [ cmd = [
# shutil_which( shutil_which(
# 'meson', 'meson',
# True, True,
# ), ),
str(self.dist_settings.python_path),
'-m',
'mesonbuild.mesonmain',
'setup', 'setup',
str(project.source_dir), str(project.source_dir),
str(project.build_dir / mode), str(project.build_dir / 'meson'),
'--pkg-config-path={}'.format( '-Dmodes=["meson"]',
json.dumps([str(o) for o in self.pkg_config_path(project_name)])
),
'-Dmodes=["{}"]'.format(mode),
*extra_args, *extra_args,
# '-Dpkgconfig.relocatable=true', # '-Dpkgconfig.relocatable=true',
'-Dprefix=/', '-Dprefix=/',
@ -852,21 +618,14 @@ class CLI(abc.ABC):
argv, argv,
) )
pyproject = cli_bootstrap.pyproject_load( pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
project.source_dir / 'pyproject.toml'
)
dependencies = sum( dependencies = sum([pyproject.dependencies[o] for o in options.features], cast(list[str], []))
[pyproject.dependencies[o] for o in options.features],
cast(list[str], []),
)
pip_find_links: list[pathlib.Path] = [] pip_find_links: list[pathlib.Path] = []
if not pyproject.pip_find_links is None: if not pyproject.pip_find_links is None:
pip_find_links.extend( pip_find_links.extend([o for o in pyproject.pip_find_links if o.exists()])
[o for o in pyproject.pip_find_links if o.exists()]
)
requirements_name_get_res = cli_bootstrap.requirements_name_get( requirements_name_get_res = cli_bootstrap.requirements_name_get(
source_dir=project.source_dir, source_dir=project.source_dir,
@ -936,7 +695,6 @@ class CLI(abc.ABC):
from . import cli_bootstrap from . import cli_bootstrap
from . import argparse as pr34_argparse from . import argparse as pr34_argparse
from .toml import toml_add_overlay
project = self.projects[project_name] project = self.projects[project_name]
@ -975,9 +733,7 @@ class CLI(abc.ABC):
assert options.module in [o.name for o in pyproject.modules] assert options.module in [o.name for o in pyproject.modules]
modules: dict[str, cli_bootstrap.PyProject.Module] = { modules: dict[str, cli_bootstrap.PyProject.Module] = {o.name: o for o in pyproject.modules}
o.name: o for o in pyproject.modules
}
module = modules[options.module] module = modules[options.module]
@ -989,8 +745,7 @@ class CLI(abc.ABC):
'w', 'w',
) as f: ) as f:
p = pyproject2['project'] p = pyproject2['project']
# assert isinstance(p, tomlkit.items.Table) assert isinstance(p, tomlkit.items.Table)
assert isinstance(p, MutableMapping)
p['name'] = module.name p['name'] = module.name
if not pyproject2['tool']: if not pyproject2['tool']:
@ -1003,14 +758,9 @@ class CLI(abc.ABC):
# assert isinstance(pyproject_tool, tomlkit.items.Array) # assert isinstance(pyproject_tool, tomlkit.items.Array)
assert isinstance(pyproject_tool, MutableMapping) assert isinstance(pyproject_tool, MutableMapping)
toml_add_overlay( for k, v in module.tool.items():
pyproject_tool, assert not k in pyproject_tool
module.tool, pyproject_tool[k] = v
)
# for k, v in module.tool.items():
# assert not k in pyproject_tool
# pyproject_tool[k] = v
del p del p
del pyproject_tool del pyproject_tool
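The third_party_roots entries removed above are read from [tool.online-fxreader-pr34] in pyproject.toml; a hypothetical value matching the parsing code (package name and path are illustrative, and each entry uses only the keys package, module_root, path):

[tool.online-fxreader-pr34]
third_party_roots = [
    { package = 'some.vendor.pkg' },
    { path = '/opt/vendor/lib' },
]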

@ -1,6 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import glob import glob
import importlib
import json import json
import io import io
import tempfile import tempfile
@ -19,7 +18,6 @@ from typing import (
cast, cast,
Type, Type,
TypeVar, TypeVar,
Callable,
) )
from typing_extensions import ( from typing_extensions import (
Self, Self,
@ -32,23 +30,17 @@ logger = logging.getLogger(__name__)
def toml_load(f: BinaryIO) -> Any: def toml_load(f: BinaryIO) -> Any:
try: try:
tomllib = importlib.import_module('tomllib') import tomllib
return cast( return tomllib.load(f)
Callable[[Any], Any], except:
getattr(
tomllib,
'load',
),
)(f)
except ModuleNotFoundError:
pass pass
try: try:
import tomli import tomli
return tomli.load(f) return tomli.load(f)
except ModuleNotFoundError: except:
pass pass
raise NotImplementedError raise NotImplementedError
@ -68,19 +60,7 @@ class PyProject:
pip_find_links: Optional[list[pathlib.Path]] = None pip_find_links: Optional[list[pathlib.Path]] = None
runtime_libdirs: Optional[list[pathlib.Path]] = None runtime_libdirs: Optional[list[pathlib.Path]] = None
runtime_preload: Optional[list[pathlib.Path]] = None runtime_preload: Optional[list[pathlib.Path]] = None
requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
@dataclasses.dataclass
class ThirdPartyRoot:
package: Optional[str] = None
module_root: Optional[str] = None
path: Optional[str] = None
third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
default_factory=lambda: [],
)
requirements: dict[str, pathlib.Path] = dataclasses.field(
default_factory=lambda: dict()
)
modules: list[Module] = dataclasses.field( modules: list[Module] = dataclasses.field(
default_factory=lambda: [], default_factory=lambda: [],
@ -126,12 +106,7 @@ def check_dict(
else: else:
VT_class = VT VT_class = VT
assert all( assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()])
[
isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
for k, v in value2.items()
]
)
if VT is None: if VT is None:
return cast( return cast(
@ -179,21 +154,6 @@ def check_list(
) )
def check_type(
value: Any,
VT: Type[Value],
attribute_name: Optional[str] = None,
) -> Value:
if attribute_name:
attribute_value = getattr(value, attribute_name)
assert isinstance(attribute_value, VT)
return attribute_value
else:
assert isinstance(value, VT)
return value
def pyproject_load( def pyproject_load(
d: pathlib.Path, d: pathlib.Path,
) -> PyProject: ) -> PyProject:
@@ -240,12 +200,7 @@ def pyproject_load(
         str,
     )

-    if (
-        'tool' in content
-        and isinstance(content['tool'], dict)
-        and tool_name in content['tool']
-        and isinstance(content['tool'][tool_name], dict)
-    ):
+    if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
         pr34_tool = check_dict(
             check_dict(
                 content['tool'],
@@ -258,46 +213,27 @@ def pyproject_load(
             res.early_features = pr34_tool['early_features']

         if 'pip_find_links' in pr34_tool:
-            res.pip_find_links = [
-                d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
-            ]
+            res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]

         if 'runtime_libdirs' in pr34_tool:
             res.runtime_libdirs = [
                 d.parent / pathlib.Path(o)
                 # pathlib.Path(o)
-                for o in check_list(pr34_tool['runtime_libdirs'], str)
+                for o in pr34_tool['runtime_libdirs']
             ]

         if 'runtime_preload' in pr34_tool:
             res.runtime_preload = [
                 d.parent / pathlib.Path(o)
                 # pathlib.Path(o)
-                for o in check_list(pr34_tool['runtime_preload'], str)
+                for o in pr34_tool['runtime_preload']
             ]

-        if 'third_party_roots' in pr34_tool:
-            for o in check_list(pr34_tool['third_party_roots']):
-                o2 = check_dict(o, str, str)
-                assert all(
-                    [k in {'package', 'module_root', 'path'} for k in o2]
-                )
-                res.third_party_roots.append(
-                    PyProject.ThirdPartyRoot(
-                        package=o2.get('package'),
-                        module_root=o2.get('module_root'),
-                        path=o2.get('path'),
-                    )
-                )
-
         if 'requirements' in pr34_tool:
             res.requirements = {
                 k: d.parent / pathlib.Path(v)
                 # pathlib.Path(o)
-                for k, v in check_dict(
-                    pr34_tool['requirements'], str, str
-                ).items()
+                for k, v in check_dict(pr34_tool['requirements'], str, str).items()
             }

         if 'modules' in pr34_tool:
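One convention worth keeping in mind while reading this hunk: every relative path found under [tool.<name>] is resolved against the directory holding pyproject.toml (d.parent), never the process working directory. In isolation (paths hypothetical):

import pathlib

d = pathlib.Path('project/pyproject.toml')
pr34_tool = dict(pip_find_links=['dist', '../wheels'])  # parsed TOML fragment

pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
assert pip_find_links == [pathlib.Path('project/dist'), pathlib.Path('project/../wheels')]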
@@ -333,7 +269,6 @@ def pyproject_load(
 @dataclasses.dataclass
 class BootstrapSettings:
     env_path: pathlib.Path
-    whl_cache_path: pathlib.Path
     python_path: pathlib.Path
     base_dir: pathlib.Path
     python_version: Optional[str] = dataclasses.field(
@@ -347,23 +282,14 @@ class BootstrapSettings:
         ).strip()
     )
     pip_check_conflicts: Optional[bool] = dataclasses.field(
-        default_factory=lambda: os.environ.get(
-            'PIP_CHECK_CONFLICTS', json.dumps(True)
-        )
-        in [json.dumps(True)],
+        default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
     )
     uv_args: list[str] = dataclasses.field(
         default_factory=lambda: os.environ.get(
             'UV_ARGS',
-            '--offline -U',
+            '--offline',
         ).split(),
     )
-    whl_cache_update: Optional[bool] = dataclasses.field(
-        default_factory=lambda: os.environ.get(
-            'WHL_CACHE_UPDATE', json.dumps(False)
-        )
-        in [json.dumps(True)]
-    )

     @classmethod
     def get(
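The boolean env flags above compare the raw variable against JSON literals, so only the exact string 'true' enables a flag; '1' or 'True' parse as disabled. In isolation:

import json
import os

def env_flag(name: str, default: bool) -> bool:
    return os.environ.get(name, json.dumps(default)) in [json.dumps(True)]

os.environ['PIP_CHECK_CONFLICTS'] = 'true'
assert env_flag('PIP_CHECK_CONFLICTS', True) is True
os.environ['PIP_CHECK_CONFLICTS'] = '1'  # not JSON true
assert env_flag('PIP_CHECK_CONFLICTS', True) is False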
@@ -379,14 +305,11 @@ class BootstrapSettings:
         else:
             env_path = base_dir / '.venv'

-        whl_cache_path = env_path.parent / '.venv-whl-cache'
         python_path = env_path / 'bin' / 'python3'

         return cls(
             base_dir=base_dir,
             env_path=env_path,
-            whl_cache_path=whl_cache_path,
             python_path=python_path,
         )
@@ -421,9 +344,7 @@ def requirements_name_get(
     else:
         requirements_path = source_dir / 'requirements.txt'

-    requirements_path_in = requirements_path.parent / (
-        requirements_path.stem + '.in'
-    )
+    requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')

     requirements_in: list[str] = []
@@ -469,15 +390,10 @@ def env_bootstrap(
     requirements_in: list[str] = []

-    requirements_in.extend(
-        ['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
-    )
+    requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])

     if pyproject.early_features:
-        early_dependencies = sum(
-            [pyproject.dependencies[o] for o in pyproject.early_features],
-            cast(list[str], []),
-        )
+        early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))

         logger.info(
             dict(
@@ -498,25 +414,6 @@ def env_bootstrap(
         # *early_dependencies,
         # ])

-    uv_python_version: list[str] = []
-    venv_python_version: list[str] = []
-
-    if not bootstrap_settings.python_version is None:
-        uv_python_version.extend(
-            [
-                # '-p',
-                '--python-version',
-                bootstrap_settings.python_version,
-            ]
-        )
-        venv_python_version.extend(
-            [
-                '-p',
-                # '--python-version',
-                bootstrap_settings.python_version,
-            ]
-        )
-
     if not requirements_path.exists():
         with tempfile.NamedTemporaryFile(
             mode='w',
@@ -526,11 +423,11 @@ def env_bootstrap(
             f.write('\n'.join(requirements_in))
             f.flush()

-            cmd = [
+            subprocess.check_call(
+                [
                     'uv',
                     'pip',
                     'compile',
-                *uv_python_version,
                     '--generate-hashes',
                     *pip_find_links_args,
                     # '-p',
@@ -540,64 +437,36 @@ def env_bootstrap(
                     str(requirements_path),
                     f.name,
                 ]
-            logger.info(dict(cmd=cmd))
-            subprocess.check_call(cmd)
+            )

-    # if not bootstrap_settings.env_path.exists():
-    if (
-        not bootstrap_settings.whl_cache_path.exists()
-        or bootstrap_settings.whl_cache_update
-    ):
-        cmd = [
-            'pip',
-            'download',
-            '--only-binary=:all:',
-            *uv_python_version,
-            *pip_find_links_args,
-            '-r',
-            str(requirements_path),
-            '-d',
-            str(bootstrap_settings.whl_cache_path),
-        ]
-        logger.info(dict(cmd=cmd))
-        subprocess.check_call(cmd)
+    uv_python_version: list[str] = []

-    cache_find_links_args = (
-        '-f',
-        str(bootstrap_settings.whl_cache_path),
-    )
+    if not bootstrap_settings.python_version is None:
+        uv_python_version.extend(
+            [
+                '-p',
+                bootstrap_settings.python_version,
+            ]
+        )

     subprocess.check_call(
         [
             'uv',
-            *[
-                o
-                for o in bootstrap_settings.uv_args
-                if not o
-                in [
-                    '-U',
-                    '--upgrade',
-                    '--no-index',
-                ]
-            ],
             'venv',
-            *venv_python_version,
-            *cache_find_links_args,
+            *uv_python_version,
+            *pip_find_links_args,
             # '--seed',
+            *bootstrap_settings.uv_args,
             str(bootstrap_settings.env_path),
         ]
     )

-    cmd = [
-        'uv',
-        'pip',
-        'install',
-        *uv_python_version,
-        *cache_find_links_args,
-        # *pip_find_links_args,
-        '-p',
-        bootstrap_settings.python_path,
-        '--require-hashes',
+    subprocess.check_call(
+        [
+            'uv',
+            'pip',
+            'install',
+            *pip_find_links_args,
+            '-p',
+            bootstrap_settings.python_path,
+            '--require-hashes',
@@ -605,10 +474,7 @@ def env_bootstrap(
-        '-r',
-        str(requirements_path),
-    ]
-    logger.info(dict(cmd=cmd))
-    subprocess.check_call(cmd)
+            '-r',
+            str(requirements_path),
+        ]
+    )

     if bootstrap_settings.pip_check_conflicts:
         subprocess.check_call(
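With the wheel-cache branch gone, the bootstrap reduces to three uv invocations: compile hash-pinned requirements, create the venv, install with hash checking. A Python sketch of the equivalent sequence (file names and the Python version are illustrative):

import subprocess

subprocess.check_call(['uv', 'pip', 'compile', '--generate-hashes', 'requirements.in', '-o', 'requirements.txt'])
subprocess.check_call(['uv', 'venv', '-p', '3.12', '.venv'])
subprocess.check_call(['uv', 'pip', 'install', '-p', '.venv/bin/python3', '--require-hashes', '-r', 'requirements.txt'])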

@@ -1,109 +0,0 @@
import subprocess
import sys
import json
import logging

from typing import (
    Literal,
    Optional,
)

import argparse

logger = logging.getLogger(__name__)


def run(argv: list[str]) -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'action',
        choices=[
            'toggle',
            'dark',
            'light',
            'get',
        ],
        # required=True,
        type=str,
        help='action',
    )

    from .argparse import parse_args as pr34_parse_args

    options, args = pr34_parse_args(parser, argv)

    assert len(args) == 0

    def get_theme() -> Literal['light', 'dark', 'default']:
        res = (
            subprocess.check_output(
                [
                    'gsettings',
                    'get',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                ]
            )
            .decode('utf-8')
            .strip()
        )

        if res == "'prefer-dark'":
            return 'dark'
        elif res == "'prefer-light'":
            return 'light'
        elif res == "'default'":
            return 'default'
        else:
            logger.error(dict(res=res, msg='unknown theme'))
            raise NotImplementedError

    def set_theme(theme: Literal['light', 'dark', 'default']) -> None:
        if theme == 'light':
            subprocess.check_call(
                [
                    'gsettings',
                    'set',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                    'prefer-light',
                ]
            )
        elif theme == 'dark':
            subprocess.check_call(
                [
                    'gsettings',
                    'set',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                    'prefer-dark',
                ]
            )
        elif theme == 'default':
            subprocess.check_call(
                [
                    'gsettings',
                    'reset',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                ]
            )
        else:
            raise NotImplementedError

    def toggle() -> None:
        theme = get_theme()
        if theme in ('light', 'default'):
            set_theme('dark')
        else:
            set_theme('light')

    if options.action == 'toggle':
        toggle()
    elif options.action == 'dark':
        set_theme('dark')
    elif options.action == 'light':
        set_theme('light')
    elif options.action == 'get':
        sys.stdout.write(json.dumps(get_theme()))
        sys.stdout.flush()
    else:
        raise NotImplementedError
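The deleted module drives GNOME's color-scheme key through gsettings; the equivalent of its get_theme probe as a one-off (note gsettings returns a GVariant-quoted string):

import subprocess

res = subprocess.check_output(
    ['gsettings', 'get', 'org.gnome.desktop.interface', 'color-scheme']
).decode('utf-8').strip()
assert res in ("'prefer-dark'", "'prefer-light'", "'default'")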

@@ -64,9 +64,7 @@ class PasswordUtils:
             raise NotImplementedError

     @classmethod
-    def _scrypt_init(
-        cls, salt: bytes
-    ) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
+    def _scrypt_init(cls, salt: bytes) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
         return cryptography.hazmat.primitives.kdf.scrypt.Scrypt(
             salt=salt,
             length=32,
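Context for the hunk above: cryptography's Scrypt derives (and verifies) a key from a password plus a random per-password salt. A minimal sketch; the cost parameters n, r, p here are common defaults assumed for illustration, not values taken from PasswordUtils:

import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2**14, r=8, p=1)
key = kdf.derive(b'correct horse battery staple')

# verification needs a fresh instance with identical parameters
Scrypt(salt=salt, length=32, n=2**14, r=8, p=1).verify(b'correct horse battery staple', key)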

@@ -10,7 +10,5 @@ def setup(level: Optional[int] = None) -> None:
     logging.basicConfig(
         level=level,
-        format=(
-            '%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
-        ),
+        format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
     )

@@ -1,121 +0,0 @@
import pydantic
import json
import logging
import datetime

# import django.http

from typing import (
    Literal,
    Any,
    Optional,
    Annotated,
    cast,
    TypeVar,
    Protocol,
    Generic,
    Callable,
)

logger = logging.getLogger(__name__)


class Metric(pydantic.BaseModel):
    name: str
    type: Literal['gauge', 'counter']
    help: Optional[str] = None

    class Sample(pydantic.BaseModel):
        value: str
        parameters: dict[str, str]
        timestamp: Optional[datetime.datetime] = None

    samples: list[Sample] = pydantic.Field(
        default_factory=lambda: [],
    )

    @classmethod
    def sample_serialize(
        cls,
        o: 'Metric',
        s: 'Metric.Sample',
    ) -> str:
        samples: list[Metric.Sample] = [
            s,
        ]

        if o.type == 'gauge':
            samples.append(
                Metric.Sample(
                    parameters=s.parameters,
                    value='NaN',
                    timestamp=(
                        s.timestamp + datetime.timedelta(seconds=15)
                        if s.timestamp
                        else None
                    ),
                )
            )

        return ''.join(
            [
                '{metric}{{{parameters}}} {value} {timestamp}\n'.format(
                    metric=o.name,
                    parameters=','.join(
                        [
                            '%s=%s'
                            % (
                                k,
                                json.dumps(v),
                            )
                            for k, v in s2.parameters.items()
                        ]
                    ),
                    value=s2.value,
                    timestamp=(
                        '%.f' % (s2.timestamp.timestamp() * 1000,)
                        if s2.timestamp
                        else ''
                    ),
                )
                for s2 in samples
            ]
        )


class serialize_t:
    class res_t(pydantic.BaseModel):
        json2: str
        content_type: str


def serialize(
    metrics: list[Metric],
):
    return serialize_t.res_t(
        json2=''.join(
            [
                '{help}{type}{samples}'.format(
                    # help='# HELP %s some metric' % o.name,
                    # type='# TYPE %s counter' % o.name,
                    help=(
                        '# HELP {0} {1}\n'.format(o.name, o.help)
                        if o.help
                        else ''
                    ),
                    type=(
                        '# TYPE {0} {1}\n'.format(o.name, o.type)
                        if o.type
                        else ''
                    ),
                    samples=''.join(
                        [Metric.sample_serialize(o, s) for s in o.samples]
                    ),
                )
                for o in metrics
                if len(o.samples) > 0
            ]
        ),
        content_type='text/plain; version=0.0.4; charset=utf-8',
    )
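For context, the removed serializer emitted Prometheus text exposition format 0.0.4; per sample_serialize above, a gauge sample is followed by a NaN staleness marker 15 seconds later. Illustrative output (metric name and values are assumptions):

from datetime import datetime, timedelta, timezone

ts = datetime(2025, 7, 16, 9, 0, tzinfo=timezone.utc)
lines = [
    '# HELP temperature_c ambient temperature\n',
    '# TYPE temperature_c gauge\n',
    'temperature_c{room="lab"} 21.5 %.f\n' % (ts.timestamp() * 1000,),
    'temperature_c{room="lab"} NaN %.f\n' % ((ts + timedelta(seconds=15)).timestamp() * 1000,),
]
print(''.join(lines), end='')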

@@ -38,9 +38,7 @@ class MypyFormatEntry:

 class MypyFormat:
-    vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(
-        name='vscode', value='vscode'
-    )
+    vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode')
     json: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json')

     @classmethod
@@ -151,11 +149,7 @@ def run(
     assert not res.returncode is None

     errors = sorted(
-        [
-            json.loads(o)
-            for o in res.stdout.decode('utf-8').splitlines()
-            if not o.strip() == ''
-        ],
+        [json.loads(o) for o in res.stdout.decode('utf-8').splitlines() if not o.strip() == ''],
         key=lambda x: (
             x.get('file', ''),
             x.get('line', 0),
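The flattened comprehension above parses mypy's JSON-lines output and orders diagnostics by file, then line; in isolation:

import json

stdout = b'{"file": "b.py", "line": 3}\n\n{"file": "a.py", "line": 7}\n'
errors = sorted(
    [json.loads(o) for o in stdout.decode('utf-8').splitlines() if not o.strip() == ''],
    key=lambda x: (x.get('file', ''), x.get('line', 0)),
)
assert [e['file'] for e in errors] == ['a.py', 'b.py']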

@@ -54,21 +54,8 @@ def runtime_libdirs_init(
     ld_library_path: list[pathlib.Path] = [
         o
         for o in [
-            *[
-                o.absolute()
-                for o in (
-                    project.runtime_libdirs
-                    if project.runtime_libdirs
-                    else []
-                )
-            ],
-            *[
-                pathlib.Path(o)
-                for o in os.environ.get('LD_LIBRARY_PATH', '').split(
-                    os.path.pathsep
-                )
-                if o != ''
-            ],
+            *[o.absolute() for o in (project.runtime_libdirs if project.runtime_libdirs else [])],
+            *[pathlib.Path(o) for o in os.environ.get('LD_LIBRARY_PATH', '').split(os.path.pathsep) if o != ''],
         ]
     ]
@@ -85,16 +72,10 @@ def runtime_libdirs_init(
         ld_library_path_present.append(o)

-    os.environ.update(
-        LD_LIBRARY_PATH=os.path.pathsep.join(
-            [str(o) for o in ld_library_path_present]
-        )
-    )
+    os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present]))

     for preload_path in project.runtime_preload or []:
-        for preload_found in glob.glob(
-            str(preload_path.parent / ('lib%s.so' % preload_path.name))
-        ):
+        for preload_found in glob.glob(str(preload_path.parent / ('lib%s.so' % preload_path.name))):
             logger.info(
                 dict(
                     preload_path=preload_path,
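In isolation, the flattened os.environ.update above joins the collected directories with os.path.pathsep (':' on POSIX), keeping any entries inherited from the caller's LD_LIBRARY_PATH:

import os

ld_library_path_present = ['/opt/app/lib', '/usr/local/lib']
os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present]))
assert os.environ['LD_LIBRARY_PATH'] == '/opt/app/lib:/usr/local/lib'  # POSIX pathsep assumed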
@@ -142,17 +123,3 @@ def interfaces_index() -> list[interfaces_index_t.Interface]:
     )

     return res
-
-
-def shutil_rmtree(
-    path: pathlib.Path,
-    preserve_top_path: bool = False,
-) -> None:
-    if preserve_top_path:
-        for p in path.iterdir():
-            if p.is_dir():
-                shutil.rmtree(str(p))
-            else:
-                p.unlink()
-    else:
-        shutil.rmtree(str(path))
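For reference, the deleted helper's preserve_top_path=True branch empties a directory without removing the directory itself (useful for mount points or bind-mounted volumes); its effect, inlined:

import pathlib
import shutil
import tempfile

top = pathlib.Path(tempfile.mkdtemp())
(top / 'sub').mkdir()
(top / 'file.txt').write_text('x')

for p in top.iterdir():  # preserve_top_path=True behavior
    if p.is_dir():
        shutil.rmtree(str(p))
    else:
        p.unlink()

assert top.exists() and list(top.iterdir()) == []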

@@ -43,7 +43,7 @@ logger = logging.getLogger(__name__)
 class pip_show_t:
     class res_t:
         class package_info_t:
-            location: str
+            pass


 def pip_show(
@@ -83,7 +83,7 @@
     ) -> None: ...

     class kwargs_t:
-        class mode_t(enum.Enum):
+        class mode_t(enum.StrEnum):
             copy_paste = 'copy_paste'
             monkey_patch = 'monkey_patch'
             uv_pip_freeze = 'uv_pip_freeze'
@@ -101,20 +101,8 @@ class pip_resolve_t:
         entries: Optional[list[download_info_t]] = None


-def pip_resolve_entries_to_txt(
-    entries: list[pip_resolve_t.res_t.download_info_t],
-) -> str:
-    return '\n'.join(
-        [
-            '#%s\n%s %s'
-            % (
-                o.url,
-                o.constraint,
-                ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256]),
-            )
-            for o in entries
-        ]
-    )
+def pip_resolve_entries_to_txt(entries: list[pip_resolve_t.res_t.download_info_t]) -> str:
+    return '\n'.join(['#%s\n%s %s' % (o.url, o.constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256])) for o in entries])


 def pip_resolve(
@@ -140,9 +128,7 @@ def pip_resolve(
         import pip._internal.models.direct_url

         with contextlib.ExitStack() as stack:
-            stack.enter_context(
-                pip._internal.utils.temp_dir.global_tempdir_manager()
-            )
+            stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager())

             t2 = pip._internal.cli.main_parser.create_main_parser()
@@ -180,22 +166,15 @@ def pip_resolve(
             pip._internal.cli.cmdoptions.check_dist_restriction(options)
             # t1._in_main_context = True
             session = t1.get_default_session(options)
-            target_python = pip._internal.cli.cmdoptions.make_target_python(
-                options
-            )
-            finder = cast(
-                pip_resolve_t.build_package_finder_t,
-                getattr(t1, '_build_package_finder'),
-            )(
+            target_python = pip._internal.cli.cmdoptions.make_target_python(options)
+            finder = cast(pip_resolve_t.build_package_finder_t, getattr(t1, '_build_package_finder'))(
                 options=options,
                 session=session,
                 target_python=target_python,
                 ignore_requires_python=options.ignore_requires_python,
             )
-            build_tracker = t1.enter_context(
-                pip._internal.operations.build.build_tracker.get_build_tracker()
-            )
+            build_tracker = t1.enter_context(pip._internal.operations.build.build_tracker.get_build_tracker())
             reqs = t1.get_requirements(
                 [
                     #'pip', 'uv', 'ipython',
@@ -205,12 +184,8 @@ def pip_resolve(
                 finder,
                 session,
             )
-            pip._internal.req.req_install.check_legacy_setup_py_options(
-                options, reqs
-            )
-            directory = pip._internal.utils.temp_dir.TempDirectory(
-                delete=True, kind='download', globally_managed=True
-            )
+            pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs)
+            directory = pip._internal.utils.temp_dir.TempDirectory(delete=True, kind='download', globally_managed=True)
             preparer = t1.make_requirement_preparer(
                 temp_build_dir=directory,
                 options=options,
@@ -230,9 +205,7 @@ def pip_resolve(
                 py_version_info=options.python_version,
             )
             t1.trace_basic_info(finder)
-            requirement_set = resolver.resolve(
-                reqs, check_supported_wheels=True
-            )
+            requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

             res = pip_resolve_t.res_t()
@@ -306,9 +279,7 @@ def pip_resolve(
                     location,
                 )

-        batch_downloader_call_def = (
-            pip._internal.network.download.BatchDownloader.__call__
-        )
+        batch_downloader_call_def = pip._internal.network.download.BatchDownloader.__call__

         def batch_downloader_call(
             _self: pip._internal.network.download.BatchDownloader,
@@ -327,9 +298,7 @@ def pip_resolve(
             return [(o, ('/dev/null', '')) for o in links]

         # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve
-        base_resolver_resolve_def = (
-            pip._internal.resolution.resolvelib.resolver.Resolver.resolve
-        )
+        base_resolver_resolve_def = pip._internal.resolution.resolvelib.resolver.Resolver.resolve

         result_requirements: list[RequirementSet | InstallRequirement] = []
@@ -340,9 +309,7 @@ def pip_resolve(
         ) -> RequirementSet:
             # print(args, kwargs)

-            res = base_resolver_resolve_def(
-                _self, root_reqs, check_supported_wheels
-            )
+            res = base_resolver_resolve_def(_self, root_reqs, check_supported_wheels)

             result_requirements.append(res)
             raise NotImplementedError
@@ -402,13 +369,7 @@ def pip_resolve(
         patches: list[Any] = []

-        patches.append(
-            unittest.mock.patch.object(
-                pip._internal.network.download.Downloader,
-                '__call__',
-                downloader_call,
-            )
-        )
+        patches.append(unittest.mock.patch.object(pip._internal.network.download.Downloader, '__call__', downloader_call))
         # patches.append(
         #     unittest.mock.patch.object(
         #         pip._internal.network.download.BatchDownloader,
@@ -613,6 +574,4 @@ def pip_check_conflicts(
         if line.strip() != ''
     ]

-    return pip_check_conflicts_t.res_t(
-        status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates
-    )
+    return pip_check_conflicts_t.res_t(status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates)
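The flattened pip_resolve_entries_to_txt above emits pip's hash-checking requirements format: a '#<url>' comment per package, then the pinned constraint with its --hash options (values below are illustrative):

url = 'https://files.pythonhosted.org/.../example-1.0-py3-none-any.whl'
constraint = 'example==1.0'
sha256 = ['aaaa', 'bbbb']

line = '#%s\n%s %s' % (url, constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in sha256]))
print(line)
# #https://files.pythonhosted.org/.../example-1.0-py3-none-any.whl
# example==1.0 --hash=sha256:aaaa --hash=sha256:bbbb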

@@ -1,117 +0,0 @@
import pydantic
import functools

# import asgiref.sync

import inspect
import collections

from typing import (
    TypeVar,
    Callable,
    Any,
    Optional,
    Mapping,
    cast,
    Awaitable,
    overload,
)

P = TypeVar('P')
R = TypeVar('R')


@overload
def validate_params(
    view: Callable[..., Awaitable[R]],
) -> Callable[..., Awaitable[R]]: ...


@overload
def validate_params(view: Callable[..., R]) -> Callable[..., R]: ...


def validate_params(
    view: Callable[..., Awaitable[R]] | Callable[..., R],
) -> Callable[..., Awaitable[R]] | Callable[..., R]:
    class Parameter:
        kind: Any
        annotation: Any

    parameters = cast(
        Mapping[str, Parameter], inspect.signature(view).parameters
    )

    positional_parameters: collections.OrderedDict[str, type[Any]] = (
        collections.OrderedDict(
            (
                (k, v.annotation)
                for k, v in parameters.items()
                if v.kind
                in (
                    inspect.Parameter.POSITIONAL_ONLY,
                    inspect.Parameter.POSITIONAL_OR_KEYWORD,
                )
            )
        )
    )
    positional_names = list(positional_parameters)

    model = pydantic.create_model(
        getattr(view, '__name__'),
        **{
            k: v.annotation
            for k, v in parameters.items()
            if v.kind
            in (
                inspect.Parameter.POSITIONAL_OR_KEYWORD,
                inspect.Parameter.POSITIONAL_ONLY,
                inspect.Parameter.KEYWORD_ONLY,
            )
        },
        __config__=pydantic.ConfigDict(
            arbitrary_types_allowed=True,
        ),
    )

    sync_view: Optional[Callable[..., R]] = None
    async_view: Optional[Callable[..., Awaitable[R]]] = None

    def validate_params(*args: Any, **kwargs: Any) -> None:
        # data = model.model_validate(
        kwargs_to_check: dict[str, Any] = {k: v for k, v in kwargs.items()}

        for i, o in enumerate(args):
            k = positional_names[i]
            parameter = positional_parameters[k]

            assert not k in kwargs_to_check
            kwargs_to_check[k] = o

        model.model_validate(
            kwargs_to_check,
        )
        # ).dict()

    if inspect.iscoroutinefunction(view):
        async_view = cast(Callable[..., Awaitable[R]], view)

        @functools.wraps(async_view)
        async def async_wrapper(*args: Any, **kwargs: Any) -> R:
            validate_params(*args, **kwargs)
            return await async_view(*args, **kwargs)

        return async_wrapper
    else:
        sync_view = cast(Callable[..., R], view)

        @functools.wraps(sync_view)
        def wrapper(*args: Any, **kwargs: Any) -> R:
            validate_params(*args, **kwargs)
            return sync_view(*args, **kwargs)

        return wrapper
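A usage sketch for the removed decorator, assuming validate_params from above is importable; the pydantic model built from the annotations rejects a bad call before the wrapped body runs:

@validate_params
def scale(value: int, factor: float = 1.0) -> float:
    return value * factor


assert scale(2, factor=1.5) == 3.0
# scale('two', factor=1.0)  # pydantic.ValidationError: 'two' is not a valid int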

@@ -1,228 +0,0 @@
import sys
import datetime
import time
import os
import signal
import io
import json
import subprocess
import logging
import inspect
import textwrap
import optparse
import traceback

from . import logging as pr34_logging

from typing import (
    Any,
    Optional,
)

logger = logging.getLogger(__name__)


def get_info(
    sh: list[str],
    timeout: int | float,
):
    t1: list[str] = []

    for sh_index, o in enumerate(
        [
            *sh,
            *[
                r"""
                    A=$(free -h | grep -P Mem: | grep -Po '[\w\.\d]+');
                    echo -n $A | awk '{print $2, $7}';
                """,
                r"""
                    date +'%Y-%m-%d %l:%M:%S %p %Z';
                """,
            ],
        ]
    ):
        try:
            t1.append(
                subprocess.check_output(
                    o,
                    shell=True,
                    timeout=timeout,
                )
                .decode('utf-8')
                .strip()
            )
        except Exception:
            t1.append('fail %d' % sh_index)

    t3 = ' | '.join(t1).replace('\n\r', '')

    sys.stdout.write(t3)
    sys.stdout.flush()


def run(argv: list[str]):
    # logging.basicConfig(level=logging.INFO)
    pr34_logging.setup()

    assert isinstance(argv, list) and all([isinstance(o, str) for o in argv])

    class c1(optparse.IndentedHelpFormatter):
        def format_option(self, *args: Any, **kwargs: Any) -> Any:
            def f1(text: str, width: int) -> list[str]:
                # width = None
                return '\n'.join(
                    [
                        textwrap.fill('\t' + o, width, replace_whitespace=False)
                        for o in text.splitlines()
                    ]
                ).splitlines()

            t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option)
            t2 = (
                '\n'.join([o[4:] for o in t1.splitlines()[:]])
                .replace(
                    'textwrap.wrap',
                    'f1',
                )
                .replace('format_option', 'f2')
            )

            ns: dict[str, Any] = dict()
            exec(t2, dict(f1=f1), ns)
            return ns['f2'](self, *args, **kwargs)

    parser = optparse.OptionParser(
        formatter=c1(
            width=int(os.environ.get('COLUMNS', '9999999')),
        ),
    )

    def add_option(
        p: optparse.OptionParser,
        option_name: str,
        dest: str,
        default: Optional[Any] = None,
        action: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        getattr(p, 'add_option')(
            option_name,
            dest=dest,
            default=default,
            action=action,
            **kwargs,
        )

    add_option(
        parser,
        '--sh',
        dest='sh',
        default=[],
        action='append',
        type=str,
    )
    add_option(
        parser,
        '--timeout',
        dest='timeout',
        default=None,
        type=float,
    )
    add_option(
        parser,
        '--repeat_interval',
        dest='repeat_interval',
        default=None,
        type=float,
    )
    add_option(
        parser,
        '--config',
        dest='config',
        default=None,
        type=str,
        help=''.join(
            [
                '.json file with array of strings, each is a shell command ',
                'that outputs a separate status text value, ',
                'like\n',
                r"""
ping -w 1 -i 0.02 <hostname> -c 3 | tail -n 2| head -n 1 | grep -Po $'time\\s+.*$'
sensors -j | jq -r '.\"coretemp-isa-0000\".\"Package id 0\".temp1_input|tostring + \" C\"'
printf '%d RPM' $(cat /sys/devices/platform/applesmc.768/fan1_input)
printf '% 3.0f%%' $(upower -d | grep -Po 'percentage:\\s+\\d+(\\.\\d+)?%' | grep -Po '\\d+(\\.\\d+)?' | head -n 1)
                """.strip(),
            ]
        ),
    )

    options, args = parser.parse_args(argv)

    if options.timeout is None:
        options.timeout = 0.5

    timeout2 = max(options.timeout, 0.0)
    assert timeout2 >= 0.0 and timeout2 <= 4

    config: dict[str, Any] = dict()
    try:
        if not options.config is None:
            with io.open(options.config, 'r') as f:
                config.update(json.load(f))
    except Exception:
        logging.error(traceback.format_exc())
        pass

    options.sh.extend(config.get('sh', []))

    last_ts = datetime.datetime.now()

    shutdown: bool = False

    def on_signal(*args: Any, **kwargs: Any):
        nonlocal shutdown
        shutdown = True

    signal.signal(signal.SIGINT, on_signal)
    signal.signal(signal.SIGTERM, on_signal)

    while not shutdown:
        get_info(
            timeout=timeout2,
            sh=options.sh,
        )

        if not options.repeat_interval:
            break
        else:
            sys.stdout.write('\n')
            sys.stdout.flush()

            is_late = False
            new_ts = last_ts

            while True:
                now_ts = datetime.datetime.now()
                spent = (now_ts - last_ts).total_seconds()
                new_ts = last_ts + datetime.timedelta(
                    seconds=options.repeat_interval
                )
                if new_ts > now_ts:
                    if is_late:
                        last_ts = new_ts
                    break
                else:
                    last_ts = new_ts
                    is_late = True

            if spent < options.repeat_interval:
                time.sleep(options.repeat_interval - spent)


if __name__ == '__main__':
    run(sys.argv[1:])
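An invocation sketch for the removed status-line tool (the module path here is hypothetical; each --sh command contributes one segment of the ' | '-joined output, and the free-memory and date segments are always appended):

import subprocess

subprocess.check_call([
    'python3', '-m', 'online.fxreader.pr34.commands_typed.os_status',  # hypothetical path
    '--sh', "cat /proc/loadavg | awk '{print $1}'",
    '--timeout', '0.5',
    '--repeat_interval', '2',
])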

Some files were not shown because too many files have changed in this diff.