[+] merge gitea/master, regenerate requirements for py3.13

1. merge gitea/master into 25-llm-archlinux-package-manager;
  2. incorporate master deps: tomlq, pip==25.1, django, fastapi, uvicorn;
  3. add requirements.3.13.txt with version-specific mapping;
  4. remove generic requirements.txt, pyproject.toml from tracking;
  5. fix whl_cache_download to run once after compile, before install;
  6. update m.py to use pyproject.common.toml for bootstrap;
This commit is contained in:
LLM 2026-04-06 09:32:19 +00:00
commit 5e1a06a6b5
122 changed files with 6134 additions and 2286 deletions

@@ -1,3 +1,4 @@
*
.*
!d1/blank-app-nginx.conf
!docker/checks

5
.editorconfig Normal file

@@ -0,0 +1,5 @@
[**/*.py]
vim_modeline = set noet ts=2 sts=2 sw=2 ai ci
[**/meson.build]
vim_modeline = set noet ts=2 sts=2 sw=2 ai ci

3
.env.examples Normal file

@@ -0,0 +1,3 @@
NGINX_EXPORTER_PORTS=127.0.0.1:9113
CHECKS_PORTS=127.0.0.1:9097
SUBNET=172.31.0

3
.gitattributes vendored

@@ -1,3 +1,6 @@
releases/tar/** filter=lfs diff=lfs merge=lfs -text
releases/whl/** filter=lfs diff=lfs merge=lfs -text
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/*.db filter=lfs diff=lfs merge=lfs -text
python/deps/whl/** filter=lfs diff=lfs merge=lfs -text
docker/*/deps/whl/** filter=lfs diff=lfs merge=lfs -text
**/*.gpg filter=lfs diff=lfs merge=lfs -text

7
.gitignore vendored

@@ -12,7 +12,14 @@ d2/book1/books
*.tar.gz
.vscode/*
!.vscode/launch.json
!python/deps/whl/**/*.whl
python/build
python/pyproject.toml
.*.kate-swp
!releases/whl/*.whl
.env
!docker/*/.env
.envs
!docker/*/deps/whl/**
!dotfiles/.vim

@@ -66,6 +66,7 @@ python_put_archlinux_venv:
-f ../releases/whl \
online.fxreader.pr34.commands_typed.archlinux
PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34
python_whl:
for f in $(PYTHON_PROJECTS_NAMES); do \

@@ -1,5 +1,8 @@
[Unit]
Description=fxreader.online-certbot
Requires=fxreader.online-gateway
After=fxreader.online-gateway
PartOf=fxreader.online-gateway
[Service]
Type=oneshot

@@ -2,10 +2,11 @@
Description=fxreader.online-service
Requires=docker.service
After=docker.service
PartOf=docker.service
[Service]
#Type=oneshot
ExecStart=/usr/bin/docker compose up --force-recreate --remove-orphans
ExecStart=/usr/bin/docker compose up
ExecStop=/usr/bin/docker compose down
WorkingDirectory={{PROJECT_ROOT}}
StandardOutput=null

@@ -1,4 +1,6 @@
import json
import re
import socket
import os
import io
import sys
@@ -84,6 +86,7 @@ def forward(
location_body_get = lambda target_endpoint: \
r'''
proxy_set_header Host $http_host;
proxy_http_version 1.1;
proxy_set_header X-Forwarded-For $t1;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
@@ -217,6 +220,23 @@ def ssl(input_json, output_conf):
upstream_servers = []
server_names = []
ssh_proxy_download_rate = ssl_nginx['stream_server'].get(
'ssh_proxy_download_rate',
128 * 1024,
)
ssh_proxy_upload_rate = ssl_nginx['stream_server'].get(
'ssh_proxy_upload_rate',
128 * 1024,
)
web_proxy_download_rate = ssl_nginx['stream_server'].get(
'web_proxy_download_rate',
128 * 1024 * 1024,
)
web_proxy_upload_rate = ssl_nginx['stream_server'].get(
'web_proxy_upload_rate',
128 * 1024 * 1024,
)
if 'by_server_name' in ssl_nginx['stream_server']:
for k, v in ssl_nginx['stream_server']['by_server_name'].items():
upstream_servers.append(
@@ -257,6 +277,15 @@ stream {
"TLSv1.3" $upstream_server_name;
}
map $upstream_protocol $proxy_download_rate {
web {web_proxy_download_rate};
ssh {ssh_proxy_download_rate};
}
map $upstream_protocol $proxy_upload_rate {
web {web_proxy_upload_rate};
ssh {ssh_proxy_upload_rate};
}
map $ssl_preread_server_name $upstream_server_name {
default web;
{server_names}
@@ -267,7 +296,12 @@ stream {
listen 443;
ssl_preread on;
proxy_pass $upstream_protocol;
proxy_download_rate $proxy_download_rate;
proxy_upload_rate $proxy_upload_rate;
# proxy_upload_rate 10k;
}
}
'''.replace(
@@ -277,6 +311,14 @@ stream {
]),
).replace(
'{ssh_section}', ssh_section,
).replace(
'{web_proxy_download_rate}', '%d' % web_proxy_download_rate,
).replace(
'{ssh_proxy_download_rate}', '%d' % ssh_proxy_download_rate,
).replace(
'{web_proxy_upload_rate}', '%d' % web_proxy_upload_rate,
).replace(
'{ssh_proxy_upload_rate}', '%d' % ssh_proxy_upload_rate,
).replace(
'{server_names}', ''.join([
' ' + o + '\n'
@@ -290,8 +332,36 @@ stream {
if 'default_server' in ssl_nginx:
server = ssl_nginx['default_server']
if 'metrics_allowed' in server:
metrics_allowed_ip = socket.gethostbyname(server['metrics_allowed'])
else:
metrics_allowed_ip = '127.0.0.1'
servers.append(
r'''
server {
server_name _;
listen 80 default_server;
location = /_metrics {
stub_status;
access_log off;
# allow 172.0.0.0/8;
allow {metrics_allowed_ip};
# allow 127.0.0.1;
deny all;
}
location ~ ^/.well-known/acme-challenge/ {
alias /var/www/;
try_files $uri =404;
}
location ~ {
deny all;
}
}
server {
set $t1 $remote_addr;
if ($http_x_forwarded_for)
@@ -317,14 +387,57 @@ server {
'{domain_key}', server['domain_key'],
).replace(
'{ssl_port}', '%d' % ssl_port,
).replace(
'{metrics_allowed_ip}', metrics_allowed_ip
)
)
for server in ssl_nginx['servers']:
location_proxy_app = r'''
location ^~ / {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection $connection_upgrade;
proxy_redirect off;
proxy_buffering off;
proxy_http_version 1.1;
proxy_pass http://app:80;
}
'''
location_forward_ssl = r'''
location ~ {
#return 444;
return 301 https://$host$request_uri;
}
'''
if server.get('allow_http') in [True]:
http_location = location_proxy_app
else:
http_location = location_forward_ssl
drop_by_user_agent = ''
if not server.get('drop_by_user_agent') is None:
r = re.compile('^([a-zA-Z0-9\s\.\,\(\)]+)$')
user_agent_list = [
r.match(o)[1]
for o in server.get('drop_by_user_agent')
]
drop_by_user_agent = r'''
if ( $http_user_agent ~ ({user_agent_list}) ) {
return 444;
}
'''.replace(
'{user_agent_list}',
'|'.join(user_agent_list)
)
servers.append(
r'''
server {
set $t1 $remote_addr;
if ($http_x_forwarded_for)
@@ -341,14 +454,12 @@ server {
try_files $uri =404;
}
location ~ {
#return 444;
return 301 https://$host$request_uri;
}
{http_location}
}
server {
set $t1 $remote_addr;
if ($http_x_forwarded_for)
{
set $t1 $http_x_forwarded_for;
@@ -362,6 +473,8 @@ server {
ssl_certificate {signed_chain_cert};
ssl_certificate_key {domain_key};
{drop_by_user_agent}
location ^~ / {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -370,6 +483,7 @@ server {
proxy_set_header Connection $connection_upgrade;
proxy_redirect off;
proxy_buffering off;
proxy_http_version 1.1;
proxy_pass http://app:80;
}
}
@@ -381,8 +495,12 @@ server {
'{client_max_body_size}', server['client_max_body_size'],
).replace(
'{domain_key}', server['domain_key'],
).replace(
'{drop_by_user_agent}', drop_by_user_agent,
).replace(
'{ssl_port}', '%d' % ssl_port,
).replace(
'{http_location}', http_location
)
)

@@ -1 +1 @@
Subproject commit adef10a8c41f5c550622879370a40f8a9e545574
Subproject commit 4c187fc7dd17c52fb8e4f992d3985eb609eefe6a

@@ -1 +1 @@
Subproject commit 3c691ef68d8899edf328d5b06135c0d3b02e7940
Subproject commit f2366f328fb8129fa6ae26d00b421025d2f090c7

@@ -8,6 +8,24 @@ services:
- ./d1/:/app/d1/:ro
- ./tmp/cache/:/app/tmp/cache/:ro
restart: on-failure
networks:
network:
ipv4_address: ${SUBNET}.2
nginx-exporter:
image: docker.io/nginx/nginx-prometheus-exporter@sha256:6edfb73afd11f2d83ea4e8007f5068c3ffaa38078a6b0ad1339e5bd2f637aacd
#profiles:
# - podman
#env_file:
# .envs/nginx-exporter.env
environment:
SCRAPE_URI: http://ssl-app:80/_metrics
# LISTEN_ADDRESS: 0.0.0.0:9113
ports:
- ${NGINX_EXPORTER_PORTS:-"127.0.0.1:9113"}:9113
networks:
network:
ssl-app:
build:
context: .
@@ -17,8 +35,30 @@ services:
- ./tmp/d1/:/app/tmp/d1/:ro
- ./tmp/d1/letsencrypt:/etc/letsencrypt:rw
restart: on-failure
ports:
- ${SSL_APP_PORTS:-"127.0.0.1:443"}:444
- ${APP_PORTS:-"127.0.0.1:80"}:80
networks:
network:
checks:
build:
context: ./docker/checks
dockerfile: ./Dockerfile
init: true
env_file:
.envs/checks.patched.env
command:
- python3
- -m
- online.fxreader.pr34.commands_typed.async_api.fastapi
ports:
- ${CHECKS_PORTS:-"127.0.0.1:80"}:80
networks:
network:
cpanel:
image: online.fxreader.pr34.cpanel:dev
build:
context: .
dockerfile: ./docker/cpanel/Dockerfile
@@ -28,6 +68,9 @@ services:
- ./d1/:/app/d1:ro
- ./tmp/d1/:/app/tmp/d1/:ro
restart: on-failure
networks:
network:
dynu:
build:
context: .
@@ -40,6 +83,8 @@ services:
restart: on-failure
# links:
# - ngrok
networks:
network:
ngrok:
image: wernight/ngrok
#links:
@@ -50,6 +95,8 @@ services:
volumes:
- ./tmp/cache/ngrok.yml:/home/ngrok/.ngrok2/ngrok.yml:ro
restart: on-failure
networks:
network:
#forward:
# build:
# context: .
@@ -58,3 +105,13 @@ services:
# - ./d1/forward.py:/app/d1/forward.py:ro
# - ./tmp/cache/forward_data:/app/tmp/cache/forward_data:ro
# restart: always
networks:
network:
driver: bridge
# driver_opts:
# com.docker.network.bridge.name: br-mynet # stable bridge name (optional)
ipam:
config:
- subnet: ${SUBNET}.0/24
gateway: "${SUBNET}.1"
ip_range: "${SUBNET}.128/25" # optional: pool for containers

5
docker/checks/.env Normal file

@@ -0,0 +1,5 @@
# UVICORN_HOST=127.0.0.1
# UVICORN_PORT=80
# HTTP_AUTH_USERNAME=test
# HTTP_AUTH_PASSWORD=blah
APPS='["rest:get_router:"]'

26
docker/checks/Dockerfile Normal file

@@ -0,0 +1,26 @@
FROM alpine@sha256:56fa17d2a7e7f168a043a2712e63aed1f8543aeafdcee47c58dcffe38ed51099
RUN apk add --no-cache python3 py3-pip
RUN \
--mount=type=cache,target=/root/.cache/pip \
pip install \
--break-system-packages \
uv
WORKDIR /app
COPY requirements.txt requirements.txt
RUN \
--mount=type=cache,target=/root/.cache/pip \
--mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=./deps/whl,target=/app/deps/whl \
uv pip install \
-f deps/whl \
-r requirements.txt \
--break-system-packages --system
COPY ./rest.py ./rest.py
# CMD ["python3", "rest.py"]

28
docker/checks/Makefile Normal file

@@ -0,0 +1,28 @@
venv_compile:
uv pip compile \
-p 3.12 \
--generate-hashes \
-f deps/whl \
requirements.in > \
requirements.txt
venv:
uv venv -p 3.12 .venv
uv pip install \
-p .venv/bin/python3 \
-f deps/whl \
-r requirements.txt
PYRIGHT_CMD ?= --threads 3
pyright:
.venv/bin/python3 \
-m pyright \
--pythonpath .venv/bin/python3 \
-p pyproject.toml \
$(PYRIGHT_CMD) \
.
RUFF_CMD ?= format
ruff:
.venv/bin/python3 -m ruff --config pyproject.toml $(RUFF_CMD) .

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@@ -1,5 +1,5 @@
[project]
description = 'set of tools for software development'
description = 'checks service'
requires-python = '>= 3.10'
maintainers = [
{ name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' },
@@ -8,81 +8,18 @@ classifiers = [
'Programming Language :: Python',
]
name = "online.fxreader.pr34"
# version = '0.1.5.16+27.7'
dynamic = [
'version',
]
dependencies = [
#"-r requirements.txt",
'mypy',
'marisa-trie',
'pydantic',
'pydantic-settings',
'tomlkit',
'pip==23.3.2',
]
[project.optional-dependencies]
crypto = [
'cryptography',
]
early = [
'numpy',
'cryptography',
'yq',
'toml-cli',
'ninja',
'patchelf',
# 'tomlkit',
]
archlinux = [
'solv==0.7.35',
]
lint = [
'tomli',
# 'tomllib',
'mypy',
'pyright',
'pyrefly',
'ruff',
# 'tomlkit',
]
[project.scripts]
online-fxreader-pr34-commands = 'online.fxreader.pr34.commands:commands_cli'
[tool.online-fxreader-pr34]
early_features = ["default", "early", "lint"]
modules = [
{ name = 'online.fxreader.pr34', tool = { 'online-fxreader-pr34' = { early_features = ['default', 'early', 'lint'] } } },
]
[build-system]
requires = ["meson-python", "pybind11"]
build-backend = "mesonpy"
name = 'online.fxreader.pr34.checks'
[tool.ruff]
line-length = 160
target-version = 'py310'
# builtins = ['_', 'I', 'P']
include = [
# 'follow_the_leader/**/*.py',
#'*.py',
# '*.recipe',
'*.py',
'online/**/*.py',
'online/**/*.pyi',
'*/**/*.py',
'*/**/*.pyi',
]
exclude = [
'.venv',
'online/fxreader/pr34/commands_typed/archlinux',
]
[tool.ruff.format]
@@ -113,9 +50,6 @@ select = ['E', 'F', 'I', 'W', 'INT']
[tool.ruff.lint.isort]
detect-same-package = true
# extra-standard-library = ["aes", "elementmaker", "encodings"]
# known-first-party = ["calibre_extensions", "calibre_plugins", "polyglot"]
# known-third-party = ["odf", "qt", "templite", "tinycss", "css_selectors"]
relative-imports-order = "closest-to-furthest"
split-on-trailing-comma = true
section-order = [
@@ -133,21 +67,11 @@ enabled = false
[tool.pyright]
include = [
#'../../../../../follow_the_leader/views2/payments.py',
#'../../../../../follow_the_leader/logic/payments.py',
#'../../../../../follow_the_leader/logic/paypal.py',
'online/fxreader/pr34/commands_typed/**/*.py',
'*/**/*.py',
]
# stubPath = '../mypy-stubs'
extraPaths = [
'.',
'../mypy-stubs',
'../mypy-stubs/types-debugpy',
'../mypy-stubs/types-solv',
'../mypy-stubs/marisa-trie-types',
# '../../../../../',
]
#strict = ["src"]
analyzeUnannotatedFunctions = true
disableBytesTypePromotions = true
@@ -247,18 +171,3 @@ reportUninitializedInstanceVariable = "none"
reportUnnecessaryTypeIgnoreComment = "none"
reportUnusedCallResult = "none"
[tool.pyrefly]
project-includes = [
'online/fxreader/pr34/commands_typed/**/*.py',
]
project-excludes = [
'.venv',
'online/fxreader/pr34/commands_typed/archlinux',
]
search-path = [
'.',
'../mypy-stubs/types-debugpy',
'../mypy-stubs/types-solv',
'../mypy-stubs/marisa-trie-types',
]
python-version = '3.13'

@@ -0,0 +1,4 @@
online.fxreader.pr34[django,fastapi,lint]>=0.1.5.24
fastapi
uvicorn
numpy

@@ -0,0 +1,582 @@
# This file was autogenerated by uv via the following command:
# uv pip compile -p 3.12 --generate-hashes requirements.in
annotated-types==0.7.0 \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
--hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
# via pydantic
anyio==4.10.0 \
--hash=sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6 \
--hash=sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1
# via starlette
argcomplete==3.6.2 \
--hash=sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591 \
--hash=sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf
# via yq
asgiref==3.9.1 \
--hash=sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142 \
--hash=sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c
# via django
click==8.2.1 \
--hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \
--hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b
# via uvicorn
django==5.2.5 \
--hash=sha256:0745b25681b129a77aae3d4f6549b62d3913d74407831abaa0d9021a03954bae \
--hash=sha256:2b2ada0ee8a5ff743a40e2b9820d1f8e24c11bac9ae6469cd548f0057ea6ddcd
# via
# django-stubs
# django-stubs-ext
# online-fxreader-pr34
django-stubs==5.2.2 \
--hash=sha256:2a04b510c7a812f88223fd7e6d87fb4ea98717f19c8e5c8b59691d83ad40a8a6 \
--hash=sha256:79bd0fdbc78958a8f63e0b062bd9d03f1de539664476c0be62ade5f063c9e41e
# via online-fxreader-pr34
django-stubs-ext==5.2.2 \
--hash=sha256:8833bbe32405a2a0ce168d3f75a87168f61bd16939caf0e8bf173bccbd8a44c5 \
--hash=sha256:d9d151b919fe2438760f5bd938f03e1cb08c84d0651f9e5917f1313907e42683
# via django-stubs
fastapi==0.116.1 \
--hash=sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565 \
--hash=sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143
# via
# -r requirements.in
# online-fxreader-pr34
h11==0.16.0 \
--hash=sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1 \
--hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86
# via uvicorn
idna==3.10 \
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
# via anyio
marisa-trie==1.3.1 \
--hash=sha256:076731f79f8603cb3216cb6e5bbbc56536c89f63f175ad47014219ecb01e5996 \
--hash=sha256:0b9816ab993001a7854b02a7daec228892f35bd5ab0ac493bacbd1b80baec9f1 \
--hash=sha256:0c2bc6bee737f4d47fce48c5b03a7bd3214ef2d83eb5c9f84210091370a5f195 \
--hash=sha256:0dcd42774e367ceb423c211a4fc8e7ce586acfaf0929c9c06d98002112075239 \
--hash=sha256:0e6f3b45def6ff23e254eeaa9079267004f0069d0a34eba30a620780caa4f2cb \
--hash=sha256:137010598d8cebc53dbfb7caf59bde96c33a6af555e3e1bdbf30269b6a157e1e \
--hash=sha256:2f7c10f69cbc3e6c7d715ec9cb0c270182ea2496063bebeda873f4aa83fd9910 \
--hash=sha256:3715d779561699471edde70975e07b1de7dddb2816735d40ed16be4b32054188 \
--hash=sha256:3834304fdeaa1c9b73596ad5a6c01a44fc19c13c115194704b85f7fbdf0a7b8e \
--hash=sha256:389721481c14a92fa042e4b91ae065bff13e2bc567c85a10aa9d9de80aaa8622 \
--hash=sha256:3a96ef3e461ecc85ec7d2233ddc449ff5a3fbdc520caea752bc5bc8faa975231 \
--hash=sha256:3e2a0e1be95237981bd375a388f44b33d69ea5669a2f79fea038e45fff326595 \
--hash=sha256:3e431f9c80ee1850b2a406770acf52c058b97a27968a0ed6aca45c2614d64c9f \
--hash=sha256:47631614c5243ed7d15ae0af8245fcc0599f5b7921fae2a4ae992afb27c9afbb \
--hash=sha256:52d1764906befef91886e3bff374d8090c9716822bd56b70e07aa697188090b7 \
--hash=sha256:5370f9ef6c008e502537cc1ff518c80ddf749367ce90179efa0e7f6275903a76 \
--hash=sha256:56043cf908ddf3d7364498085dbc2855d4ea8969aff3bf2439a79482a79e68e2 \
--hash=sha256:5a6abc9573a6a45d09548fde136dbcd4260b8c56f8dff443eaa565352d7cca59 \
--hash=sha256:5b7c1e7fa6c3b855e8cfbabf38454d7decbaba1c567d0cd58880d033c6b363bd \
--hash=sha256:5ef045f694ef66079b4e00c4c9063a00183d6af7d1ff643de6ea5c3b0d9af01b \
--hash=sha256:68678816818efcd4a1787b557af81f215b989ec88680a86c85c34c914d413690 \
--hash=sha256:6cac19952e0e258ded765737d1fb11704fe81bf4f27526638a5d44496f329235 \
--hash=sha256:70b4c96f9119cfeb4dc6a0cf4afc9f92f0b002cde225bcd910915d976c78e66a \
--hash=sha256:7e957aa4251a8e70b9fe02a16b2d190f18787902da563cb7ba865508b8e8fb04 \
--hash=sha256:82de2de90488d0fbbf74cf9f20e1afd62e320693b88f5e9565fc80b28f5bbad3 \
--hash=sha256:83a3748088d117a9b15d8981c947df9e4f56eb2e4b5456ae34fe1f83666c9185 \
--hash=sha256:83efc045fc58ca04c91a96c9b894d8a19ac6553677a76f96df01ff9f0405f53d \
--hash=sha256:8c8b2386d2d22c57880ed20a913ceca86363765623175671137484a7d223f07a \
--hash=sha256:8f81344d212cb41992340b0b8a67e375f44da90590b884204fd3fa5e02107df2 \
--hash=sha256:954fef9185f8a79441b4e433695116636bf66402945cfee404f8983bafa59788 \
--hash=sha256:9651daa1fdc471df5a5fa6a4833d3b01e76ac512eea141a5995681aebac5555f \
--hash=sha256:9688c7b45f744366a4ef661e399f24636ebe440d315ab35d768676c59c613186 \
--hash=sha256:97107fd12f30e4f8fea97790343a2d2d9a79d93697fe14e1b6f6363c984ff85b \
--hash=sha256:9868b7a8e0f648d09ffe25ac29511e6e208cc5fb0d156c295385f9d5dc2a138e \
--hash=sha256:986eaf35a7f63c878280609ecd37edf8a074f7601c199acfec81d03f1ee9a39a \
--hash=sha256:99a00cab4cf9643a87977c87a5c8961aa44fff8d5dd46e00250135f686e7dedf \
--hash=sha256:9c56001badaf1779afae5c24b7ab85938644ab8ef3c5fd438ab5d49621b84482 \
--hash=sha256:9dc61fb8f8993589544f6df268229c6cf0a56ad4ed3e8585a9cd23c5ad79527b \
--hash=sha256:9de573d933db4753a50af891bcb3ffbfe14e200406214c223aa5dfe2163f316d \
--hash=sha256:9e467e13971c64db6aed8afe4c2a131c3f73f048bec3f788a6141216acda598d \
--hash=sha256:9e6496bbad3068e3bbbb934b1e1307bf1a9cb4609f9ec47b57e8ea37f1b5ee40 \
--hash=sha256:9f92d3577c72d5a97af5c8e3d98247b79c8ccfb64ebf611311dcf631b11e5604 \
--hash=sha256:a1c6990961d1177f6d8fdf7b610fa2e7c0c02743a090d173f6dfa9dc9231c73c \
--hash=sha256:a5a0a58ffe2a7eb3f870214c6df8f9a43ce768bd8fed883e6ba8c77645666b63 \
--hash=sha256:a7416f1a084eb889c5792c57317875aeaa86abfe0bdc6f167712cebcec1d36ee \
--hash=sha256:a83f5f7ae3494e0cc25211296252b1b86901c788ed82c83adda19d0c98f828d6 \
--hash=sha256:a850b151bd1e3a5d9afef113adc22727d696603659d575d7e84f994bd8d04bf1 \
--hash=sha256:ad82ab8a58562cf69e6b786debcc7638b28df12f9f1c7bcffb07efb5c1f09cbd \
--hash=sha256:b173ec46d521308f7c97d96d6e05cf2088e0548f82544ec9a8656af65593304d \
--hash=sha256:bf9f2b97fcfd5e2dbb0090d0664023872dcde990df0b545eca8d0ce95795a409 \
--hash=sha256:c12b44c190deb0d67655021da1f2d0a7d61a257bf844101cf982e68ed344f28d \
--hash=sha256:c6571462417cda2239b1ade86ceaf3852da9b52c6286046e87d404afc6da20a7 \
--hash=sha256:c785fd6dae9daa6825734b7b494cdac972f958be1f9cb3fb1f32be8598d2b936 \
--hash=sha256:c7a33506d0451112911c69f38d55da3e0e050f2be0ea4e5176865cf03baf26a9 \
--hash=sha256:c89df75aefe1ad7e613340790130f1badc5926bcfa66a6b3c9471071002956a5 \
--hash=sha256:ca644534f15f85bba14c412afc17de07531e79a766ce85b8dbf3f8b6e7758f20 \
--hash=sha256:cbd28f95d5f30d9a7af6130869568e75bfd7ef2e0adfb1480f1f44480f5d3603 \
--hash=sha256:d0f87bdf660f01e88ab3a507955697b2e3284065afa0b94fc9e77d6ad153ed5e \
--hash=sha256:d4bd41a6e73c0d0adafe4de449b6d35530a4ce6a836a6ee839baf117785ecfd7 \
--hash=sha256:d8d5e686db0ae758837ed29b3b742afb994d1a01ce10977eabd3490f16b5c9f9 \
--hash=sha256:e5888b269e790356ce4525f3e8df1fe866d1497b7d7fb7548cfec883cb985288 \
--hash=sha256:ec633e108f277f2b7f4671d933a909f39bba549910bf103e2940b87a14da2783 \
--hash=sha256:ecdb19d33b26738a32602ef432b06cc6deeca4b498ce67ba8e5e39c8a7c19745 \
--hash=sha256:ee428575377e29c636f2b4b3b0488875dcea310c6c5b3412ec4ef997f7bb37cc \
--hash=sha256:f4bae4f920f2a1082eaf766c1883df7da84abdf333bafa15b8717c10416a615e
# via online-fxreader-pr34
mypy==1.17.1 \
--hash=sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341 \
--hash=sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5 \
--hash=sha256:099b9a5da47de9e2cb5165e581f158e854d9e19d2e96b6698c0d64de911dd849 \
--hash=sha256:15a83369400454c41ed3a118e0cc58bd8123921a602f385cb6d6ea5df050c733 \
--hash=sha256:15d54056f7fe7a826d897789f53dd6377ec2ea8ba6f776dc83c2902b899fee81 \
--hash=sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403 \
--hash=sha256:209a58fed9987eccc20f2ca94afe7257a8f46eb5df1fb69958650973230f91e6 \
--hash=sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01 \
--hash=sha256:397fba5d7616a5bc60b45c7ed204717eaddc38f826e3645402c426057ead9a91 \
--hash=sha256:3fbe6d5555bf608c47203baa3e72dbc6ec9965b3d7c318aa9a4ca76f465bd972 \
--hash=sha256:43808d9476c36b927fbcd0b0255ce75efe1b68a080154a38ae68a7e62de8f0f8 \
--hash=sha256:55b918670f692fc9fba55c3298d8a3beae295c5cded0a55dccdc5bbead814acd \
--hash=sha256:5d1092694f166a7e56c805caaf794e0585cabdbf1df36911c414e4e9abb62ae9 \
--hash=sha256:62761474061feef6f720149d7ba876122007ddc64adff5ba6f374fda35a018a0 \
--hash=sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19 \
--hash=sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb \
--hash=sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd \
--hash=sha256:79d44f9bfb004941ebb0abe8eff6504223a9c1ac51ef967d1263c6572bbebc99 \
--hash=sha256:80ef5c058b7bce08c83cac668158cb7edea692e458d21098c7d3bce35a5d43e7 \
--hash=sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056 \
--hash=sha256:93378d3203a5c0800c6b6d850ad2f19f7a3cdf1a3701d3416dbf128805c6a6a7 \
--hash=sha256:9a2b7d9180aed171f033c9f2fc6c204c1245cf60b0cb61cf2e7acc24eea78e0a \
--hash=sha256:9d6b20b97d373f41617bd0708fd46aa656059af57f2ef72aa8c7d6a2b73b74ed \
--hash=sha256:a76906f26bd8d51ea9504966a9c25419f2e668f012e0bdf3da4ea1526c534d94 \
--hash=sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9 \
--hash=sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58 \
--hash=sha256:b01586eed696ec905e61bd2568f48740f7ac4a45b3a468e6423a03d3788a51a8 \
--hash=sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5 \
--hash=sha256:c49562d3d908fd49ed0938e5423daed8d407774a479b595b143a3d7f87cdae6a \
--hash=sha256:c4a580f8a70c69e4a75587bd925d298434057fe2a428faaf927ffe6e4b9a98df \
--hash=sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb \
--hash=sha256:d7598cf74c3e16539d4e2f0b8d8c318e00041553d83d4861f87c7a72e95ac24d \
--hash=sha256:dd86bb649299f09d987a2eebb4d52d10603224500792e1bee18303bbcc1ce390 \
--hash=sha256:e79311f2d904ccb59787477b7bd5d26f3347789c06fcd7656fa500875290264b \
--hash=sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b \
--hash=sha256:fa6ffadfbe6994d724c5a1bb6123a7d27dd68fc9c059561cd33b664a79578e14 \
--hash=sha256:feb8cc32d319edd5859da2cc084493b3e2ce5e49a946377663cc90f6c15fb259 \
--hash=sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b
# via online-fxreader-pr34
mypy-extensions==1.1.0 \
--hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505 \
--hash=sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558
# via mypy
nodeenv==1.9.1 \
--hash=sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f \
--hash=sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9
# via pyright
numpy==2.3.2 \
--hash=sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5 \
--hash=sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b \
--hash=sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631 \
--hash=sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58 \
--hash=sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b \
--hash=sha256:103ea7063fa624af04a791c39f97070bf93b96d7af7eb23530cd087dc8dbe9dc \
--hash=sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089 \
--hash=sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf \
--hash=sha256:14a91ebac98813a49bc6aa1a0dfc09513dcec1d97eaf31ca21a87221a1cdcb15 \
--hash=sha256:1f91e5c028504660d606340a084db4b216567ded1056ea2b4be4f9d10b67197f \
--hash=sha256:20b8200721840f5621b7bd03f8dcd78de33ec522fc40dc2641aa09537df010c3 \
--hash=sha256:240259d6564f1c65424bcd10f435145a7644a65a6811cfc3201c4a429ba79170 \
--hash=sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910 \
--hash=sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91 \
--hash=sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45 \
--hash=sha256:2c3271cc4097beb5a60f010bcc1cc204b300bb3eafb4399376418a83a1c6373c \
--hash=sha256:2f4f0215edb189048a3c03bd5b19345bdfa7b45a7a6f72ae5945d2a28272727f \
--hash=sha256:3dcf02866b977a38ba3ec10215220609ab9667378a9e2150615673f3ffd6c73b \
--hash=sha256:4209f874d45f921bde2cff1ffcd8a3695f545ad2ffbef6d3d3c6768162efab89 \
--hash=sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a \
--hash=sha256:4ae6863868aaee2f57503c7a5052b3a2807cf7a3914475e637a0ecd366ced220 \
--hash=sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e \
--hash=sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab \
--hash=sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2 \
--hash=sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b \
--hash=sha256:572d5512df5470f50ada8d1972c5f1082d9a0b7aa5944db8084077570cf98370 \
--hash=sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2 \
--hash=sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee \
--hash=sha256:6936aff90dda378c09bea075af0d9c675fe3a977a9d2402f95a87f440f59f619 \
--hash=sha256:69779198d9caee6e547adb933941ed7520f896fd9656834c300bdf4dd8642712 \
--hash=sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1 \
--hash=sha256:71669b5daae692189540cffc4c439468d35a3f84f0c88b078ecd94337f6cb0ec \
--hash=sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a \
--hash=sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450 \
--hash=sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a \
--hash=sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2 \
--hash=sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168 \
--hash=sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2 \
--hash=sha256:8145dd6d10df13c559d1e4314df29695613575183fa2e2d11fac4c208c8a1f73 \
--hash=sha256:8446acd11fe3dc1830568c941d44449fd5cb83068e5c70bd5a470d323d448296 \
--hash=sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9 \
--hash=sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125 \
--hash=sha256:8b1224a734cd509f70816455c3cffe13a4f599b1bf7130f913ba0e2c0b2006c0 \
--hash=sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19 \
--hash=sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b \
--hash=sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f \
--hash=sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2 \
--hash=sha256:9e196ade2400c0c737d93465327d1ae7c06c7cb8a1756121ebf54b06ca183c7f \
--hash=sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a \
--hash=sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6 \
--hash=sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286 \
--hash=sha256:aa098a5ab53fa407fded5870865c6275a5cd4101cfdef8d6fafc48286a96e981 \
--hash=sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f \
--hash=sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2 \
--hash=sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0 \
--hash=sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b \
--hash=sha256:bc3186bea41fae9d8e90c2b4fb5f0a1f5a690682da79b92574d63f56b529080b \
--hash=sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56 \
--hash=sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5 \
--hash=sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3 \
--hash=sha256:cbc95b3813920145032412f7e33d12080f11dc776262df1712e1638207dde9e8 \
--hash=sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0 \
--hash=sha256:d95f59afe7f808c103be692175008bab926b59309ade3e6d25009e9a171f7036 \
--hash=sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6 \
--hash=sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8 \
--hash=sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48 \
--hash=sha256:ee807923782faaf60d0d7331f5e86da7d5e3079e28b291973c545476c2b00d07 \
--hash=sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b \
--hash=sha256:f0a1a8476ad77a228e41619af2fa9505cf69df928e9aaa165746584ea17fed2b \
--hash=sha256:f75018be4980a7324edc5930fe39aa391d5734531b1926968605416ff58c332d \
--hash=sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0 \
--hash=sha256:fb1752a3bb9a3ad2d6b090b88a9a0ae1cd6f004ef95f75825e2f382c183b2097 \
--hash=sha256:fc927d7f289d14f5e037be917539620603294454130b6de200091e23d27dc9be \
--hash=sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5
# via -r requirements.in
online-fxreader-pr34==0.1.5.27 \
--hash=sha256:d081758fdb91fb460da5c55d3a38257122de096363a8d956ba2f91a234566010
# via -r requirements.in
pathspec==0.12.1 \
--hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \
--hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
# via mypy
pip==25.1 \
--hash=sha256:13b4aa0aaad055020a11bec8a1c2a70a2b2d080e12d89b962266029fff0a16ba \
--hash=sha256:272bdd1289f80165e9070a4f881e8f9e1001bbb50378561d1af20e49bf5a2200
# via online-fxreader-pr34
pydantic==2.11.7 \
--hash=sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db \
--hash=sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b
# via
# fastapi
# online-fxreader-pr34
# pydantic-settings
pydantic-core==2.33.2 \
--hash=sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d \
--hash=sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac \
--hash=sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02 \
--hash=sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56 \
--hash=sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4 \
--hash=sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22 \
--hash=sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef \
--hash=sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec \
--hash=sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d \
--hash=sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b \
--hash=sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a \
--hash=sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f \
--hash=sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052 \
--hash=sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab \
--hash=sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916 \
--hash=sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c \
--hash=sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf \
--hash=sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27 \
--hash=sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a \
--hash=sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8 \
--hash=sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7 \
--hash=sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612 \
--hash=sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1 \
--hash=sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039 \
--hash=sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca \
--hash=sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7 \
--hash=sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a \
--hash=sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6 \
--hash=sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782 \
--hash=sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b \
--hash=sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7 \
--hash=sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025 \
--hash=sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849 \
--hash=sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7 \
--hash=sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b \
--hash=sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa \
--hash=sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e \
--hash=sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea \
--hash=sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac \
--hash=sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51 \
--hash=sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e \
--hash=sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162 \
--hash=sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65 \
--hash=sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2 \
--hash=sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954 \
--hash=sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b \
--hash=sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de \
--hash=sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc \
--hash=sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64 \
--hash=sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb \
--hash=sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9 \
--hash=sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101 \
--hash=sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d \
--hash=sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef \
--hash=sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3 \
--hash=sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1 \
--hash=sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5 \
--hash=sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88 \
--hash=sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d \
--hash=sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290 \
--hash=sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e \
--hash=sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d \
--hash=sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808 \
--hash=sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc \
--hash=sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d \
--hash=sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc \
--hash=sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e \
--hash=sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640 \
--hash=sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30 \
--hash=sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e \
--hash=sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9 \
--hash=sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a \
--hash=sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9 \
--hash=sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f \
--hash=sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb \
--hash=sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5 \
--hash=sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab \
--hash=sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d \
--hash=sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572 \
--hash=sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593 \
--hash=sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29 \
--hash=sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535 \
--hash=sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1 \
--hash=sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f \
--hash=sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8 \
--hash=sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf \
--hash=sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246 \
--hash=sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9 \
--hash=sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011 \
--hash=sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9 \
--hash=sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a \
--hash=sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3 \
--hash=sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6 \
--hash=sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8 \
--hash=sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a \
--hash=sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2 \
--hash=sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c \
--hash=sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6 \
--hash=sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d
# via pydantic
pydantic-settings==2.10.1 \
--hash=sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee \
--hash=sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796
# via online-fxreader-pr34
pyright==1.1.404 \
--hash=sha256:455e881a558ca6be9ecca0b30ce08aa78343ecc031d37a198ffa9a7a1abeb63e \
--hash=sha256:c7b7ff1fdb7219c643079e4c3e7d4125f0dafcc19d253b47e898d130ea426419
# via online-fxreader-pr34
python-dotenv==1.1.1 \
--hash=sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc \
--hash=sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab
# via pydantic-settings
pyyaml==6.0.2 \
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
--hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \
--hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \
--hash=sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e \
--hash=sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133 \
--hash=sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5 \
--hash=sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484 \
--hash=sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee \
--hash=sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5 \
--hash=sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68 \
--hash=sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a \
--hash=sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf \
--hash=sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99 \
--hash=sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8 \
--hash=sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85 \
--hash=sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19 \
--hash=sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc \
--hash=sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a \
--hash=sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1 \
--hash=sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317 \
--hash=sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c \
--hash=sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631 \
--hash=sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d \
--hash=sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652 \
--hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5 \
--hash=sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e \
--hash=sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b \
--hash=sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8 \
--hash=sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476 \
--hash=sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706 \
--hash=sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563 \
--hash=sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237 \
--hash=sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b \
--hash=sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083 \
--hash=sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180 \
--hash=sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425 \
--hash=sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e \
--hash=sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f \
--hash=sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725 \
--hash=sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183 \
--hash=sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab \
--hash=sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774 \
--hash=sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725 \
--hash=sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e \
--hash=sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5 \
--hash=sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d \
--hash=sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290 \
--hash=sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44 \
--hash=sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed \
--hash=sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4 \
--hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
# via yq
ruff==0.12.11 \
--hash=sha256:0d737b4059d66295c3ea5720e6efc152623bb83fde5444209b69cd33a53e2000 \
--hash=sha256:411954eca8464595077a93e580e2918d0a01a19317af0a72132283e28ae21bee \
--hash=sha256:4d1df0098124006f6a66ecf3581a7f7e754c4df7644b2e6704cd7ca80ff95211 \
--hash=sha256:4dc75533039d0ed04cd33fb8ca9ac9620b99672fe7ff1533b6402206901c34ee \
--hash=sha256:4fc58f9266d62c6eccc75261a665f26b4ef64840887fc6cbc552ce5b29f96cc8 \
--hash=sha256:5a0113bd6eafd545146440225fe60b4e9489f59eb5f5f107acd715ba5f0b3d2f \
--hash=sha256:5a8dd5f230efc99a24ace3b77e3555d3fbc0343aeed3fc84c8d89e75ab2ff793 \
--hash=sha256:6a2c0a2e1a450f387bf2c6237c727dd22191ae8c00e448e0672d624b2bbd7fb0 \
--hash=sha256:8ca4c3a7f937725fd2413c0e884b5248a19369ab9bdd850b5781348ba283f644 \
--hash=sha256:916fc5defee32dbc1fc1650b576a8fed68f5e8256e2180d4d9855aea43d6aab2 \
--hash=sha256:93fce71e1cac3a8bf9200e63a38ac5c078f3b6baebffb74ba5274fb2ab276065 \
--hash=sha256:a3283325960307915b6deb3576b96919ee89432ebd9c48771ca12ee8afe4a0fd \
--hash=sha256:b8e33ac7b28c772440afa80cebb972ffd823621ded90404f29e5ab6d1e2d4b93 \
--hash=sha256:bae4d6e6a2676f8fb0f98b74594a048bae1b944aab17e9f5d504062303c6dbea \
--hash=sha256:c6b09ae8426a65bbee5425b9d0b82796dbb07cb1af045743c79bfb163001165d \
--hash=sha256:c792e8f597c9c756e9bcd4d87cf407a00b60af77078c96f7b6366ea2ce9ba9d3 \
--hash=sha256:c984f07d7adb42d3ded5be894fb4007f30f82c87559438b4879fe7aa08c62b39 \
--hash=sha256:d69fb9d4937aa19adb2e9f058bc4fbfe986c2040acb1a4a9747734834eaa0bfd \
--hash=sha256:e07fbb89f2e9249f219d88331c833860489b49cdf4b032b8e4432e9b13e8a4b9
# via online-fxreader-pr34
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via anyio
sqlparse==0.5.3 \
--hash=sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272 \
--hash=sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca
# via django
starlette==0.47.3 \
--hash=sha256:6bc94f839cc176c4858894f1f8908f0ab79dfec1a6b8402f6da9be26ebea52e9 \
--hash=sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51
# via fastapi
tomli==2.2.1 \
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \
--hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \
--hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \
--hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \
--hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \
--hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \
--hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \
--hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \
--hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \
--hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \
--hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \
--hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \
--hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \
--hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \
--hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \
--hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \
--hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \
--hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \
--hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \
--hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \
--hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \
--hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \
--hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \
--hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \
--hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \
--hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \
--hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \
--hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \
--hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \
--hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \
--hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7
# via online-fxreader-pr34
tomlkit==0.13.3 \
--hash=sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1 \
--hash=sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0
# via
# online-fxreader-pr34
# yq
tomlq==0.1.0 \
--hash=sha256:4b966fd999ed2bf69081b7c7f5caadbc4c9542d0ed5fcf2e9b7b4d8d7ada3c82 \
--hash=sha256:e775720e90da3e405142b9fe476145e71c0389f787b1ff9933f92a1704d8c6e7
# via online-fxreader-pr34
types-pyyaml==6.0.12.20250822 \
--hash=sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098 \
--hash=sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413
# via django-stubs
typing-extensions==4.15.0 \
--hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \
--hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
# via
# anyio
# django-stubs
# django-stubs-ext
# fastapi
# mypy
# pydantic
# pydantic-core
# pyright
# starlette
# typing-inspection
typing-inspection==0.4.1 \
--hash=sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51 \
--hash=sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28
# via
# pydantic
# pydantic-settings
uvicorn==0.35.0 \
--hash=sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a \
--hash=sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01
# via
# -r requirements.in
# online-fxreader-pr34
uvloop==0.21.0 \
--hash=sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0 \
--hash=sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f \
--hash=sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc \
--hash=sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414 \
--hash=sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f \
--hash=sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d \
--hash=sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd \
--hash=sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff \
--hash=sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c \
--hash=sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3 \
--hash=sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d \
--hash=sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a \
--hash=sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb \
--hash=sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2 \
--hash=sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0 \
--hash=sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6 \
--hash=sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c \
--hash=sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af \
--hash=sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc \
--hash=sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb \
--hash=sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75 \
--hash=sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb \
--hash=sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553 \
--hash=sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e \
--hash=sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6 \
--hash=sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d \
--hash=sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206 \
--hash=sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc \
--hash=sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281 \
--hash=sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b \
--hash=sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8 \
--hash=sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79 \
--hash=sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f \
--hash=sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe \
--hash=sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26 \
--hash=sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816 \
--hash=sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2
# via online-fxreader-pr34
xmltodict==0.14.2 \
--hash=sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553 \
--hash=sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac
# via yq
yq==3.4.3 \
--hash=sha256:547e34bc3caacce83665fd3429bf7c85f8e8b6b9aaee3f953db1ad716ff3434d \
--hash=sha256:ba586a1a6f30cf705b2f92206712df2281cd320280210e7b7b80adcb8f256e3b
# via tomlq

95
docker/checks/rest.py Normal file

@ -0,0 +1,95 @@
import fastapi
import re
import numpy
import subprocess
import fastapi.responses
import pydantic_settings
import logging
logger = logging.getLogger(__name__)
from online.fxreader.pr34.commands_typed import metrics as pr34_metrics
from typing import Optional, Any, ClassVar
class Settings(pydantic_settings.BaseSettings):
    """Environment-driven configuration for the checks service.

    ``checks_hosts`` lists the targets probed by ``ping_stats`` on every
    ``/metrics`` scrape; presumably populated from the environment by
    pydantic-settings (TODO confirm the variable name / format with the
    deployment's .env).
    """

    # hosts to ping on each metrics scrape
    checks_hosts: list[str]

    # lazily-created process-wide instance; see singleton()
    _singleton: ClassVar[Optional['Settings']] = None

    @classmethod
    def singleton(cls) -> 'Settings':
        """Return the cached settings, validating the environment on first use."""
        if cls._singleton is None:
            # use cls (not the hard-coded Settings) so a subclass caches
            # an instance of its own type
            cls._singleton = cls.model_validate({})
        return cls._singleton
def ping_stats(host: str) -> Optional[float]:
    """Ping ``host`` and return the mean round-trip time in milliseconds.

    Runs ``ping -i 0.1 -c 3 -w 1 host`` (3 probes, 0.1 s apart, 1 s
    deadline) and averages the ``time=<float> ms`` figures parsed from
    its output.

    Returns None when the command fails (missing binary, unknown host,
    non-zero exit) or when no samples could be parsed.
    """
    try:
        ping_output = subprocess.check_output(
            [
                'ping',
                '-i',
                '0.1',
                '-c',
                '3',
                '-w',
                '1',
                host,
            ]
        ).decode('utf-8')
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any ping failure degrades to "no data".
        # logging.getLogger(__name__) is the same object as the module
        # logger; the message replaces the original empty string.
        logging.getLogger(__name__).exception('ping failed')
        ping_output = ''
    r1 = re.compile(r'time=(\d+\.\d+)\sms')
    spend_time = [float(o[1]) for o in r1.finditer(ping_output)]
    if len(spend_time) == 0:
        return None
    return float(numpy.mean(spend_time))
async def metrics_get() -> fastapi.responses.Response:
    """Prometheus scrape endpoint: ping every configured host, expose the means.

    Emits one ``ping_mean`` gauge sample per reachable host (labelled with
    ``host``); hosts for which ``ping_stats`` returned None are omitted.
    NOTE(review): ping_stats blocks for up to ~1 s per host and runs
    serially here — confirm scrape timeouts tolerate len(checks_hosts).
    """
    ping_res = {h: ping_stats(h) for h in Settings.singleton().checks_hosts}
    metrics = [
        pr34_metrics.Metric.model_validate(
            dict(
                name='ping_mean',
                type='gauge',
                help='ping to host, 3 counts, up to 1 second',
                samples=[
                    dict(
                        value=str(v),
                        parameters=dict(
                            host=k,
                        ),
                    )
                ],
            )
        )
        for k, v in ping_res.items()
        # PEP 8 idiom: `is not None` instead of `not v is None`
        if v is not None
    ]
    serialize_res = pr34_metrics.serialize(metrics)
    return fastapi.responses.Response(
        content=serialize_res.json2,
        headers={
            'Content-Type': serialize_res.content_type,
        },
    )
def get_router() -> fastapi.APIRouter:
    """Build the router exposing GET /metrics (Prometheus text format)."""
    router = fastapi.APIRouter()
    # equivalent to the @router.get('/metrics') decorator form
    router.add_api_route('/metrics', metrics_get, methods=['GET'])
    return router

@ -1,9 +1,11 @@
FROM alpine:latest
# FROM alpine:latest
FROM alpine@sha256:56fa17d2a7e7f168a043a2712e63aed1f8543aeafdcee47c58dcffe38ed51099
RUN apk add openssh
RUN apk add python3
RUN apk add tini
RUN apk add bash curl
RUN apk add py3-pip
RUN apk add netcat-openbsd
RUN pip3 install --break-system-packages requests
WORKDIR /app

@ -1,132 +1,56 @@
py3 << EOF
def f1():
t1 = vim.current.window
t2 = t1.width
vim.command('vnew')
t3 = t2 // 3
vim.command('vertical resize %d' % t3)
vim.current.window = t1
py3 <<EOF
def f2():
context = {
k : vim.options['splitright']
for k in ['splitright']
}
try:
current_window = vim.current.window
vim.options['splitright'] = True
vim.command('vnew')
vim.command('r! tmux show-buffer')
vim.current.window = current_window
finally:
for k, v in context.items():
vim.options[k] = v
def load():
import logging
import logging.handlers
import importlib
def f5_1(pattern, flags, info):
import subprocess
import io
import re
import tempfile
import traceback
import logging
import json
import pathlib
import os
import sys
#print([pattern, flags, info])
completed_process = None
sys.path.append(
str(pathlib.Path('~/.vim').expanduser())
)
options = dict(
recursive=False,
ext=[],
)
logging.basicConfig(
level=getattr(
logging,
os.environ.get('VIM_PY3_LEVEL', 'WARNING')
),
# filename=pathlib.Path('~/.py3.vimrc.log').expanduser(),
handlers=[
logging.handlers.RotatingFileHandler(
pathlib.Path('~/.py3.vimrc.log').expanduser(),
maxBytes=128 * 1024,
backupCount=3,
)
]
)
#print('fuck')
if b'r' in flags:
while True:
ext_m = re.compile(r'^.([^\,]+),(.*)$').match(pattern)
modules = [
str(o)
for o in json.loads(os.environ.get('VIM_PY3_MODULES', '["online_fxreader_pr34_vim.main"]'))
]
if pattern[:3] in [r'\r,']:
options['recursive'] = True
pattern = pattern[3:]
elif not ext_m is None:
options['ext'].append(
ext_m[1]
)
pattern = ext_m[2]
else:
break
for o in modules:
# if not o.exists():
# raise RuntimeError('not found %s' % str(o))
print([flags, pattern, options,])
try:
git_cmd = [
'git', 'grep',
'-n',
]
if options['recursive']:
git_cmd.append('--recurse-submodules')
git_cmd.extend(['-P', pattern])
if len(options['ext']) > 0:
git_cmd.extend(['--', *[
'**/*%s' % o
for o in options['ext']
]])
completed_process = subprocess.run(
git_cmd,
capture_output=True,
)
assert (
completed_process.returncode == 0 or
(
completed_process.stdout == b''
#completed_process.stdout == b'' and
#completed_process.stderr == b''
)
)
t1 = completed_process.stdout
except:
logging.error(''.join([
traceback.format_exc(),
getattr(completed_process, 'stdout', b'').decode('utf-8'),
getattr(completed_process, 'stderr', b'').decode('utf-8'),
]))
t1 = b''
def watch(data):
with tempfile.NamedTemporaryFile(suffix='.txt') as f:
with io.open(f.name, 'wb') as f2:
f2.write(data)
vim.command('!less %s' % f.name)
#watch(t1)
t2 = []
for o in t1.splitlines():
try:
#watch(o.encode('utf-8'))
t3 = o.decode('utf-8')
t4 = re.compile(r'^([^\:\=]+)[\:\=](\d+)[\:\=](.*)$').match(t3)
if not t4 is None:
t2.append(
dict(
name=t4[3].strip(),
filename=t4[1],
cmd=t4[2],
)
)
except:
pass
#print(t2)
#return [{'name': 'blah', 'filename': 'docker-compose.yml', 'cmd': '23'}]
return t2
m = importlib.import_module(o)
getattr(m, 'init')()
# vim.command('py3file {}'.format(str(o)))
EOF
" py3file ~/.module.vimrc.py
python3 load()
function! F5(pattern, flags, info)
python3 import online_fxreader_pr34_vim.main;
let res = py3eval(
\'f5_1(
\'online_fxreader_pr34_vim.main.f5_1(
\vim.bindeval("a:pattern").decode("utf-8"),
\vim.bindeval("a:flags"),
\vim.bindeval("a:info")

@ -79,7 +79,7 @@ bindgesture swipe:4:up exec $lock_cmd
#
# Basics:
#
bindsym Shift+$mod+l exec $lock_cmd
bindsym Shift+$mod+q exec $lock_cmd
bindsym --locked Shift+mod1+1 \
exec ~/.local/bin/commands \
@ -122,12 +122,14 @@ bindsym --locked XF86AudioMute exec zsh -c "commands media-toggle-volume"
bindsym --locked XF86AudioNext exec zsh -c "commands media-next"
bindsym --locked XF86AudioPrev exec zsh -c "commands media-prev"
bindsym $mod+m exec zsh -c "commands color_scheme toggle"
# Start a terminal
bindsym $mod+t exec $term
# Kill focused window
bindsym $mod+Shift+q kill
## Kill focused window
#bindsym $mod+Shift+q kill
# Start your launcher
bindsym $mod+Return exec $menu

@ -30,6 +30,7 @@ bind -n M-[ copy-mode
bind -n M-m set -g mouse
set -g default-terminal "screen-256color"
set-option -g status-style "bg=#00aa00,fg=#ffffff"
#set-option -ga terminal-overrides ",screen-256color:Tc"
set-option -g pane-active-border-style "bg=#33dd44 fg=#ffffff"
@ -37,7 +38,8 @@ set-option -g pane-active-border-style "bg=#33dd44 fg=#ffffff"
bind space display "Fuck!"
set-option -g set-titles on
set-option -g set-titles-string "#S / #W"
set -g status-right "#H %H:%M:%S %Y-%m-%d %Z"
# set -g status-right "#H %H:%M:%S %Y-%m-%d %Z"
set -g status-right "#{=-16:pane_current_path} #{pane_index} #H %H:%M:%S %Y-%m-%d %Z"
set -g status-interval 1
set -g status-right-length 60
set -g status-right-length 64
set -g mouse on

@ -0,0 +1,514 @@
import functools
import configparser
import subprocess
import dataclasses
import json
import datetime
import collections
import asyncio
import threading
import re
import inspect
import pathlib
import logging
import fnmatch
import vim
from typing import (
Optional,
ClassVar,
Self,
Any,
Callable,
)
from .utils import Vim
logger = logging.getLogger(__name__)
# dotted-name prefix used when generating vim-side identifiers
# (autocommand groups, py3 entry points) for this plugin
MODULE_NAME = 'online_fxreader_pr34_vim'
def future_dump_exception(future: Any) -> None:
    """Done-callback for background futures: log any stored exception, never raise.

    Accepts anything with a ``.result()`` method (asyncio or
    concurrent.futures futures). Returns None either way.
    """
    try:
        future.result()
    except BaseException:
        # BaseException keeps the original bare-except semantics (e.g. a
        # CancelledError from a cancelled task is still swallowed/logged);
        # logging.getLogger(__name__) is the module logger, and the message
        # replaces the original empty string.
        logging.getLogger(__name__).exception('background future raised')
class FastSelect:
_instance: ClassVar[Optional['FastSelect']] = None
def __init__(self) -> None:
    """Start a dedicated asyncio loop in a worker thread and register the
    vim timer + autocommands that drive the fast-select popup."""
    # private event loop: selector coroutines below all run on this
    # loop's thread, never on vim's UI thread
    self.loop = asyncio.new_event_loop()
    self.thread = threading.Thread(
        target=self.loop.run_forever,
    )
    # per-buffer usage counters — presumably for ranking candidates;
    # updated elsewhere (not visible in this chunk), TODO confirm
    self._buffer_frequency: dict[int, int] = dict()
    self._buffer_last_used: dict[int, int] = dict()
    # live filter text typed into the popup; None when no popup is open
    self._filter_pattern: Optional[str] = None
    self._include_git: Optional[bool] = False
    # indices into self._items currently matching the filter
    self._filtered_ids: Optional[set[int]] = None
    # candidate entries shown in the popup
    self._items: Optional[list['FastSelect.entry_t']] = None
    self._buffers: Optional[list['FastSelect.entry_t']] = None
    self._tracked_files: Optional[list['FastSelect.entry_t']] = None
    # UI callbacks to run on vim's main thread; drained by ui_thread()
    self._queue: collections.deque[Callable[[], None]] = collections.deque()
    self._lock = threading.Lock()
    self.popup_id: Optional[int] = None
    self.thread.start()
    # resolved with the picked option index while a popup session is active
    self._option_id: Optional[asyncio.Future[Optional[int]]] = None
    self._options: Optional[list[str]] = None
    auto_group = '{}_{}_{}'.format(
        MODULE_NAME,
        type(self).__name__.lower(),
        'close',
    ).capitalize()
    # vim timers cannot call python directly: install a vimscript shim
    # that pumps ui_thread() on the UI thread every 100 ms
    vim.command(r"""
func! UIThread(timer_id)
python3 online_fxreader_pr34_vim.beta.FastSelect.singleton().ui_thread()
endfunc
    """)
    Vim.run_command(r"""
call timer_start(100, 'UIThread', {'repeat': -1})
    """)
    # tear the loop down when vim exits; observe every buffer switch
    Vim.run_command(
        r"""
augroup {auto_group}
autocmd!
autocmd VimLeavePre * python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().close()
autocmd BufEnter * python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().on_buf_enter()
augroup END
    """.format(
            auto_group=auto_group,
        )
    )
def __del__(self) -> None:
    # Best-effort cleanup; close() is also triggered by the VimLeavePre
    # autocommand registered in __init__.
    self.close()
def close(self) -> None:
    """Stop the background event loop, join its thread, and release the loop.

    Safe to call more than once (__del__ and the VimLeavePre autocommand
    may both invoke it); subsequent calls are no-ops.
    """
    logger.info(dict(msg='close started'))
    if not self.loop.is_closed():
        self.loop.call_soon_threadsafe(self.loop.stop)
        self.thread.join()
        # the original never closed the loop, leaking its selector fd
        self.loop.close()
    logger.info(dict(msg='close done'))
@classmethod
def singleton(cls) -> Self:
    """Return the process-wide instance, constructing it on first use."""
    instance = cls._instance
    if instance is None:
        instance = cls()
        cls._instance = instance
    return instance
def pick_option_put_id(self, option_id: int) -> None:
self.loop.call_soon_threadsafe(lambda: self._option_id.set_result(option_id))
@dataclasses.dataclass
class entry_t:
    """One selectable item: a file path, optionally backed by a loaded buffer."""

    # absolute path of the file (callers apply .absolute(); see _get_buffers)
    path: pathlib.Path
    # vim buffer number when the file is already loaded; None otherwise
    buf_number: Optional[int] = None
def _get_buffers(
    self,
    res_future: Optional[asyncio.Future[
        list[entry_t]
    ]] = None,
) -> list[entry_t]:
    """Snapshot vim's buffer list as ``entry_t`` records.

    Reads ``vim.buffers``, so it must run on vim's UI thread. When
    ``res_future`` is given, the snapshot is additionally delivered to it
    via the background loop, letting loop-side coroutines await the result.
    """
    res = [
        self.entry_t(
            buf_number=o.number,
            path=pathlib.Path(o.name).absolute(),
        )
        for o in vim.buffers
    ]
    # explicit None-check (idiom); a Future is always truthy so behavior
    # is unchanged, but intent is now unambiguous
    if res_future is not None:
        # set_result must happen on the loop's own thread
        self.loop.call_soon_threadsafe(lambda: res_future.set_result(res))
    return res
async def _switch_buffer(self) -> None:
    """Interactive buffer switch, running on the background loop:
    refresh the candidate list, show the popup, then queue the actual
    buffer change for vim's UI thread."""
    with self._lock:
        # NOTE(review): _reset_items/_update_items/_sync_task are defined
        # outside this chunk — presumably they rebuild self._items
        self._reset_items()
    await self._sync_task(self._update_items)
    # self._items = buffers
    with self._lock:
        self._set_filter_pattern('')
    selected_id = await self._pick_option_from_popup(
        # [o[0] for o in buffers]
    )
    logger.info(dict(selected_id=selected_id))

    def ui_switch_buffer():
        # executed later on vim's UI thread via ui_thread() / self._queue
        nonlocal selected_id
        # nonlocal buffers
        logger.warning(dict(
            buffers=self._items[:3],
            id=selected_id,
        ))
        # print(vim.buffers, selected_id)
        if not selected_id is None:
            selected_item = self._items[selected_id]
            if selected_item.buf_number is None:
                # file not loaded yet: add it to the buffer list, then edit it
                # (json.dumps(...)[1:-1] escapes the path for the ex command)
                Vim.run_command('badd %s' % json.dumps(str(selected_item.path))[1:-1])
                Vim.run_command('e %s' % json.dumps(str(selected_item.path))[1:-1])
            else:
                vim.current.buffer = vim.buffers[selected_item.buf_number]

    with self._lock:
        self._queue.append(ui_switch_buffer)
def switch_buffer(self) -> None:
    """Public entry point (mapped to <C-p>): start _switch_buffer on the loop thread.

    Fire-and-forget; exceptions inside the coroutine are surfaced by the
    future_dump_exception done-callback.
    """
    logger.info(dict(msg='before switch_buffer started'))
    result = asyncio.run_coroutine_threadsafe(self._switch_buffer(), self.loop)
    result.add_done_callback(future_dump_exception)
    logger.info(dict(msg='after switch_buffer started'))
async def _pick_option_from_popup(
    self,
    # options: list[str],
) -> Optional[int]:
    """Open the popup and wait for the user's selection.

    Returns an index into self._items (the popup shows only the filtered
    view, so the popup's 0-based result is mapped through
    self._filtered_ids), or None when the popup was cancelled (the
    vimscript callback reports -1).
    """
    logger.info(dict(msg='started'))
    self._filter_pattern = ''
    self._popup_id = None
    # self._options = options
    self._option_id = asyncio.Future[int]()
    await self._pick_option_start_popup()
    option_id = await self._option_id
    logger.info(dict(option_id=option_id))
    # Clear per-interaction state before returning.
    self._options = None
    self._option_id = None
    logger.info(dict(msg='done'))
    if option_id >= 0:
        return self._filtered_ids[option_id]
    else:
        return None
def ui_thread(self):
    """Drain pending UI callbacks on Vim's main thread.

    Invoked by the Vim timer registered in __init__ (every 100ms).
    Callbacks queued onto self._queue by the asyncio thread are executed
    here because the vim module may only be used from Vim's own thread.

    Fix: callbacks are enqueued with append(), so they must be drained
    with popleft() (FIFO); the original pop() drained the deque LIFO and
    could execute queued UI commands in reverse order.
    """
    with self._lock:
        # Vim.run_command(r'''
        # set laststatus=2
        # set statusline={}
        #'''.format(datetime.datetime.now().isoformat()))
        while len(self._queue) > 0:
            cmd = self._queue.popleft()
            try:
                cmd_str = inspect.getsource(cmd)
            except Exception:
                # getsource raises OSError/TypeError for some callables.
                cmd_str = str(cmd)
            try:
                logger.warning(dict(msg='start command', cmd=cmd_str))
                cmd()
            except Exception:
                logger.exception('')
            # self._result.append(
            # vim.command(cmd)
            # )
def on_buf_enter(self) -> None:
    """BufEnter autocommand hook: record usage stats for the current buffer.

    Captures the buffer number/name here on Vim's thread, then updates
    the frequency/last-used counters on the asyncio thread.
    """
    result = asyncio.run_coroutine_threadsafe(
        self._on_buf_enter(
            buf_number=vim.current.buffer.number,
            buf_name=pathlib.Path(vim.current.buffer.name),
        ),
        self.loop,
    )
    result.add_done_callback(future_dump_exception)
def on_filter_key(self, key: bytes) -> int:
    """Popup filter hook: handle one keypress (Vim delivers it as bytes).

    Esc closes the popup; backspace shortens the filter pattern; Ctrl-G
    toggles inclusion of git-tracked files; printable characters extend
    the pattern. Any other key is delegated to Vim's builtin
    popup_filter_menu so menu navigation keeps working.

    Returns a vimscript truthy/falsy number: 1 when the key was consumed.
    (Annotations corrected: ``key`` arrives as bytes, and the function
    always returns a number per the popup-filter contract.)
    """
    logger.info(dict(msg='got key', key=key))
    # Esc: close the popup; the vimscript callback then reports -1.
    if key == bytes([27]):
        logger.info(dict(msg='closing popup'))
        vim.Function('popup_close')(self._popup_id)
        return 1
    # b'\x80kb' is Vim's internal encoding of <BS>.
    if key == b'\x80kb':
        logger.info(dict(msg='backspace'))
        with self._lock:
            self._set_filter_pattern(self._filter_pattern[:-1])
    # C-g
    elif key == b'\x07':
        with self._lock:
            self._include_git = not self._include_git
            self._update_items()
            self._update_filtered()
            # self._update_popup()
    else:
        try:
            key_str = key.decode('utf-8')
        except:
            # Non-UTF8 sequence (special key): let Vim's menu filter act.
            return vim.Function('popup_filter_menu')(
                self._popup_id, key
            )
            # return 0
        if not key_str.isprintable():
            return vim.Function('popup_filter_menu')(
                self._popup_id, key
            )
            # return 0
        else:
            with self._lock:
                self._set_filter_pattern(self._filter_pattern + key_str)
    self._update_popup()
    return 1
async def _sync_task(
    self,
    cb: Callable[[], None],
    # future: asyncio.Future[bool]
) -> bool:
    """Run ``cb`` on Vim's main thread and await its completion.

    The callback is wrapped and queued for ui_thread(); the wrapper
    reports back to this coroutine through a future. Returns True when
    ``cb`` ran without raising. (Return annotation corrected from None:
    the coroutine returns the awaited future's bool value.)
    """
    res_future: asyncio.Future[bool] = asyncio.Future()

    def wrapper():
        # Executed by ui_thread() on Vim's thread.
        res: bool = True
        try:
            cb()
        except:
            logger.exception('')
            res = False
        self.loop.call_soon_threadsafe(lambda: res_future.set_result(res))

    with self._lock:
        self._queue.append(wrapper)
    return await res_future
@classmethod
def _cwd(cls) -> pathlib.Path:
    """Return Vim's current working directory (``getcwd()``) as a Path.

    Fix: the method takes ``cls`` but was missing the @classmethod
    decorator, so plain ``self._cwd()`` calls were silently passing the
    instance as ``cls``. Adding the decorator makes the signature honest;
    existing ``self._cwd()`` call sites keep working unchanged.
    """
    return pathlib.Path(
        vim.Function('getcwd')().decode('utf-8')
    )
def _update_items(self) -> None:
    """Rebuild the candidate list self._items (must run on Vim's main thread).

    Items are the open buffers plus, when git mode is enabled, files from
    ``git ls-files --recurse-submodules`` that are not already open.
    The combined list is sorted most-recently-used first using the
    timestamps collected by _on_buf_enter().
    """
    known_files: dict[str, int] = dict()
    if self._buffers is None:
        self._buffers = self._get_buffers()
    logger.info(dict(buffers=self._buffers[:3]))
    if self._include_git:
        if self._tracked_files is None:
            # Index open buffers by absolute path so already-open files
            # from the git listing are skipped below.
            for o in self._buffers:
                assert o.buf_number
                known_files[str(o.path)] = o.buf_number
            ls_files_output = [
                o.strip()
                for o in subprocess.check_output(
                    ['git', 'ls-files', '--recurse-submodules',], cwd=self._cwd(),
                ).decode('utf-8').splitlines()
            ]
            self._tracked_files = []
            for o in ls_files_output:
                # NOTE(review): .absolute() resolves against the Python
                # process cwd, while ls-files paths are relative to
                # self._cwd() — confirm the two always agree.
                path = pathlib.Path(
                    o,
                ).absolute()
                entry = self.entry_t(
                    path=path,
                    buf_number=known_files.get(str(path)),
                )
                if entry.buf_number:
                    # Already open; the buffers list covers it.
                    continue
                self._tracked_files.append(entry)
            logger.info(dict(tracked_files=self._tracked_files[:3]))
        self._items = self._buffers + self._tracked_files
    else:
        self._items = self._buffers
    self._items = sorted(
        self._items,
        # key=lambda x: -self._buffer_frequency.get(x[1], 0)
        key=lambda x: -self._buffer_last_used.get(x.buf_number, 0),
    )
def _reset_items(self) -> None:
    """Invalidate the cached buffer/tracked-file lists.

    The next _update_items() call rebuilds them from scratch.
    """
    self._buffers = None
    self._tracked_files = None
    self._items = None
def _update_filtered(self) -> None:
    """Recompute the filtered view of self._items from the current pattern.

    Populates self._filtered_ids (indices into self._items whose path
    matches the pattern) and self._options (the display strings).

    Fix: the pattern is typed interactively, so it may be an invalid
    regular expression at any intermediate keystroke (e.g. a lone '(' or
    '*'); the original let re.error escape into the popup filter
    callback. Fall back to a literal-substring match in that case.
    """
    try:
        pattern = re.compile(self._filter_pattern)
    except re.error:
        # Invalid regex while the user is still typing: match literally.
        pattern = re.compile(re.escape(self._filter_pattern))
    self._filtered_ids = [
        i for i, o in enumerate(self._items) if not pattern.search(str(o.path)) is None
    ]
    self._options = [str(self._items[o].path) for o in self._filtered_ids]
def _set_filter_pattern(self, filter_pattern: str) -> None:
    """Replace the filter pattern and recompute the filtered item view."""
    self._filter_pattern = filter_pattern
    self._update_filtered()
def _update_popup(self) -> None:
    """Push the filtered options and a title showing the pattern into the popup.

    Must run on Vim's main thread (calls vim popup functions directly).
    """
    vim.Function('popup_settext')(
        self._popup_id,
        self._options,
    )
    vim.Function('popup_setoptions')(self._popup_id, {'title': 'Select a file, [%s]' % self._filter_pattern})
async def _on_buf_enter(
    self,
    buf_number: int,
    buf_name: pathlib.Path,
) -> None:
    """Record a visit to buffer ``buf_number`` (runs on the asyncio thread).

    Increments the per-buffer visit counter and stamps the last-used
    time; the timestamps drive the MRU ordering in _update_items().
    """
    # logger.info(dict(msg='waiting'))
    with self._lock:
        # buf_number = vim.current.buffer.number
        if not buf_number in self._buffer_frequency:
            self._buffer_frequency[buf_number] = 0
        self._buffer_frequency[buf_number] += 1
        self._buffer_last_used[buf_number] = datetime.datetime.now().timestamp()
        logger.info(
            dict(
                msg='updated',
                buf_path=str(buf_name),
                frequency=self._buffer_frequency[buf_number],
                buf_number=buf_number,
            )
        )
async def _pick_option_start_popup(
    self,
):
    """Install the vimscript glue functions and queue popup creation.

    Defines two global vimscript functions named after this module/class
    (a selection callback and a key filter), then appends a closure onto
    the UI queue so popup_menu() itself runs on Vim's main thread.
    """
    callback_name = '{}_{}_{}'.format(
        MODULE_NAME,
        type(self).__name__.lower(),
        'popup_callback',
    ).capitalize()
    filter_name = '{}_{}_{}'.format(
        MODULE_NAME,
        type(self).__name__.lower(),
        'popup_filter',
    ).capitalize()
    # Redefining an existing function is tolerated; just log it.
    if int(vim.eval('exists("{}")'.format(callback_name))) == 1:
        logger.warning(dict(msg='callback already defined, %s' % callback_name))
    # Callback: translate Vim's 1-based menu result to a 0-based index;
    # -1 signals "cancelled".
    vim.command(
        r"""
function! {callback_name}(id, result)
if a:result > 0
call py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().pick_option_put_id(' . (a:result - 1). ')')
else
call py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().pick_option_put_id(-1)')
endif
endfunction
""".format(
            callback_name=callback_name,
        )
    )
    # Filter: forwards every keypress to on_filter_key(). str.replace is
    # used instead of format() because the vimscript body itself contains
    # '{' braces (#{key: a:key}).
    vim.command(
        r"""
function! {filter_name}(win_id, key)
return py3eval('online_fxreader_pr34_vim.beta.FastSelect.singleton().on_filter_key(key)', #{key: a:key})
endfunction
""".replace(
            '{filter_name}',
            filter_name,
        )
    )
    logger.info(dict(msg='before popup'))
    popup_menu = vim.Function('popup_menu')

    def create_popup():
        # Executed by ui_thread() on Vim's main thread.
        self._popup_id = popup_menu(
            self._options,
            {
                'title': 'Select a file',
                'callback': callback_name,
                'filter': filter_name,
                'wrap': 1,
                'maxwidth': 80,
                'close': 'button',
                'resize': 1,
                'drag': 1,
                'maxheight': '16',
            },
        )

    with self._lock:
        self._queue.append(
            create_popup,
            # lambda : vim.command(
            # "call popup_menu({options}, {'title': '{title}', 'callback': '{callback}'})".replace(
            # '{options}', '[%s]' % ','.join([
            # '\'%s\'' % o.replace('\'', '\\\'')
            # for o in self._options
            # ]),
            # ).replace(
            # '{title}', 'Select a file',
            # ).replace(
            # '{callback}',
            # callback_name
            # )
            # )
        )
    # logger.info(dict(popup_id=popup_id))
    # logger.info(dict(msg='after popup'))
def init():
    # Plugin entry point: creating the singleton registers the UI timer
    # and autocommands as a side effect of FastSelect.__init__.
    FastSelect.singleton()

@ -0,0 +1,265 @@
import functools
import configparser
import collections
import asyncio
import threading
import re
import inspect
import pathlib
import logging
import fnmatch
import vim
from typing import (
Optional,
ClassVar,
Self,
Any,
Callable,
)
logger = logging.getLogger(__name__)
from .utils import Vim
# logging.basicConfig(level=logging.WARNING)
MODULE_NAME = 'online_fxreader_pr34_vim'
def f1():
    # Open a vertical split sized to one third of the current window's
    # width, then return focus to the original window.
    t1 = vim.current.window
    t2 = t1.width
    vim.command('vnew')
    t3 = t2 // 3
    vim.command('vertical resize %d' % t3)
    vim.current.window = t1
def f2():
    # Paste the current tmux buffer into a new vertical split on the
    # right, restoring the 'splitright' option and window focus afterwards.
    context = {k: vim.options['splitright'] for k in ['splitright']}
    try:
        current_window = vim.current.window
        vim.options['splitright'] = True
        vim.command('vnew')
        vim.command('r! tmux show-buffer')
        vim.current.window = current_window
    finally:
        for k, v in context.items():
            vim.options[k] = v
def f5_1(pattern, flags, info):
    """tagfunc-style backend: resolve ``pattern`` via ``git grep``.

    Recognized pattern prefixes (only when b'r' is in ``flags``):
    ``\\r,``  -> search with --recurse-submodules;
    ``.EXT,`` -> restrict the search to ``**/*EXT`` files (repeatable).
    Runs ``git grep -n -P`` and returns a list of tag dicts
    ({'name', 'filename', 'cmd'}) for Vim's tag machinery; returns an
    empty result set on any git failure.
    """
    import subprocess
    import io
    import re
    import tempfile
    import traceback
    import logging
    # print([pattern, flags, info])
    completed_process = None
    options = dict(
        recursive=False,
        ext=[],
    )
    if b'r' in flags:
        # Strip recognized prefixes off the pattern until none remain.
        while True:
            ext_m = re.compile(r'^.([^\,]+),(.*)$').match(pattern)
            if pattern[:3] in [r'\r,']:
                options['recursive'] = True
                pattern = pattern[3:]
            elif not ext_m is None:
                options['ext'].append(ext_m[1])
                pattern = ext_m[2]
            else:
                break
    print(
        [
            flags,
            pattern,
            options,
        ]
    )
    try:
        git_cmd = [
            'git',
            'grep',
            '-n',
        ]
        if options['recursive']:
            git_cmd.append('--recurse-submodules')
        git_cmd.extend(['-P', pattern])
        if len(options['ext']) > 0:
            git_cmd.extend(['--', *['**/*%s' % o for o in options['ext']]])
        completed_process = subprocess.run(
            git_cmd,
            capture_output=True,
        )
        # git grep exits 1 when nothing matched; accept that only when
        # stdout is genuinely empty.
        assert completed_process.returncode == 0 or (
            completed_process.stdout == b''
            # completed_process.stdout == b'' and
            # completed_process.stderr == b''
        )
        t1 = completed_process.stdout
    except:
        logging.error(
            ''.join(
                [
                    traceback.format_exc(),
                    getattr(completed_process, 'stdout', b'').decode('utf-8'),
                    getattr(completed_process, 'stderr', b'').decode('utf-8'),
                ]
            )
        )
        t1 = b''

    def watch(data):
        # Debug helper: page arbitrary bytes through less.
        with tempfile.NamedTemporaryFile(suffix='.txt') as f:
            with io.open(f.name, 'wb') as f2:
                f2.write(data)
            vim.command('!less %s' % f.name)

    # watch(t1)
    t2 = []
    # Parse "file:line:text" / "file=line=text" grep output lines.
    for o in t1.splitlines():
        try:
            # watch(o.encode('utf-8'))
            t3 = o.decode('utf-8')
            t4 = re.compile(r'^([^\:\=]+)[\:\=](\d+)[\:\=](.*)$').match(t3)
            if not t4 is None:
                t2.append(
                    dict(
                        name=t4[3].strip(),
                        filename=t4[1],
                        cmd=t4[2],
                    )
                )
        except:
            pass
    # print(t2)
    # return [{'name': 'blah', 'filename': 'docker-compose.yml', 'cmd': '23'}]
    return t2
class EditorConfigModeline:
_instance: ClassVar[Optional['EditorConfigModeline']] = None
def __init__(self) -> None:
self.configs: dict[
pathlib.Path,
dict[str, str],
] = dict()
Vim.run_command(r"""
augroup EditorConfigModeline
autocmd!
" autocmd BufEnter * ++nested python3 import online_fxreader_pr34_vim.main; online_fxreader_pr34_vim.main.EditorConfigModeline.singleton().on_buffer()
autocmd BufWinEnter * ++nested python3 import online_fxreader_pr34_vim.main; online_fxreader_pr34_vim.main.EditorConfigModeline.singleton().on_buffer()
augroup END
""")
@classmethod
def singleton(cls) -> Self:
if cls._instance is None:
cls._instance = cls()
return cls._instance
def load_config(self) -> Optional[dict[str, str]]:
cwd = pathlib.Path.cwd()
if not cwd in self.configs:
config_path = cwd / '.editorconfig'
if not config_path.exists():
return None
parser = configparser.ConfigParser()
parser.optionxform = str # keep case
try:
parser.read(str(config_path))
except:
logger.exception('')
return None
config: dict[str, str] = dict()
for section in parser.sections():
logger.info(dict(section=section))
if len(section) > 0:
# pattern = section[1:-1]
pattern = section
if not parser[section].get('vim_modeline') is None:
config[pattern] = parser[section].get('vim_modeline')
self.validate_modeline(config[pattern])
self.configs[cwd] = config
return self.configs[cwd]
@classmethod
def validate_modeline(cls, modeline: str) -> None:
pattern = re.compile(r'^set(\s+(noet|sts|ts|et|ai|ci|noai|noci|sw)(\=\w)?)+$')
assert pattern.match(modeline), 'invalid modeline %s' % modeline
@classmethod
def find_entry(
cls,
file_path: pathlib.Path,
config: Optional[dict[str, str]] = None,
) -> Optional[str]:
if config is None:
return None
project_root = pathlib.Path.cwd()
if file_path.is_relative_to(project_root):
rel_path = file_path.relative_to(pathlib.Path.cwd())
else:
rel_path = file_path
for pattern, modeline in config.items():
if fnmatch.fnmatch(str(rel_path), pattern):
return modeline
return None
def on_buffer(self) -> None:
config = self.load_config()
logger.info(dict(config=config))
buf_name = vim.current.buffer.name
file_path = pathlib.Path(buf_name).resolve()
entry = self.find_entry(file_path, config=config)
logger.info(dict(modeline=entry))
vim.command('silent! {}'.format(entry))
# vim.command("echo '{}'".format('applied %s' % entry))
# raise NotImplementedError
# EditorConfigModeline.singleton()
def init():
    # Plugin entry point: creating the singleton registers the
    # BufWinEnter autocommand as a side effect of __init__.
    EditorConfigModeline.singleton()

@ -0,0 +1,17 @@
import vim
import logging
logger = logging.getLogger(__name__)
class Vim:
    """Thin helper around the :python3 ``vim`` module for running ex commands."""

    @classmethod
    def run_command(cls, cmd) -> list[str]:
        """Execute every non-blank line of ``cmd`` as a separate ex command.

        Blank/whitespace-only lines are skipped; the per-line results of
        vim.command() are collected in input order and returned.
        """
        logger.info(dict(cmd=cmd))
        return [
            vim.command(line)
            for line in cmd.splitlines()
            if line.strip() != ''
        ]

@ -7,7 +7,8 @@ if has('python3')
source $HOME/.py3.vimrc
endif
filetype plugin indent on
" filetype plugin indent on
filetype plugin off
set number
set noswapfile
@ -30,7 +31,14 @@ colorscheme morning
hi MatchParen guifg=white guibg=black gui=NONE ctermfg=1 ctermbg=0
function! MakeSession()
let b:sessiondir = '.vim/'
" let b:sessiondir = '.vim/'
if exists('g:sessiondir')
let b:sessiondir = g:sessiondir
else
let b:sessiondir = getcwd() . '/' . '.vim/'
let g:sessiondir = b:sessiondir
endif
if exists('g:session_name')
let b:session_name = g:session_name
else
@ -43,11 +51,17 @@ function! MakeSession()
endif
let b:filename = b:sessiondir . '/' . b:session_name . '.vim'
exe "mksession! " . b:filename
echo 'saved ' . b:session_name
echo 'saved ' . b:sessiondir . ' ' . b:session_name
endfunction
function! LoadSession()
let b:sessiondir = '.vim/'
if exists('g:sessiondir')
let b:sessiondir = g:sessiondir
else
let b:sessiondir = getcwd() . '/' . '.vim/'
let g:sessiondir = b:sessiondir
endif
if exists('g:session_name')
let b:session_name = g:session_name
else
@ -87,3 +101,5 @@ map <Leader>i5 :set sw=2 sts=2 ts=2 noet ai ci<CR>
set foldmethod=indent
set nofoldenable
map <Leader>e :e #<cR>
map <C-p> :python3 import online_fxreader_pr34_vim.beta; online_fxreader_pr34_vim.beta.FastSelect.singleton().switch_buffer()<CR>

@ -86,7 +86,9 @@ class PyProject:
third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
default_factory=lambda: [],
)
requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
requirements: dict[str, pathlib.Path] = dataclasses.field(
default_factory=lambda: dict()
)
modules: list[Module] = dataclasses.field(
default_factory=lambda: [],
@ -132,7 +134,12 @@ def check_dict(
else:
VT_class = VT
assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()])
assert all(
[
isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
for k, v in value2.items()
]
)
if VT is None:
return cast(
@ -251,7 +258,12 @@ def pyproject_load(
str,
)
if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
if (
'tool' in content
and isinstance(content['tool'], dict)
and tool_name in content['tool']
and isinstance(content['tool'][tool_name], dict)
):
pr34_tool = check_dict(
check_dict(
content['tool'],
@ -264,7 +276,9 @@ def pyproject_load(
res.early_features = pr34_tool['early_features']
if 'pip_find_links' in pr34_tool:
res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
res.pip_find_links = [
d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
]
if 'runtime_libdirs' in pr34_tool:
res.runtime_libdirs = [
@ -283,7 +297,9 @@ def pyproject_load(
if 'third_party_roots' in pr34_tool:
for o in check_list(pr34_tool['third_party_roots']):
o2 = check_dict(o, str, str)
assert all([k in {'package', 'module_root', 'path'} for k in o2])
assert all(
[k in {'package', 'module_root', 'path'} for k in o2]
)
res.third_party_roots.append(
PyProject.ThirdPartyRoot(
@ -297,7 +313,9 @@ def pyproject_load(
res.requirements = {
k: d.parent / pathlib.Path(v)
# pathlib.Path(o)
for k, v in check_dict(pr34_tool['requirements'], str, str).items()
for k, v in check_dict(
pr34_tool['requirements'], str, str
).items()
}
if 'modules' in pr34_tool:
@ -364,7 +382,10 @@ class BootstrapSettings:
).strip()
)
pip_check_conflicts: Optional[bool] = dataclasses.field(
default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
default_factory=lambda: os.environ.get(
'PIP_CHECK_CONFLICTS', json.dumps(True)
)
in [json.dumps(True)],
)
uv_cache_dir: str = dataclasses.field(
default_factory=lambda: os.environ.get(
@ -438,7 +459,9 @@ def requirements_name_get(
else:
requirements_path = source_dir / 'requirements.txt'
requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')
requirements_path_in = requirements_path.parent / (
requirements_path.stem + '.in'
)
requirements_in: list[str] = []
@ -594,10 +617,15 @@ def env_bootstrap(
requirements_in: list[str] = []
requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
requirements_in.extend(
['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
)
if pyproject.early_features:
early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))
early_dependencies = sum(
[pyproject.dependencies[o] for o in pyproject.early_features],
cast(list[str], []),
)
logger.info(
dict(
@ -647,14 +675,6 @@ def env_bootstrap(
needs_compile = True
constraint_args = ['-c', str(requirements_path)]
if (not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update) and requirements_path.exists():
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
cache_find_links_args: list[str] = []
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
@ -711,15 +731,15 @@ def env_bootstrap(
os.unlink(f_out.name)
raise
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if bootstrap_settings.venv_partial and bootstrap_settings.env_path.exists():
logger.info('[bootstrap] VENV_PARTIAL: skipping venv creation (already exists)')

@ -0,0 +1,57 @@
# This file is part of systemd.
#
# systemd is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Entries in this file show the compile time defaults. Local configuration
# should be created by either modifying this file (or a copy of it placed in
# /etc/ if the original file is shipped in /usr/), or by creating "drop-ins" in
# the /etc/systemd/logind.conf.d/ directory. The latter is generally
# recommended. Defaults can be restored by simply deleting the main
# configuration file and all drop-ins located in /etc/.
#
# Use 'systemd-analyze cat-config systemd/logind.conf' to display the full config.
#
# See logind.conf(5) for details.
[Login]
#NAutoVTs=6
#ReserveVT=6
#KillUserProcesses=no
#KillOnlyUsers=
#KillExcludeUsers=root
#InhibitDelayMaxSec=5
#UserStopDelaySec=10
#HandlePowerKey=hibernate
HandlePowerKey=suspend
#HandlePowerKeyLongPress=ignore
#HandleRebootKey=reboot
#HandleRebootKeyLongPress=poweroff
#HandleSuspendKey=suspend
#HandleSuspendKeyLongPress=hibernate
#HandleHibernateKey=hibernate
#HandleHibernateKeyLongPress=ignore
#HandleLidSwitchExternalPower=suspend
#HandleLidSwitchDocked=ignore
#PowerKeyIgnoreInhibited=no
#SuspendKeyIgnoreInhibited=no
#HibernateKeyIgnoreInhibited=no
#LidSwitchIgnoreInhibited=yes
#RebootKeyIgnoreInhibited=no
#HoldoffTimeoutSec=30s
#IdleAction=ignore
#IdleActionSec=30min
#RuntimeDirectorySize=10%
#RuntimeDirectoryInodesMax=
#RemoveIPC=yes
#InhibitorsMax=8192
#SessionsMax=8192
#StopIdleSessionSec=infinity
HandleLidSwitch=suspend
# for sway
#HandleLidSwitch=none

@ -0,0 +1 @@
ACTION=="add", SUBSYSTEM=="net", KERNEL=="tun0", TAG+="systemd"

@ -0,0 +1,11 @@
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/pci0000:00/0000:00:1b.0/hdaudioC0D0/leds/hda::mute", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/platform/applesmc.768/leds/smc::kbd_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
# udevadm info --attribute-walk --path=/sys/devices/platform/applesmc.768/
# udevadm trigger --action=add --verbose --parent-match /devices/platform/applesmc.768/
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="ls -allh /sys$devpath/", OPTIONS="log_level=debug"
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="/usr/bin/ls -allh /sys$devpath/", OPTIONS="log_level=debug"
ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
#KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", MODE="0660", TAG+="uaccess", OPTIONS="log_level=debug", OPTIONS+="watch"
ACTION=="add|change", DEVPATH=="/class/backlight/intel_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
ACTION=="add|change", KERNEL=="cpu1", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
#ACTION=="add|change", KERNEL=="cpu[0-9]", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"

@ -0,0 +1 @@
ACTION=="add", SUBSYSTEM=="scsi_host", KERNEL=="host*", ATTR{link_power_management_policy}="max_performance", OPTIONS="log_level=debug"

@ -0,0 +1,13 @@
#!/usr/bin/bash
# Register two GNOME keyboard shortcuts (via the "commands" CLI) that
# switch the CPU frequency governor through desktop-services.

# Shift+Alt+1 -> powersave governor
commands gnome-shortcuts \
	-a \
	'powersave' \
	'commands desktop-services --cpufreq-action powersave' \
	'<Shift><Alt>1'

# Shift+Alt+2 -> performance governor
commands gnome-shortcuts \
	-a \
	'performance' \
	'commands desktop-services --cpufreq-action performance' \
	'<Shift><Alt>2'

@ -0,0 +1,330 @@
#!/usr/bin/env python3
# vi: filetype=python
import gi
gi.require_version("Gtk", "4.0")
from gi.repository import Gtk, Gio, GLib, GObject
import subprocess
import shlex
import threading
import uuid
import argparse
import logging
# CLI / Logging
# Build the CLI: logging verbosity plus raw vs. human display of counters.
parser = argparse.ArgumentParser(description="Systemd Scope Manager (GTK4 ColumnView)")
parser.add_argument("--log-level", "-l", choices=["DEBUG","INFO","WARNING","ERROR","CRITICAL"], default="INFO")
parser.add_argument("--values-mode", "-m", choices=["raw","human"], default="human",
                    help="Display memory / CPU values raw or humanreadable")
args = parser.parse_args()
# Root logger configured from --log-level; module-level logger used below.
logging.basicConfig(level=getattr(logging, args.log_level.upper()),
                    format="%(asctime)s %(name)s %(levelname)s: %(message)s")
logger = logging.getLogger(__name__)
# Helpers
def human_bytes(value: int) -> str:
    """Format a byte count as a short human-readable string (base 1024).

    One decimal place; falls through to PB for anything above TB range.
    """
    size = value
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if size < 1024:
            return f"{size:.1f}{unit}"
        size /= 1024
    return f"{size:.1f}PB"
def human_time(nsec: int) -> str:
    """Render a nanosecond duration as seconds, minutes or hours (one decimal)."""
    seconds = nsec / 1_000_000_000
    if seconds < 60:
        return f"{seconds:.1f}s"
    if seconds < 3600:
        return f"{seconds / 60:.1f}m"
    return f"{seconds / 3600:.1f}h"
def run_systemctl_show(unit: str) -> dict:
    """Query user-session systemd for a unit's resource/state properties.

    Runs ``systemctl --user show`` for the given unit and parses the
    KEY=VALUE output into a dict of raw strings. Returns {} on any
    failure (missing binary, non-zero exit, ...), logging the exception.
    """
    cmd = [
        "systemctl", "--user", "show", unit,
        "--property=MemoryCurrent,MemorySwapCurrent,CPUUsageNSec,ActiveState,Restart"
    ]
    try:
        res = subprocess.run(cmd, capture_output=True, text=True, check=True)
        props = {}
        for line in res.stdout.splitlines():
            if "=" in line:
                k, v = line.split("=", 1)
                props[k.strip()] = v.strip()
        return props
    except Exception:
        logger.exception("systemctl show failed for %s", unit)
        return {}
# Data row
class ScopeRow(GObject.Object):
    """One ColumnView row: a launched systemd scope and its display strings.

    GObject properties are used so the view's bind callbacks can listen
    for notify:: signals when values change.
    """
    __gtype_name__ = "ScopeRow"

    unit = GObject.Property(type=str)   # systemd unit name (*.scope)
    cli = GObject.Property(type=str)    # command line run in the scope
    mem = GObject.Property(type=str)    # formatted MemoryCurrent
    swap = GObject.Property(type=str)   # formatted MemorySwapCurrent
    cpu = GObject.Property(type=str)    # formatted CPUUsageNSec
    state= GObject.Property(type=str)   # systemd ActiveState

    def __init__(self, unit, cli, mem, swap, cpu, state):
        super().__init__()
        self.unit = unit
        self.cli = cli
        self.mem = mem
        self.swap = swap
        self.cpu = cpu
        self.state = state
# Main Window
class ScopeManagerWindow(Gtk.ApplicationWindow):
    """Main window: launch commands in transient systemd --user scopes and
    show their memory/CPU/state in a GTK4 ColumnView, refreshed every 2s.

    NOTE(review): ``set_property`` defined below shadows
    GObject.Object.set_property on this widget — confirm nothing relies
    on the GObject method; consider renaming.
    """
    def __init__(self, app):
        super().__init__(application=app, title="Systemd Scope Manager")
        self.set_default_size(1000, 500)
        self.values_mode = args.values_mode
        # unit name -> command line, for every scope started from this UI.
        self.scopes = {}

        # Model + single-selection ColumnView.
        self.model = Gio.ListStore(item_type=ScopeRow)
        self.sel = Gtk.SingleSelection.new(self.model)
        self.view = Gtk.ColumnView.new(self.sel)
        self.view.set_reorderable(True)
        self.view.set_show_row_separators(True)

        # One label column per ScopeRow property.
        cols = [
            ("Unit", "unit"),
            ("CLI", "cli"),
            ("Memory", "mem"),
            ("Swap", "swap"),
            ("CPU", "cpu"),
            ("State", "state"),
        ]
        for title, prop_name in cols:
            factory = Gtk.SignalListItemFactory()
            factory.connect("setup", self._factory_setup_label)
            factory.connect("bind", self._make_factory_bind(prop_name))
            col = Gtk.ColumnViewColumn()
            col.set_title(title)
            col.set_factory(factory)
            col.set_resizable(True)
            col.set_expand(True)
            self.view.append_column(col)

        # Actions column
        action_factory = Gtk.SignalListItemFactory()
        action_factory.connect("setup", self._factory_setup_actions)
        action_factory.connect("bind", self._factory_bind_actions)
        act_col = Gtk.ColumnViewColumn()
        act_col.set_title("⋮")
        act_col.set_factory(action_factory)
        act_col.set_resizable(False)
        act_col.set_expand(False)
        self.view.append_column(act_col)

        # Input area
        self.cmd_entry = Gtk.Entry()
        self.cmd_entry.set_placeholder_text("Command to run in new scope")
        run_btn = Gtk.Button(label="Run")
        run_btn.connect("clicked", self.on_run_clicked)
        prop_btn = Gtk.Button(label="Edit Property")
        prop_btn.connect("clicked", self.on_edit_property)
        input_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=6)
        input_box.append(self.cmd_entry)
        input_box.append(run_btn)
        input_box.append(prop_btn)

        scrolled = Gtk.ScrolledWindow()
        scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
        scrolled.set_propagate_natural_width(True)
        scrolled.set_propagate_natural_height(True)
        scrolled.set_child(self.view)

        vbox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6)
        vbox.set_vexpand(True)
        vbox.append(input_box)
        vbox.append(scrolled)
        self.set_child(vbox)

        # Periodic stats refresh; refresh_scopes returns True to re-arm.
        GLib.timeout_add_seconds(2, self.refresh_scopes)

    def _factory_setup_label(self, factory, list_item):
        # Each label-column cell is a plain Gtk.Label.
        lbl = Gtk.Label()
        list_item.set_child(lbl)

    def _make_factory_bind(self, prop_name):
        # Returns a bind handler that shows row.<prop_name> and keeps the
        # label in sync via the property's notify:: signal.
        def bind(factory, list_item):
            row = list_item.get_item()
            lbl = list_item.get_child()
            lbl.set_text(getattr(row, prop_name))
            row.connect(f"notify::{prop_name}", lambda obj, pspec: lbl.set_text(getattr(obj, prop_name)))
        return bind

    def _factory_setup_actions(self, factory, list_item):
        # Per-row hamburger menu button.
        btn = Gtk.MenuButton()
        btn.set_icon_name("open-menu-symbolic")
        # style class if needed:
        btn.get_style_context().add_class("flat")
        list_item.set_child(btn)

    def _factory_bind_actions(self, factory, list_item):
        # NOTE(review): actions are added to the window (self.add_action)
        # but referenced with the "app." prefix, and unit names contain
        # '.' which is not valid in action names — verify these menu items
        # actually activate.
        row = list_item.get_item()
        btn = list_item.get_child()
        menu = Gio.Menu()
        menu.append("Stop", f"app.stop_{row.unit}")
        menu.append("Restart", f"app.restart_{row.unit}")
        menu.append("Toggle AutoRestart", f"app.toggle_{row.unit}")
        btn.set_menu_model(menu)
        # create actions
        self._ensure_row_actions(row)

    def _ensure_row_actions(self, row):
        # Register one SimpleAction per menu entry for this row's unit.
        unit = row.unit
        # Stop
        act_stop = Gio.SimpleAction.new(f"stop_{unit}", None)
        act_stop.connect("activate", lambda a, v, r=row: self.menu_action("Stop", r))
        self.add_action(act_stop)
        # Restart
        act_restart = Gio.SimpleAction.new(f"restart_{unit}", None)
        act_restart.connect("activate", lambda a, v, r=row: self.menu_action("Restart", r))
        self.add_action(act_restart)
        # Toggle
        act_toggle = Gio.SimpleAction.new(f"toggle_{unit}", None)
        act_toggle.connect("activate", lambda a, v, r=row: self.menu_action("Toggle AutoRestart", r))
        self.add_action(act_toggle)

    def on_run_clicked(self, button):
        """Launch the entered command in a fresh systemd --user scope."""
        cmd = self.cmd_entry.get_text().strip()
        if not cmd:
            return
        unit = f"app-{uuid.uuid4().int >> 64}.scope"
        argv = ["systemd-run", "--user", "--scope", "--unit", unit,
                "-p", "MemoryAccounting=yes", "-p", "CPUAccounting=yes"] + shlex.split(cmd)
        logger.info("Starting scope: %s", " ".join(argv))
        def worker():
            # systemd-run --scope blocks for the scope's lifetime, so run
            # it off the UI thread and just log its output.
            try:
                p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
                out, err = p.communicate()
                if out:
                    logger.debug("systemd-run stdout [%s]: %s", unit, out.strip())
                if err:
                    logger.debug("systemd-run stderr [%s]: %s", unit, err.strip())
            except Exception:
                logger.exception("Failed to run systemd-run for %s", unit)
        threading.Thread(target=worker, daemon=True).start()
        self.scopes[unit] = cmd
        self.cmd_entry.set_text("")

    def on_edit_property(self, button):
        """Prompt for a systemd property and apply it to the selected unit.

        NOTE(review): Gtk.Dialog.run() was removed in GTK 4 (dialogs are
        async, handled via the "response" signal) and Gtk.Grid has no
        ``margin`` construct property in GTK 4 — confirm against the GTK4
        migration guide; as written this handler likely raises at runtime.
        """
        idx = self.sel.get_selected()
        if idx < 0:
            return
        row = self.model.get_item(idx)
        dialog = Gtk.Dialog(transient_for=self, modal=True, title="Set systemd property")
        content = dialog.get_content_area()
        grid = Gtk.Grid(row_spacing=6, column_spacing=6, margin=10)
        content.append(grid)
        lbl1 = Gtk.Label(label="Property (e.g. MemoryMax):")
        entry1 = Gtk.Entry()
        lbl2 = Gtk.Label(label="Value (e.g. 512M):")
        entry2 = Gtk.Entry()
        runtime_chk = Gtk.CheckButton(label="Runtime only")
        grid.attach(lbl1, 0,0,1,1)
        grid.attach(entry1, 1,0,1,1)
        grid.attach(lbl2, 0,1,1,1)
        grid.attach(entry2, 1,1,1,1)
        grid.attach(runtime_chk,0,2,2,1)
        dialog.add_button("OK", Gtk.ResponseType.OK)
        dialog.add_button("Cancel", Gtk.ResponseType.CANCEL)
        dialog.show()
        resp = dialog.run()
        if resp == Gtk.ResponseType.OK:
            prop = entry1.get_text().strip()
            val = entry2.get_text().strip()
            rt = runtime_chk.get_active()
            dialog.destroy()
            self.set_property(row.unit, prop, val, rt)
        else:
            dialog.destroy()

    def set_property(self, unit, prop, val, runtime_flag):
        """Run ``systemctl --user set-property`` (optionally --runtime) for the unit."""
        cmd = ["systemctl", "--user", "set-property"]
        if runtime_flag:
            cmd.append("--runtime")
        cmd += [unit, f"{prop}={val}"]
        logger.info("Setting %s=%s on %s (runtime=%s)", prop, val, unit, runtime_flag)
        try:
            res = subprocess.run(cmd, capture_output=True, text=True)
            if res.stdout:
                logger.debug("set-property stdout: %s", res.stdout.strip())
            if res.stderr:
                logger.debug("set-property stderr: %s", res.stderr.strip())
        except Exception:
            logger.exception("Failed set-property for %s", unit)

    def menu_action(self, label, row):
        """Dispatch a row menu entry (Stop / Restart / Toggle AutoRestart)."""
        unit = row.unit
        try:
            if label == "Stop":
                subprocess.run(["systemctl","--user","stop",unit])
            elif label == "Restart":
                subprocess.run(["systemctl","--user","restart",unit])
            elif label == "Toggle AutoRestart":
                # Flip between Restart=no and Restart=always.
                props = run_systemctl_show(unit)
                current = props.get("Restart","no")
                new = "no" if current != "no" else "always"
                subprocess.run(["systemctl","--user","set-property",unit,f"Restart={new}"])
        except Exception:
            logger.exception("Action %s failed on %s", label, unit)

    def refresh_scopes(self):
        """Timer callback: rebuild all rows from live systemctl data.

        Returns True so the GLib timeout keeps firing.
        NOTE(review): systemd can report "[not set]" for accounting
        properties, which would make int() raise here — confirm.
        """
        self.model.remove_all()
        for unit, cli in self.scopes.items():
            props = run_systemctl_show(unit)
            mem = int(props.get("MemoryCurrent", "0"))
            swap = int(props.get("MemorySwapCurrent","0"))
            cpu = int(props.get("CPUUsageNSec", "0"))
            state = props.get("ActiveState","unknown")
            if self.values_mode == "human":
                mem_s = human_bytes(mem)
                swap_s = human_bytes(swap)
                cpu_s = human_time(cpu)
            else:
                mem_s = str(mem)
                swap_s = str(swap)
                cpu_s = str(cpu)
            row = ScopeRow(unit, cli, mem_s, swap_s, cpu_s, state)
            self.model.append(row)
        logger.debug("Refreshed %d scopes", self.model.get_n_items())
        return True
# Application
class ScopeManagerApp(Gtk.Application):
    """Gtk.Application wrapper: presents one ScopeManagerWindow on activate."""
    def __init__(self):
        super().__init__(application_id="org.systemd.ScopeManager")
        self.connect("activate", self.on_activate)

    def on_activate(self, app):
        win = ScopeManagerWindow(self)
        win.present()
def main():
    """Create the application and run the GTK main loop; returns the exit status."""
    app = ScopeManagerApp()
    return app.run()


if __name__ == "__main__":
    import sys
    sys.exit(main())

@ -0,0 +1,116 @@
#!/usr/bin/python3
# vi: filetype=python
import re
import sys
import os
import time
import subprocess
import argparse
import logging
from typing import (Any,)
logger = logging.getLogger(__name__)
def run() -> None:
    """udev helper: relax permissions on fan / cpufreq sysfs nodes.

    Matches ``--device`` (a udev DEVPATH) against known device patterns,
    waits for the expected sysfs node to appear, then runs the matching
    shell snippet (chown/chmod to group ``fan``).  Raises
    NotImplementedError when nothing matched, so a misconfigured udev
    rule fails loudly instead of silently doing nothing.
    """
    logging.basicConfig(level=logging.INFO)

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--device',
    )
    options = parser.parse_args()

    # label -> dict(devpath=regex over DEVPATH, node=sysfs path to wait for,
    #               cmd=shell snippet, both formatted with regex groups)
    DEVICES: dict[str, Any] = dict(
        applesmc=dict(
            devpath='sys/devices/platform/applesmc.768',
            node='/sys/devices/platform/applesmc.768/fan1_manual',
            cmd=r'''
chown root:fan /sys/devices/platform/applesmc.768/fan1_*
chmod g+w /sys/devices/platform/applesmc.768/fan1_*
''',
        ),
        intel_pstate=dict(
            devpath=r'/?sys/devices/system/cpu/cpu0',
            node='/sys/devices/system/cpu/intel_pstate/no_turbo',
            cmd=r'''
chown root:fan /sys/devices/system/cpu/intel_pstate/no_turbo
chown root:fan /sys/devices/system/cpu/intel_pstate/max_perf_pct
#chown root:fan /sys/devices/system/cpu/intel_pstate/status
chmod g+w /sys/devices/system/cpu/intel_pstate/no_turbo
chmod g+w /sys/devices/system/cpu/intel_pstate/max_perf_pct
#chmod g+w /sys/devices/system/cpu/intel_pstate/status
echo passive > /sys/devices/system/cpu/intel_pstate/status
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
''',
        ),
        amd_pstate=dict(
            devpath=r'/?sys/devices/system/cpu/cpu1',
            node='/sys/devices/system/cpu/amd_pstate/status',
            cmd=r'''
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/boost
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/boost
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
''',
        ),
        #governor=dict(
        #    devpath=r'/?sys/devices/system/cpu/cpu(\d+)',
        #    node=r'/sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor',
        #    cmd=r'''
        #        chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
        #        chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
        #        chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
        #        chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
        #    ''',
        #),
    )

    processed: int = 0

    logger.info(dict(device=options.device))

    for label, spec in DEVICES.items():
        pattern = re.compile(spec['devpath'])
        match = pattern.match(options.device)
        if match is None:
            continue

        node_2 = spec['node'].format(*match.groups())

        # Block until the kernel has created the node — udev may fire
        # before the sysfs attributes exist.
        while not os.path.exists(node_2):
            time.sleep(1)

        cmd_2 = spec['cmd'].format(*match.groups())

        subprocess.check_call(cmd_2, shell=True)

        logger.info(dict(
            devpath_m=match,
            node_2=node_2,
            cmd_2=cmd_2,
            msg='processed',
            label=label,
        ))

        processed += 1

    if processed == 0:
        raise NotImplementedError


if __name__ == '__main__':
    run()

@ -0,0 +1,73 @@
#!/usr/bin/python3
#vi syntax=python
import subprocess
import sys
import logging
from typing import (Optional,)
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
# Suspend/resume hook dispatched by its first CLI argument:
#   before-suspend : disable wifi and unload touchpad/keyboard modules
#   after-suspend  : reload modules and re-enable wifi (reverse order)
#   lid-switch     : mirror /proc ACPI lid state into a virtual evdev switch
if sys.argv[1] == 'before-suspend':
    logger.info('before-suspend started')
    subprocess.check_call(['nmcli', 'radio', 'wifi', 'off'])
    #subprocess.check_call(['modprobe', '-r', 'atkbd',])
    subprocess.check_call(['modprobe', '-r', 'ideapad_laptop',])
    subprocess.check_call(['modprobe', '-r', 'i8042',])
    logger.info('before-suspend done')
elif sys.argv[1] == 'after-suspend':
    logger.info('after-suspend started')
    subprocess.check_call(['modprobe', 'i8042',])
    subprocess.check_call(['modprobe', 'ideapad_laptop',])
    #subprocess.check_call(['modprobe', 'atkbd',])
    subprocess.check_call(['nmcli', 'radio', 'wifi', 'on'])
    #subprocess.check_call(['rfkill', 'unblock', '109'])
    #subprocess.check_call(r'''
    #  # systemctl restart wg-quick@siarhei-hp.service
    #''', shell=True,)
    logger.info('after-suspend done')
elif sys.argv[1] == 'lid-switch':
    import evdev
    import time
    import io

    # EV_SW (type 5) / SW_LID (code 0): expose a virtual lid switch device.
    lid = evdev.UInput({5 : [0]}, name="virtual-lid-switch")
    last_state : Optional[bool] = None
    try:
        while True:
            try:
                with io.open('/proc/acpi/button/lid/LID0/state', 'r') as f:
                    value = f.read()
            except Exception:
                # FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit — while the read kept
                # failing the retry loop could not be interrupted.
                logger.exception('')
                value = None
                time.sleep(1)
                continue
            if value is not None:
                is_opened = 'open' in value
                if last_state != is_opened:
                    if is_opened:
                        logger.info(dict(msg='lid opened'))
                        # value 0 = lid open; trailing (0,0,0) is EV_SYN flush
                        lid.write(5, 0, 0)
                        lid.write(0, 0, 0)
                    else:
                        logger.info(dict(msg='lid closed'))
                        # value 1 = lid closed
                        lid.write(5, 0, 1)
                        lid.write(0, 0, 0)
                    last_state = is_opened
            time.sleep(0.1)
    finally:
        lid.close()
else:
    raise NotImplementedError

@ -53,7 +53,10 @@ def js(argv: list[str]) -> int:
'--project-directory',
Settings.settings().project_root,
'-f',
Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml',
Settings.settings().project_root
/ 'docker'
/ 'js'
/ 'docker-compose.yml',
*argv,
]
)
@ -67,7 +70,15 @@ def env(
env_path = Settings.settings().env_path
if not env_path.exists():
subprocess.check_call([sys.executable, '-m', 'venv', '--system-site-packages', str(env_path)])
subprocess.check_call(
[
sys.executable,
'-m',
'venv',
'--system-site-packages',
str(env_path),
]
)
subprocess.check_call(
[
@ -233,7 +244,9 @@ Command: TypeAlias = Literal[
def run(argv: Optional[list[str]] = None) -> None:
logging.basicConfig(
level=logging.INFO,
format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
format=(
'%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
),
)
if argv is None:

@ -58,8 +58,18 @@ class CLI(_cli.CLI):
self._projects: dict[str, _cli.Project] = {
'online.fxreader.pr34': _cli.Project(
source_dir=self.settings.base_dir / 'python',
build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build',
dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install',
build_dir=self.settings.base_dir
/ 'tmp'
/ 'online'
/ 'fxreader'
/ 'pr34'
/ 'build',
dest_dir=self.settings.base_dir
/ 'tmp'
/ 'online'
/ 'fxreader'
/ 'pr34'
/ 'install',
meson_path=self.settings.base_dir / 'python' / 'meson.build',
),
'online.fxreader.pr34.commands_typed.archlinux': _cli.Project(
@ -125,7 +135,9 @@ class CLI(_cli.CLI):
parser = argparse.ArgumentParser()
parser.add_argument('command', choices=[o.value for o in Command])
parser.add_argument('-p', '--project', choices=[o for o in self.projects])
parser.add_argument(
'-p', '--project', choices=[o for o in self.projects]
)
parser.add_argument(
'-o',
'--output_dir',

Binary file not shown.

@ -1,5 +1,7 @@
#!/usr/bin/env python3
import glob
import importlib
import json
import io
import tempfile
import dataclasses
@ -8,13 +10,18 @@ import sys
import subprocess
import os
import logging
import re
import typing
from typing import (
Optional,
Any,
cast,
Type,
TypeVar,
Callable,
overload,
)
if typing.TYPE_CHECKING:
@ -23,22 +30,29 @@ if typing.TYPE_CHECKING:
BinaryIO,
)
logger = logging.getLogger(__name__)
def toml_load(f: 'BinaryIO') -> Any:
try:
import tomllib
tomllib = importlib.import_module('tomllib')
return tomllib.load(f)
except:
return cast(
Callable[[Any], Any],
getattr(
tomllib,
'load',
),
)(f)
except ModuleNotFoundError:
pass
try:
import tomli
return tomli.load(f)
except:
except ModuleNotFoundError:
pass
raise NotImplementedError
@ -46,13 +60,146 @@ def toml_load(f: 'BinaryIO') -> Any:
@dataclasses.dataclass
class PyProject:
@dataclasses.dataclass
class Module:
name: str
meson: Optional[pathlib.Path] = None
tool: dict[str, Any] = dataclasses.field(default_factory=lambda: dict())
scripts: dict[str, str] = dataclasses.field(default_factory=lambda: dict())
project: dict[str, Any] = dataclasses.field(default_factory=lambda: dict())
path: pathlib.Path
dependencies: dict[str, list[str]]
name: Optional[str] = None
version: Optional[str] = None
early_features: Optional[list[str]] = None
pip_find_links: Optional[list[pathlib.Path]] = None
runtime_libdirs: Optional[list[pathlib.Path]] = None
runtime_preload: Optional[list[pathlib.Path]] = None
requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
@dataclasses.dataclass
class ThirdPartyRoot:
package: Optional[str] = None
module_root: Optional[str] = None
path: Optional[str] = None
third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
default_factory=lambda: [],
)
requirements: dict[str, pathlib.Path] = dataclasses.field(
default_factory=lambda: dict()
)
modules: list[Module] = dataclasses.field(
default_factory=lambda: [],
)
tool: dict[str, Any] = dataclasses.field(
default_factory=lambda: dict(),
)
Key = TypeVar('Key')
Value = TypeVar('Value')


@overload
def check_dict(
    value: Any,
    KT: Type[Key],
    VT: Type[Value],
) -> dict[Key, Value]: ...


@overload
def check_dict(
    value: Any,
    KT: Type[Key],
) -> dict[Key, Any]: ...


def check_dict(
    value: Any,
    KT: Type[Key],
    VT: Optional[Type[Value]] = None,
) -> dict[Key, Value]:
    """Runtime-checked narrowing of ``value`` to ``dict[KT, VT]``.

    Asserts that ``value`` is a dict whose keys are instances of ``KT`` and,
    when ``VT`` is given, whose values are instances of ``VT`` (for a
    parameterised generic like ``list[str]`` only the origin class ``list``
    is checked).  Returns ``value`` itself, re-typed.
    """
    assert isinstance(value, dict)
    checked = cast(dict[Any, Any], value)

    # Resolve the runtime class to check values against: for generics such
    # as list[str] use the origin (list); otherwise use VT directly.
    value_class: Optional[type[Any]] = None
    if VT is not None:
        origin = typing.get_origin(VT)
        value_class = cast(type[Any], origin) if origin is not None else VT

    assert all(
        isinstance(k, KT) and (value_class is None or isinstance(v, value_class))
        for k, v in checked.items()
    )

    if VT is None:
        return cast(dict[Key, Any], value)
    return cast(dict[Key, Value], value)
@overload
def check_list(
value: Any,
VT: Type[Value],
) -> list[Value]: ...
@overload
def check_list(
value: Any,
) -> list[Any]: ...
def check_list(
value: Any,
VT: Optional[Type[Value]] = None,
) -> list[Value] | list[Any]:
assert isinstance(value, list)
value2 = cast(list[Any], value)
assert all([(VT is None or isinstance(o, VT)) for o in value2])
if VT is None:
return cast(
list[Any],
value,
)
else:
return cast(
list[Value],
value,
)
def check_type(
    value: Any,
    VT: Type[Value],
    attribute_name: Optional[str] = None,
) -> Value:
    """Runtime-checked narrowing of a value (or one of its attributes) to ``VT``.

    With ``attribute_name`` set, reads ``getattr(value, attribute_name)``,
    asserts it is an instance of ``VT``, and returns the attribute.
    Otherwise asserts ``value`` itself is an instance of ``VT`` and returns it.
    """
    if attribute_name:
        target = getattr(value, attribute_name)
        assert isinstance(target, VT)
        return target
    assert isinstance(value, VT)
    return value
def pyproject_load(
@ -70,55 +217,158 @@ def pyproject_load(
if 'optional-dependencies' in content['project']:
assert isinstance(content['project']['optional-dependencies'], dict)
for k, v in content['project']['optional-dependencies'].items():
assert isinstance(v, list)
assert isinstance(k, str)
for k, v in check_dict(
check_dict(
check_dict(
content,
str,
# Any,
)['project'],
str,
# Any,
)['optional-dependencies'],
str,
list[Any],
).items():
# assert isinstance(v, list)
# assert isinstance(k, str)
dependencies[k] = v
name: Optional[str] = None
if 'name' in content.get('project', {}):
name = content['project']['name']
version: Optional[str] = None
if 'version' in content.get('project', {}):
version = content['project']['version']
res = PyProject(
path=d,
dependencies=dependencies,
name=name,
version=version,
)
tool_name = 'online.fxreader.pr34'.replace('.', '-')
if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
if 'early_features' in content['tool'][tool_name]:
res.early_features = content['tool'][tool_name]['early_features']
if 'tool' in content:
res.tool = check_dict(
content['tool'],
str,
)
if 'pip_find_links' in content['tool'][tool_name]:
res.pip_find_links = [d.parent / pathlib.Path(o) for o in content['tool'][tool_name]['pip_find_links']]
if (
'tool' in content
and isinstance(content['tool'], dict)
and tool_name in content['tool']
and isinstance(content['tool'][tool_name], dict)
):
pr34_tool = check_dict(
check_dict(
content['tool'],
str,
)[tool_name],
str,
)
if 'runtime_libdirs' in content['tool'][tool_name]:
if 'early_features' in pr34_tool:
res.early_features = pr34_tool['early_features']
if 'pip_find_links' in pr34_tool:
res.pip_find_links = [
d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
]
if 'runtime_libdirs' in pr34_tool:
res.runtime_libdirs = [
d.parent / pathlib.Path(o)
# pathlib.Path(o)
for o in content['tool'][tool_name]['runtime_libdirs']
for o in check_list(pr34_tool['runtime_libdirs'], str)
]
if 'runtime_preload' in content['tool'][tool_name]:
if 'runtime_preload' in pr34_tool:
res.runtime_preload = [
d.parent / pathlib.Path(o)
# pathlib.Path(o)
for o in content['tool'][tool_name]['runtime_preload']
for o in check_list(pr34_tool['runtime_preload'], str)
]
if 'requirements' in content['tool'][tool_name]:
assert isinstance(content['tool'][tool_name]['requirements'], dict)
if 'third_party_roots' in pr34_tool:
for o in check_list(pr34_tool['third_party_roots']):
o2 = check_dict(o, str, str)
assert all(
[k in {'package', 'module_root', 'path'} for k in o2]
)
res.third_party_roots.append(
PyProject.ThirdPartyRoot(
package=o2.get('package'),
module_root=o2.get('module_root'),
path=o2.get('path'),
)
)
if 'requirements' in pr34_tool:
res.requirements = {
k: d.parent / pathlib.Path(v)
# pathlib.Path(o)
for k, v in content['tool'][tool_name]['requirements'].items()
for k, v in check_dict(
pr34_tool['requirements'], str, str
).items()
}
if 'modules' in pr34_tool:
modules = check_list(pr34_tool['modules'])
# res.modules = []
for o in modules:
assert isinstance(o, dict)
assert 'name' in o and isinstance(o['name'], str)
module = PyProject.Module(
name=o['name'],
)
if 'meson' in o:
assert 'meson' in o and isinstance(o['meson'], str)
module.meson = pathlib.Path(o['meson'])
if 'tool' in o:
module.tool.update(
check_dict(
o['tool'],
str,
)
)
if 'scripts' in o:
module.scripts.update(
check_dict(
o['scripts'],
str,
str,
)
)
if 'project' in o:
module.project.update(
check_dict(
o['project'],
str,
)
)
res.modules.append(module)
return res
@dataclasses.dataclass
class BootstrapSettings:
env_path: pathlib.Path
whl_cache_path: pathlib.Path
python_path: pathlib.Path
base_dir: pathlib.Path
python_version: Optional[str] = dataclasses.field(
@ -131,12 +381,27 @@ class BootstrapSettings:
),
).strip()
)
pip_check_conflicts: Optional[bool] = dataclasses.field(
default_factory=lambda: os.environ.get(
'PIP_CHECK_CONFLICTS', json.dumps(True)
)
in [json.dumps(True)],
)
uv_cache_dir: str = dataclasses.field(
default_factory=lambda: os.environ.get(
'UV_CACHE_DIR',
str(pathlib.Path.cwd() / '.uv-cache'),
)
)
uv_args: list[str] = dataclasses.field(
default_factory=lambda: os.environ.get(
'UV_ARGS',
'--offline',
'--no-index -U',
).split(),
)
whl_cache_update: Optional[bool] = dataclasses.field(default_factory=lambda: os.environ.get('WHL_CACHE_UPDATE', json.dumps(False)) in [json.dumps(True)])
uv_compile_allow_index: bool = dataclasses.field(default_factory=lambda: os.environ.get('UV_COMPILE_ALLOW_INDEX', json.dumps(False)) in [json.dumps(True)])
venv_partial: bool = dataclasses.field(default_factory=lambda: os.environ.get('VENV_PARTIAL', json.dumps(False)) in [json.dumps(True)])
@classmethod
def get(
@ -146,20 +411,181 @@ class BootstrapSettings:
if base_dir is None:
base_dir = pathlib.Path.cwd()
env_path = base_dir / '.venv'
env_path: Optional[pathlib.Path] = None
if 'ENV_PATH' in os.environ:
env_path = pathlib.Path(os.environ['ENV_PATH'])
else:
env_path = base_dir / '.venv'
whl_cache_path = env_path.parent / '.venv-whl-cache'
python_path = env_path / 'bin' / 'python3'
return cls(
base_dir=base_dir,
env_path=env_path,
whl_cache_path=whl_cache_path,
python_path=python_path,
)
class requirements_name_get_t:
@dataclasses.dataclass
class res_t:
not_compiled: pathlib.Path
compiled: pathlib.Path
name: str
def requirements_name_get(
source_dir: pathlib.Path,
python_version: Optional[str],
features: list[str],
requirements: dict[str, pathlib.Path],
) -> requirements_name_get_t.res_t:
requirements_python_version: Optional[str] = None
if not python_version is None:
requirements_python_version = python_version.replace('.', '_')
requirements_name = '_'.join(sorted(features))
if requirements_python_version:
requirements_name += '_' + requirements_python_version
requirements_path: Optional[pathlib.Path] = None
if requirements_name in requirements:
requirements_path = requirements[requirements_name]
else:
requirements_path = source_dir / 'requirements.txt'
requirements_path_in = requirements_path.parent / (
requirements_path.stem + '.in'
)
requirements_in: list[str] = []
return requirements_name_get_t.res_t(
not_compiled=requirements_path_in,
compiled=requirements_path,
name=requirements_name,
)
class packaging_t:
    """Minimal, dependency-free stand-in for `packaging` name/version parsing."""

    class constants_t:
        # PEP 503 style: runs of '-', '_', '.' collapse to a single '-'.
        canonicalize_re: typing.ClassVar[re.Pattern[str]] = re.compile(r'[-_.]+')
        # `name==version` pins, optionally followed by whitespace or a ';' marker.
        req_spec_re: typing.ClassVar[re.Pattern[str]] = re.compile(r'^([a-zA-Z0-9._-]+)==([^\s;]+)')

    @dataclasses.dataclass
    class pkg_id_t:
        name: str
        version: str

    @staticmethod
    def canonicalize_name(name: str) -> str:
        """Normalise a distribution name to its canonical lowercase form."""
        collapsed = packaging_t.constants_t.canonicalize_re.sub('-', name)
        return collapsed.lower()

    @staticmethod
    def parse_whl_name_version(filename: str) -> Optional['packaging_t.pkg_id_t']:
        """Extract (name, version) from a wheel filename, or None if not a wheel."""
        if not filename.endswith('.whl'):
            return None
        parts = filename.split('-')
        if len(parts) < 3:
            return None
        return packaging_t.pkg_id_t(
            name=packaging_t.canonicalize_name(parts[0]),
            version=parts[1],
        )

    @staticmethod
    def parse_req_spec(line: str) -> Optional['packaging_t.pkg_id_t']:
        """Parse a pinned `name==version` requirement line, or None if unpinned."""
        matched = packaging_t.constants_t.req_spec_re.match(line)
        if matched is None:
            return None
        return packaging_t.pkg_id_t(
            name=packaging_t.canonicalize_name(matched.group(1)),
            version=matched.group(2),
        )
def whl_cache_download(
    whl_cache_path: pathlib.Path,
    requirements_path: pathlib.Path,
    uv_python_version: list[str],
    pip_find_links_args: list[str],
) -> None:
    """Populate ``whl_cache_path`` with wheels for all pins in ``requirements_path``.

    Indexes already-cached ``*.whl`` files by canonical (name, version), diffs
    them against the ``name==version`` pins in the requirements file, and runs
    ``pip download --only-binary=:all:`` for the missing specs only (no-op when
    everything is cached).

    FIX: removed the dead ``if spec.startswith('#')`` re-check — ``stripped``
    was already filtered for '#', and ``rstrip`` cannot change the first char.
    """
    whl_cache_path.mkdir(parents=True, exist_ok=True)

    # Index of wheels already present in the cache.
    cached_pkgs: set[tuple[str, str]] = set()
    for whl in whl_cache_path.glob('*.whl'):
        parsed = packaging_t.parse_whl_name_version(whl.name)
        if parsed is not None:
            cached_pkgs.add((parsed.name, parsed.version))

    missing_reqs: list[str] = []
    with io.open(requirements_path, 'r') as f:
        for line in f:
            stripped = line.strip()
            # Skip blanks, comments, and `--hash=...` continuation lines
            # emitted by `uv pip compile --generate-hashes`.
            if not stripped or stripped.startswith('#') or stripped.startswith('--hash'):
                continue
            # Drop the trailing line-continuation backslash of hash-pinned specs.
            spec = stripped.rstrip(' \\')
            parsed = packaging_t.parse_req_spec(spec)
            if parsed is not None and (parsed.name, parsed.version) in cached_pkgs:
                logger.info(dict(msg='cached', pkg='%s==%s' % (parsed.name, parsed.version)))
                continue
            # Unparseable specs fall through to pip verbatim (safe fallback).
            missing_reqs.append(spec)

    if not missing_reqs:
        logger.info(dict(msg='all wheels cached, skipping pip download'))
        return

    logger.info(dict(msg='downloading missing wheels', count=len(missing_reqs), pkgs=missing_reqs))

    # delete=False + manual unlink: the file must remain on disk while the
    # pip subprocess reads it (NamedTemporaryFile cannot be reopened on Windows).
    with tempfile.NamedTemporaryFile(mode='w', prefix='requirements_missing_', suffix='.txt', delete=False) as f:
        f.write('\n'.join(missing_reqs))
        f.flush()
        missing_req_path = f.name
    try:
        cmd = [
            sys.executable,
            '-m',
            'pip',
            'download',
            '--only-binary=:all:',
            *uv_python_version,
            *pip_find_links_args,
            '-r',
            missing_req_path,
            '-d',
            str(whl_cache_path),
        ]
        logger.info(dict(cmd=cmd))
        subprocess.check_call(cmd)
    finally:
        os.unlink(missing_req_path)
def check_host_prerequisites() -> None:
    """Exit with status 1 unless `pip` and `uv` run under the current interpreter.

    Probes each module via ``python -m <mod> --version`` with output discarded;
    a non-zero exit or missing interpreter aborts the bootstrap immediately.
    """
    for module in ('pip', 'uv'):
        probe = [sys.executable, '-m', module, '--version']
        try:
            subprocess.check_call(
                probe,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL,
            )
        except (subprocess.CalledProcessError, FileNotFoundError):
            logger.error('[bootstrap] %s -m %s is not available on the host system' % (sys.executable, module))
            sys.exit(1)
def env_bootstrap(
bootstrap_settings: BootstrapSettings,
pyproject: PyProject,
) -> None:
check_host_prerequisites()
pip_find_links: list[pathlib.Path] = []
if not pyproject.pip_find_links is None:
@ -173,7 +599,7 @@ def env_bootstrap(
]
for o in pip_find_links
],
[],
cast(list[str], []),
)
features: list[str] = []
@ -181,31 +607,29 @@ def env_bootstrap(
if pyproject.early_features:
features.extend(pyproject.early_features)
requirements_python_version: Optional[str] = None
if not bootstrap_settings.python_version is None:
requirements_python_version = bootstrap_settings.python_version.replace('.', '_')
requirements_name = '_'.join(sorted(features))
if requirements_python_version:
requirements_name += '_' + requirements_python_version
requirements_path: Optional[pathlib.Path] = None
if requirements_name in pyproject.requirements:
requirements_path = pyproject.requirements[requirements_name]
else:
requirements_path = pyproject.path.parent / 'requirements.txt'
requirements_name_get_res = requirements_name_get(
python_version=bootstrap_settings.python_version,
features=features,
requirements=pyproject.requirements,
source_dir=pyproject.path.parent,
)
requirements_path = requirements_name_get_res.compiled
requirements_in: list[str] = []
requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
requirements_in.extend(
['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
)
if pyproject.early_features:
early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], [])
early_dependencies = sum(
[pyproject.dependencies[o] for o in pyproject.early_features],
cast(list[str], []),
)
logger.info(
dict(
requirements_name_get_res=requirements_name_get_res,
early_dependencies=early_dependencies,
)
)
@ -222,67 +646,144 @@ def env_bootstrap(
# *early_dependencies,
# ])
if not requirements_path.exists():
with tempfile.NamedTemporaryFile(
mode='w',
prefix='requirements',
suffix='.in',
) as f:
f.write('\n'.join(requirements_in))
f.flush()
subprocess.check_call(
[
'uv',
'pip',
'compile',
'--generate-hashes',
*pip_find_links_args,
# '-p',
# bootstrap_settings.python_path,
*bootstrap_settings.uv_args,
'-o',
str(requirements_path),
f.name,
]
)
uv_python_version: list[str] = []
venv_python_version: list[str] = []
if not bootstrap_settings.python_version is None:
uv_python_version.extend(
[
# '-p',
'--python-version',
bootstrap_settings.python_version,
]
)
venv_python_version.extend(
[
'-p',
# '--python-version',
bootstrap_settings.python_version,
]
)
subprocess.check_call(
[
'uv',
'venv',
*uv_python_version,
*pip_find_links_args,
# '--seed',
*bootstrap_settings.uv_args,
str(bootstrap_settings.env_path),
]
)
logger.info('[bootstrap] step 1/5: compile requirements')
subprocess.check_call(
[
'uv',
'pip',
'install',
*pip_find_links_args,
'-p',
bootstrap_settings.python_path,
'--require-hashes',
*bootstrap_settings.uv_args,
'-r',
str(requirements_path),
]
)
needs_compile = not requirements_path.exists()
constraint_args: list[str] = []
if bootstrap_settings.venv_partial and requirements_path.exists():
logger.info('[bootstrap] VENV_PARTIAL: recompiling with existing requirements.txt as constraints')
needs_compile = True
constraint_args = ['-c', str(requirements_path)]
cache_find_links_args: list[str] = []
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if needs_compile:
with (
tempfile.NamedTemporaryFile(
mode='w',
prefix='requirements',
suffix='.in',
) as f_in,
tempfile.NamedTemporaryFile(
mode='w',
prefix='requirements',
suffix='.txt',
dir=requirements_path.parent,
delete=False,
) as f_out,
):
f_in.write('\n'.join(requirements_in))
f_in.flush()
uv_compile_args = bootstrap_settings.uv_args
if bootstrap_settings.uv_compile_allow_index:
uv_compile_args = [o for o in uv_compile_args if o not in ('--no-index', '-U', '--upgrade')]
if len(constraint_args) > 0:
uv_compile_args = [o for o in uv_compile_args if o not in ('-U', '--upgrade')]
cmd = [
'uv',
'--cache-dir',
bootstrap_settings.uv_cache_dir,
'pip',
'compile',
*uv_python_version,
'--generate-hashes',
'--no-annotate',
'--no-header',
*pip_find_links_args,
*cache_find_links_args,
*constraint_args,
*uv_compile_args,
'-o',
f_out.name,
f_in.name,
]
logger.info(dict(cmd=cmd))
try:
subprocess.check_call(cmd)
os.replace(f_out.name, str(requirements_path))
except subprocess.CalledProcessError:
os.unlink(f_out.name)
raise
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if bootstrap_settings.venv_partial and bootstrap_settings.env_path.exists():
logger.info('[bootstrap] VENV_PARTIAL: skipping venv creation (already exists)')
else:
subprocess.check_call(
[
'uv',
'--cache-dir',
bootstrap_settings.uv_cache_dir,
*[o for o in bootstrap_settings.uv_args if o not in ['-U', '--upgrade', '--no-index']],
'venv',
*venv_python_version,
*cache_find_links_args,
str(bootstrap_settings.env_path),
]
)
cmd = [
'uv',
'--cache-dir',
bootstrap_settings.uv_cache_dir,
'pip',
'install',
*uv_python_version,
*cache_find_links_args,
'-p',
str(bootstrap_settings.python_path),
'--require-hashes',
*bootstrap_settings.uv_args,
'-r',
str(requirements_path),
]
logger.info(dict(cmd=cmd))
subprocess.check_call(cmd)
if bootstrap_settings.pip_check_conflicts:
subprocess.check_call(
[
bootstrap_settings.python_path,
'-m',
'online.fxreader.pr34.commands',
'pip_check_conflicts',
]
)
def paths_equal(a: pathlib.Path | str, b: pathlib.Path | str) -> bool:
@ -303,9 +804,12 @@ def run(
pyproject: PyProject = pyproject_load(d)
logging.basicConfig(level=logging.INFO)
logging.basicConfig(
level=logging.INFO,
format='%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s',
)
if not bootstrap_settings.env_path.exists():
if not bootstrap_settings.env_path.exists() or bootstrap_settings.venv_partial:
env_bootstrap(
bootstrap_settings=bootstrap_settings,
pyproject=pyproject,
@ -334,6 +838,6 @@ def run(
if __name__ == '__main__':
run(
d=pathlib.Path(__file__).parent / 'pyproject.toml',
d=pathlib.Path(__file__).parent / 'pyproject.common.toml',
cli_path=pathlib.Path(__file__).parent / 'cli.py',
)

@ -5,7 +5,7 @@ project(
).stdout().strip('\n'),
# 'online.fxreader.uv',
# ['c', 'cpp'],
version: '0.1.5.17+27.23',
version: '0.1.5.65',
# default_options: [
# 'cpp_std=c++23',
# # 'prefer_static=true',

File diff suppressed because it is too large Load Diff

@ -504,9 +504,7 @@ class cache_db_t:
snapshots = [dict(zip(snap_columns, raw)) for raw in cur.fetchall()]
cur.execute(
'SELECT id, snapshot_id, name, version, base, desc, filename, '
'csize, isize, md5sum, sha256sum, url, arch, builddate, packager '
'FROM packages'
'SELECT id, snapshot_id, name, version, base, desc, filename, csize, isize, md5sum, sha256sum, url, arch, builddate, packager FROM packages'
)
pkg_columns = [desc[0] for desc in cur.description]

@ -0,0 +1,68 @@
import fastapi
import importlib
import pydantic
import functools
import logging
import copy
import uvicorn
import uvicorn.config
import sys
from .settings import Settings as APISettings
from typing import (
Any,
Optional,
Literal,
Annotated,
cast,
Callable,
)
logger = logging.getLogger(__name__)
def create_app() -> fastapi.FastAPI:
    """Build the FastAPI application by mounting every configured router.

    Each entry in ``APISettings.singleton().apps`` is a ``module:factory:prefix``
    triple; the factory must return a ``fastapi.APIRouter``.
    """
    app = fastapi.FastAPI()

    logger.info(dict(msg='started loading apps'))
    for app_config in APISettings.singleton().apps:
        logger.info(dict(msg='start loading app = {}'.format(app_config)))

        app_module, app_method, app_prefix = app_config.split(':')

        factory = cast(
            Callable[[], Any],
            getattr(importlib.import_module(app_module), app_method),
        )
        app_router = factory()
        assert isinstance(app_router, fastapi.APIRouter)

        app.include_router(
            app_router,
            prefix=app_prefix,
            # prefix='/',
        )

        logger.info(dict(msg='done loading app = {}'.format(app_config)))
    logger.info(dict(msg='done loading apps'))

    return app
def run(args: list[str]) -> None:
    """Entry point: serve ``create_app`` with uvicorn on the configured host/port.

    NOTE(review): ``args`` is currently unused — CLI arguments are ignored;
    confirm that is intentional.
    """
    logging.basicConfig(level=logging.INFO)

    # Deep copy so later tweaks cannot mutate uvicorn's module-level default.
    log_config = copy.deepcopy(uvicorn.config.LOGGING_CONFIG)

    uvicorn.run(
        create_app,
        host=APISettings.singleton().uvicorn_host,
        port=APISettings.singleton().uvicorn_port,
        loop='uvloop',
        log_config=log_config,
        log_level=logging.INFO,
    )
if __name__ == '__main__':
run(sys.argv[1:])

@ -0,0 +1,29 @@
import pydantic
import pydantic_settings
from typing import (
ClassVar,
Optional,
Annotated,
)
class Settings(pydantic_settings.BaseSettings):
    """API settings model (pydantic-settings), accessed via the lazy singleton."""

    # 'module:factory:prefix' router specs consumed by create_app(); empty by default.
    apps: Annotated[
        list[str],
        pydantic.Field(
            default_factory=list,
        ),
    ]

    uvicorn_port: int = 80
    uvicorn_host: str = '127.0.0.1'

    # Lazily created process-wide instance; underscore-prefixed, so pydantic
    # treats it as private and not as a settings field.
    _singleton: ClassVar[Optional['Settings']] = None

    @classmethod
    def singleton(cls) -> 'Settings':
        # First call validates an empty dict, i.e. everything comes from
        # environment variables and field defaults.
        if cls._singleton is None:
            cls._singleton = Settings.model_validate({})
        return cls._singleton

@ -220,9 +220,13 @@ class CLI(abc.ABC):
) -> None:
from . import cli_bootstrap
pyproject = cli_bootstrap.pyproject_load(self.projects[project].source_dir / 'pyproject.toml')
pyproject = cli_bootstrap.pyproject_load(
self.projects[project].source_dir / 'pyproject.toml'
)
dependencies = sum([pyproject.dependencies[o] for o in features], cast(list[str], []))
dependencies = sum(
[pyproject.dependencies[o] for o in features], cast(list[str], [])
)
pip_find_links: list[pathlib.Path] = []
@ -264,7 +268,9 @@ class CLI(abc.ABC):
force: bool,
) -> None:
for k, d in self.dependencies.items():
whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_'))
whl_glob = self.dist_settings.wheel_dir / (
'*%s*.whl' % d.name.replace('.', '_')
)
if len(glob.glob(str(whl_glob))) == 0 or force:
if d.source_path.exists():
@ -304,7 +310,9 @@ class CLI(abc.ABC):
def index_get(o: dict[str, Any]) -> tuple[Any, ...]:
return (o['path'], o['stat'])
present_files_index = {index_get(o): o for o in present_files}
present_files_index = {
index_get(o): o for o in present_files
}
new_files: list[dict[str, Any]] = []
@ -329,17 +337,33 @@ class CLI(abc.ABC):
]
)
@property
# @property
def pkg_config_path(
self,
) -> set[pathlib.Path]:
return {
pathlib.Path(o)
for o in glob.glob(
str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'),
recursive=True,
)
}
project: Optional[str] = None,
) -> list[pathlib.Path]:
res: list[pathlib.Path] = []
if project:
res.append(self.projects[project].dest_dir / 'lib' / 'pkgconfig')
res.extend(
[
pathlib.Path(o)
for o in glob.glob(
str(
self.dist_settings.env_path
/ 'lib'
/ 'python*'
/ '**'
/ 'pkgconfig'
),
recursive=True,
)
]
)
return res
def deploy_wheel(
self,
@ -431,6 +455,14 @@ class CLI(abc.ABC):
if env is None:
env = dict()
env = (
dict(
# to generate zip for .whl with a reproducible checksum
SOURCE_DATE_EPOCH='0',
)
| env
)
extra_args: list[str] = []
pyproject_build_dir = project.build_dir / 'pyproject'
@ -445,7 +477,18 @@ class CLI(abc.ABC):
shutil.rmtree(pyproject_build_dir)
if len(self.third_party_roots(project_name)) > 0:
extra_args.append('-Csetup-args=%s' % ('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)])))
extra_args.append(
'-Csetup-args=%s'
% (
'-Dthird_party_roots=%s'
% json.dumps(
[
str(o.absolute())
for o in self.third_party_roots(project_name)
]
)
)
)
cmd = [
sys.executable,
@ -531,11 +574,21 @@ class CLI(abc.ABC):
preserve_top_path=True,
)
pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
pyproject = cli_bootstrap.pyproject_load(
project.source_dir / 'pyproject.toml'
)
pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root
pyproject_tool = (
pydantic.RootModel[PyProject.Tool]
.model_validate(pyproject.tool)
.root
)
if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.install:
if (
pyproject_tool.meson
and pyproject_tool.meson.args
and pyproject_tool.meson.args.install
):
argv = pyproject_tool.meson.args.install + argv
cmd = [
@ -580,16 +633,22 @@ class CLI(abc.ABC):
content = f.read()
with io.open(o, 'w') as f:
f.write(content.replace('prefix=/', 'prefix=${pcfiledir}/../../'))
f.write(
content.replace('prefix=/', 'prefix=${pcfiledir}/../../')
)
def ninja(
self,
project_name: str,
argv: Optional[list[str]] = None,
env: Optional[dict[str, str]] = None,
mode: Optional[Literal['meson', 'pyproject']] = None,
) -> None:
project = self.projects[project_name]
if mode is None:
mode = 'meson'
if argv is None:
argv = []
@ -602,7 +661,7 @@ class CLI(abc.ABC):
[
shutil_which('ninja', True),
'-C',
str(project.build_dir / 'meson'),
str(project.build_dir / mode),
*argv,
],
env=self.make_env(env=env),
@ -670,18 +729,30 @@ class CLI(abc.ABC):
res: list[pathlib.Path] = []
if not project_name is None:
pyproject = cli_bootstrap.pyproject_load(self.projects[project_name].source_dir / 'pyproject.toml')
pyproject = cli_bootstrap.pyproject_load(
self.projects[project_name].source_dir / 'pyproject.toml'
)
for third_party_root in pyproject.third_party_roots:
if third_party_root.package:
if not third_party_root.module_root:
third_party_root.module_root = third_party_root.package.replace('.', os.path.sep)
third_party_root.module_root = (
third_party_root.package.replace('.', os.path.sep)
)
if not third_party_root.path:
packages = pip_show([third_party_root.package])
assert len(packages) == 1
third_party_root.path = str(pathlib.Path(packages[0].location) / third_party_root.module_root / 'lib')
third_party_root.path = str(
pathlib.Path(packages[0].location)
/ third_party_root.module_root
/ 'lib'
)
else:
assert not third_party_root.package and not third_party_root.module_root and third_party_root.path
assert (
not third_party_root.package
and not third_party_root.module_root
and third_party_root.path
)
res.append(pathlib.Path(third_party_root.path))
@ -697,8 +768,12 @@ class CLI(abc.ABC):
path: Optional[pathlib.Path] = None
@property
def meson_toolchains(self) -> dict[str, meson_toolchains_t.res_t.toolchain_t]:
t1 = pathlib.Path(importlib.import_module('online.fxreader.pr34').__path__[0])
def meson_toolchains(
self,
) -> dict[str, meson_toolchains_t.res_t.toolchain_t]:
t1 = pathlib.Path(
importlib.import_module('online.fxreader.pr34').__path__[0]
)
toolchains = glob.glob(str(t1 / 'meson' / 'toolchains' / '*'))
res: dict[str, CLI.meson_toolchains_t.res_t.toolchain_t] = dict()
@ -723,7 +798,11 @@ class CLI(abc.ABC):
) -> list[str]:
from . import argparse as pr34_argparse
if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.setup:
if (
pyproject_tool.meson
and pyproject_tool.meson.args
and pyproject_tool.meson.args.setup
):
extra_args = pyproject_tool.meson.args.setup + extra_args
parser = argparse.ArgumentParser()
@ -738,8 +817,13 @@ class CLI(abc.ABC):
options, args = pr34_argparse.parse_args(parser, extra_args)
if not options.cross_file is None:
if not options.cross_file.exists() and (not options.cross_file.is_absolute() and options.cross_file.stem in self.meson_toolchains):
options.cross_file = self.meson_toolchains[options.cross_file.stem].path
if not options.cross_file.exists() and (
not options.cross_file.is_absolute()
and options.cross_file.stem in self.meson_toolchains
):
options.cross_file = self.meson_toolchains[
options.cross_file.stem
].path
extra_args = ['--cross-file', str(options.cross_file)] + args
@ -751,11 +835,15 @@ class CLI(abc.ABC):
force: bool,
argv: Optional[list[str]] = None,
env: Optional[dict[str, str]] = None,
mode: Optional[Literal['meson', 'pyproject']] = None,
# third_party_roots: Optional[list[pathlib.Path]] = None,
) -> None:
from . import cli_bootstrap
from .os import shutil_rmtree
if mode is None:
mode = 'meson'
project = self.projects[project_name]
if argv is None:
@ -764,16 +852,27 @@ class CLI(abc.ABC):
if env is None:
env = dict()
pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
pyproject = cli_bootstrap.pyproject_load(
project.source_dir / 'pyproject.toml'
)
pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root
pyproject_tool = (
pydantic.RootModel[PyProject.Tool]
.model_validate(pyproject.tool)
.root
)
logger.info(dict(env=env))
if force:
if (project.build_dir / 'meson').exists():
logger.info(dict(action='removing build dir', path=project.build_dir / 'meson'))
shutil.rmtree(project.build_dir / 'meson')
if (project.build_dir / mode).exists():
logger.info(
dict(
action='removing build dir',
path=project.build_dir / mode,
)
)
shutil.rmtree(project.build_dir / mode)
extra_args: list[str] = []
@ -783,7 +882,15 @@ class CLI(abc.ABC):
)
if len(self.third_party_roots(project_name)) > 0:
extra_args.append('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)]))
extra_args.append(
'-Dthird_party_roots=%s'
% json.dumps(
[
str(o.absolute())
for o in self.third_party_roots(project_name)
]
)
)
cmd = [
# shutil_which(
@ -795,8 +902,11 @@ class CLI(abc.ABC):
'mesonbuild.mesonmain',
'setup',
str(project.source_dir),
str(project.build_dir / 'meson'),
'-Dmodes=["meson"]',
str(project.build_dir / mode),
'--pkg-config-path={}'.format(
json.dumps([str(o) for o in self.pkg_config_path(project_name)])
),
'-Dmodes=["{}"]'.format(mode),
*extra_args,
# '-Dpkgconfig.relocatable=true',
'-Dprefix=/',
@ -845,14 +955,21 @@ class CLI(abc.ABC):
argv,
)
pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
pyproject = cli_bootstrap.pyproject_load(
project.source_dir / 'pyproject.toml'
)
dependencies = sum([pyproject.dependencies[o] for o in options.features], cast(list[str], []))
dependencies = sum(
[pyproject.dependencies[o] for o in options.features],
cast(list[str], []),
)
pip_find_links: list[pathlib.Path] = []
if not pyproject.pip_find_links is None:
pip_find_links.extend([o for o in pyproject.pip_find_links if o.exists()])
pip_find_links.extend(
[o for o in pyproject.pip_find_links if o.exists()]
)
requirements_name_get_res = cli_bootstrap.requirements_name_get(
source_dir=project.source_dir,
@ -961,7 +1078,9 @@ class CLI(abc.ABC):
assert options.module in [o.name for o in pyproject.modules]
modules: dict[str, cli_bootstrap.PyProject.Module] = {o.name: o for o in pyproject.modules}
modules: dict[str, cli_bootstrap.PyProject.Module] = {
o.name: o for o in pyproject.modules
}
module = modules[options.module]
@ -973,7 +1092,8 @@ class CLI(abc.ABC):
'w',
) as f:
p = pyproject2['project']
assert isinstance(p, tomlkit.items.Table)
# assert isinstance(p, tomlkit.items.Table)
assert isinstance(p, MutableMapping)
p['name'] = module.name
if not pyproject2['tool']:

@ -86,7 +86,9 @@ class PyProject:
third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
default_factory=lambda: [],
)
requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
requirements: dict[str, pathlib.Path] = dataclasses.field(
default_factory=lambda: dict()
)
modules: list[Module] = dataclasses.field(
default_factory=lambda: [],
@ -132,7 +134,12 @@ def check_dict(
else:
VT_class = VT
assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()])
assert all(
[
isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
for k, v in value2.items()
]
)
if VT is None:
return cast(
@ -251,7 +258,12 @@ def pyproject_load(
str,
)
if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
if (
'tool' in content
and isinstance(content['tool'], dict)
and tool_name in content['tool']
and isinstance(content['tool'][tool_name], dict)
):
pr34_tool = check_dict(
check_dict(
content['tool'],
@ -264,7 +276,9 @@ def pyproject_load(
res.early_features = pr34_tool['early_features']
if 'pip_find_links' in pr34_tool:
res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
res.pip_find_links = [
d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
]
if 'runtime_libdirs' in pr34_tool:
res.runtime_libdirs = [
@ -283,7 +297,9 @@ def pyproject_load(
if 'third_party_roots' in pr34_tool:
for o in check_list(pr34_tool['third_party_roots']):
o2 = check_dict(o, str, str)
assert all([k in {'package', 'module_root', 'path'} for k in o2])
assert all(
[k in {'package', 'module_root', 'path'} for k in o2]
)
res.third_party_roots.append(
PyProject.ThirdPartyRoot(
@ -297,7 +313,9 @@ def pyproject_load(
res.requirements = {
k: d.parent / pathlib.Path(v)
# pathlib.Path(o)
for k, v in check_dict(pr34_tool['requirements'], str, str).items()
for k, v in check_dict(
pr34_tool['requirements'], str, str
).items()
}
if 'modules' in pr34_tool:
@ -364,7 +382,10 @@ class BootstrapSettings:
).strip()
)
pip_check_conflicts: Optional[bool] = dataclasses.field(
default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
default_factory=lambda: os.environ.get(
'PIP_CHECK_CONFLICTS', json.dumps(True)
)
in [json.dumps(True)],
)
uv_cache_dir: str = dataclasses.field(
default_factory=lambda: os.environ.get(
@ -438,7 +459,9 @@ def requirements_name_get(
else:
requirements_path = source_dir / 'requirements.txt'
requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')
requirements_path_in = requirements_path.parent / (
requirements_path.stem + '.in'
)
requirements_in: list[str] = []
@ -594,10 +617,15 @@ def env_bootstrap(
requirements_in: list[str] = []
requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
requirements_in.extend(
['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
)
if pyproject.early_features:
early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))
early_dependencies = sum(
[pyproject.dependencies[o] for o in pyproject.early_features],
cast(list[str], []),
)
logger.info(
dict(
@ -647,14 +675,6 @@ def env_bootstrap(
needs_compile = True
constraint_args = ['-c', str(requirements_path)]
if (not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update) and requirements_path.exists():
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
cache_find_links_args: list[str] = []
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
@ -711,15 +731,15 @@ def env_bootstrap(
os.unlink(f_out.name)
raise
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path,
uv_python_version=uv_python_version,
pip_find_links_args=pip_find_links_args,
)
if bootstrap_settings.whl_cache_path.exists():
cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]
if bootstrap_settings.venv_partial and bootstrap_settings.env_path.exists():
logger.info('[bootstrap] VENV_PARTIAL: skipping venv creation (already exists)')

@ -0,0 +1,109 @@
import subprocess
import sys
import json
import logging
from typing import (
Literal,
Optional,
)
import argparse
logger = logging.getLogger(__name__)
def run(argv: list[str]) -> None:
    """CLI entry point: query or change the GNOME colour scheme via ``gsettings``.

    ``argv`` supplies one positional ``action``: ``toggle``, ``dark``,
    ``light`` or ``get``.  ``get`` writes the current theme to stdout as a
    JSON-encoded string; the others mutate the desktop setting.

    Raises ``NotImplementedError`` for an unknown action or an
    unrecognised ``color-scheme`` value.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'action',
        choices=[
            'toggle',
            'dark',
            'light',
            'get',
        ],
        # required=True,
        type=str,
        help='action',
    )

    from .argparse import parse_args as pr34_parse_args

    options, args = pr34_parse_args(parser, argv)

    # no unparsed leftovers are expected
    assert len(args) == 0

    def get_theme() -> Literal['light', 'dark', 'default']:
        # gsettings prints the value quoted, e.g. "'prefer-dark'"
        res = (
            subprocess.check_output(
                [
                    'gsettings',
                    'get',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                ]
            )
            .decode('utf-8')
            .strip()
        )

        if res == "'prefer-dark'":
            return 'dark'
        elif res == "'prefer-light'":
            return 'light'
        elif res == "'default'":
            return 'default'
        else:
            logger.error(dict(res=res, msg='unknown theme'))
            raise NotImplementedError

    def set_theme(theme: Literal['light', 'dark', 'default']) -> None:
        # 'default' resets the key rather than writing an explicit value
        if theme == 'light':
            subprocess.check_call(
                [
                    'gsettings',
                    'set',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                    'prefer-light',
                ]
            )
        elif theme == 'dark':
            subprocess.check_call(
                [
                    'gsettings',
                    'set',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                    'prefer-dark',
                ]
            )
        elif theme == 'default':
            subprocess.check_call(
                [
                    'gsettings',
                    'reset',
                    'org.gnome.desktop.interface',
                    'color-scheme',
                ]
            )
        else:
            raise NotImplementedError

    def toggle() -> None:
        # dark <-> light; an unset ('default') scheme toggles to dark
        theme = get_theme()

        if theme in ('light', 'default'):
            set_theme('dark')
        else:
            set_theme('light')

    if options.action == 'toggle':
        toggle()
    elif options.action == 'dark':
        set_theme('dark')
    elif options.action == 'light':
        set_theme('light')
    elif options.action == 'get':
        sys.stdout.write(json.dumps(get_theme()))
        sys.stdout.flush()
    else:
        raise NotImplementedError

@ -64,7 +64,9 @@ class PasswordUtils:
raise NotImplementedError
@classmethod
def _scrypt_init(cls, salt: bytes) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
def _scrypt_init(
cls, salt: bytes
) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
return cryptography.hazmat.primitives.kdf.scrypt.Scrypt(
salt=salt,
length=32,

@ -10,5 +10,7 @@ def setup(level: Optional[int] = None) -> None:
logging.basicConfig(
level=level,
format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
format=(
'%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
),
)

@ -0,0 +1,121 @@
import pydantic
import json
import logging
import datetime
# import django.http
from typing import (
Literal,
Any,
Optional,
Annotated,
cast,
TypeVar,
Protocol,
Generic,
Callable,
)
logger = logging.getLogger(__name__)
class Metric(pydantic.BaseModel):
    """A Prometheus-style metric (name, type, optional help) plus its samples."""

    name: str
    # exposition metric type; only gauge and counter are supported here
    type: Literal['gauge', 'counter']
    help: Optional[str] = None

    class Sample(pydantic.BaseModel):
        """One exposition sample: pre-rendered value, label set, optional timestamp."""

        value: str
        # label name -> label value; values are JSON-quoted at render time
        parameters: dict[str, str]
        timestamp: Optional[datetime.datetime] = None

    samples: list[Sample] = pydantic.Field(
        default_factory=lambda: [],
    )

    @classmethod
    def sample_serialize(
        cls,
        o: 'Metric',
        s: 'Metric.Sample',
    ) -> str:
        """Render one sample of metric ``o`` as exposition-format text lines.

        For gauges, an extra ``NaN`` sample with the same labels is emitted
        15 seconds after ``s`` — NOTE(review): presumably to mark the series
        stale for the scraper; confirm against the consuming Prometheus setup.
        """
        samples: list[Metric.Sample] = [
            s,
        ]

        if o.type == 'gauge':
            samples.append(
                Metric.Sample(
                    parameters=s.parameters,
                    value='NaN',
                    timestamp=(
                        s.timestamp + datetime.timedelta(seconds=15)
                        if s.timestamp
                        else None
                    ),
                )
            )

        return ''.join(
            [
                '{metric}{{{parameters}}} {value} {timestamp}\n'.format(
                    metric=o.name,
                    parameters=','.join(
                        [
                            # json.dumps quotes/escapes the label value
                            '%s=%s'
                            % (
                                k,
                                json.dumps(v),
                            )
                            for k, v in s2.parameters.items()
                        ]
                    ),
                    value=s2.value,
                    timestamp=(
                        # exposition timestamps are integer milliseconds
                        '%.f' % (s2.timestamp.timestamp() * 1000,)
                        if s2.timestamp
                        else ''
                    ),
                )
                for s2 in samples
            ]
        )
class serialize_t:
    """Namespace holding the result type of :func:`serialize`."""

    class res_t(pydantic.BaseModel):
        # json2: the rendered exposition payload text
        json2: str
        # content_type: the HTTP Content-Type header to serve the payload with
        content_type: str
def serialize(
    metrics: list[Metric],
) -> serialize_t.res_t:
    """Render ``metrics`` in the Prometheus text exposition format (0.0.4).

    Metrics with no samples are omitted.  Each rendered metric gets an
    optional ``# HELP`` line, a ``# TYPE`` line, then all of its samples
    via :meth:`Metric.sample_serialize`.
    """
    return serialize_t.res_t(
        json2=''.join(
            [
                '{help}{type}{samples}'.format(
                    # help='# HELP %s some metric' % o.name,
                    # type='# TYPE %s counter' % o.name,
                    help=(
                        '# HELP {0} {1}\n'.format(o.name, o.help)
                        if o.help
                        else ''
                    ),
                    type=(
                        '# TYPE {0} {1}\n'.format(o.name, o.type)
                        if o.type
                        else ''
                    ),
                    samples=''.join(
                        [Metric.sample_serialize(o, s) for s in o.samples]
                    ),
                )
                for o in metrics
                if len(o.samples) > 0
            ]
        ),
        content_type='text/plain; version=0.0.4; charset=utf-8',
    )

@ -38,7 +38,9 @@ class MypyFormatEntry:
class MypyFormat:
vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode')
vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(
name='vscode', value='vscode'
)
json: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json')
@classmethod
@ -149,7 +151,11 @@ def run(
assert not res.returncode is None
errors = sorted(
[json.loads(o) for o in res.stdout.decode('utf-8').splitlines() if not o.strip() == ''],
[
json.loads(o)
for o in res.stdout.decode('utf-8').splitlines()
if not o.strip() == ''
],
key=lambda x: (
x.get('file', ''),
x.get('line', 0),

@ -54,8 +54,21 @@ def runtime_libdirs_init(
ld_library_path: list[pathlib.Path] = [
o
for o in [
*[o.absolute() for o in (project.runtime_libdirs if project.runtime_libdirs else [])],
*[pathlib.Path(o) for o in os.environ.get('LD_LIBRARY_PATH', '').split(os.path.pathsep) if o != ''],
*[
o.absolute()
for o in (
project.runtime_libdirs
if project.runtime_libdirs
else []
)
],
*[
pathlib.Path(o)
for o in os.environ.get('LD_LIBRARY_PATH', '').split(
os.path.pathsep
)
if o != ''
],
]
]
@ -72,10 +85,16 @@ def runtime_libdirs_init(
ld_library_path_present.append(o)
os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present]))
os.environ.update(
LD_LIBRARY_PATH=os.path.pathsep.join(
[str(o) for o in ld_library_path_present]
)
)
for preload_path in project.runtime_preload or []:
for preload_found in glob.glob(str(preload_path.parent / ('lib%s.so' % preload_path.name))):
for preload_found in glob.glob(
str(preload_path.parent / ('lib%s.so' % preload_path.name))
):
logger.info(
dict(
preload_path=preload_path,

@ -101,8 +101,20 @@ class pip_resolve_t:
entries: Optional[list[download_info_t]] = None
def pip_resolve_entries_to_txt(entries: list[pip_resolve_t.res_t.download_info_t]) -> str:
return '\n'.join(['#%s\n%s %s' % (o.url, o.constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256])) for o in entries])
def pip_resolve_entries_to_txt(
entries: list[pip_resolve_t.res_t.download_info_t],
) -> str:
return '\n'.join(
[
'#%s\n%s %s'
% (
o.url,
o.constraint,
' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256]),
)
for o in entries
]
)
def pip_resolve(
@ -128,7 +140,9 @@ def pip_resolve(
import pip._internal.models.direct_url
with contextlib.ExitStack() as stack:
stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager())
stack.enter_context(
pip._internal.utils.temp_dir.global_tempdir_manager()
)
t2 = pip._internal.cli.main_parser.create_main_parser()
@ -166,15 +180,22 @@ def pip_resolve(
pip._internal.cli.cmdoptions.check_dist_restriction(options)
# t1._in_main_context = True
session = t1.get_default_session(options)
target_python = pip._internal.cli.cmdoptions.make_target_python(options)
finder = cast(pip_resolve_t.build_package_finder_t, getattr(t1, '_build_package_finder'))(
target_python = pip._internal.cli.cmdoptions.make_target_python(
options
)
finder = cast(
pip_resolve_t.build_package_finder_t,
getattr(t1, '_build_package_finder'),
)(
options=options,
session=session,
target_python=target_python,
ignore_requires_python=options.ignore_requires_python,
)
build_tracker = t1.enter_context(pip._internal.operations.build.build_tracker.get_build_tracker())
build_tracker = t1.enter_context(
pip._internal.operations.build.build_tracker.get_build_tracker()
)
reqs = t1.get_requirements(
[
#'pip', 'uv', 'ipython',
@ -184,8 +205,12 @@ def pip_resolve(
finder,
session,
)
pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs)
directory = pip._internal.utils.temp_dir.TempDirectory(delete=True, kind='download', globally_managed=True)
pip._internal.req.req_install.check_legacy_setup_py_options(
options, reqs
)
directory = pip._internal.utils.temp_dir.TempDirectory(
delete=True, kind='download', globally_managed=True
)
preparer = t1.make_requirement_preparer(
temp_build_dir=directory,
options=options,
@ -205,7 +230,9 @@ def pip_resolve(
py_version_info=options.python_version,
)
t1.trace_basic_info(finder)
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=True
)
res = pip_resolve_t.res_t()
@ -279,7 +306,9 @@ def pip_resolve(
location,
)
batch_downloader_call_def = pip._internal.network.download.BatchDownloader.__call__
batch_downloader_call_def = (
pip._internal.network.download.BatchDownloader.__call__
)
def batch_downloader_call(
_self: pip._internal.network.download.BatchDownloader,
@ -298,7 +327,9 @@ def pip_resolve(
return [(o, ('/dev/null', '')) for o in links]
# base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve
base_resolver_resolve_def = pip._internal.resolution.resolvelib.resolver.Resolver.resolve
base_resolver_resolve_def = (
pip._internal.resolution.resolvelib.resolver.Resolver.resolve
)
result_requirements: list[RequirementSet | InstallRequirement] = []
@ -309,7 +340,9 @@ def pip_resolve(
) -> RequirementSet:
# print(args, kwargs)
res = base_resolver_resolve_def(_self, root_reqs, check_supported_wheels)
res = base_resolver_resolve_def(
_self, root_reqs, check_supported_wheels
)
result_requirements.append(res)
raise NotImplementedError
@ -369,7 +402,13 @@ def pip_resolve(
patches: list[Any] = []
patches.append(unittest.mock.patch.object(pip._internal.network.download.Downloader, '__call__', downloader_call))
patches.append(
unittest.mock.patch.object(
pip._internal.network.download.Downloader,
'__call__',
downloader_call,
)
)
# patches.append(
# unittest.mock.patch.object(
# pip._internal.network.download.BatchDownloader,
@ -575,4 +614,6 @@ def pip_check_conflicts(
if line.strip() != ''
]
return pip_check_conflicts_t.res_t(status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates)
return pip_check_conflicts_t.res_t(
status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates
)

@ -1,4 +1,7 @@
import pydantic
import functools
# import asgiref.sync
import inspect
import collections
@ -9,27 +12,45 @@ from typing import (
Optional,
Mapping,
cast,
Awaitable,
overload,
)
P = TypeVar('P')
R = TypeVar('R')
def validate_params(view: Callable[..., R]) -> Callable[..., R]:
@overload
def validate_params(
view: Callable[..., Awaitable[R]],
) -> Callable[..., Awaitable[R]]: ...
@overload
def validate_params(view: Callable[..., R]) -> Callable[..., R]: ...
def validate_params(
view: Callable[..., Awaitable[R]] | Callable[..., R],
) -> Callable[..., Awaitable[R]] | Callable[..., R]:
class Parameter:
kind: Any
annotation: Any
parameters = cast(Mapping[str, Parameter], inspect.signature(view).parameters)
parameters = cast(
Mapping[str, Parameter], inspect.signature(view).parameters
)
positional_parameters: collections.OrderedDict[str, type[Any]] = collections.OrderedDict(
(
(k, v.annotation)
for k, v in parameters.items()
if v.kind
in (
inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
positional_parameters: collections.OrderedDict[str, type[Any]] = (
collections.OrderedDict(
(
(k, v.annotation)
for k, v in parameters.items()
if v.kind
in (
inspect.Parameter.POSITIONAL_ONLY,
inspect.Parameter.POSITIONAL_OR_KEYWORD,
)
)
)
)
@ -52,7 +73,10 @@ def validate_params(view: Callable[..., R]) -> Callable[..., R]:
),
)
def wrapper(*args: Any, **kwargs: Any) -> R:
sync_view: Optional[Callable[..., R]] = None
async_view: Optional[Callable[..., Awaitable[R]]] = None
def validate_params(*args: Any, **kwargs: Any) -> None:
# data = model.model_validate(
kwargs_to_check: dict[str, Any] = {k: v for k, v in kwargs.items()}
@ -71,10 +95,23 @@ def validate_params(view: Callable[..., R]) -> Callable[..., R]:
)
# ).dict()
return view(
# **data,
*args,
**kwargs,
)
if inspect.iscoroutinefunction(view):
async_view = cast(Callable[..., Awaitable[R]], view)
return wrapper
@functools.wraps(async_view)
async def async_wrapper(*args: Any, **kwargs: Any) -> R:
validate_params(*args, **kwargs)
return await async_view(*args, **kwargs)
return async_wrapper
else:
sync_view = cast(Callable[..., R], view)
@functools.wraps(sync_view)
def wrapper(*args: Any, **kwargs: Any) -> R:
validate_params(*args, **kwargs)
return sync_view(*args, **kwargs)
return wrapper

@ -0,0 +1,228 @@
import sys
import datetime
import time
import os
import signal
import io
import json
import subprocess
import logging
import inspect
import textwrap
import optparse
import traceback
from . import logging as pr34_logging
from typing import (
Any,
Optional,
)
logger = logging.getLogger(__name__)
def get_info(
    sh: list[str],
    timeout: int | float,
) -> None:
    """Run each shell snippet and write the ' | '-joined outputs to stdout.

    Two built-in snippets are always appended after ``sh``: free-memory
    figures (``free``/``awk``) and the current date.  Each command runs
    under ``timeout`` seconds; a failing or timing-out command contributes
    a ``fail <index>`` marker instead of crashing the status line.
    """
    t1: list[str] = []
    for sh_index, o in enumerate(
        [
            *sh,
            *[
                r"""
                A=$(free -h | grep -P Mem: | grep -Po '[\w\.\d]+');
                echo -n $A | awk '{print $2, $7}';
                """,
                r"""
                date +'%Y-%m-%d %l:%M:%S %p %Z';
                """,
            ],
        ]
    ):
        try:
            t1.append(
                subprocess.check_output(
                    o,
                    shell=True,
                    timeout=timeout,
                )
                .decode('utf-8')
                .strip()
            )
        except Exception:
            # best-effort: keep the slot, record which snippet failed
            t1.append('fail %d' % sh_index)
    t3 = ' | '.join(t1).replace('\n\r', '')
    sys.stdout.write(t3)
    sys.stdout.flush()
def run(argv: list[str]):
    """CLI entry point: print a one-line status string built from shell snippets.

    Options: ``--sh`` (repeatable shell command), ``--timeout`` (per-command
    seconds, default 0.5, clamped to [0, 4]), ``--repeat_interval``
    (re-render every N seconds until SIGINT/SIGTERM), ``--config`` (JSON
    file providing extra ``sh`` entries).  Calls :func:`get_info` once, or
    periodically when ``--repeat_interval`` is given.
    """
    # logging.basicConfig(level=logging.INFO)
    pr34_logging.setup()

    assert isinstance(argv, list) and all([isinstance(o, str) for o in argv])

    class c1(optparse.IndentedHelpFormatter):
        # Help formatter that preserves explicit newlines in option help:
        # it re-executes the source of IndentedHelpFormatter.format_option
        # with textwrap.wrap swapped for the newline-preserving f1 below.
        def format_option(self, *args: Any, **kwargs: Any) -> Any:
            def f1(text: str, width: int) -> list[str]:
                # wrap each original line separately so embedded '\n' survive
                # width = None
                return '\n'.join(
                    [
                        textwrap.fill('\t' + o, width, replace_whitespace=False)
                        for o in text.splitlines()
                    ]
                ).splitlines()

            t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option)
            # dedent the method source, patch it, and exec it as f2
            t2 = (
                '\n'.join([o[4:] for o in t1.splitlines()[:]])
                .replace(
                    'textwrap.wrap',
                    'f1',
                )
                .replace('format_option', 'f2')
            )
            ns: dict[str, Any] = dict()
            exec(t2, dict(f1=f1), ns)
            return ns['f2'](self, *args, **kwargs)

    parser = optparse.OptionParser(
        formatter=c1(
            width=int(os.environ.get('COLUMNS', '9999999')),
        ),
    )

    def add_option(
        p: optparse.OptionParser,
        option_name: str,
        dest: str,
        default: Optional[Any] = None,
        action: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # thin wrapper around OptionParser.add_option (typed call site)
        getattr(p, 'add_option')(
            option_name,
            dest=dest,
            default=default,
            action=action,
            **kwargs,
        )

    add_option(
        parser,
        '--sh',
        dest='sh',
        default=[],
        action='append',
        type=str,
    )
    add_option(
        parser,
        '--timeout',
        dest='timeout',
        default=None,
        type=float,
    )
    add_option(
        parser,
        '--repeat_interval',
        dest='repeat_interval',
        default=None,
        type=float,
    )
    add_option(
        parser,
        '--config',
        dest='config',
        default=None,
        type=str,
        help=''.join(
            [
                '.json file with array of strings, each is a shell command ',
                'that outputs a separate status text value, ',
                'like\n',
                r"""
ping -w 1 -i 0.02 <hostname> -c 3 | tail -n 2| head -n 1 | grep -Po $'time\\s+.*$'
sensors -j | jq -r '.\"coretemp-isa-0000\".\"Package id 0\".temp1_input|tostring + \" C\"'
printf '%d RPM' $(cat /sys/devices/platform/applesmc.768/fan1_input)
printf '% 3.0f%%' $(upower -d | grep -Po 'percentage:\\s+\\d+(\\.\\d+)?%' | grep -Po '\\d+(\\.\\d+)?' | head -n 1)
""".strip(),
            ]
        ),
    )

    options, args = parser.parse_args(argv)

    if options.timeout is None:
        options.timeout = 0.5

    # per-command timeout must stay within a sane (0, 4] window
    timeout2 = max(options.timeout, 0.0)
    assert timeout2 >= 0.0 and timeout2 <= 4

    config: dict[str, Any] = dict()
    try:
        if not options.config is None:
            with io.open(options.config, 'r') as f:
                config.update(json.load(f))
    except Exception:
        # a broken or missing config file is logged and ignored
        logging.error(traceback.format_exc())
        pass

    options.sh.extend(config.get('sh', []))

    last_ts = datetime.datetime.now()

    shutdown: bool = False

    def on_signal(*args: Any, **kwargs: Any):
        # request a clean exit from the polling loop
        nonlocal shutdown
        shutdown = True

    signal.signal(signal.SIGINT, on_signal)
    signal.signal(signal.SIGTERM, on_signal)

    while not shutdown:
        get_info(
            timeout=timeout2,
            sh=options.sh,
        )

        if not options.repeat_interval:
            break
        else:
            sys.stdout.write('\n')
            sys.stdout.flush()

            is_late = False
            new_ts = last_ts

            # advance the deadline interval-by-interval until it lies in the
            # future; NOTE(review): when a cycle overruns, deadlines appear to
            # be skipped forward without sleeping — confirm intended cadence
            while True:
                now_ts = datetime.datetime.now()
                spent = (now_ts - last_ts).total_seconds()
                new_ts = last_ts + datetime.timedelta(
                    seconds=options.repeat_interval
                )
                if new_ts > now_ts:
                    if is_late:
                        last_ts = new_ts
                    break
                else:
                    last_ts = new_ts
                    is_late = True

            if spent < options.repeat_interval:
                time.sleep(options.repeat_interval - spent)
# allow running this module directly as a script
if __name__ == '__main__':
    run(sys.argv[1:])

@ -0,0 +1,724 @@
#!/usr/bin/env python3
import psutil
import pathlib
import logging
import logging.handlers
import os
import signal
import subprocess
import threading
import time
import argparse
import re
import sys
from prompt_toolkit.application import Application
from prompt_toolkit.key_binding import KeyBindings, ConditionalKeyBindings
from prompt_toolkit.layout import Layout, HSplit, FloatContainer, Float
from prompt_toolkit.filters import Condition
from prompt_toolkit.layout.containers import Window
from prompt_toolkit.layout.controls import FormattedTextControl
from prompt_toolkit.widgets import TextArea, Frame, Dialog, Button, Label
from prompt_toolkit.styles import Style
from typing import (
TypedDict,
Any,
Optional,
)
from collections import OrderedDict
logger = logging.getLogger(__name__)
__version__ = '0.6.1'
__created__ = '2025-11-21'
# — Helper for cgroup / slice matching —
def get_cgroup_path(pid: int) -> Optional[str]:
    """Return the cgroup path recorded for ``pid`` in ``/proc/<pid>/cgroup``.

    Returns the path field of the first ``hierarchy:controllers:path``
    line (for cgroup v2 the single ``0::/...`` line), or ``None`` when no
    such line exists or the file cannot be read (e.g. the process exited).
    """
    try:
        with open(f'/proc/{pid}/cgroup', 'r') as f:
            for line in f:
                parts = line.strip().split(':', 2)
                if len(parts) == 3:
                    return parts[2]
    except Exception:
        # best-effort: log and treat an unreadable entry as "no cgroup"
        logger.exception('')
        return None
    return None
def slice_matches(cpath, target_slice):
    """True when some component of cgroup path ``cpath`` names ``target_slice``.

    The comparison is case-insensitive and ignores a trailing ``.slice``
    suffix on path components, so ``/user.slice/app-firefox.slice``
    matches the target ``app-firefox``.  Empty or ``None`` inputs never
    match.
    """
    if not (cpath and target_slice):
        return False

    wanted = target_slice.lower()

    def normalized(component):
        # lower-case, then drop a trailing '.slice' suffix if present
        lowered = component.lower()
        return lowered[:-6] if lowered.endswith('.slice') else lowered

    return any(
        normalized(part) == wanted
        for part in cpath.strip('/').split('/')
    )
# — Memorymanagement logic —
class get_firefox_procs_ps_t:
    """Namespace for the result types of :func:`get_firefox_procs_ps`."""

    class res_t:
        class entry_t(TypedDict):
            # resident set size in bytes (converted from ps's KiB figure)
            rss: int
            pid: int
            ppid: int
            # cgroup path as printed by `ps -o cgroup`
            cgroup: str
            # full command line as printed by `ps -o cmd`
            cmd: str
def get_firefox_procs_ps(
    slice_name: Optional[str] = None,
) -> list[get_firefox_procs_ps_t.res_t.entry_t]:
    """Collect per-process memory and cgroup info by shelling out to ``ps``.

    Runs ``ps -ax`` twice (once for pid/rss/ppid/cmd, once for pid/cgroup)
    and merges the rows by pid.  Entries missing either ``cgroup`` or
    ``rss`` are dropped; when ``slice_name`` is given, only entries whose
    cgroup path contains that substring are returned.
    """
    entries: dict[int, dict[str, Any]] = dict()
    for regex, columns in [
        (
            # pid rss ppid cmd
            re.compile(r'^\s*(\d+)\s+(\d+)\s+(\d+)\s+(.*)$'),
            OrderedDict(
                pid=lambda x: int(x[1]),
                # ps reports rss in KiB; convert to bytes
                rss=lambda x: int(x[2]) * 1024,
                ppid=lambda x: int(x[3]),
                cmd=lambda x: x[4],
            ),
        ),
        (
            # pid cgroup
            re.compile(r'^\s*(\d+)\s+(.*)$'),
            OrderedDict(
                pid=lambda x: int(x[1]),
                cgroup=lambda x: x[2],
            ),
        ),
    ]:
        # the column order passed to ps matches the regex groups above;
        # [1:] drops the header line
        lines = (
            subprocess.check_output(
                [
                    'ps',
                    '-ax',
                    '-o',
                    ','.join(columns.keys()),
                ]
            )
            .decode('utf-8')
            .splitlines()[1:]
        )
        for line in lines:
            r = re.compile(regex)
            # print([r, line])
            match = r.match(line)
            assert match
            entry = {k: v(match) for k, v in columns.items()}
            # merge the two ps passes by pid
            if not entry['pid'] in entries:
                entries[entry['pid']] = dict()
            entries[entry['pid']].update(entry)
    filtered_entries: list[dict[str, Any]] = []
    for entry in entries.values():
        # keep only rows seen in both passes
        if not 'cgroup' in entry or not 'rss' in entry:
            continue
        if not slice_name is None:
            # plain substring match against the cgroup path
            if not slice_name in entry['cgroup']:
                continue
        filtered_entries.append(entry)
    return filtered_entries
def get_firefox_procs(slice_name=None):
    """psutil-based variant: return ``psutil.Process`` objects for Firefox.

    A process qualifies when ``firefox`` appears in its name or in its
    first command-line argument; with ``slice_name`` set, its cgroup must
    also match via :func:`slice_matches`.  Processes that disappear or
    deny access mid-scan are skipped.
    """
    procs = []
    for p in psutil.process_iter(['pid', 'name', 'cmdline', 'memory_info']):
        try:
            name = p.info['name']
            cmd = p.info['cmdline']
            if not cmd:
                continue
            if 'firefox' not in name and not (cmd and 'firefox' in cmd[0]):
                continue
            if slice_name:
                cpath = get_cgroup_path(p.pid)
                if not slice_matches(cpath, slice_name):
                    continue
            procs.append(p)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            continue
    return procs
def total_rss_mb(procs: list['get_firefox_procs_ps_t.res_t.entry_t']):
    """Return the combined ``rss`` of *procs*, converted from bytes to MiB.

    Entries without a usable ``rss`` field are logged and skipped, so the
    total is best-effort.
    """
    sizes: list[int] = []
    for entry in procs:
        try:
            sizes.append(entry['rss'])
        except Exception:
            # best-effort accounting: log the bad entry and move on
            logger.exception('')
    return sum(sizes) / (1024 * 1024)
def is_main_firefox(p):
    """True when entry *p* looks like a main (non-content) Firefox process.

    Content/worker processes carry ``contentproc`` on their command line.
    Any failure to inspect the entry is logged and treated as "not main".
    """
    try:
        # for arg in p.cmdline():
        # for arg in p['cmd'].split():
        #     if "contentproc" in arg:
        #         return False
        return 'contentproc' not in p['cmd']
    except Exception:
        logger.exception('')
        return False
def kill_prioritized(
    procs: list['get_firefox_procs_ps_t.res_t.entry_t'],
    to_free_mb,
    low_priority_pids,
    main_regex: Optional[str],
    worker_regex: Optional[str],
):
    """SIGTERM processes from *procs* until roughly ``to_free_mb`` MiB is freed.

    Candidate selection: with both regexes unset every process qualifies; a
    command line matching ``worker_regex`` qualifies; one matching
    ``main_regex`` is protected.  Candidates are killed largest-RSS first.
    Returns ``(killed_pids, freed_mb)``.
    """
    candidates = []
    for p in procs:
        # branch order matters: a worker_regex match wins over main_regex
        if worker_regex is None and main_regex is None:
            pass
        elif (
            not worker_regex is None
            and not re.compile(worker_regex).match(p['cmd']) is None
        ):
            # explicit worker: killable
            pass
        elif (
            not main_regex is None
            and not re.compile(main_regex).match(p['cmd']) is None
        ):
            # explicit main process: protected
            continue
        elif not main_regex is None and not worker_regex is None:
            # both regexes set, neither matched: leave the process alone
            continue
        elif main_regex is None and not worker_regex is None:
            # only worker_regex set and it did not match
            continue
        elif not main_regex is None and worker_regex is None:
            # only main_regex set and it did not match: killable
            pass
        else:
            raise NotImplementedError
        try:
            # rss_mb = p.memory_info().rss / (1024 * 1024)
            rss_mb = p['rss'] / (1024 * 1024)
            candidates.append((p, rss_mb))
        except Exception:
            logger.exception('')
            continue

    # NOTE(review): pids listed in low_priority_pids sort last, i.e. they are
    # the LAST to be killed — confirm this matches the intended meaning of
    # "low priority"
    candidates.sort(key=lambda x: ((x[0]['pid'] in low_priority_pids), -x[1]))

    freed = 0.0
    killed = []
    for p, rss in candidates:
        if freed >= to_free_mb:
            break
        logger.info(
            dict(
                p=p,
                action='kill',
                msg='started',
                to_free_mb=to_free_mb,
                killed=killed,
                freed=freed,
                low_priority_pids=low_priority_pids,
            )
        )
        try:
            os.kill(p['pid'], signal.SIGTERM)
            killed.append(p['pid'])
            # count the freed memory optimistically (SIGTERM may be ignored)
            freed += rss
        except Exception as e:
            logger.exception(f'Error killing pid {p["pid"]}')
            # print(f"Error killing pid {p.pid}: {e}", file=sys.stderr)
    return killed, freed
# — systemd-run logic —
def launch_firefox_with_limits(
	base_cmd, memory_high, swap_max, extra_args, unit_name
):
	"""Start Firefox inside a transient user systemd scope with memory caps.

	Builds a ``systemd-run --user --scope`` command line carrying
	``MemoryHigh`` (and ``MemorySwapMax`` when given), appends the Firefox
	command plus extra args, and launches it fully detached from our
	stdio.  Returns the ``subprocess.Popen`` handle of systemd-run.
	"""
	run_cmd = [
		'systemd-run',
		'--user',
		'--scope',
		'-p',
		f'MemoryHigh={int(memory_high)}M',
	]
	if swap_max is not None:
		run_cmd.extend(['-p', f'MemorySwapMax={int(swap_max)}M'])
	if unit_name:
		run_cmd.extend(['--unit', unit_name])
	run_cmd.extend(base_cmd)
	run_cmd.extend(extra_args)
	devnull = subprocess.DEVNULL
	proc = subprocess.Popen(run_cmd, stdin=devnull, stdout=devnull, stderr=devnull)
	print('Launched Firefox via systemd-run, PID:', proc.pid, file=sys.stderr)
	return proc
# — Main + TUI + Monitoring —
def main():
	"""Entry point.

	Parses CLI arguments, optionally launches Firefox inside a
	memory-limited systemd scope, and runs a prompt_toolkit full-screen TUI
	that periodically inspects the slice's RSS and kills processes (via
	kill_prioritized) when the configured threshold is exceeded.

	NOTE(review): indentation in this block was reconstructed from a
	whitespace-stripped source; nesting follows the apparent structure.
	"""
	parser = argparse.ArgumentParser(
		description='Firefox memory manager with slice + graceful shutdown'
	)
	parser.add_argument(
		'--max-mb',
		type=float,
		required=True,
		help='Memory threshold in MB (used for killing logic & MemoryHigh)',
	)
	parser.add_argument(
		'--kill-percent',
		type=float,
		default=70.0,
		help='If over max, kill until usage ≤ this percent of max',
	)
	parser.add_argument(
		'--swap-max-mb',
		type=float,
		default=None,
		help='MemorySwapMax (MB) for the systemd scope',
	)
	parser.add_argument(
		'--interval',
		type=float,
		default=1.0,
		help='Monitoring interval in seconds',
	)
	parser.add_argument(
		'--unit-name',
		type=str,
		default='firefox-limited',
		help='Name for systemd transient unit',
	)
	parser.add_argument(
		'--main-regex',
		type=str,
		default=None,
		help='regex for main processes, that are not to kill',
	)
	parser.add_argument(
		'--worker-regex',
		type=str,
		# default=r'^.*contentproc.*$',
		default=None,
		help='regex for worker processes, that can be killed, like .*contentproc.* for firefox',
	)
	parser.add_argument(
		'--firefox-extra',
		action='append',
		default=[],
		help='Extra CLI args to pass to Firefox (can repeat)',
	)
	parser.add_argument(
		'firefox_cmd',
		nargs=argparse.REMAINDER,
		help='Firefox command + args (if launching it)',
	)
	args = parser.parse_args()
	# Per-unit rotating log file under ~/.cache/oom_firefox/.
	os.makedirs(
		pathlib.Path('~/.cache/oom_firefox/').expanduser(), exist_ok=True
	)
	logging.basicConfig(
		level=logging.INFO,
		format=(
			'%(asctime)s '
			'%(levelname)-8s '
			'%(filename)s:%(lineno)d '
			'%(funcName)s %(message)s'
		),
		handlers=[
			logging.handlers.RotatingFileHandler(
				pathlib.Path(
					'~/.cache/oom_firefox/log-%s' % args.unit_name
				).expanduser(),
				maxBytes=128 * 1024,
				backupCount=3,
			)
		],
	)
	# PIDs the user marked via the 'm' dialog; consulted by kill_prioritized's
	# sort key when choosing kill order.
	low_priority_pids = set()
	# Main status pane of the TUI.
	body = TextArea(focusable=False, scrollbar=True)
	terminate_flag = threading.Event()
	lock = threading.Lock()
	firefox_proc = None

	def terminate():
		# Signal the monitor thread to stop and tear down the TUI.
		terminate_flag.set()
		app.exit()

	def stop():
		# Gracefully terminate the launched Firefox; escalate to kill()
		# if terminate()/wait() fails.
		with lock:
			if firefox_proc:
				try:
					firefox_proc.terminate()
					firefox_proc.wait(timeout=5)
				except Exception:
					logger.exception('')
					try:
						firefox_proc.kill()
					except Exception:
						logger.exception('')
						pass
		# app.exit()

	signal.signal(signal.SIGINT, lambda s, f: terminate())
	signal.signal(signal.SIGTERM, lambda s, f: terminate())

	def refresh_body():
		# One monitoring tick: sample the slice, kill if over limit,
		# restart Firefox if it died, and redraw the status pane.
		nonlocal firefox_proc
		with lock:
			procs = get_firefox_procs_ps(slice_name=args.unit_name)
			total = total_rss_mb(procs)
			limit = args.max_mb
			kill_to = args.kill_percent / 100.0 * limit
			lines = [
				f'Firefox RSS (slice={args.unit_name}): {total:.1f} MB',
				f'Threshold (max): {limit:.1f} MB',
				f'Killto target: {kill_to:.1f} MB ({args.kill_percent}%)',
				f'Lowpriority PIDs: {sorted(low_priority_pids)}',
			]
			if total > limit:
				# Free enough to land back at kill_percent of the limit.
				to_free = total - kill_to
				logger.info(
					dict(
						total=total,
						limit=limit,
						kill_to=kill_to,
						to_free=to_free,
						low_priority_pids=low_priority_pids,
						worker_regex=args.worker_regex,
						main_regex=args.main_regex,
					)
				)
				killed, freed = kill_prioritized(
					procs,
					to_free,
					low_priority_pids,
					main_regex=args.main_regex,
					worker_regex=args.worker_regex,
				)
				lines.append(f'Killed: {killed}')
				lines.append(f'Freed ≈ {freed:.1f} MB')
			else:
				lines.append('Within limit — no kill')
			if firefox_proc and firefox_proc.poll() is not None:
				# The systemd-run child exited; relaunch with the same limits.
				print('Firefox died — restarting …', file=sys.stderr)
				firefox_proc = launch_firefox_with_limits(
					args.firefox_cmd,
					memory_high=args.max_mb,
					swap_max=args.swap_max_mb,
					extra_args=args.firefox_extra,
					unit_name=args.unit_name,
				)
			body.text = '\n'.join(lines)

	# One-slot holder for the currently open modal Float (None = no dialog).
	dialog_float = [None]
	root_floats = []

	def open_pid_dialog():
		# Modal dialog for editing low_priority_pids.  Accepted forms per
		# line: "(123)" or "123" to add, "-123" to remove.
		ta = TextArea(text='', multiline=True, scrollbar=True)

		def on_ok():
			txt = ta.text
			for m in re.finditer(r'\((\d+)[^\)\d]*\)', txt):
				low_priority_pids.add(int(m.group(1)))
			for m in re.finditer(r'^\s*(\d+)\s*$', txt):
				low_priority_pids.add(int(m.group(1)))
			for m in re.finditer(r'^\s*-(\d+)\s*$', txt):
				low_priority_pids.remove(int(m.group(1)))
			close_dialog()
			refresh_body()

		def on_cancel():
			close_dialog()

		dialog = Dialog(
			title='Enter lowpriority PIDs',
			body=ta,
			buttons=[
				Button(text='OK', handler=on_ok),
				Button(text='Cancel', handler=on_cancel),
			],
			width=60,
			modal=True,
		)
		f = Float(content=dialog, left=2, top=2)
		dialog_float[0] = f
		root_floats.append(f)
		app.layout.focus(ta)

	def change_kill_percent(kill_percent: float) -> None:
		# Only 10–90% is accepted; out-of-range input raises AssertionError.
		assert kill_percent >= 10 and kill_percent <= 90
		args.kill_percent = kill_percent

	def change_max_mb(max_mb: int) -> None:
		# Push the new limit to the live systemd scope: MemoryHigh at the
		# limit, MemoryMax 10% above it.
		for cmd in (
			[
				'systemctl',
				'--user',
				'set-property',
				'%s.scope' % args.unit_name,
				'MemoryHigh=%dM' % max_mb,
			],
			[
				'systemctl',
				'--user',
				'set-property',
				'%s.scope' % args.unit_name,
				'MemoryMax=%dM' % (max_mb * 1.1),
			],
		):
			logger.info(dict(cmd=cmd))
			subprocess.check_call(cmd)
		args.max_mb = max_mb

	def open_limit_dialog():
		# Modal dialog: enter a new max_mb as a bare integer.
		ta = TextArea(text='', multiline=True, scrollbar=True)

		def on_ok():
			txt = ta.text
			m = re.compile(r'^\s*(\d+)\s*$').match(txt)
			if m:
				change_max_mb(int(m[1]))
				close_dialog()
				refresh_body()
			else:
				logger.error('invalid input %s' % txt)

		def on_cancel():
			close_dialog()

		dialog = Dialog(
			title='Enter maximum memory threshold in MB',
			body=ta,
			buttons=[
				Button(text='OK', handler=on_ok),
				Button(text='Cancel', handler=on_cancel),
			],
			width=60,
			modal=True,
		)
		f = Float(content=dialog, left=2, top=2)
		dialog_float[0] = f
		root_floats.append(f)
		app.layout.focus(ta)

	def open_percentage_dialog():
		# Modal dialog: enter a new kill_percent (int or float, no % sign).
		ta = TextArea(text='', multiline=True, scrollbar=True)

		def on_ok():
			txt = ta.text
			m = re.compile(r'^\s*(\d+|\d+\.\d+)\s*$').match(txt)
			if m:
				change_kill_percent(float(m[1]))
				close_dialog()
				refresh_body()
			else:
				logger.error('invalid input %s' % txt)

		def on_cancel():
			close_dialog()

		dialog = Dialog(
			title='Enter kill percent from 10% to 90%, without % sign',
			body=ta,
			buttons=[
				Button(text='OK', handler=on_ok),
				Button(text='Cancel', handler=on_cancel),
			],
			width=60,
			modal=True,
		)
		f = Float(content=dialog, left=2, top=2)
		dialog_float[0] = f
		root_floats.append(f)
		app.layout.focus(ta)

	def open_message(title, message):
		# Simple informational modal with a single Close button.
		def on_close():
			close_dialog()

		dialog = Dialog(
			title=title,
			body=Label(text=message),
			buttons=[Button(text='Close', handler=on_close)],
			width=50,
			modal=True,
		)
		f = Float(content=dialog, left=4, top=4)
		dialog_float[0] = f
		root_floats.append(f)
		app.layout.focus(dialog)

	def close_dialog():
		# Remove the active float (if any) and return focus to the body.
		f = dialog_float[0]
		if f in root_floats:
			root_floats.remove(f)
		dialog_float[0] = None
		app.layout.focus(body)

	kb = KeyBindings()
	# Global key bindings are disabled while a dialog is open.
	gkb = ConditionalKeyBindings(
		key_bindings=kb,
		filter=Condition(lambda: dialog_float[0] is None),
	)

	@kb.add('q')
	def _(event):
		terminate()

	@kb.add('m')
	def _(event):
		open_pid_dialog()

	@kb.add('l')
	def _(event):
		open_limit_dialog()

	@kb.add('p')
	def _(event):
		open_percentage_dialog()

	# NOTE(review): help text omits the 'p' (kill-percent) binding — confirm.
	HELP_TEXT = 'm=add PIDs, l=change limit, s=settings, a=about, q=quit'

	@kb.add('h')
	def _(event):
		open_message(
			'Help',
			'Keys: %s' % HELP_TEXT,
		)

	@kb.add('s')
	def _(event):
		open_message(
			'Settings',
			f'max_mb = {args.max_mb}\n'
			f'kill_percent = {args.kill_percent}\n'
			f'slice = {args.unit_name}\n'
			f'swap_max_mb = {args.swap_max_mb}\n'
			f'extra firefox args = {args.firefox_extra}',
		)

	@kb.add('a')
	def _(event):
		# __version__ / __created__ are module-level globals defined
		# elsewhere in this file.
		open_message('About', f'Version {__version__}\nCreated {__created__}')

	root = FloatContainer(
		content=HSplit(
			[
				Frame(body, title='Firefox Memory Manager'),
				Window(
					height=1,
					content=FormattedTextControl(
						HELP_TEXT,
					),
				),
			]
		),
		floats=root_floats,
		modal=True,
	)
	style = Style.from_dict(
		{
			'frame.border': 'ansicyan',
			'dialog.body': 'bg:#444444',
			'dialog': 'bg:#888888',
		}
	)
	app = Application(
		layout=Layout(root),
		key_bindings=gkb,
		style=style,
		full_screen=True,
		refresh_interval=args.interval,
	)
	if args.firefox_cmd:
		firefox_proc = launch_firefox_with_limits(
			args.firefox_cmd,
			memory_high=args.max_mb,
			swap_max=args.swap_max_mb,
			extra_args=args.firefox_extra,
			unit_name=args.unit_name,
		)

	def monitor_loop():
		# Background sampler: refresh until terminate_flag is set.
		# The bare except keeps the loop alive on any refresh error.
		nonlocal firefox_proc
		while not terminate_flag.is_set():
			try:
				refresh_body()
			except:
				logger.exception('')
			time.sleep(args.interval)
		# stop()

	# NOTE(review): this rebinds terminate_flag after the signal handlers
	# were installed; all readers resolve the name late so behavior is the
	# same, but a flag set before this line would be discarded — confirm
	# whether the rebinding is intentional.
	terminate_flag = threading.Event()
	t = threading.Thread(target=monitor_loop, daemon=True)
	t.start()
	# refresh_body()
	app.run(
		# handle_sigint=True
	)  # prompt_toolkit Application.run blocks until app.exit()
	t.join()
	stop()
# Run the TUI when executed as a script.
if __name__ == '__main__':
	main()

@ -23,7 +23,13 @@ async def f2(device, timeout=None):
async def f3(client):
t1 = [dict(service=o.__dict__, characteristics=[o2.__dict__ for o2 in o.characteristics]) for o in client.services]
t1 = [
dict(
service=o.__dict__,
characteristics=[o2.__dict__ for o2 in o.characteristics],
)
for o in client.services
]
return t1
@ -43,7 +49,13 @@ async def f5(
t5 = {i: o.details[0].name() for i, o in enumerate(t1)}
t2.extend([t1[k] for k, v in t5.items() if isinstance(v, str) and name_check(v)])
t2.extend(
[
t1[k]
for k, v in t5.items()
if isinstance(v, str) and name_check(v)
]
)
else:
t2.extend(t1)
@ -66,7 +78,9 @@ async def f4(
assert name_check in [
'watch fit',
]
name_check2 = lambda current_name: name_check.lower() in current_name.lower()
name_check2 = (
lambda current_name: name_check.lower() in current_name.lower()
)
else:
name_check2 = name_check

@ -66,7 +66,13 @@ def build(content: str, module: M) -> M:
# )
t1.run()
return cast(M, Cython.Build.Inline.load_dynamic('_%s' % sha256sum, glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0]))
return cast(
M,
Cython.Build.Inline.load_dynamic(
'_%s' % sha256sum,
glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0],
),
)
raise NotImplementedError
@ -125,7 +131,9 @@ def mypyc_build(file_path: pathlib.Path) -> Any:
# f.write(content)
t1 = Cython.Build.Inline._get_build_extension()
t1.extensions = mypyc.build.mypycify([str(source_path)], target_dir=str(output_dir / 'build'))
t1.extensions = mypyc.build.mypycify(
[str(source_path)], target_dir=str(output_dir / 'build')
)
t1.build_temp = str(output_dir)
t1.build_lib = str(lib_dir)
# t2 = Cython.Build.Inline.Extension(
@ -147,7 +155,11 @@ def mypyc_build(file_path: pathlib.Path) -> Any:
class Source:
@staticmethod
def test2(_a: numpy.ndarray[Any, numpy.dtype[numpy.int64]], _id: numpy.dtype[numpy.int32] | int, T: float = 16) -> int:
def test2(
_a: numpy.ndarray[Any, numpy.dtype[numpy.int64]],
_id: numpy.dtype[numpy.int32] | int,
T: float = 16,
) -> int:
raise NotImplementedError
@ -243,7 +255,11 @@ def test_cython(N: int = 4, T: int = 16) -> None:
def test_mypyc(N: int = 4, W: int = 35) -> None:
cython2 = mypyc_build((pathlib.Path(__file__).parent / 'cython2.py').relative_to(pathlib.Path.cwd()))
cython2 = mypyc_build(
(pathlib.Path(__file__).parent / 'cython2.py').relative_to(
pathlib.Path.cwd()
)
)
# from .cython2 import fib

@ -73,8 +73,21 @@ def kernel_2():
from keras.layers.embeddings import Embedding
from keras.layers.normalization import BatchNormalization
from keras.utils import np_utils
from sklearn import preprocessing, decomposition, model_selection, metrics, pipeline
from keras.layers import GlobalMaxPooling1D, Conv1D, MaxPooling1D, Flatten, Bidirectional, SpatialDropout1D
from sklearn import (
preprocessing,
decomposition,
model_selection,
metrics,
pipeline,
)
from keras.layers import (
GlobalMaxPooling1D,
Conv1D,
MaxPooling1D,
Flatten,
Bidirectional,
SpatialDropout1D,
)
from keras.preprocessing import sequence, text
from keras.callbacks import EarlyStopping
@ -112,15 +125,25 @@ def kernel_2():
print('REPLICAS: ', strategy.num_replicas_in_sync)
# %% [code]
train = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv')
validation = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv')
test = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv')
train = pd.read_csv(
'/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv'
)
validation = pd.read_csv(
'/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv'
)
test = pd.read_csv(
'/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv'
)
# %% [markdown]
# We will drop the other columns and approach this problem as a Binary Classification Problem and also we will have our exercise done on a smaller subsection of the dataset(only 12000 data points) to make it easier to train the models
# %% [code]
train.drop(['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'], axis=1, inplace=True)
train.drop(
['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'],
axis=1,
inplace=True,
)
# %% [code]
train = train.loc[:12000, :]
@ -137,7 +160,12 @@ def kernel_2():
# %% [code]
xtrain, xvalid, ytrain, yvalid = train_test_split(
train.comment_text.values, train.toxic.values, stratify=train.toxic.values, random_state=42, test_size=0.2, shuffle=True
train.comment_text.values,
train.toxic.values,
stratify=train.toxic.values,
random_state=42,
test_size=0.2,
shuffle=True,
)
# %% [markdown]
@ -206,7 +234,9 @@ def kernel_2():
model.add(Embedding(len(word_index) + 1, 300, input_length=max_len))
model.add(SimpleRNN(100))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.compile(
loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']
)
model.summary()
@ -253,7 +283,10 @@ def kernel_3(
o_2['model'].load_weights('model.h5')
else:
o_2['model'].fit(
o_2['xtrain_pad'], o_2['ytrain'], nb_epoch=nb_epochs, batch_size=64 * o_2['strategy'].num_replicas_in_sync
o_2['xtrain_pad'],
o_2['ytrain'],
nb_epoch=nb_epochs,
batch_size=64 * o_2['strategy'].num_replicas_in_sync,
) # Multiplying by Strategy to run on TPU's
o_2['model'].save_weights('model.h5')
@ -263,7 +296,9 @@ def kernel_3(
# %% [code]
scores_model = []
scores_model.append({'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])})
scores_model.append(
{'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])}
)
# %% [markdown]
# ## Code Explanantion
@ -283,7 +318,12 @@ def kernel_4(
import keras.preprocessing.sequence
if input_texts is None:
input_texts = ['blahb blahb blah', 'Hello World!', 'This is very good!', 'A very non toxic comment! This is so polite and polished one!']
input_texts = [
'blahb blahb blah',
'Hello World!',
'This is very good!',
'A very non toxic comment! This is so polite and polished one!',
]
t6 = []
for o in input_texts:
@ -291,7 +331,9 @@ def kernel_4(
t2 = o_2['token'].texts_to_sequences(
[t1],
)
t3 = keras.preprocessing.sequence.pad_sequences(t2, maxlen=o_2['max_len'])
t3 = keras.preprocessing.sequence.pad_sequences(
t2, maxlen=o_2['max_len']
)
t4 = o_2['model'].predict(
t3,
)

@ -42,12 +42,26 @@ def kernel_2(
):
t1 = {}
for k in ['playerTwitterFollowers', 'teamTwitterFollowers', 'games', 'events']:
for k in [
'playerTwitterFollowers',
'teamTwitterFollowers',
'games',
'events',
]:
t4 = '%s.nc' % k
if not os.path.exists(t4):
print('started %s' % t4)
t2 = '/kaggle/input/mlb-player-digital-engagement-forecasting/train.csv'
t3 = pandas.DataFrame(sum([json.loads(o) for o in o_1['t3'][t2][k].values if isinstance(o, str)], [])).to_xarray()
t3 = pandas.DataFrame(
sum(
[
json.loads(o)
for o in o_1['t3'][t2][k].values
if isinstance(o, str)
],
[],
)
).to_xarray()
t3.to_netcdf(t4)
print('cached %s' % t4)
@ -55,7 +69,9 @@ def kernel_2(
t5 = '%s-v2.nc' % k
if not os.path.exists(t5):
t2 = xarray.load_dataset(t4)
t3 = t2.sel(index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1))
t3 = t2.sel(
index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1)
)
t3.to_netcdf(t5)
t1[k] = xarray.load_dataset(t5)
print('loaded %s' % t5)
@ -119,9 +135,15 @@ def kernel_3(should_exist=None):
def kernel_4(
o_3=None,
):
[print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)]
[
print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4))
for k in range(-10, -1)
]
[print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)]
[
print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4))
for k in range(-10, -1)
]
t4 = 'https://www.youtube.com/watch?v=reaC7BHgL3M'
@ -264,7 +286,9 @@ def kernel_6(
try:
cap = cv2.VideoCapture(o)
fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
fps = cap.get(
cv2.CAP_PROP_FPS
) # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
duration = frame_count / fps
finally:
@ -454,15 +478,31 @@ def kernel_7(
for k in layer:
v = layer[k]
if 'pool' in k:
layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])]
layers += [
nn.MaxPool2d(
kernel_size=v[0], stride=v[1], padding=v[2]
)
]
else:
conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
conv2d = nn.Conv2d(
in_channels=v[0],
out_channels=v[1],
kernel_size=v[2],
stride=v[3],
padding=v[4],
)
layers += [conv2d, nn.ReLU(inplace=True)]
layer = list(layer_dict[-1].keys())
k = layer[0]
v = layer_dict[-1][k]
conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
conv2d = nn.Conv2d(
in_channels=v[0],
out_channels=v[1],
kernel_size=v[2],
stride=v[3],
padding=v[4],
)
layers += [conv2d]
return nn.Sequential(*layers)
@ -530,9 +570,19 @@ def kernel_7(
for key in block:
v = block[key]
if 'pool' in key:
layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])]
layers += [
nn.MaxPool2d(
kernel_size=v[0], stride=v[1], padding=v[2]
)
]
else:
conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
conv2d = nn.Conv2d(
in_channels=v[0],
out_channels=v[1],
kernel_size=v[2],
stride=v[3],
padding=v[4],
)
layers += [conv2d, nn.ReLU(inplace=True)]
models = {'block_0': nn.Sequential(*layers)}
@ -543,16 +593,38 @@ def kernel_7(
return PoseEstimation(models)
def get_paf_and_heatmap(model, img_raw, scale_search, param_stride=8, box_size=368):
multiplier = [scale * box_size / img_raw.shape[0] for scale in scale_search]
def get_paf_and_heatmap(
model, img_raw, scale_search, param_stride=8, box_size=368
):
multiplier = [
scale * box_size / img_raw.shape[0] for scale in scale_search
]
heatmap_avg = torch.zeros((len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])).cuda()
paf_avg = torch.zeros((len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])).cuda()
heatmap_avg = torch.zeros(
(len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])
).cuda()
paf_avg = torch.zeros(
(len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])
).cuda()
for i, scale in enumerate(multiplier):
img_test = cv2.resize(img_raw, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC)
img_test_pad, pad = pad_right_down_corner(img_test, param_stride, param_stride)
img_test_pad = np.transpose(np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)) / 256 - 0.5
img_test = cv2.resize(
img_raw,
(0, 0),
fx=scale,
fy=scale,
interpolation=cv2.INTER_CUBIC,
)
img_test_pad, pad = pad_right_down_corner(
img_test, param_stride, param_stride
)
img_test_pad = (
np.transpose(
np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)
)
/ 256
- 0.5
)
feed = Variable(torch.from_numpy(img_test_pad)).cuda()
output1, output2 = model(feed)
@ -560,17 +632,27 @@ def kernel_7(
# print(output1.size())
# print(output2.size())
heatmap = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output2)
heatmap = nn.UpsamplingBilinear2d(
(img_raw.shape[0], img_raw.shape[1])
).cuda()(output2)
paf = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output1)
paf = nn.UpsamplingBilinear2d(
(img_raw.shape[0], img_raw.shape[1])
).cuda()(output1)
heatmap_avg[i] = heatmap[0].data
paf_avg[i] = paf[0].data
heatmap_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), 1, 2).cuda()
heatmap_avg = torch.transpose(
torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1),
1,
2,
).cuda()
heatmap_avg = heatmap_avg.cpu().numpy()
paf_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2).cuda()
paf_avg = torch.transpose(
torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2
).cuda()
paf_avg = paf_avg.cpu().numpy()
return paf_avg, heatmap_avg
@ -592,20 +674,34 @@ def kernel_7(
map_down = np.zeros(map_gau.shape)
map_down[:, :-1] = map_gau[:, 1:]
peaks_binary = np.logical_and.reduce((map_gau >= map_left, map_gau >= map_right, map_gau >= map_up, map_gau >= map_down, map_gau > param_thre1))
peaks_binary = np.logical_and.reduce(
(
map_gau >= map_left,
map_gau >= map_right,
map_gau >= map_up,
map_gau >= map_down,
map_gau > param_thre1,
)
)
peaks = zip(np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]) # note reverse
peaks = zip(
np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]
) # note reverse
peaks = list(peaks)
peaks_with_score = [x + (map_ori[x[1], x[0]],) for x in peaks]
ids = range(peak_counter, peak_counter + len(peaks))
peaks_with_score_and_id = [peaks_with_score[i] + (ids[i],) for i in range(len(ids))]
peaks_with_score_and_id = [
peaks_with_score[i] + (ids[i],) for i in range(len(ids))
]
all_peaks.append(peaks_with_score_and_id)
peak_counter += len(peaks)
return all_peaks
def extract_paf_info(img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5):
def extract_paf_info(
img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5
):
connection_all = []
special_k = []
mid_num = 10
@ -626,27 +722,69 @@ def kernel_7(
raise ZeroDivisionError
vec = np.divide(vec, norm)
startend = zip(np.linspace(candA[i][0], candB[j][0], num=mid_num), np.linspace(candA[i][1], candB[j][1], num=mid_num))
startend = zip(
np.linspace(candA[i][0], candB[j][0], num=mid_num),
np.linspace(candA[i][1], candB[j][1], num=mid_num),
)
startend = list(startend)
vec_x = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 0] for I in range(len(startend))])
vec_y = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 1] for I in range(len(startend))])
vec_x = np.array(
[
score_mid[
int(round(startend[I][1])),
int(round(startend[I][0])),
0,
]
for I in range(len(startend))
]
)
vec_y = np.array(
[
score_mid[
int(round(startend[I][1])),
int(round(startend[I][0])),
1,
]
for I in range(len(startend))
]
)
score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(vec_y, vec[1])
score_with_dist_prior = sum(score_midpts) / len(score_midpts)
score_with_dist_prior += min(0.5 * img_raw.shape[0] / norm - 1, 0)
score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(
vec_y, vec[1]
)
score_with_dist_prior = sum(score_midpts) / len(
score_midpts
)
score_with_dist_prior += min(
0.5 * img_raw.shape[0] / norm - 1, 0
)
criterion1 = len(np.nonzero(score_midpts > param_thre2)[0]) > 0.8 * len(score_midpts)
criterion1 = len(
np.nonzero(score_midpts > param_thre2)[0]
) > 0.8 * len(score_midpts)
criterion2 = score_with_dist_prior > 0
if criterion1 and criterion2:
connection_candidate.append([i, j, score_with_dist_prior, score_with_dist_prior + candA[i][2] + candB[j][2]])
connection_candidate.append(
[
i,
j,
score_with_dist_prior,
score_with_dist_prior
+ candA[i][2]
+ candB[j][2],
]
)
connection_candidate = sorted(connection_candidate, key=lambda x: x[2], reverse=True)
connection_candidate = sorted(
connection_candidate, key=lambda x: x[2], reverse=True
)
connection = np.zeros((0, 5))
for c in range(len(connection_candidate)):
i, j, s = connection_candidate[c][0:3]
if i not in connection[:, 3] and j not in connection[:, 4]:
connection = np.vstack([connection, [candA[i][3], candB[j][3], s, i, j]])
connection = np.vstack(
[connection, [candA[i][3], candB[j][3], s, i, j]]
)
if len(connection) >= min(nA, nB):
break
@ -661,7 +799,9 @@ def kernel_7(
# last number in each row is the total parts number of that person
# the second last number in each row is the score of the overall configuration
subset = -1 * np.ones((0, 20))
candidate = np.array([item for sublist in all_peaks for item in sublist])
candidate = np.array(
[item for sublist in all_peaks for item in sublist]
)
for k in range(len(map_ids)):
if k not in special_k:
@ -673,7 +813,10 @@ def kernel_7(
found = 0
subset_idx = [-1, -1]
for j in range(len(subset)): # 1:size(subset,1):
if subset[j][indexA] == partAs[i] or subset[j][indexB] == partBs[i]:
if (
subset[j][indexA] == partAs[i]
or subset[j][indexB] == partBs[i]
):
subset_idx[found] = j
found += 1
@ -682,11 +825,17 @@ def kernel_7(
if subset[j][indexB] != partBs[i]:
subset[j][indexB] = partBs[i]
subset[j][-1] += 1
subset[j][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2]
subset[j][-2] += (
candidate[partBs[i].astype(int), 2]
+ connection_all[k][i][2]
)
elif found == 2: # if found 2 and disjoint, merge them
j1, j2 = subset_idx
print('found = 2')
membership = ((subset[j1] >= 0).astype(int) + (subset[j2] >= 0).astype(int))[:-2]
membership = (
(subset[j1] >= 0).astype(int)
+ (subset[j2] >= 0).astype(int)
)[:-2]
if len(np.nonzero(membership == 2)[0]) == 0: # merge
subset[j1][:-2] += subset[j2][:-2] + 1
subset[j1][-2:] += subset[j2][-2:]
@ -695,7 +844,10 @@ def kernel_7(
else: # as like found == 1
subset[j1][indexB] = partBs[i]
subset[j1][-1] += 1
subset[j1][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2]
subset[j1][-2] += (
candidate[partBs[i].astype(int), 2]
+ connection_all[k][i][2]
)
# if find no partA in the subset, create a new subset
elif not found and k < 17:
@ -703,7 +855,14 @@ def kernel_7(
row[indexA] = partAs[i]
row[indexB] = partBs[i]
row[-1] = 2
row[-2] = sum(candidate[connection_all[k][i, :2].astype(int), 2]) + connection_all[k][i][2]
row[-2] = (
sum(
candidate[
connection_all[k][i, :2].astype(int), 2
]
)
+ connection_all[k][i][2]
)
subset = np.vstack([subset, row])
return subset, candidate
@ -718,7 +877,9 @@ def kernel_7(
for i in range(18):
for j in range(len(all_peaks[i])):
cv2.circle(img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1)
cv2.circle(
img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1
)
return subset, img_canvas
@ -735,9 +896,18 @@ def kernel_7(
mY = np.mean(Y)
length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5
angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1]))
polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1)
polygon = cv2.ellipse2Poly(
(int(mY), int(mX)),
(int(length / 2), stickwidth),
int(angle),
0,
360,
1,
)
cv2.fillConvexPoly(cur_canvas, polygon, colors[i])
img_canvas = cv2.addWeighted(img_canvas, 0.4, cur_canvas, 0.6, 0)
img_canvas = cv2.addWeighted(
img_canvas, 0.4, cur_canvas, 0.6, 0
)
return img_canvas
@ -754,11 +924,17 @@ def kernel_7(
img_padded = img
pad_up = np.tile(img_padded[0:1, :, :] * 0 + pad_value, (pad[0], 1, 1))
img_padded = np.concatenate((pad_up, img_padded), axis=0)
pad_left = np.tile(img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1))
pad_left = np.tile(
img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1)
)
img_padded = np.concatenate((pad_left, img_padded), axis=1)
pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1))
pad_down = np.tile(
img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1)
)
img_padded = np.concatenate((img_padded, pad_down), axis=0)
pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1))
pad_right = np.tile(
img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1)
)
img_padded = np.concatenate((img_padded, pad_right), axis=1)
return img_padded, pad
@ -784,11 +960,15 @@ def kernel_7(
# In[4]:
state_dict = torch.load(model)['state_dict'] # getting the pre-trained model's parameters
state_dict = torch.load(model)[
'state_dict'
] # getting the pre-trained model's parameters
# A state_dict is simply a Python dictionary object that maps each layer to its parameter tensor.
model_pose = get_pose_model() # building the model (see fn. defn. above). To see the architecture, see below cell.
model_pose.load_state_dict(state_dict) # Loading the parameters (weights, biases) into the model.
model_pose.load_state_dict(
state_dict
) # Loading the parameters (weights, biases) into the model.
model_pose.float() # I'm not sure why this is used. No difference if you remove it.
@ -797,7 +977,9 @@ def kernel_7(
if use_gpu:
model_pose.cuda()
model_pose = torch.nn.DataParallel(model_pose, device_ids=range(torch.cuda.device_count()))
model_pose = torch.nn.DataParallel(
model_pose, device_ids=range(torch.cuda.device_count())
)
cudnn.benchmark = True
def estimate_pose(
@ -833,7 +1015,9 @@ def kernel_7(
img_points = None
try:
paf_info, heatmap_info = get_paf_and_heatmap(model_pose, img_ori, scale_param)
paf_info, heatmap_info = get_paf_and_heatmap(
model_pose, img_ori, scale_param
)
peaks = extract_heatmap_info(heatmap_info)
sp_k, con_all = extract_paf_info(img_ori, paf_info, peaks)
@ -876,7 +1060,13 @@ def kernel_7(
def kernel_8(
o_7,
):
for i, o in enumerate(['../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' % k for k in range(6)]):
for i, o in enumerate(
[
'../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg'
% k
for k in range(6)
]
):
arch_image = o
img_ori = o_7['cv2'].imread(arch_image)
o_7['estimate_pose'](img_ori)
@ -887,7 +1077,9 @@ def kernel_9_benchmark(
):
import datetime
t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg')
t1 = o_7['cv2'].imread(
'../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg'
)
t5 = 10
t2 = datetime.datetime.now()
for k in range(t5):
@ -905,7 +1097,9 @@ def kernel_10():
import torch
# Model
model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5x, custom
model = torch.hub.load(
'ultralytics/yolov5', 'yolov5s'
) # or yolov5m, yolov5x, custom
# Images
img = 'https://ultralytics.com/images/zidane.jpg' # or file, PIL, OpenCV, numpy, multiple
@ -927,7 +1121,9 @@ def kernel_11_benchmark(
):
import datetime
t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg')
t1 = o_7['cv2'].imread(
'../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg'
)
t5 = 10
t2 = datetime.datetime.now()
for k in range(t5):
@ -956,7 +1152,18 @@ def kernel_13(
if not len(t4) > 0 or not o_6 is None:
t1 = pandas.concat(
sum([[o2['t11'][0].assign(frame_id=k, video_path=o['video_path']) for k, o2 in enumerate(o['frames'])] for o in o_6['t8']], [])
sum(
[
[
o2['t11'][0].assign(
frame_id=k, video_path=o['video_path']
)
for k, o2 in enumerate(o['frames'])
]
for o in o_6['t8']
],
[],
)
).to_xarray()
t5 = t3[0]
t1.to_netcdf(t5)
@ -1028,7 +1235,9 @@ def kernel_14(
def kernel_15(
o_14,
):
t1 = pandas.DataFrame(numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)).T
t1 = pandas.DataFrame(
numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)
).T
pprint.pprint(
dict(
t1=t1,
@ -1078,7 +1287,9 @@ def kernel_15(
t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB)
t13 = t12.copy()
t15 = numpy.array([t8.xcenter, t8.ycenter, t8.width, t8.height])
t16 = numpy.array([t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]])
t16 = numpy.array(
[t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]]
)
t17 = t15 * t16
t18 = t17[:2] - t17[2:] / 2
t19 = t17[:2] + t17[2:] / 2
@ -1340,7 +1551,10 @@ def kernel_20(
t1 = numpy.array(o_18['t2']['t7'][0]['keypoints']).reshape(17, -1)
t2 = o_18['t2']['t6'][0]
t3 = o_18['t2']['t1'][0]['image_canvas'].copy()
assert o_18['t2']['t7'][0]['image_id'] == os.path.split(o_18['t2']['t1'][0]['image_name'])[1]
assert (
o_18['t2']['t7'][0]['image_id']
== os.path.split(o_18['t2']['t1'][0]['image_name'])[1]
)
for i, o2 in enumerate(o_21['p_color']):
if i >= 17:
@ -1449,7 +1663,16 @@ def kernel_22(o_18):
o_31 = kernel_31(
image_id=[o['image_id'] for o in t1],
image_size=numpy.array([[list(o['image_canvas'].shape) for o in o_18['t2']['t1'] if o['image_name'] == t1[i]['image_id']][0] for i in range(len(t2))]),
image_size=numpy.array(
[
[
list(o['image_canvas'].shape)
for o in o_18['t2']['t1']
if o['image_name'] == t1[i]['image_id']
][0]
for i in range(len(t2))
]
),
keypoints=numpy.stack(t2, axis=0),
)
t12 = o_31['t12']
@ -1558,7 +1781,11 @@ def kernel_25(images, delay=None):
def kernel_26(o_18, image_name):
t1 = [i for i, o in enumerate(o_18['t2']['t1']) if o['image_name'] == image_name]
t1 = [
i
for i, o in enumerate(o_18['t2']['t1'])
if o['image_name'] == image_name
]
assert len(t1) == 1
return t1[0]
@ -1580,7 +1807,11 @@ def kernel_23(o_18, o_22, ids=None):
t9 = kernel_26(o_18=o_18, image_name=t3['image_name'])
t4 = o_18['t2']['t1'][t9]['image_canvas']
t10 = o_18['t2']['t6'][t9]
t4 = [o['image_canvas'] for o in o_18['t2']['t1'] if o['image_name'] == t3['image_name']]
t4 = [
o['image_canvas']
for o in o_18['t2']['t1']
if o['image_name'] == t3['image_name']
]
assert len(t4) == 1
t5 = t4[0]
t6 = kernel_24(t5, t3['keypoints'])
@ -1641,7 +1872,9 @@ def kernel_27():
""" % (t4, t2)
if False:
pprint.pprint([t4, t2, t6])
with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
with subprocess.Popen(
t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) as p:
if False:
pprint.pprint(p.communicate())
p.wait()
@ -1669,7 +1902,9 @@ def kernel_28(
max_seconds = 999999
if video_path is None:
video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
video_path = (
'/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
)
t5 = video_path
t3 = '/kaggle/working/kernel_28-output%s.dir' % video_id
t13 = '/root/kernel_28-output.dir/tmp-slice'
@ -1679,7 +1914,9 @@ def kernel_28(
try:
cap = cv2.VideoCapture(t5)
fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
fps = cap.get(
cv2.CAP_PROP_FPS
) # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
real_duration = frame_count / fps
duration = min(real_duration, max_seconds)
@ -1739,7 +1976,9 @@ def kernel_28(
t6,
]
)
with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
with subprocess.Popen(
t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) as p:
if False:
pprint.pprint(p.communicate())
p.wait()
@ -1757,7 +1996,9 @@ def kernel_29(
video_id = ''
if video_path is None:
video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
video_path = (
'/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
)
assert os.path.exists(video_path)
@ -1771,7 +2012,13 @@ def kernel_29(
t7 = [o for o in t6 if os.path.exists(o)]
if len(t7) == 0:
t1 = [dict(data=json.load(io.open(o, 'r')), input_path=o) for o in glob.glob('/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' % video_id)]
t1 = [
dict(data=json.load(io.open(o, 'r')), input_path=o)
for o in glob.glob(
'/kaggle/working/kernel_28-output%s.dir/slice-*/*.json'
% video_id
)
]
assert len(t1) > 0
@ -1835,7 +2082,9 @@ def kernel_30(
low_mean_conf = 0.6
if video_path is None:
video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
video_path = (
'/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
)
if max_frames is None:
max_frames = 9999
@ -2045,7 +2294,10 @@ def kernel_31(image_id, image_size, keypoints):
ab = [a[0] - b[0], a[1] - b[1]]
ab1 = [c[0] - d[0], c[1] - d[1]]
cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2) + 1e-8)
cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (
sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2)
+ 1e-8
)
ang = acos(cos)
return ang * 180 / np.pi
@ -2204,7 +2456,11 @@ def kernel_33():
o_22 = kernel_22(o_18=o_18)
import pandas
o_23 = kernel_23(o_18=o_18, o_22=o_22, ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values)
o_23 = kernel_23(
o_18=o_18,
o_22=o_22,
ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values,
)
o_27 = kernel_27()
o_28 = kernel_28()
o_29 = kernel_29()
@ -2273,7 +2529,9 @@ def kernel_36():
# import os
from os.path import exists, join, basename, splitext
git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git'
git_repo_url = (
'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git'
)
project_name = splitext(basename(git_repo_url))[0]
if 1 or not exists(project_name):
@ -2282,8 +2540,18 @@ def kernel_36():
print('install new CMake becaue of CUDA10')
cmake_version = 'cmake-3.20.2-linux-x86_64.tar.gz'
if not exists(cmake_version):
assert os.system(r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """) == 0
assert os.system(r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """) == 0
assert (
os.system(
r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """
)
== 0
)
assert (
os.system(
r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """
)
== 0
)
print('clone openpose')
assert os.system(r"""!git clone -q --depth 1 $git_repo_url """) == 0
@ -2295,7 +2563,12 @@ def kernel_36():
== 0
)
print('build openpose')
assert os.system(r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` """) == 0
assert (
os.system(
r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` """
)
== 0
)
"""## From a Google Drive's folder"""
@ -2310,7 +2583,9 @@ def kernel_36():
print(filename)
colab_video_path = folder_path + filename
print(colab_video_path)
colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
colab_openpose_video_path = (
colab_video_path.replace('.mp4', '') + '-openpose.mp4'
)
print(colab_openpose_video_path)
if not exists(colab_openpose_video_path):
assert (
@ -2325,9 +2600,16 @@ def kernel_36():
assert os.system(r"""!pip install youtube-dl """) == 0
youtube_id = '2021-05-07_22-00-55_UTC'
assert os.system(r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """) == 0
assert (
os.system(
r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """
)
== 0
)
colab_video_path = '/content/drive/My Drive/openpose/' + youtube_id + '.mp4'
colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
colab_openpose_video_path = (
colab_video_path.replace('.mp4', '') + '-openpose.mp4'
)
assert (
os.system(
@ -2352,7 +2634,9 @@ def kernel_36():
# from os.path import exists, join, basename, splitext
# colab_video_path = '/content/drive/My Drive/bachata.mp4'
colab_video_path = '/content/output.mp4'
colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
colab_openpose_video_path = (
colab_video_path.replace('.mp4', '') + '-openpose.mp4'
)
assert (
os.system(

@ -21,7 +21,8 @@ dependencies = [
'pydantic',
'pydantic-settings',
'tomlkit',
'pip==23.3.2',
'tomlq',
'pip==25.1',
]
[project.optional-dependencies]
@ -29,6 +30,16 @@ crypto = [
'cryptography',
]
django = [
'django',
]
fastapi = [
'uvicorn',
'fastapi',
'uvloop',
]
early = [
'numpy',
'cryptography',
@ -36,6 +47,7 @@ early = [
'toml-cli',
'ninja',
'patchelf',
'online.fxreader.pr34',
# 'tomlkit',
]
@ -47,17 +59,26 @@ lint = [
'tomli',
# 'tomllib',
'mypy',
'django-stubs',
'pyright',
'pyrefly',
'ruff',
'fastapi',
'uvicorn',
'pip==25.1',
# 'tomlkit',
]
[project.scripts]
online-fxreader-pr34-commands = 'online.fxreader.pr34.commands:commands_cli'
oom_firefox = 'online.fxreader.pr34.oom_firefox:main'
[tool.online-fxreader-pr34]
early_features = ['default', 'early', 'lint']
pip_find_links = [
'deps/whl',
]
requirements = { default_early_lint_3_13 = 'requirements.3.13.txt' }
modules = [
{ name = 'online.fxreader.pr34', tool = { 'online-fxreader-pr34' = { early_features = ['default', 'early', 'lint'] } } },

@ -0,0 +1,126 @@
annotated-doc==0.0.4 \
--hash=sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320
annotated-types==0.7.0 \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53
anyio==4.10.0 \
--hash=sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1
argcomplete==3.6.2 \
--hash=sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591
asgiref==3.9.1 \
--hash=sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c
build==1.3.0 \
--hash=sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4
cffi==1.17.1 \
--hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd
click==8.2.1 \
--hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b
cryptography==45.0.6 \
--hash=sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42
django==5.2.5 \
--hash=sha256:2b2ada0ee8a5ff743a40e2b9820d1f8e24c11bac9ae6469cd548f0057ea6ddcd
django-stubs==5.2.2 \
--hash=sha256:79bd0fdbc78958a8f63e0b062bd9d03f1de539664476c0be62ade5f063c9e41e
django-stubs-ext==5.2.2 \
--hash=sha256:8833bbe32405a2a0ce168d3f75a87168f61bd16939caf0e8bf173bccbd8a44c5
fastapi==0.116.1 \
--hash=sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565
h11==0.16.0 \
--hash=sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86
idna==3.10 \
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
jmespath==1.1.0 \
--hash=sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64
marisa-trie==1.3.1 \
--hash=sha256:ecdb19d33b26738a32602ef432b06cc6deeca4b498ce67ba8e5e39c8a7c19745
markdown-it-py==4.0.0 \
--hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147
mdurl==0.1.2 \
--hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8
meson==1.9.1 \
--hash=sha256:f824ab770c041a202f532f69e114c971918ed2daff7ea56583d80642564598d0
meson-python==0.18.0 \
--hash=sha256:3b0fe051551cc238f5febb873247c0949cd60ded556efa130aa57021804868e2
mypy==1.18.2 \
--hash=sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d
mypy-extensions==1.1.0 \
--hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505
ninja==1.13.0 \
--hash=sha256:fb46acf6b93b8dd0322adc3a4945452a4e774b75b91293bafcc7b7f8e6517dfa
nodeenv==1.9.1 \
--hash=sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9
numpy==2.3.2 \
--hash=sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f
online-fxreader-pr34==0.1.5.39 \
--hash=sha256:b73f4caecb1ce6b94b4b3a87299e779b843ceb8d7a581da8d27abe622a6a37db
packaging==25.0 \
--hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484
patchelf==0.17.2.4 \
--hash=sha256:d9b35ebfada70c02679ad036407d9724ffe1255122ba4ac5e4be5868618a5689
pathspec==0.12.1 \
--hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08
pip==25.1 \
--hash=sha256:13b4aa0aaad055020a11bec8a1c2a70a2b2d080e12d89b962266029fff0a16ba
pybind11==3.0.1 \
--hash=sha256:aa8f0aa6e0a94d3b64adfc38f560f33f15e589be2175e103c0a33c6bce55ee89
pycparser==2.22 \
--hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc
pydantic==2.12.3 \
--hash=sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf
pydantic-core==2.41.4 \
--hash=sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84
pydantic-settings==2.11.0 \
--hash=sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c
pygments==2.20.0 \
--hash=sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176
pyproject-hooks==1.2.0 \
--hash=sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913
pyproject-metadata==0.9.1 \
--hash=sha256:ee5efde548c3ed9b75a354fc319d5afd25e9585fa918a34f62f904cc731973ad
pyrefly==0.59.1 \
--hash=sha256:59a2d01723b84d042f4fa6ec871ffd52d0a7e83b0ea791c2e0bb0ff750abce56
pyright==1.1.407 \
--hash=sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21
python-dotenv==1.2.1 \
--hash=sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61
pyyaml==6.0.2 \
--hash=sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5
regex==2026.4.4 \
--hash=sha256:ffa81f81b80047ba89a3c69ae6a0f78d06f4a42ce5126b0eb2a0a10ad44e0b2e
rich==14.3.3 \
--hash=sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d
ruff==0.14.3 \
--hash=sha256:0e2f8a0bbcffcfd895df39c9a4ecd59bb80dca03dc43f7fb63e647ed176b741e
setuptools==80.9.0 \
--hash=sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922
shellingham==1.5.4 \
--hash=sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686
sniffio==1.3.1 \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2
sqlparse==0.5.3 \
--hash=sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca
starlette==0.47.3 \
--hash=sha256:89c0778ca62a76b826101e7c709e70680a1699ca7da6b44d38eb0a7e61fe4b51
toml-cli==0.8.2 \
--hash=sha256:7af4679ca04c53ad0f6d300dab26f45a78fedf88e8310305bfe0a8ead37fd000
tomli==2.3.0 \
--hash=sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf
tomlkit==0.13.3 \
--hash=sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0
tomlq==0.1.0 \
--hash=sha256:4b966fd999ed2bf69081b7c7f5caadbc4c9542d0ed5fcf2e9b7b4d8d7ada3c82
typer==0.24.1 \
--hash=sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e
types-pyyaml==6.0.12.20250822 \
--hash=sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098
typing-extensions==4.15.0 \
--hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
typing-inspection==0.4.2 \
--hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7
uv==0.9.7 \
--hash=sha256:8cf6bc2482d1293cc630f66b862b494c09acda9b7faff7307ef52667a2b3ad49
uvicorn==0.35.0 \
--hash=sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a
xmltodict==0.14.2 \
--hash=sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac
yq==3.4.3 \
--hash=sha256:547e34bc3caacce83665fd3429bf7c85f8e8b6b9aaee3f953db1ad716ff3434d

File diff suppressed because it is too large Load Diff

BIN
releases/whl/online_fxreader_pr34-0.1.5.18-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.19-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.20-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.21-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.22-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.23-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.24-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.25-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.26-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.27-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.28-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.29-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.30-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.31-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.32-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.33-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.34-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.35-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.36-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.37-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.38-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.39-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.40-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.41-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.42-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

BIN
releases/whl/online_fxreader_pr34-0.1.5.43-py3-none-any.whl (Stored with Git LFS) Normal file

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More