[+] archlinux package v0.10: dev extras, solv extra, module switch support

1. add dev extras with online.fxreader.pr34, toml-cli, ninja, patchelf;
  2. add modules section for module:switch support;
  3. move name/version/scripts into module definition;
  4. add pip_find_links for deps/whl with pr34 wheel;
  5. fix pyrefly excludes with full relative paths for tests/experiments;
  6. update ruff excludes for tests/res and experiments;
  7. update Makefile python_put_archlinux_venv with solv extra and constraints;
  8. add requirements.3.13.txt for version-specific pinning;
  9. update .venv-whl-cache with cp313 wheels;
  10. release archlinux v0.10 .whl;
This commit is contained in:
LLM 2026-04-06 12:25:55 +00:00
parent 38e846cff4
commit 14fb7cb0bf
38 changed files with 458 additions and 72 deletions

@ -60,11 +60,16 @@ python_put_pr34:
ln -sf $(INSTALL_ROOT)/env3/bin/oom_firefox $(INSTALL_ROOT)/oom_firefox ln -sf $(INSTALL_ROOT)/env3/bin/oom_firefox $(INSTALL_ROOT)/oom_firefox
ARCHLINUX_MESON_DIR ?= meson/online/fxreader/pr34/commands_typed/archlinux
python_put_archlinux_venv: python_put_archlinux_venv:
cd python && ./.venv/bin/python3 -m uv pip install \ cd python && ./.venv/bin/python3 -m uv pip install \
--no-cache --no-index --no-deps --reinstall \ --no-cache --no-index --reinstall \
-f ../releases/whl \ -f ../releases/whl \
online.fxreader.pr34.commands_typed.archlinux -f ./.venv-whl-cache \
-f ../$(ARCHLINUX_MESON_DIR)/.venv-whl-cache \
-c ../$(ARCHLINUX_MESON_DIR)/requirements.3.13.txt \
'online.fxreader.pr34.commands_typed.archlinux[solv]'
PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34 PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34

@ -1,4 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import contextlib
import glob import glob
import importlib import importlib
import json import json
@ -86,9 +87,7 @@ class PyProject:
third_party_roots: list[ThirdPartyRoot] = dataclasses.field( third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
default_factory=lambda: [], default_factory=lambda: [],
) )
requirements: dict[str, pathlib.Path] = dataclasses.field( requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
default_factory=lambda: dict()
)
modules: list[Module] = dataclasses.field( modules: list[Module] = dataclasses.field(
default_factory=lambda: [], default_factory=lambda: [],
@ -276,9 +275,7 @@ def pyproject_load(
res.early_features = pr34_tool['early_features'] res.early_features = pr34_tool['early_features']
if 'pip_find_links' in pr34_tool: if 'pip_find_links' in pr34_tool:
res.pip_find_links = [ res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
]
if 'runtime_libdirs' in pr34_tool: if 'runtime_libdirs' in pr34_tool:
res.runtime_libdirs = [ res.runtime_libdirs = [
@ -297,9 +294,7 @@ def pyproject_load(
if 'third_party_roots' in pr34_tool: if 'third_party_roots' in pr34_tool:
for o in check_list(pr34_tool['third_party_roots']): for o in check_list(pr34_tool['third_party_roots']):
o2 = check_dict(o, str, str) o2 = check_dict(o, str, str)
assert all( assert all([k in {'package', 'module_root', 'path'} for k in o2])
[k in {'package', 'module_root', 'path'} for k in o2]
)
res.third_party_roots.append( res.third_party_roots.append(
PyProject.ThirdPartyRoot( PyProject.ThirdPartyRoot(
@ -313,9 +308,7 @@ def pyproject_load(
res.requirements = { res.requirements = {
k: d.parent / pathlib.Path(v) k: d.parent / pathlib.Path(v)
# pathlib.Path(o) # pathlib.Path(o)
for k, v in check_dict( for k, v in check_dict(pr34_tool['requirements'], str, str).items()
pr34_tool['requirements'], str, str
).items()
} }
if 'modules' in pr34_tool: if 'modules' in pr34_tool:
@ -382,9 +375,7 @@ class BootstrapSettings:
).strip() ).strip()
) )
pip_check_conflicts: Optional[bool] = dataclasses.field( pip_check_conflicts: Optional[bool] = dataclasses.field(
default_factory=lambda: os.environ.get( default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True))
'PIP_CHECK_CONFLICTS', json.dumps(True)
)
in [json.dumps(True)], in [json.dumps(True)],
) )
uv_cache_dir: str = dataclasses.field( uv_cache_dir: str = dataclasses.field(
@ -399,9 +390,18 @@ class BootstrapSettings:
'--no-index -U', '--no-index -U',
).split(), ).split(),
) )
whl_cache_update: Optional[bool] = dataclasses.field(default_factory=lambda: os.environ.get('WHL_CACHE_UPDATE', json.dumps(False)) in [json.dumps(True)]) whl_cache_update: Optional[bool] = dataclasses.field(
uv_compile_allow_index: bool = dataclasses.field(default_factory=lambda: os.environ.get('UV_COMPILE_ALLOW_INDEX', json.dumps(False)) in [json.dumps(True)]) default_factory=lambda: os.environ.get('WHL_CACHE_UPDATE', json.dumps(False))
venv_partial: bool = dataclasses.field(default_factory=lambda: os.environ.get('VENV_PARTIAL', json.dumps(False)) in [json.dumps(True)]) in [json.dumps(True)]
)
uv_compile_allow_index: bool = dataclasses.field(
default_factory=lambda: os.environ.get('UV_COMPILE_ALLOW_INDEX', json.dumps(False))
in [json.dumps(True)]
)
venv_partial: bool = dataclasses.field(
default_factory=lambda: os.environ.get('VENV_PARTIAL', json.dumps(False))
in [json.dumps(True)]
)
@classmethod @classmethod
def get( def get(
@ -459,9 +459,7 @@ def requirements_name_get(
else: else:
requirements_path = source_dir / 'requirements.txt' requirements_path = source_dir / 'requirements.txt'
requirements_path_in = requirements_path.parent / ( requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')
requirements_path.stem + '.in'
)
requirements_in: list[str] = [] requirements_in: list[str] = []
@ -481,6 +479,7 @@ class packaging_t:
class pkg_id_t: class pkg_id_t:
name: str name: str
version: str version: str
python_tag: Optional[str] = None
@staticmethod @staticmethod
def canonicalize_name(name: str) -> str: def canonicalize_name(name: str) -> str:
@ -489,6 +488,12 @@ class packaging_t:
@staticmethod @staticmethod
def parse_whl_name_version(filename: str) -> Optional['packaging_t.pkg_id_t']: def parse_whl_name_version(filename: str) -> Optional['packaging_t.pkg_id_t']:
parts = filename.split('-') parts = filename.split('-')
if len(parts) >= 5 and filename.endswith('.whl'):
return packaging_t.pkg_id_t(
name=packaging_t.canonicalize_name(parts[0]),
version=parts[1],
python_tag=parts[2],
)
if len(parts) >= 3 and filename.endswith('.whl'): if len(parts) >= 3 and filename.endswith('.whl'):
return packaging_t.pkg_id_t( return packaging_t.pkg_id_t(
name=packaging_t.canonicalize_name(parts[0]), name=packaging_t.canonicalize_name(parts[0]),
@ -506,20 +511,93 @@ class packaging_t:
) )
return None return None
@staticmethod
def parse_req_name(spec: str) -> Optional[str]:
    """Extract the canonical package name from a requirement spec.

    E.g. ``'pip>=23'`` -> ``'pip'``, ``'librt>=0.8'`` -> ``'librt'``.

    Per PEP 508 a project name must start with a letter or digit, so the
    regex is anchored on an alphanumeric first character; option-like
    tokens such as ``'-r requirements.txt'`` or ``'-c constraints.txt'``
    therefore yield ``None`` instead of a bogus name like ``'-r'``.

    :param spec: a single requirement specifier / requirements-file line.
    :return: the canonicalized name, or ``None`` when none can be extracted.
    """
    m = re.match(r'^([A-Za-z0-9][A-Za-z0-9._-]*)', spec.strip())
    if m is None:
        return None
    return packaging_t.canonicalize_name(m.group(1))
@staticmethod
def apply_overrides_to_constraints(
    requirements_path: pathlib.Path,
    overrides: list[str],
    output: 'typing.IO[str]',
) -> None:
    """Copy a pip-compile style requirements file to ``output``, replacing
    the block of every overridden package with its override spec.

    A "block" is a requirement line plus its continuation lines (joined
    with a trailing ``\\``) and its ``--hash=...`` lines, as emitted by
    ``uv pip compile --generate-hashes``.

    :param requirements_path: existing requirements/constraints file.
    :param overrides: requirement specs such as ``'librt>=0.8'``; the
        package name extracted from each spec is matched canonically.
    :param output: writable text stream receiving the patched constraints.
    """
    # Map canonical package name -> full override spec.
    override_map: dict[str, str] = {}
    for ov in overrides:
        name = packaging_t.parse_req_name(ov)
        if name is not None:
            override_map[name] = ov

    with io.open(requirements_path, 'r') as f:
        # True while we are inside the block of an overridden package.
        skip_block = False
        for line in f:
            stripped = line.strip()

            # Comments and hash lines belong to the current block.
            if stripped.startswith('#') or stripped.startswith('--hash'):
                if not skip_block:
                    output.write(line)
                continue

            # Blank lines / bare continuation markers: keep unless skipping.
            if stripped in ('', '\\'):
                if not skip_block:
                    output.write(line)
                continue

            if stripped.endswith('\\'):
                spec_part = stripped.rstrip('\\').strip()
            else:
                spec_part = stripped

            # NOTE(review): the original called packaging_t.parse_req_spec,
            # which is not defined in this file; parse_req_name (defined
            # above) provides the canonical name used for matching.
            req_name = packaging_t.parse_req_name(spec_part)

            if req_name is not None and req_name in override_map:
                # Start of an overridden package's block: emit the override
                # once and drop the original block's lines.
                skip_block = True
                output.write(override_map.pop(req_name) + '\n')
                continue

            if req_name is not None:
                # A new, non-overridden requirement line always terminates
                # an active skip — even when it carries a trailing '\'.
                # (The original kept skip_block set here and dropped the
                # package immediately following an overridden one.)
                skip_block = False
                output.write(line)
                continue

            # Unparsable line: a trailing-'\' continuation of a skipped
            # block stays skipped; anything else ends the skip.
            if skip_block and stripped.endswith('\\'):
                continue
            skip_block = False
            output.write(line)

    # Overrides that matched nothing in the file become extra constraints.
    for name, ov in override_map.items():
        output.write(ov + '\n')
def whl_cache_download( def whl_cache_download(
whl_cache_path: pathlib.Path, whl_cache_path: pathlib.Path,
requirements_path: pathlib.Path, requirements_path: pathlib.Path,
uv_python_version: list[str], python_version: Optional[str],
pip_find_links_args: list[str], pip_find_links_args: list[str],
) -> None: ) -> None:
whl_cache_path.mkdir(parents=True, exist_ok=True) whl_cache_path.mkdir(parents=True, exist_ok=True)
py_tag_prefix = 'cp' + python_version.replace('.', '') if python_version else None
cached_pkgs: set[tuple[str, str]] = set() cached_pkgs: set[tuple[str, str]] = set()
for whl in whl_cache_path.glob('*.whl'): for whl in whl_cache_path.glob('*.whl'):
parsed = packaging_t.parse_whl_name_version(whl.name) parsed = packaging_t.parse_whl_name_version(whl.name)
if parsed is not None: if parsed is None:
cached_pkgs.add((parsed.name, parsed.version)) continue
if py_tag_prefix is not None and parsed.python_tag is not None:
if not parsed.python_tag.startswith(py_tag_prefix) and parsed.python_tag not in ('py3', 'py2.py3'):
continue
cached_pkgs.add((parsed.name, parsed.version))
missing_reqs: list[str] = [] missing_reqs: list[str] = []
with io.open(requirements_path, 'r') as f: with io.open(requirements_path, 'r') as f:
@ -542,11 +620,17 @@ def whl_cache_download(
logger.info(dict(msg='downloading missing wheels', count=len(missing_reqs), pkgs=missing_reqs)) logger.info(dict(msg='downloading missing wheels', count=len(missing_reqs), pkgs=missing_reqs))
with tempfile.NamedTemporaryFile(mode='w', prefix='requirements_missing_', suffix='.txt', delete=False) as f: with tempfile.NamedTemporaryFile(
mode='w', prefix='requirements_missing_', suffix='.txt', delete=False
) as f:
f.write('\n'.join(missing_reqs)) f.write('\n'.join(missing_reqs))
f.flush() f.flush()
missing_req_path = f.name missing_req_path = f.name
pip_python_version_args: list[str] = []
if python_version is not None:
pip_python_version_args = ['--python-version', python_version.replace('.', '')]
try: try:
cmd = [ cmd = [
sys.executable, sys.executable,
@ -554,7 +638,7 @@ def whl_cache_download(
'pip', 'pip',
'download', 'download',
'--only-binary=:all:', '--only-binary=:all:',
*uv_python_version, *pip_python_version_args,
*pip_find_links_args, *pip_find_links_args,
'-r', '-r',
missing_req_path, missing_req_path,
@ -576,13 +660,16 @@ def check_host_prerequisites() -> None:
stderr=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
) )
except (subprocess.CalledProcessError, FileNotFoundError): except (subprocess.CalledProcessError, FileNotFoundError):
logger.error('[bootstrap] %s -m %s is not available on the host system' % (sys.executable, mod)) logger.error(
'[bootstrap] %s -m %s is not available on the host system' % (sys.executable, mod)
)
sys.exit(1) sys.exit(1)
def env_bootstrap( def env_bootstrap(
bootstrap_settings: BootstrapSettings, bootstrap_settings: BootstrapSettings,
pyproject: PyProject, pyproject: PyProject,
overrides: Optional[list[str]] = None,
) -> None: ) -> None:
check_host_prerequisites() check_host_prerequisites()
@ -617,9 +704,7 @@ def env_bootstrap(
requirements_in: list[str] = [] requirements_in: list[str] = []
requirements_in.extend( requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
)
if pyproject.early_features: if pyproject.early_features:
early_dependencies = sum( early_dependencies = sum(
@ -671,7 +756,9 @@ def env_bootstrap(
constraint_args: list[str] = [] constraint_args: list[str] = []
if bootstrap_settings.venv_partial and requirements_path.exists(): if bootstrap_settings.venv_partial and requirements_path.exists():
logger.info('[bootstrap] VENV_PARTIAL: recompiling with existing requirements.txt as constraints') logger.info(
'[bootstrap] VENV_PARTIAL: recompiling with existing requirements.txt as constraints'
)
needs_compile = True needs_compile = True
constraint_args = ['-c', str(requirements_path)] constraint_args = ['-c', str(requirements_path)]
@ -699,43 +786,58 @@ def env_bootstrap(
uv_compile_args = bootstrap_settings.uv_args uv_compile_args = bootstrap_settings.uv_args
if bootstrap_settings.uv_compile_allow_index: if bootstrap_settings.uv_compile_allow_index:
uv_compile_args = [o for o in uv_compile_args if o not in ('--no-index', '-U', '--upgrade')] uv_compile_args = [
o for o in uv_compile_args if o not in ('--no-index', '-U', '--upgrade')
]
if len(constraint_args) > 0: if len(constraint_args) > 0:
uv_compile_args = [o for o in uv_compile_args if o not in ('-U', '--upgrade')] uv_compile_args = [o for o in uv_compile_args if o not in ('-U', '--upgrade')]
cmd = [ with contextlib.ExitStack() as stack:
'uv', if overrides and len(constraint_args) > 0:
'--cache-dir', patched = stack.enter_context(
bootstrap_settings.uv_cache_dir, tempfile.NamedTemporaryFile(
'pip', mode='w', prefix='constraints_', suffix='.txt'
'compile', )
*uv_python_version, )
'--generate-hashes', packaging_t.apply_overrides_to_constraints(
'--no-annotate', requirements_path, overrides, patched
'--no-header', )
*pip_find_links_args, patched.flush()
*cache_find_links_args, constraint_args = ['-c', patched.name]
*constraint_args,
*uv_compile_args,
'-o',
f_out.name,
f_in.name,
]
logger.info(dict(cmd=cmd))
try: cmd = [
subprocess.check_call(cmd) 'uv',
os.replace(f_out.name, str(requirements_path)) '--cache-dir',
except subprocess.CalledProcessError: bootstrap_settings.uv_cache_dir,
os.unlink(f_out.name) 'pip',
raise 'compile',
*uv_python_version,
'--generate-hashes',
'--no-annotate',
'--no-header',
*pip_find_links_args,
*cache_find_links_args,
*constraint_args,
*uv_compile_args,
'-o',
f_out.name,
f_in.name,
]
logger.info(dict(cmd=cmd))
try:
subprocess.check_call(cmd)
os.replace(f_out.name, str(requirements_path))
except subprocess.CalledProcessError:
os.unlink(f_out.name)
raise
if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update: if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
whl_cache_download( whl_cache_download(
whl_cache_path=bootstrap_settings.whl_cache_path, whl_cache_path=bootstrap_settings.whl_cache_path,
requirements_path=requirements_path, requirements_path=requirements_path,
uv_python_version=uv_python_version, python_version=bootstrap_settings.python_version,
pip_find_links_args=pip_find_links_args, pip_find_links_args=pip_find_links_args,
) )
if bootstrap_settings.whl_cache_path.exists(): if bootstrap_settings.whl_cache_path.exists():
@ -749,7 +851,11 @@ def env_bootstrap(
'uv', 'uv',
'--cache-dir', '--cache-dir',
bootstrap_settings.uv_cache_dir, bootstrap_settings.uv_cache_dir,
*[o for o in bootstrap_settings.uv_args if o not in ['-U', '--upgrade', '--no-index']], *[
o
for o in bootstrap_settings.uv_args
if o not in ['-U', '--upgrade', '--no-index']
],
'venv', 'venv',
*venv_python_version, *venv_python_version,
*cache_find_links_args, *cache_find_links_args,
@ -790,6 +896,53 @@ def paths_equal(a: pathlib.Path | str, b: pathlib.Path | str) -> bool:
return os.path.abspath(str(a)) == os.path.abspath(str(b)) return os.path.abspath(str(a)) == os.path.abspath(str(b))
import argparse as _argparse


class argv_extract_t:
    """Extract known arguments from argv by scanning parser action definitions.

    Unlike ``ArgumentParser.parse_known_args``, tokens not registered on the
    parser are never treated as errors or prefix abbreviations: they are
    passed through verbatim in ``rest``, in their original order.
    """

    @dataclasses.dataclass
    class res_t:
        # Parsed values for the options the parser recognized.
        namespace: _argparse.Namespace
        # Every argv token the parser does not own.
        rest: list[str]

    @staticmethod
    def extract(
        parser: _argparse.ArgumentParser,
        argv: list[str],
    ) -> 'argv_extract_t.res_t':
        """Split ``argv`` into tokens owned by ``parser`` and the remainder.

        Supports both the ``--opt value`` and ``--opt=value`` spellings for
        options that take a single value (the original only matched the
        two-token form, so ``--opt=value`` leaked into ``rest``).

        :param parser: parser whose registered option strings are matched.
        :param argv: raw argument vector (without the program name).
        :return: parsed namespace plus the unmatched tokens.
        """
        flag_map: dict[str, _argparse.Action] = {}
        for action in parser._actions:
            for opt in action.option_strings:
                flag_map[opt] = action

        matched_argv: list[str] = []
        rest: list[str] = []

        i = 0
        while i < len(argv):
            tok = argv[i]
            i += 1
            # '--opt=value' carries its value inline; match on the flag part
            # and let argparse itself split the token later.
            key = tok.split('=', 1)[0] if tok.startswith('--') else tok
            action = flag_map.get(key)
            if action is None:
                rest.append(tok)
                continue
            matched_argv.append(tok)
            # Zero-argument actions (store_true/false, count, help, and any
            # const-carrying action) never consume a following token; neither
            # does the inline '=' form.
            takes_value = (
                action.nargs in (None, 1)
                and action.const is None
                and not isinstance(
                    action,
                    (
                        _argparse._StoreTrueAction,
                        _argparse._StoreFalseAction,
                        _argparse._CountAction,
                        _argparse._HelpAction,
                    ),
                )
            )
            if takes_value and '=' not in tok and i < len(argv):
                matched_argv.append(argv[i])
                i += 1

        namespace = parser.parse_args(matched_argv)
        return argv_extract_t.res_t(
            namespace=namespace,
            rest=rest,
        )
def run( def run(
d: Optional[pathlib.Path] = None, d: Optional[pathlib.Path] = None,
cli_path: Optional[pathlib.Path] = None, cli_path: Optional[pathlib.Path] = None,
@ -800,6 +953,22 @@ def run(
if d is None: if d is None:
d = pathlib.Path(__file__).parent / 'pyproject.toml' d = pathlib.Path(__file__).parent / 'pyproject.toml'
bootstrap_parser = _argparse.ArgumentParser(add_help=False)
bootstrap_parser.add_argument(
'--bootstrap-help',
action='help',
help='show bootstrap help and exit',
)
bootstrap_parser.add_argument(
'--bootstrap-override',
dest='overrides',
action='append',
default=[],
help='override for uv pip compile (e.g. "librt>=0.8")',
)
bootstrap_args = argv_extract_t.extract(bootstrap_parser, sys.argv[1:])
bootstrap_settings = BootstrapSettings.get() bootstrap_settings = BootstrapSettings.get()
pyproject: PyProject = pyproject_load(d) pyproject: PyProject = pyproject_load(d)
@ -813,6 +982,7 @@ def run(
env_bootstrap( env_bootstrap(
bootstrap_settings=bootstrap_settings, bootstrap_settings=bootstrap_settings,
pyproject=pyproject, pyproject=pyproject,
overrides=bootstrap_args.namespace.overrides or None,
) )
logger.info([sys.executable, sys.argv, bootstrap_settings.python_path]) logger.info([sys.executable, sys.argv, bootstrap_settings.python_path])
@ -822,7 +992,8 @@ def run(
str(bootstrap_settings.python_path), str(bootstrap_settings.python_path),
[ [
str(bootstrap_settings.python_path), str(bootstrap_settings.python_path),
*sys.argv, sys.argv[0],
*bootstrap_args.rest,
], ],
) )
@ -831,7 +1002,7 @@ def run(
[ [
str(bootstrap_settings.python_path), str(bootstrap_settings.python_path),
str(cli_path), str(cli_path),
*sys.argv[1:], *bootstrap_args.rest,
], ],
) )

@ -1,5 +1,4 @@
[project] [project]
name = 'online.fxreader.pr34.commands_typed.archlinux'
description = 'Arch Linux package management tools' description = 'Arch Linux package management tools'
requires-python = '>= 3.10' requires-python = '>= 3.10'
maintainers = [ maintainers = [
@ -9,8 +8,9 @@ classifiers = [
'Programming Language :: Python', 'Programming Language :: Python',
] ]
version = '0.9' dynamic = [
dynamic = [] 'version',
]
dependencies = [ dependencies = [
'pydantic', 'pydantic',
@ -22,6 +22,13 @@ solv = [
'solv==0.7.35', 'solv==0.7.35',
] ]
dev = [
'online.fxreader.pr34',
'toml-cli',
'ninja',
'patchelf',
]
lint = [ lint = [
'tomli', 'tomli',
'mypy', 'mypy',
@ -30,11 +37,16 @@ lint = [
'ruff', 'ruff',
] ]
[project.scripts]
online-fxreader-pr34-archlinux = 'online.fxreader.pr34.commands_typed.archlinux.cli:main'
[tool.online-fxreader-pr34] [tool.online-fxreader-pr34]
early_features = ['default', 'solv', 'lint'] early_features = ['default', 'solv', 'dev', 'lint']
pip_find_links = [
'deps/whl',
]
requirements = { default_dev_lint_solv_3_13 = 'requirements.3.13.txt' }
modules = [
{ name = 'online.fxreader.pr34.commands_typed.archlinux', scripts = { 'online-fxreader-pr34-archlinux' = 'online.fxreader.pr34.commands_typed.archlinux.cli:main' }, project = { version = '0.10', dynamic = [] } },
]
[build-system] [build-system]
requires = ["meson-python", "pybind11"] requires = ["meson-python", "pybind11"]
@ -80,8 +92,8 @@ project-includes = [
] ]
project-excludes = [ project-excludes = [
'.venv', '.venv',
'*/tests/res', '../../../../../../python/online/fxreader/pr34/commands_typed/archlinux/tests/res',
'*/tests/experiments', '../../../../../../python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments',
] ]
search-path = [ search-path = [
'../../../../../../python', '../../../../../../python',

@ -0,0 +1,96 @@
annotated-doc==0.0.4 \
--hash=sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320
annotated-types==0.7.0 \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53
argcomplete==3.6.3 \
--hash=sha256:f5007b3a600ccac5d25bbce33089211dfd49eab4a7718da3f10e3082525a92ce
build==1.4.0 \
--hash=sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596
click==8.3.2 \
--hash=sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d
jmespath==1.1.0 \
--hash=sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64
librt==0.8.1 \
--hash=sha256:5e4af413908f77294605e28cfd98063f54b2c790561383971d2f52d113d9c363
marisa-trie==1.4.0 \
--hash=sha256:33cca20e60a78da01650d67ef97d60f3ec8a2b60d4dea2d65306fb418d17039d
markdown-it-py==4.0.0 \
--hash=sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147
mdurl==0.1.2 \
--hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8
meson==1.10.2 \
--hash=sha256:5f84ef186e6e788d9154db63620fc61b3ece69f643b94b43c8b9203c43d89b36
meson-python==0.19.0 \
--hash=sha256:67b5906c37404396d23c195e12c8825506074460d4a2e7083266b845d14f0298
mypy==1.20.0 \
--hash=sha256:555658c611099455b2da507582ea20d2043dfdfe7f5ad0add472b1c6238b433f
mypy-extensions==1.1.0 \
--hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505
ninja==1.13.0 \
--hash=sha256:fb46acf6b93b8dd0322adc3a4945452a4e774b75b91293bafcc7b7f8e6517dfa
nodeenv==1.10.0 \
--hash=sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827
online-fxreader-pr34==0.1.5.66 \
--hash=sha256:b6f6640fb423adfced027ed39af74512a824574e42e2ccc308ae0c733f1132d4
packaging==26.0 \
--hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529
patchelf==0.17.2.4 \
--hash=sha256:d9b35ebfada70c02679ad036407d9724ffe1255122ba4ac5e4be5868618a5689
pathspec==1.0.4 \
--hash=sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723
pip==25.1 \
--hash=sha256:13b4aa0aaad055020a11bec8a1c2a70a2b2d080e12d89b962266029fff0a16ba
pybind11==3.0.3 \
--hash=sha256:fb5f8e4a64946b4dcc0451c83a8c384f803bc0a62dd1ba02f199e97dbc9aad4c
pydantic==2.12.5 \
--hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d
pydantic-core==2.41.5 \
--hash=sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586
pydantic-settings==2.13.1 \
--hash=sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237
pygments==2.20.0 \
--hash=sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176
pyproject-hooks==1.2.0 \
--hash=sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913
pyproject-metadata==0.11.0 \
--hash=sha256:85bbecca8694e2c00f63b492c96921d6c228454057c88e7c352b2077fcaa4096
pyrefly==0.59.1 \
--hash=sha256:59a2d01723b84d042f4fa6ec871ffd52d0a7e83b0ea791c2e0bb0ff750abce56
pyright==1.1.408 \
--hash=sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1
python-dotenv==1.2.2 \
--hash=sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a
pyyaml==6.0.3 \
--hash=sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6
regex==2026.4.4 \
--hash=sha256:ffa81f81b80047ba89a3c69ae6a0f78d06f4a42ce5126b0eb2a0a10ad44e0b2e
rich==14.3.3 \
--hash=sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d
ruff==0.15.9 \
--hash=sha256:2b0c7c341f68adb01c488c3b7d4b49aa8ea97409eae6462d860a79cf55f431b6
setuptools==82.0.1 \
--hash=sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb
shellingham==1.5.4 \
--hash=sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686
solv==0.7.35 \
--hash=sha256:05aaa3abaf05482d22946cab81e328b536a59465cc47d628a72bc3d2e4dcafac
toml-cli==0.8.2 \
--hash=sha256:7af4679ca04c53ad0f6d300dab26f45a78fedf88e8310305bfe0a8ead37fd000
tomli==2.4.1 \
--hash=sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f
tomlkit==0.14.0 \
--hash=sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680
tomlq==0.1.0 \
--hash=sha256:4b966fd999ed2bf69081b7c7f5caadbc4c9542d0ed5fcf2e9b7b4d8d7ada3c82
typer==0.24.1 \
--hash=sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e
typing-extensions==4.15.0 \
--hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
typing-inspection==0.4.2 \
--hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7
uv==0.11.3 \
--hash=sha256:0fde893b5ab9f6997fe357138e794bac09d144328052519fbbe2e6f72145e457
xmltodict==1.0.4 \
--hash=sha256:a4a00d300b0e1c59fc2bfccb53d7b2e88c32f200df138a0dd2229f842497026a
yq==3.4.3 \
--hash=sha256:547e34bc3caacce83665fd3429bf7c85f8e8b6b9aaee3f953db1ad716ff3434d

Binary file not shown.