[+] add build config for archlinux as separate meson package

1. add python/pyproject.common.toml with module definitions;
  2. add meson/online/.../archlinux/ with meson.build, m.py, pyproject.common.toml;
  3. meson.build uses fs.relative_to for python sources from ../../python/;
  4. m.py is a copy of cli_bootstrap.py with the last 5 lines patched for paths;
  5. update Makefile with python_put_archlinux_venv rule;
  6. rename python/pyproject.toml to pyproject.toml.bak;
  7. update .gitignore, docker-compose.local.yml, requirements.txt;
This commit is contained in:
LLM 2026-04-06 07:04:09 +00:00
parent 49276b0e06
commit 4e7a15b9e8
13 changed files with 2771 additions and 532 deletions

1
.gitignore vendored

@ -13,5 +13,6 @@ d2/book1/books
.vscode/*
!.vscode/launch.json
python/build
python/pyproject.toml
.*.kate-swp
!releases/whl/*.whl

129
Makefile

@ -51,6 +51,21 @@ python_put_dist:
done
ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands
python_put_pr34:
$(INSTALL_ROOT)/env3/bin/python3 -m uv pip install $(UV_ARGS) \
-f releases/whl \
-U \
online.fxreader.pr34
ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands
ln -sf $(INSTALL_ROOT)/env3/bin/oom_firefox $(INSTALL_ROOT)/oom_firefox
# Install the archlinux meson package wheel into python/.venv from the local
# wheel directory only: no cache, no index (offline), no dependency
# resolution, and always reinstall so a rebuilt wheel of the same version wins.
python_put_archlinux_venv:
	cd python && ./.venv/bin/python3 -m uv pip install \
		--no-cache --no-index --no-deps --reinstall \
		-f ../releases/whl \
		online.fxreader.pr34.commands_typed.archlinux
PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34
python_whl:
for f in $(PYTHON_PROJECTS_NAMES); do \
@ -69,7 +84,6 @@ dotfiles_put:
cp dotfiles/.vimrc ~/.vimrc
cp dotfiles/.tmux.conf ~/.tmux.conf
cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.gitconfig ~/.gitconfig
cp -rp \
dotfiles/.ipython/profile_default/ipython_config.py \
@ -82,6 +96,20 @@ dotfiles_put:
done
#commands install -f -p dotfiles -s dotfiles/ -t ~/.config/
# Copy vim dotfiles into INSTALL_ROOT (an arbitrary prefix, not necessarily ~).
dotfiles_vim_put:
	@echo INSTALL_ROOT=$(INSTALL_ROOT)
	mkdir -p $(INSTALL_ROOT)
	mkdir -p $(INSTALL_ROOT)/.vim
	cp dotfiles/.vimrc $(INSTALL_ROOT)/.vimrc
	cp dotfiles/.py3.vimrc $(INSTALL_ROOT)/.py3.vimrc
	cp -rp dotfiles/.vim/online_fxreader_pr34_vim $(INSTALL_ROOT)/.vim/
# NOTE(review): mkdir targets $(INSTALL_ROOT) but the copy goes to ~ — confirm intended.
dotfiles_tmux_put:
	mkdir -p $(INSTALL_ROOT)
	cp dotfiles/.tmux.conf ~/.tmux.conf
PLATFORM ?= macbook_air_2012
PLATFORM_TMP ?= tmp/platform_dotfiles/$(PLATFORM)
@ -96,6 +124,42 @@ dotfiles_put_platform:
sudo udevadm control --reload
sudo systemctl daemon-reload
# GPG recipient(s) used to encrypt the sensitive platform configs below.
GPG_RECIPIENTS_ARGS ?= -r 891382BEBFEFFC6729837400DA0B6C15FBB70FC9
# Snapshot non-sensitive, machine-specific system files into
# platform_dotfiles/$(PLATFORM)/ by piping tar to tar (preserves paths).
dotfiles_fetch_platform:
	mkdir -p platform_dotfiles/$(PLATFORM)
	tar -cvf - \
		/etc/udev/rules.d/ \
		/etc/systemd/logind.conf \
		~/.local/bin/systemd_gtk \
		~/.local/bin/gnome-shortcuts-macbook-air \
		/usr/local/bin \
		| tar -xvf - -C platform_dotfiles/$(PLATFORM)
# Snapshot sensitive configs (package list, sway, fstab) as a single
# gpg-encrypted, timestamped archive; -h dereferences symlinks.
dotfiles_fetch_platform_gpg:
	mkdir -p platform_dotfiles_gpg/$(PLATFORM)
	yay -Q > /tmp/pacman-packages.txt;
	tar -h -cvf - \
		/tmp/pacman-packages.txt \
		~/.sway/config.d \
		~/.sway/config \
		~/.config/commands-status.json \
		/etc/fstab \
		| gpg -e $(GPG_RECIPIENTS_ARGS) \
		> platform_dotfiles_gpg/$(PLATFORM)/sensitive-configs-$$(date -Iseconds).gpg
	rm /tmp/pacman-packages.txt;
# Convenience wrappers pinning PLATFORM for the ideapad laptop.
# NOTE(review): prefer $(MAKE) over literal `make` so -j/-n propagate.
dotfiles_fetch_platform_ideapad_slim_3_15arp10:
	make dotfiles_fetch_platform \
		PLATFORM=ideapad_slim_3_15arp10
dotfiles_fetch_platform_gpg_ideapad_slim_3_15arp10:
	make dotfiles_fetch_platform_gpg \
		PLATFORM=ideapad_slim_3_15arp10
dotfiles_sway_put:
	mkdir -p ~/.sway
	cp dotfiles/.sway/config ~/.sway/config
dotfiles_fetch:
commands install -f -p ~ -s ~/.config/katerc -t dotfiles
commands install -f -p ~ -s ~/.mime.types -t dotfiles
@ -124,14 +188,10 @@ systemd:
done
sudo systemctl daemon-reload
# Create the project virtualenv and install pinned requirements into it.
venv:
	uv venv
	uv pip install -p .venv \
		-r requirements.txt
# Re-pin requirements.txt from requirements.in with hashes for reproducibility.
venv_compile:
	uv pip compile --generate-hashes \
		requirements.in > requirements.txt
# Concatenate the per-service docker env file with the local overrides.
compose_env:
	for s in checks; do \
		cat docker/$$s/.env .envs/$$s.env > .envs/$$s.patched.env; \
	done
MYPY_SOURCES ?= \
d1/cpanel.py
@ -139,3 +199,54 @@ mypy:
. .venv/bin/activate && \
mypy --strict --follow-imports silent \
$(MYPY_SOURCES)
# Compose binary used for the nginx helpers; override for rootless setups.
COMPOSE ?= sudo docker-compose
# Regenerate nginx.conf inside the plain-HTTP app container.
nginx_config_http:
	$(COMPOSE) exec app \
		python3 \
		d1/nginx_config.py \
		tmp/cache/forward.nginx.json \
		/etc/nginx/nginx.conf
# Regenerate nginx.conf inside the TLS-terminating container.
nginx_config_https:
	$(COMPOSE) exec ssl-app \
		python3 \
		d1/nginx_config.py ssl \
		tmp/d1/ssl.nginx.json \
		/etc/nginx/nginx.conf
nginx_config: nginx_config_https nginx_config_http
# Parameterised reload; NGINX_SERVICE is set by the wrapper below.
nginx_reload_common:
	$(COMPOSE) exec $(NGINX_SERVICE) nginx -s reload
nginx_reload:
	make nginx_reload_common NGINX_SERVICE=ssl-app
	make nginx_reload_common NGINX_SERVICE=app
# Podman wrappers for the local LLM container (docker-compose.local.yml).
LLM_COMPOSE ?= podman-compose -f docker-compose.local.yml
LLM_DOCKER ?= podman
llm-podman-pull:
	$(LLM_COMPOSE) pull llm
llm-podman-create:
	$(LLM_COMPOSE) up --no-start --force-recreate llm
# start/stop address the container directly by name: podman-compose
# start/stop proved unreliable here (kept as comments for reference).
llm-podman-up:
	# $(LLM_COMPOSE) start
	$(LLM_DOCKER) start claude_llm_1
llm-podman-down:
	# $(LLM_COMPOSE) stop
	$(LLM_DOCKER) stop claude_llm_1
llm-podman-run:
	$(LLM_COMPOSE) exec -w /app llm /bin/bash
llm-podman-run-zsh:
	$(LLM_COMPOSE) exec -u nartes -w /app llm /usr/bin/zsh
llm-podman-ps:
	$(LLM_COMPOSE) ps

45
docker-compose.local.yml Normal file

@ -0,0 +1,45 @@
version: '3.7'

# Reusable presets, merged into services via YAML anchors.
# NOTE(review): &json-logging is currently not referenced by any service.
x-json-logging: &json-logging
  logging:
    driver: "json-file"
    options:
      max-size: 10m
      max-file: "3"
x-resources-normal: &resources-normal
  deploy:
    resources:
      limits:
        cpus: '1'
        memory: 2GB
x-resources-small: &resources-small
  deploy:
    resources:
      limits:
        cpus: '0.5'
        memory: 1GB
x-resources-micro: &resources-micro
  deploy:
    resources:
      limits:
        cpus: '0.25'
        memory: 128M

services:
  # Long-lived idle container used as a sandboxed LLM work environment;
  # the repository is bind-mounted at /app.
  llm:
    <<: *resources-normal
    # image: archlinux@sha256:19f09c502102d6d249f4e15667144cab1b8a195d9a549996774a665f582bea99
    image: online.fxreader.pr34.claude.v1:v0.2
    # keep the container alive; work happens via `exec` (see Makefile llm-* rules)
    command: ['sh', '-c', 'while true; do sleep 1; done']
    init: true
    volumes:
      - ./:/app:rw
      - online.fxreader.pr34.claude.v1:/mnt/online.fxreader.pr34.claude

volumes:
  online.fxreader.pr34.claude.v1:

@ -0,0 +1,812 @@
#!/usr/bin/env python3
import glob
import importlib
import json
import io
import tempfile
import dataclasses
import pathlib
import sys
import subprocess
import os
import logging
import re
import typing
from typing import (
Optional,
Any,
cast,
Type,
TypeVar,
Callable,
)
# typing_extensions is only guaranteed at type-check time; at runtime fall
# back to a no-op `overload` decorator so the script runs on bare installs.
if typing.TYPE_CHECKING:
	from typing_extensions import (
		Self,
		BinaryIO,
		overload,
	)
else:
	try:
		from typing_extensions import overload
	except ModuleNotFoundError:
		# identity decorator: keeps @overload-annotated stubs importable
		def overload(f: Any) -> Any:
			return f

logger = logging.getLogger(__name__)
def toml_load(f: 'BinaryIO') -> Any:
	"""Parse a TOML document from a binary stream.

	Prefers the stdlib ``tomllib`` (Python >= 3.11) and falls back to the
	third-party ``tomli`` package; raises ``NotImplementedError`` when
	neither parser can be imported.
	"""
	for module_name in ('tomllib', 'tomli'):
		try:
			parser_module = importlib.import_module(module_name)
		except ModuleNotFoundError:
			continue
		loader = cast(Callable[[Any], Any], getattr(parser_module, 'load'))
		return loader(f)
	raise NotImplementedError
@dataclasses.dataclass
class PyProject:
	"""Parsed view of a pyproject.toml, as consumed by the bootstrap logic.

	Only the fields this script reads are modeled; everything else stays in
	the raw ``tool`` dict.
	"""

	@dataclasses.dataclass
	class Module:
		# submodule entry from [tool.online-fxreader-pr34].modules
		name: str
		# optional path to a module-specific meson.build
		meson: Optional[pathlib.Path] = None
		# per-module [tool.*] overrides
		tool: dict[str, Any] = dataclasses.field(default_factory=lambda: dict())
		# per-module console-script entry points
		scripts: dict[str, str] = dataclasses.field(default_factory=lambda: dict())
		# per-module [project] overrides
		project: dict[str, Any] = dataclasses.field(default_factory=lambda: dict())

	# location of the pyproject.toml this object was loaded from
	path: pathlib.Path
	# feature name -> requirement specs; 'default' holds [project].dependencies
	dependencies: dict[str, list[str]]
	name: Optional[str] = None
	version: Optional[str] = None
	# features installed into the venv before the package build itself
	early_features: Optional[list[str]] = None
	# extra `pip -f` wheel directories, resolved relative to `path`
	pip_find_links: Optional[list[pathlib.Path]] = None
	runtime_libdirs: Optional[list[pathlib.Path]] = None
	runtime_preload: Optional[list[pathlib.Path]] = None

	@dataclasses.dataclass
	class ThirdPartyRoot:
		package: Optional[str] = None
		module_root: Optional[str] = None
		path: Optional[str] = None

	third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
		default_factory=lambda: [],
	)
	# feature name -> pinned requirements file, resolved relative to `path`
	requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
	modules: list[Module] = dataclasses.field(
		default_factory=lambda: [],
	)
	# raw [tool] table as parsed from the file
	tool: dict[str, Any] = dataclasses.field(
		default_factory=lambda: dict(),
	)
Key = TypeVar('Key')
Value = TypeVar('Value')


@overload
def check_dict(
	value: Any,
	KT: Type[Key],
	VT: Type[Value],
) -> dict[Key, Value]: ...
@overload
def check_dict(
	value: Any,
	KT: Type[Key],
) -> dict[Key, Any]: ...
def check_dict(
	value: Any,
	KT: Type[Key],
	VT: Optional[Type[Value]] = None,
) -> dict[Key, Value]:
	"""Assert that ``value`` is a dict with keys of type ``KT`` (and values of
	type ``VT`` when given), then return it narrowed for the type checker.

	Generic aliases like ``list[str]`` are accepted for ``VT``; only the
	origin class is checked at runtime.
	"""
	assert isinstance(value, dict)
	checked = cast(dict[Any, Any], value)

	runtime_vt: Optional[type[Any]] = None
	if VT is not None:
		origin = typing.get_origin(VT)
		# for parameterised generics, isinstance() needs the bare origin class
		runtime_vt = cast(type[Any], origin) if origin is not None else VT

	for k, v in checked.items():
		assert isinstance(k, KT)
		assert runtime_vt is None or isinstance(v, runtime_vt)

	if VT is None:
		return cast(dict[Key, Any], value)
	return cast(dict[Key, Value], value)
@overload
def check_list(
value: Any,
VT: Type[Value],
) -> list[Value]: ...
@overload
def check_list(
value: Any,
) -> list[Any]: ...
def check_list(
value: Any,
VT: Optional[Type[Value]] = None,
) -> list[Value] | list[Any]:
assert isinstance(value, list)
value2 = cast(list[Any], value)
assert all([(VT is None or isinstance(o, VT)) for o in value2])
if VT is None:
return cast(
list[Any],
value,
)
else:
return cast(
list[Value],
value,
)
def check_type(
	value: Any,
	VT: Type[Value],
	attribute_name: Optional[str] = None,
) -> Value:
	"""Assert that ``value`` (or ``value.<attribute_name>`` when a non-empty
	attribute name is given) is an instance of ``VT`` and return it."""
	# truthiness check mirrors the original: '' behaves like None
	target = getattr(value, attribute_name) if attribute_name else value
	assert isinstance(target, VT)
	return target
def pyproject_load(
	d: pathlib.Path,
) -> PyProject:
	"""Parse the pyproject.toml at ``d`` into a :class:`PyProject`.

	Reads [project].dependencies / optional-dependencies and the custom
	[tool.online-fxreader-pr34] table; all relative paths in that table are
	resolved against the directory containing ``d``.
	"""
	with io.open(d, 'rb') as f:
		content = toml_load(f)
		assert isinstance(content, dict)

		# feature name -> requirement specs; 'default' = mandatory deps
		dependencies: dict[str, list[str]] = dict()
		dependencies['default'] = content['project']['dependencies']

		if 'optional-dependencies' in content['project']:
			assert isinstance(content['project']['optional-dependencies'], dict)

			for k, v in check_dict(
				check_dict(
					check_dict(
						content,
						str,
						# Any,
					)['project'],
					str,
					# Any,
				)['optional-dependencies'],
				str,
				list[Any],
			).items():
				# assert isinstance(v, list)
				# assert isinstance(k, str)
				dependencies[k] = v

		name: Optional[str] = None
		if 'name' in content.get('project', {}):
			name = content['project']['name']

		version: Optional[str] = None
		if 'version' in content.get('project', {}):
			version = content['project']['version']

		res = PyProject(
			path=d,
			dependencies=dependencies,
			name=name,
			version=version,
		)

		# the project-specific tool table uses dashes in its TOML key
		tool_name = 'online.fxreader.pr34'.replace('.', '-')

		if 'tool' in content:
			res.tool = check_dict(
				content['tool'],
				str,
			)

		if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
			pr34_tool = check_dict(
				check_dict(
					content['tool'],
					str,
				)[tool_name],
				str,
			)

			if 'early_features' in pr34_tool:
				res.early_features = pr34_tool['early_features']

			# paths below are stored relative to the pyproject.toml location
			if 'pip_find_links' in pr34_tool:
				res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]

			if 'runtime_libdirs' in pr34_tool:
				res.runtime_libdirs = [
					d.parent / pathlib.Path(o)
					# pathlib.Path(o)
					for o in check_list(pr34_tool['runtime_libdirs'], str)
				]

			if 'runtime_preload' in pr34_tool:
				res.runtime_preload = [
					d.parent / pathlib.Path(o)
					# pathlib.Path(o)
					for o in check_list(pr34_tool['runtime_preload'], str)
				]

			if 'third_party_roots' in pr34_tool:
				for o in check_list(pr34_tool['third_party_roots']):
					o2 = check_dict(o, str, str)
					# only these three keys are recognised
					assert all([k in {'package', 'module_root', 'path'} for k in o2])
					res.third_party_roots.append(
						PyProject.ThirdPartyRoot(
							package=o2.get('package'),
							module_root=o2.get('module_root'),
							path=o2.get('path'),
						)
					)

			if 'requirements' in pr34_tool:
				res.requirements = {
					k: d.parent / pathlib.Path(v)
					# pathlib.Path(o)
					for k, v in check_dict(pr34_tool['requirements'], str, str).items()
				}

			if 'modules' in pr34_tool:
				modules = check_list(pr34_tool['modules'])
				# res.modules = []

				for o in modules:
					assert isinstance(o, dict)
					assert 'name' in o and isinstance(o['name'], str)

					module = PyProject.Module(
						name=o['name'],
					)

					if 'meson' in o:
						assert 'meson' in o and isinstance(o['meson'], str)
						module.meson = pathlib.Path(o['meson'])

					if 'tool' in o:
						module.tool.update(
							check_dict(
								o['tool'],
								str,
							)
						)

					if 'scripts' in o:
						module.scripts.update(
							check_dict(
								o['scripts'],
								str,
								str,
							)
						)

					if 'project' in o:
						module.project.update(
							check_dict(
								o['project'],
								str,
							)
						)

					res.modules.append(module)

		return res
@dataclasses.dataclass
class BootstrapSettings:
	"""Environment-driven knobs for the bootstrap run.

	Each field with a default reads its value from an environment variable at
	construction time (booleans compare against JSON-encoded 'true').
	"""

	env_path: pathlib.Path
	whl_cache_path: pathlib.Path
	python_path: pathlib.Path
	base_dir: pathlib.Path
	# PYTHON_VERSION, default: the running interpreter's major.minor
	python_version: Optional[str] = dataclasses.field(
		default_factory=lambda: os.environ.get(
			'PYTHON_VERSION',
			'%d.%d'
			% (
				sys.version_info.major,
				sys.version_info.minor,
			),
		).strip()
	)
	# PIP_CHECK_CONFLICTS, default: true
	pip_check_conflicts: Optional[bool] = dataclasses.field(
		default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
	)
	# UV_CACHE_DIR, default: ./.uv-cache under the current working directory
	uv_cache_dir: str = dataclasses.field(
		default_factory=lambda: os.environ.get(
			'UV_CACHE_DIR',
			str(pathlib.Path.cwd() / '.uv-cache'),
		)
	)
	# UV_ARGS, default: offline + upgrade; split on whitespace
	uv_args: list[str] = dataclasses.field(
		default_factory=lambda: os.environ.get(
			'UV_ARGS',
			'--no-index -U',
		).split(),
	)
	# WHL_CACHE_UPDATE, default: false — force re-download of the wheel cache
	whl_cache_update: Optional[bool] = dataclasses.field(
		default_factory=lambda: os.environ.get('WHL_CACHE_UPDATE', json.dumps(False)) in [json.dumps(True)]
	)
	# UV_COMPILE_ALLOW_INDEX, default: false — permit network during compile
	uv_compile_allow_index: bool = dataclasses.field(
		default_factory=lambda: os.environ.get('UV_COMPILE_ALLOW_INDEX', json.dumps(False)) in [json.dumps(True)]
	)
	# VENV_PARTIAL, default: false — incrementally update an existing venv
	venv_partial: bool = dataclasses.field(
		default_factory=lambda: os.environ.get('VENV_PARTIAL', json.dumps(False)) in [json.dumps(True)]
	)

	@classmethod
	def get(
		cls,
		base_dir: Optional[pathlib.Path] = None,
	) -> 'Self':
		"""Build settings rooted at ``base_dir`` (default: cwd); ENV_PATH
		overrides the venv location, siblings are derived from it."""
		if base_dir is None:
			base_dir = pathlib.Path.cwd()

		env_path: Optional[pathlib.Path] = None
		if 'ENV_PATH' in os.environ:
			env_path = pathlib.Path(os.environ['ENV_PATH'])
		else:
			env_path = base_dir / '.venv'

		# wheel cache lives next to the venv, not inside it
		whl_cache_path = env_path.parent / '.venv-whl-cache'
		python_path = env_path / 'bin' / 'python3'

		return cls(
			base_dir=base_dir,
			env_path=env_path,
			whl_cache_path=whl_cache_path,
			python_path=python_path,
		)
class requirements_name_get_t:
	@dataclasses.dataclass
	class res_t:
		# companion .in file (uncompiled requirement specs)
		not_compiled: pathlib.Path
		# pinned requirements file (with hashes)
		compiled: pathlib.Path
		# feature-set key, e.g. 'early_lint_3_11'
		name: str


def requirements_name_get(
	source_dir: pathlib.Path,
	python_version: Optional[str],
	features: list[str],
	requirements: dict[str, pathlib.Path],
) -> requirements_name_get_t.res_t:
	"""Resolve which requirements files serve a given feature set.

	The lookup key is the sorted feature names joined with '_', suffixed with
	the python version ('.' -> '_').  When ``requirements`` maps that key to an
	explicit file it is used; otherwise ``source_dir/requirements.txt`` is the
	fallback.  The '.in' companion shares the compiled file's stem.

	(Fix: dropped the unused ``requirements_in`` local from the original.)
	"""
	requirements_python_version: Optional[str] = None
	if python_version is not None:
		requirements_python_version = python_version.replace('.', '_')

	requirements_name = '_'.join(sorted(features))
	if requirements_python_version:
		requirements_name += '_' + requirements_python_version

	if requirements_name in requirements:
		requirements_path = requirements[requirements_name]
	else:
		requirements_path = source_dir / 'requirements.txt'

	requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')

	return requirements_name_get_t.res_t(
		not_compiled=requirements_path_in,
		compiled=requirements_path,
		name=requirements_name,
	)
class packaging_t:
	"""Tiny stand-in for the ``packaging`` library: name canonicalisation and
	minimal parsing of wheel filenames and pinned requirement lines."""

	class constants_t:
		# runs of '-', '_', '.' collapse to a single '-'
		canonicalize_re: typing.ClassVar[re.Pattern[str]] = re.compile(r'[-_.]+')
		# matches 'name==version' at the start of a requirements line
		req_spec_re: typing.ClassVar[re.Pattern[str]] = re.compile(r'^([a-zA-Z0-9._-]+)==([^\s;]+)')

	@dataclasses.dataclass
	class pkg_id_t:
		name: str
		version: str

	@staticmethod
	def canonicalize_name(name: str) -> str:
		"""PEP 503 style canonical form: separators collapsed, lowercased."""
		collapsed = packaging_t.constants_t.canonicalize_re.sub('-', name)
		return collapsed.lower()

	@staticmethod
	def parse_whl_name_version(filename: str) -> Optional['packaging_t.pkg_id_t']:
		"""Extract (name, version) from a wheel filename, else None."""
		if not filename.endswith('.whl'):
			return None
		parts = filename.split('-')
		if len(parts) < 3:
			return None
		return packaging_t.pkg_id_t(
			name=packaging_t.canonicalize_name(parts[0]),
			version=parts[1],
		)

	@staticmethod
	def parse_req_spec(line: str) -> Optional['packaging_t.pkg_id_t']:
		"""Extract (name, version) from a 'name==version' spec line, else None."""
		matched = packaging_t.constants_t.req_spec_re.match(line)
		if matched is None:
			return None
		return packaging_t.pkg_id_t(
			name=packaging_t.canonicalize_name(matched.group(1)),
			version=matched.group(2),
		)
def whl_cache_download(
	whl_cache_path: pathlib.Path,
	requirements_path: pathlib.Path,
	uv_python_version: list[str],
	pip_find_links_args: list[str],
) -> None:
	"""Fill ``whl_cache_path`` with wheels for every pinned requirement that is
	not already cached, using ``pip download --only-binary``.

	Cache hits are matched on (canonical name, version) parsed from existing
	wheel filenames; hash/comment/continuation lines in the requirements file
	are skipped.
	"""
	whl_cache_path.mkdir(parents=True, exist_ok=True)

	# index of wheels already present, keyed by (canonical name, version)
	cached_pkgs: set[tuple[str, str]] = set()
	for whl in whl_cache_path.glob('*.whl'):
		parsed = packaging_t.parse_whl_name_version(whl.name)
		if parsed is not None:
			cached_pkgs.add((parsed.name, parsed.version))

	missing_reqs: list[str] = []

	with io.open(requirements_path, 'r') as f:
		for line in f:
			stripped = line.strip()
			# skip blanks, comments and `--hash=...` continuation lines
			if not stripped or stripped.startswith('#') or stripped.startswith('--hash'):
				continue
			# drop a trailing line-continuation backslash
			spec = stripped.rstrip(' \\')
			if spec.startswith('#'):
				continue
			parsed = packaging_t.parse_req_spec(spec)
			if parsed is not None and (parsed.name, parsed.version) in cached_pkgs:
				logger.info(dict(msg='cached', pkg='%s==%s' % (parsed.name, parsed.version)))
				continue
			missing_reqs.append(spec)

	if not missing_reqs:
		logger.info(dict(msg='all wheels cached, skipping pip download'))
		return

	logger.info(dict(msg='downloading missing wheels', count=len(missing_reqs), pkgs=missing_reqs))

	# delete=False: the file must survive for the pip subprocess below
	with tempfile.NamedTemporaryFile(mode='w', prefix='requirements_missing_', suffix='.txt', delete=False) as f:
		f.write('\n'.join(missing_reqs))
		f.flush()
		missing_req_path = f.name

	try:
		cmd = [
			sys.executable, '-m', 'pip', 'download', '--only-binary=:all:',
			*uv_python_version, *pip_find_links_args,
			'-r', missing_req_path,
			'-d', str(whl_cache_path),
		]
		logger.info(dict(cmd=cmd))
		subprocess.check_call(cmd)
	finally:
		# always remove the temp requirements file, even if pip fails
		os.unlink(missing_req_path)
def check_host_prerequisites() -> None:
	"""Exit with status 1 unless both `pip` and `uv` are runnable as modules
	of the current interpreter (`python -m <mod> --version`)."""
	for module_name in ('pip', 'uv'):
		probe_cmd = [sys.executable, '-m', module_name, '--version']
		try:
			subprocess.check_call(
				probe_cmd,
				stdout=subprocess.DEVNULL,
				stderr=subprocess.DEVNULL,
			)
		except (subprocess.CalledProcessError, FileNotFoundError):
			logger.error('[bootstrap] %s -m %s is not available on the host system' % (sys.executable, module_name))
			sys.exit(1)
def env_bootstrap(
	bootstrap_settings: BootstrapSettings,
	pyproject: PyProject,
) -> None:
	"""Create/refresh the project venv: compile pinned requirements with uv,
	populate the local wheel cache, create the venv and install into it.

	Side effects: writes requirements files next to the pyproject, downloads
	wheels into the cache dir, and runs uv/pip subprocesses.
	NOTE(review): indentation reconstructed from a whitespace-mangled source;
	the nesting of the post-compile cache refresh should be confirmed
	against the original file.
	"""
	check_host_prerequisites()

	pip_find_links: list[pathlib.Path] = []

	if not pyproject.pip_find_links is None:
		pip_find_links.extend(pyproject.pip_find_links)

	# flatten into repeated ['-f', <dir>] argument pairs
	pip_find_links_args = sum(
		[
			[
				'-f',
				str(o),
			]
			for o in pip_find_links
		],
		cast(list[str], []),
	)

	features: list[str] = []

	if pyproject.early_features:
		features.extend(pyproject.early_features)

	requirements_name_get_res = requirements_name_get(
		python_version=bootstrap_settings.python_version,
		features=features,
		requirements=pyproject.requirements,
		source_dir=pyproject.path.parent,
	)
	requirements_path = requirements_name_get_res.compiled

	# seed specs: build tooling plus the project's early-feature dependencies
	requirements_in: list[str] = []
	requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])

	if pyproject.early_features:
		early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))

		logger.info(
			dict(
				requirements_name_get_res=requirements_name_get_res,
				early_dependencies=early_dependencies,
			)
		)

		requirements_in.extend(early_dependencies)
		# if len(early_dependencies) > 0:
		#     subprocess.check_call([
		#         bootstrap_settings.python_path,
		#         '-m',
		#         'uv', 'pip', 'install',
		#         *pip_find_links_args,
		#         # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'),
		#         *bootstrap_settings.uv_args,
		#         *early_dependencies,
		#     ])

	# uv `pip compile` and `venv` spell the python version flag differently
	uv_python_version: list[str] = []
	venv_python_version: list[str] = []

	if not bootstrap_settings.python_version is None:
		uv_python_version.extend(
			[
				# '-p',
				'--python-version',
				bootstrap_settings.python_version,
			]
		)
		venv_python_version.extend(
			[
				'-p',
				# '--python-version',
				bootstrap_settings.python_version,
			]
		)

	logger.info('[bootstrap] step 1/5: compile requirements')

	needs_compile = not requirements_path.exists()

	constraint_args: list[str] = []
	if bootstrap_settings.venv_partial and requirements_path.exists():
		# partial mode: recompile but pin to current resolution via -c
		logger.info('[bootstrap] VENV_PARTIAL: recompiling with existing requirements.txt as constraints')
		needs_compile = True
		constraint_args = ['-c', str(requirements_path)]

	if (not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update) and requirements_path.exists():
		whl_cache_download(
			whl_cache_path=bootstrap_settings.whl_cache_path,
			requirements_path=requirements_path,
			uv_python_version=uv_python_version,
			pip_find_links_args=pip_find_links_args,
		)

	cache_find_links_args: list[str] = []
	if bootstrap_settings.whl_cache_path.exists():
		cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]

	if needs_compile:
		# compile specs -> hashed pins; output goes to a temp file in the
		# destination dir and is moved into place only on success
		with tempfile.NamedTemporaryFile(
			mode='w',
			prefix='requirements',
			suffix='.in',
		) as f_in, tempfile.NamedTemporaryFile(
			mode='w',
			prefix='requirements',
			suffix='.txt',
			dir=requirements_path.parent,
			delete=False,
		) as f_out:
			f_in.write('\n'.join(requirements_in))
			f_in.flush()

			uv_compile_args = bootstrap_settings.uv_args
			if bootstrap_settings.uv_compile_allow_index:
				# allow network resolution: drop offline/upgrade flags
				uv_compile_args = [o for o in uv_compile_args if o not in ('--no-index', '-U', '--upgrade')]
			if len(constraint_args) > 0:
				# constrained recompile must not upgrade past the constraints
				uv_compile_args = [o for o in uv_compile_args if o not in ('-U', '--upgrade')]

			cmd = [
				'uv',
				'--cache-dir', bootstrap_settings.uv_cache_dir,
				'pip', 'compile',
				*uv_python_version,
				'--generate-hashes',
				*pip_find_links_args,
				*cache_find_links_args,
				*constraint_args,
				*uv_compile_args,
				'-o', f_out.name,
				f_in.name,
			]
			logger.info(dict(cmd=cmd))
			try:
				subprocess.check_call(cmd)
				# atomic publish of the freshly compiled pins
				os.replace(f_out.name, str(requirements_path))
			except subprocess.CalledProcessError:
				os.unlink(f_out.name)
				raise

	# refresh the wheel cache now that pins definitely exist
	if not bootstrap_settings.whl_cache_path.exists() or bootstrap_settings.whl_cache_update:
		whl_cache_download(
			whl_cache_path=bootstrap_settings.whl_cache_path,
			requirements_path=requirements_path,
			uv_python_version=uv_python_version,
			pip_find_links_args=pip_find_links_args,
		)

	if bootstrap_settings.whl_cache_path.exists():
		cache_find_links_args = ['-f', str(bootstrap_settings.whl_cache_path)]

	if bootstrap_settings.venv_partial and bootstrap_settings.env_path.exists():
		logger.info('[bootstrap] VENV_PARTIAL: skipping venv creation (already exists)')
	else:
		subprocess.check_call([
			'uv',
			'--cache-dir', bootstrap_settings.uv_cache_dir,
			# `uv venv` rejects install-only flags
			*[o for o in bootstrap_settings.uv_args if o not in ['-U', '--upgrade', '--no-index']],
			'venv',
			*venv_python_version,
			*cache_find_links_args,
			str(bootstrap_settings.env_path),
		])

	# install the pinned set into the venv, hash-verified
	cmd = [
		'uv',
		'--cache-dir', bootstrap_settings.uv_cache_dir,
		'pip', 'install',
		*uv_python_version,
		*cache_find_links_args,
		'-p', str(bootstrap_settings.python_path),
		'--require-hashes',
		*bootstrap_settings.uv_args,
		'-r', str(requirements_path),
	]
	logger.info(dict(cmd=cmd))
	subprocess.check_call(cmd)

	if bootstrap_settings.pip_check_conflicts:
		# sanity check the resulting environment for dependency conflicts
		subprocess.check_call(
			[
				bootstrap_settings.python_path,
				'-m',
				'online.fxreader.pr34.commands',
				'pip_check_conflicts',
			]
		)
def paths_equal(a: pathlib.Path | str, b: pathlib.Path | str) -> bool:
return os.path.abspath(str(a)) == os.path.abspath(str(b))
def run(
	d: Optional[pathlib.Path] = None,
	cli_path: Optional[pathlib.Path] = None,
) -> None:
	"""Bootstrap the venv if needed, then replace this process with the venv
	python running ``cli_path`` (arguments forwarded).

	Does not return: finishes via ``os.execv`` either to re-enter itself under
	the venv interpreter or to hand off to the CLI.
	"""
	if cli_path is None:
		cli_path = pathlib.Path(__file__).parent / 'cli.py'

	if d is None:
		d = pathlib.Path(__file__).parent / 'pyproject.toml'

	bootstrap_settings = BootstrapSettings.get()

	pyproject: PyProject = pyproject_load(d)

	logging.basicConfig(
		level=logging.INFO,
		format='%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s',
	)

	if not bootstrap_settings.env_path.exists() or bootstrap_settings.venv_partial:
		env_bootstrap(
			bootstrap_settings=bootstrap_settings,
			pyproject=pyproject,
		)

	logger.info([sys.executable, sys.argv, bootstrap_settings.python_path])

	# not yet inside the venv interpreter: re-exec this script under it
	if not paths_equal(sys.executable, bootstrap_settings.python_path):
		os.execv(
			str(bootstrap_settings.python_path),
			[
				str(bootstrap_settings.python_path),
				*sys.argv,
			],
		)

	# already the venv python: hand off to the real CLI entry point
	os.execv(
		str(bootstrap_settings.python_path),
		[
			str(bootstrap_settings.python_path),
			str(cli_path),
			*sys.argv[1:],
		],
	)


if __name__ == '__main__':
	# paths patched relative to cli_bootstrap.py: this copy lives under
	# meson/online/.../archlinux/ and points back at the shared python/ tree
	run(
		d=pathlib.Path(__file__).parent / 'pyproject.common.toml',
		cli_path=pathlib.Path(__file__).parent / '..' / '..' / '..' / '..' / '..' / '..' / 'python' / 'cli.py',
	)

@ -0,0 +1,55 @@
# Project name is read from pyproject.toml at setup time via the venv's
# `toml` CLI (toml-cli package), keeping meson.build and pyproject.toml in sync.
project(
  run_command(
    '.venv/bin/toml', 'get', '--toml-path', 'pyproject.toml', 'project.name',
    check: true
  ).stdout().strip('\n'),
  version: '0.1.5.17+27.23',
)

install_path = get_option('install_path')
message('install_path = ' + install_path)

modes = get_option('modes')

fs = import('fs')

assert(modes.length() == 1, 'only one mode allowed')
mode = modes[0]

# python sources live relative to this meson.build
python_source_root = meson.project_source_root() / '../../../../../../python'
project_root = '.'

# placeholder branch: nothing to do in plain 'meson' mode yet
if mode == 'meson'
endif

if mode == 'pyproject'
  py = import('python').find_installation(pure: false)
  # dotted project name doubles as the package directory path
  namespace_path = meson.project_name().replace('.', '/')
  install_root = py.get_install_dir(pure: true)

  # find .py files only under the archlinux namespace
  pkg_sources = run_command(
    'find', python_source_root / namespace_path, '-iname', '*.py',
    check: true
  ).stdout().strip().split('\n')

  foreach src : pkg_sources
    # fs.relative_to requires meson >= 1.3
    rel = fs.relative_to(src, python_source_root)
    install_data(
      src,
      install_dir: install_root / fs.parent(rel),
      install_tag: 'python-runtime',
    )
  endforeach

  # ship the PEP 561 marker so type checkers pick up inline annotations
  install_data(
    python_source_root / namespace_path / 'py.typed',
    install_dir: install_root / namespace_path,
    install_tag: 'python-runtime',
  )
endif

@ -0,0 +1,2 @@
# Build mode: 'pyproject' installs the python sources; 'meson' is a placeholder.
option('modes', type: 'array', choices: ['meson', 'pyproject'], value: ['pyproject'])
# Optional extra install prefix consumed by meson.build (currently only logged).
option('install_path', type : 'string', value: '')

@ -0,0 +1,92 @@
# pyproject for the archlinux sub-package, built via meson-python against the
# shared sources under ../../../../../../python/.
[project]
name = 'online.fxreader.pr34.commands_typed.archlinux'
description = 'Arch Linux package management tools'
requires-python = '>= 3.10'
maintainers = [
	{ name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' },
]
classifiers = [
	'Programming Language :: Python',
]
version = '0.9'
dynamic = []
dependencies = [
	'pydantic',
	'pydantic-settings',
]

[project.optional-dependencies]
# libsolv bindings for dependency resolution
solv = [
	'solv==0.7.35',
]
lint = [
	'tomli',
	'mypy',
	'pyright',
	'pyrefly',
	'ruff',
]

[project.scripts]
online-fxreader-pr34-archlinux = 'online.fxreader.pr34.commands_typed.archlinux.cli:main'

# consumed by m.py (bootstrap): features installed before the build itself
[tool.online-fxreader-pr34]
early_features = ['default', 'solv', 'lint']

[build-system]
requires = ["meson-python", "pybind11"]
build-backend = "mesonpy"

[tool.ruff]
line-length = 160
target-version = 'py310'
# sources live in the shared python/ tree, six levels up
include = [
	'../../../../../../python/online/fxreader/pr34/commands_typed/archlinux/**/*.py',
]
exclude = [
	'.venv',
	'*/tests/res/*',
	'*/tests/experiments/*',
]

[tool.ruff.format]
quote-style = 'single'
indent-style = 'tab'
skip-magic-trailing-comma = false

[tool.ruff.lint]
ignore = [
	'E402', 'E722', 'E741', 'W191', 'E101', 'E501', 'I001', 'F401', 'E714',
	'E713', 'E731', 'E712', 'E703', 'F841', 'F403', 'W292',
]
select = ['E', 'F', 'I', 'W', 'INT']

[tool.ruff.lint.isort]
detect-same-package = true
relative-imports-order = "closest-to-furthest"
split-on-trailing-comma = true
section-order = [
	"future",
	"standard-library", "third-party", "first-party", "local-folder"
]
force-wrap-aliases = true

[tool.pyrefly]
project-includes = [
	'../../../../../../python/online/fxreader/pr34/commands_typed/archlinux/**/*.py',
]
project-excludes = [
	'.venv',
	'*/tests/res',
	'*/tests/experiments',
]
# shared sources plus local stub packages for untyped third-party deps
search-path = [
	'../../../../../../python',
	'../../../../../../mypy-stubs/types-debugpy',
	'../../../../../../mypy-stubs/types-solv',
	'../../../../../../mypy-stubs/marisa-trie-types',
]
python-version = '3.13'

@ -0,0 +1,145 @@
# This file was autogenerated by uv via the following command:
# uv --cache-dir /app/meson/online/fxreader/pr34/commands_typed/archlinux/.uv-cache pip compile --python-version 3.14 --generate-hashes -c /app/meson/online/fxreader/pr34/commands_typed/archlinux/requirements.txt -o /app/meson/online/fxreader/pr34/commands_typed/archlinux/requirementsa_w7bg1p.txt /tmp/requirements7q27fon2.in
annotated-types==0.7.0 \
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53
# via
# -c requirements.txt
# pydantic
build==1.4.0 \
--hash=sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
librt==0.8.1 \
--hash=sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3
# via
# -c requirements.txt
# mypy
meson==1.10.2 \
--hash=sha256:5f84ef186e6e788d9154db63620fc61b3ece69f643b94b43c8b9203c43d89b36
# via
# -c requirements.txt
# meson-python
meson-python==0.19.0 \
--hash=sha256:67b5906c37404396d23c195e12c8825506074460d4a2e7083266b845d14f0298
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
mypy==1.19.1 \
--hash=sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
mypy-extensions==1.1.0 \
--hash=sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505
# via
# -c requirements.txt
# mypy
nodeenv==1.10.0 \
--hash=sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827
# via
# -c requirements.txt
# pyright
packaging==26.0 \
--hash=sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529
# via
# -c requirements.txt
# build
# meson-python
# pyproject-metadata
pathspec==1.0.4 \
--hash=sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723
# via
# -c requirements.txt
# mypy
pip==23.3.2 \
--hash=sha256:5052d7889c1f9d05224cd41741acb7c5d6fa735ab34e339624a614eaaa7e7d76
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
pybind11==3.0.2 \
--hash=sha256:f8a6500548919cc33bcd220d5f984688326f574fa97f1107f2f4fdb4c6fb019f
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
pydantic==2.12.5 \
--hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
# pydantic-settings
pydantic-core==2.41.5 \
--hash=sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375
# via
# -c requirements.txt
# pydantic
pydantic-settings==2.13.1 \
--hash=sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
pyproject-hooks==1.2.0 \
--hash=sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913
# via
# -c requirements.txt
# build
pyproject-metadata==0.11.0 \
--hash=sha256:85bbecca8694e2c00f63b492c96921d6c228454057c88e7c352b2077fcaa4096
# via
# -c requirements.txt
# meson-python
pyrefly==0.56.0 \
--hash=sha256:ec6ab3f9e2c03bae8dfa520f52778f47b6762020929a664177d36aa3b941db22
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
pyright==1.1.408 \
--hash=sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
python-dotenv==1.2.2 \
--hash=sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a
# via
# -c requirements.txt
# pydantic-settings
ruff==0.15.6 \
--hash=sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
setuptools==82.0.1 \
--hash=sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
solv==0.7.35 \
--hash=sha256:d5606cd8e4bb1b3f30bbff592632fbc47c0e4f79d42869eff1af78dca8e6fd47
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
tomli==2.4.0 \
--hash=sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in
typing-extensions==4.15.0 \
--hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548
# via
# -c requirements.txt
# mypy
# pydantic
# pydantic-core
# pyright
# typing-inspection
typing-inspection==0.4.2 \
--hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7
# via
# -c requirements.txt
# pydantic
# pydantic-settings
uv==0.10.10 \
--hash=sha256:e42e9e4a196ef75d1089715574eb1fe9bb62d390da05c6c8b36650a4de23d59f
# via
# -c requirements.txt
# -r /tmp/requirements7q27fon2.in

@ -10,19 +10,23 @@ import os
import logging
import typing
from typing import (
Optional,
Any,
)
from typing_extensions import (
Self,
BinaryIO,
)
if typing.TYPE_CHECKING:
from typing_extensions import (
Self,
BinaryIO,
)
logger = logging.getLogger(__name__)
def toml_load(f: BinaryIO) -> Any:
def toml_load(f: 'BinaryIO') -> Any:
try:
import tomllib
@ -138,7 +142,7 @@ class BootstrapSettings:
def get(
cls,
base_dir: Optional[pathlib.Path] = None,
) -> Self:
) -> 'Self':
if base_dir is None:
base_dir = pathlib.Path.cwd()

@ -1,6 +1,6 @@
project(
run_command(
'tomlq', '-r', '.project.name', 'pyproject.toml',
'.venv/bin/toml', 'get', '--toml-path', 'pyproject.toml', 'project.name',
check: true
).stdout().strip('\n'),
# 'online.fxreader.uv',

@ -0,0 +1,263 @@
[project]
description = 'set of tools for software development'
requires-python = '>= 3.10'
maintainers = [
{ name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' },
]
classifiers = [
'Programming Language :: Python',
]
name = 'online.fxreader.pr34'
# version = '0.1.5.16+27.7'
dynamic = [
'version',
]
dependencies = [
#"-r requirements.txt",
'mypy',
'marisa-trie',
'pydantic',
'pydantic-settings',
'tomlkit',
'pip==23.3.2',
]
[project.optional-dependencies]
crypto = [
'cryptography',
]
early = [
'numpy',
'cryptography',
'yq',
'toml-cli',
'ninja',
'patchelf',
# 'tomlkit',
]
archlinux = [
'solv==0.7.35',
]
lint = [
'tomli',
# 'tomllib',
'mypy',
'pyright',
'pyrefly',
'ruff',
# 'tomlkit',
]
[project.scripts]
online-fxreader-pr34-commands = 'online.fxreader.pr34.commands:commands_cli'
[tool.online-fxreader-pr34]
early_features = ['default', 'early', 'lint']
modules = [
{ name = 'online.fxreader.pr34', tool = { 'online-fxreader-pr34' = { early_features = ['default', 'early', 'lint'] } } },
]
[build-system]
requires = ["meson-python", "pybind11"]
build-backend = "mesonpy"
[tool.ruff]
line-length = 160
target-version = 'py310'
# builtins = ['_', 'I', 'P']
include = [
# 'follow_the_leader/**/*.py',
#'*.py',
# '*.recipe',
'*.py',
'online/**/*.py',
'online/**/*.pyi',
]
exclude = [
'.venv',
]
[tool.ruff.format]
quote-style = 'single'
indent-style = 'tab'
skip-magic-trailing-comma = false
[tool.ruff.lint]
ignore = [
'E402', 'E722', 'E741', 'W191', 'E101', 'E501', 'I001', 'F401', 'E714',
'E713',
# remove lambdas later on
'E731',
# fix this too
'E712',
'E703',
# remove unused variables, or fix a bug
'F841',
# fix * imports
'F403',
# don't care about trailing new lines
'W292',
]
select = ['E', 'F', 'I', 'W', 'INT']
[tool.ruff.lint.isort]
detect-same-package = true
# extra-standard-library = ["aes", "elementmaker", "encodings"]
# known-first-party = ["calibre_extensions", "calibre_plugins", "polyglot"]
# known-third-party = ["odf", "qt", "templite", "tinycss", "css_selectors"]
relative-imports-order = "closest-to-furthest"
split-on-trailing-comma = true
section-order = [
# '__python__',
"future",
"standard-library", "third-party", "first-party", "local-folder"
]
force-wrap-aliases = true
# [tool.ruff.lint.isort.sections]
# '__python__' = ['__python__']
[tool.pylsp-mypy]
enabled = false
[tool.pyright]
include = [
#'../../../../../follow_the_leader/views2/payments.py',
#'../../../../../follow_the_leader/logic/payments.py',
#'../../../../../follow_the_leader/logic/paypal.py',
'online/fxreader/pr34/commands_typed/**/*.py',
]
# stubPath = '../mypy-stubs'
extraPaths = [
'.',
'../mypy-stubs',
'../mypy-stubs/types-debugpy',
'../mypy-stubs/types-solv',
'../mypy-stubs/marisa-trie-types',
# '../../../../../',
]
#strict = ["src"]
analyzeUnannotatedFunctions = true
disableBytesTypePromotions = true
strictParameterNoneValue = true
enableTypeIgnoreComments = true
enableReachabilityAnalysis = true
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
deprecateTypingAliases = false
enableExperimentalFeatures = false
reportMissingTypeStubs = "error"
reportMissingModuleSource = "warning"
reportInvalidTypeForm = "error"
reportMissingImports = "error"
reportUndefinedVariable = "error"
reportAssertAlwaysTrue = "error"
reportInvalidStringEscapeSequence = "error"
reportInvalidTypeVarUse = "error"
reportSelfClsParameterName = "error"
reportUnsupportedDunderAll = "error"
reportUnusedExpression = "error"
reportWildcardImportFromLibrary = "error"
reportAbstractUsage = "error"
reportArgumentType = "error"
reportAssertTypeFailure = "error"
reportAssignmentType = "error"
reportAttributeAccessIssue = "error"
reportCallIssue = "error"
reportGeneralTypeIssues = "error"
reportInconsistentOverload = "error"
reportIndexIssue = "error"
reportInvalidTypeArguments = "error"
reportNoOverloadImplementation = "error"
reportOperatorIssue = "error"
reportOptionalSubscript = "error"
reportOptionalMemberAccess = "error"
reportOptionalCall = "error"
reportOptionalIterable = "error"
reportOptionalContextManager = "error"
reportOptionalOperand = "error"
reportRedeclaration = "error"
reportReturnType = "error"
reportTypedDictNotRequiredAccess = "error"
reportPrivateImportUsage = "error"
reportUnboundVariable = "error"
reportUnhashable = "error"
reportUnusedCoroutine = "error"
reportUnusedExcept = "error"
reportFunctionMemberAccess = "error"
reportIncompatibleMethodOverride = "error"
reportIncompatibleVariableOverride = "error"
reportOverlappingOverload = "error"
reportPossiblyUnboundVariable = "error"
reportConstantRedefinition = "error"
#reportDeprecated = "error"
reportDeprecated = "warning"
reportDuplicateImport = "error"
reportIncompleteStub = "error"
reportInconsistentConstructor = "error"
reportInvalidStubStatement = "error"
reportMatchNotExhaustive = "error"
reportMissingParameterType = "error"
reportMissingTypeArgument = "error"
reportPrivateUsage = "error"
reportTypeCommentUsage = "error"
reportUnknownArgumentType = "error"
reportUnknownLambdaType = "error"
reportUnknownMemberType = "error"
reportUnknownParameterType = "error"
reportUnknownVariableType = "error"
#reportUnknownVariableType = "warning"
reportUnnecessaryCast = "error"
reportUnnecessaryComparison = "error"
reportUnnecessaryContains = "error"
#reportUnnecessaryIsInstance = "error"
reportUnnecessaryIsInstance = "warning"
reportUnusedClass = "error"
#reportUnusedImport = "error"
reportUnusedImport = "none"
# reportUnusedFunction = "error"
reportUnusedFunction = "warning"
#reportUnusedVariable = "error"
reportUnusedVariable = "warning"
reportUntypedBaseClass = "error"
reportUntypedClassDecorator = "error"
reportUntypedFunctionDecorator = "error"
reportUntypedNamedTuple = "error"
reportCallInDefaultInitializer = "none"
reportImplicitOverride = "none"
reportImplicitStringConcatenation = "none"
reportImportCycles = "none"
reportMissingSuperCall = "none"
reportPropertyTypeMismatch = "none"
reportShadowedImports = "none"
reportUninitializedInstanceVariable = "none"
reportUnnecessaryTypeIgnoreComment = "none"
reportUnusedCallResult = "none"
[tool.pyrefly]
project-includes = [
'online/fxreader/pr34/commands_typed/**/*.py',
]
project-excludes = [
'.venv',
'online/fxreader/pr34/commands_typed/archlinux/tests/res',
]
search-path = [
'.',
'../mypy-stubs/types-debugpy',
'../mypy-stubs/types-solv',
'../mypy-stubs/marisa-trie-types',
]
python-version = '3.13'

236
python/pyproject.toml.bak Normal file

@ -0,0 +1,236 @@
[project]
description = 'set of tools for software development'
requires-python = '>= 3.10'
maintainers = [
{ name = 'Siarhei Siniak', email = 'siarheisiniak@gmail.com' },
]
classifiers = [
'Programming Language :: Python',
]
name = 'online.fxreader.pr34'
# version = '0.1.5.16+27.7'
dynamic = [
'version',
]
dependencies = [
#"-r requirements.txt",
'mypy',
'marisa-trie',
'pydantic',
'pydantic-settings',
'tomlkit',
]
[project.optional-dependencies]
crypto = [
'cryptography',
]
early = [
'numpy',
'cryptography',
# 'tomlkit',
]
archlinux = [
'solv',
]
lint = [
'tomli',
# 'tomllib',
'mypy',
'pyright',
'ruff',
# 'tomlkit',
]
[tool.online-fxreader-pr34]
early_features = ['default', 'early', 'lint', 'archlinux',]
[build-system]
requires = ["meson-python", "pybind11"]
build-backend = "mesonpy"
[project.scripts]
online-fxreader-pr34-commands = 'online.fxreader.pr34.commands:commands_cli'
[tool.ruff]
line-length = 160
target-version = 'py310'
# builtins = ['_', 'I', 'P']
include = [
# 'follow_the_leader/**/*.py',
#'*.py',
# '*.recipe',
'*.py',
'online/**/*.py',
'online/**/*.pyi',
]
exclude = [
'.venv',
]
[tool.ruff.format]
quote-style = 'single'
indent-style = 'tab'
skip-magic-trailing-comma = false
[tool.ruff.lint]
ignore = [
'E402', 'E722', 'E741', 'W191', 'E101', 'E501', 'I001', 'F401', 'E714',
'E713',
# remove lambdas later on
'E731',
# fix this too
'E712',
'E703',
# remove unused variables, or fix a bug
'F841',
# fix * imports
'F403',
# don't care about trailing new lines
'W292',
]
select = ['E', 'F', 'I', 'W', 'INT']
[tool.ruff.lint.isort]
detect-same-package = true
# extra-standard-library = ["aes", "elementmaker", "encodings"]
# known-first-party = ["calibre_extensions", "calibre_plugins", "polyglot"]
# known-third-party = ["odf", "qt", "templite", "tinycss", "css_selectors"]
relative-imports-order = "closest-to-furthest"
split-on-trailing-comma = true
section-order = [
# '__python__',
"future",
"standard-library", "third-party", "first-party", "local-folder"
]
force-wrap-aliases = true
# [tool.ruff.lint.isort.sections]
# '__python__' = ['__python__']
[tool.pylsp-mypy]
enabled = false
[tool.pyright]
include = [
#'../../../../../follow_the_leader/views2/payments.py',
#'../../../../../follow_the_leader/logic/payments.py',
#'../../../../../follow_the_leader/logic/paypal.py',
'online/fxreader/pr34/commands_typed/**/*.py',
]
# stubPath = '../mypy-stubs'
extraPaths = [
'.',
'../mypy-stubs',
'../mypy-stubs/types-debugpy',
'../mypy-stubs/marisa-trie-types',
# '../../../../../',
]
#strict = ["src"]
analyzeUnannotatedFunctions = true
disableBytesTypePromotions = true
strictParameterNoneValue = true
enableTypeIgnoreComments = true
enableReachabilityAnalysis = true
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
deprecateTypingAliases = false
enableExperimentalFeatures = false
reportMissingTypeStubs = "error"
reportMissingModuleSource = "warning"
reportInvalidTypeForm = "error"
reportMissingImports = "error"
reportUndefinedVariable = "error"
reportAssertAlwaysTrue = "error"
reportInvalidStringEscapeSequence = "error"
reportInvalidTypeVarUse = "error"
reportSelfClsParameterName = "error"
reportUnsupportedDunderAll = "error"
reportUnusedExpression = "error"
reportWildcardImportFromLibrary = "error"
reportAbstractUsage = "error"
reportArgumentType = "error"
reportAssertTypeFailure = "error"
reportAssignmentType = "error"
reportAttributeAccessIssue = "error"
reportCallIssue = "error"
reportGeneralTypeIssues = "error"
reportInconsistentOverload = "error"
reportIndexIssue = "error"
reportInvalidTypeArguments = "error"
reportNoOverloadImplementation = "error"
reportOperatorIssue = "error"
reportOptionalSubscript = "error"
reportOptionalMemberAccess = "error"
reportOptionalCall = "error"
reportOptionalIterable = "error"
reportOptionalContextManager = "error"
reportOptionalOperand = "error"
reportRedeclaration = "error"
reportReturnType = "error"
reportTypedDictNotRequiredAccess = "error"
reportPrivateImportUsage = "error"
reportUnboundVariable = "error"
reportUnhashable = "error"
reportUnusedCoroutine = "error"
reportUnusedExcept = "error"
reportFunctionMemberAccess = "error"
reportIncompatibleMethodOverride = "error"
reportIncompatibleVariableOverride = "error"
reportOverlappingOverload = "error"
reportPossiblyUnboundVariable = "error"
reportConstantRedefinition = "error"
#reportDeprecated = "error"
reportDeprecated = "warning"
reportDuplicateImport = "error"
reportIncompleteStub = "error"
reportInconsistentConstructor = "error"
reportInvalidStubStatement = "error"
reportMatchNotExhaustive = "error"
reportMissingParameterType = "error"
reportMissingTypeArgument = "error"
reportPrivateUsage = "error"
reportTypeCommentUsage = "error"
reportUnknownArgumentType = "error"
reportUnknownLambdaType = "error"
reportUnknownMemberType = "error"
reportUnknownParameterType = "error"
reportUnknownVariableType = "error"
#reportUnknownVariableType = "warning"
reportUnnecessaryCast = "error"
reportUnnecessaryComparison = "error"
reportUnnecessaryContains = "error"
#reportUnnecessaryIsInstance = "error"
reportUnnecessaryIsInstance = "warning"
reportUnusedClass = "error"
#reportUnusedImport = "error"
reportUnusedImport = "none"
# reportUnusedFunction = "error"
reportUnusedFunction = "warning"
#reportUnusedVariable = "error"
reportUnusedVariable = "warning"
reportUntypedBaseClass = "error"
reportUntypedClassDecorator = "error"
reportUntypedFunctionDecorator = "error"
reportUntypedNamedTuple = "error"
reportCallInDefaultInitializer = "none"
reportImplicitOverride = "none"
reportImplicitStringConcatenation = "none"
reportImportCycles = "none"
reportMissingSuperCall = "none"
reportPropertyTypeMismatch = "none"
reportShadowedImports = "none"
reportUninitializedInstanceVariable = "none"
reportUnnecessaryTypeIgnoreComment = "none"
reportUnusedCallResult = "none"

File diff suppressed because it is too large Load Diff