Compare commits
Comparing test-2024-... with master: 236 commits (8510d49015 … 34ae744866).
.gitattributes (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
releases/tar/** filter=lfs diff=lfs merge=lfs -text
releases/whl/** filter=lfs diff=lfs merge=lfs -text
.gitignore (vendored, 12 changes)
@@ -3,3 +3,15 @@ __pycache__
d2/book1/books
.DS_Store
.vim
*.so
.mypy_cache
.ruff_cache
.tmuxp
*.egg-info
*.whl
*.tar.gz
.vscode/*
!.vscode/launch.json
python/build
.*.kate-swp
!releases/whl/*.whl
.gitmodules (vendored, 6 changes)
@@ -16,3 +16,9 @@
[submodule "deps/melianmiko-mb7-apps"]
	path = deps/melianmiko-mb7-apps
	url = https://notabug.org/melianmiko/mb7_apps
[submodule "deps/com.github.aiortc.aiortc"]
	path = deps/com.github.aiortc.aiortc
	url = https://gitea.fxreader.online/nartes/com.github.aiortc.aiortc
[submodule "deps/online.fxreader.nartes.books"]
	path = deps/online.fxreader.nartes.books
	url = https://gitea.fxreader.online/nartes/books.git
.mypy.ini (new file, 18 lines)
@@ -0,0 +1,18 @@
[mypy]
mypy_path =
    mypy-stubs,
    deps/com.github.aiortc.aiortc/src,
    mypy-stubs/marisa-trie-types,
    mypy-stubs/types-debugpy,
    python

exclude =
    python/tmp,
    python/build

plugins =
    numpy.typing.mypy_plugin,
    pydantic.mypy

explicit_package_bases = true
namespace_packages = true
.vscode/launch.json (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        /*
        {
            "name": "Python Debugger: Module",
            "type": "debugpy",
            "request": "launch",
            "module": "online_fxreader.vpn.vpn",
        },
        {
            "name": "Python Debugger: Current File with Arguments",
            "type": "debugpy",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "args": [
                "${command:pickArgs}"
            ]
        },
        */
        {
            "name": "Python Debugger: Remote Attach",
            "type": "debugpy",
            "request": "attach",
            "connect": {
                "host": "127.0.0.1",
                "port": 4444
            },
            "pathMappings": [
                /*
                {
                    "localRoot": "${workspaceFolder}/deps/com.github.aiortc.aiortc/src/",
                    //"remoteRoot": "."
                    "remoteRoot": "~/.local/bin/env3/lib/python3.12/site-packages/",
                },
                {
                    "localRoot": "${workspaceFolder}/deps/com.github.aiortc.aiortc/",
                    //"remoteRoot": "."
                    "remoteRoot": "~/.local/bin/env3/lib/python3.12/site-packages/",
                }
                */
            ]
        }
    ]
}
Makefile (new file, 141 lines)
@@ -0,0 +1,141 @@
.PHONY: python_clean_online_fxreader_vpn

host_deps:
	./m.py host_deps

python_lint:
	./m.py mypy -- -f vscode 2>&1 | less

python_tests:
	./m.py tests

#python_clean_online_fxreader_vpn:
#	rm -fr \
#		deps/com.github.aiortc.aiortc/src/online_fxreader/vpn/dist;

PYTHON_PROJECTS ?= \
	deps/com.github.aiortc.aiortc/ \
	deps/com.github.aiortc.aiortc/src/online_fxreader/vpn/ \
	python

INSTALL_ROOT ?= ~/.local/bin

#python_clean: python_clean_online_fxreader_vpn
python_clean_env:
	rm -fr \
		$(INSTALL_ROOT)/env3;

python_put_env:
	[[ -d $(INSTALL_ROOT)/env3 ]] || (\
		uv venv --system-site-packages --seed $(INSTALL_ROOT)/env3 && \
		$(INSTALL_ROOT)/env3/bin/python3 -m pip install --force-reinstall uv \
	);

python_clean_dist:
	for o in $(PYTHON_PROJECTS); do \
		[[ -d $$o/dist ]] || continue; \
		echo $$o/dist; \
		rm -fr $$o/dist; \
	done

python_clean: python_clean_dist python_clean_env

UV_ARGS ?= --offline
python_put_dist:
	for f in \
		$(PYTHON_PROJECTS); do \
		[[ -d $$f/dist ]] && continue; \
		echo $$f; \
		python3 -m build -n $$f; \
		$(INSTALL_ROOT)/env3/bin/python3 -m uv pip install $(UV_ARGS) $$f/dist/*.whl; \
	done
	ln -sf $(INSTALL_ROOT)/env3/bin/online-fxreader-pr34-commands $(INSTALL_ROOT)/commands

PYTHON_PROJECTS_NAMES ?= online.fxreader.pr34
python_whl:
	for f in $(PYTHON_PROJECTS_NAMES); do \
		./m.py deploy:wheel -o releases/whl -p $$f; \
	done

python_put: python_put_dist python_put_env

dotfiles_put:
	mkdir -p $(INSTALL_ROOT)
	cp dotfiles/.local/bin/gnome-shortcuts-macbook-air $(INSTALL_ROOT)/
	mkdir -p ~/.sway
	cp dotfiles/.sway/config ~/.sway/config
	cp dotfiles/.zshenv ~/.zshenv
	cp dotfiles/.zshrc ~/.zshrc
	cp dotfiles/.vimrc ~/.vimrc
	cp dotfiles/.tmux.conf ~/.tmux.conf
	cp dotfiles/.py3.vimrc ~/.py3.vimrc
	cp dotfiles/.py3.vimrc ~/.py3.vimrc
	cp dotfiles/.gitconfig ~/.gitconfig
	cp -rp \
		dotfiles/.ipython/profile_default/ipython_config.py \
		~/.ipython/profile_default/ipython_config.py
	D1=Code\ -\ OSS; \
	for p in \
		"dotfiles/.config/$$D1/User/keybindings.json" \
		"dotfiles/.config/$$D1/User/settings.json"; do \
		commands install -f -p "dotfiles/.config/$$D1" -s "$$p" -t ~/.config/"$$D1"; \
	done
	#commands install -f -p dotfiles -s dotfiles/ -t ~/.config/

PLATFORM ?= macbook_air_2012
PLATFORM_TMP ?= tmp/platform_dotfiles/$(PLATFORM)

dotfiles_put_platform:
	@echo to be installed
	find platform_dotfiles/$(PLATFORM);
	echo remove $(PLATFORM_TMP)'?'; read; sudo rm -fr $(PLATFORM_TMP)
	sudo mkdir -p $(PLATFORM_TMP)
	sudo cp -rp -T platform_dotfiles/$(PLATFORM)/ $(PLATFORM_TMP)
	sudo chown -R root:root $(PLATFORM_TMP)
	sudo cp -rp -T $(PLATFORM_TMP) /
	sudo udevadm control --reload
	sudo systemctl daemon-reload

dotfiles_fetch:
	commands install -f -p ~ -s ~/.config/katerc -t dotfiles
	commands install -f -p ~ -s ~/.mime.types -t dotfiles
	commands install -f -p ~ -s ~/.config/rofi/config.rasi -t dotfiles
	commands install -f -p ~ -s ~/.config/Code\ -\ OSS/User/keybindings.json -t dotfiles
	commands install -f -p ~ -s ~/.config/Code\ -\ OSS/User/settings.json -t dotfiles

DOTFILES_VERSION ?= 0.1

dotfiles_deploy:
	mkdir -p releases/tar
	tar -cvf - \
		dotfiles \
		| xz --compress -9 --stdout > \
		releases/tar/dotfiles-$(DOTFILES_VERSION).tar.xz

systemd:
	/usr/bin/env python3 d1/systemd.py
	for d in tmp/d1; do \
		(\
			cd $$d; \
			for i in *.service *.timer; do \
				sudo ln -s -f $$PWD/$$i /etc/systemd/system/$$i; \
			done; \
		); \
	done
	sudo systemctl daemon-reload

venv:
	uv venv
	uv pip install -p .venv \
		-r requirements.txt

venv_compile:
	uv pip compile --generate-hashes \
		requirements.in > requirements.txt

MYPY_SOURCES ?= \
	d1/cpanel.py
mypy:
	. .venv/bin/activate && \
	mypy --strict --follow-imports silent \
		$(MYPY_SOURCES)
d1/certbot.py (new file, 19 lines)
@@ -0,0 +1,19 @@
#!/usr/bin/env python3

import subprocess
import time
import logging

logger = logging.getLogger(__name__)


logging.basicConfig(level=logging.INFO)

while True:
    subprocess.check_call([
        'docker', 'compose', 'exec', 'ssl-app', 'certbot', 'renew',
    ])
    subprocess.check_call([
        'docker', 'compose', 'exec', 'ssl-app', 'nginx', '-s', 'reload',
    ])
    break
d1/cpanel.py (195 changes)
@@ -1,4 +1,5 @@
|
||||
import subprocess
|
||||
import os
|
||||
import requests
|
||||
import sys
|
||||
import io
|
||||
@ -10,103 +11,131 @@ import logging
|
||||
import json
|
||||
import time
|
||||
|
||||
with io.open(
|
||||
'tmp/d1/cpanel.json', 'r'
|
||||
) as f:
|
||||
t3 = json.load(f)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
t2 = copy.deepcopy(t3)
|
||||
for k in t2:
|
||||
v = t2[k]
|
||||
v['task'] = lambda : subprocess.Popen(
|
||||
v['task_cmd'],
|
||||
stdin=subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
def stop_task(task):
|
||||
task.terminate()
|
||||
try:
|
||||
task.wait(1)
|
||||
except:
|
||||
task.kill()
|
||||
class Launcher:
|
||||
def run(self):
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
t1 = dict()
|
||||
with io.open(
|
||||
'tmp/d1/cpanel.json', 'r'
|
||||
) as f:
|
||||
t3 = json.load(f)
|
||||
|
||||
shutdown = False
|
||||
t2 = copy.deepcopy(t3)
|
||||
|
||||
ssh_known_hosts : list[str] = []
|
||||
|
||||
while True:
|
||||
try:
|
||||
for k, v in t2.items():
|
||||
if not k in t1:
|
||||
logging.info(json.dumps(dict(
|
||||
task=k,
|
||||
status='starting',
|
||||
)))
|
||||
t1[k] = v['task']()
|
||||
logging.info(json.dumps(dict(
|
||||
task=k,
|
||||
status='started',
|
||||
)))
|
||||
continue
|
||||
if 'ssh_known_hosts' in v:
|
||||
ssh_known_hosts.append(v['ssh_known_hosts'])
|
||||
|
||||
o = t1[k]
|
||||
if len(ssh_known_hosts) > 0:
|
||||
subprocess.check_call(
|
||||
r'''
|
||||
mkdir -p ~/.ssh && \
|
||||
cat $SSH_KNOWN_HOSTS > ~/.ssh/known_hosts
|
||||
''', env=dict(list(os.environ.items())) | dict(
|
||||
SSH_KNOWN_HOSTS=' '.join(ssh_known_hosts),
|
||||
),
|
||||
shell=True
|
||||
)
|
||||
|
||||
not_alive = None
|
||||
for k in t2:
|
||||
v = t2[k]
|
||||
v['task'] = lambda : subprocess.Popen(
|
||||
v['task_cmd'],
|
||||
stdin=subprocess.DEVNULL,
|
||||
)
|
||||
|
||||
def stop_task(task: subprocess.Popen[bytes]) -> None:
|
||||
task.terminate()
|
||||
try:
|
||||
not_alive = not (
|
||||
requests.get(v['url'], timeout=0.5).status_code
|
||||
== 200
|
||||
)
|
||||
task.wait(1)
|
||||
except:
|
||||
logging.error(json.dumps(dict(
|
||||
error=traceback.format_exc(),
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)))
|
||||
not_alive = True
|
||||
task.kill()
|
||||
|
||||
if not_alive:
|
||||
logging.error(json.dumps(
|
||||
dict(
|
||||
args=o.args,
|
||||
k=k,
|
||||
#o=pprint.pformat(o.__dict__),
|
||||
status='not_alive',
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)
|
||||
))
|
||||
t1 = dict()
|
||||
|
||||
#stop_task(o)
|
||||
#del t1[k]
|
||||
continue
|
||||
shutdown = False
|
||||
|
||||
if not o.poll() is None:
|
||||
logging.error(json.dumps(
|
||||
dict(
|
||||
#o=pprint.pformat(o.__dict__),
|
||||
args=o.args,
|
||||
k=k,
|
||||
return_code=o.poll(),
|
||||
status='crashed',
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)
|
||||
))
|
||||
del t1[k]
|
||||
continue
|
||||
while True:
|
||||
try:
|
||||
for k, v in t2.items():
|
||||
if not k in t1:
|
||||
logging.info(json.dumps(dict(
|
||||
task=k,
|
||||
status='starting',
|
||||
)))
|
||||
t1[k] = v['task']()
|
||||
logging.info(json.dumps(dict(
|
||||
task=k,
|
||||
status='started',
|
||||
)))
|
||||
continue
|
||||
|
||||
if shutdown:
|
||||
break
|
||||
o = t1[k]
|
||||
|
||||
print('\r%s tasks %d' % (
|
||||
datetime.datetime.now().isoformat(),
|
||||
len(t1),
|
||||
), end='')
|
||||
sys.stdout.flush()
|
||||
except KeyboardInterrupt:
|
||||
print('\nshutting down')
|
||||
break
|
||||
finally:
|
||||
time.sleep(5 * 60)
|
||||
not_alive = None
|
||||
|
||||
for o in t1:
|
||||
stop_task(o)
|
||||
try:
|
||||
not_alive = not (
|
||||
requests.get(v['url'], timeout=0.5).status_code
|
||||
== 200
|
||||
)
|
||||
except:
|
||||
logging.error(json.dumps(dict(
|
||||
error=traceback.format_exc(),
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)))
|
||||
not_alive = True
|
||||
|
||||
if not_alive:
|
||||
logging.error(json.dumps(
|
||||
dict(
|
||||
args=o.args,
|
||||
k=k,
|
||||
#o=pprint.pformat(o.__dict__),
|
||||
status='not_alive',
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)
|
||||
))
|
||||
|
||||
#stop_task(o)
|
||||
#del t1[k]
|
||||
continue
|
||||
|
||||
if not o.poll() is None:
|
||||
logging.error(json.dumps(
|
||||
dict(
|
||||
#o=pprint.pformat(o.__dict__),
|
||||
args=o.args,
|
||||
k=k,
|
||||
return_code=o.poll(),
|
||||
status='crashed',
|
||||
time_iso=datetime.datetime.now().isoformat(),
|
||||
)
|
||||
))
|
||||
del t1[k]
|
||||
continue
|
||||
|
||||
if shutdown:
|
||||
break
|
||||
|
||||
print('\r%s tasks %d' % (
|
||||
datetime.datetime.now().isoformat(),
|
||||
len(t1),
|
||||
), end='')
|
||||
sys.stdout.flush()
|
||||
except KeyboardInterrupt:
|
||||
print('\nshutting down')
|
||||
break
|
||||
finally:
|
||||
time.sleep(5 * 60)
|
||||
|
||||
for o in t1:
|
||||
stop_task(o)
|
||||
|
||||
if __name__ == '__main__':
|
||||
Launcher().run()
|
||||
|
d1/f1.sh (deleted, 15 lines)
@@ -1,15 +0,0 @@
#!/bin/sh

mkdir -p ~/.local/bin
cp dotfiles/.local/bin/commands ~/.local/bin/commands
mkdir -p ~/.sway
cp dotfiles/.sway/config ~/.sway/config
cp dotfiles/.zshenv ~/.zshenv
cp dotfiles/.zshrc ~/.zshrc
cp dotfiles/.vimrc ~/.vimrc
cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.py3.vimrc ~/.py3.vimrc
cp dotfiles/.gitconfig ~/.gitconfig
cp -rp \
	dotfiles/.ipython/profile_default/ipython_config.py \
	~/.ipython/profile_default/ipython_config.py
d1/fxreader.online-certbot.service (new file, 11 lines)
@@ -0,0 +1,11 @@
[Unit]
Description=fxreader.online-certbot

[Service]
Type=oneshot
ExecStart=/usr/bin/python3 d1/certbot.py
WorkingDirectory={{PROJECT_ROOT}}
#Restart=always

#[Install]
#WantedBy=multi-user.target

d1/fxreader.online-certbot.timer (new file, 9 lines)
@@ -0,0 +1,9 @@
[Unit]
Description=fxreader.online-certbot-timer

[Timer]
OnUnitActiveSec=1d
OnBootSec=1m

[Install]
WantedBy=timers.target

d1/fxreader.online-gateway.service (new file, 16 lines)
@@ -0,0 +1,16 @@
[Unit]
Description=fxreader.online-service
Requires=docker.service
After=docker.service

[Service]
#Type=oneshot
ExecStart=/usr/bin/docker compose up --force-recreate --remove-orphans
ExecStop=/usr/bin/docker compose down
WorkingDirectory={{PROJECT_ROOT}}
StandardOutput=null
StandardError=null
Restart=always

[Install]
WantedBy=multi-user.target
@@ -43,9 +43,23 @@ def forward(
|
||||
else:
|
||||
server_name = 'default_server'
|
||||
|
||||
if not server_name in sections:
|
||||
if (
|
||||
not server_name in sections
|
||||
):
|
||||
sections[server_name] = []
|
||||
|
||||
if 'client_max_body_size' in entry:
|
||||
client_max_body_size = entry['client_max_body_size']
|
||||
else:
|
||||
client_max_body_size = '50M'
|
||||
|
||||
assert isinstance(client_max_body_size, str)
|
||||
|
||||
sections[server_name].append(
|
||||
r'''
|
||||
client_max_body_size %s;
|
||||
''' % client_max_body_size
|
||||
)
|
||||
|
||||
location_get = lambda location_body, location_path2, prefix=None,: (
|
||||
r'''
|
||||
@ -138,7 +152,7 @@ server {
|
||||
|
||||
server_name {server_name};
|
||||
listen 80 {default_server};
|
||||
client_max_body_size 50M;
|
||||
#client_max_body_size 50M;
|
||||
|
||||
{sections_config}
|
||||
}
|
||||
@ -199,6 +213,80 @@ def ssl(input_json, output_conf):
|
||||
|
||||
servers = []
|
||||
|
||||
if 'stream_server' in ssl_nginx:
|
||||
upstream_servers = []
|
||||
server_names = []
|
||||
|
||||
if 'by_server_name' in ssl_nginx['stream_server']:
|
||||
for k, v in ssl_nginx['stream_server']['by_server_name'].items():
|
||||
upstream_servers.append(
|
||||
'upstream %s { server %s; }' % (
|
||||
v['upstream_name'],
|
||||
v['url'],
|
||||
)
|
||||
)
|
||||
server_names.append(
|
||||
'"%s" %s;' % (
|
||||
v['server_name'], v['upstream_name'],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
if 'ssh' in ssl_nginx['stream_server']:
|
||||
ssh_section = 'upstream ssh { server {ssh}; }'.replace(
|
||||
'{ssh}',
|
||||
ssl_nginx['stream_server']['ssh'],
|
||||
)
|
||||
else:
|
||||
ssh_section = ''
|
||||
|
||||
ssl_port = 444
|
||||
stream_server = r'''
|
||||
stream {
|
||||
upstream web {
|
||||
server 127.0.0.1:444;
|
||||
}
|
||||
|
||||
{upstream_servers}
|
||||
|
||||
{ssh_section}
|
||||
|
||||
map $ssl_preread_protocol $upstream_protocol {
|
||||
default ssh;
|
||||
"TLSv1.2" $upstream_server_name;
|
||||
"TLSv1.3" $upstream_server_name;
|
||||
}
|
||||
|
||||
map $ssl_preread_server_name $upstream_server_name {
|
||||
default web;
|
||||
{server_names}
|
||||
}
|
||||
|
||||
# SSH and SSL on the same port
|
||||
server {
|
||||
listen 443;
|
||||
|
||||
ssl_preread on;
|
||||
proxy_pass $upstream_protocol;
|
||||
}
|
||||
}
|
||||
'''.replace(
|
||||
'{upstream_servers}', ''.join([
|
||||
' ' + o + '\n'
|
||||
for o in upstream_servers
|
||||
]),
|
||||
).replace(
|
||||
'{ssh_section}', ssh_section,
|
||||
).replace(
|
||||
'{server_names}', ''.join([
|
||||
' ' + o + '\n'
|
||||
for o in server_names
|
||||
]),
|
||||
)
|
||||
else:
|
||||
stream_server = ''
|
||||
ssl_port = 443
|
||||
|
||||
if 'default_server' in ssl_nginx:
|
||||
server = ssl_nginx['default_server']
|
||||
|
||||
@ -211,7 +299,7 @@ server {
|
||||
set $t1 $http_x_forwarded_for;
|
||||
}
|
||||
|
||||
listen 443 ssl default_server;
|
||||
listen {ssl_port} ssl default_server;
|
||||
server_name _;
|
||||
|
||||
client_max_body_size {client_max_body_size};
|
||||
@ -227,6 +315,8 @@ server {
|
||||
'{client_max_body_size}', server['client_max_body_size'],
|
||||
).replace(
|
||||
'{domain_key}', server['domain_key'],
|
||||
).replace(
|
||||
'{ssl_port}', '%d' % ssl_port,
|
||||
)
|
||||
)
|
||||
|
||||
@ -264,7 +354,7 @@ server {
|
||||
set $t1 $http_x_forwarded_for;
|
||||
}
|
||||
|
||||
listen 443 ssl;
|
||||
listen {ssl_port} ssl;
|
||||
server_name {server_names};
|
||||
|
||||
client_max_body_size {client_max_body_size};
|
||||
@ -291,20 +381,27 @@ server {
|
||||
'{client_max_body_size}', server['client_max_body_size'],
|
||||
).replace(
|
||||
'{domain_key}', server['domain_key'],
|
||||
).replace(
|
||||
'{ssl_port}', '%d' % ssl_port,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
with io.open(
|
||||
output_conf,
|
||||
'w'
|
||||
) as f:
|
||||
f.write(
|
||||
r'''
|
||||
load_module "modules/ngx_stream_module.so";
|
||||
|
||||
events {
|
||||
multi_accept on;
|
||||
worker_connections 64;
|
||||
}
|
||||
|
||||
{stream_server}
|
||||
|
||||
http {
|
||||
log_format main
|
||||
'[$time_local][$remote_addr:$remote_port, $http_x_forwarded_for, $t1, $http_host]'
|
||||
@ -325,7 +422,9 @@ http {
|
||||
'' close;
|
||||
}
|
||||
}
|
||||
'''.replace('{servers}', '\n'.join(servers))
|
||||
'''\
|
||||
.replace('{servers}', '\n'.join(servers)) \
|
||||
.replace('{stream_server}', stream_server)
|
||||
)
|
||||
|
||||
|
||||
|
d1/systemd.py (new file, 41 lines)
@@ -0,0 +1,41 @@
#!/usr/bin/env python3

import os
import pathlib
import io
import glob
import subprocess
import logging

logger = logging.getLogger(__name__)

logging.basicConfig(level=logging.INFO)

cache_path = pathlib.Path.cwd() / 'tmp'

project_root = pathlib.Path.cwd()

logger.info(dict(project_root=project_root, cache_path=cache_path,))

for service in [
    pathlib.Path(o) for o in sum([
        glob.glob('d1/*.service'),
        glob.glob('d1/*.timer')
    ], [])
]:
    os.makedirs(str((cache_path / service).parent), exist_ok=True)

    with io.open(str(service), 'r') as f:
        with io.open(
            str(cache_path / service), 'w'
        ) as f2:
            f2.write(
                f.read().replace(
                    '{{PROJECT_ROOT}}',
                    str(project_root),
                )
            )
    logger.info(dict(
        service=str(service),
        msg='updated',
    ))
d2/book1/NoSleep.min.js (vendored, 2 changes)
File diff suppressed because one or more lines are too long

d2/book1/book.js (4751 changes)
File diff suppressed because it is too large
@@ -1,77 +0,0 @@

<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
    <meta name="viewport" content="width=device-width">
    <script
      src="https://code.jquery.com/jquery-3.6.0.slim.min.js"
      integrity="sha256-u7e5khyithlIdTpu22PHhENmPcRdFiHRjhAuHcs05RI="
      crossorigin="anonymous"
    ></script>

    <title>Speech synthesiser</title>

    <script>
      window.context = {};
      window.context.books = [];
    </script>
    <script src="NoSleep.min.js"></script>
    <script src="script.js"></script>
    <script src="book.js"></script>
    <link rel="stylesheet" href="style.css">
    <!--[if lt IE 9]>
      <script src="//html5shiv.googlecode.com/svn/trunk/html5.js"></script>
    <![endif]-->
  </head>

  <body>
    <div class=voice-settings>
      <h1>Speech synthesiser</h1>

      <p>Enter some text in the input below and press return or the "play" button to hear it. change voices using the dropdown menu.</p>

      <form>
        <input type="text" class="txt">
        <div>
          <label for="rate">Rate</label><input type="range" min="0.5" max="2" value="1" step="0.1" id="rate">
          <div class="rate-value">1</div>
          <div class="clearfix"></div>
        </div>
        <div>
          <label for="pitch">Pitch</label><input type="range" min="0" max="2" value="1" step="0.1" id="pitch">
          <div class="pitch-value">1</div>
          <div class="clearfix"></div>
        </div>
        <select class=voice-select>
        </select>
        <div class="controls">
          <button id="play" type="submit">Play</button>
        </div>
      </form>
    </div>
    <div class=screen>
      <div class=widget>
        <select name=book>
          <!--<option value=0>Death of a Hear</option>-->
        </select>
        <br/>
        <span>Current Sentence: </span>
        <input type=input name=current-sentence></input>
        <span>Total Sentences: </span>
        <input type=input name=total-sentences disabled>
        </input>
        <br/>
        <input type=button name=add-book value="Add Book">
        <input type=button name=read-aloud value="Read Aloud">
        <input type=button name=debug value="Debug">
        </input>
        <br/>
      </div>
      <pre class=status>
      </pre>
    </div>

  </body>
</html>
@@ -1,508 +0,0 @@
|
||||
$(window).on('load', () => {
|
||||
|
||||
var synth = window.speechSynthesis;
|
||||
|
||||
var inputForm = document.querySelector('form');
|
||||
var inputTxt = document.querySelector('.txt');
|
||||
var voiceSelect = document.querySelector('select');
|
||||
|
||||
var pitch = document.querySelector('#pitch');
|
||||
var pitchValue = document.querySelector('.pitch-value');
|
||||
var rate = document.querySelector('#rate');
|
||||
var rateValue = document.querySelector('.rate-value');
|
||||
|
||||
var voices = [];
|
||||
|
||||
|
||||
context.nosleep_timer = null;
|
||||
|
||||
context.ui = {
|
||||
voice_settings_div: $('.voice-settings'),
|
||||
voice_select: $('.voice-select'),
|
||||
status_pre: $('.status'),
|
||||
books_select: $('.screen .widget select[name=book]'),
|
||||
current_sentence_input:
|
||||
$('.screen .widget input[name=current-sentence]'),
|
||||
total_sentences_input:
|
||||
$('.screen .widget input[name=total-sentences]'),
|
||||
read_aloud:
|
||||
$('.screen .widget input[name=read-aloud]'),
|
||||
add_book:
|
||||
$('.screen .widget input[name=add-book]'),
|
||||
debug:
|
||||
$('.screen .widget input[name=debug]'),
|
||||
};
|
||||
context.update_books = () => {
|
||||
context.ui.books_select.empty();
|
||||
window.context.books.map(
|
||||
(o, i) => $('<option>').attr('value', '' + i).text(o.slice(0, 10))
|
||||
).forEach((o) => context.ui.books_select.append(o))
|
||||
}
|
||||
|
||||
context.update_books();
|
||||
|
||||
context.sentences = null;
|
||||
context.pending_stop = false;
|
||||
context.current_book = null;
|
||||
context.nosleep = new NoSleep();
|
||||
context.is_debug = false;
|
||||
context.log = {
|
||||
error: [],
|
||||
info: [],
|
||||
};
|
||||
context.callbacks = {
|
||||
log_error: (msg) => {
|
||||
if (context.is_debug)
|
||||
{
|
||||
console.error(msg);
|
||||
context.log.error.push(msg);
|
||||
}
|
||||
},
|
||||
enable_no_sleep: () => {
|
||||
if (context.nosleep_timer != null)
|
||||
{
|
||||
context.callbacks.log_error('running already');
|
||||
}
|
||||
|
||||
context.nosleep_timer = setInterval(
|
||||
() => {
|
||||
location.hash = 'nosleep' + Math.random();
|
||||
context.callbacks.update_status();
|
||||
/*
|
||||
if ('vibrate' in window.navigator)
|
||||
{
|
||||
window.navigator.vibrate(200);
|
||||
}
|
||||
*/
|
||||
}, 1000
|
||||
);
|
||||
},
|
||||
get_state: () => {
|
||||
let t1 = localStorage['state'];
|
||||
if (t1)
|
||||
{
|
||||
return JSON.parse(t1);
|
||||
}
|
||||
else
|
||||
{
|
||||
return {};
|
||||
}
|
||||
},
|
||||
get_cookie: (key) => {
|
||||
/*
|
||||
return document.cookie.split('; ').map(
|
||||
(o) => o.split('=')
|
||||
).reduce(
|
||||
(b, a) => {
|
||||
if (a.length == 2) {b[a[0]] = a[1]};
|
||||
return b
|
||||
},
|
||||
{}
|
||||
)[key];
|
||||
*/
|
||||
let t1 = localStorage['state'];
|
||||
if (t1 != undefined)
|
||||
{
|
||||
let t2 = JSON.parse(t1);
|
||||
return t2[key];
|
||||
}
|
||||
else
|
||||
{
|
||||
return undefined;
|
||||
}
|
||||
},
|
||||
set_cookie: (key, value) => {
|
||||
let state = context.callbacks.get_state('state');
|
||||
|
||||
state[key] = value;
|
||||
|
||||
//document.cookie = `${key}=${value};`;
|
||||
localStorage['state'] = JSON.stringify(state);
|
||||
|
||||
context.callbacks.update_status();
|
||||
},
|
||||
disable_no_sleep: () => {
|
||||
if (context.nosleep_timer == null)
|
||||
{
|
||||
context.callbacks.log_error('nothing is running');
|
||||
}
|
||||
clearInterval(context.nosleep_timer);
|
||||
location.hash = '';
|
||||
context.nosleep_timer = null;
|
||||
synth.cancel();
|
||||
},
|
||||
continuous_reading: async() => {
|
||||
if (context.is_reading)
|
||||
{
|
||||
context.pending_stop = true;
|
||||
return;
|
||||
}
|
||||
context.is_reading = true;
|
||||
context.nosleep.enable();
|
||||
context.callbacks.enable_no_sleep();
|
||||
context.ui.voice_settings_div.addClass('hidden');
|
||||
context.ui.current_sentence_input.attr(
|
||||
'disabled',
|
||||
'disabled'
|
||||
);
|
||||
|
||||
while (
|
||||
context.callbacks.get_cookie('sentence_id') < context.sentences.length &&
|
||||
!context.pending_stop
|
||||
)
|
||||
{
|
||||
let sentence =
|
||||
context.sentences[context.callbacks.get_cookie('sentence_id')];
|
||||
//context.callbacks.log_error('start');
|
||||
try {
|
||||
await context.read_aloud(
|
||||
context.sentences[
|
||||
context.callbacks.get_cookie('sentence_id')
|
||||
]
|
||||
);
|
||||
} catch (e) {
|
||||
context.callbacks.log_error(e);
|
||||
}
|
||||
//context.callbacks.log_error('finished');
|
||||
if (!context.pending_stop)
|
||||
{
|
||||
context.callbacks.set_cookie(
|
||||
'sentence_id',
|
||||
context.callbacks.get_cookie('sentence_id') + 1
|
||||
);
|
||||
}
|
||||
}
|
||||
context.pending_stop = false;
|
||||
context.ui.current_sentence_input.removeAttr('disabled');
|
||||
context.nosleep.disable();
|
||||
context.ui.voice_settings_div.removeClass('hidden');
|
||||
context.callbacks.disable_no_sleep();
|
||||
context.is_reading = false;
|
||||
},
|
||||
update_status: () => {
|
||||
let data = {};
|
||||
data.state = context.callbacks.get_state();
|
||||
if (
|
||||
context.callbacks.get_cookie('sentence_id') != null &&
|
||||
context.sentences != null &&
|
||||
context.callbacks.get_cookie('sentence_id') < context.sentences.length
|
||||
)
|
||||
{
|
||||
data.sentence = context.sentences[context.callbacks.get_cookie('sentence_id')];
|
||||
}
|
||||
data.pending_stop = context.pending_stop;
|
||||
data.is_reading = context.is_reading;
|
||||
data.log = context.log;
|
||||
context.ui.current_sentence_input.val(
|
||||
context.callbacks.get_cookie('sentence_id')
|
||||
);
|
||||
data.timestamp = (new Date());
|
||||
data.version = 'v0.1.7';
|
||||
data.speech_synthesis = {
|
||||
paused: synth.paused,
|
||||
pending: synth.pending,
|
||||
speaking: synth.speaking,
|
||||
};
|
||||
/*
|
||||
if (!synth.speaking && context.is_reading)
|
||||
{
|
||||
synth.cancel();
|
||||
}
|
||||
*/
|
||||
context.ui.status_pre.text(
|
||||
JSON.stringify(
|
||||
data,
|
||||
null,
|
||||
4,
|
||||
)
|
||||
);
|
||||
},
|
||||
ui_read_aloud_on_click: async() => {
|
||||
let book_id = parseInt(context.ui.books_select.val());
|
||||
if (context.current_book != book_id)
|
||||
{
|
||||
context.current_book = book_id;
|
||||
context.sentences =
|
||||
context.books[
|
||||
context.current_book
|
||||
].replaceAll(/([\.\?\!])\s+/g,'$1\n')
|
||||
.split('\n');
|
||||
context.ui.total_sentences_input.val(
|
||||
context.sentences.length,
|
||||
);
|
||||
{
|
||||
let state = context.callbacks.get_state();
|
||||
}
|
||||
}
|
||||
if (
|
||||
context.ui.current_sentence_input.val() != ''
|
||||
)
|
||||
{
|
||||
try{
|
||||
let sentence_id = parseInt(
|
||||
context.ui.current_sentence_input.val()
|
||||
);
|
||||
|
||||
if (
|
||||
sentence_id >= 0 &&
|
||||
sentence_id < context.sentences.length
|
||||
)
|
||||
{
|
||||
context.callbacks.set_cookie(
|
||||
'sentence_id',
|
||||
sentence_id
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
context.callbacks.log_error(e);
|
||||
}
|
||||
}
|
||||
if (context.is_reading && !context.pending_stop)
|
||||
{
|
||||
context.pending_stop = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
context.callbacks.continuous_reading();
|
||||
}
|
||||
},
|
||||
populateVoiceList: () => {
|
||||
voices = synth.getVoices().sort(function (a, b) {
|
||||
const aname = a.name.toUpperCase(), bname = b.name.toUpperCase();
|
||||
if ( aname < bname ) return -1;
|
||||
else if ( aname == bname ) return 0;
|
||||
else return +1;
|
||||
});
|
||||
//var selectedIndex = voiceSelect.selectedIndex < 0 ? 0 : voiceSelect.selectedIndex;
|
||||
voiceSelect.innerHTML = '';
|
||||
for(i = 0; i < voices.length ; i++) {
|
||||
var option = document.createElement('option');
|
||||
option.textContent = voices[i].name + ' (' + voices[i].lang + ')';
|
||||
|
||||
if(voices[i].default) {
|
||||
option.textContent += ' -- DEFAULT';
|
||||
}
|
||||
|
||||
|
||||
{
|
||||
let voice = context.callbacks.get_cookie('voice');
|
||||
if (voice && option.textContent == voice)
|
||||
{
|
||||
$(option).attr('selected', 'selected');
|
||||
}
|
||||
}
|
||||
|
||||
option.setAttribute('data-lang', voices[i].lang);
|
||||
option.setAttribute('data-name', voices[i].name);
|
||||
voiceSelect.appendChild(option);
|
||||
}
|
||||
|
||||
//voiceSelect.selectedIndex = selectedIndex;
|
||||
},
|
||||
init: () => {
|
||||
let state = context.callbacks.get_state();
|
||||
context.ui.voice_select.val(state.voice);
|
||||
if (!state.book_id)
|
||||
{
|
||||
context.callbacks.set_cookie(
|
||||
'book_id',
|
||||
0,
|
||||
);
|
||||
}
|
||||
if (!state.sentence_id)
|
||||
{
|
||||
context.callbacks.set_cookie(
|
||||
'sentence_id',
|
||||
0,
|
||||
);
|
||||
}
|
||||
if (state.book_id)
|
||||
{
|
||||
context.ui.books_select.find(
|
||||
'>option',
|
||||
).eq(state.book_id).attr('selected', 'selected');
|
||||
}
|
||||
if (state.sentence_id)
|
||||
{
|
||||
context.ui.current_sentence_input.val(
|
||||
state.sentence_id,
|
||||
);
|
||||
}
|
||||
},
|
||||
};
|
||||
context.callbacks.populateVoiceList();
|
||||
if (speechSynthesis.onvoiceschanged !== undefined) {
|
||||
speechSynthesis.onvoiceschanged = context.callbacks.populateVoiceList;
|
||||
}
|
||||
|
||||
context.callbacks.init();
|
||||
|
||||
context.ui.add_book.on(
|
||||
'click',
|
||||
async () => {
|
||||
alert('fuck');
|
||||
let book = await (
|
||||
(await fetch(
|
||||
'books/' + prompt('enter book file', '1.txt')
|
||||
)).text()
|
||||
);
|
||||
//let book = prompt('enter text', '');
|
||||
//let title = prompt('enter title', '');
|
||||
//window.context.books.push(title + '\n' + book);
|
||||
window.context.books.push(book);
|
||||
window.context.update_books();
|
||||
},
|
||||
);
|
||||
context.ui.read_aloud.on(
|
||||
'click',
|
||||
context.callbacks.ui_read_aloud_on_click,
|
||||
);
|
||||
context.ui.voice_select.on(
|
||||
'change',
|
||||
() => {
|
||||
context.callbacks.set_cookie(
|
||||
'voice',
|
||||
context.ui.voice_select.val()
|
||||
);
|
||||
}
|
||||
);
|
||||
context.ui.debug.on(
|
||||
'click',
|
||||
() => {
|
||||
if (context.is_debug)
|
||||
{
|
||||
context.is_debug = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
context.is_debug = true;
|
||||
}
|
||||
context.callbacks.update_status();
|
||||
}
|
||||
);
|
||||
context.read_aloud = async (raw_line) => {
|
||||
line = raw_line.trim();
|
||||
if (line.length == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
let sleep_detect = null;
|
||||
let exit = () => {
|
||||
if (sleep_detect != null)
|
||||
{
|
||||
clearInterval(sleep_detect);
|
||||
}
|
||||
}
|
||||
return new Promise((response, reject) => {
|
||||
if (synth.speaking) {
|
||||
context.callbacks.log_error('speechSynthesis.speaking');
|
||||
if (reject != undefined)
|
||||
{
|
||||
reject('error');
|
||||
}
|
||||
return;
|
||||
}
|
||||
let utterThis = new SpeechSynthesisUtterance(line);
|
||||
utterThis.onend = function (event) {
|
||||
exit();
|
||||
context.callbacks.log_error(
|
||||
'SpeechSynthesisUtterance.onend ' + event.error
|
||||
);
|
||||
if (response != undefined)
|
||||
{
|
||||
response('done ' + event.error);
|
||||
}
|
||||
}
|
||||
utterThis.onpause = function (event) {
|
||||
exit();
|
||||
context.callbacks.log_error('SpeechSynthesisUtterance.onpause');
|
||||
if (reject != undefined)
|
||||
{
|
||||
reject('paused ' + event.error);
|
||||
}
|
||||
}
|
||||
utterThis.onerror = function (event) {
|
||||
exit();
|
||||
context.callbacks.log_error(
|
||||
'SpeechSynthesisUtterance.onerror ' + event.error
|
||||
);
|
||||
if (reject != undefined)
|
||||
{
|
||||
reject('error ' + event.error);
|
||||
}
|
||||
}
|
||||
let selectedOption = voiceSelect.selectedOptions[0].getAttribute('data-name');
|
||||
for(i = 0; i < voices.length ; i++) {
|
||||
if(voices[i].name === selectedOption) {
|
||||
utterThis.voice = voices[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
//window.alert('fuck3');
|
||||
utterThis.pitch = pitch.value;
|
||||
utterThis.rate = rate.value;
|
||||
synth.speak(utterThis);
|
||||
let silence_count = 0;
|
||||
sleep_detect = setInterval(
|
||||
() => {
|
||||
if (!synth.speaking)
|
||||
{
|
||||
context.callbacks.log_error(
|
||||
'silence count is ' + silence_count
|
||||
)
|
||||
|
||||
++silence_count;
|
||||
}
|
||||
|
||||
if (silence_count == 3 || context.pending_stop)
|
||||
{
|
||||
exit();
|
||||
if (context.pending_stop)
|
||||
{
|
||||
synth.cancel();
|
||||
reject('pending stop');
|
||||
}
|
||||
else
|
||||
{
|
||||
context.callbacks.log_error('phone is sleeping, retry');
|
||||
response('utterance is not present');
|
||||
}
|
||||
/*
|
||||
context.read_aloud(
|
||||
line
|
||||
).then(response).catch(reject);
|
||||
*/
|
||||
}
|
||||
},
|
||||
100,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
function speak(){
|
||||
let line = inputTxt.value;
|
||||
if (line !== '') {
|
||||
context.read_aloud(line);
|
||||
}
|
||||
}
|
||||
|
||||
inputForm.onsubmit = function(event) {
|
||||
event.preventDefault();
|
||||
|
||||
speak();
|
||||
|
||||
inputTxt.blur();
|
||||
}
|
||||
|
||||
pitch.onchange = function() {
|
||||
pitchValue.textContent = pitch.value;
|
||||
}
|
||||
|
||||
rate.onchange = function() {
|
||||
rateValue.textContent = rate.value;
|
||||
}
|
||||
|
||||
voiceSelect.onchange = function(){
|
||||
speak();
|
||||
}
|
||||
});
|
@@ -1,84 +0,0 @@
body, html {
  margin: 0;
}

html {
  height: 100%;
}

body {
  height: 90%;
  max-width: 800px;
  margin: 0 auto;
}

h1, p {
  font-family: sans-serif;
  text-align: center;
  padding: 20px;
}

.txt, select, form > div {
  display: block;
  margin: 0 auto;
  font-family: sans-serif;
  font-size: 16px;
  padding: 5px;
}

.txt {
  width: 80%;
}

select {
  width: 83%;
}

form > div {
  width: 81%;
}

.txt, form > div {
  margin-bottom: 10px;
  overflow: auto;
}

.clearfix {
  clear: both;
}

label {
  float: left;
  width: 10%;
  line-height: 1.5;
}

.rate-value, .pitch-value {
  float: right;
  width: 5%;
  line-height: 1.5;
}

#rate, #pitch {
  float: right;
  width: 81%;
}

.controls {
  text-align: center;
  margin-top: 10px;
}

.controls button {
  padding: 10px;
}

.hidden
{
  display: none !important;
}

pre {
  word-break: break-all;
  white-space: pre-wrap;
}
deps/com.github.aiortc.aiortc (vendored, new submodule)
@@ -0,0 +1 @@
Subproject commit adef10a8c41f5c550622879370a40f8a9e545574
deps/greasyfork/.editorconfig (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true

[*.{js,json,yml}]
charset = utf-8
indent_style = space
indent_size = 2
deps/greasyfork/.gitattributes (vendored, new file, 6 lines)
@@ -0,0 +1,6 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated

/dist/** filter=lfs diff=lfs merge=lfs -text
deps/greasyfork/.gitignore (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
!dist
build

# Swap the comments on the following lines if you wish to use zero-installs
# In that case, don't forget to run `yarn config set enableGlobalCache false`!
# Documentation here: https://yarnpkg.com/features/caching#zero-installs

#!.yarn/cache
.pnp.*
deps/greasyfork/README.md (vendored, new file, 1 line)
@@ -0,0 +1 @@
# greasyfork

deps/greasyfork/dist/linkedin.user.js (vendored, new file, 9486 lines)
File diff suppressed because it is too large

deps/greasyfork/dist/linkedin.user.js.map (vendored, new file, 1 line)
File diff suppressed because one or more lines are too long
deps/greasyfork/package.json (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
{
  "name": "greasyfork",
  "packageManager": "yarn@4.4.0",
  "dependencies": {
    "@babel/core": "latest",
    "@babel/runtime": "latest",
    "@gera2ld/plaid-rollup": "latest",
    "@violentmonkey/dom": "latest",
    "@violentmonkey/ui": "latest",
    "jquery": "latest",
    "solid-js": "latest",
    "typescript": "latest",
    "vite": "latest"
  },
  "devDependencies": {
    "@babel/plugin-transform-react-jsx": "latest",
    "@babel/plugin-transform-runtime": "latest",
    "@rollup/plugin-typescript": "latest",
    "@types/babel__core": "latest",
    "@types/babel__plugin-transform-runtime": "latest",
    "@types/jquery": "latest",
    "@violentmonkey/types": "latest",
    "cross-env": "latest",
    "postcss": "latest",
    "prettier": "latest",
    "rollup": "latest",
    "rollup-plugin-postcss": "latest",
    "rollup-plugin-userscript": "latest",
    "tslib": "latest",
    "unocss": "latest"
  }
}
deps/greasyfork/rollup.config.mjs (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
import { defineExternal, definePlugins } from '@gera2ld/plaid-rollup';
import { defineConfig } from 'rollup';
import userscript from 'rollup-plugin-userscript';
import typescript from '@rollup/plugin-typescript';
import pkg from './package.json' with { type: 'json' };

export default defineConfig(
  Object.entries({
    'linkedin': 'src/linkedin/index.ts',
  }).map(([name, entry]) => ({
    input: entry,
    plugins: [
      ...definePlugins({
        esm: true,
        minimize: false,
        postcss: {
          inject: false,
          minimize: true,
        },
        extensions: ['.ts', '.tsx', '.mjs', '.js', '.jsx'],
      }),
      userscript((meta) => meta.replace('process.env.AUTHOR', pkg.author)),
      typescript({ sourceMap: true, inlineSources: true }),
    ],
    external: defineExternal([
      '@violentmonkey/ui',
      //'@violentmonkey/dom',
      'solid-js',
      'solid-js/web',
    ]),
    output: {
      sourcemap: true,
      sourcemapBaseUrl: 'https://gitea.fxreader.online/fxreader.online/freelance-project-34-marketing-blog/media/branch/master/deps/greasyfork/dist/',
      format: 'iife',
      file: `dist/${name}.user.js`,
      globals: {
        // Note:
        // - VM.solid is just a third-party UMD bundle for solid-js since there is no official one
        // - If you don't want to use it, just remove `solid-js` related packages from `external`, `globals` and the `meta.js` file.
        'solid-js': 'VM.solid',
        'solid-js/web': 'VM.solid.web',
        //'@violentmonkey/dom': 'VM',
        '@violentmonkey/ui': 'VM',
      },
      indent: false,
    },
  })),
);
deps/greasyfork/src/linkedin/index.ts (vendored, new file, 636 lines)
@@ -0,0 +1,636 @@
|
||||
// ==UserScript==
|
||||
// @name data extraction linkedin
|
||||
// @namespace Violentmonkey Scripts
|
||||
// @match https://www.linkedin.com/*
|
||||
// @grant GM_getValue
|
||||
// @grant GM_setValue
|
||||
// @grant GM_getValues
|
||||
// @grant GM_setValues
|
||||
// @grant GM_listValues
|
||||
// @grant GM_deleteValue
|
||||
// @grant GM_deleteValues
|
||||
// @grant GM_addStyle
|
||||
// @grant GM_addElement
|
||||
// @version 0.1
|
||||
// @author Siarhei Siniak
|
||||
// @license Unlicense
|
||||
// @description 10/08/2024, 8:44:59 PM
|
||||
|
||||
// @run-at document-body
|
||||
// @inject-into content
|
||||
// @noframes
|
||||
// ==/UserScript==
|
||||
|
||||
/*
|
||||
|
||||
Use this extension to disalbe CSP for linkedin
|
||||
|
||||
https://addons.mozilla.org/en-US/firefox/addon/header-editor/
|
||||
https://github.com/FirefoxBar/HeaderEditor
|
||||
https://github.com/violentmonkey/violentmonkey/issues/1335
|
||||
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/script-src
|
||||
|
||||
|
||||
{
|
||||
"request": [],
|
||||
"sendHeader": [],
|
||||
"receiveHeader": [
|
||||
{
|
||||
"enable": true,
|
||||
"name": "disable CSP for linkedin",
|
||||
"ruleType": "modifyReceiveHeader",
|
||||
"matchType": "domain",
|
||||
"pattern": "www.linkedin.com",
|
||||
"exclude": "",
|
||||
"group": "Ungrouped",
|
||||
"isFunction": false,
|
||||
"action": {
|
||||
"name": "content-security-policy",
|
||||
"value": ""
|
||||
}
|
||||
}
|
||||
],
|
||||
"receiveBody": []
|
||||
}
|
||||
*/
|
||||
|
||||
import $ from "jquery";
|
||||
import * as VM from "@violentmonkey/dom";
|
||||
|
||||
interface Entry {
|
||||
header: string
|
||||
teaser?: string
|
||||
};
|
||||
|
||||
interface State {
|
||||
search: string
|
||||
};
|
||||
|
||||
class Linkedin {
|
||||
data : Map<string, any>;
|
||||
|
||||
|
||||
is_fullscreen: boolean = false;
|
||||
|
||||
ui : {
|
||||
root: any | null
|
||||
entries: any | null
|
||||
search: any | null
|
||||
state: any | null
|
||||
};
|
||||
|
||||
state : State;
|
||||
old_state: State | null = null;
|
||||
|
||||
constructor() {
|
||||
this.data = new Map();
|
||||
this.ui = {
|
||||
root: null,
|
||||
entries: null,
|
||||
search: null,
|
||||
state: null,
|
||||
};
|
||||
|
||||
this.state = {
|
||||
search: '',
|
||||
};
|
||||
}
|
||||
|
||||
clean_page() {
|
||||
if (location.href.search('empty_body=true') != -1)
|
||||
{
|
||||
this.is_fullscreen = true;
|
||||
$('head').empty();
|
||||
$('body').empty();
|
||||
$('body').addClass('no-border');
|
||||
}
|
||||
}
|
||||
|
||||
async data_load() {
|
||||
let self = this;
|
||||
const keys = await GM_listValues();
|
||||
let loaded = 0;
|
||||
for (let o of keys)
|
||||
{
|
||||
if (!o.startsWith('data-'))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
self.data.set(
|
||||
o.slice(5,),
|
||||
await GM_getValue(o)
|
||||
);
|
||||
loaded += 1;
|
||||
}
|
||||
|
||||
console.log({action: 'loaded', total: loaded});
|
||||
}
|
||||
|
||||
string_reduce (text: string) {
|
||||
return text.replaceAll(/\s+/gi, ' ').trim();
|
||||
}
|
||||
|
||||
parse_header() {
|
||||
let self = this;
|
||||
|
||||
return [
|
||||
$(
|
||||
'.scaffold-finite-scroll__content > div > .relative .update-components-header'
|
||||
).map((i, o) => ({
|
||||
header: o.innerText
|
||||
})),
|
||||
$(
|
||||
'.scaffold-finite-scroll__content > div > .relative .update-components-actor'
|
||||
).map((i, o) => {
|
||||
let header = $(o);
|
||||
let teaser = $(o).parents('.relative')
|
||||
.parent().find('.feed-shared-update-v2__description-wrapper');
|
||||
return {
|
||||
header: self.string_reduce(header.text()),
|
||||
teaser: self.string_reduce(teaser.text()),
|
||||
};
|
||||
})
|
||||
]
|
||||
}
|
||||
|
||||
async data_add (entry: Entry) {
|
||||
let self = this;
|
||||
|
||||
if (self.data.has(entry.header))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
self.data.set(entry.header, {
|
||||
entry: entry,
|
||||
ts: (new Date()).valueOf(),
|
||||
});
|
||||
|
||||
await GM_setValue(
|
||||
'data-' + entry.header,
|
||||
self.data.get(entry.header)
|
||||
)
|
||||
|
||||
console.log('saved ' + entry.header);
|
||||
|
||||
console.log(self.data.get(entry.header));
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async document_on_changed () {
|
||||
let self = this;
|
||||
|
||||
let state_changed = false;
|
||||
|
||||
if (
|
||||
JSON.stringify(self.state_get()) != JSON.stringify(self.state)
|
||||
)
|
||||
{
|
||||
state_changed = true;
|
||||
self.old_state = self.state;
|
||||
self.state = self.state_get();
|
||||
}
|
||||
|
||||
let current_data = self.parse_header();
|
||||
|
||||
let changed = false;
|
||||
|
||||
for (let o of current_data[0])
|
||||
{
|
||||
let current_changed = await self.data_add(o);
|
||||
if (current_changed)
|
||||
{
|
||||
changed = current_changed;
|
||||
}
|
||||
}
|
||||
|
||||
for (let o of current_data[1])
|
||||
{
|
||||
let current_changed = await self.data_add(o);
|
||||
if (current_changed)
|
||||
{
|
||||
changed = current_changed;
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
changed || (
|
||||
state_changed ||
|
||||
self.ui.entries === null && self.data.size > 0
|
||||
)
|
||||
)
|
||||
{
|
||||
self.display();
|
||||
}
|
||||
}
|
||||
|
||||
listener_add() {
|
||||
let self = this;
|
||||
|
||||
return VM.observe(
|
||||
document.body,
|
||||
() => {
|
||||
self.document_on_changed();
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
display_init() {
|
||||
let self = this;
|
||||
|
||||
self.ui.root = $(`<div class=online-fxreader-linkedin>`);
|
||||
|
||||
$(document.body).append(self.ui.root);
|
||||
|
||||
if (self.is_fullscreen)
|
||||
{
|
||||
self.ui.root.addClass('fullscreen');
|
||||
}
|
||||
|
||||
$('head').append($('<style>').html(`
|
||||
div.online-fxreader-linkedin {
|
||||
height: 10em;
|
||||
overflow: hidden;
|
||||
z-index: 9999;
|
||||
position: fixed;
|
||||
top: 5em;
|
||||
background: yellow;
|
||||
margin-left: 1em;
|
||||
word-wrap: anywhere;
|
||||
white-space: break-spaces;
|
||||
margin-right: 1em;
|
||||
width: calc(100% - 2em);
|
||||
}
|
||||
|
||||
.d-none {
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin.tray-active .search,
|
||||
.online-fxreader-linkedin.tray-active .entries
|
||||
{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .tray
|
||||
{
|
||||
cursor: pointer;
|
||||
position: absolute;
|
||||
right: 0px;
|
||||
z-index: 9999;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin.tray-active
|
||||
{
|
||||
right: 1em;
|
||||
width: 3em;
|
||||
height: 3em !important;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .search
|
||||
{
|
||||
display: flex;
|
||||
position: sticky;
|
||||
top: 0px;
|
||||
background-color: #eee;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .search input
|
||||
{
|
||||
width: 60em;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .entries
|
||||
{
|
||||
overflow: scroll;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .entry.even
|
||||
{
|
||||
background-color: #eee;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .entry.odd
|
||||
{
|
||||
background-color: #ddd;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin .search,
|
||||
.online-fxreader-linkedin .search input
|
||||
{
|
||||
height: 2em;
|
||||
line-height: 2em;
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
.no-border {
|
||||
padding: unset;
|
||||
margin: unset;
|
||||
}
|
||||
|
||||
.online-fxreader-linkedin:hover,
|
||||
.online-fxreader-linkedin.fullscreen
|
||||
{
|
||||
height: 80vh;
|
||||
}
|
||||
`));
|
||||
GM_addElement('script', {
|
||||
"textContent": `
|
||||
class Linkedin {
|
||||
constructor() {
|
||||
let self = this;
|
||||
|
||||
this.has_callbacks = false;
|
||||
|
||||
this.ui = {
|
||||
root: () => {
|
||||
return document.getElementsByClassName('online-fxreader-linkedin')[0];
|
||||
},
|
||||
};
|
||||
|
||||
self.ui.search = () => {
|
||||
let search = self.ui.root().getElementsByClassName('search')[0];
|
||||
let search_input = search.getElementsByTagName('input')[0];
|
||||
return search_input;
|
||||
};
|
||||
self.ui.tray = () => {
|
||||
// let search = self.ui.root().getElementsByClassName('search')[0];
|
||||
let tray = self.ui.root().getElementsByClassName('tray')[0];
|
||||
return tray;
|
||||
};
|
||||
|
||||
self.ui.state = () => {
|
||||
let state = self.ui.root().getElementsByClassName('state')[0];
|
||||
return state;
|
||||
};
|
||||
}
|
||||
|
||||
add_callbacks() {
|
||||
let self = this;
|
||||
self.ui.tray().addEventListener(
|
||||
'click', function(e) {
|
||||
let o = e.currentTarget;
|
||||
let cl = o.classList;
|
||||
let r = self.ui.root();
|
||||
if (cl.contains('active'))
|
||||
{
|
||||
cl.remove('active');
|
||||
r.classList.add('tray-active');
|
||||
}
|
||||
else
|
||||
{
|
||||
cl.add('active');
|
||||
r.classList.remove('tray-active');
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
blah(class_name) {
|
||||
if (!this.has_callbacks)
|
||||
{
|
||||
this.add_callbacks();
|
||||
this.has_callbacks = true;
|
||||
}
|
||||
|
||||
console.log('blah');
|
||||
Array.from(
|
||||
document.getElementsByClassName(class_name)
|
||||
).forEach((o) => o.remove());
|
||||
}
|
||||
|
||||
state_update(partial) {
|
||||
let self = this;
|
||||
|
||||
let ui_state = self.ui.state();
|
||||
|
||||
let old_state = JSON.parse(ui_state.innerText);
|
||||
ui_state.innerText = JSON.stringify(
|
||||
{
|
||||
...old_state,
|
||||
...partial
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
search_on_change() {
|
||||
let self = this;
|
||||
let search = self.ui.search();
|
||||
|
||||
self.state_update(
|
||||
{
|
||||
search: search.value
|
||||
}
|
||||
);
|
||||
}
|
||||
};
|
||||
const online_fxreader_linkedin = new Linkedin();
|
||||
|
||||
console.log('started');
|
||||
`
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
state_get() {
|
||||
let self = this;
|
||||
|
||||
if (self.ui.state && self.ui.state.text() !== '')
|
||||
{
|
||||
return JSON.parse(self.ui.state.text());
|
||||
}
|
||||
else
|
||||
{
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
state_set(partial: any) {
|
||||
let self = this;
|
||||
|
||||
self.ui.state.text(
|
||||
JSON.stringify({
|
||||
...self.state_get(),
|
||||
...partial
|
||||
})
|
||||
);
|
||||
}
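// Note (not part of the commit): state_get()/state_set() round-trip the widget state
// through the hidden JSON text of the .state element, e.g.
//   this.state_set({ search: 'python -java' });
//   this.state_get();  // -> { ...previous state, search: 'python -java' }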
|
||||
|
||||
display() {
|
||||
let self = this;
|
||||
|
||||
let sorted_entries = Array.from(self.data.entries()).sort(
|
||||
(a, b) => a[1].ts - b[1].ts
|
||||
);
|
||||
|
||||
// self.ui.root.empty();
|
||||
|
||||
if (self.ui.search === null)
|
||||
{
|
||||
self.ui.root.append(
|
||||
$('<div>').addClass('tray').text('SHOW/HIDE')
|
||||
);
|
||||
|
||||
let search = $('<div>').addClass('search').append(
|
||||
$('<input>').val(self.state.search)
|
||||
).attr(
|
||||
'onkeyup',
|
||||
`online_fxreader_linkedin.search_on_change()`,
|
||||
);
|
||||
search.append(
|
||||
$('<div>').addClass('total')
|
||||
);
|
||||
self.ui.root.append(search);
|
||||
self.ui.search = search;
|
||||
}
|
||||
|
||||
if (self.ui.state === null)
|
||||
{
|
||||
self.ui.state = $('<div>').addClass('state d-none').text(
|
||||
JSON.stringify(self.state)
|
||||
);
|
||||
self.ui.root.append(self.ui.state);
|
||||
}
|
||||
else
|
||||
{
|
||||
}
|
||||
//state_set(old_state);
|
||||
|
||||
let entries = null;
|
||||
|
||||
if (self.ui.entries === null)
|
||||
{
|
||||
entries = $('<div>').addClass('entries');
|
||||
self.ui.root.append(entries);
|
||||
self.ui.entries = entries
|
||||
}
|
||||
else
|
||||
{
|
||||
entries = self.ui.entries;
|
||||
entries.empty();
|
||||
}
|
||||
|
||||
let keywords = (self.state?.search || '').split(/\s+/).map((o) => {
|
||||
let action = '';
|
||||
let word = '';
|
||||
|
||||
if (o.length > 0)
|
||||
{
|
||||
if (o[0] == '+')
|
||||
{
|
||||
action = 'include';
|
||||
word = o.slice(1,);
|
||||
}
|
||||
else if (o[0] == '-')
|
||||
{
|
||||
action = 'exclude';
|
||||
word = o.slice(1,);
|
||||
}
|
||||
else
|
||||
{
|
||||
action = 'include';
|
||||
word = o;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
action,
|
||||
word,
|
||||
};
|
||||
|
||||
}).filter((o) => o.action !== '' && o.word !== '');
|
||||
|
||||
let filtered_entries = sorted_entries.filter((o) => {
|
||||
let match = true;
|
||||
|
||||
let text = JSON.stringify(o);
|
||||
|
||||
for (let k of keywords)
|
||||
{
|
||||
if (k.action == 'include')
|
||||
{
|
||||
if (text.search(k.word) == -1)
|
||||
{
|
||||
match = false;
|
||||
}
|
||||
}
|
||||
else if (k.action == 'exclude')
|
||||
{
|
||||
if (text.search(k.word) != -1)
|
||||
{
|
||||
match = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!match)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return match;
|
||||
})
|
||||
|
||||
self.ui.search.find('.total').text(
|
||||
filtered_entries.length
|
||||
);
|
||||
|
||||
let i = 0;
|
||||
for (let o of filtered_entries.reverse())
|
||||
{
|
||||
let raw = JSON.stringify(o[1]);
|
||||
let ts = (new Date(o[1].ts));
|
||||
let entry = $('<div>').addClass('entry');
|
||||
|
||||
if (i % 2 == 0)
|
||||
{
|
||||
entry.addClass('even');
|
||||
}
|
||||
else
|
||||
{
|
||||
entry.addClass('odd');
|
||||
}
|
||||
|
||||
entry.append(
|
||||
$('<div>').addClass('ts').text(
|
||||
ts.toISOString(),
|
||||
)
|
||||
);
|
||||
entry.append(
|
||||
$('<div>').addClass('header').text(
|
||||
o[1].entry.header
|
||||
)
|
||||
);
|
||||
entry.append(
|
||||
$('<div>').addClass('teaser').text(
|
||||
o[1].entry.teaser
|
||||
)
|
||||
);
|
||||
// entry.append($('<pre>').text(raw));
|
||||
|
||||
entries.append(entry);
|
||||
++i;
|
||||
}
|
||||
|
||||
GM_addElement('script', {
|
||||
"class": 'bridge',
|
||||
"textContent": `
|
||||
online_fxreader_linkedin.blah('bridge');
|
||||
`
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const l = new Linkedin();
|
||||
|
||||
(async () => {
|
||||
l.clean_page();
|
||||
|
||||
await l.data_load();
|
||||
|
||||
const disconnect = l.listener_add();
|
||||
l.display_init();
|
||||
})();
|
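A minimal sketch (not part of the commit) of the '+word' / '-word' search syntax implemented in display() above; parse and matches are illustrative helper names, not functions from the userscript:

// illustrative only: same include/exclude semantics as the keywords/filter logic above
const parse = (query) => query
  .split(/\s+/)
  .filter((w) => w.length > 0)
  .map((w) =>
    w[0] === '-'
      ? { action: 'exclude', word: w.slice(1) }
      : { action: 'include', word: w[0] === '+' ? w.slice(1) : w }
  )
  .filter((k) => k.word !== '');

const matches = (text, query) => parse(query).every((k) =>
  k.action === 'include'
    ? text.search(k.word) !== -1
    : text.search(k.word) === -1
);

// matches('senior python developer', 'python -java')  -> true
// matches('senior java developer', 'python -java')    -> false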
1
deps/greasyfork/src/types/vm.d.ts
vendored
Normal file
@@ -0,0 +1 @@
|
||||
import '@violentmonkey/types';
|
115
deps/greasyfork/tsconfig.json
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
{
|
||||
"include": ["src"],
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig to read more about this file */
|
||||
|
||||
/* Projects */
|
||||
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
|
||||
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
|
||||
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
|
||||
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
|
||||
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
|
||||
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
|
||||
|
||||
/* Language and Environment */
|
||||
"target": "esnext", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
|
||||
"lib": [
|
||||
"DOM",
|
||||
"ES6",
|
||||
"DOM.Iterable",
|
||||
"ScriptHost",
|
||||
"ESNext"
|
||||
], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
|
||||
// "jsx": "preserve", /* Specify what JSX code is generated. */
|
||||
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
|
||||
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
|
||||
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
|
||||
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
|
||||
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
|
||||
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
|
||||
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
|
||||
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
|
||||
|
||||
/* Modules */
|
||||
"module": "esnext", /* Specify what module code is generated. */
|
||||
// "rootDir": "./", /* Specify the root folder within your source files. */
|
||||
"moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
|
||||
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
|
||||
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
|
||||
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
|
||||
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
|
||||
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
|
||||
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
|
||||
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
|
||||
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
|
||||
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
|
||||
// "resolveJsonModule": true, /* Enable importing .json files. */
|
||||
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
|
||||
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
|
||||
|
||||
/* JavaScript Support */
|
||||
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
|
||||
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
|
||||
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
|
||||
|
||||
/* Emit */
|
||||
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
|
||||
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
|
||||
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
|
||||
"sourceMap": true, /* Create source map files for emitted JavaScript files. */
|
||||
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
|
||||
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
|
||||
"outDir": "build/", /* Specify an output folder for all emitted files. */
|
||||
// "removeComments": true, /* Disable emitting comments. */
|
||||
// "noEmit": true, /* Disable emitting files from a compilation. */
|
||||
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
|
||||
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
|
||||
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
|
||||
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
|
||||
// "newLine": "crlf", /* Set the newline character for emitting files. */
|
||||
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
|
||||
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
|
||||
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
|
||||
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
|
||||
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
|
||||
|
||||
/* Interop Constraints */
|
||||
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
|
||||
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
|
||||
// "isolatedDeclarations": true, /* Require sufficient annotation on exports so other tools can trivially generate declaration files. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
|
||||
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
|
||||
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
|
||||
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
|
||||
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
|
||||
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
|
||||
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
|
||||
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
|
||||
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
|
||||
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
|
||||
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
|
||||
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
|
||||
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
|
||||
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
|
||||
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
|
||||
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
|
||||
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
|
||||
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
|
||||
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
|
||||
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
|
||||
|
||||
/* Completeness */
|
||||
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
|
||||
"skipLibCheck": true /* Skip type checking all .d.ts files. */
|
||||
}
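/* Note (not part of the commit): with "include": ["src"], "outDir": "build/" and
   "sourceMap": true, running tsc from deps/greasyfork type-checks src/ and emits
   ES-module JavaScript plus source maps into build/. */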
|
||||
}
|
4844
deps/greasyfork/yarn.lock
vendored
Normal file
File diff suppressed because it is too large
1
deps/online.fxreader.nartes.books
vendored
Submodule
@@ -0,0 +1 @@
|
||||
Subproject commit 3c691ef68d8899edf328d5b06135c0d3b02e7940
|
@@ -7,7 +7,7 @@ services:
|
||||
volumes:
|
||||
- ./d1/:/app/d1/:ro
|
||||
- ./tmp/cache/:/app/tmp/cache/:ro
|
||||
restart: always
|
||||
restart: on-failure
|
||||
ssl-app:
|
||||
build:
|
||||
context: .
|
||||
@@ -16,36 +16,40 @@ services:
|
||||
- ./d1/:/app/d1/:ro
|
||||
- ./tmp/d1/:/app/tmp/d1/:ro
|
||||
- ./tmp/d1/letsencrypt:/etc/letsencrypt:rw
|
||||
restart: always
|
||||
restart: on-failure
|
||||
|
||||
cpanel:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./docker/cpanel/Dockerfile
|
||||
links:
|
||||
- app
|
||||
#links:
|
||||
# - app
|
||||
volumes:
|
||||
- ./d1/:/app/d1:ro
|
||||
- ./tmp/d1/:/app/tmp/d1/:ro
|
||||
restart: always
|
||||
restart: on-failure
|
||||
dynu:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./docker/dynu/Dockerfile
|
||||
profiles:
|
||||
- broken
|
||||
volumes:
|
||||
- ./d1/dynu_update.py:/app/d1/dynu_update.py:ro
|
||||
- ./tmp/cache/dynu.auth.json:/app/tmp/cache/dynu.auth.json:ro
|
||||
restart: always
|
||||
links:
|
||||
- ngrok
|
||||
restart: on-failure
|
||||
# links:
|
||||
# - ngrok
|
||||
ngrok:
|
||||
image: wernight/ngrok
|
||||
links:
|
||||
- app
|
||||
#links:
|
||||
# - app
|
||||
profiles:
|
||||
- broken
|
||||
command: ['ngrok', 'http', 'app:80']
|
||||
volumes:
|
||||
- ./tmp/cache/ngrok.yml:/home/ngrok/.ngrok2/ngrok.yml:ro
|
||||
restart: always
|
||||
restart: on-failure
|
||||
#forward:
|
||||
# build:
|
||||
# context: .
|
||||
|
@@ -4,7 +4,7 @@ RUN apk add python3
|
||||
RUN apk add tini
|
||||
RUN apk add bash curl
|
||||
RUN apk add py3-pip
|
||||
RUN pip3 install requests
|
||||
RUN pip3 install --break-system-packages requests
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
15
docker/js/Dockerfile
Normal file
@@ -0,0 +1,15 @@
|
||||
FROM node as base
|
||||
|
||||
ENV DEBIAN_FRONTEND noninteractive
|
||||
|
||||
RUN \
|
||||
apt-get update -yy && \
|
||||
apt-get install \
|
||||
tini zsh less tree \
|
||||
-yy
|
||||
RUN chsh -s /usr/bin/zsh
|
||||
WORKDIR /app/deps/greasyfork
|
||||
|
||||
ENTRYPOINT ["tini", "--"]
|
||||
CMD ["bash", "/app/docker/js/init.sh"]
|
||||
# CMD ["sleep", "999999999999999999"]
|
17
docker/js/docker-compose.yml
Normal file
@@ -0,0 +1,17 @@
|
||||
version: '3.7'
|
||||
services:
|
||||
js:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./docker/js/Dockerfile
|
||||
volumes:
|
||||
- ./deps/greasyfork:/app/deps/greasyfork:rw
|
||||
- ./tmp/cache/js/root-cache:/root/.cache:rw
|
||||
- ./tmp/cache/js/root-yarn:/root/.yarn:rw
|
||||
- ./docker/js:/app/docker/js:ro
|
||||
- ./tmp/cache/js:/app/tmp/cache/js:rw
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: 1.5
|
||||
memory: 1G
|
7
docker/js/init.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
corepack enable
|
||||
corepack install
|
||||
# yarn init -2
|
||||
ln -sf /app/docker/js/.zshrc ~/.zshrc
|
||||
ln -sf /app/tmp/cache/js/.histfile ~/.histfile
|
||||
export EDITOR=vim
|
||||
exec /usr/bin/zsh -l
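# Note (not part of the commit): this script is the container CMD from docker/js/Dockerfile;
# it enables corepack, links the zsh dotfiles and history into the mounted cache, and
# replaces itself with an interactive login zsh.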
|
@@ -7,6 +7,7 @@ RUN apk add nginx
|
||||
RUN apk add tini
|
||||
#RUN pip3 install requests certbot
|
||||
RUN apk add certbot
|
||||
RUN apk add nginx-mod-stream
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
@@ -1,29 +0,0 @@
|
||||
FROM python:latest
|
||||
|
||||
RUN pip3 install ipython jupyter
|
||||
RUN apt-get update -yy && apt-get install -yy zsh htop mc git
|
||||
RUN pip3 install jupyterlab-vim
|
||||
RUN pip3 install pyktok yt-dlp playwright==1.44.0 TikTokApi
|
||||
RUN pip3 install numpy pandas browser_cookie3 ipdb asgiref
|
||||
RUN python3 -m playwright install-deps
|
||||
RUN python3 -m playwright install
|
||||
RUN pip3 install tqdm
|
||||
RUN apt-get install -yy ffmpeg
|
||||
RUN pip3 install celery redis
|
||||
RUN pip3 install dataclasses-json
|
||||
RUN pip3 install rpdb
|
||||
RUN apt-get install -yy netcat-traditional
|
||||
RUN apt-get install -yy vim
|
||||
RUN apt-get install -yy tini
|
||||
RUN apt-get install -yy wkhtmltopdf graphviz
|
||||
RUN pip3 install pandoc
|
||||
RUN apt-get install -yy pandoc
|
||||
RUN apt-get install -yy texlive-xetex texlive-fonts-recommended texlive-plain-generic
|
||||
RUN pip3 install 'nbconvert[webpdf]'
|
||||
RUN pip3 install pickleshare
|
||||
RUN pip3 install networkx
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
ENTRYPOINT ["tini", "--", "bash", "docker/tiktok/entry.sh"]
|
||||
CMD ["zsh", "-l"]
|
@@ -1,59 +0,0 @@
|
||||
PROJECT_ROOT ?= ${PWD}
|
||||
|
||||
export PROJECT_ROOT
|
||||
|
||||
PORT ?= 8888
|
||||
TOKEN ?= $(shell pwgen -n 20 1)
|
||||
|
||||
c:
|
||||
cd ${PROJECT_ROOT} && \
|
||||
sudo docker-compose \
|
||||
-f docker/tiktok/docker-compose.yml $(ARGS)
|
||||
|
||||
build:
|
||||
$(MAKE) c ARGS="pull"
|
||||
$(MAKE) c ARGS="build --pull"
|
||||
|
||||
celery-up:
|
||||
$(MAKE) c ARGS="up -d redis celery"
|
||||
|
||||
celery-stop:
|
||||
$(MAKE) c ARGS="stop redis celery"
|
||||
|
||||
celery-cmd:
|
||||
$(MAKE) c ARGS="exec celery celery -A python.tasks.tiktok.celery ${ARGS}"
|
||||
|
||||
deploy:
|
||||
cd ${PROJECT_ROOT} && tar -cvf ${PROJECT_ROOT}/tmp/cache/tiktok/repo.tar \
|
||||
docker/tiktok \
|
||||
python/tasks/tiktok \
|
||||
tmp/cache/tiktok/notebooks/tiktok.ipynb \
|
||||
tmp/cache/tiktok/notebooks/*.pdf \
|
||||
.dockerignore \
|
||||
.gitignore
|
||||
|
||||
logs:
|
||||
$(MAKE) c ARGS="logs --tail=100 -f"
|
||||
|
||||
celery-restart:
|
||||
$(MAKE) c ARGS="restart celery"
|
||||
|
||||
run:
|
||||
cd ${PROJECT_ROOT} && \
|
||||
sudo docker-compose \
|
||||
-f docker/tiktok/docker-compose.yml \
|
||||
run \
|
||||
--use-aliases \
|
||||
--rm tiktok
|
||||
|
||||
jupyter:
|
||||
cd ${PROJECT_ROOT} && \
|
||||
sudo docker-compose \
|
||||
-f docker/tiktok/docker-compose.yml \
|
||||
run \
|
||||
-p 127.0.0.1:${PORT}:8888 \
|
||||
--rm tiktok \
|
||||
jupyter-lab \
|
||||
--allow-root \
|
||||
--ip=0.0.0.0 \
|
||||
--NotebookApp.token=${TOKEN}
|
@@ -1,28 +0,0 @@
|
||||
version: '3.7'
|
||||
services:
|
||||
redis:
|
||||
image: redis:latest
|
||||
volumes:
|
||||
- ../../tmp/cache/tiktok/redis/data:/data:rw
|
||||
tiktok: &tiktok
|
||||
links:
|
||||
- redis
|
||||
build:
|
||||
context: ../../
|
||||
dockerfile: ./docker/tiktok/Dockerfile
|
||||
volumes:
|
||||
- ./../../docker/tiktok:/app/docker/tiktok:ro
|
||||
- ./../../tmp/cache/tiktok:/app/tmp/cache/tiktok:rw
|
||||
- ./../../python/tasks/tiktok:/app/python/tasks/tiktok:ro
|
||||
celery:
|
||||
build:
|
||||
context: ../../
|
||||
dockerfile: ./docker/tiktok/Dockerfile
|
||||
depends_on:
|
||||
- redis
|
||||
volumes:
|
||||
- ./../../docker/tiktok:/app/docker/tiktok:ro
|
||||
- ./../../tmp/cache/tiktok:/app/tmp/cache/tiktok:rw
|
||||
- ./../../python/tasks/tiktok:/app/python/tasks/tiktok:ro
|
||||
command:
|
||||
- celery -A python.tasks.tiktok.celery worker -c 2
|
@@ -1,10 +0,0 @@
|
||||
ln -sf $PWD/docker/tiktok/.zshrc ~
|
||||
mkdir -p tmp/cache/tiktok/zsh
|
||||
mkdir -p tmp/cache/tiktok/ipython
|
||||
mkdir -p tmp/cache/tiktok/jupyter
|
||||
ln -sf $PWD/tmp/cache/tiktok/zsh/histfile ~/.histfile
|
||||
ln -sf $PWD/tmp/cache/tiktok/jupyter ~/.jupyter
|
||||
ln -sf $PWD/tmp/cache/tiktok/ipython ~/.ipython
|
||||
ipython3 profile create
|
||||
ln -sf $PWD/docker/tiktok/ipython_config.py ~/.ipython/profile_default/
|
||||
exec $@
|
@@ -1,72 +0,0 @@
|
||||
c.InteractiveShellApp.exec_lines = [
|
||||
'%autoreload 2',
|
||||
r'''
|
||||
def ipython_update_shortcuts():
|
||||
import IPython
|
||||
import prompt_toolkit.filters
|
||||
import prompt_toolkit.document
|
||||
import functools
|
||||
import tempfile
|
||||
import io
|
||||
import subprocess
|
||||
|
||||
def ipython_edit_in_vim(*args, pt_app):
|
||||
content = pt_app.app.current_buffer.document.text
|
||||
lines_count = lambda text: len(text.splitlines())
|
||||
|
||||
with tempfile.NamedTemporaryFile(
|
||||
suffix='.py',
|
||||
mode='w',
|
||||
) as f:
|
||||
with io.open(f.name, 'w') as f2:
|
||||
f2.write(content)
|
||||
f2.flush()
|
||||
|
||||
result = subprocess.call([
|
||||
'vim',
|
||||
'+%d' % lines_count(content),
|
||||
f.name,
|
||||
])
|
||||
|
||||
if result != 0:
|
||||
return
|
||||
|
||||
f.seek(0, io.SEEK_SET)
|
||||
|
||||
with io.open(f.name, 'r') as f2:
|
||||
new_content = f2.read()
|
||||
|
||||
pt_app.app.current_buffer.document = \
|
||||
prompt_toolkit.document.Document(
|
||||
new_content,
|
||||
cursor_position=len(new_content.rstrip()),
|
||||
)
|
||||
|
||||
t1 = IPython.get_ipython()
|
||||
t2 = t1.pt_app
|
||||
t3 = [o for o in t2.key_bindings.bindings if 'f2' in repr(o.keys).lower()]
|
||||
assert len(t3) == 1
|
||||
t4 = t3[0]
|
||||
t2.key_bindings.remove(t4.handler)
|
||||
t2.key_bindings.add(
|
||||
'\\', 'e', filter=~prompt_toolkit.filters.vi_insert_mode,
|
||||
)(
|
||||
functools.partial(
|
||||
ipython_edit_in_vim,
|
||||
pt_app=t2,
|
||||
)
|
||||
#t4.handler
|
||||
)
|
||||
''',
|
||||
'ipython_update_shortcuts()',
|
||||
]
|
||||
c.IPCompleter.use_jedi = False
|
||||
c.InteractiveShellApp.extensions = ['autoreload']
|
||||
c.InteractiveShell.history_length = 100 * 1000 * 1000
|
||||
c.InteractiveShell.history_load_length = 100 * 1000 * 1000
|
||||
#c.InteractiveShell.enable_history_search = False
|
||||
#c.InteractiveShell.autosuggestions_provider = None
|
||||
c.InteractiveShell.pdb = True
|
||||
c.TerminalInteractiveShell.editing_mode = 'vi'
|
||||
c.TerminalInteractiveShell.modal_cursor = False
|
||||
c.TerminalInteractiveShell.emacs_bindings_in_vi_insert_mode = False
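# Note (not part of the removed file): ipython_update_shortcuts() replaces IPython's
# default F2 "open in editor" binding with '\' followed by 'e' in vi normal mode,
# writes the current input buffer to a temporary file, opens it in Vim, and loads the
# edited text back into the prompt.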
|
156
dotfiles/.config/Code - OSS/User/keybindings.json
Normal file
@@ -0,0 +1,156 @@
|
||||
// Place your key bindings in this file to override the defaults
|
||||
[
|
||||
{
|
||||
"key": "alt+z",
|
||||
"command": "-editor.action.toggleWordWrap"
|
||||
},
|
||||
{
|
||||
"key": "alt+z",
|
||||
"command": "-workbench.action.terminal.sizeToContentWidth",
|
||||
"when": "terminalFocus && terminalHasBeenCreated && terminalIsOpen || terminalFocus && terminalIsOpen && terminalProcessSupported"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "workbench.action.toggleMaximizeEditorGroup",
|
||||
"when": "editorPartMaximizedEditorGroup || editorPartMultipleEditorGroups"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+k ctrl+m",
|
||||
"command": "-workbench.action.toggleMaximizeEditorGroup",
|
||||
"when": "editorPartMaximizedEditorGroup || editorPartMultipleEditorGroups"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "workbench.action.toggleMaximizedPanel",
|
||||
"when": "!editorTextFocus"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+p",
|
||||
"command": "-extension.vim_ctrl+p",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-p> && !inDebugRepl || vim.active && vim.use<C-p> && !inDebugRepl && vim.mode == 'CommandlineInProgress' || vim.active && vim.use<C-p> && !inDebugRepl && vim.mode == 'SearchInProgressMode'"
|
||||
},
|
||||
{
|
||||
"key": "alt+t",
|
||||
"command": "workbench.action.terminal.toggleTerminal",
|
||||
"when": "terminal.active"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+`",
|
||||
"command": "-workbench.action.terminal.toggleTerminal",
|
||||
"when": "terminal.active"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+e",
|
||||
"command": "-workbench.action.quickOpen"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+n",
|
||||
"command": "-extension.vim_ctrl+n",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-n> && !inDebugRepl || vim.active && vim.use<C-n> && !inDebugRepl && vim.mode == 'CommandlineInProgress' || vim.active && vim.use<C-n> && !inDebugRepl && vim.mode == 'SearchInProgressMode'"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+t",
|
||||
"command": "-extension.vim_ctrl+t",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-t> && !inDebugRepl"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+f",
|
||||
"command": "-extension.vim_ctrl+f",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-f> && !inDebugRepl && vim.mode != 'Insert'"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+f",
|
||||
"command": "-actions.find",
|
||||
"when": "editorFocus || editorIsOpen"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+f",
|
||||
"command": "workbench.action.findInFiles"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+shift+f",
|
||||
"command": "-workbench.action.findInFiles"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+g",
|
||||
"command": "-workbench.action.gotoLine"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+g",
|
||||
"command": "-workbench.action.terminal.goToRecentDirectory",
|
||||
"when": "terminalFocus && terminalHasBeenCreated || terminalFocus && terminalProcessSupported"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "-toggleSearchRegex",
|
||||
"when": "searchViewletFocus"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "-toggleFindRegex",
|
||||
"when": "editorFocus"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "-workbench.action.terminal.toggleFindRegex",
|
||||
"when": "terminalFindVisible && terminalHasBeenCreated || terminalFindVisible && terminalProcessSupported"
|
||||
},
|
||||
{
|
||||
"key": "alt+r",
|
||||
"command": "-toggleSearchEditorRegex",
|
||||
"when": "inSearchEditor && searchInputBoxFocus"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+/",
|
||||
"command": "-editor.action.accessibleViewAcceptInlineCompletion",
|
||||
"when": "accessibleViewIsShown && accessibleViewCurrentProviderId == 'inlineCompletions'"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+k ctrl+/",
|
||||
"command": "-editor.foldAllBlockComments",
|
||||
"when": "editorTextFocus && foldingEnabled"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+/",
|
||||
"command": "-toggleExplainMode",
|
||||
"when": "suggestWidgetVisible"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+/",
|
||||
"command": "-workbench.action.chat.attachContext",
|
||||
"when": "inChatInput && chatLocation == 'editing-session' || inChatInput && chatLocation == 'editor' || inChatInput && chatLocation == 'notebook' || inChatInput && chatLocation == 'panel' || inChatInput && chatLocation == 'terminal'"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+/",
|
||||
"command": "-workbench.action.terminal.sendSequence",
|
||||
"when": "terminalFocus"
|
||||
},
|
||||
{
|
||||
"key": "shift+alt+l",
|
||||
"command": "workbench.action.editorLayoutTwoRowsRight"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+b",
|
||||
"command": "-extension.vim_ctrl+b",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-b> && !inDebugRepl && vim.mode != 'Insert'"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+w",
|
||||
"command": "-workbench.action.closeActiveEditor"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+w",
|
||||
"command": "-workbench.action.closeGroup",
|
||||
"when": "activeEditorGroupEmpty && multipleEditorGroups"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+w",
|
||||
"command": "-extension.vim_ctrl+w",
|
||||
"when": "editorTextFocus && vim.active && vim.use<C-w> && !inDebugRepl"
|
||||
},
|
||||
{
|
||||
"key": "ctrl+w",
|
||||
"command": "workbench.action.closeActiveEditor",
|
||||
"when": "editorTextFocus"
|
||||
}
|
||||
]
|
151
dotfiles/.config/Code - OSS/User/settings.json
Normal file
@@ -0,0 +1,151 @@
|
||||
{
|
||||
"editor.wordWrap": "on",
|
||||
"editor.minimap.autohide": true,
|
||||
"editor.minimap.maxColumn": 80,
|
||||
"editor.minimap.size": "fit",
|
||||
"python.experiments.enabled": false,
|
||||
"debugpy.debugJustMyCode": false,
|
||||
"python.REPL.enableREPLSmartSend": false,
|
||||
"python.terminal.activateEnvironment": false,
|
||||
"python.testing.autoTestDiscoverOnSaveEnabled": false,
|
||||
"python.languageServer": "None",
|
||||
"typescript.surveys.enabled": false,
|
||||
"typescript.suggestionActions.enabled": false,
|
||||
"typescript.tsserver.enableRegionDiagnostics": false,
|
||||
"typescript.tsserver.maxTsServerMemory": 0.05,
|
||||
"typescript.tsserver.useSyntaxServer": "never",
|
||||
"typescript.tsserver.web.typeAcquisition.enabled": false,
|
||||
"typescript.validate.enable": false,
|
||||
"typescript.workspaceSymbols.excludeLibrarySymbols": false,
|
||||
"typescript.check.npmIsInstalled": false,
|
||||
"typescript.tsserver.web.projectWideIntellisense.enabled": false,
|
||||
"python.REPL.provideVariables": false,
|
||||
"git.openRepositoryInParentFolders": "never",
|
||||
"workbench.enableExperiments": false,
|
||||
"workbench.cloudChanges.continueOn": "off",
|
||||
"workbench.cloudChanges.autoResume": "off",
|
||||
"extensions.autoCheckUpdates": false,
|
||||
"update.mode": "none",
|
||||
"workbench.settings.enableNaturalLanguageSearch": false,
|
||||
"update.showReleaseNotes": false,
|
||||
"extensions.autoUpdate": false,
|
||||
"telemetry.telemetryLevel": "off",
|
||||
"json.schemaDownload.enable": false,
|
||||
"npm.fetchOnlinePackageInfo": false,
|
||||
"window.experimentalControlOverlay": false,
|
||||
"window.commandCenter": false,
|
||||
"window.confirmBeforeClose": "always",
|
||||
"window.dialogStyle": "custom",
|
||||
"window.titleBarStyle": "custom",
|
||||
"window.customTitleBarVisibility": "windowed",
|
||||
"window.enableMenuBarMnemonics": false,
|
||||
"window.menuBarVisibility": "compact",
|
||||
"issueReporter.experimental.auxWindow": false,
|
||||
"workbench.colorTheme": "Monokai",
|
||||
"workbench.preferredDarkColorTheme": "Monokai",
|
||||
"workbench.preferredHighContrastColorTheme": "Monokai",
|
||||
"workbench.preferredHighContrastLightColorTheme": "Monokai",
|
||||
"workbench.preferredLightColorTheme": "Monokai",
|
||||
"mesonbuild.downloadLanguageServer": false,
|
||||
// "vim.easymotion": true,
|
||||
// "vim.incsearch": true,
|
||||
"vim.useSystemClipboard": true,
|
||||
// "vim.useCtrlKeys": true,
|
||||
"vim.hlsearch": true,
|
||||
// "vim.insertModeKeyBindings": [
|
||||
// {
|
||||
// "before": ["j", "j"],
|
||||
// "after": ["<Esc>"]
|
||||
// }
|
||||
// ],
|
||||
"vim.normalModeKeyBindingsNonRecursive": [
|
||||
{
|
||||
"before": ["<leader>", "w"],
|
||||
"after": ["<C-w>"],
|
||||
// "after": ["d", "d"]
|
||||
},
|
||||
// {
|
||||
// "before": ["<C-n>"],
|
||||
// "commands": [":nohl"]
|
||||
// },
|
||||
// {
|
||||
// "before": ["K"],
|
||||
// "commands": ["lineBreakInsert"],
|
||||
// "silent": true
|
||||
// }
|
||||
],
|
||||
"vim.leader": "\\",
|
||||
// "vim.handleKeys": {
|
||||
// "<C-a>": false,
|
||||
// "<C-f>": false
|
||||
// },
|
||||
"extensions.experimental.affinity": {
|
||||
"vscodevim.vim": 1
|
||||
},
|
||||
"diffEditor.experimental.showMoves": true,
|
||||
"diffEditor.hideUnchangedRegions.enabled": true,
|
||||
"python.locator": "native",
|
||||
"python.testing.promptToConfigure": false,
|
||||
"typescript.format.enable": false,
|
||||
"typescript.format.indentSwitchCase": false,
|
||||
"typescript.preferences.renameMatchingJsxTags": false,
|
||||
"typescript.autoClosingTags": false,
|
||||
"typescript.format.insertSpaceAfterCommaDelimiter": false,
|
||||
"typescript.format.insertSpaceAfterKeywordsInControlFlowStatements": false,
|
||||
"typescript.format.insertSpaceAfterOpeningAndBeforeClosingEmptyBraces": false,
|
||||
"docker.enableDockerComposeLanguageService": false,
|
||||
"go.useLanguageServer": false,
|
||||
"search.maxResults": 128,
|
||||
"search.ripgrep.maxThreads": 1,
|
||||
"search.searchEditor.defaultNumberOfContextLines": 7,
|
||||
"search.searchOnType": false,
|
||||
"task.allowAutomaticTasks": "off",
|
||||
"task.autoDetect": "off",
|
||||
"task.quickOpen.detail": false,
|
||||
"task.reconnection": false,
|
||||
"javascript.autoClosingTags": false,
|
||||
"javascript.format.enable": false,
|
||||
"javascript.format.insertSpaceAfterCommaDelimiter": false,
|
||||
"javascript.format.insertSpaceAfterFunctionKeywordForAnonymousFunctions": false,
|
||||
"javascript.format.insertSpaceAfterKeywordsInControlFlowStatements": false,
|
||||
"javascript.format.insertSpaceAfterOpeningAndBeforeClosingEmptyBraces": false,
|
||||
"javascript.format.insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces": false,
|
||||
"javascript.format.insertSpaceAfterSemicolonInForStatements": false,
|
||||
"javascript.format.insertSpaceBeforeAndAfterBinaryOperators": false,
|
||||
"javascript.inlayHints.parameterNames.suppressWhenArgumentMatchesName": false,
|
||||
"javascript.inlayHints.variableTypes.suppressWhenTypeMatchesName": false,
|
||||
"javascript.preferences.renameMatchingJsxTags": false,
|
||||
"javascript.preferences.useAliasesForRenames": false,
|
||||
"javascript.suggest.autoImports": false,
|
||||
"javascript.suggest.classMemberSnippets.enabled": false,
|
||||
"javascript.suggest.completeJSDocs": false,
|
||||
"javascript.suggest.enabled": false,
|
||||
"javascript.suggest.includeAutomaticOptionalChainCompletions": false,
|
||||
"javascript.suggest.includeCompletionsForImportStatements": false,
|
||||
"javascript.suggest.jsdoc.generateReturns": false,
|
||||
"javascript.suggest.names": false,
|
||||
"javascript.suggest.paths": false,
|
||||
"javascript.suggestionActions.enabled": false,
|
||||
"javascript.updateImportsOnFileMove.enabled": "never",
|
||||
"javascript.validate.enable": false,
|
||||
"js/ts.implicitProjectConfig.strictFunctionTypes": false,
|
||||
"js/ts.implicitProjectConfig.strictNullChecks": false,
|
||||
"typescript.format.insertSpaceAfterFunctionKeywordForAnonymousFunctions": false,
|
||||
"typescript.format.insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces": false,
|
||||
"typescript.format.insertSpaceAfterSemicolonInForStatements": false,
|
||||
"typescript.format.insertSpaceBeforeAndAfterBinaryOperators": false,
|
||||
"typescript.inlayHints.parameterNames.suppressWhenArgumentMatchesName": false,
|
||||
"typescript.inlayHints.variableTypes.suppressWhenTypeMatchesName": false,
|
||||
"typescript.preferences.useAliasesForRenames": false,
|
||||
"typescript.reportStyleChecksAsWarnings": false,
|
||||
"typescript.suggest.autoImports": false,
|
||||
"typescript.suggest.classMemberSnippets.enabled": false,
|
||||
"typescript.suggest.completeJSDocs": false,
|
||||
"typescript.suggest.enabled": false,
|
||||
"typescript.suggest.includeAutomaticOptionalChainCompletions": false,
|
||||
"typescript.suggest.includeCompletionsForImportStatements": false,
|
||||
"typescript.suggest.jsdoc.generateReturns": false,
|
||||
"typescript.suggest.objectLiteralMethodSnippets.enabled": false,
|
||||
"typescript.suggest.paths": false,
|
||||
"typescript.tsc.autoDetect": "off",
|
||||
}
|
283
dotfiles/.config/katerc
Normal file
@@ -0,0 +1,283 @@
|
||||
[BuildConfig]
|
||||
AllowedCommandLines=
|
||||
AutoSwitchToOutput=true
|
||||
BlockedCommandLines=
|
||||
UseDiagnosticsOutput=true
|
||||
|
||||
[CTags]
|
||||
GlobalCommand=ctags -R --c++-types=+px --extra=+q --excmd=pattern --exclude=Makefile --exclude=.
|
||||
GlobalNumTargets=0
|
||||
|
||||
[General]
|
||||
Allow Tab Scrolling=true
|
||||
Auto Hide Tabs=false
|
||||
Close After Last=false
|
||||
Close documents with window=true
|
||||
Cycle To First Tab=true
|
||||
Days Meta Infos=30
|
||||
Diagnostics Limit=12000
|
||||
Diff Show Style=0
|
||||
Elide Tab Text=false
|
||||
Enable Context ToolView=false
|
||||
Expand Tabs=false
|
||||
Icon size for left and right sidebar buttons=32
|
||||
Last Session=calibre
|
||||
Modified Notification=false
|
||||
Mouse back button action=0
|
||||
Mouse forward button action=0
|
||||
Open New Tab To The Right Of Current=true
|
||||
Output History Limit=100
|
||||
Output With Date=false
|
||||
Quickopen Filter Mode=0
|
||||
Quickopen List Mode=true
|
||||
Recent File List Entry Count=10
|
||||
Restore Window Configuration=true
|
||||
SDI Mode=false
|
||||
Save Meta Infos=false
|
||||
Session Manager Sort Column=0
|
||||
Session Manager Sort Order=1
|
||||
Show Full Path in Title=true
|
||||
Show Menu Bar=true
|
||||
Show Status Bar=true
|
||||
Show Symbol In Navigation Bar=true
|
||||
Show Tab Bar=true
|
||||
Show Tabs Close Button=true
|
||||
Show Url Nav Bar=false
|
||||
Show output view for message type=1
|
||||
Show text for left and right sidebar=false
|
||||
Show welcome view for new window=true
|
||||
Startup Session=manual
|
||||
Stash new unsaved files=true
|
||||
Stash unsaved file changes=true
|
||||
Sync section size with tab positions=false
|
||||
Tab Double Click New Document=true
|
||||
Tab Middle Click Close Document=true
|
||||
Tabbar Tab Limit=0
|
||||
|
||||
[KDE]
|
||||
widgetStyle=Fusion
|
||||
|
||||
[KTextEditor Document]
|
||||
Allow End of Line Detection=true
|
||||
Auto Detect Indent=true
|
||||
Auto Reload If State Is In Version Control=true
|
||||
Auto Save=false
|
||||
Auto Save Interval=0
|
||||
Auto Save On Focus Out=false
|
||||
BOM=false
|
||||
Backup Local=false
|
||||
Backup Prefix=
|
||||
Backup Remote=false
|
||||
Backup Suffix=~
|
||||
Camel Cursor=true
|
||||
Encoding=UTF-8
|
||||
End of Line=0
|
||||
Indent On Backspace=true
|
||||
Indent On Tab=true
|
||||
Indent On Text Paste=true
|
||||
Indentation Mode=normal
|
||||
Indentation Width=2
|
||||
Keep Extra Spaces=false
|
||||
Line Length Limit=10000
|
||||
Newline at End of File=true
|
||||
On-The-Fly Spellcheck=false
|
||||
Overwrite Mode=false
|
||||
PageUp/PageDown Moves Cursor=false
|
||||
Remove Spaces=1
|
||||
ReplaceTabsDyn=false
|
||||
Show Spaces=2
|
||||
Show Tabs=true
|
||||
Smart Home=true
|
||||
Swap Directory=
|
||||
Swap File Mode=1
|
||||
Swap Sync Interval=15
|
||||
Tab Handling=2
|
||||
Tab Width=2
|
||||
Trailing Marker Size=1
|
||||
Use Editor Config=true
|
||||
Word Wrap=false
|
||||
Word Wrap Column=80
|
||||
|
||||
[KTextEditor Renderer]
|
||||
Animate Bracket Matching=false
|
||||
Auto Color Theme Selection=false
|
||||
Color Theme=Monokai2
|
||||
Line Height Multiplier=1
|
||||
Show Indentation Lines=false
|
||||
Show Whole Bracket Expression=false
|
||||
Text Font=Terminus,18,-1,5,400,0,0,0,0,0,0,0,0,0,0,1
|
||||
Text Font Features=
|
||||
Word Wrap Marker=true
|
||||
|
||||
[KTextEditor View]
|
||||
Allow Mark Menu=true
|
||||
Auto Brackets=true
|
||||
Auto Center Lines=0
|
||||
Auto Completion=true
|
||||
Auto Completion Preselect First Entry=true
|
||||
Backspace Remove Composed Characters=false
|
||||
Bookmark Menu Sorting=0
|
||||
Bracket Match Preview=true
|
||||
Chars To Enclose Selection=<>(){}[]'"
|
||||
Cycle Through Bookmarks=true
|
||||
Default Mark Type=1
|
||||
Dynamic Word Wrap=true
|
||||
Dynamic Word Wrap Align Indent=80
|
||||
Dynamic Word Wrap At Static Marker=false
|
||||
Dynamic Word Wrap Indicators=1
|
||||
Dynamic Wrap not at word boundaries=false
|
||||
Enable Accessibility=true
|
||||
Enable Tab completion=false
|
||||
Enter To Insert Completion=true
|
||||
Fold First Line=false
|
||||
Folding Bar=true
|
||||
Folding Preview=true
|
||||
Icon Bar=false
|
||||
Input Mode=1
|
||||
Keyword Completion=true
|
||||
Line Modification=true
|
||||
Line Numbers=true
|
||||
Max Clipboard History Entries=20
|
||||
Maximum Search History Size=100
|
||||
Mouse Paste At Cursor Position=false
|
||||
Multiple Cursor Modifier=134217728
|
||||
Persistent Selection=false
|
||||
Scroll Bar Marks=false
|
||||
Scroll Bar Mini Map All=true
|
||||
Scroll Bar Mini Map Width=60
|
||||
Scroll Bar MiniMap=false
|
||||
Scroll Bar Preview=true
|
||||
Scroll Past End=false
|
||||
Search/Replace Flags=140
|
||||
Shoe Line Ending Type in Statusbar=false
|
||||
Show Documentation With Completion=true
|
||||
Show File Encoding=true
|
||||
Show Folding Icons On Hover Only=true
|
||||
Show Line Count=true
|
||||
Show Scrollbars=0
|
||||
Show Statusbar Dictionary=true
|
||||
Show Statusbar Highlighting Mode=true
|
||||
Show Statusbar Input Mode=true
|
||||
Show Statusbar Line Column=true
|
||||
Show Statusbar Tab Settings=true
|
||||
Show Word Count=true
|
||||
Smart Copy Cut=true
|
||||
Statusbar Line Column Compact Mode=true
|
||||
Text Drag And Drop=true
|
||||
User Sets Of Chars To Enclose Selection=
|
||||
Vi Input Mode Steal Keys=false
|
||||
Vi Relative Line Numbers=false
|
||||
Word Completion=true
|
||||
Word Completion Minimal Word Length=3
|
||||
Word Completion Remove Tail=true
|
||||
|
||||
[Konsole]
|
||||
AutoSyncronizeMode=0
|
||||
KonsoleEscKeyBehaviour=false
|
||||
KonsoleEscKeyExceptions=vi,vim,nvim,git
|
||||
RemoveExtension=false
|
||||
RunPrefix=
|
||||
SetEditor=false
|
||||
|
||||
[MainWindow]
|
||||
1366x768 screen: Height=733
|
||||
1366x768 screen: Width=1362
|
||||
2 screens: Height=727
|
||||
2 screens: Width=679
|
||||
2048x1080 screen: Window-Maximized=true
|
||||
ToolBarsMovable=Disabled
|
||||
|
||||
[PluginSymbolViewer]
|
||||
ExpandTree=false
|
||||
SortSymbols=false
|
||||
TreeView=false
|
||||
ViewTypes=false
|
||||
|
||||
[Printing][HeaderFooter]
|
||||
FooterBackground=211,211,211
|
||||
FooterBackgroundEnabled=false
|
||||
FooterEnabled=true
|
||||
FooterForeground=0,0,0
|
||||
FooterFormatCenter=
|
||||
FooterFormatLeft=
|
||||
FooterFormatRight=%U
|
||||
HeaderBackground=211,211,211
|
||||
HeaderBackgroundEnabled=false
|
||||
HeaderEnabled=true
|
||||
HeaderFooterFont=monospace,10,-1,5,400,0,0,0,1,0,0,0,0,0,0,1
|
||||
HeaderForeground=0,0,0
|
||||
HeaderFormatCenter=%f
|
||||
HeaderFormatLeft=%y
|
||||
HeaderFormatRight=%p
|
||||
|
||||
[Printing][Layout]
|
||||
BackgroundColorEnabled=false
|
||||
BoxColor=invalid
|
||||
BoxEnabled=false
|
||||
BoxMargin=6
|
||||
BoxWidth=1
|
||||
ColorScheme=Printing
|
||||
Font=monospace,10,-1,5,400,0,0,0,1,0,0,0,0,0,0,1
|
||||
|
||||
[Printing][Text]
|
||||
DontPrintFoldedCode=true
|
||||
Legend=false
|
||||
LineNumbers=false
|
||||
|
||||
[Shortcut Schemes]
|
||||
Current Scheme=Default
|
||||
|
||||
[Shortcuts]
|
||||
kate_mdi_focus_toolview_kate_private_plugin_katekonsoleplugin=;\s
|
||||
kate_mdi_sidebar_visibility=;\s
|
||||
kate_mdi_toolview_kate_private_plugin_katekonsoleplugin=;\s
|
||||
kate_mdi_toolview_kateproject=Ctrl+B
|
||||
kate_mdi_toolview_kateprojectinfo=Alt+T
|
||||
|
||||
[debugplugin]
|
||||
DAPConfiguration=
|
||||
|
||||
[filetree]
|
||||
editShade=183,220,246
|
||||
listMode=false
|
||||
middleClickToClose=false
|
||||
shadingEnabled=true
|
||||
showCloseButton=false
|
||||
showFullPathOnRoots=false
|
||||
showToolbar=true
|
||||
sortRole=0
|
||||
viewShade=211,190,222
|
||||
|
||||
[lspclient]
|
||||
AllowedServerCommandLines=/usr/bin/clangd -log=error --background-index --limit-results=500 --completion-style=bundled,/usr/bin/pylsp --check-parent-process
|
||||
AutoHover=true
|
||||
AutoImport=true
|
||||
BlockedServerCommandLines=/usr/bin/python -m esbonio
|
||||
CompletionDocumentation=true
|
||||
CompletionParens=true
|
||||
Diagnostics=true
|
||||
FormatOnSave=false
|
||||
HighlightGoto=true
|
||||
IncrementalSync=true
|
||||
InlayHints=false
|
||||
Messages=true
|
||||
ReferencesDeclaration=true
|
||||
SemanticHighlighting=true
|
||||
ServerConfiguration=
|
||||
SignatureHelp=true
|
||||
SymbolDetails=false
|
||||
SymbolExpand=true
|
||||
SymbolSort=false
|
||||
SymbolTree=true
|
||||
TypeFormatting=false
|
||||
|
||||
[project]
|
||||
autoCMake=false
|
||||
autorepository=git
|
||||
gitStatusDoubleClick=3
|
||||
gitStatusSingleClick=0
|
||||
index=false
|
||||
indexDirectory=
|
||||
multiProjectCompletion=false
|
||||
multiProjectGoto=false
|
||||
restoreProjectsForSessions=false
|
157
dotfiles/.config/rofi/config.rasi
Normal file
@@ -0,0 +1,157 @@
|
||||
configuration {
|
||||
/* modes: "window,drun,run,ssh";*/
|
||||
/* font: "mono 12";*/
|
||||
/* location: 0;*/
|
||||
/* yoffset: 0;*/
|
||||
/* xoffset: 0;*/
|
||||
/* fixed-num-lines: true;*/
|
||||
/* show-icons: false;*/
|
||||
/* preview-cmd: ;*/
|
||||
/* terminal: "rofi-sensible-terminal";*/
|
||||
/* ssh-client: "ssh";*/
|
||||
/* ssh-command: "{terminal} -e {ssh-client} {host} [-p {port}]";*/
|
||||
/* run-command: "{cmd}";*/
|
||||
/* run-list-command: "";*/
|
||||
/* run-shell-command: "{terminal} -e {cmd}";*/
|
||||
/* window-command: "wmctrl -i -R {window}";*/
|
||||
/* window-match-fields: "all";*/
|
||||
/* icon-theme: ;*/
|
||||
/* drun-match-fields: "name,generic,exec,categories,keywords";*/
|
||||
/* drun-categories: ;*/
|
||||
/* drun-show-actions: false;*/
|
||||
/* drun-display-format: "{name} [<span weight='light' size='small'><i>({generic})</i></span>]";*/
|
||||
/* drun-url-launcher: "xdg-open";*/
|
||||
/* disable-history: false;*/
|
||||
/* ignored-prefixes: "";*/
|
||||
/* sort: false;*/
|
||||
/* sorting-method: "normal";*/
|
||||
/* case-sensitive: false;*/
|
||||
/* cycle: true;*/
|
||||
/* sidebar-mode: false;*/
|
||||
/* hover-select: false;*/
|
||||
/* eh: 1;*/
|
||||
/* auto-select: false;*/
|
||||
/* parse-hosts: false;*/
|
||||
/* parse-known-hosts: true;*/
|
||||
/* combi-modes: "window,run";*/
|
||||
/* matching: "normal";*/
|
||||
/* tokenize: true;*/
|
||||
/* m: "-5";*/
|
||||
/* filter: ;*/
|
||||
/* dpi: -1;*/
|
||||
/* threads: 0;*/
|
||||
/* scroll-method: 0;*/
|
||||
/* window-format: "{w} {c} {t}";*/
|
||||
/* click-to-exit: true;*/
|
||||
/* global-kb: false;*/
|
||||
max-history-size: 1000;
|
||||
/* combi-hide-mode-prefix: false;*/
|
||||
/* combi-display-format: "{mode} {text}";*/
|
||||
/* matching-negate-char: '-' /* unsupported */;*/
|
||||
/* cache-dir: ;*/
|
||||
/* window-thumbnail: false;*/
|
||||
/* drun-use-desktop-cache: false;*/
|
||||
/* drun-reload-desktop-cache: false;*/
|
||||
/* normalize-match: false;*/
|
||||
/* steal-focus: false;*/
|
||||
/* application-fallback-icon: ;*/
|
||||
/* refilter-timeout-limit: 300;*/
|
||||
/* xserver-i300-workaround: false;*/
|
||||
/* completer-mode: "filebrowser";*/
|
||||
/* pid: "/run/user/1000/rofi.pid";*/
|
||||
/* display-window: ;*/
|
||||
/* display-run: ;*/
|
||||
/* display-ssh: ;*/
|
||||
/* display-drun: ;*/
|
||||
/* display-combi: ;*/
|
||||
/* display-keys: ;*/
|
||||
/* display-filebrowser: ;*/
|
||||
/* display-recursivebrowser: ;*/
|
||||
/* kb-primary-paste: "Control+V,Shift+Insert";*/
|
||||
/* kb-secondary-paste: "Control+v,Insert";*/
|
||||
/* kb-secondary-copy: "Control+c";*/
|
||||
/* kb-clear-line: "Control+w";*/
|
||||
/* kb-move-front: "Control+a";*/
|
||||
/* kb-move-end: "Control+e";*/
|
||||
/* kb-move-word-back: "Alt+b,Control+Left";*/
|
||||
/* kb-move-word-forward: "Alt+f,Control+Right";*/
|
||||
/* kb-move-char-back: "Left,Control+b";*/
|
||||
/* kb-move-char-forward: "Right,Control+f";*/
|
||||
/* kb-remove-word-back: "Control+Alt+h,Control+BackSpace";*/
|
||||
/* kb-remove-word-forward: "Control+Alt+d";*/
|
||||
/* kb-remove-char-forward: "Delete,Control+d";*/
|
||||
/* kb-remove-char-back: "BackSpace,Shift+BackSpace,Control+h";*/
|
||||
/* kb-remove-to-eol: "Control+k";*/
|
||||
/* kb-remove-to-sol: "Control+u";*/
|
||||
/* kb-accept-entry: "Control+j,Control+m,Return,KP_Enter";*/
|
||||
/* kb-accept-custom: "Control+Return";*/
|
||||
/* kb-accept-custom-alt: "Control+Shift+Return";*/
|
||||
/* kb-accept-alt: "Shift+Return";*/
|
||||
/* kb-delete-entry: "Shift+Delete";*/
|
||||
/* kb-mode-next: "Shift+Right,Control+Tab";*/
|
||||
/* kb-mode-previous: "Shift+Left,Control+ISO_Left_Tab";*/
|
||||
/* kb-mode-complete: "Control+l";*/
|
||||
/* kb-row-left: "Control+Page_Up";*/
|
||||
/* kb-row-right: "Control+Page_Down";*/
|
||||
/* kb-row-up: "Up,Control+p";*/
|
||||
/* kb-row-down: "Down,Control+n";*/
|
||||
/* kb-row-tab: "";*/
|
||||
/* kb-element-next: "Tab";*/
|
||||
/* kb-element-prev: "ISO_Left_Tab";*/
|
||||
/* kb-page-prev: "Page_Up";*/
|
||||
/* kb-page-next: "Page_Down";*/
|
||||
/* kb-row-first: "Home,KP_Home";*/
|
||||
/* kb-row-last: "End,KP_End";*/
|
||||
/* kb-row-select: "Control+space";*/
|
||||
/* kb-screenshot: "Alt+S";*/
|
||||
/* kb-ellipsize: "Alt+period";*/
|
||||
/* kb-toggle-case-sensitivity: "grave,dead_grave";*/
|
||||
/* kb-toggle-sort: "Alt+grave";*/
|
||||
/* kb-cancel: "Escape,Control+g,Control+bracketleft";*/
|
||||
/* kb-custom-1: "Alt+1";*/
|
||||
/* kb-custom-2: "Alt+2";*/
|
||||
/* kb-custom-3: "Alt+3";*/
|
||||
/* kb-custom-4: "Alt+4";*/
|
||||
/* kb-custom-5: "Alt+5";*/
|
||||
/* kb-custom-6: "Alt+6";*/
|
||||
/* kb-custom-7: "Alt+7";*/
|
||||
/* kb-custom-8: "Alt+8";*/
|
||||
/* kb-custom-9: "Alt+9";*/
|
||||
/* kb-custom-10: "Alt+0";*/
|
||||
/* kb-custom-11: "Alt+exclam";*/
|
||||
/* kb-custom-12: "Alt+at";*/
|
||||
/* kb-custom-13: "Alt+numbersign";*/
|
||||
/* kb-custom-14: "Alt+dollar";*/
|
||||
/* kb-custom-15: "Alt+percent";*/
|
||||
/* kb-custom-16: "Alt+dead_circumflex";*/
|
||||
/* kb-custom-17: "Alt+ampersand";*/
|
||||
/* kb-custom-18: "Alt+asterisk";*/
|
||||
/* kb-custom-19: "Alt+parenleft";*/
|
||||
/* kb-select-1: "Super+1";*/
|
||||
/* kb-select-2: "Super+2";*/
|
||||
/* kb-select-3: "Super+3";*/
|
||||
/* kb-select-4: "Super+4";*/
|
||||
/* kb-select-5: "Super+5";*/
|
||||
/* kb-select-6: "Super+6";*/
|
||||
/* kb-select-7: "Super+7";*/
|
||||
/* kb-select-8: "Super+8";*/
|
||||
/* kb-select-9: "Super+9";*/
|
||||
/* kb-select-10: "Super+0";*/
|
||||
/* kb-entry-history-up: "Control+Up";*/
|
||||
/* kb-entry-history-down: "Control+Down";*/
|
||||
/* ml-row-left: "ScrollLeft";*/
|
||||
/* ml-row-right: "ScrollRight";*/
|
||||
/* ml-row-up: "ScrollUp";*/
|
||||
/* ml-row-down: "ScrollDown";*/
|
||||
/* me-select-entry: "MousePrimary";*/
|
||||
/* me-accept-entry: "MouseDPrimary";*/
|
||||
/* me-accept-custom: "Control+MouseDPrimary";*/
|
||||
timeout {
|
||||
action: "kb-cancel";
|
||||
delay: 0;
|
||||
}
|
||||
filebrowser {
|
||||
directories-first: true;
|
||||
sorting-method: "name";
|
||||
}
|
||||
}
|
@@ -3,3 +3,5 @@
|
||||
name = Siarhei Siniak
|
||||
[core]
|
||||
pager = less -x2
|
||||
[fetch]
|
||||
fsckObjects = true
|
||||
|
13
dotfiles/.local/bin/gnome-shortcuts-macbook-air
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/bash
|
||||
|
||||
commands gnome-shortcuts \
|
||||
-a \
|
||||
'powersave' \
|
||||
'commands desktop-services --cpufreq-action powersave' \
|
||||
'<Shift><Alt>1'
|
||||
|
||||
commands gnome-shortcuts \
|
||||
-a \
|
||||
'performance' \
|
||||
'commands desktop-services --cpufreq-action performance' \
|
||||
'<Shift><Alt>2'
|
2
dotfiles/.mime.types
Normal file
@@ -0,0 +1,2 @@
|
||||
# https://terminalroot.com/how-to-open-markdown-files-with-md-extension-in-firefox/
|
||||
text/plain txt asc text pm el c h cc hh cxx hxx f90 conf log yaml yml
|
@@ -1,4 +1,3 @@
|
||||
|
||||
#
|
||||
# Copy this to ~/.config/sway/config and edit it to your liking.
|
||||
#
|
||||
@@ -20,21 +19,10 @@ set $term weston-terminal
|
||||
# on the original workspace that the command was run on.
|
||||
#for_window [app_id="^launcher$"] floating enable, sticky enable, resize set 30 ppt 60 ppt, border pixel 10
|
||||
#set $menu exec $term --class=launcher -e /usr/bin/sway-launcher-desktop
|
||||
set $dmenu_path /usr/bin/bemenu-run
|
||||
#set $dmenu_path /usr/bin/bemenu-run
|
||||
set $dmenu_path rofi -modes run -show run
|
||||
set $menu $dmenu_path | xargs swaymsg exec --
|
||||
|
||||
### Output configuration
|
||||
#
|
||||
# Default wallpaper (more resolutions are available in /usr/share/backgrounds/sway/)
|
||||
#output * bg /usr/share/backgrounds/sway/Sway_Wallpaper_Blue_1920x1080.png fill
|
||||
#
|
||||
# Example configuration:
|
||||
#
|
||||
# output HDMI-A-1 resolution 1920x1080 position 1920,0
|
||||
#
|
||||
# You can get the names of your outputs by running: swaymsg -t get_outputs
|
||||
output HDMI-A-1 resolution 1920x1080 position 0,0
|
||||
output eDP-1 resolution 1366x748 position 277,1080
|
||||
|
||||
### Idle configuration
|
||||
#
|
||||
@@ -62,18 +50,27 @@ output eDP-1 resolution 1366x748 position 277,1080
|
||||
#
|
||||
# You can get the names of your inputs by running: swaymsg -t get_inputs
|
||||
# Read `man 5 sway-input` for more information about this section.
|
||||
input type:pointer {
|
||||
# tap enabled
|
||||
natural_scroll enabled
|
||||
}
|
||||
input type:touchpad {
|
||||
tap enabled
|
||||
natural_scroll enabled
|
||||
# natural_scroll disabled
|
||||
}
|
||||
|
||||
bindgesture swipe:4:left workspace next
|
||||
bindgesture swipe:4:right workspace prev
|
||||
|
||||
for_window [shell="xwayland"] title_format "[XWayland] %title"
|
||||
|
||||
#set $lock_cmd \
|
||||
# loginctl list-sessions | \
|
||||
# tail '-n' +2 | head -n -2 | awk '{print $1}' | \
|
||||
# xargs loginctl lock-session
|
||||
set $lock_cmd \
|
||||
loginctl list-sessions | \
|
||||
tail '-n' +2 | head -n -2 | awk '{print $1}' | \
|
||||
xargs loginctl lock-session
|
||||
zsh -c "commands loginctl --action lock-session"
|
||||
|
||||
bindgesture swipe:4:up exec $lock_cmd
|
||||
|
||||
@@ -82,35 +79,48 @@ bindgesture swipe:4:up exec $lock_cmd
|
||||
#
|
||||
# Basics:
|
||||
#
|
||||
bindsym $mod+Shift+l exec $lock_cmd
|
||||
bindsym Shift+$mod+l exec $lock_cmd
|
||||
|
||||
bindsym XF86KbdBrightnessDown \
|
||||
exec commands \
|
||||
bindsym --locked Shift+mod1+1 \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-decrease \
|
||||
--backlight-type keyboard
|
||||
--cpufreq-action performance
|
||||
|
||||
bindsym XF86KbdBrightnessUp \
|
||||
exec commands \
|
||||
bindsym --locked Shift+mod1+2 \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-increase \
|
||||
--backlight-type keyboard
|
||||
--cpufreq-action powersave
|
||||
|
||||
bindsym XF86MonBrightnessDown \
|
||||
exec commands \
|
||||
bindsym --locked XF86MonBrightnessDown \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-decrease \
|
||||
--backlight-type output
|
||||
|
||||
bindsym XF86MonBrightnessUp \
|
||||
exec commands \
|
||||
bindsym --locked XF86MonBrightnessUp \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-increase \
|
||||
--backlight-type output
|
||||
|
||||
bindsym XF86AudioPlay exec bash -c "commands media-play-pause"
|
||||
bindsym XF86AudioNext exec bash -c "commands media-next"
|
||||
bindsym XF86AudioPrev exec bash -c "commands media-prev"
|
||||
bindsym --locked XF86KbdBrightnessDown \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-decrease \
|
||||
--backlight-type keyboard
|
||||
|
||||
bindsym --locked XF86KbdBrightnessUp \
|
||||
exec ~/.local/bin/commands \
|
||||
desktop-services \
|
||||
--backlight-increase \
|
||||
--backlight-type keyboard
|
||||
|
||||
bindsym --locked XF86AudioPlay exec zsh -c "commands media-play-pause"
|
||||
bindsym --locked XF86AudioRaiseVolume exec zsh -c "commands media-raise-volume"
|
||||
bindsym --locked XF86AudioLowerVolume exec zsh -c "commands media-lower-volume"
|
||||
bindsym --locked XF86AudioMute exec zsh -c "commands media-toggle-volume"
|
||||
bindsym --locked XF86AudioNext exec zsh -c "commands media-next"
|
||||
bindsym --locked XF86AudioPrev exec zsh -c "commands media-prev"
|
||||
|
||||
|
||||
# Start a terminal
|
||||
@ -133,11 +143,19 @@ floating_modifier $mod normal
|
||||
bindsym $mod+Shift+c reload
|
||||
|
||||
# Exit sway (logs you out of your Wayland session)
|
||||
bindsym $mod+Shift+e exec swaynag -t warning -m 'You pressed the exit shortcut. Do you really want to exit sway? This will end your Wayland session.' -b 'Yes, exit sway' 'swaymsg exit'
|
||||
bindsym $mod+Shift+e \
|
||||
exec swaynag -t warning \
|
||||
-m 'You pressed the exit shortcut. Do you really want to exit sway? This will end your Wayland session.' \
|
||||
-b 'Yes, exit sway' \
|
||||
'swaymsg exit'
|
||||
#
|
||||
# Moving around:
|
||||
#
|
||||
# Move your focus around
|
||||
bindsym Shift+mod1+tab focus prev
|
||||
bindsym mod1+tab focus next
|
||||
#bindsym mod1+tab focus mode_toggle
|
||||
|
||||
bindsym $mod+$left focus left
|
||||
bindsym $mod+$down focus down
|
||||
bindsym $mod+$up focus up
|
||||
@ -201,6 +219,7 @@ bindsym $mod+v splitv
|
||||
#bindsym $mod+s layout stacking
|
||||
#bindsym $mod+w layout tabbed
|
||||
#bindsym $mod+e layout toggle split
|
||||
bindsym $mod+e layout toggle all
|
||||
|
||||
# Make the current focus fullscreen
|
||||
bindsym $mod+f fullscreen
|
||||
@ -212,7 +231,8 @@ bindsym $mod+p floating toggle
|
||||
## Swap focus between the tiling area and the floating area
|
||||
#bindsym $mod+space focus mode_toggle
|
||||
|
||||
bindsym --release Print exec bash -c "commands wl-screenshot"
|
||||
bindsym --release Print exec zsh -c "commands wl-screenshot"
|
||||
bindsym --release $mod+s exec zsh -c "commands wl-screenshot"
|
||||
|
||||
# Move focus to the parent container
|
||||
#bindsym $mod+a focus parent
|
||||
@ -253,6 +273,40 @@ mode "resize" {
|
||||
}
|
||||
bindsym $mod+r mode "resize"
|
||||
|
||||
set $black #000000
|
||||
set $red #ff0000
|
||||
set $green #00ff00
|
||||
set $blue #0000ff
|
||||
set $white #ffffff
|
||||
set $grey #757575
|
||||
set $pale_green #9df882
|
||||
set $pale_green2 #6baf54
|
||||
set $dark_green #1a7000
|
||||
set $pale_blue #7da9f9
|
||||
set $dark_blue #005ba6
|
||||
set $pale_greenblue #2da078
|
||||
set $pale_greenblue2 #66c473
|
||||
set $yellow #fffd0d
|
||||
set $dark_yellow #908f00
|
||||
|
||||
set $color1 #18ff00
|
||||
set $color2 #000000
|
||||
set $color3 #ff00ff
|
||||
set $color4 #ff0000
|
||||
set $color5 #00000000
|
||||
set $color6 #00000000
|
||||
set $color7 #00000000
|
||||
|
||||
set $border_focused $pale_green
|
||||
set $border_unfocused $color2
|
||||
set $background_focused $pale_greenblue2
|
||||
set $background_unfocused $grey
|
||||
set $child_border_focused $white
|
||||
set $child_border_unfocused $color2
|
||||
|
||||
set $bright_text $white
|
||||
set $dark_text $black
|
||||
|
||||
#
|
||||
# Status Bar:
|
||||
#
|
||||
@ -263,7 +317,7 @@ bar {
|
||||
# When the status_command prints a new line to stdout, swaybar updates.
|
||||
# The default just shows the current date and time.
|
||||
status_command while true; \
|
||||
do commands status --config ~/.config/commands-status.json; \
|
||||
do ~/.local/bin/commands status --config ~/.config/commands-status.json; \
|
||||
sleep 1; \
|
||||
done
|
||||
|
||||
@ -272,27 +326,34 @@ bar {
|
||||
height 16
|
||||
|
||||
colors {
|
||||
statusline #565656
|
||||
background #dfdfdf
|
||||
inactive_workspace #dfdfdf #dfdfdf #000000
|
||||
active_workspace #dfdfdf #efefef #000000
|
||||
focused_workspace #dfdfdf #efefef #000000
|
||||
statusline $bright_text
|
||||
background $pale_green2
|
||||
inactive_workspace $black $white $dark_text
|
||||
active_workspace $black $white $bright_text
|
||||
focused_workspace $dark_yellow $yellow $dark_text
|
||||
}
|
||||
}
|
||||
|
||||
client.focused #f3f3f3 #dfdfdf #565656 #f3f3f3 #f3f3f3
|
||||
client.unfocused #f3f3f3 #dfdfdf #565656 #f3f3f3 #f3f3f3
|
||||
|
||||
#client.focused #f3f3f3 #dfdfdfdd #565656 #f3f3f3 #f3f3f3
|
||||
client.focused $border_focused $background_focused $white $white $child_border_focused
|
||||
client.unfocused $border_unfocused $background_unfocused $white $white $child_border_unfocused
|
||||
|
||||
for_window [all] border 1
|
||||
|
||||
#font pango:Helvetica Neue 10
|
||||
font pango:Terminus 10
|
||||
font pango:Terminus 12
|
||||
|
||||
titlebar_padding 1 4
|
||||
titlebar_padding 32 1
|
||||
titlebar_border_thickness 1
|
||||
title_align center
|
||||
|
||||
#for_window [class=".*"] title_format "<b>%title</b>"
|
||||
|
||||
for_window [class="^firefox$"] floating enable
|
||||
|
||||
for_window [all] opacity set 0.95
|
||||
|
||||
input * {
|
||||
xkb_layout "us,ru"
|
||||
xkb_options "grp:win_space_toggle"
|
||||
@ -300,3 +361,4 @@ input * {
|
||||
input type:keyboard xkb_model "pc101"
|
||||
|
||||
include /etc/sway/config.d/*
|
||||
include ~/.sway/config.d/*
|
||||
|
20
dotfiles/.sway/config.d/macbook-air.conf
Normal file
@ -0,0 +1,20 @@
|
||||
### Output configuration
|
||||
#
|
||||
# Default wallpaper (more resolutions are available in /usr/share/backgrounds/sway/)
|
||||
#output * bg /usr/share/backgrounds/sway/Sway_Wallpaper_Blue_1920x1080.png fill
|
||||
#
|
||||
# Example configuration:
|
||||
#
|
||||
# output HDMI-A-1 resolution 1920x1080 position 1920,0
|
||||
#
|
||||
# You can get the names of your outputs by running: swaymsg -t get_outputs
|
||||
#2560 x 1440
|
||||
output 'Dell Inc. DELL P2418D MY3ND8220WKT' resolution 1920x1080 position 0,0
|
||||
#output 'Dell Inc. DELL P2418D MY3ND8220WKT' mode resolution 2560x1440 position 0,0
|
||||
#output HDMI-A-1 resolution 1920x1080 transform 90 position 0,0
|
||||
output 'LG Electronics LG FHD 403TOAG3C208 ' resolution 1920x1080 transform 90 position 0,0
|
||||
#output eDP-1 resolution 1366x748 position 277,1080
|
||||
#output eDP-1 resolution 1366x748 disable power off position 277,1080
|
||||
output 'Apple Computer Inc Color LCD Unknown' \
|
||||
resolution 1366x748 enable power on position 277,1080
|
||||
bindsym --locked $mod+u output 'Apple Computer Inc Color LCD Unknown' toggle
|
@ -1,2 +1,6 @@
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/pci0000:00/0000:00:1b.0/hdaudioC0D0/leds/hda::mute", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/platform/applesmc.768/leds/smc::kbd_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", DEVPATH=="/devices/platform/applesmc.768", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/fan1_manual /sys$devpath/fan1_output"
|
||||
ACTION=="add|change", DEVPATH=="/class/backlight/intel_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", DEVPATH=="/devices/system/cpu/", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/cpufreq/scaling_governor"
|
||||
ACTION=="add|change", KERNEL=="cpu[0-9]", SUBSYSTEM=="cpu", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/cpufreq/scaling_governor"
|
||||
|
338
m.py
Executable file
@ -0,0 +1,338 @@
|
||||
#!/usr/bin/env python3
|
||||
import glob
|
||||
import io
|
||||
import tempfile
|
||||
import dataclasses
|
||||
import pathlib
|
||||
import sys
|
||||
import subprocess
|
||||
import os
|
||||
import logging
|
||||
|
||||
|
||||
from typing import (Optional, Any,)
|
||||
from typing_extensions import (
|
||||
Self, BinaryIO,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def toml_load(f: BinaryIO) -> Any:
|
||||
try:
|
||||
import tomllib
|
||||
return tomllib.load(f)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
import tomli
|
||||
return tomli.load(f)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
@dataclasses.dataclass
|
||||
class PyProject:
|
||||
path: pathlib.Path
|
||||
dependencies: dict[str, list[str]]
|
||||
early_features: Optional[list[str]] = None
|
||||
pip_find_links: Optional[list[pathlib.Path]] = None
|
||||
runtime_libdirs: Optional[list[pathlib.Path]] = None
|
||||
runtime_preload: Optional[list[pathlib.Path]] = None
|
||||
requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda : dict())
|
||||
|
||||
def pyproject_load(
|
||||
d: pathlib.Path,
|
||||
) -> PyProject:
|
||||
with io.open(d, 'rb') as f:
|
||||
content = toml_load(f)
|
||||
|
||||
assert isinstance(content, dict)
|
||||
|
||||
dependencies : dict[str, list[str]] = dict()
|
||||
|
||||
dependencies['default'] = content['project']['dependencies']
|
||||
|
||||
if (
|
||||
'optional-dependencies' in content['project']
|
||||
):
|
||||
assert isinstance(
|
||||
content['project']['optional-dependencies'],
|
||||
dict
|
||||
)
|
||||
|
||||
for k, v in content['project']['optional-dependencies'].items():
|
||||
assert isinstance(v, list)
|
||||
assert isinstance(k, str)
|
||||
|
||||
dependencies[k] = v
|
||||
|
||||
|
||||
res = PyProject(
|
||||
path=d,
|
||||
dependencies=dependencies,
|
||||
)
|
||||
|
||||
tool_name = 'online.fxreader.pr34'.replace('.', '-')
|
||||
|
||||
if (
|
||||
'tool' in content and
|
||||
isinstance(
|
||||
content['tool'], dict
|
||||
) and
|
||||
tool_name in content['tool'] and
|
||||
isinstance(
|
||||
content['tool'][tool_name],
|
||||
dict
|
||||
)
|
||||
):
|
||||
if 'early_features' in content['tool'][tool_name]:
|
||||
res.early_features = content['tool'][tool_name]['early_features']
|
||||
|
||||
if 'pip_find_links' in content['tool'][tool_name]:
|
||||
res.pip_find_links = [
|
||||
d.parent / pathlib.Path(o)
|
||||
for o in content['tool'][tool_name]['pip_find_links']
|
||||
]
|
||||
|
||||
if 'runtime_libdirs' in content['tool'][tool_name]:
|
||||
res.runtime_libdirs = [
|
||||
d.parent / pathlib.Path(o)
|
||||
# pathlib.Path(o)
|
||||
for o in content['tool'][tool_name]['runtime_libdirs']
|
||||
]
|
||||
|
||||
if 'runtime_preload' in content['tool'][tool_name]:
|
||||
res.runtime_preload = [
|
||||
d.parent / pathlib.Path(o)
|
||||
# pathlib.Path(o)
|
||||
for o in content['tool'][tool_name]['runtime_preload']
|
||||
]
|
||||
|
||||
if 'requirements' in content['tool'][tool_name]:
|
||||
assert isinstance(content['tool'][tool_name]['requirements'], dict)
|
||||
|
||||
res.requirements = {
|
||||
k : d.parent / pathlib.Path(v)
|
||||
# pathlib.Path(o)
|
||||
for k, v in content['tool'][tool_name]['requirements'].items()
|
||||
}
|
||||
|
||||
return res
|
||||
|
||||
@dataclasses.dataclass
|
||||
class BootstrapSettings:
|
||||
env_path: pathlib.Path
|
||||
python_path: pathlib.Path
|
||||
base_dir: pathlib.Path
|
||||
python_version: Optional[str] = dataclasses.field(
|
||||
default_factory=lambda : os.environ.get(
|
||||
'PYTHON_VERSION',
|
||||
'%d.%d' % (
|
||||
sys.version_info.major,
|
||||
sys.version_info.minor,
|
||||
),
|
||||
).strip()
|
||||
)
|
||||
uv_args: list[str] = dataclasses.field(
|
||||
default_factory=lambda : os.environ.get(
|
||||
'UV_ARGS',
|
||||
'--offline',
|
||||
).split(),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def get(
|
||||
cls,
|
||||
base_dir: Optional[pathlib.Path] = None,
|
||||
) -> Self:
|
||||
if base_dir is None:
|
||||
base_dir = pathlib.Path.cwd()
|
||||
|
||||
env_path = base_dir / '.venv'
|
||||
python_path = env_path / 'bin' / 'python3'
|
||||
|
||||
return cls(
|
||||
base_dir=base_dir,
|
||||
env_path=env_path,
|
||||
python_path=python_path,
|
||||
)
|
||||
|
||||
def env_bootstrap(
|
||||
bootstrap_settings: BootstrapSettings,
|
||||
pyproject: PyProject,
|
||||
) -> None:
|
||||
pip_find_links : list[pathlib.Path] = []
|
||||
|
||||
if not pyproject.pip_find_links is None:
|
||||
pip_find_links.extend(pyproject.pip_find_links)
|
||||
|
||||
pip_find_links_args = sum([
|
||||
['-f', str(o),]
|
||||
for o in pip_find_links
|
||||
], [])
|
||||
|
||||
features : list[str] = []
|
||||
|
||||
if pyproject.early_features:
|
||||
features.extend(pyproject.early_features)
|
||||
|
||||
requirements_python_version: Optional[str] = None
|
||||
if not bootstrap_settings.python_version is None:
|
||||
requirements_python_version = bootstrap_settings.python_version.replace('.', '_')
|
||||
|
||||
|
||||
requirements_name = '_'.join(sorted(features))
|
||||
|
||||
if requirements_python_version:
|
||||
requirements_name += '_' + requirements_python_version
|
||||
|
||||
requirements_path : Optional[pathlib.Path] = None
|
||||
|
||||
if requirements_name in pyproject.requirements:
|
||||
requirements_path = pyproject.requirements[requirements_name]
|
||||
else:
|
||||
requirements_path = pyproject.path.parent / 'requirements.txt'
|
||||
|
||||
requirements_in : list[str] = []
|
||||
|
||||
requirements_in.extend([
|
||||
'uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'
|
||||
])
|
||||
|
||||
if pyproject.early_features:
|
||||
early_dependencies = sum([
|
||||
pyproject.dependencies[o]
|
||||
for o in pyproject.early_features
|
||||
], [])
|
||||
|
||||
logger.info(dict(
|
||||
early_dependencies=early_dependencies,
|
||||
))
|
||||
|
||||
requirements_in.extend(early_dependencies)
|
||||
# if len(early_dependencies) > 0:
|
||||
# subprocess.check_call([
|
||||
# bootstrap_settings.python_path,
|
||||
# '-m',
|
||||
# 'uv', 'pip', 'install',
|
||||
# *pip_find_links_args,
|
||||
# # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'),
|
||||
# *bootstrap_settings.uv_args,
|
||||
# *early_dependencies,
|
||||
# ])
|
||||
|
||||
if not requirements_path.exists():
|
||||
with tempfile.NamedTemporaryFile(
|
||||
mode='w',
|
||||
prefix='requirements',
|
||||
suffix='.in',
|
||||
) as f:
|
||||
f.write(
|
||||
'\n'.join(requirements_in)
|
||||
)
|
||||
f.flush()
|
||||
|
||||
subprocess.check_call([
|
||||
'uv',
|
||||
'pip',
|
||||
'compile',
|
||||
'--generate-hashes',
|
||||
*pip_find_links_args,
|
||||
# '-p',
|
||||
# bootstrap_settings.python_path,
|
||||
*bootstrap_settings.uv_args,
|
||||
'-o', str(requirements_path),
|
||||
f.name,
|
||||
])
|
||||
|
||||
uv_python_version: list[str] = []
|
||||
|
||||
if not bootstrap_settings.python_version is None:
|
||||
uv_python_version.extend([
|
||||
'-p', bootstrap_settings.python_version,
|
||||
])
|
||||
|
||||
subprocess.check_call([
|
||||
'uv', 'venv',
|
||||
*uv_python_version,
|
||||
*pip_find_links_args,
|
||||
# '--seed',
|
||||
*bootstrap_settings.uv_args,
|
||||
str(bootstrap_settings.env_path)
|
||||
])
|
||||
|
||||
subprocess.check_call([
|
||||
'uv',
|
||||
'pip',
|
||||
'install',
|
||||
*pip_find_links_args,
|
||||
'-p',
|
||||
bootstrap_settings.python_path,
|
||||
'--require-hashes',
|
||||
*bootstrap_settings.uv_args,
|
||||
'-r', str(requirements_path),
|
||||
])
|
||||
|
||||
|
||||
def paths_equal(
|
||||
a: pathlib.Path | str,
|
||||
b: pathlib.Path | str
|
||||
) -> bool:
|
||||
return (
|
||||
os.path.abspath(str(a)) ==
|
||||
os.path.abspath(str(b))
|
||||
)
|
||||
|
||||
def run(
|
||||
d: Optional[pathlib.Path] = None,
|
||||
cli_path: Optional[pathlib.Path] = None,
|
||||
) -> None:
|
||||
if cli_path is None:
|
||||
cli_path = pathlib.Path(__file__).parent / 'cli.py'
|
||||
|
||||
if d is None:
|
||||
d = pathlib.Path(__file__).parent / 'pyproject.toml'
|
||||
|
||||
bootstrap_settings = BootstrapSettings.get()
|
||||
|
||||
pyproject : PyProject = pyproject_load(
|
||||
d
|
||||
)
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
if not bootstrap_settings.env_path.exists():
|
||||
env_bootstrap(
|
||||
bootstrap_settings=bootstrap_settings,
|
||||
pyproject=pyproject,
|
||||
)
|
||||
|
||||
logger.info([sys.executable, sys.argv, bootstrap_settings.python_path])
|
||||
|
||||
if not paths_equal(sys.executable, bootstrap_settings.python_path):
|
||||
os.execv(
|
||||
str(bootstrap_settings.python_path),
|
||||
[
|
||||
str(bootstrap_settings.python_path),
|
||||
*sys.argv,
|
||||
]
|
||||
)
|
||||
|
||||
os.execv(
|
||||
str(bootstrap_settings.python_path),
|
||||
[
|
||||
str(bootstrap_settings.python_path),
|
||||
str(
|
||||
cli_path
|
||||
),
|
||||
*sys.argv[1:],
|
||||
]
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
run(
|
||||
d=pathlib.Path(__file__).parent / 'python' / 'pyproject.toml',
|
||||
cli_path=pathlib.Path(__file__).parent / 'python' / 'cli.py',
|
||||
)
|
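For orientation, a small hedged sketch of how the bootstrap above consumes a project file: `pyproject_load` reads `[project.dependencies]`, `[project.optional-dependencies]`, and the `[tool.online-fxreader-pr34]` table into the `PyProject` dataclass. The path and the printed fields are illustrative only, and `typing_extensions` must be importable.

```python
# Hedged usage sketch for m.py's pyproject_load(); run from the repository root.
import pathlib

from m import pyproject_load  # assumes m.py (repo root) is on sys.path

pyproject = pyproject_load(pathlib.Path('python') / 'pyproject.toml')

print(sorted(pyproject.dependencies))  # 'default' plus any optional extras
print(pyproject.early_features)        # None unless [tool.online-fxreader-pr34] sets it
print(pyproject.requirements)          # feature key -> resolved requirements path
```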
6
mypy-stubs/Cython/Build/Inline.pyi
Normal file
@ -0,0 +1,6 @@
|
||||
import distutils.command
|
||||
from typing import (Any,)
|
||||
|
||||
def _get_build_extension() -> distutils.command.build_ext: ...
|
||||
|
||||
def load_dynamic(name: str, path: str) -> Any: ...
|
17
mypy-stubs/Cython/Build/__init__.pyi
Normal file
@ -0,0 +1,17 @@
|
||||
import setuptools.extension
|
||||
|
||||
from typing import (Iterable,)
|
||||
|
||||
def cythonize(
|
||||
module_list: str | Iterable[str]
|
||||
#module_list,
|
||||
#exclude=None,
|
||||
#nthreads=0,
|
||||
#aliases=None,
|
||||
#quiet=False,
|
||||
#force=None,
|
||||
#language=None,
|
||||
#exclude_failures=False,
|
||||
#show_all_warnings=False,
|
||||
#**options
|
||||
) -> list[setuptools.extension.Extension]: ...
|
7
mypy-stubs/cython/__init__.pyi
Normal file
@ -0,0 +1,7 @@
|
||||
from typing import (Type, Any, Self)
|
||||
|
||||
class NoGIL:
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, exc_class: Type[Exception], exc: Exception, tb: Any) -> None: ...
|
||||
|
||||
nogil : NoGIL = NoGIL()
|
14
mypy-stubs/distutils/command/__init__.pyi
Normal file
@ -0,0 +1,14 @@
|
||||
import setuptools.extension
|
||||
import pathlib
|
||||
|
||||
class build_ext:
|
||||
extensions : list[setuptools.extension.Extension]
|
||||
#build_temp : pathlib.Path
|
||||
#build_lib: pathlib.Path
|
||||
build_temp: str
|
||||
build_lib: str
|
||||
|
||||
def run(self) -> None:
|
||||
...
|
||||
|
||||
...
|
8
mypy-stubs/marisa-trie-types/marisa_trie/__init__.pyi
Normal file
@ -0,0 +1,8 @@
|
||||
from typing import (Iterable,)
|
||||
|
||||
class Trie:
|
||||
def __init__(self, entries: Iterable[str]) -> None: ...
|
||||
|
||||
def keys(self, entry: str) -> list[str]: ...
|
||||
|
||||
def __contains__(self, entry: str) -> bool: ...
|
18
mypy-stubs/mypyc/build/__init__.pyi
Normal file
@ -0,0 +1,18 @@
|
||||
import setuptools.extension
|
||||
|
||||
from typing import (Any, Iterable,)
|
||||
|
||||
def mypycify(
|
||||
paths: 'list[str]',
|
||||
*,
|
||||
only_compile_paths: 'Iterable[str] | None' = None,
|
||||
verbose: 'bool' = False,
|
||||
opt_level: 'str' = '3',
|
||||
debug_level: 'str' = '1',
|
||||
strip_asserts: 'bool' = False,
|
||||
multi_file: 'bool' = False,
|
||||
separate: 'bool | list[tuple[list[str], str | None]]' = False,
|
||||
skip_cgen_input: 'Any | None' = None,
|
||||
target_dir: 'str | None' = None,
|
||||
include_runtime_files: 'bool | None' = None,
|
||||
) -> 'list[setuptools.extension.Extension]': ...
|
8
mypy-stubs/tqdm/__init__.pyi
Normal file
@ -0,0 +1,8 @@
|
||||
from typing import (Self, Any)
|
||||
|
||||
class tqdm:
|
||||
def __enter__(self) -> Self: ...
|
||||
def __exit__(self, args: Any) -> None: ...
|
||||
|
||||
def update(self, delta: int) -> None: ...
|
||||
def set_description(self, description: str) -> None: ...
|
7
mypy-stubs/types-debugpy/debugpy/__init__.pyi
Normal file
@ -0,0 +1,7 @@
|
||||
def listen(
|
||||
addr: tuple[str, int],
|
||||
) -> None: ...
|
||||
|
||||
def wait_for_client() -> None: ...
|
||||
|
||||
def breakpoint() -> None: ...
|
6
platform_dotfiles/macbook_air_2012/etc/systemd/system/online.fxreader.pr34.udev@.service
Normal file
@ -0,0 +1,6 @@
|
||||
[Unit]
|
||||
Description=udev scripts
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/online-fxreader-pr34-udev --device=%I
|
@ -0,0 +1,11 @@
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/pci0000:00/0000:00:1b.0/hdaudioC0D0/leds/hda::mute", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/platform/applesmc.768/leds/smc::kbd_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
# udevadm info --attribute-walk --path=/sys/devices/platform/applesmc.768/
|
||||
# udevadm trigger --action=add --verbose --parent-match /devices/platform/applesmc.768/
|
||||
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="ls -allh /sys$devpath/", OPTIONS="log_level=debug"
|
||||
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="/usr/bin/ls -allh /sys$devpath/", OPTIONS="log_level=debug"
|
||||
ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
||||
#KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", MODE="0660", TAG+="uaccess", OPTIONS="log_level=debug", OPTIONS+="watch"
|
||||
ACTION=="add|change", DEVPATH=="/class/backlight/intel_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", KERNEL=="cpu0", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
||||
ACTION=="add|change", KERNEL=="cpu[0-9]", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
103
platform_dotfiles/macbook_air_2012/usr/local/bin/online-fxreader-pr34-udev
Executable file
@ -0,0 +1,103 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# vi: filetype=python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import os
|
||||
import subprocess
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from typing import (Any,)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def run() -> None:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'--device',
|
||||
)
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
DEVICES : dict[str, Any] = dict(
|
||||
applesmc=dict(
|
||||
devpath='sys/devices/platform/applesmc.768',
|
||||
node='/sys/devices/platform/applesmc.768/fan1_manual',
|
||||
cmd=r'''
|
||||
chown root:fan /sys/devices/platform/applesmc.768/fan1_*
|
||||
chmod g+w /sys/devices/platform/applesmc.768/fan1_*
|
||||
''',
|
||||
),
|
||||
intel_pstate=dict(
|
||||
devpath=r'/?sys/devices/system/cpu/cpu0',
|
||||
node='/sys/devices/system/cpu/intel_pstate/no_turbo',
|
||||
cmd=r'''
|
||||
chown root:fan /sys/devices/system/cpu/intel_pstate/no_turbo
|
||||
chown root:fan /sys/devices/system/cpu/intel_pstate/max_perf_pct
|
||||
#chown root:fan /sys/devices/system/cpu/intel_pstate/status
|
||||
chmod g+w /sys/devices/system/cpu/intel_pstate/no_turbo
|
||||
chmod g+w /sys/devices/system/cpu/intel_pstate/max_perf_pct
|
||||
#chmod g+w /sys/devices/system/cpu/intel_pstate/status
|
||||
echo passive > /sys/devices/system/cpu/intel_pstate/status
|
||||
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
|
||||
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
|
||||
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
|
||||
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
|
||||
''',
|
||||
),
|
||||
#governor=dict(
|
||||
# devpath=r'/?sys/devices/system/cpu/cpu(\d+)',
|
||||
# node=r'/sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor',
|
||||
# cmd=r'''
|
||||
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
|
||||
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
|
||||
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
|
||||
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
|
||||
# ''',
|
||||
#),
|
||||
)
|
||||
|
||||
processed : int = 0
|
||||
|
||||
logger.info(dict(device=options.device))
|
||||
|
||||
for k, v in DEVICES.items():
|
||||
devpath = re.compile(v['devpath'])
|
||||
|
||||
devpath_m = devpath.match(options.device)
|
||||
|
||||
if devpath_m is None:
|
||||
continue
|
||||
|
||||
node_2 = v['node'].format(*devpath_m.groups())
|
||||
|
||||
# logger.info(dict(devpath_m=devpath_m, node=node_2))
|
||||
|
||||
while not os.path.exists(node_2):
|
||||
#continue
|
||||
time.sleep(1)
|
||||
|
||||
cmd_2 = v['cmd'].format(*devpath_m.groups())
|
||||
|
||||
subprocess.check_call(cmd_2, shell=True)
|
||||
|
||||
logger.info(dict(
|
||||
devpath_m=devpath_m,
|
||||
node_2=node_2,
|
||||
cmd_2=cmd_2,
|
||||
msg='processed',
|
||||
label=k,
|
||||
))
|
||||
|
||||
processed += 1
|
||||
|
||||
if processed == 0:
|
||||
raise NotImplementedError
|
||||
|
||||
if __name__ == '__main__':
|
||||
run()
|
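A hedged sketch of exercising the helper above by hand (outside of udev/systemd): the `--device` value is matched against the regular expressions in `DEVICES`, so passing the `applesmc` devpath from that dict should select the corresponding `chown`/`chmod` commands. Root privileges and the applesmc platform device are assumed.

```python
# Manual smoke test of the udev helper above (sketch; needs root and applesmc).
import subprocess

subprocess.check_call([
    '/usr/local/bin/online-fxreader-pr34-udev',
    # matched against DEVICES['applesmc']['devpath'] inside the script
    '--device', 'sys/devices/platform/applesmc.768',
])
```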
6
platform_dotfiles/macbook_air_2018/etc/systemd/system/online.fxreader.pr34.udev@.service
Normal file
@ -0,0 +1,6 @@
|
||||
[Unit]
|
||||
Description=udev scripts
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/local/bin/online-fxreader-pr34-udev --device=%I
|
@ -0,0 +1,26 @@
|
||||
[Unit]
|
||||
Description=Disable and Re-Enable Apple BCE Module (and Wi-Fi)
|
||||
Before=sleep.target
|
||||
Before=hibernate.target
|
||||
StopWhenUnneeded=yes
|
||||
|
||||
[Service]
|
||||
User=root
|
||||
Type=oneshot
|
||||
RemainAfterExit=yes
|
||||
|
||||
ExecStart=/usr/local/bin/online-fxreader-pr34-suspend-fix-t2 disable_apple_bce
|
||||
#ExecStart=/usr/bin/modprobe -r apple_bce
|
||||
#ExecStart=/usr/bin/modprobe -r brcmfmac_wcc
|
||||
#ExecStart=/usr/bin/modprobe -r brcmfmac
|
||||
#ExecStart=/usr/bin/rmmod -f apple-bce
|
||||
|
||||
ExecStop=/usr/local/bin/online-fxreader-pr34-suspend-fix-t2 enable_apple_bce
|
||||
#ExecStop=/usr/bin/modprobe -r apple_bce
|
||||
#ExecStop=/usr/bin/modprobe apple-bce
|
||||
#ExecStop=/usr/bin/modprobe brcmfmac
|
||||
#ExecStop=/usr/bin/modprobe brcmfmac_wcc
|
||||
|
||||
[Install]
|
||||
WantedBy=sleep.target
|
||||
WantedBy=hibernate.target
|
@ -0,0 +1,13 @@
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/pci0000:00/0000:00:1b.0/hdaudioC0D0/leds/hda::mute", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
ACTION=="add|change", SUBSYSTEM=="leds", DEVPATH=="/devices/platform/applesmc.768/leds/smc::kbd_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
# udevadm info --attribute-walk --path=/sys/devices/platform/applesmc.768/
|
||||
# udevadm trigger --action=add --verbose --parent-match /devices/platform/applesmc.768/
|
||||
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="ls -allh /sys$devpath/", OPTIONS="log_level=debug"
|
||||
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", RUN{program}+="/usr/bin/ls -allh /sys$devpath/", OPTIONS="log_level=debug"
|
||||
#ACTION=="add|change", KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
||||
ACTION=="add|change", KERNEL=="cpu0", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
||||
#KERNEL=="applesmc.768", SUBSYSTEM=="platform", DRIVER=="applesmc", MODE="0660", TAG+="uaccess", OPTIONS="log_level=debug", OPTIONS+="watch"
|
||||
ACTION=="add|change", DEVPATH=="/class/backlight/intel_backlight", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/brightness"
|
||||
#ACTION=="add|change", DEVPATH=="/devices/system/cpu/", RUN{program}+="/usr/bin/chmod 666 /sys$devpath/cpufreq/scaling_governor"
|
||||
ACTION=="add|change", KERNEL=="cpu[0-9]", SUBSYSTEM=="cpu", TAG+="systemd", ENV{SYSTEMD_WANTS}="online.fxreader.pr34.udev@$devnode.service", OPTIONS="log_level=debug"
|
||||
|
113
platform_dotfiles/macbook_air_2018/usr/local/bin/online-fxreader-pr34-suspend-fix-t2
Executable file
@ -0,0 +1,113 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# vi: set filetype=python
|
||||
|
||||
import sys
|
||||
import time
|
||||
import argparse
|
||||
import subprocess
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('mode', choices=[
|
||||
'disable_apple_bce',
|
||||
'enable_apple_bce',
|
||||
])
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
if options.mode == 'disable_apple_bce':
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'systemctl', 'stop', 'iwd',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', '-r', 'brcmfmac_wcc',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', '-r', 'brcmfmac',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', '-r', 'applesmc',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'rmmod', '-f', 'apple-bce',
|
||||
])
|
||||
#if ret != 0:
|
||||
# time.sleep(1)
|
||||
#else:
|
||||
# break
|
||||
break
|
||||
|
||||
elif options.mode == 'enable_apple_bce':
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', 'applesmc',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', 'apple-bce',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', 'brcmfmac',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'modprobe', 'brcmfmac_wcc',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
|
||||
|
||||
|
||||
while True:
|
||||
ret = subprocess.call([
|
||||
'systemctl', 'start', 'iwd',
|
||||
])
|
||||
if ret != 0:
|
||||
time.sleep(1)
|
||||
else:
|
||||
break
|
||||
else:
|
||||
raise NotImplementedError
|
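The suspend fix above repeats one retry loop per `systemctl`/`modprobe` call; the pattern boils down to a helper like the following sketch (not part of the original script, shown only to make the control flow explicit).

```python
# Sketch of the retry-until-success pattern used throughout the script above.
import subprocess
import time

def retry(cmd: list[str], interval: float = 1.0) -> None:
    # Re-run cmd every `interval` seconds until it exits with status 0.
    while subprocess.call(cmd) != 0:
        time.sleep(interval)

retry(['systemctl', 'stop', 'iwd'])
retry(['modprobe', '-r', 'brcmfmac_wcc'])
retry(['modprobe', '-r', 'brcmfmac'])
```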
103
platform_dotfiles/macbook_air_2018/usr/local/bin/online-fxreader-pr34-udev
Executable file
@ -0,0 +1,103 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
# vi: filetype=python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import time
|
||||
import os
|
||||
import subprocess
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
from typing import (Any,)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def run() -> None:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'--device',
|
||||
)
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
DEVICES : dict[str, Any] = dict(
|
||||
applesmc=dict(
|
||||
devpath='sys/devices/platform/applesmc.768',
|
||||
node='/sys/devices/platform/applesmc.768/fan1_manual',
|
||||
cmd=r'''
|
||||
chown root:fan /sys/devices/platform/applesmc.768/fan1_*
|
||||
chmod g+w /sys/devices/platform/applesmc.768/fan1_*
|
||||
''',
|
||||
),
|
||||
intel_pstate=dict(
|
||||
devpath=r'/?sys/devices/system/cpu/cpu0',
|
||||
node='/sys/devices/system/cpu/intel_pstate/no_turbo',
|
||||
cmd=r'''
|
||||
chown root:fan /sys/devices/system/cpu/intel_pstate/no_turbo
|
||||
chown root:fan /sys/devices/system/cpu/intel_pstate/max_perf_pct
|
||||
#chown root:fan /sys/devices/system/cpu/intel_pstate/status
|
||||
chmod g+w /sys/devices/system/cpu/intel_pstate/no_turbo
|
||||
chmod g+w /sys/devices/system/cpu/intel_pstate/max_perf_pct
|
||||
#chmod g+w /sys/devices/system/cpu/intel_pstate/status
|
||||
echo passive > /sys/devices/system/cpu/intel_pstate/status
|
||||
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
|
||||
chown root:fan /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
|
||||
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor
|
||||
chmod g+w /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq
|
||||
''',
|
||||
),
|
||||
#governor=dict(
|
||||
# devpath=r'/?sys/devices/system/cpu/cpu(\d+)',
|
||||
# node=r'/sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor',
|
||||
# cmd=r'''
|
||||
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
|
||||
# chown root:fan /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
|
||||
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_governor
|
||||
# chmod g+w /sys/devices/system/cpu/cpu{0}/cpufreq/scaling_max_freq
|
||||
# ''',
|
||||
#),
|
||||
)
|
||||
|
||||
processed : int = 0
|
||||
|
||||
logger.info(dict(device=options.device))
|
||||
|
||||
for k, v in DEVICES.items():
|
||||
devpath = re.compile(v['devpath'])
|
||||
|
||||
devpath_m = devpath.match(options.device)
|
||||
|
||||
if devpath_m is None:
|
||||
continue
|
||||
|
||||
node_2 = v['node'].format(*devpath_m.groups())
|
||||
|
||||
# logger.info(dict(devpath_m=devpath_m, node=node_2))
|
||||
|
||||
while not os.path.exists(node_2):
|
||||
#continue
|
||||
time.sleep(1)
|
||||
|
||||
cmd_2 = v['cmd'].format(*devpath_m.groups())
|
||||
|
||||
subprocess.check_call(cmd_2, shell=True)
|
||||
|
||||
logger.info(dict(
|
||||
devpath_m=devpath_m,
|
||||
node_2=node_2,
|
||||
cmd_2=cmd_2,
|
||||
msg='processed',
|
||||
label=k,
|
||||
))
|
||||
|
||||
processed += 1
|
||||
|
||||
if processed == 0:
|
||||
raise NotImplementedError
|
||||
|
||||
if __name__ == '__main__':
|
||||
run()
|
254
python/_m.py
Normal file
@ -0,0 +1,254 @@
|
||||
#!/usr/bin/env python3
|
||||
#vim: set filetype=python
|
||||
|
||||
import logging
|
||||
import json
|
||||
import enum
|
||||
import pathlib
|
||||
import sys
|
||||
import argparse
|
||||
#import optparse
|
||||
import dataclasses
|
||||
import subprocess
|
||||
import os
|
||||
|
||||
|
||||
|
||||
from typing import (
|
||||
Optional, Any, TypeAlias, Literal, cast, BinaryIO, Generator,
|
||||
ClassVar, Self,
|
||||
)
|
||||
|
||||
logger = logging.getLogger()
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Settings:
|
||||
project_root : pathlib.Path = pathlib.Path.cwd()
|
||||
|
||||
env_path : pathlib.Path = project_root / 'tmp' / 'env3'
|
||||
|
||||
_settings : ClassVar[Optional['Settings']] = None
|
||||
|
||||
@classmethod
|
||||
def settings(cls) -> Self:
|
||||
if cls._settings is None:
|
||||
cls._settings = cls()
|
||||
|
||||
return cls._settings
|
||||
|
||||
def js(argv: list[str]) -> int:
|
||||
return subprocess.check_call([
|
||||
'sudo',
|
||||
'docker-compose',
|
||||
'--project-directory',
|
||||
Settings.settings().project_root,
|
||||
'-f',
|
||||
Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml',
|
||||
*argv,
|
||||
])
|
||||
|
||||
def env(
|
||||
argv: Optional[list[str]] = None,
|
||||
mode: Literal['exec', 'subprocess'] = 'subprocess',
|
||||
**kwargs: Any,
|
||||
) -> Optional[subprocess.CompletedProcess[bytes]]:
|
||||
env_path = Settings.settings().env_path
|
||||
|
||||
if not env_path.exists():
|
||||
subprocess.check_call([
|
||||
sys.executable, '-m', 'venv',
|
||||
'--system-site-packages',
|
||||
str(env_path)
|
||||
])
|
||||
|
||||
subprocess.check_call([
|
||||
env_path / 'bin' / 'python3',
|
||||
'-m', 'pip',
|
||||
'install', '-r', 'requirements.txt',
|
||||
])
|
||||
|
||||
if not argv is None:
|
||||
python_path = str(env_path / 'bin' / 'python3')
|
||||
|
||||
if mode == 'exec':
|
||||
os.execv(
|
||||
python_path,
|
||||
[
|
||||
python_path,
|
||||
*argv,
|
||||
],
|
||||
)
|
||||
return None
|
||||
elif mode == 'subprocess':
|
||||
return subprocess.run([
|
||||
python_path,
|
||||
*argv,
|
||||
], **kwargs)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
return None
|
||||
|
||||
def ruff(argv: list[str]) -> None:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'-i',
|
||||
dest='paths',
|
||||
help='specify paths to check',
|
||||
default=[],
|
||||
action='append',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-e',
|
||||
dest='exclude',
|
||||
help='rules to ignore',
|
||||
default=[],
|
||||
action='append',
|
||||
)
|
||||
|
||||
options, args = parser.parse_known_args(argv)
|
||||
|
||||
if len(options.paths) == 0:
|
||||
options.paths.extend([
|
||||
'.',
|
||||
'dotfiles/.local/bin/commands',
|
||||
])
|
||||
|
||||
if len(options.exclude) == 0:
|
||||
options.exclude.extend([
|
||||
'E731',
|
||||
'E713',
|
||||
'E714',
|
||||
'E703',
|
||||
])
|
||||
|
||||
res = env([
|
||||
'-m',
|
||||
'ruff',
|
||||
'check',
|
||||
*args,
|
||||
'--output-format', 'json',
|
||||
'--ignore', ','.join(options.exclude),
|
||||
*options.paths,
|
||||
], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
|
||||
assert not res is None
|
||||
|
||||
errors = json.loads(res.stdout.decode('utf-8'))
|
||||
|
||||
g: dict[str, Any] = dict()
|
||||
for o in errors:
|
||||
if not o['filename'] in g:
|
||||
g[o['filename']] = []
|
||||
g[o['filename']].append(o)
|
||||
|
||||
h = {
|
||||
k : len(v)
|
||||
for k, v in g.items()
|
||||
}
|
||||
|
||||
logger.info(json.dumps(errors, indent=4))
|
||||
logger.info(json.dumps(h, indent=4))
|
||||
|
||||
|
||||
def inside_env() -> bool:
|
||||
try:
|
||||
import numpy
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
#class Commands(enum.StrEnum):
|
||||
# js = 'js'
|
||||
# mypy = 'mypy'
|
||||
# env = 'env'
|
||||
# ruff = 'ruff'
|
||||
# m2 = 'm2'
|
||||
|
||||
# def mypy(argv: list[str]) -> None:
|
||||
# import online.fxreader.pr34.commands_typed.mypy as _mypy
|
||||
|
||||
# _mypy.run(
|
||||
# argv,
|
||||
# )
|
||||
|
||||
def host_deps(argv: list[str]) -> None:
|
||||
if sys.platform in ['linux']:
|
||||
subprocess.check_call(r'''
|
||||
exec yay -S $(cat requirements-archlinux.txt)
|
||||
''', shell=True,)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
Command_args = ['js', 'mypy', 'env', 'ruff', 'm2', 'host_deps',]
|
||||
|
||||
Command : TypeAlias = Literal['js', 'mypy', 'env', 'ruff', 'm2', 'host_deps',]
|
||||
|
||||
def run(argv: Optional[list[str]] = None) -> None:
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format=(
|
||||
'%(levelname)s:%(name)s:%(message)s'
|
||||
':%(process)d'
|
||||
':%(asctime)s'
|
||||
':%(pathname)s:%(funcName)s:%(lineno)s'
|
||||
),
|
||||
)
|
||||
|
||||
if argv is None:
|
||||
argv = sys.argv[:]
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'command',
|
||||
#'_command',
|
||||
choices=[
|
||||
o
|
||||
for o in Command_args
|
||||
],
|
||||
#required=True,
|
||||
)
|
||||
|
||||
options, args = parser.parse_known_args(argv[1:])
|
||||
|
||||
assert options.command in Command_args
|
||||
|
||||
if len(args) > 0 and args[0] == '--':
|
||||
del args[0]
|
||||
|
||||
#options.command = Commands(options._command)
|
||||
|
||||
if options.command == 'js':
|
||||
js(args)
|
||||
elif options.command == 'host_deps':
|
||||
host_deps(args)
|
||||
elif options.command == 'env':
|
||||
env(args, mode='exec',)
|
||||
# elif options.command == 'mypy':
|
||||
# if not inside_env():
|
||||
# env(
|
||||
# [
|
||||
# pathlib.Path(__file__).parent / 'm.py',
|
||||
# *argv[1:],
|
||||
# ],
|
||||
# mode='exec'
|
||||
# )
|
||||
# else:
|
||||
# mypy(args)
|
||||
elif options.command == 'ruff':
|
||||
ruff(args)
|
||||
elif options.command == 'm2':
|
||||
if not inside_env():
|
||||
env(['--', '_m.py', 'm2', *args])
|
||||
return
|
||||
|
||||
import python.tasks.cython
|
||||
python.tasks.cython.mypyc_build(
|
||||
pathlib.Path('_m.py')
|
||||
)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
if __name__ == '__main__':
|
||||
run()
|
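A hedged sketch of calling `env()` from `python/_m.py` directly: in `'subprocess'` mode it returns the `CompletedProcess`, in `'exec'` mode it replaces the current process and never returns. Note that it will create `tmp/env3` and install `requirements.txt` on first use; the ruff invocation is only an example.

```python
# Sketch: reuse env() from python/_m.py to run a tool inside tmp/env3.
import subprocess

from _m import env  # assumes python/ is the current directory or on sys.path

res = env(['-m', 'ruff', '--version'], mode='subprocess',
          stdout=subprocess.PIPE)
assert res is not None
print(res.stdout.decode('utf-8').strip())
```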
162
python/cli.py
Normal file
@ -0,0 +1,162 @@
|
||||
import sys
|
||||
import shutil
|
||||
import glob
|
||||
import io
|
||||
import copy
|
||||
import subprocess
|
||||
import pathlib
|
||||
import logging
|
||||
import enum
|
||||
import argparse
|
||||
import dataclasses
|
||||
|
||||
from typing import (Optional, override,)
|
||||
|
||||
from online.fxreader.pr34.commands_typed.logging import setup as logging_setup
|
||||
|
||||
from online.fxreader.pr34.commands_typed import cli as _cli
|
||||
|
||||
from online.fxreader.pr34.commands_typed import cli_bootstrap
|
||||
|
||||
logging_setup()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(enum.StrEnum):
|
||||
mypy = 'mypy'
|
||||
deploy_wheel = 'deploy:wheel'
|
||||
tests = 'tests'
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Settings(
|
||||
_cli.DistSettings,
|
||||
):
|
||||
base_dir: pathlib.Path = pathlib.Path(__file__).parent.parent
|
||||
build_dir: pathlib.Path = base_dir / 'tmp' / 'build'
|
||||
wheel_dir: pathlib.Path = base_dir / 'deps' / 'dist'
|
||||
env_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).env_path
|
||||
python_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).python_path
|
||||
|
||||
|
||||
class CLI(_cli.CLI):
|
||||
def __init__(self) -> None:
|
||||
self.settings = Settings()
|
||||
self._projects: dict[str, _cli.Project] = {
|
||||
'online.fxreader.pr34': _cli.Project(
|
||||
source_dir=self.settings.base_dir / 'python',
|
||||
build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build',
|
||||
dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install',
|
||||
)
|
||||
}
|
||||
|
||||
self._dependencies : dict[str, _cli.Dependency] = dict()
|
||||
|
||||
@override
|
||||
@property
|
||||
def dist_settings(self) -> _cli.DistSettings:
|
||||
return self.settings
|
||||
|
||||
@override
|
||||
@property
|
||||
def projects(self) -> dict[str, _cli.Project]:
|
||||
return self._projects
|
||||
|
||||
def mypy(
|
||||
self,
|
||||
argv: list[str],
|
||||
) -> None:
|
||||
import online.fxreader.pr34.commands_typed.mypy as _mypy
|
||||
|
||||
project = self._projects['online.fxreader.pr34']
|
||||
|
||||
_mypy.run(
|
||||
argv,
|
||||
settings=_mypy.MypySettings(
|
||||
paths=[
|
||||
#Settings.settings().project_root / 'dotfiles/.local/bin/commands',
|
||||
# project.source_dir / 'm.py',
|
||||
project.source_dir / '_m.py',
|
||||
project.source_dir / 'online',
|
||||
project.source_dir / 'cli.py',
|
||||
self.settings.base_dir / 'm.py',
|
||||
# Settings.settings().project_root / 'deps/com.github.aiortc.aiortc/src',
|
||||
#Settings.settings().project_root / 'm.py',
|
||||
],
|
||||
max_errors={
|
||||
'python/online/fxreader/pr34/commands_typed': 0,
|
||||
'python/cli.py': 0,
|
||||
'm.py': 0,
|
||||
'deps/com.github.aiortc.aiortc/src/online_fxreader': 0,
|
||||
'deps/com.github.aiortc.aiortc/src/aiortc/contrib/signaling': 0
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@override
|
||||
@property
|
||||
def dependencies(self) -> dict[str, _cli.Dependency]:
|
||||
return self._dependencies
|
||||
|
||||
def run(self, argv: Optional[list[str]] = None) -> None:
|
||||
if argv is None:
|
||||
argv = copy.deepcopy(sys.argv)
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
'command',
|
||||
choices=[
|
||||
o.value
|
||||
for o in Command
|
||||
]
|
||||
)
|
||||
parser.add_argument(
|
||||
'-p', '--project',
|
||||
choices=[
|
||||
o
|
||||
for o in self.projects
|
||||
]
|
||||
)
|
||||
parser.add_argument(
|
||||
'-o', '--output_dir',
|
||||
default=None,
|
||||
help='wheel output dir for deploy:wheel',
|
||||
)
|
||||
parser.add_argument(
|
||||
'-f', '--force',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help='remove install dir, before installing, default = false',
|
||||
)
|
||||
|
||||
options, args = parser.parse_known_args(argv[1:])
|
||||
|
||||
options.command = Command(options.command)
|
||||
|
||||
if options.command is Command.deploy_wheel:
|
||||
assert not options.project is None
|
||||
|
||||
self.deploy_wheel(
|
||||
project_name=options.project,
|
||||
argv=args,
|
||||
output_dir=options.output_dir,
|
||||
mypy=True,
|
||||
)
|
||||
elif options.command is Command.mypy:
|
||||
self.mypy(
|
||||
argv=args,
|
||||
)
|
||||
elif options.command is Command.tests:
|
||||
for k, v in self.projects.items():
|
||||
subprocess.check_call([
|
||||
sys.executable,
|
||||
'-m',
|
||||
'unittest',
|
||||
'online.fxreader.pr34.tests.test_crypto',
|
||||
*args,
|
||||
], cwd=str(v.source_dir))
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
if __name__ == '__main__':
|
||||
CLI().run()
|
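A short hedged sketch of driving the project CLI above without the shell: `run()` parses `argv[1:]`, so the first element is only a program-name placeholder, and `'mypy'` is one of the values of the `Command` enum. It assumes the `online.fxreader.pr34` package is importable and `python/` is on `sys.path`.

```python
# Sketch: invoke the mypy command of python/cli.py programmatically.
from cli import CLI  # assumes python/ is the current directory or on sys.path

CLI().run(['cli.py', 'mypy'])
```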
0
python/online/fxreader/pr34/__init__.py
Normal file
1139
dotfiles/.local/bin/commands → python/online/fxreader/pr34/commands.py
Executable file → Normal file
File diff suppressed because it is too large
0
python/online/fxreader/pr34/commands_typed/__init__.py
Normal file
27
python/online/fxreader/pr34/commands_typed/argparse.py
Normal file
@ -0,0 +1,27 @@
|
||||
__all__ = (
|
||||
'parse_args',
|
||||
)
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from typing import (Optional,)
|
||||
|
||||
def parse_args(
|
||||
parser: argparse.ArgumentParser,
|
||||
args: Optional[list[str]] = None,
|
||||
) -> tuple[argparse.Namespace, list[str]]:
|
||||
if args is None:
|
||||
args = sys.argv[1:]
|
||||
|
||||
argv : list[str] = []
|
||||
|
||||
for i, o in enumerate(args):
|
||||
if o == '--':
|
||||
argv.extend(args[i + 1:])
|
||||
|
||||
del args[i:]
|
||||
|
||||
break
|
||||
|
||||
return parser.parse_args(args), argv
|
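A minimal usage sketch for `parse_args` above: everything after a literal `--` is returned unparsed as the second tuple element. The `--project` flag is made up for the example, and the package is assumed to be importable.

```python
# Sketch: arguments after '--' are split off instead of being parsed.
import argparse

from online.fxreader.pr34.commands_typed.argparse import parse_args

parser = argparse.ArgumentParser()
parser.add_argument('--project')  # hypothetical flag, illustration only

options, rest = parse_args(parser, ['--project', 'demo', '--', '-m', 'ruff'])
assert options.project == 'demo'
assert rest == ['-m', 'ruff']
```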
14
python/online/fxreader/pr34/commands_typed/asyncio.py
Normal file
@ -0,0 +1,14 @@
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from typing import (Any,)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def handle_task_result(fut: asyncio.Future[Any]) -> None:
|
||||
try:
|
||||
fut.result()
|
||||
|
||||
logger.debug(dict(fut=fut, msg='done'), stacklevel=2,)
|
||||
except:
|
||||
logger.exception('', stacklevel=2,)
|
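A minimal sketch of wiring `handle_task_result` above as a done-callback so exceptions from fire-and-forget tasks get logged instead of silently dropped; the failing coroutine is a placeholder and the package is assumed to be importable.

```python
# Sketch: log exceptions from background tasks via handle_task_result.
import asyncio
import logging

from online.fxreader.pr34.commands_typed.asyncio import handle_task_result

logging.basicConfig(level=logging.DEBUG)

async def background() -> None:  # placeholder coroutine that fails
    raise RuntimeError('boom')

async def main() -> None:
    task = asyncio.get_running_loop().create_task(background())
    task.add_done_callback(handle_task_result)  # logs the traceback
    await asyncio.sleep(0.1)  # give the task and the callback time to run

asyncio.run(main())
```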
478
python/online/fxreader/pr34/commands_typed/cli.py
Normal file
@ -0,0 +1,478 @@
|
||||
import dataclasses
|
||||
import io
|
||||
import glob
|
||||
import os
|
||||
import pathlib
|
||||
import logging
|
||||
import sys
|
||||
import subprocess
|
||||
import shutil
|
||||
import abc
|
||||
|
||||
from .os import shutil_which
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
Literal,
|
||||
Any,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Project:
|
||||
source_dir : pathlib.Path
|
||||
build_dir : pathlib.Path
|
||||
dest_dir : pathlib.Path
|
||||
meson_path: Optional[pathlib.Path] = None
|
||||
|
||||
@dataclasses.dataclass
|
||||
class Dependency:
|
||||
name: str
|
||||
mode : Literal['pyproject', 'meson', 'meson-python', 'm']
|
||||
source_path : pathlib.Path
|
||||
args: Optional[list[str]] = None
|
||||
|
||||
@dataclasses.dataclass
|
||||
class DistSettings:
|
||||
wheel_dir : pathlib.Path
|
||||
python_path: pathlib.Path
|
||||
env_path: pathlib.Path
|
||||
|
||||
class CLI(abc.ABC):
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def dist_settings(self) -> DistSettings:
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def projects(self) -> dict[str, Project]:
|
||||
raise NotImplementedError
|
||||
|
||||
@property
|
||||
@abc.abstractmethod
|
||||
def dependencies(self) -> dict[str, Dependency]:
|
||||
raise NotImplementedError
|
||||
|
||||
def mypy(
|
||||
self,
|
||||
argv: list[str]
|
||||
) -> None:
|
||||
from . import mypy as _mypy
|
||||
|
||||
_mypy.run(
|
||||
argv,
|
||||
)
|
||||
|
||||
def ruff(
|
||||
self,
|
||||
project_name: str,
|
||||
argv: list[str],
|
||||
) -> None:
|
||||
project = self.projects[project_name]
|
||||
|
||||
if len(argv) == 0:
|
||||
argv = ['check', '.',]
|
||||
|
||||
subprocess.check_call([
|
||||
self.dist_settings.python_path,
|
||||
'-m',
|
||||
'ruff',
|
||||
'--config', str(project.source_dir / 'pyproject.toml'),
|
||||
*argv,
|
||||
])
|
||||
|
||||
def pyright(
|
||||
self,
|
||||
project_name: str,
|
||||
argv: list[str],
|
||||
) -> None:
|
||||
project = self.projects[project_name]
|
||||
|
||||
if len(argv) == 0:
|
||||
argv = ['--threads', '3']
|
||||
|
||||
cmd = [
|
||||
str(self.dist_settings.python_path),
|
||||
'-m',
|
||||
'pyright',
|
||||
'--pythonpath', str(self.dist_settings.python_path),
|
||||
'-p', str(project.source_dir / 'pyproject.toml'),
|
||||
*argv,
|
||||
]
|
||||
|
||||
logger.info(cmd)
|
||||
|
||||
subprocess.check_call(cmd)
|
||||
|
||||
def pip_sync(
|
||||
self,
|
||||
project: str,
|
||||
features: list[str],
|
||||
) -> None:
|
||||
from . import cli_bootstrap
|
||||
|
||||
pyproject = cli_bootstrap.pyproject_load(
|
||||
self.projects[project].source_dir / 'pyproject.toml'
|
||||
)
|
||||
|
||||
dependencies = sum([
|
||||
pyproject.dependencies[o]
|
||||
for o in features
|
||||
], [])
|
||||
|
||||
pip_find_links : list[pathlib.Path] = []
|
||||
|
||||
if not pyproject.pip_find_links is None:
|
||||
pip_find_links.extend(pyproject.pip_find_links)
|
||||
|
||||
|
||||
logger.info(dict(
|
||||
dependencies=dependencies,
|
||||
))
|
||||
|
||||
if len(dependencies) > 0:
|
||||
subprocess.check_call([
|
||||
self.dist_settings.python_path,
|
||||
'-m',
|
||||
'uv', 'pip', 'install',
|
||||
*sum([
|
||||
['-f', str(o),]
|
||||
for o in pip_find_links
|
||||
], []),
|
||||
# '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'),
|
||||
'--offline',
|
||||
*dependencies,
|
||||
])
|
||||
|
||||
def deploy_fetch_dist(
|
||||
self,
|
||||
force: bool,
|
||||
) -> None:
|
||||
for k, d in self.dependencies.items():
|
||||
whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_'))
|
||||
if len(glob.glob(
|
||||
str(whl_glob)
|
||||
)) == 0 or force:
|
||||
if d.source_path.exists():
|
||||
def whl_files_get() -> list[dict[str, Any]]:
|
||||
return [
|
||||
dict(
|
||||
path=o,
|
||||
stat=os.stat(o).st_mtime,
|
||||
)
|
||||
for o in glob.glob(
|
||||
str(whl_glob)
|
||||
)
|
||||
]
|
||||
|
||||
present_files = whl_files_get()
|
||||
|
||||
if d.mode == 'm':
|
||||
if (d.source_path / 'm.py').exists():
|
||||
cmd = [
|
||||
sys.executable,
|
||||
str(d.source_path / 'm.py'),
|
||||
'deploy:wheel',
|
||||
'-o',
|
||||
str(self.dist_settings.wheel_dir),
|
||||
]
|
||||
|
||||
if not d.args is None:
|
||||
cmd.extend(d.args)
|
||||
|
||||
subprocess.check_call(
|
||||
cmd,
|
||||
cwd=d.source_path,
|
||||
)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
updated_files = whl_files_get()
|
||||
|
||||
def index_get(o: dict[str, Any]) -> tuple[Any, ...]:
|
||||
return (o['path'], o['stat'])
|
||||
|
||||
present_files_index = {
|
||||
index_get(o) : o
|
||||
for o in present_files
|
||||
}
|
||||
|
||||
        new_files : list[dict[str, Any]] = []

        for o in updated_files:
            entry_index = index_get(o)

            if not entry_index in present_files_index:
                new_files.append(o)

        if len(new_files) == 0:
            raise NotImplementedError

        latest_file = sorted(
            new_files,
            key=lambda x: x['stat']
        )[-1]

        subprocess.check_call([
            self.dist_settings.python_path,
            '-m', 'pip',
            'install',
            latest_file['path'],
        ])

    @property
    def pkg_config_path(self,) -> set[pathlib.Path]:
        return {
            pathlib.Path(o)
            for o in glob.glob(
                str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'),
                recursive=True,
            )
        }

    def deploy_wheel(
        self,
        project_name: str,
        argv: Optional[list[str]] = None,
        output_dir: Optional[pathlib.Path] = None,
        force: Optional[bool] = None,
        env: Optional[dict[str, str]] = None,
        mypy: bool = False,
        tests: bool = False,
    ) -> None:
        project = self.projects[project_name]

        # subprocess.check_call([
        #     sys.argv[0],
        #     # sys.executable,
        #     '-p', options.project,
        #     Command.meson_setup.value,
        # ])

        if argv is None:
            argv = []

        # assert argv is None or len(argv) == 0

        if not project.meson_path is None:
            if tests:
                self.meson_test(
                    project_name=project_name,
                )

            self.meson_install(
                project_name=project_name,
                force=force,
            )

        if mypy:
            self.mypy([])

        if env is None:
            env = dict()

        extra_args: list[str] = []

        if len(self.third_party_roots) > 0:
            extra_args.extend([
                '-Csetup-args=%s' % (
                    '-Dthird_party_roots=%s' % str(o.absolute())
                )
                for o in self.third_party_roots
            ])

        cmd = [
            sys.executable,
            '-m',
            'build',
            '-w', '-n',
            *extra_args,
            '-Csetup-args=-Dmodes=pyproject',
            '-Cbuild-dir=%s' % str(project.build_dir / 'pyproject'),
            '-Csetup-args=-Dinstall_path=%s' % str(project.dest_dir),
            # '-Cbuild-dir=%s' % str(project.build_dir),
            str(project.source_dir),
            *argv,
        ]

        if not output_dir is None:
            cmd.extend(['-o', str(output_dir)])

        logger.info(dict(env=env))

        subprocess.check_call(
            cmd,
            env=dict(list(os.environ.items())) | env,
        )

        if not project.meson_path is None:
            if tests:
                subprocess.check_call(
                    [
                        'ninja',
                        '-C',
                        str(project.build_dir / 'pyproject'),
                        'test',
                    ]
                )

    def meson_install(
        self,
        project_name: str,
        force: Optional[bool] = None,
        argv: Optional[list[str]] = None,
    ) -> None:
        project = self.projects[project_name]

        if force is None:
            force = False

        if argv is None:
            argv = []

        if force and project.dest_dir.exists():
            shutil.rmtree(project.dest_dir)

        subprocess.check_call([
            shutil_which('meson', True,),
            'install',
            '-C',
            project.build_dir / 'meson',
            '--destdir', project.dest_dir,
            *argv,
        ])

        for o in glob.glob(
            str(project.dest_dir / 'lib' / 'pkgconfig' / '*.pc'),
            recursive=True,
        ):
            logger.info(dict(
                path=o,
                action='patch prefix',
            ))

            with io.open(o, 'r') as f:
                content = f.read()

            with io.open(o, 'w') as f:
                f.write(
                    content.replace('prefix=/', 'prefix=${pcfiledir}/../../')
                )

    def ninja(
        self,
        project_name: str,
        argv: Optional[list[str]] = None,
        env: Optional[dict[str, str]] = None,
    ) -> None:
        project = self.projects[project_name]

        if argv is None:
            argv = []

        if env is None:
            env = dict()

        logger.info(dict(env=env))

        subprocess.check_call(
            [
                shutil_which('ninja', True),
                '-C',
                str(project.build_dir / 'meson'),
                *argv,
            ],
            env=dict(list(os.environ.items())) | env,
        )

    def meson_test(
        self,
        project_name: str,
        argv: Optional[list[str]] = None,
    ) -> None:
        project = self.projects[project_name]

        if argv is None:
            argv = []

        subprocess.check_call([
            shutil_which('meson', True,),
            'test',
            '-C',
            project.build_dir / 'meson',
            *argv,
        ])

    def meson_compile(
        self,
        project_name: str,
        argv: Optional[list[str]] = None,
    ) -> None:
        project = self.projects[project_name]

        if argv is None:
            argv = []

        subprocess.check_call([
            shutil_which('meson', True,),
            'compile',
            '-C',
            project.build_dir / 'meson',
            *argv,
        ])

    @property
    def third_party_roots(self) -> list[pathlib.Path]:
        return []

    def meson_setup(
        self,
        project_name: str,
        force: bool,
        argv: Optional[list[str]] = None,
        env: Optional[dict[str, str]] = None,
        # third_party_roots: Optional[list[pathlib.Path]] = None,
    ) -> None:
        project = self.projects[project_name]

        if argv is None:
            argv = []

        if env is None:
            env = dict()

        logger.info(dict(env=env))

        if force:
            if (project.build_dir / 'meson').exists():
                logger.info(dict(action='removing build dir', path=project.build_dir / 'meson'))
                shutil.rmtree(project.build_dir / 'meson')

        extra_args : list[str] = []

        if len(self.third_party_roots) > 0:
            extra_args.extend([
                '-Dthird_party_roots=%s' % str(o.absolute())
                for o in self.third_party_roots
            ])

        cmd = [
            shutil_which('meson', True,),
            'setup',
            str(project.source_dir),
            str(project.build_dir / 'meson'),
            '-Dmodes=["meson"]',
            *extra_args,
            # '-Dpkgconfig.relocatable=true',
            '-Dprefix=/',
            *argv,
        ]

        logger.info(dict(cmd=cmd))

        subprocess.check_call(
            cmd,
            env=dict(list(os.environ.items())) | env,
        )
python/online/fxreader/pr34/commands_typed/cli_bootstrap.py (new file, +336)
@@ -0,0 +1,336 @@
#!/usr/bin/env python3
import glob
import io
import tempfile
import dataclasses
import pathlib
import sys
import subprocess
import os
import logging


from typing import (Optional, Any, BinaryIO,)
from typing_extensions import (
    Self,
)

logger = logging.getLogger(__name__)

def toml_load(f: BinaryIO) -> Any:
    # prefer the stdlib parser (3.11+), fall back to tomli;
    # only a missing module should be swallowed here
    try:
        import tomllib
        return tomllib.load(f)
    except ImportError:
        pass

    try:
        import tomli
        return tomli.load(f)
    except ImportError:
        pass

    raise NotImplementedError

@dataclasses.dataclass
class PyProject:
    path: pathlib.Path
    dependencies: dict[str, list[str]]
    early_features: Optional[list[str]] = None
    pip_find_links: Optional[list[pathlib.Path]] = None
    runtime_libdirs: Optional[list[pathlib.Path]] = None
    runtime_preload: Optional[list[pathlib.Path]] = None
    requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda : dict())

def pyproject_load(
    d: pathlib.Path,
) -> PyProject:
    with io.open(d, 'rb') as f:
        content = toml_load(f)

    assert isinstance(content, dict)

    dependencies : dict[str, list[str]] = dict()

    dependencies['default'] = content['project']['dependencies']

    if (
        'optional-dependencies' in content['project']
    ):
        assert isinstance(
            content['project']['optional-dependencies'],
            dict
        )

        for k, v in content['project']['optional-dependencies'].items():
            assert isinstance(v, list)
            assert isinstance(k, str)

            dependencies[k] = v


    res = PyProject(
        path=d,
        dependencies=dependencies,
    )

    tool_name = 'online.fxreader.pr34'.replace('.', '-')

    if (
        'tool' in content and
        isinstance(
            content['tool'], dict
        ) and
        tool_name in content['tool'] and
        isinstance(
            content['tool'][tool_name],
            dict
        )
    ):
        if 'early_features' in content['tool'][tool_name]:
            res.early_features = content['tool'][tool_name]['early_features']

        if 'pip_find_links' in content['tool'][tool_name]:
            res.pip_find_links = [
                d.parent / pathlib.Path(o)
                for o in content['tool'][tool_name]['pip_find_links']
            ]

        if 'runtime_libdirs' in content['tool'][tool_name]:
            res.runtime_libdirs = [
                d.parent / pathlib.Path(o)
                # pathlib.Path(o)
                for o in content['tool'][tool_name]['runtime_libdirs']
            ]

        if 'runtime_preload' in content['tool'][tool_name]:
            res.runtime_preload = [
                d.parent / pathlib.Path(o)
                # pathlib.Path(o)
                for o in content['tool'][tool_name]['runtime_preload']
            ]

        if 'requirements' in content['tool'][tool_name]:
            assert isinstance(content['tool'][tool_name]['requirements'], dict)

            res.requirements = {
                k : d.parent / pathlib.Path(v)
                # pathlib.Path(o)
                for k, v in content['tool'][tool_name]['requirements'].items()
            }

    return res

@dataclasses.dataclass
class BootstrapSettings:
    env_path: pathlib.Path
    python_path: pathlib.Path
    base_dir: pathlib.Path
    python_version: Optional[str] = dataclasses.field(
        default_factory=lambda : os.environ.get(
            'PYTHON_VERSION',
            '%d.%d' % (
                sys.version_info.major,
                sys.version_info.minor,
            ),
        ).strip()
    )
    uv_args: list[str] = dataclasses.field(
        default_factory=lambda : os.environ.get(
            'UV_ARGS',
            '--offline',
        ).split(),
    )

    @classmethod
    def get(
        cls,
        base_dir: Optional[pathlib.Path] = None,
    ) -> Self:
        if base_dir is None:
            base_dir = pathlib.Path.cwd()

        env_path = base_dir / '.venv'
        python_path = env_path / 'bin' / 'python3'

        return cls(
            base_dir=base_dir,
            env_path=env_path,
            python_path=python_path,
        )

def env_bootstrap(
    bootstrap_settings: BootstrapSettings,
    pyproject: PyProject,
) -> None:
    pip_find_links : list[pathlib.Path] = []

    if not pyproject.pip_find_links is None:
        pip_find_links.extend(pyproject.pip_find_links)

    pip_find_links_args = sum([
        ['-f', str(o),]
        for o in pip_find_links
    ], [])

    features : list[str] = []

    if pyproject.early_features:
        features.extend(pyproject.early_features)

    requirements_python_version: Optional[str] = None
    if not bootstrap_settings.python_version is None:
        requirements_python_version = bootstrap_settings.python_version.replace('.', '_')


    requirements_name = '_'.join(sorted(features))

    if requirements_python_version:
        requirements_name += '_' + requirements_python_version

    requirements_path : Optional[pathlib.Path] = None

    if requirements_name in pyproject.requirements:
        requirements_path = pyproject.requirements[requirements_name]
    else:
        requirements_path = pyproject.path.parent / 'requirements.txt'

    requirements_in : list[str] = []

    requirements_in.extend([
        'uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'
    ])

    if pyproject.early_features:
        early_dependencies = sum([
            pyproject.dependencies[o]
            for o in pyproject.early_features
        ], [])

        logger.info(dict(
            requirements_name=requirements_name,
            early_dependencies=early_dependencies,
        ))

        requirements_in.extend(early_dependencies)
        # if len(early_dependencies) > 0:
        #     subprocess.check_call([
        #         bootstrap_settings.python_path,
        #         '-m',
        #         'uv', 'pip', 'install',
        #         *pip_find_links_args,
        #         # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'),
        #         *bootstrap_settings.uv_args,
        #         *early_dependencies,
        #     ])

    if not requirements_path.exists():
        with tempfile.NamedTemporaryFile(
            mode='w',
            prefix='requirements',
            suffix='.in',
        ) as f:
            f.write(
                '\n'.join(requirements_in)
            )
            f.flush()

            subprocess.check_call([
                'uv',
                'pip',
                'compile',
                '--generate-hashes',
                *pip_find_links_args,
                # '-p',
                # bootstrap_settings.python_path,
                *bootstrap_settings.uv_args,
                '-o', str(requirements_path),
                f.name,
            ])

    uv_python_version: list[str] = []

    if not bootstrap_settings.python_version is None:
        uv_python_version.extend([
            '-p', bootstrap_settings.python_version,
        ])

    subprocess.check_call([
        'uv', 'venv',
        *uv_python_version,
        *pip_find_links_args,
        # '--seed',
        *bootstrap_settings.uv_args,
        str(bootstrap_settings.env_path)
    ])

    subprocess.check_call([
        'uv',
        'pip',
        'install',
        *pip_find_links_args,
        '-p',
        bootstrap_settings.python_path,
        '--require-hashes',
        *bootstrap_settings.uv_args,
        '-r', str(requirements_path),
    ])


def paths_equal(
    a: pathlib.Path | str,
    b: pathlib.Path | str
) -> bool:
    return (
        os.path.abspath(str(a)) ==
        os.path.abspath(str(b))
    )

def run(
    d: Optional[pathlib.Path] = None,
    cli_path: Optional[pathlib.Path] = None,
) -> None:
    if cli_path is None:
        cli_path = pathlib.Path(__file__).parent / 'cli.py'

    if d is None:
        d = pathlib.Path(__file__).parent / 'pyproject.toml'

    bootstrap_settings = BootstrapSettings.get()

    pyproject : PyProject = pyproject_load(
        d
    )

    logging.basicConfig(level=logging.INFO)

    if not bootstrap_settings.env_path.exists():
        env_bootstrap(
            bootstrap_settings=bootstrap_settings,
            pyproject=pyproject,
        )

    logger.info([sys.executable, sys.argv, bootstrap_settings.python_path])

    if not paths_equal(sys.executable, bootstrap_settings.python_path):
        os.execv(
            str(bootstrap_settings.python_path),
            [
                str(bootstrap_settings.python_path),
                *sys.argv,
            ]
        )

    os.execv(
        str(bootstrap_settings.python_path),
        [
            str(bootstrap_settings.python_path),
            str(
                cli_path
            ),
            *sys.argv[1:],
        ]
    )

if __name__ == '__main__':
    run()
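A usage sketch (not part of the diff): `run()` is meant to be called from a thin wrapper at the project root, which bootstraps the `.venv` with uv and then re-execs into `cli.py`. The wrapper file name `m.py` and the `pyproject.toml` location next to it are illustrative assumptions.

# m.py (hypothetical wrapper, placed next to pyproject.toml and cli.py)
import pathlib
from online.fxreader.pr34.commands_typed import cli_bootstrap

if __name__ == '__main__':
    cli_bootstrap.run(
        d=pathlib.Path(__file__).parent / 'pyproject.toml',
        cli_path=pathlib.Path(__file__).parent / 'cli.py',
    )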
python/online/fxreader/pr34/commands_typed/crypto.py (new file, +90)
@@ -0,0 +1,90 @@
import base64
import os

import cryptography.exceptions
import cryptography.hazmat.primitives.kdf.scrypt

from typing import (Literal, overload, Optional,)

class PasswordUtils:
    @overload
    @classmethod
    def secret_hash(
        cls,
        secret: str | bytes,
        mode: Literal['base64'],
        salt: Optional[bytes] = None,
    ) -> tuple[str, str]: ...

    @overload
    @classmethod
    def secret_hash(
        cls,
        secret: str | bytes,
        mode: Literal['bytes'],
        salt: Optional[bytes] = None,
    ) -> tuple[bytes, bytes]: ...

    @classmethod
    def secret_hash(
        cls,
        secret: str | bytes,
        mode: Literal['bytes', 'base64'],
        salt: Optional[bytes] = None,
    ) -> tuple[str, str] | tuple[bytes, bytes]:
        if salt is None:
            salt = os.urandom(16)

        if isinstance(secret, str):
            secret = secret.encode('utf-8')

        # derive
        kdf = cls._scrypt_init(salt=salt)

        hashed_secret = kdf.derive(secret)

        if mode == 'bytes':
            return (salt, hashed_secret)
        elif mode == 'base64':
            res_tuple = tuple((
                base64.b64encode(o).decode('utf-8')
                for o in (salt, hashed_secret,)
            ))
            return (res_tuple[0], res_tuple[1])
        else:
            raise NotImplementedError

    @classmethod
    def _scrypt_init(
        cls,
        salt: bytes
    ) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
        return cryptography.hazmat.primitives.kdf.scrypt.Scrypt(
            salt=salt,
            length=32,
            n=2**14,
            r=8,
            p=1,
        )

    @classmethod
    def secret_check(
        cls,
        secret: str | bytes,
        salt: str | bytes,
        hashed_secret: str | bytes,
    ) -> bool:
        if isinstance(salt, str):
            salt = base64.b64decode(salt)

        if isinstance(secret, str):
            secret = secret.encode('utf-8')

        if isinstance(hashed_secret, str):
            hashed_secret = base64.b64decode(hashed_secret)

        kdf = cls._scrypt_init(salt=salt)

        try:
            kdf.verify(secret, hashed_secret)
            return True
        except cryptography.exceptions.InvalidKey:
            return False
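A minimal usage sketch for `PasswordUtils`, mirroring the behaviour exercised in `test_crypto.py` further down; the secret values are illustrative.

from online.fxreader.pr34.commands_typed.crypto import PasswordUtils

# hash with a random salt; base64 mode returns printable (salt, hash) strings
salt_b64, hash_b64 = PasswordUtils.secret_hash('correct horse', mode='base64')

# later: verify a candidate secret against the stored pair
assert PasswordUtils.secret_check('correct horse', salt_b64, hash_b64)
assert not PasswordUtils.secret_check('wrong horse', salt_b64, hash_b64)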
python/online/fxreader/pr34/commands_typed/debug.py (new file, +35)
@@ -0,0 +1,35 @@
import os
import logging

from typing import (Optional,)

logger = logging.getLogger(__name__)

class DebugPy:
    @classmethod
    def set_trace(
        cls,
        host: Optional[str] = None,
        port: Optional[int] = None,
        wait: Optional[bool] = None,
    ) -> None:
        if host is None:
            host = '127.0.0.1'
        if port is None:
            port = 4444
        if wait is None:
            wait = True

        import debugpy

        if os.environ.get('DEBUGPY_RUNNING') != 'true':
            logger.info('debugpy init')
            debugpy.listen((host, port))
            os.environ['DEBUGPY_RUNNING'] = 'true'

            if wait:
                debugpy.wait_for_client()
                debugpy.breakpoint()

            logger.info('debugpy done')
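A usage sketch, assuming debugpy is installed in the environment; host and port fall back to the defaults defined above.

from online.fxreader.pr34.commands_typed.debug import DebugPy

# block until an IDE attaches to 127.0.0.1:4444, then break at this point
DebugPy.set_trace(wait=True)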
python/online/fxreader/pr34/commands_typed/logging.py (new file, +16)
@@ -0,0 +1,16 @@
import logging
from typing import (Optional,)

def setup(level: Optional[int] = None) -> None:
    if level is None:
        level = logging.INFO

    logging.basicConfig(
        level=level,
        format=(
            '%(levelname)s:%(name)s:%(message)s'
            ':%(process)d'
            ':%(asctime)s'
            ':%(pathname)s:%(funcName)s:%(lineno)s'
        ),
    )
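A usage sketch for the logging helper; the DEBUG override is illustrative.

import logging
from online.fxreader.pr34.commands_typed import logging as pr34_logging

pr34_logging.setup()                # INFO by default
# pr34_logging.setup(logging.DEBUG) # or pass an explicit level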
python/online/fxreader/pr34/commands_typed/mypy.py (new file, +216)
@@ -0,0 +1,216 @@
import pydantic.dataclasses
import datetime
import pydantic_settings
import marisa_trie
import json
import pathlib
import subprocess
import logging
import sys
import argparse

from pydantic import (Field,)

from typing import (ClassVar, Generator, Annotated, Optional, Any,)


logger = logging.getLogger(__name__)

@pydantic.dataclasses.dataclass
class MypyFormatEntry:
    name : str
    value : str

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, type(self)):
            raise NotImplementedError

        return self.value == other.value

class MypyFormat:
    vscode : ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode')
    json : ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json')

    @classmethod
    def from_value(cls, value: str) -> MypyFormatEntry:
        for e in cls.entries():
            if value == e.value:
                return e

        raise NotImplementedError

    @classmethod
    def entries(cls) -> Generator[MypyFormatEntry, None, None,]:
        for o in dir(cls):
            e = getattr(cls, o)
            if not isinstance(e, MypyFormatEntry):
                continue

            yield e

class MypySettings(pydantic_settings.BaseSettings):
    model_config = pydantic_settings.SettingsConfigDict(
        env_prefix='online_fxreader_pr34_mypy_',
        case_sensitive=False,
    )

    config_path : pathlib.Path = pathlib.Path.cwd() / '.mypy.ini'
    max_errors : dict[str, int] = dict()
    paths : Annotated[list[pathlib.Path], Field(default_factory=lambda : ['.'])]

def run(
    argv: Optional[list[str]] = None,
    settings: Optional[MypySettings] = None,
) -> None:
    if argv is None:
        argv = []

    if settings is None:
        settings = MypySettings()

    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-q', '--quiet',
        dest='quiet',
        action='store_true',
        help='do not print anything if the program is correct according to max_errors limits',
        default=False,
    )
    parser.add_argument(
        '-i',
        dest='paths',
        help='specify paths to check',
        default=[],
        action='append',
    )
    parser.add_argument(
        '-f', '--format',
        dest='_format',
        help='output format of errors',
        default=MypyFormat.json.value,
        choices=[
            o.value
            for o in MypyFormat.entries()
        ],
    )
    options, args = parser.parse_known_args(argv)

    if len(args) > 0 and args[0] == '--':
        del args[0]

    options.format = MypyFormat.from_value(options._format)

    if len(options.paths) == 0:
        options.paths.extend(settings.paths)

    started_at = datetime.datetime.now()

    mypy_cmd = [
        sys.executable,
        '-m',
        'mypy',
        '--config-file', str(settings.config_path),
        '--strict',
        '-O',
        'json',
        *args,
        *options.paths,
    ]


    logger.info(dict(cmd=mypy_cmd))

    res = subprocess.run(
        mypy_cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )

    done_at = datetime.datetime.now()

    try:
        assert not res.returncode is None

        errors = sorted([
            json.loads(o)
            for o in res.stdout.decode('utf-8').splitlines()
            if not o.strip() == ''
        ], key=lambda x: (
            x.get('file', ''),
            x.get('line', 0),
        ))

        if not options.quiet:
            if (len(res.stderr)) > 0:
                logger.error(res.stderr.decode('utf-8'))
    except Exception:
        logger.exception('')
        logger.error(res.stdout.decode('utf-8'))
        logger.error(res.stderr.decode('utf-8'))
        sys.exit(res.returncode)


    g : dict[str, Any] = dict()
    for o in errors:
        if not o['file'] in g:
            g[o['file']] = []
        g[o['file']].append(o)

    h = {
        k : len(v)
        for k, v in sorted(
            list(g.items()),
            key=lambda x: x[0],
        )
    }

    mentioned_paths = marisa_trie.Trie(list(h))

    violated_limits : dict[str, str] = dict()

    for k, v in settings.max_errors.items():
        matching_paths = mentioned_paths.keys(k)
        total_errors = sum([
            h[o]
            for o in matching_paths
        ], 0)

        if total_errors > v:
            violated_limits[k] = '%s - [%s]: has %d errors > %d' % (
                k, ', '.join(matching_paths), total_errors, v,
            )

    if len(violated_limits) > 0 or not options.quiet:
        if options.format == MypyFormat.vscode:
            for o in errors:
                sys.stdout.write('[%s] %s:%d,%d %s - %s - %s\n' % (
                    o['severity'],
                    o['file'],
                    o['line'],
                    o['column'],
                    o['message'],
                    o['hint'],
                    o['code'],
                ))
            sys.stdout.flush()
            #logger.info(json.dumps(errors, indent=4))
        else:
            logger.info(json.dumps(errors, indent=4))

        #if len(violated_limits) > 0:
        #    logger.info(json.dumps(violated_limits, indent=4))
        logger.info(json.dumps(dict(
            max_errors=settings.max_errors,
            violated_limits=violated_limits,
            histogram=h,
            elapsed=(done_at - started_at).total_seconds(),
        ), indent=4))

    if len(violated_limits) > 0:
        sys.exit(1)

if __name__ == '__main__':
    from . import logging as _logging
    _logging.setup()
    run(sys.argv[1:])
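A usage sketch for the mypy wrapper; the error budget below is an illustrative value, and the same settings can come from `online_fxreader_pr34_mypy_*` environment variables instead of being passed in code.

from online.fxreader.pr34.commands_typed import mypy as pr34_mypy

pr34_mypy.run(
    ['-f', 'vscode'],
    settings=pr34_mypy.MypySettings(
        # fail only if this prefix accumulates more than 0 errors
        max_errors={'python/online/fxreader/pr34/': 0},
    ),
)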
python/online/fxreader/pr34/commands_typed/os.py (new file, +122)
@@ -0,0 +1,122 @@
import shutil
import glob
import subprocess
import pydantic
import pathlib
import ctypes
import os
import sys
import logging
import dataclasses

logger = logging.getLogger(__name__)

from typing import (overload, Optional, Literal, Any, Annotated,)

from .cli_bootstrap import PyProject

@overload
def shutil_which(
    name: str,
    raise_on_failure: Literal[True],
) -> str: ...

@overload
def shutil_which(
    name: str,
    raise_on_failure: bool,
) -> Optional[str]: ...

def shutil_which(
    name: str,
    raise_on_failure: bool,
) -> Optional[str]:
    res = shutil.which(name)
    if res is None and raise_on_failure:
        raise NotImplementedError
    else:
        return res

def runtime_libdirs_init(
    project: PyProject,
) -> None:
    if sys.platform == 'linux':
        ld_library_path : list[pathlib.Path] = [
            o
            for o in [
                *[
                    o.absolute()
                    for o in (
                        project.runtime_libdirs
                        if project.runtime_libdirs
                        else []
                    )
                ],
                *[
                    pathlib.Path(o)
                    for o in os.environ.get(
                        'LD_LIBRARY_PATH',
                        ''
                    ).split(os.path.pathsep)
                    if o != ''
                ]
            ]
        ]

        ld_library_path_present : list[pathlib.Path] = []

        for o in ld_library_path:
            if not o.exists():
                logger.warning(dict(
                    ld_library_path=o,
                    msg='not found',
                ))

            ld_library_path_present.append(o)

        os.environ.update(
            LD_LIBRARY_PATH=os.path.pathsep.join([
                str(o) for o in ld_library_path_present
            ])
        )

        for preload_path in (project.runtime_preload or []):
            for preload_found in glob.glob(str(
                preload_path.parent / ('lib%s.so' % preload_path.name)
            )):
                logger.info(dict(
                    preload_path=preload_path, preload_found=preload_found,
                    # lib_path=o,
                    msg='load_library',
                ))

                ctypes.cdll.LoadLibrary(preload_found)
    else:
        raise NotImplementedError

class interfaces_index_t:
    @dataclasses.dataclass
    class Interface:
        @dataclasses.dataclass
        class AddrInfo:
            family: str
            local: str

        name: Annotated[
            str,
            pydantic.Field(
                alias='ifname',
            )
        ]
        addr_info: list[AddrInfo]

def interfaces_index() -> list[interfaces_index_t.Interface]:
    res = pydantic.RootModel[
        list[interfaces_index_t.Interface]
    ].model_validate_json(
        subprocess.check_output([
            'ip', '-j', 'addr',
        ]).decode('utf-8')
    ).root

    return res
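A usage sketch; `interfaces_index()` shells out to `ip -j addr`, so it only works on a Linux host with iproute2, and the printed interface names depend on the machine.

from online.fxreader.pr34.commands_typed.os import shutil_which, interfaces_index

meson_path = shutil_which('meson', True)   # raises if meson is not on PATH

for iface in interfaces_index():
    print(iface.name, [a.local for a in iface.addr_info])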
python/online/fxreader/pr34/commands_typed/pip.py (new file, +524)
@@ -0,0 +1,524 @@
import contextlib
import pathlib
import sys
import enum
import dataclasses
import subprocess
import tempfile

import unittest.mock
import logging
import typing

if typing.TYPE_CHECKING:
    import pip._internal.commands.show
    import pip._internal.commands.download
    import pip._internal.cli.main_parser
    import pip._internal.models.index
    import pip._internal.utils.temp_dir
    import pip._internal.cli.main
    import pip._internal.network.download
    import pip._internal.resolution.base
    import pip._internal.resolution.resolvelib.resolver
    import pip._internal.operations.prepare

from typing import (
    Literal, Optional, Iterable, Any,
)

logger = logging.getLogger(__name__)


def pip_show(
    argv: list[str],
) -> list['pip._internal.commands.show._PackageInfo']:
    import pip._internal.commands.show
    return list(
        pip._internal.commands.show.search_packages_info(
            argv,
        )
    )


class pip_resolve_t:
    class kwargs_t:
        class mode_t(enum.StrEnum):
            copy_paste = "copy_paste"
            monkey_patch = "monkey_patch"
            uv_pip_freeze = "uv_pip_freeze"
            uv_pip_compile = "uv_pip_compile"

    @dataclasses.dataclass
    class res_t:
        @dataclasses.dataclass
        class download_info_t:
            url: str
            sha256: str
            constraint: str

        txt: Optional[str] = None
        entries: Optional[list[download_info_t]] = None


def pip_resolve_entries_to_txt(
    entries: list[pip_resolve_t.res_t.download_info_t]
) -> str:
    return '\n'.join([
        '#%s\n%s %s' % (
            o.url,
            o.constraint,
            ' '.join([
                '--hash=sha256:%s' % o2
                for o2 in o.sha256
            ])
        )
        for o in entries
    ])

def pip_resolve(
    argv: list[str],
    mode: pip_resolve_t.kwargs_t.mode_t,
    requirements: Optional[list[str]] = None,
) -> pip_resolve_t.res_t:
    if mode is pip_resolve_t.kwargs_t.mode_t.copy_paste:
        import pip._internal.commands.show
        import pip._internal.commands.download
        import pip._internal.cli.cmdoptions
        import pip._internal.cli.main_parser
        import pip._internal.models.index
        import pip._internal.utils.temp_dir
        import pip._internal.cli.main
        import pip._internal.network.download
        import pip._internal.resolution.base
        import pip._internal.req.req_install
        import pip._internal.resolution.resolvelib.resolver
        import pip._internal.operations.prepare
        import pip._internal.operations.build.build_tracker
        import pip._internal.models.direct_url

        with contextlib.ExitStack() as stack:
            stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager())

            t2 = pip._internal.cli.main_parser.create_main_parser()

            t3 = t2.parse_args(["download"])
            t1 = pip._internal.commands.download.DownloadCommand("download", "")

            stack.enter_context(t1.main_context())

            # options = pip._internal.commands.download.Values()
            options = t3[0]
            options.python_version = None
            options.platforms = []
            options.abis = []
            options.implementation = []
            options.format_control = None
            options.ignore_dependencies = None
            options.index_url = pip._internal.models.index.PyPI.simple_url
            options.extra_index_urls = []
            options.no_index = None
            options.find_links = []
            options.pre = None
            options.prefer_binary = True
            options.only_binary = True
            options.constraints = []
            options.use_pep517 = None
            options.editables = []
            options.requirements = []
            options.src_dir = str(pathlib.Path(__file__).parent)
            options.build_isolation = None
            options.check_build_deps = None
            options.progress_bar = True
            options.require_hashes = None
            options.ignore_requires_python = False
            # options.cache_dir
            pip._internal.cli.cmdoptions.check_dist_restriction(options)
            # t1._in_main_context = True
            session = t1.get_default_session(options)
            target_python = pip._internal.cli.cmdoptions.make_target_python(options)
            finder = t1._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )
            build_tracker = t1.enter_context(
                pip._internal.operations.build.build_tracker.get_build_tracker()
            )
            reqs = t1.get_requirements(
                [
                    #'pip', 'uv', 'ipython',
                    *argv,
                ],
                options,
                finder,
                session,
            )
            pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs)
            directory = pip._internal.utils.temp_dir.TempDirectory(
                delete=True, kind="download", globally_managed=True
            )
            preparer = t1.make_requirement_preparer(
                temp_build_dir=directory,
                options=options,
                build_tracker=build_tracker,
                session=session,
                finder=finder,
                download_dir=None,
                use_user_site=False,
                verbosity=False,
            )
            resolver = t1.make_resolver(
                preparer=preparer,
                finder=finder,
                options=options,
                ignore_requires_python=options.ignore_requires_python,
                use_pep517=options.use_pep517,
                py_version_info=options.python_version,
            )
            t1.trace_basic_info(finder)
            requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

            res = pip_resolve_t.res_t()

            res.entries = []

            for k, v in requirement_set.requirements.items():
                assert not v.download_info is None
                assert isinstance(
                    v.download_info.info,
                    pip._internal.models.direct_url.ArchiveInfo,
                )
                assert not v.download_info.info.hashes is None

                res.entries.append(
                    pip_resolve_t.res_t.download_info_t(
                        constraint=k,
                        sha256=v.download_info.info.hashes["sha256"],
                        url=v.download_info.url,
                    )
                )

            res.txt = pip_resolve_entries_to_txt(
                res.entries
            )

            return res
    elif mode is pip_resolve_t.kwargs_t.mode_t.monkey_patch:
        import pip._internal.commands.show
        import pip._internal.commands.download
        import pip._internal.cli.main_parser
        import pip._internal.models.index
        import pip._internal.models.link
        from pip._internal.models.link import (
            Link,
        )
        import pip._internal.utils.temp_dir
        from pip._internal.metadata.base import (
            BaseDistribution,
        )
        import pip._internal.cli.main
        import pip._internal.network.download
        import pip._internal.resolution.base
        import pip._internal.resolution.resolvelib.resolver
        import pip._internal.operations.prepare
        from pip._internal.network.download import (
            Downloader,
        )
        from pip._internal.operations.prepare import (
            File,
        )
        from pip._internal.req.req_set import RequirementSet
        from pip._internal.utils.hashes import Hashes
        from pip._internal.req.req_install import InstallRequirement

        downloader_call_def = pip._internal.network.download.Downloader.__call__

        def downloader_call(
            _self: pip._internal.network.download.Downloader,
            link: pip._internal.models.link.Link,
            location: str,
        ) -> tuple[str, str]:
            logger.info(
                dict(
                    url=link.url,
                )
            )

            return downloader_call_def(
                _self,
                link, location,
            )

        batch_downloader_call_def = (
            pip._internal.network.download.BatchDownloader.__call__
        )

        def batch_downloader_call(
            _self: pip._internal.network.download.BatchDownloader,
            links: Iterable[pip._internal.models.link.Link],
            location: str,
        ) -> Iterable[
            tuple[
                pip._internal.models.link.Link,
                tuple[str, str]
            ]
        ]:
            # print(args)

            logger.info(
                dict(
                    links=links,
                    location=location,
                )
            )

            return [
                (o, ("/dev/null", ''))
                for o in links
            ]

        # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve
        base_resolver_resolve_def = (
            pip._internal.resolution.resolvelib.resolver.Resolver.resolve
        )

        result_requirements : list[
            RequirementSet | InstallRequirement
        ] = []

        def base_resolver_resolve(
            _self: pip._internal.resolution.resolvelib.resolver.Resolver,
            root_reqs: list[
                InstallRequirement,
            ],
            check_supported_wheels: bool,
        ) -> RequirementSet:
            # print(args, kwargs)

            res = base_resolver_resolve_def(
                _self,
                root_reqs,
                check_supported_wheels
            )

            result_requirements.append(res)
            raise NotImplementedError
            return res

        get_http_url_def = pip._internal.operations.prepare.get_http_url

        def get_http_url(
            link: Link,
            download: Downloader,
            download_dir: Optional[str] = None,
            hashes: Optional[Hashes] = None,
        ) -> File:
            logger.info(
                dict(
                    url=link.url,
                    hashes=hashes,
                )
            )

            if link.url.endswith(".whl"):
                hashes = None

                return File(
                    "/dev/null",
                    '',
                )
            else:
                return get_http_url_def(
                    link,
                    download,
                    download_dir,
                    hashes
                )

        prepare_linked_requirements_more_def = pip._internal.operations.prepare.RequirementPreparer.prepare_linked_requirements_more

        def prepare_linked_requirements_more(
            _self: pip._internal.resolution.resolvelib.resolver.Resolver,
            reqs: Iterable[InstallRequirement],
            parallel_builds: bool = False,
        ) -> None:
            result_requirements.extend(
                reqs
            )
            raise NotImplementedError

        _complete_partial_requirements_def = pip._internal.operations.prepare.RequirementPreparer._complete_partial_requirements

        def _complete_partial_requirements(
            _self: pip._internal.resolution.resolvelib.resolver.Resolver,
            partially_downloaded_reqs: Iterable[InstallRequirement],
            parallel_builds: bool = False,
        ) -> None:
            result_requirements.extend(
                partially_downloaded_reqs
            )
            raise NotImplementedError

        patches : list[Any] = []

        patches.append(
            unittest.mock.patch.object(
                pip._internal.network.download.Downloader, "__call__", downloader_call
            )
        )
        # patches.append(
        #     unittest.mock.patch.object(
        #         pip._internal.network.download.BatchDownloader,
        #         '__call__',
        #         batch_downloader_call
        #     )
        # )
        # patches.append(
        #     unittest.mock.patch.object(
        #         pip._internal.resolution.base.BaseResolver, 'resolve', base_resolver_resolve))

        patches.append(
            unittest.mock.patch.object(
                pip._internal.resolution.resolvelib.resolver.Resolver,
                "resolve",
                base_resolver_resolve,
            )
        )
        patches.append(
            unittest.mock.patch.object(
                pip._internal.operations.prepare,
                "get_http_url",
                get_http_url,
            )
        )
        patches.append(
            unittest.mock.patch.object(
                pip._internal.operations.prepare.RequirementPreparer,
                "prepare_linked_requirements_more",
                prepare_linked_requirements_more,
            )
        )
        # patches.append(
        #     unittest.mock.patch.object(
        #         pip._internal.operations.prepare.RequirementPreparer,
        #         '_complete_partial_requirements',
        #         _complete_partial_requirements
        #     )
        # )

        with contextlib.ExitStack() as stack:
            for p in patches:
                stack.enter_context(p)

            pip._internal.cli.main.main(
                [
                    "download",
                    "-q",
                    "--no-cache",
                    "-d",
                    "/dev/null",
                    *argv,
                    # 'numpy',
                ]
            )

        # return sum([
        #     [
        #         pip_resolve_t.res_t.download_info_t(
        #             constraint=k,
        #             sha256=v.download_info.info.hashes['sha256'],
        #             url=v.download_info.url,
        #         )
        #         for k, v in o.requirements.items()
        #     ]
        #     for o in result_requirements
        # ], [])
        logger.warning(result_requirements)

        res = pip_resolve_t.res_t()

        res.entries = []

        for o in result_requirements:
            assert isinstance(o, InstallRequirement)

            sha256_hashes = o.hashes()._allowed["sha256"]
            assert len(sha256_hashes) == 1
            assert not o.link is None

            res.entries.append(
                pip_resolve_t.res_t.download_info_t(
                    constraint=str(o.req),
                    sha256=sha256_hashes[0],
                    url=o.link.url,
                )
            )

        res.txt = pip_resolve_entries_to_txt(
            res.entries
        )

        return res
    elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_freeze:
        assert len(argv) == 0

        pip_freeze = subprocess.check_output(
            [
                sys.executable,
                "-m",
                "uv",
                "pip",
                "freeze",
            ],
        ).decode('utf-8')
        pip_compile = subprocess.check_output(
            [
                sys.executable, '-m',
                'uv', 'pip', 'compile',
                '--generate-hashes',
                '-',
            ],
            input=pip_freeze.encode('utf-8')
        ).decode('utf-8')

        return pip_resolve_t.res_t(
            txt=pip_compile,
        )
    elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_compile:
        with contextlib.ExitStack() as stack:
            if not requirements is None:
                # assert len(argv) == 0

                f = stack.enter_context(
                    tempfile.NamedTemporaryFile(
                        suffix='.txt',
                    )
                )
                f.write(
                    ('\n'.join(requirements)).encode('utf-8')
                )
                f.flush()

                argv.append(f.name)

            if argv[0] == '--':
                del argv[0]

            pip_compile = subprocess.check_output(
                [
                    sys.executable, '-m',
                    'uv', 'pip', 'compile',
                    '--generate-hashes',
                    *argv,
                ],
            ).decode('utf-8')

            return pip_resolve_t.res_t(
                txt=pip_compile,
            )
    else:
        raise NotImplementedError
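A usage sketch for `pip_resolve`; the package list is illustrative, and the `uv_pip_compile` mode shells out to `uv pip compile`, so uv must be available as a module in the current interpreter.

from online.fxreader.pr34.commands_typed.pip import pip_resolve, pip_resolve_t

res = pip_resolve(
    argv=[],
    mode=pip_resolve_t.kwargs_t.mode_t.uv_pip_compile,
    requirements=['numpy', 'cryptography'],
)
print(res.txt)   # pinned requirements with --hash=sha256:... lines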
python/online/fxreader/pr34/commands_typed/typing.py (new file, +27)
@@ -0,0 +1,27 @@
# https://github.com/python/typing/issues/59#issuecomment-353878355
# https://gitea.fxreader.online/fxreader.online/freelance-project-34-marketing-blog/issues/2#issue-25

import typing
from typing import Any
from typing_extensions import Protocol
from abc import abstractmethod

C = typing.TypeVar("C", bound="Comparable")

class Comparable(Protocol):
    @abstractmethod
    def __eq__(self, other: Any) -> bool:
        pass

    @abstractmethod
    def __lt__(self: C, other: C) -> bool:
        pass

    def __gt__(self: C, other: C) -> bool:
        return (not self < other) and self != other

    def __le__(self: C, other: C) -> bool:
        return self < other or self == other

    def __ge__(self: C, other: C) -> bool:
        return (not self < other)
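A usage sketch for the `Comparable` protocol: bounding a type variable so that a generic helper only accepts orderable element types; the `smallest` helper is illustrative.

from typing import TypeVar
from online.fxreader.pr34.commands_typed.typing import Comparable

T = TypeVar('T', bound=Comparable)

def smallest(items: list[T]) -> T:
    # linear scan; relies only on __lt__ from the protocol
    res = items[0]
    for o in items[1:]:
        if o < res:
            res = o
    return res

print(smallest([3, 1, 2]))   # 1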
python/online/fxreader/pr34/py.typed (new file, empty)
python/online/fxreader/pr34/tasks/__init__.py (new file, empty)
python/online/fxreader/pr34/tasks/cython.py (new file, +277)
@@ -0,0 +1,277 @@
import time
import glob
import io
import os
import numpy
import numpy.typing
import functools
import pathlib
import threading
import cython
import datetime

from typing import (Any, Optional, TypeVar, Type, cast)
# from scoping import scoping as s

def test(
    _id: int,
    T: float,
    a: numpy.ndarray[Any, numpy.dtype[numpy.int32]],
) -> None:
    with cython.nogil:
        #if True:
        started_at = datetime.datetime.now()
        print('started')
        def elapsed() -> float:
            return (datetime.datetime.now() - started_at).total_seconds()
        #a = 0
        while elapsed() < T:
            #a += 1
            for k in range(1024 * 1024):
                a[_id] += 1

        print(['done', started_at, elapsed(), a[_id]])

M = TypeVar('M', bound=Type[Any])

def build(content: str, module: M) -> M:
    import pathlib
    import tempfile
    import hashlib
    import Cython.Build.Inline

    sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex()

    output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute()


    if not output_dir.exists() or True:
        os.makedirs(str(output_dir), exist_ok=True)

        source_path = output_dir / ('_%s.pyx' % sha256sum)
        if not source_path.exists():
            with io.open(str(source_path), 'w') as f:
                f.write(content)

        t1 = Cython.Build.Inline._get_build_extension()
        t1.extensions = Cython.Build.cythonize(str(source_path))
        t1.build_temp = str(pathlib.Path('/'))
        t1.build_lib = str(output_dir)
        #t2 = Cython.Build.Inline.Extension(
        #    name=sha256sum,
        #)
        t1.run()

    return cast(
        M,
        Cython.Build.Inline.load_dynamic(
            '_%s' % sha256sum,
            glob.glob(
                str(output_dir / ('_%s*.so' % sha256sum))
            )[0]
        )
    )

    raise NotImplementedError

def mypyc_build(file_path: pathlib.Path) -> Any:
    import pathlib
    import tempfile
    import hashlib
    import mypyc.build
    import Cython.Build.Inline

    assert isinstance(file_path, pathlib.Path)

    #sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex()

    #output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute()
    output_dir = pathlib.Path('.') / 'tmp' / 'mypyc'
    sha256sum = file_path.stem
    lib_pattern = file_path.parent / ('%s.cpython*.so' % sha256sum)
    lib_dir = pathlib.Path('.')


    def lib_path_glob(path: str | pathlib.Path) -> Optional[pathlib.Path]:
        res : list[str] = glob.glob(str(path))

        if len(res) == 0:
            return None
        else:
            return pathlib.Path(res[0])

    need_build : bool = False

    lib_path : Optional[pathlib.Path] = None

    lib_path = lib_path_glob(lib_pattern)

    if not lib_path is None:
        t2 = file_path.stat()
        t3 = lib_path.stat()
        if t3.st_mtime < t2.st_mtime:
            need_build = True

        del t2
        del t3
    else:
        need_build = True


    if need_build:
        for o in [
            output_dir,
            output_dir / 'build' / file_path.parent,
        ]:
            os.makedirs(
                str(o),
                exist_ok=True
            )
        #source_path = output_dir / ('_%s.py' % sha256sum)
        source_path = file_path
        #with io.open(str(source_path), 'w') as f:
        #    f.write(content)

        t1 = Cython.Build.Inline._get_build_extension()
        t1.extensions = mypyc.build.mypycify(
            [str(source_path)],
            target_dir=str(output_dir / 'build')
        )
        t1.build_temp = str(output_dir)
        t1.build_lib = str(lib_dir)
        #t2 = Cython.Build.Inline.Extension(
        #    name=sha256sum,
        #)
        t1.run()

        lib_path = lib_path_glob(lib_pattern)

    return Cython.Build.Inline.load_dynamic(
        #'_%s' % sha256sum,
        #t1.extensions[0].name,
        file_path.stem,
        str(lib_path),
    )

    raise NotImplementedError

class Source:
    @staticmethod
    def test2(
        _a : numpy.ndarray[Any, numpy.dtype[numpy.int64]],
        _id : numpy.dtype[numpy.int32] | int,
        T : float=16
    ) -> int:
        raise NotImplementedError


source = build(r'''
cimport cython

@cython.boundscheck(False)
@cython.wraparound(False)
def test4(int[:] a, int[:] b):
    cdef int N = a.shape[0]
    assert N == b.shape[0]

    with cython.nogil:
        for i in range(N):
            a[i] += b[i]
    return N

import datetime

def elapsed(started_at: datetime.datetime):
    res = (datetime.datetime.now() - started_at).total_seconds()

    return res

@cython.boundscheck(False)  # Deactivate bounds checking
@cython.wraparound(False)   # Deactivate negative indexing.
def has_time(started_at: datetime.datetime, T: float):
    t1 = elapsed(started_at)

    res = t1 < T

    return res

@cython.boundscheck(False)
@cython.wraparound(False)
def test2(long long [:] _a, int _id, double T=16) -> int:
    started_at = datetime.datetime.now()

    print('started')

    cdef int C = 1;

    cdef int cond;

    with cython.nogil:
        #if True:
        #a = 0
        while True:

            with cython.gil:
                cond = has_time(started_at, T)
                #cond = 0

            if cond != 1:
                break

            #a += 1
            for k in range(1024 * 1024 * 1024):
                _a[_id] += C

    print(['done', started_at, elapsed(started_at), _a[_id]])

    return _a[_id]

''', Source)

def test_cython(N: int=4, T:int=16) -> None:
    #a = [0] * N
    a = numpy.zeros((N,), dtype=numpy.int64)

    t = [
        threading.Thread(
            target=functools.partial(
                source.test2,
                a,
                k,
                T,
            )
        )
        for k in range(N)
    ]

    for o in t:
        o.start()
    for o in t:
        o.join()

    #cython_module['test2'](a, 0)

def test_mypyc(N: int=4, W:int=35) -> None:
    cython2 = mypyc_build(
        (pathlib.Path(__file__).parent / 'cython2.py').relative_to(
            pathlib.Path.cwd()
        )
    )

    # from .cython2 import fib

    #a = [0] * N
    t = [
        threading.Thread(
            target=functools.partial(
                cython2.fib,
                W,
            )
        )
        for k in range(N)
    ]

    for o in t:
        o.start()
    for o in t:
        o.join()
python/online/fxreader/pr34/tasks/cython2.py (new file, +11)
@@ -0,0 +1,11 @@
import time

def fib(n: int) -> int:
    if n <= 1:
        return n
    else:
        return fib(n - 2) + fib(n - 1)

t0 = time.time()
fib(32)
print(time.time() - t0)
python/online/fxreader/pr34/tests/__init__.py (new file, empty)
python/online/fxreader/pr34/tests/test_crypto.py (new file, +36)
@@ -0,0 +1,36 @@
from online.fxreader.pr34.commands_typed import crypto

import unittest


class TestCrypto(unittest.TestCase):
    def test_password_utils(self) -> None:
        salt = b'asdfasdfasdf'

        secret = 'blah'

        hash_res = crypto.PasswordUtils.secret_hash(
            secret,
            mode='bytes',
            salt=salt,
        )
        self.assertEqual(
            hash_res,
            (
                salt,
                b'\xdak\xd15\xfa\x8e\xc8\r\xc3\xd2c\xf1m\xb0\xbf\xe6\x98\x01$!j\xc8\xc0Hh\x84\xea,\x91\x8b\x08\xce',
            ),
        )

        check_res = crypto.PasswordUtils.secret_check(
            secret,
            *hash_res,
        )

        self.assertTrue(check_res)

        self.assertFalse(
            crypto.PasswordUtils.secret_check(
                secret + 'asdfasdfsdf',
                *hash_res,
            )
        )
Some files were not shown because too many files have changed in this diff.