From 8510d490159e026c6cbfb711ea56f50c0f7add95 Mon Sep 17 00:00:00 2001 From: Siarhei Siniak Date: Wed, 14 May 2025 17:21:24 +0300 Subject: [PATCH 1/4] [+] improve logging --- python/online/fxreader/pr34/commands_typed/cli_bootstrap.py | 1 + 1 file changed, 1 insertion(+) diff --git a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py index aa32a40..7ac5eb5 100644 --- a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py +++ b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py @@ -208,6 +208,7 @@ def env_bootstrap( ], []) logger.info(dict( + requirements_name=requirements_name, early_dependencies=early_dependencies, )) From 0f17070c627a44bdc18f5a55295cbb2a43ac33e4 Mon Sep 17 00:00:00 2001 From: Siarhei Siniak Date: Tue, 20 May 2025 11:13:17 +0300 Subject: [PATCH 2/4] [+] reformat with ruff --- python/_m.py | 390 +- python/cli.py | 228 +- python/online/fxreader/pr34/commands.py | 6925 ++++++++--------- .../fxreader/pr34/commands_typed/argparse.py | 31 +- .../fxreader/pr34/commands_typed/asyncio.py | 23 +- .../fxreader/pr34/commands_typed/cli.py | 923 +-- .../pr34/commands_typed/cli_bootstrap.py | 525 +- .../fxreader/pr34/commands_typed/crypto.py | 151 +- .../fxreader/pr34/commands_typed/debug.py | 52 +- .../fxreader/pr34/commands_typed/logging.py | 22 +- .../fxreader/pr34/commands_typed/mypy.py | 340 +- .../online/fxreader/pr34/commands_typed/os.py | 165 +- .../fxreader/pr34/commands_typed/pip.py | 838 +- .../fxreader/pr34/commands_typed/typing.py | 27 +- python/online/fxreader/pr34/tasks/ble.py | 170 +- python/online/fxreader/pr34/tasks/cython.py | 307 +- python/online/fxreader/pr34/tasks/cython2.py | 10 +- .../fxreader/pr34/tasks/jigsaw_toxic.py | 590 +- .../online/fxreader/pr34/tasks/mlb_player.py | 4077 +++++----- .../online/fxreader/pr34/tests/test_crypto.py | 52 +- 20 files changed, 7886 insertions(+), 7960 deletions(-) diff --git a/python/_m.py b/python/_m.py index f2a003d..b63426e 100644 --- a/python/_m.py +++ b/python/_m.py @@ -1,5 +1,5 @@ #!/usr/bin/env python3 -#vim: set filetype=python +# vim: set filetype=python import logging import json @@ -7,158 +7,184 @@ import enum import pathlib import sys import argparse -#import optparse + +# import optparse import dataclasses import subprocess import os - from typing import ( - Optional, Any, TypeAlias, Literal, cast, BinaryIO, Generator, - ClassVar, Self, + Optional, + Any, + TypeAlias, + Literal, + cast, + BinaryIO, + Generator, + ClassVar, + Self, ) logger = logging.getLogger() + @dataclasses.dataclass class Settings: - project_root : pathlib.Path = pathlib.Path.cwd() + project_root: pathlib.Path = pathlib.Path.cwd() - env_path : pathlib.Path = project_root / 'tmp' / 'env3' + env_path: pathlib.Path = project_root / 'tmp' / 'env3' - _settings : ClassVar[Optional['Settings']] = None + _settings: ClassVar[Optional['Settings']] = None - @classmethod - def settings(cls) -> Self: - if cls._settings is None: - cls._settings = cls() + @classmethod + def settings(cls) -> Self: + if cls._settings is None: + cls._settings = cls() + + return cls._settings - return cls._settings def js(argv: list[str]) -> int: - return subprocess.check_call([ - 'sudo', - 'docker-compose', - '--project-directory', - Settings.settings().project_root, - '-f', - Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml', - *argv, - ]) + return subprocess.check_call( + [ + 'sudo', + 'docker-compose', + '--project-directory', + Settings.settings().project_root, + 
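# [editor's sketch, not part of the commit] js() above passes pathlib.Path
# values straight into the subprocess argv alongside plain strings; that
# works because subprocess accepts os.PathLike arguments (Python >= 3.6,
# POSIX). A minimal standalone version of the same pattern, with
# hypothetical paths and subcommand:
import pathlib
import subprocess

project_root = pathlib.Path.cwd()  # assumption: repo root is the cwd
compose_file = project_root / 'docker' / 'js' / 'docker-compose.yml'

subprocess.check_call([
    'docker-compose',
    '--project-directory', project_root,  # Path objects are fspath()-ed
    '-f', compose_file,
    'config',  # hypothetical subcommand; the real js() forwards *argv
])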
'-f', + Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml', + *argv, + ] + ) + def env( - argv: Optional[list[str]] = None, - mode: Literal['exec', 'subprocess'] = 'subprocess', - **kwargs: Any, + argv: Optional[list[str]] = None, + mode: Literal['exec', 'subprocess'] = 'subprocess', + **kwargs: Any, ) -> Optional[subprocess.CompletedProcess[bytes]]: - env_path = Settings.settings().env_path + env_path = Settings.settings().env_path - if not env_path.exists(): - subprocess.check_call([ - sys.executable, '-m', 'venv', - '--system-site-packages', - str(env_path) - ]) + if not env_path.exists(): + subprocess.check_call([sys.executable, '-m', 'venv', '--system-site-packages', str(env_path)]) - subprocess.check_call([ - env_path / 'bin' / 'python3', - '-m', 'pip', - 'install', '-r', 'requirements.txt', - ]) + subprocess.check_call( + [ + env_path / 'bin' / 'python3', + '-m', + 'pip', + 'install', + '-r', + 'requirements.txt', + ] + ) - if not argv is None: - python_path = str(env_path / 'bin' / 'python3') + if not argv is None: + python_path = str(env_path / 'bin' / 'python3') - if mode == 'exec': - os.execv( - python_path, - [ - python_path, - *argv, - ], - ) - return None - elif mode == 'subprocess': - return subprocess.run([ - python_path, - *argv, - ], **kwargs) - else: - raise NotImplementedError + if mode == 'exec': + os.execv( + python_path, + [ + python_path, + *argv, + ], + ) + return None + elif mode == 'subprocess': + return subprocess.run( + [ + python_path, + *argv, + ], + **kwargs, + ) + else: + raise NotImplementedError + + return None - return None def ruff(argv: list[str]) -> None: - parser = argparse.ArgumentParser() - parser.add_argument( - '-i', - dest='paths', - help='specify paths to check', - default=[], - action='append', - ) - parser.add_argument( - '-e', - dest='exclude', - help='rules to ignore', - default=[], - action='append', - ) + parser = argparse.ArgumentParser() + parser.add_argument( + '-i', + dest='paths', + help='specify paths to check', + default=[], + action='append', + ) + parser.add_argument( + '-e', + dest='exclude', + help='rules to ignore', + default=[], + action='append', + ) - options, args = parser.parse_known_args(argv) + options, args = parser.parse_known_args(argv) - if len(options.paths) == 0: - options.paths.extend([ - '.', - 'dotfiles/.local/bin/commands', - ]) + if len(options.paths) == 0: + options.paths.extend( + [ + '.', + 'dotfiles/.local/bin/commands', + ] + ) - if len(options.exclude) == 0: - options.exclude.extend([ - 'E731', - 'E713', - 'E714', - 'E703', - ]) + if len(options.exclude) == 0: + options.exclude.extend( + [ + 'E731', + 'E713', + 'E714', + 'E703', + ] + ) - res = env([ - '-m', - 'ruff', - 'check', - *args, - '--output-format', 'json', - '--ignore', ','.join(options.exclude), - *options.paths, - ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + res = env( + [ + '-m', + 'ruff', + 'check', + *args, + '--output-format', + 'json', + '--ignore', + ','.join(options.exclude), + *options.paths, + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) - assert not res is None + assert not res is None - errors = json.loads(res.stdout.decode('utf-8')) + errors = json.loads(res.stdout.decode('utf-8')) - g: dict[str, Any] = dict() - for o in errors: - if not o['filename'] in g: - g[o['filename']] = [] - g[o['filename']].append(o) + g: dict[str, Any] = dict() + for o in errors: + if not o['filename'] in g: + g[o['filename']] = [] + g[o['filename']].append(o) - h = { - k : len(v) - for k, v in g.items() - } 
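# [editor's sketch] the hunk above reformats ruff()'s per-file error count;
# the underlying flow is: run ruff with JSON output, group the diagnostics
# by 'filename', report counts. A condensed standalone version (assumes a
# ruff recent enough to support `--output-format json` is on PATH; like the
# original, it does not pass check=True, since ruff exits nonzero whenever
# it finds violations):
import collections
import json
import subprocess

res = subprocess.run(
    ['ruff', 'check', '--output-format', 'json', '.'],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
)
errors = json.loads(res.stdout.decode('utf-8'))  # list of diagnostic dicts
per_file = collections.Counter(o['filename'] for o in errors)
print(json.dumps(dict(per_file), indent=4))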
+ h = {k: len(v) for k, v in g.items()} - logger.info(json.dumps(errors, indent=4)) - logger.info(json.dumps(h, indent=4)) + logger.info(json.dumps(errors, indent=4)) + logger.info(json.dumps(h, indent=4)) def inside_env() -> bool: - try: - import numpy - return True - except Exception: - return False + try: + import numpy -#class Commands(enum.StrEnum): + return True + except Exception: + return False + + +# class Commands(enum.StrEnum): # js = 'js' # mypy = 'mypy' # env = 'env' @@ -172,83 +198,97 @@ def inside_env() -> bool: # argv, # ) + def host_deps(argv: list[str]) -> None: - if sys.platform in ['linux']: - subprocess.check_call(r''' + if sys.platform in ['linux']: + subprocess.check_call( + r""" exec yay -S $(cat requirements-archlinux.txt) - ''', shell=True,) - else: - raise NotImplementedError + """, + shell=True, + ) + else: + raise NotImplementedError -Command_args = ['js', 'mypy', 'env', 'ruff', 'm2', 'host_deps',] -Command : TypeAlias = Literal['js', 'mypy', 'env', 'ruff', 'm2', 'host_deps',] +Command_args = [ + 'js', + 'mypy', + 'env', + 'ruff', + 'm2', + 'host_deps', +] + +Command: TypeAlias = Literal[ + 'js', + 'mypy', + 'env', + 'ruff', + 'm2', + 'host_deps', +] + def run(argv: Optional[list[str]] = None) -> None: - logging.basicConfig( - level=logging.INFO, - format=( - '%(levelname)s:%(name)s:%(message)s' - ':%(process)d' - ':%(asctime)s' - ':%(pathname)s:%(funcName)s:%(lineno)s' - ), - ) + logging.basicConfig( + level=logging.INFO, + format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'), + ) - if argv is None: - argv = sys.argv[:] + if argv is None: + argv = sys.argv[:] + parser = argparse.ArgumentParser() + parser.add_argument( + 'command', + #'_command', + choices=[o for o in Command_args], + # required=True, + ) - parser = argparse.ArgumentParser() - parser.add_argument( - 'command', - #'_command', - choices=[ - o - for o in Command_args - ], - #required=True, - ) + options, args = parser.parse_known_args(argv[1:]) - options, args = parser.parse_known_args(argv[1:]) + assert options.command in Command_args - assert options.command in Command_args + if len(args) > 0 and args[0] == '--': + del args[0] - if len(args) > 0 and args[0] == '--': - del args[0] + # options.command = Commands(options._command) - #options.command = Commands(options._command) + if options.command == 'js': + js(args) + elif options.command == 'host_deps': + host_deps(args) + elif options.command == 'env': + env( + args, + mode='exec', + ) + # elif options.command == 'mypy': + # if not inside_env(): + # env( + # [ + # pathlib.Path(__file__).parent / 'm.py', + # *argv[1:], + # ], + # mode='exec' + # ) + # else: + # mypy(args) + elif options.command == 'ruff': + ruff(args) + elif options.command == 'm2': + if not inside_env(): + env(['--', '_m.py', 'm2', *args]) + return - if options.command == 'js': - js(args) - elif options.command == 'host_deps': - host_deps(args) - elif options.command == 'env': - env(args, mode='exec',) - # elif options.command == 'mypy': - # if not inside_env(): - # env( - # [ - # pathlib.Path(__file__).parent / 'm.py', - # *argv[1:], - # ], - # mode='exec' - # ) - # else: - # mypy(args) - elif options.command == 'ruff': - ruff(args) - elif options.command == 'm2': - if not inside_env(): - env(['--', '_m.py', 'm2', *args]) - return + import python.tasks.cython + + python.tasks.cython.mypyc_build(pathlib.Path('_m.py')) + else: + raise NotImplementedError - import python.tasks.cython - python.tasks.cython.mypyc_build( - 
pathlib.Path('_m.py') - ) - else: - raise NotImplementedError if __name__ == '__main__': - run() \ No newline at end of file + run() diff --git a/python/cli.py b/python/cli.py index 3e9e031..4c9bc47 100644 --- a/python/cli.py +++ b/python/cli.py @@ -10,7 +10,10 @@ import enum import argparse import dataclasses -from typing import (Optional, override,) +from typing import ( + Optional, + override, +) from online.fxreader.pr34.commands_typed.logging import setup as logging_setup @@ -24,139 +27,134 @@ logger = logging.getLogger(__name__) class Command(enum.StrEnum): - mypy = 'mypy' - deploy_wheel = 'deploy:wheel' - tests = 'tests' + mypy = 'mypy' + deploy_wheel = 'deploy:wheel' + tests = 'tests' + @dataclasses.dataclass class Settings( - _cli.DistSettings, + _cli.DistSettings, ): - base_dir: pathlib.Path = pathlib.Path(__file__).parent.parent - build_dir: pathlib.Path = base_dir / 'tmp' / 'build' - wheel_dir: pathlib.Path = base_dir / 'deps' / 'dist' - env_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).env_path - python_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).python_path + base_dir: pathlib.Path = pathlib.Path(__file__).parent.parent + build_dir: pathlib.Path = base_dir / 'tmp' / 'build' + wheel_dir: pathlib.Path = base_dir / 'deps' / 'dist' + env_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).env_path + python_path: pathlib.Path = cli_bootstrap.BootstrapSettings.get(base_dir).python_path class CLI(_cli.CLI): - def __init__(self) -> None: - self.settings = Settings() - self._projects: dict[str, _cli.Project] = { - 'online.fxreader.pr34': _cli.Project( - source_dir=self.settings.base_dir / 'python', - build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build', - dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install', - ) - } + def __init__(self) -> None: + self.settings = Settings() + self._projects: dict[str, _cli.Project] = { + 'online.fxreader.pr34': _cli.Project( + source_dir=self.settings.base_dir / 'python', + build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build', + dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install', + ) + } - self._dependencies : dict[str, _cli.Dependency] = dict() + self._dependencies: dict[str, _cli.Dependency] = dict() - @override - @property - def dist_settings(self) -> _cli.DistSettings: - return self.settings + @override + @property + def dist_settings(self) -> _cli.DistSettings: + return self.settings - @override - @property - def projects(self) -> dict[str, _cli.Project]: - return self._projects + @override + @property + def projects(self) -> dict[str, _cli.Project]: + return self._projects - def mypy( - self, - argv: list[str], - ) -> None: - import online.fxreader.pr34.commands_typed.mypy as _mypy + def mypy( + self, + argv: list[str], + ) -> None: + import online.fxreader.pr34.commands_typed.mypy as _mypy - project = self._projects['online.fxreader.pr34'] + project = self._projects['online.fxreader.pr34'] - _mypy.run( - argv, - settings=_mypy.MypySettings( - paths=[ - #Settings.settings().project_root / 'dotfiles/.local/bin/commands', - # project.source_dir / 'm.py', - project.source_dir / '_m.py', - project.source_dir / 'online', - project.source_dir / 'cli.py', - self.settings.base_dir / 'm.py', - # Settings.settings().project_root / 'deps/com.github.aiortc.aiortc/src', - #Settings.settings().project_root / 'm.py', - ], - max_errors={ - 
'python/online/fxreader/pr34/commands_typed': 0, - 'python/cli.py': 0, - 'm.py': 0, - 'deps/com.github.aiortc.aiortc/src/online_fxreader': 0, - 'deps/com.github.aiortc.aiortc/src/aiortc/contrib/signaling': 0 - } - ), - ) + _mypy.run( + argv, + settings=_mypy.MypySettings( + paths=[ + # Settings.settings().project_root / 'dotfiles/.local/bin/commands', + # project.source_dir / 'm.py', + project.source_dir / '_m.py', + project.source_dir / 'online', + project.source_dir / 'cli.py', + self.settings.base_dir / 'm.py', + # Settings.settings().project_root / 'deps/com.github.aiortc.aiortc/src', + # Settings.settings().project_root / 'm.py', + ], + max_errors={ + 'python/online/fxreader/pr34/commands_typed': 0, + 'python/cli.py': 0, + 'm.py': 0, + 'deps/com.github.aiortc.aiortc/src/online_fxreader': 0, + 'deps/com.github.aiortc.aiortc/src/aiortc/contrib/signaling': 0, + }, + ), + ) - @override - @property - def dependencies(self) -> dict[str, _cli.Dependency]: - return self._dependencies + @override + @property + def dependencies(self) -> dict[str, _cli.Dependency]: + return self._dependencies - def run(self, argv: Optional[list[str]] = None) -> None: - if argv is None: - argv = copy.deepcopy(sys.argv) + def run(self, argv: Optional[list[str]] = None) -> None: + if argv is None: + argv = copy.deepcopy(sys.argv) - parser = argparse.ArgumentParser() - parser.add_argument( - 'command', - choices=[ - o.value - for o in Command - ] - ) - parser.add_argument( - '-p', '--project', - choices=[ - o - for o in self.projects - ] - ) - parser.add_argument( - '-o', '--output_dir', - default=None, - help='wheel output dir for deploy:wheel', - ) - parser.add_argument( - '-f', '--force', - default=False, - action='store_true', - help='remove install dir, before installing, default = false', - ) + parser = argparse.ArgumentParser() + parser.add_argument('command', choices=[o.value for o in Command]) + parser.add_argument('-p', '--project', choices=[o for o in self.projects]) + parser.add_argument( + '-o', + '--output_dir', + default=None, + help='wheel output dir for deploy:wheel', + ) + parser.add_argument( + '-f', + '--force', + default=False, + action='store_true', + help='remove install dir, before installing, default = false', + ) - options, args = parser.parse_known_args(argv[1:]) + options, args = parser.parse_known_args(argv[1:]) - options.command = Command(options.command) + options.command = Command(options.command) - if options.command is Command.deploy_wheel: - assert not options.project is None + if options.command is Command.deploy_wheel: + assert not options.project is None + + self.deploy_wheel( + project_name=options.project, + argv=args, + output_dir=options.output_dir, + mypy=True, + ) + elif options.command is Command.mypy: + self.mypy( + argv=args, + ) + elif options.command is Command.tests: + for k, v in self.projects.items(): + subprocess.check_call( + [ + sys.executable, + '-m', + 'unittest', + 'online.fxreader.pr34.tests.test_crypto', + *args, + ], + cwd=str(v.source_dir), + ) + else: + raise NotImplementedError - self.deploy_wheel( - project_name=options.project, - argv=args, - output_dir=options.output_dir, - mypy=True, - ) - elif options.command is Command.mypy: - self.mypy( - argv=args, - ) - elif options.command is Command.tests: - for k, v in self.projects.items(): - subprocess.check_call([ - sys.executable, - '-m', - 'unittest', - 'online.fxreader.pr34.tests.test_crypto', - *args, - ], cwd=str(v.source_dir)) - else: - raise NotImplementedError if __name__ == '__main__': - 
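# [editor's sketch] CLI.run() above dispatches on an enum.StrEnum through
# argparse; parse_known_args() keeps the unparsed remainder so it can be
# forwarded to the chosen subcommand. The same pattern in miniature
# (enum.StrEnum needs Python >= 3.11):
import argparse
import enum


class ExampleCommand(enum.StrEnum):
    mypy = 'mypy'
    tests = 'tests'


def dispatch(argv: list[str]) -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument('command', choices=[o.value for o in ExampleCommand])
    options, args = parser.parse_known_args(argv)

    command = ExampleCommand(options.command)

    if command is ExampleCommand.mypy:
        print('would run mypy, forwarding', args)
    elif command is ExampleCommand.tests:
        print('would run unittest, forwarding', args)
    else:
        raise NotImplementedError


dispatch(['tests', '-v'])  # prints: would run unittest, forwarding ['-v']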
CLI().run() + CLI().run() diff --git a/python/online/fxreader/pr34/commands.py b/python/online/fxreader/pr34/commands.py index c991081..9b78e20 100644 --- a/python/online/fxreader/pr34/commands.py +++ b/python/online/fxreader/pr34/commands.py @@ -25,8 +25,14 @@ import time import traceback from typing import ( - Literal, Optional, TypedDict, Callable, Generator, TypeAlias, Any, - cast, + Literal, + Optional, + TypedDict, + Callable, + Generator, + TypeAlias, + Any, + cast, ) @@ -34,1110 +40,1039 @@ logger = logging.getLogger(__name__) def custom_notify( - title: Optional[str]=None, - msg: Optional[str]=None, - timeout: Optional[int]=None, + title: Optional[str] = None, + msg: Optional[str] = None, + timeout: Optional[int] = None, ) -> None: - if timeout is None: - timeout = 5 + if timeout is None: + timeout = 5 - timeout2 = int(timeout * 1000) + timeout2 = int(timeout * 1000) - assert isinstance(timeout2, int) and timeout2 >= 500 + assert isinstance(timeout2, int) and timeout2 >= 500 - if title is None: - title = 'commands' + if title is None: + title = 'commands' - assert isinstance(title, str) and len(title) > 0 - assert isinstance(msg, str) and len(msg) > 0 + assert isinstance(title, str) and len(title) > 0 + assert isinstance(msg, str) and len(msg) > 0 - if sys.platform == 'darwin': - osascript_translate = functools.partial( - custom_translate, - check=lambda a, b: - not re.compile( - r'^[a-zA-Z0-9\<\>\/\(\)\s\.\,\:]*$' - )\ - .match(b) is None, - ) + if sys.platform == 'darwin': + osascript_translate = functools.partial( + custom_translate, + check=lambda a, b: not re.compile(r'^[a-zA-Z0-9\<\>\/\(\)\s\.\,\:]*$').match(b) is None, + ) + + subprocess.check_call( + [ + 'osascript', + '-e', + 'display notification "%s" with title "%s"' + % ( + osascript_translate(msg), + osascript_translate(title), + ), + ] + ) + else: + subprocess.check_call(['notify-send', '-t', '%d' % timeout2, title, msg[-128:]]) - subprocess.check_call([ - 'osascript', - '-e', - 'display notification "%s" with title "%s"' % ( - osascript_translate(msg), - osascript_translate(title), - ) - ]) - else: - subprocess.check_call([ - 'notify-send', - '-t', '%d' % timeout2, - title, - msg[-128:] - ]) class intercept_output_t: - class line_res_t(TypedDict): - aggregated: bool - line: bytes + class line_res_t(TypedDict): + aggregated: bool + line: bytes - class realtime_res_t(TypedDict): - aggregated: bool - data: bytes + class realtime_res_t(TypedDict): + aggregated: bool + data: bytes - class aggregated_res_t(TypedDict): - aggregated: bool - data: bytes - returncode: Optional[int] + class aggregated_res_t(TypedDict): + aggregated: bool + data: bytes + returncode: Optional[int] + + res_t: TypeAlias = line_res_t | realtime_res_t | aggregated_res_t - res_t: TypeAlias = line_res_t | realtime_res_t | aggregated_res_t def intercept_output( - current_subprocess: subprocess.Popen[bytes], - return_aggregated: Optional[bool]=None, - transform_callback: Optional[Callable[[bytes], Optional[bytes]]] =None, - real_time: Optional[bool]=None, - timeout: Optional[float]=None, - need_lines: Optional[bool]=None, -) -> Generator[intercept_output_t.res_t, None, None,]: - if real_time is None: - real_time = False + current_subprocess: subprocess.Popen[bytes], + return_aggregated: Optional[bool] = None, + transform_callback: Optional[Callable[[bytes], Optional[bytes]]] = None, + real_time: Optional[bool] = None, + timeout: Optional[float] = None, + need_lines: Optional[bool] = None, +) -> Generator[ + intercept_output_t.res_t, + None, + None, +]: 
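# [editor's note, illustrative and POSIX-only] the body below multiplexes
# the child's stdout through select.poll() and yields chunks/lines as they
# arrive; a condensed standalone version of that read loop, with a
# hypothetical command:
import select
import subprocess

cmd = ['ping', '-c', '3', '127.0.0.1']  # assumption: any chatty command works
with subprocess.Popen(cmd, stdout=subprocess.PIPE) as p:
    poller = select.poll()
    poller.register(p.stdout, select.POLLIN)
    while p.poll() is None:
        if poller.poll(100):  # block at most 100 ms waiting for data
            chunk = p.stdout.read1(65536)  # read only what is buffered
            if chunk:
                print(chunk.decode('utf-8', 'replace'), end='')
    print(p.stdout.read().decode('utf-8', 'replace'), end='')  # drain the tail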
+ if real_time is None: + real_time = False - start_timestamp = datetime.datetime.now() + start_timestamp = datetime.datetime.now() - if not return_aggregated: - return_aggregated = False + if not return_aggregated: + return_aggregated = False - t1 = select.poll() + t1 = select.poll() - assert not current_subprocess.stdout is None + assert not current_subprocess.stdout is None - assert isinstance(current_subprocess.stdout, io.BufferedReader) + assert isinstance(current_subprocess.stdout, io.BufferedReader) - t1.register(current_subprocess.stdout, select.POLLIN) - #print([current_subprocess, current_subprocess.poll()]) - output: list[bytes] = [] - buffer: collections.deque[bytes] = collections.deque() - buffer_lines: collections.deque[bytes] = collections.deque() + t1.register(current_subprocess.stdout, select.POLLIN) + # print([current_subprocess, current_subprocess.poll()]) + output: list[bytes] = [] + buffer: collections.deque[bytes] = collections.deque() + buffer_lines: collections.deque[bytes] = collections.deque() - last_data = None + last_data = None - while not ( - not current_subprocess.poll() is None and \ - not last_data is None - ): - if not timeout is None and \ - (datetime.datetime.now() - start_timestamp).total_seconds() > timeout: - break + while not (not current_subprocess.poll() is None and not last_data is None): + if not timeout is None and (datetime.datetime.now() - start_timestamp).total_seconds() > timeout: + break - t2 = t1.poll(100) - if ( - len(t2) == 1 and (t2[0][1] & select.POLLIN) > 0 and \ - not (isinstance(last_data, bytes) and len(last_data) == 0) or - not current_subprocess.poll() is None - ): - t3 = current_subprocess.stdout.peek() + t2 = t1.poll(100) + if ( + len(t2) == 1 + and (t2[0][1] & select.POLLIN) > 0 + and not (isinstance(last_data, bytes) and len(last_data) == 0) + or not current_subprocess.poll() is None + ): + t3 = current_subprocess.stdout.peek() - t4: bytes = current_subprocess.stdout.read(len(t3)) - assert isinstance(t4, bytes) + t4: bytes = current_subprocess.stdout.read(len(t3)) + assert isinstance(t4, bytes) - last_data = t3 - output.append(t3) - if need_lines: - buffer.append(t3) + last_data = t3 + output.append(t3) + if need_lines: + buffer.append(t3) - if need_lines: - if b'\n' in t3: - t3_pos = t3.rfind(b'\n') - buffer_lines.extend([ - o + b'\n' - for o in b''.join( - list(buffer)[:-1] + [ - t3[:t3_pos] - ], - ).splitlines() - ]) - buffer.clear() - buffer.append(t3[t3_pos + 1:]) - while len(buffer_lines) > 0: - yield intercept_output_t.line_res_t( - aggregated=False, - line=buffer_lines.popleft(), - ) + if need_lines: + if b'\n' in t3: + t3_pos = t3.rfind(b'\n') + buffer_lines.extend( + [ + o + b'\n' + for o in b''.join( + list(buffer)[:-1] + [t3[:t3_pos]], + ).splitlines() + ] + ) + buffer.clear() + buffer.append(t3[t3_pos + 1 :]) + while len(buffer_lines) > 0: + yield intercept_output_t.line_res_t( + aggregated=False, + line=buffer_lines.popleft(), + ) - else: - yield dict( - data=t3, - aggregated=False, - ) - t6 = t3 - if not transform_callback is None: - t5 = transform_callback(t3) - if not t5 is None: - t6 = t5 + else: + yield dict( + data=t3, + aggregated=False, + ) + t6 = t3 + if not transform_callback is None: + t5 = transform_callback(t3) + if not t5 is None: + t6 = t5 - if len(t6) > 0: - os.write(sys.stdout.fileno(), t6) - elif real_time: - yield dict( - data=b'', - aggregated=False, - ) + if len(t6) > 0: + os.write(sys.stdout.fileno(), t6) + elif real_time: + yield dict( + data=b'', + aggregated=False, + ) + + if 
return_aggregated: + yield dict( + data=b''.join(output), + aggregated=True, + returncode=current_subprocess.poll(), + ) - if return_aggregated: - yield dict( - data=b''.join(output), - aggregated=True, - returncode=current_subprocess.poll(), - ) def player_metadata() -> Optional[str]: - for k in range(20): - try: - metadata = { - k : subprocess.check_output(['playerctl', 'metadata', k]).decode('utf-8').strip() - for k in ['artist', 'title'] - } - return '%s - %s' % (metadata['artist'], metadata['title']) - time.sleep(1.0) - except Exception: - continue + for k in range(20): + try: + metadata = {k: subprocess.check_output(['playerctl', 'metadata', k]).decode('utf-8').strip() for k in ['artist', 'title']} + return '%s - %s' % (metadata['artist'], metadata['title']) + time.sleep(1.0) + except Exception: + continue + + return None - return None class memory_stats_t: - class res_t(TypedDict): - mem_total: int - mem_used: int + class res_t(TypedDict): + mem_total: int + mem_used: int + def memory_stats() -> memory_stats_t.res_t: - if sys.platform == 'linux': - with io.BytesIO( - subprocess.check_output( - 'free', - shell=True - ) - ) as f: - t1 = f.read().decode('utf-8').splitlines() - mem_total = int(t1[1].strip().split()[1]) - mem_used = \ - int(t1[1].strip().split()[2]) + \ - int(t1[1].strip().split()[4]) + if sys.platform == 'linux': + with io.BytesIO(subprocess.check_output('free', shell=True)) as f: + t1 = f.read().decode('utf-8').splitlines() + mem_total = int(t1[1].strip().split()[1]) + mem_used = int(t1[1].strip().split()[2]) + int(t1[1].strip().split()[4]) - return dict( - mem_total=mem_total, - mem_used=mem_used, - ) - elif sys.platform == 'darwin': - sysctl_value = lambda name, custom_cast=int: \ - custom_cast( - subprocess.check_output( - 'sysctl -a | grep %s' % name, - shell=True, - ).decode('utf-8').split(':')[1] - ) + return dict( + mem_total=mem_total, + mem_used=mem_used, + ) + elif sys.platform == 'darwin': + sysctl_value = lambda name, custom_cast=int: custom_cast( + subprocess.check_output( + 'sysctl -a | grep %s' % name, + shell=True, + ) + .decode('utf-8') + .split(':')[1] + ) - vm_pagesize = sysctl_value('vm.pagesize') - mem_total = sysctl_value('hw.memsize') + vm_pagesize = sysctl_value('vm.pagesize') + mem_total = sysctl_value('hw.memsize') - t1 = subprocess.check_output('vm_stat').decode('utf-8') - t2 = [o.split(':') for o in t1.splitlines() if ':' in o] - t3 = { - o[0].replace(' ', '_').replace('-', '_').lower() \ - : \ - int(o[1].strip().rstrip('.')) - for o in t2 - if len(o) == 2 and len(o[0]) > 0 \ - and not re.compile(r'^\s*\d+\.\s*$').match(o[1]) is None \ - and not re.compile(r'^[a-zA-Z0-9\_\-\s]+$').match(o[0]) is None - } - mem_used = ( - t3['pages_active'] + \ - t3['pages_wired_down'] - ) * vm_pagesize + t1 = subprocess.check_output('vm_stat').decode('utf-8') + t2 = [o.split(':') for o in t1.splitlines() if ':' in o] + t3 = { + o[0].replace(' ', '_').replace('-', '_').lower(): int(o[1].strip().rstrip('.')) + for o in t2 + if len(o) == 2 + and len(o[0]) > 0 + and not re.compile(r'^\s*\d+\.\s*$').match(o[1]) is None + and not re.compile(r'^[a-zA-Z0-9\_\-\s]+$').match(o[0]) is None + } + mem_used = (t3['pages_active'] + t3['pages_wired_down']) * vm_pagesize - return dict( - mem_total=mem_total / 1024, - mem_used=mem_used / 1024, - ) - else: - raise NotImplementedError + return dict( + mem_total=mem_total / 1024, + mem_used=mem_used / 1024, + ) + else: + raise NotImplementedError -def chrome( - argv: list[str] -) -> int: - assert isinstance(argv, list) and 
all([isinstance(o, str) for o in argv]) - parser = optparse.OptionParser() - parser.add_option( - '--user_data_dir', - dest='user_data_dir', - default=None, - type=str, - ) - options, args = parser.parse_args(argv) +def chrome(argv: list[str]) -> int: + assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + parser = optparse.OptionParser() + parser.add_option( + '--user_data_dir', + dest='user_data_dir', + default=None, + type=str, + ) - if options.user_data_dir is None: - options.user_data_dir = os.path.join( - os.environ['HOME'], - '.config', - 'google-chrome', - ) + options, args = parser.parse_args(argv) - #assert os.path.exists(options.user_data_dir) + if options.user_data_dir is None: + options.user_data_dir = os.path.join( + os.environ['HOME'], + '.config', + 'google-chrome', + ) + + # assert os.path.exists(options.user_data_dir) + + if sys.platform == 'linux': + return subprocess.check_call( + [ + 'google-chrome-stable', + '--enable-features=useOzonePlatform', + '--ozone-platform=wayland', + '--process-per-site', + '--user-data-dir=%s' % options.user_data_dir, + *args, + ] + ) + else: + raise NotImplementedError - if sys.platform == 'linux': - return subprocess.check_call([ - 'google-chrome-stable', - '--enable-features=useOzonePlatform', - '--ozone-platform=wayland', - '--process-per-site', - '--user-data-dir=%s' % options.user_data_dir, - *args, - ]) - else: - raise NotImplementedError def raise_not_implemented() -> None: - raise NotImplementedError + raise NotImplementedError + def eternal_oom(argv: list[str]) -> None: - import signal - import re - import time - import pprint - - assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) - parser = optparse.OptionParser() - parser.add_option( - '--cpu_wait', - dest='cpu_wait', - default=None, - type=float, - ) - parser.add_option( - '--mean_size', - dest='mean_size', - default=None, - type=int, - ) - parser.add_option( - '--one_shot_clean', - dest='one_shot_clean', - action='store_true', - default=None, - ) - parser.add_option( - '--cpu', - dest='cpu_json', - type=str, - default=None, - ) - parser.add_option( - '--one_shot_app', - dest='one_shot_app', - default=[], - action='append', - ) - parser.add_option( - '--period', - dest='period', - default=None, - type=float, - ) - parser.add_option( - '--memory_limit', - dest='memory_limit', - default=None, - type=float, - ) - parser.add_option( - '--cpu_limit', - dest='cpu_limit', - default=None, - type=float, - ) - parser.add_option( - '--debug', - dest='debug', - action='store_true', - default=False, - ) - options, args = parser.parse_args(argv) - - if not options.cpu_json is None: - options.cpu = json.loads(options.cpu_json) - else: - options.cpu = True - - self_pid = os.getpid() - - if isinstance(options.one_shot_clean, bool) and options.one_shot_clean: - if len(options.one_shot_app) == 0: - options.one_shot_app = ['chrome', 'teams'] - - config = dict( - chrome=(r'chrome.*type=renderer', r'^.*--extension-process.*$'), - teams=(r'teams.*type=renderer', None), - ) - - for app in options.one_shot_app: - p = config[app] - - try: - t1 = subprocess.check_output([ - 'pgrep', '-a', '-f', p[0] - ]).decode('utf-8') - except Exception: - continue - t2 = t1.splitlines() - if not p[1] is None: - t3 = [o for o in t2 if re.compile(p[1]).match(o) is None] - else: - t3 = t2 - t4 = [ - int(o.split()[0]) for o in t3 - ] - - for pid in t4: - if pid == self_pid: - raise NotImplementedError - - os.kill(pid, signal.SIGTERM) - - logging.info(json.dumps(dict( - 
apps=options.one_shot_app, - count=len(t4), - processes=[ - o.split()[:3] for o in t3 - ], - ))) - - if len(t4) > 0: - print( - '\n'.join([ - str(o.split()[:3]) for o in t3 - ]) - ) - return - - cpu_count = os.cpu_count() - assert isinstance(cpu_count, int) - - if options.period is None: - options.period = 1 - if options.memory_limit is None: - options.memory_limit = 3 * 1024 * 1024 - if options.cpu_limit is None: - options.cpu_limit = 0.6 * cpu_count - if options.cpu_wait is None: - options.cpu_wait = 10 - if options.mean_size is None: - options.mean_size = 30 - - if isinstance(options.memory_limit, float): - options.memory_limit = int(options.memory_limit) - - assert isinstance(options.memory_limit, int) \ - and options.memory_limit < memory_stats()['mem_total'] * 0.95 \ - and options.memory_limit > 512 * 1024 - - assert isinstance(options.cpu_limit, float) \ - and options.cpu_limit > 0.2 * cpu_count and \ - options.cpu_limit < cpu_count * 0.95 - - assert options.period >= 1 - - assert options.cpu_wait >= 10 - assert options.mean_size >= 16 - - def pandas_data_frame( - lines: list[str], - groups_regex: re.Pattern[str], - header_regex: re.Pattern[str], - extra_columns: dict[ - str, - Callable[ - [dict[str, str]], - Any - ] - ], - ) -> dict[str, list[Any]]: - header_match = re.compile(header_regex).search(lines[0]) - assert not header_match is None - header = header_match.groups() - rows = [] - for line in lines[1:]: - row_match = re.compile(groups_regex).search(line) - assert not row_match is None - rows.append(row_match.groups()) - - columns: dict[str, list[Any]] = { - column: [] - for column in header - } - - for row in rows: - for value, column in zip(row, header): - columns[column].append(value) - for column, transformation in extra_columns.items(): - columns[column] = [ - transformation( - { - k : v[index] - for k, v in columns.items() - } - ) - for index in range(len(rows)) - ] - - return columns - - def pandas_merge( - left: dict[str, list[Any]], - right: dict[str, list[Any]], - on: str, - ) -> dict[str, list[Any]]: - index : dict[str, dict[Any, list[int]]] = {} - - input_data_frames : list[ - tuple[ - str, - dict[str, list[Any]] - ] - ] = [ - ('left', left), - ('right', right), - ] - for index_name, data_frame in input_data_frames: - current_index: dict[Any, list[int]] = {} - for row_index, value in enumerate(data_frame[on]): - if not value in current_index: - current_index[value] = [] - current_index[value].append(row_index) - - index[index_name] = current_index - - class MergedDataFrame(TypedDict): - header: list[str] - columns: dict[str, list[Any]] - - merged_data_frame: MergedDataFrame = dict( - header=[ - column + '_x' - for column in left - ] + [ - column + '_y' - for column in right - ], - columns={}, - ) - - for column in merged_data_frame['header']: - merged_data_frame['columns'][column] = [] - - common_values: set[Any] = { - left_value - for left_value in index['left'] - if left_value in index['right'] - } - class RowMatch(TypedDict): - left_row_index: int - right_row_index: int - - common_rows: list[RowMatch] = sorted( - [ - dict( - left_row_index=index['left'][value][0], - right_row_index=index['right'][value][0], - ) - for value in common_values - ], - key=lambda x: x['left_row_index'], - ) - for common_row in common_rows: - row = sum([ - [ - values[ - common_row[ - cast( - Literal['left_row_index' | 'right_row_index'], - 'left_row_index' - if index_name == 'left' - else - 'right_row_index' - if index_name == 'right' - else raise_not_implemented() - ) - ] - ] - for 
column, values in data_frame.items() - ] - for index_name, data_frame in input_data_frames - ], []) - for column, value in zip(merged_data_frame['header'], row): - merged_data_frame['columns'][column].append(value) - - return merged_data_frame['columns'] - - def pandas_sort_values(data_frame, by, ascending): - assert len(by) == 1 - assert ascending is False - t1 = [ - o['row_index'] - for o in sorted( - [ - dict( - row_index=row_index, - value=value - ) - for row_index, value in enumerate(data_frame[by[0]]) - ], - key=lambda x: x['value'] - )[::-1] - ] - return { - column : [ - values[row_index] - for row_index in t1 - ] - for column, values in data_frame.items() - } - - def pandas_filter_values(data_frame, condition): - shape = [ - len(data_frame), - ] - if shape[0] > 0: - shape.append( - len(list(data_frame.values())[0]) - ) - t1 = [ - row_index - for row_index in range(shape[1]) - if condition( - { - column : values[row_index] - for column, values in data_frame.items() - } - ) - ] - return { - column : [ - values[row_index] - for row_index in t1 - ] - for column, values in data_frame.items() - } - - def pandas_row(data_frame, row_index): - return { - column : values[row_index] - for column, values in data_frame.items() - } - - def pandas_shape(data_frame): - columns_count = len(data_frame) - if columns_count > 0: - rows_count = len(data_frame[ - next(iter(data_frame.keys())) - ]) - else: - rows_count = 0 - - return [ - columns_count, - rows_count, - ] - - def ps_regex(groups_cnt): - assert groups_cnt >= 1 - return ''.join([ - r'^\s*', - r'([^\s]+)\s+' * (groups_cnt - 1), - r'([^\s]+)\s*$', - ]) - - def oom_get_processes(extra_filter=None,): - with io.BytesIO( - subprocess.check_output( - 'ps -e -o pid,rss,user,%cpu', - shell=True - ) - ) as f: - t1 = pandas_data_frame( - f.read().decode('utf-8').splitlines(), - ps_regex(4), - ps_regex(4), - dict( - PID=lambda row: int(row['PID']), - RSS=lambda row: int(row['RSS']), - CPU=lambda row: float(row['%CPU']), - ), - ) - del t1['%CPU'] - assert set(t1.keys()) == set(['PID', 'RSS', 'USER', 'CPU']) - - t5 = subprocess.check_output( - 'ps -e -o pid,args', - shell=True - ).decode('utf-8').splitlines() - t6 = pandas_data_frame( - t5, - r'^\s*(\d+)\s(.*)$', - r'^\s+(\w+)\s+(\w+)\s*$', - dict( - PID=lambda row: int(row['PID']) - ), - ) - - if not 'COMMAND' in t6: - if sys.platform == 'darwin' and 'ARGS' in t6: - t6['COMMAND'] = t6['ARGS'] - del t6['ARGS'] - else: - raise NotImplementedError - - assert set(t6.keys()) == set(['PID', 'COMMAND']) - t11 = pandas_merge(t1, t6, on='PID') - if extra_filter is None: - extra_filter = lambda *args : True - - t7 = pandas_filter_values( - t11, - lambda row: \ - row['PID_x'] != self_pid and \ - not 'freelancer' in row['COMMAND_y'] and \ - extra_filter(row) - ) - - t8 = pandas_sort_values( - t7, - by=['RSS_x'], - ascending=False - ) - t9 = pandas_sort_values( - t7, - by=['CPU_x'], - ascending=False - ) - t10 = sum(t9['CPU_x'], 0.0) / 100 - if options.debug: - pprint.pprint([t9['CPU_x'][:10], t10 * 100]) - - return dict( - by_mem=t8, - by_cpu=t9, - total_cpu=t10, - ) - - def oom_display_rows(current_dataframe): - print('\n'.join([ - ( - lambda row: \ - '% 8d\t% 6.3f GiB\t% 5.2f %%\t% 10s\t%s' % ( - row['PID_x'], - row['RSS_x'] / 1024 / 1024, - row['CPU_x'], - row['USER_x'], - row['COMMAND_y'], - ) - )( - pandas_row(current_dataframe, k) - ) - for k in range( - 0, - min( - 5, - pandas_shape(current_dataframe)[1], - ) - ) - ])) - - def oom_kill(pid): - assert isinstance(pid, int) - - try: - logging.info('%s oom_kill, 
pid %d' % ( - datetime.datetime.now().isoformat(), - pid, - )) - os.kill(pid, signal.SIGKILL) - except Exception: - logging.error(traceback.format_exc()) - custom_notify( - msg='oom_kill, failed to kill pid %d' % pid - ) - - def oom_status(): - print( - '\r%s %6.2f / %.2f %%, %6.2f / %.2f GiB' % ( - datetime.datetime.now().isoformat(), - oom_mean_cpu() / os.cpu_count() * 100, - options.cpu_limit / os.cpu_count() * 100, - memory_stats()['mem_used'] / 1024 / 1024, - options.memory_limit / 1024 / 1024, - ), - end='' - ) - - def first_check(): - current_memory_stats = memory_stats() - - t11 = oom_get_processes() - t8 = t11['by_mem'] - - if current_memory_stats['mem_used'] > options.memory_limit: - oom_display_rows(t8) - - if t11['total_cpu'] > options.cpu_limit: - oom_display_rows(t11['by_cpu']) - - free_before_oom = ( - options.memory_limit - current_memory_stats['mem_used'] - ) - - print( - 'available %5.2f %% out of %5.2f %% of cpu limit before OOC' % ( - (options.cpu_limit - t11['total_cpu']) * 100 / os.cpu_count(), - options.cpu_limit * 100 / os.cpu_count(), - ) - ) - - print( - '%5.2f GiB [%5.2f %%] out of %5.2f GiB of free memory before OOM' % ( - free_before_oom / 1024 / 1024, - free_before_oom / options.memory_limit * 100, - options.memory_limit / 1024 / 1024, - ) - ) - - del t8 - del t11 - - print('press Enter to start monitoring: ...', end='') - input() - print('\nstarted...') - - first_check() - - last_total_cpu = [] - - last_cpu_high = None - - def oom_add_cpu(total_cpu): - if options.debug: - pprint.pprint([total_cpu, last_total_cpu]) - last_total_cpu.append(total_cpu) - if len(last_total_cpu) > options.mean_size: - del last_total_cpu[-options.mean_size:] - - def oom_mean_cpu(): - return sum(last_total_cpu) / (len(last_total_cpu) + 1e-8) - - def oom_cpu_high(cpu_limit=None): - if cpu_limit is None: - cpu_limit = options.cpu_limit - - nonlocal last_cpu_high - - if oom_mean_cpu() > cpu_limit: - if last_cpu_high is None: - last_cpu_high = datetime.datetime.now().timestamp() - - if datetime.datetime.now().timestamp() - last_cpu_high > options.cpu_wait: - last_cpu_high = None - del last_total_cpu[:] - return True - - return False - - mem_used = None - mem_stat = None - - def oom_mem_high(memory_limit=None): - nonlocal mem_used - - if memory_limit is None: - memory_limit = options.memory_limit - - return mem_used > memory_limit - - while True: - mem_stat = memory_stats() - mem_used = mem_stat['mem_used'] - - if options.memory_limit < mem_stat['mem_total'] and not oom_mem_high( - mem_stat['mem_total'] - ( - mem_stat['mem_total'] - options.memory_limit - ) / 2 - ): - extra_filters = lambda row: ( - 'chrome' in row['COMMAND_y'] and '--type=renderer' in row['COMMAND_y'] - or not 'chrome' in row['COMMAND_y'] - ) - else: - extra_filters = None + import signal + import re + import time + import pprint + + assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + parser = optparse.OptionParser() + parser.add_option( + '--cpu_wait', + dest='cpu_wait', + default=None, + type=float, + ) + parser.add_option( + '--mean_size', + dest='mean_size', + default=None, + type=int, + ) + parser.add_option( + '--one_shot_clean', + dest='one_shot_clean', + action='store_true', + default=None, + ) + parser.add_option( + '--cpu', + dest='cpu_json', + type=str, + default=None, + ) + parser.add_option( + '--one_shot_app', + dest='one_shot_app', + default=[], + action='append', + ) + parser.add_option( + '--period', + dest='period', + default=None, + type=float, + ) + parser.add_option( + 
'--memory_limit', + dest='memory_limit', + default=None, + type=float, + ) + parser.add_option( + '--cpu_limit', + dest='cpu_limit', + default=None, + type=float, + ) + parser.add_option( + '--debug', + dest='debug', + action='store_true', + default=False, + ) + options, args = parser.parse_args(argv) + + if not options.cpu_json is None: + options.cpu = json.loads(options.cpu_json) + else: + options.cpu = True + + self_pid = os.getpid() + + if isinstance(options.one_shot_clean, bool) and options.one_shot_clean: + if len(options.one_shot_app) == 0: + options.one_shot_app = ['chrome', 'teams'] + + config = dict( + chrome=(r'chrome.*type=renderer', r'^.*--extension-process.*$'), + teams=(r'teams.*type=renderer', None), + ) + + for app in options.one_shot_app: + p = config[app] + + try: + t1 = subprocess.check_output(['pgrep', '-a', '-f', p[0]]).decode('utf-8') + except Exception: + continue + t2 = t1.splitlines() + if not p[1] is None: + t3 = [o for o in t2 if re.compile(p[1]).match(o) is None] + else: + t3 = t2 + t4 = [int(o.split()[0]) for o in t3] + + for pid in t4: + if pid == self_pid: + raise NotImplementedError + + os.kill(pid, signal.SIGTERM) + + logging.info( + json.dumps( + dict( + apps=options.one_shot_app, + count=len(t4), + processes=[o.split()[:3] for o in t3], + ) + ) + ) + + if len(t4) > 0: + print('\n'.join([str(o.split()[:3]) for o in t3])) + return + + cpu_count = os.cpu_count() + assert isinstance(cpu_count, int) + + if options.period is None: + options.period = 1 + if options.memory_limit is None: + options.memory_limit = 3 * 1024 * 1024 + if options.cpu_limit is None: + options.cpu_limit = 0.6 * cpu_count + if options.cpu_wait is None: + options.cpu_wait = 10 + if options.mean_size is None: + options.mean_size = 30 + + if isinstance(options.memory_limit, float): + options.memory_limit = int(options.memory_limit) + + assert isinstance(options.memory_limit, int) and options.memory_limit < memory_stats()['mem_total'] * 0.95 and options.memory_limit > 512 * 1024 + + assert isinstance(options.cpu_limit, float) and options.cpu_limit > 0.2 * cpu_count and options.cpu_limit < cpu_count * 0.95 + + assert options.period >= 1 + + assert options.cpu_wait >= 10 + assert options.mean_size >= 16 + + def pandas_data_frame( + lines: list[str], + groups_regex: re.Pattern[str], + header_regex: re.Pattern[str], + extra_columns: dict[str, Callable[[dict[str, str]], Any]], + ) -> dict[str, list[Any]]: + header_match = re.compile(header_regex).search(lines[0]) + assert not header_match is None + header = header_match.groups() + rows = [] + for line in lines[1:]: + row_match = re.compile(groups_regex).search(line) + assert not row_match is None + rows.append(row_match.groups()) + + columns: dict[str, list[Any]] = {column: [] for column in header} + + for row in rows: + for value, column in zip(row, header): + columns[column].append(value) + for column, transformation in extra_columns.items(): + columns[column] = [transformation({k: v[index] for k, v in columns.items()}) for index in range(len(rows))] + + return columns + + def pandas_merge( + left: dict[str, list[Any]], + right: dict[str, list[Any]], + on: str, + ) -> dict[str, list[Any]]: + index: dict[str, dict[Any, list[int]]] = {} + + input_data_frames: list[tuple[str, dict[str, list[Any]]]] = [ + ('left', left), + ('right', right), + ] + for index_name, data_frame in input_data_frames: + current_index: dict[Any, list[int]] = {} + for row_index, value in enumerate(data_frame[on]): + if not value in current_index: + current_index[value] = [] 
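# [editor's sketch] pandas_merge() is a hand-rolled single-key inner join
# that suffixes column names with _x/_y, mimicking pandas.merge. Expected
# behaviour under that reading is shown below. (Note: the
# cast(Literal['left_row_index' | 'right_row_index'], ...) expression in
# this function fails at runtime, since str | str is a TypeError; the
# intent is presumably Literal['left_row_index', 'right_row_index'].)
left = {'PID': [1, 2, 3], 'RSS': [10, 20, 30]}
right = {'PID': [2, 3, 4], 'COMMAND': ['a', 'b', 'c']}

merged = pandas_merge(left, right, on='PID')  # keeps PIDs 2 and 3 only

assert merged['PID_x'] == [2, 3]
assert merged['RSS_x'] == [20, 30]
assert merged['COMMAND_y'] == ['a', 'b']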
+ current_index[value].append(row_index) + + index[index_name] = current_index + + class MergedDataFrame(TypedDict): + header: list[str] + columns: dict[str, list[Any]] + + merged_data_frame: MergedDataFrame = dict( + header=[column + '_x' for column in left] + [column + '_y' for column in right], + columns={}, + ) + + for column in merged_data_frame['header']: + merged_data_frame['columns'][column] = [] + + common_values: set[Any] = {left_value for left_value in index['left'] if left_value in index['right']} + + class RowMatch(TypedDict): + left_row_index: int + right_row_index: int + + common_rows: list[RowMatch] = sorted( + [ + dict( + left_row_index=index['left'][value][0], + right_row_index=index['right'][value][0], + ) + for value in common_values + ], + key=lambda x: x['left_row_index'], + ) + for common_row in common_rows: + row = sum( + [ + [ + values[ + common_row[ + cast( + Literal['left_row_index' | 'right_row_index'], + 'left_row_index' if index_name == 'left' else 'right_row_index' if index_name == 'right' else raise_not_implemented(), + ) + ] + ] + for column, values in data_frame.items() + ] + for index_name, data_frame in input_data_frames + ], + [], + ) + for column, value in zip(merged_data_frame['header'], row): + merged_data_frame['columns'][column].append(value) + + return merged_data_frame['columns'] + + def pandas_sort_values(data_frame, by, ascending): + assert len(by) == 1 + assert ascending is False + t1 = [ + o['row_index'] + for o in sorted([dict(row_index=row_index, value=value) for row_index, value in enumerate(data_frame[by[0]])], key=lambda x: x['value'])[::-1] + ] + return {column: [values[row_index] for row_index in t1] for column, values in data_frame.items()} + + def pandas_filter_values(data_frame, condition): + shape = [ + len(data_frame), + ] + if shape[0] > 0: + shape.append(len(list(data_frame.values())[0])) + t1 = [row_index for row_index in range(shape[1]) if condition({column: values[row_index] for column, values in data_frame.items()})] + return {column: [values[row_index] for row_index in t1] for column, values in data_frame.items()} + + def pandas_row(data_frame, row_index): + return {column: values[row_index] for column, values in data_frame.items()} + + def pandas_shape(data_frame): + columns_count = len(data_frame) + if columns_count > 0: + rows_count = len(data_frame[next(iter(data_frame.keys()))]) + else: + rows_count = 0 + + return [ + columns_count, + rows_count, + ] + + def ps_regex(groups_cnt): + assert groups_cnt >= 1 + return ''.join( + [ + r'^\s*', + r'([^\s]+)\s+' * (groups_cnt - 1), + r'([^\s]+)\s*$', + ] + ) + + def oom_get_processes( + extra_filter=None, + ): + with io.BytesIO(subprocess.check_output('ps -e -o pid,rss,user,%cpu', shell=True)) as f: + t1 = pandas_data_frame( + f.read().decode('utf-8').splitlines(), + ps_regex(4), + ps_regex(4), + dict( + PID=lambda row: int(row['PID']), + RSS=lambda row: int(row['RSS']), + CPU=lambda row: float(row['%CPU']), + ), + ) + del t1['%CPU'] + assert set(t1.keys()) == set(['PID', 'RSS', 'USER', 'CPU']) + + t5 = subprocess.check_output('ps -e -o pid,args', shell=True).decode('utf-8').splitlines() + t6 = pandas_data_frame( + t5, + r'^\s*(\d+)\s(.*)$', + r'^\s+(\w+)\s+(\w+)\s*$', + dict(PID=lambda row: int(row['PID'])), + ) + + if not 'COMMAND' in t6: + if sys.platform == 'darwin' and 'ARGS' in t6: + t6['COMMAND'] = t6['ARGS'] + del t6['ARGS'] + else: + raise NotImplementedError + + assert set(t6.keys()) == set(['PID', 'COMMAND']) + t11 = pandas_merge(t1, t6, on='PID') + if extra_filter 
is None: + extra_filter = lambda *args: True + + t7 = pandas_filter_values(t11, lambda row: row['PID_x'] != self_pid and not 'freelancer' in row['COMMAND_y'] and extra_filter(row)) + + t8 = pandas_sort_values(t7, by=['RSS_x'], ascending=False) + t9 = pandas_sort_values(t7, by=['CPU_x'], ascending=False) + t10 = sum(t9['CPU_x'], 0.0) / 100 + if options.debug: + pprint.pprint([t9['CPU_x'][:10], t10 * 100]) + + return dict( + by_mem=t8, + by_cpu=t9, + total_cpu=t10, + ) + + def oom_display_rows(current_dataframe): + print( + '\n'.join( + [ + ( + lambda row: '% 8d\t% 6.3f GiB\t% 5.2f %%\t% 10s\t%s' + % ( + row['PID_x'], + row['RSS_x'] / 1024 / 1024, + row['CPU_x'], + row['USER_x'], + row['COMMAND_y'], + ) + )(pandas_row(current_dataframe, k)) + for k in range( + 0, + min( + 5, + pandas_shape(current_dataframe)[1], + ), + ) + ] + ) + ) + + def oom_kill(pid): + assert isinstance(pid, int) + + try: + logging.info( + '%s oom_kill, pid %d' + % ( + datetime.datetime.now().isoformat(), + pid, + ) + ) + os.kill(pid, signal.SIGKILL) + except Exception: + logging.error(traceback.format_exc()) + custom_notify(msg='oom_kill, failed to kill pid %d' % pid) + + def oom_status(): + print( + '\r%s %6.2f / %.2f %%, %6.2f / %.2f GiB' + % ( + datetime.datetime.now().isoformat(), + oom_mean_cpu() / os.cpu_count() * 100, + options.cpu_limit / os.cpu_count() * 100, + memory_stats()['mem_used'] / 1024 / 1024, + options.memory_limit / 1024 / 1024, + ), + end='', + ) + + def first_check(): + current_memory_stats = memory_stats() + + t11 = oom_get_processes() + t8 = t11['by_mem'] + + if current_memory_stats['mem_used'] > options.memory_limit: + oom_display_rows(t8) + + if t11['total_cpu'] > options.cpu_limit: + oom_display_rows(t11['by_cpu']) + + free_before_oom = options.memory_limit - current_memory_stats['mem_used'] + + print( + 'available %5.2f %% out of %5.2f %% of cpu limit before OOC' + % ( + (options.cpu_limit - t11['total_cpu']) * 100 / os.cpu_count(), + options.cpu_limit * 100 / os.cpu_count(), + ) + ) + + print( + '%5.2f GiB [%5.2f %%] out of %5.2f GiB of free memory before OOM' + % ( + free_before_oom / 1024 / 1024, + free_before_oom / options.memory_limit * 100, + options.memory_limit / 1024 / 1024, + ) + ) + + del t8 + del t11 + + print('press Enter to start monitoring: ...', end='') + input() + print('\nstarted...') + + first_check() + + last_total_cpu = [] + + last_cpu_high = None + + def oom_add_cpu(total_cpu): + if options.debug: + pprint.pprint([total_cpu, last_total_cpu]) + last_total_cpu.append(total_cpu) + if len(last_total_cpu) > options.mean_size: + del last_total_cpu[-options.mean_size :] + + def oom_mean_cpu(): + return sum(last_total_cpu) / (len(last_total_cpu) + 1e-8) + + def oom_cpu_high(cpu_limit=None): + if cpu_limit is None: + cpu_limit = options.cpu_limit + + nonlocal last_cpu_high + + if oom_mean_cpu() > cpu_limit: + if last_cpu_high is None: + last_cpu_high = datetime.datetime.now().timestamp() + + if datetime.datetime.now().timestamp() - last_cpu_high > options.cpu_wait: + last_cpu_high = None + del last_total_cpu[:] + return True + + return False + + mem_used = None + mem_stat = None + + def oom_mem_high(memory_limit=None): + nonlocal mem_used + + if memory_limit is None: + memory_limit = options.memory_limit + + return mem_used > memory_limit + + while True: + mem_stat = memory_stats() + mem_used = mem_stat['mem_used'] + + if options.memory_limit < mem_stat['mem_total'] and not oom_mem_high(mem_stat['mem_total'] - (mem_stat['mem_total'] - options.memory_limit) / 2): + extra_filters 
= lambda row: ('chrome' in row['COMMAND_y'] and '--type=renderer' in row['COMMAND_y'] or not 'chrome' in row['COMMAND_y']) + else: + extra_filters = None + + t11 = oom_get_processes(extra_filters) - t11 = oom_get_processes(extra_filters) + oom_add_cpu(t11['total_cpu']) - oom_add_cpu(t11['total_cpu']) + t8 = t11['by_mem'] + + t9 = t8 + t4 = lambda: oom_kill(t9['PID_x'][0]) + + oom_status() + + if oom_mem_high(): + print('\n', end='') + pprint.pprint( + [ + 'Killing [OOM]', + pandas_row(t9, 0), + mem_used, + ] + ) + t4() + + if options.cpu and oom_cpu_high(): + oom_display_rows(t11['by_cpu']) + print('\n', end='') + pprint.pprint( + [ + 'Killing [CPU]', + pandas_row(t11['by_cpu'], 0), + [options.cpu_limit, oom_mean_cpu(), t11['total_cpu']], + ] + ) + oom_kill(t11['by_cpu']['PID_x'][0]) + time.sleep(options.period) - t8 = t11['by_mem'] - - t9 = t8 - t4 = lambda : oom_kill(t9['PID_x'][0]) - - oom_status() - - if oom_mem_high(): - print('\n', end='') - pprint.pprint([ - 'Killing [OOM]', - pandas_row(t9, 0), - mem_used, - ]) - t4() - - if options.cpu and oom_cpu_high(): - oom_display_rows(t11['by_cpu']) - print('\n', end='') - pprint.pprint([ - 'Killing [CPU]', - pandas_row(t11['by_cpu'], 0), - [options.cpu_limit, oom_mean_cpu(), t11['total_cpu']], - ]) - oom_kill(t11['by_cpu']['PID_x'][0]) - time.sleep(options.period) def resilient_vlc(stream=None): - if stream is None: - streams_path = os.path.join( - os.environ['CACHE_PATH'], - 'resilient-vlc-streams.json' - ) + if stream is None: + streams_path = os.path.join(os.environ['CACHE_PATH'], 'resilient-vlc-streams.json') - if os.path.exists(streams_path): - with io.open( - streams_path, - 'r' - ) as f: - stream = json.load(f) - else: - raise RuntimeError( - 'not found, %s' % streams_path - ) + if os.path.exists(streams_path): + with io.open(streams_path, 'r') as f: + stream = json.load(f) + else: + raise RuntimeError('not found, %s' % streams_path) - if isinstance(stream, str): - stream = [stream] + if isinstance(stream, str): + stream = [stream] - if len(stream) == 0: - raise RuntimeError('no streams') + if len(stream) == 0: + raise RuntimeError('no streams') + + import subprocess + import time + + while True: + print('new start') + with subprocess.Popen( + [ + 'cvlc', + '--verbose', + '2', + *stream, + ], + stderr=subprocess.PIPE, + ) as p: + while p.returncode is None: + t1 = p.stderr.readline().decode('utf-8') + if len(t1) > 0: + print(t1) + if not all( + [ + o in t1 + for o in [ + 'prefetch stream error', + 'terror', + 'main interface error', + ] + ] + ) and any([o in t1 for o in ['pulse audio output debug: underflow']]): + print('shit') + p.kill() + while True: + try: + t2 = p.wait(timeout=1) + print(t2) + break + except Exception: + print('shit') + pass + time.sleep(1.0) - import subprocess - import time - while True: - print('new start') - with subprocess.Popen([ - 'cvlc', '--verbose', '2', *stream, - ], stderr=subprocess.PIPE) as p: - while p.returncode is None: - t1 = p.stderr.readline().decode('utf-8') - if len(t1) > 0: - print(t1) - if not all([ - o in t1 - for o in [ - 'prefetch stream error', - 'terror', - 'main interface error', - ] - ]) and any([ - o in t1 - for o in [ - 'pulse audio output debug: underflow' - ] - ]): - print('shit') - p.kill() - while True: - try: - t2 = p.wait(timeout=1) - print(t2) - break - except Exception: - print('shit') - pass - time.sleep(1.0) def sway_sock( - wait: bool = False, + wait: bool = False, ) -> Optional[str]: - while True: - import glob - uid = os.stat(os.environ['HOME']).st_uid - t1 = 
glob.glob( - os.path.join( - '/run', - 'user', - '%d' % uid, - 'sway-ipc.%d*.sock' % uid, - ) - ) - t2 = [ - os.stat(o).st_mtime - for o in t1 - ] - sorted_entries = sorted(enumerate(t1), key=lambda x: t2[x[0]]) - if len(sorted_entries) > 0: - t3 = sorted_entries[-1][0] - return t1[t3] - else: - if wait: - time.sleep(0.1) - continue - else: - return None + while True: + import glob + + uid = os.stat(os.environ['HOME']).st_uid + t1 = glob.glob( + os.path.join( + '/run', + 'user', + '%d' % uid, + 'sway-ipc.%d*.sock' % uid, + ) + ) + t2 = [os.stat(o).st_mtime for o in t1] + sorted_entries = sorted(enumerate(t1), key=lambda x: t2[x[0]]) + if len(sorted_entries) > 0: + t3 = sorted_entries[-1][0] + return t1[t3] + else: + if wait: + time.sleep(0.1) + continue + else: + return None + def eternal_firefox( - tabs=None, - profile=None, - group_name=None, - window_position=None, - debug=None, + tabs=None, + profile=None, + group_name=None, + window_position=None, + debug=None, ): - import os - import datetime - import pprint - import subprocess - import time - if debug is None: - debug = False - if tabs is None: - raise RuntimeError('no tabs provided') - if profile is None: - raise RuntimeError('no profile provided') - if group_name is None: - raise RuntimeError('no group provided') - if window_position is None: - #window_position = '1,600,0,600,540' - raise RuntimeError('no window-position provided') - while True: - os.system(r'''date''') - with subprocess.Popen([ - 'firefox', - '-P', profile, - *tabs, - ]) as p: - try: - if debug: - assert subprocess.check_call(['notify-send', '%s:Starting' % group_name]) == 0 + import os + import datetime + import pprint + import subprocess + import time - #t3 = '' - for k in range(300): - t1 = subprocess.check_output(r''' + if debug is None: + debug = False + if tabs is None: + raise RuntimeError('no tabs provided') + if profile is None: + raise RuntimeError('no profile provided') + if group_name is None: + raise RuntimeError('no group provided') + if window_position is None: + # window_position = '1,600,0,600,540' + raise RuntimeError('no window-position provided') + while True: + os.system(r"""date""") + with subprocess.Popen( + [ + 'firefox', + '-P', + profile, + *tabs, + ] + ) as p: + try: + if debug: + assert subprocess.check_call(['notify-send', '%s:Starting' % group_name]) == 0 + + # t3 = '' + for k in range(300): + t1 = subprocess.check_output( + r""" swaymsg -t get_tree | jq -r '..|try select(.pid== %d)' - ''' % p.pid, shell=True).decode('utf-8') - if len(t1) > 10: - break - #time.sleep(0.1) - #t1 = subprocess.check_output(['wmctrl', '-p', '-l']).decode('utf-8') - #t4 = [o for o in t1.splitlines() if str(p.pid) in o] - #if len(t4) == 1: - # t3 = t4[0] - # break + """ + % p.pid, + shell=True, + ).decode('utf-8') + if len(t1) > 10: + break + # time.sleep(0.1) + # t1 = subprocess.check_output(['wmctrl', '-p', '-l']).decode('utf-8') + # t4 = [o for o in t1.splitlines() if str(p.pid) in o] + # if len(t4) == 1: + # t3 = t4[0] + # break - #if t3 == '': - # raise RuntimeError + # if t3 == '': + # raise RuntimeError - #t2 = t3.split()[0] - #assert os.system('wmctrl -i -r %s -e %s' % (t2, window_position)) == 0 - #assert os.system('wmctrl -i -r %s -b add,below' % t2) == 0 - def reposition(): - t1 = lambda s: \ - s \ - .replace('{{PID}}', str(p.pid)) \ - .replace('{{X}}', str(window_position[1])) \ - .replace('{{Y}}', str(window_position[2])) \ - .replace('{{W}}', str(window_position[3])) \ - .replace('{{H}}', str(window_position[4])) \ - .replace('{{WORKSPACE}}', 
str(window_position[0])) + # t2 = t3.split()[0] + # assert os.system('wmctrl -i -r %s -e %s' % (t2, window_position)) == 0 + # assert os.system('wmctrl -i -r %s -b add,below' % t2) == 0 + def reposition(): + t1 = ( + lambda s: s.replace('{{PID}}', str(p.pid)) + .replace('{{X}}', str(window_position[1])) + .replace('{{Y}}', str(window_position[2])) + .replace('{{W}}', str(window_position[3])) + .replace('{{H}}', str(window_position[4])) + .replace('{{WORKSPACE}}', str(window_position[0])) + ) - assert os.system(t1(r''' + assert ( + os.system( + t1(r""" swaymsg '[pid="{{PID}}"] move window to workspace {{WORKSPACE}}' - ''')) == 0 + """) + ) + == 0 + ) - if window_position[1] != '' and window_position[2] != '': - assert os.system(t1(r''' + if window_position[1] != '' and window_position[2] != '': + assert ( + os.system( + t1(r""" swaymsg '[pid="{{PID}}"] floating enable' \ swaymsg '[pid="{{PID}}"] resize set width {{W}}px height {{H}}px' && \ swaymsg '[pid="{{PID}}"] move absolute position {{X}}px {{Y}}px' - ''')) == 0 - else: - assert os.system(t1(r''' + """) + ) + == 0 + ) + else: + assert ( + os.system( + t1(r""" swaymsg '[pid="{{PID}}"] floating disable' - ''')) == 0 + """) + ) + == 0 + ) + if False: + for tab in tabs[1:]: + time.sleep(10) + assert ( + subprocess.check_call( + [ + 'firefox', + '-P', + profile, + '--new-tab', + tab, + ] + ) + == 0 + ) - if False: - for tab in tabs[1:]: - time.sleep(10) - assert subprocess.check_call([ - 'firefox', - '-P', profile, - '--new-tab', - tab, - ]) == 0 + reposition() + if debug: + assert subprocess.check_call(['notify-send', '%s:Started' % group_name]) == 0 - reposition() + start = datetime.datetime.now() + is_to_restart = lambda: (datetime.datetime.now() - start).total_seconds() >= 900 * 4 + polling_count = 0 - if debug: - assert subprocess.check_call(['notify-send', '%s:Started' % group_name]) == 0 + while not is_to_restart(): + if polling_count == 0: + reposition() - start = datetime.datetime.now() - is_to_restart = lambda : (datetime.datetime.now() - start).total_seconds() >= 900 * 4 - polling_count = 0 + if not p.poll() is None: + break + time.sleep(10) + polling_count += 1 - while not is_to_restart(): - if polling_count == 0: - reposition() + if debug: + assert subprocess.check_call(['notify-send', '%s:Closing' % group_name]) == 0 - if not p.poll() is None: - break - time.sleep(10) - polling_count += 1 - - if debug: - assert subprocess.check_call(['notify-send', '%s:Closing' % group_name]) == 0 - - #assert os.system('wmctrl -i -c %s' % t2) == 0 - assert os.system(r''' + # assert os.system('wmctrl -i -c %s' % t2) == 0 + assert ( + os.system( + r""" swaymsg '[pid="%d"] kill' - ''' % (p.pid,)) == 0 + """ + % (p.pid,) + ) + == 0 + ) - except KeyboardInterrupt: - assert os.system(r''' + except KeyboardInterrupt: + assert ( + os.system( + r""" swaymsg '[pid="%d"] kill' - ''' % (p.pid,)) == 0 - break - except Exception: - import traceback - import pprint - pprint.pprint(traceback.format_exc()) - finally: - try: - p.wait(20) - except subprocess.TimeoutExpired: - pprint.pprint([p.pid, '20 seconds timeout', 'kill']) - p.kill() - if debug: - assert subprocess.check_call(['notify-send', '%s:Closed' % group_name]) == 0 + """ + % (p.pid,) + ) + == 0 + ) + break + except Exception: + import traceback + import pprint + + pprint.pprint(traceback.format_exc()) + finally: + try: + p.wait(20) + except subprocess.TimeoutExpired: + pprint.pprint([p.pid, '20 seconds timeout', 'kill']) + p.kill() + if debug: + assert subprocess.check_call(['notify-send', 
'%s:Closed' % group_name]) == 0 + def resilient_ethernet(ip_addr, ethernet_device): - subprocess.check_call( - r''' + subprocess.check_call( + r""" sudo sh -c '\ while true; \ do ping -c 3 -w 3 -W 1 {{IP_ADDR}} || (\ @@ -1147,170 +1082,146 @@ do ping -c 3 -w 3 -W 1 {{IP_ADDR}} || (\ ); \ sleep 10; clear; date; \ done' - '''.replace( - '{{IP_ADDR}}', - ip_addr - ).replace( - '{{ETHERNET_DEVICE}}}', - ethernet_device - ), - shell=True - ) + """.replace('{{IP_ADDR}}', ip_addr).replace('{{ETHERNET_DEVICE}}}', ethernet_device), + shell=True, + ) + def http_server(argv): - from .commands_typed import os as commands_os - assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) - parser = optparse.OptionParser() - parser.add_option( - '--public', - dest='public', - action='store_true', - default=False, - ) - parser.add_option( - '--force', - dest='force', - action='store_true', - default=False, - ) - parser.add_option( - '--token', - dest='token', - type=str, - default=None, - ) - parser.add_option( - '--port', - dest='port', - type='int', - default=80, - ) - parser.add_option( - '--no_docker', - dest='docker', - action='store_false', - default=None, - ) - parser.add_option( - '-H', '--header', - dest='response_headers', - type='str', - action='append', - default=[], - ) - parser.add_option( - '--host', - dest='host', - type='str', - default='127.0.0.1', - ) - parser.add_option( - '--prefix', - dest='prefix', - type='str', - default=None, - ) - options, args = parser.parse_args(argv) + from .commands_typed import os as commands_os - assert options.port >= 1 + assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + parser = optparse.OptionParser() + parser.add_option( + '--public', + dest='public', + action='store_true', + default=False, + ) + parser.add_option( + '--force', + dest='force', + action='store_true', + default=False, + ) + parser.add_option( + '--token', + dest='token', + type=str, + default=None, + ) + parser.add_option( + '--port', + dest='port', + type='int', + default=80, + ) + parser.add_option( + '--no_docker', + dest='docker', + action='store_false', + default=None, + ) + parser.add_option( + '-H', + '--header', + dest='response_headers', + type='str', + action='append', + default=[], + ) + parser.add_option( + '--host', + dest='host', + type='str', + default='127.0.0.1', + ) + parser.add_option( + '--prefix', + dest='prefix', + type='str', + default=None, + ) + options, args = parser.parse_args(argv) - try: - if not options.docker and options.host == '0.0.0.0': - found : bool = False - for o in commands_os.interfaces_index(): - if found: - break - for o2 in o.addr_info: - if o2.family == 'inet' and o2.local != '127.0.0.1': - options.host = o2.local - logger.info(dict( - host=options.host, - msg='found', - )) - found = True - break + assert options.port >= 1 - assert not socket.inet_aton(options.host) is None - # subprocess.check_call([ - # 'ping', '-w', '1', - # options.host - # ]) - assert options.host in sum([ - [ - o2.local - for o2 in o.addr_info - ] - for o in commands_os.interfaces_index() - ], []) - except Exception: - raise RuntimeError('invalid ip address %s' % options.host) + try: + if not options.docker and options.host == '0.0.0.0': + found: bool = False + for o in commands_os.interfaces_index(): + if found: + break + for o2 in o.addr_info: + if o2.family == 'inet' and o2.local != '127.0.0.1': + options.host = o2.local + logger.info( + dict( + host=options.host, + msg='found', + ) + ) + found = True + break + assert not 
socket.inet_aton(options.host) is None + # subprocess.check_call([ + # 'ping', '-w', '1', + # options.host + # ]) + assert options.host in sum([[o2.local for o2 in o.addr_info] for o in commands_os.interfaces_index()], []) + except Exception: + raise RuntimeError('invalid ip address %s' % options.host) - if options.docker is None: - options.docker = True + if options.docker is None: + options.docker = True - index_section = 'autoindex on;' + index_section = 'autoindex on;' - if options.public: - options.token = 'public' + if options.public: + options.token = 'public' - location_section = 'location / {%s}' % index_section - else: - if options.token is None: - options.token = os.urandom(16).hex() + location_section = 'location / {%s}' % index_section + else: + if options.token is None: + options.token = os.urandom(16).hex() - if options.docker: - DATA_DIR = '/usr/share/nginx' - APP_DIR = '/app' - CONF_DIR = '/etc/nginx' - else: - DATA_DIR = '/opt/nginx/%s/data/' % options.token - CONF_DIR = '/opt/nginx/%s/conf/' % options.token - APP_DIR = os.path.abspath(os.path.curdir) + if options.docker: + DATA_DIR = '/usr/share/nginx' + APP_DIR = '/app' + CONF_DIR = '/etc/nginx' + else: + DATA_DIR = '/opt/nginx/%s/data/' % options.token + CONF_DIR = '/opt/nginx/%s/conf/' % options.token + APP_DIR = os.path.abspath(os.path.curdir) - if not options.public: - if not options.prefix is None: - path = options.prefix + options.token - else: - path = options.token + if not options.public: + if not options.prefix is None: + path = options.prefix + options.token + else: + path = options.token - logger.info( - 'access url is http://%s:%d/%s/' % ( - options.host, - options.port, - path, - ) - ) + logger.info( + 'access url is http://%s:%d/%s/' + % ( + options.host, + options.port, + path, + ) + ) - assert all([ - not re.compile( - r'^[A-Za-z-]+ [a-z0-9A-Z-\.]+$' - ).match(o) is None - for o in options.response_headers - ]) + assert all([not re.compile(r'^[A-Za-z-]+ [a-z0-9A-Z-\.]+$').match(o) is None for o in options.response_headers]) - location_section = ( - 'location / {' - 'deny all;' - '}' - 'location /%s/ {' - 'alias %s/;' - '%s' - '%s' - '}' - ) % ( - path, - APP_DIR, - '\n'.join([ - 'add_header %s;' % o - for o in options.response_headers - ]), - index_section - ) + location_section = ('location / {deny all;}location /%s/ {alias %s/;%s%s}') % ( + path, + APP_DIR, + '\n'.join(['add_header %s;' % o for o in options.response_headers]), + index_section, + ) - if options.docker: - subprocess.check_call( - r''' + if options.docker: + subprocess.check_call( + r""" sudo docker run \ -p %s:%d:80 \ -u root \ @@ -1319,38 +1230,37 @@ def http_server(argv): --log-driver none \ nginx:latest \ -c 'echo "server{listen 80; charset UTF-8; root /app; %s}" > /etc/nginx/conf.d/default.conf; nginx -g "daemon off;"' - ''' % ( - options.host, - options.port, - APP_DIR, - location_section, - ), - shell=True - ) - else: - if os.path.exists(CONF_DIR): - assert options.force - shutil.rmtree(CONF_DIR) + """ + % ( + options.host, + options.port, + APP_DIR, + location_section, + ), + shell=True, + ) + else: + if os.path.exists(CONF_DIR): + assert options.force + shutil.rmtree(CONF_DIR) - if os.path.exists(DATA_DIR): - assert options.force - shutil.rmtree(DATA_DIR) + if os.path.exists(DATA_DIR): + assert options.force + shutil.rmtree(DATA_DIR) - os.makedirs(CONF_DIR, exist_ok=True) - os.makedirs( - os.path.join( - CONF_DIR, - 'conf.d', - ), - exist_ok=True - ) - os.makedirs(DATA_DIR, exist_ok=True) + os.makedirs(CONF_DIR, exist_ok=True) + 
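# Editor's note: the private-mode branch above builds the nginx location
# block as a single %-formatted line; the sketch below is not part of the
# upstream patch and uses hypothetical values ('deadbeef' token, '/app'
# APP_DIR, one -H header) to make the resulting fragment easier to read.
def _example_location_section() -> str:
    token = 'deadbeef'  # hypothetical; stands in for os.urandom(16).hex()
    app_dir = '/app'  # hypothetical APP_DIR
    response_headers = ['Cache-Control no-store']  # hypothetical -H value
    index_section = 'autoindex on;'
    return 'location / {deny all;}location /%s/ {alias %s/;%s%s}' % (
        token,
        app_dir,
        '\n'.join(['add_header %s;' % o for o in response_headers]),
        index_section,
    )
# _example_location_section() returns:
# location / {deny all;}location /deadbeef/ {alias /app/;add_header Cache-Control no-store;autoindex on;}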
os.makedirs( + os.path.join( + CONF_DIR, + 'conf.d', + ), + exist_ok=True, + ) + os.makedirs(DATA_DIR, exist_ok=True) - with io.open( - os.path.join(CONF_DIR, 'nginx.conf'), - 'w' - ) as f: - f.write(r''' + with io.open(os.path.join(CONF_DIR, 'nginx.conf'), 'w') as f: + f.write( + r""" events { multi_accept on; @@ -1379,17 +1289,20 @@ http { '' close; } } - ''' % CONF_DIR) + """ + % CONF_DIR + ) - with io.open( - os.path.join( - CONF_DIR, - 'conf.d', - 'default.conf', - ), - 'w', - ) as f: - f.write(r''' + with io.open( + os.path.join( + CONF_DIR, + 'conf.d', + 'default.conf', + ), + 'w', + ) as f: + f.write( + r""" server { server_name %s; listen %d; @@ -1405,124 +1318,120 @@ server { root %s; %s } - ''' % ( - options.host, - options.port, - DATA_DIR, - location_section, - )) + """ + % ( + options.host, + options.port, + DATA_DIR, + location_section, + ) + ) - sys.stderr.flush() - sys.stdout.flush() + sys.stderr.flush() + sys.stdout.flush() + + os.execv( + '/usr/sbin/nginx', + [ + '/usr/sbin/nginx', + '-c', + os.path.join(CONF_DIR, 'nginx.conf'), + '-p', + DATA_DIR, + '-g', + 'daemon off;', + ], + ) - os.execv( - '/usr/sbin/nginx', - [ - '/usr/sbin/nginx', - '-c', - os.path.join(CONF_DIR, 'nginx.conf'), - '-p', DATA_DIR, - '-g', 'daemon off;', - ], - ) class pass_ssh_osx_t: - class kwargs_t: - class Mode(enum.StrEnum): - clipboard = 'clipboard' - qrcode = 'qrcode' + class kwargs_t: + class Mode(enum.StrEnum): + clipboard = 'clipboard' + qrcode = 'qrcode' + def pass_ssh_osx(argv): - assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) - parser = optparse.OptionParser() - parser.add_option( - '--list', - dest='list', - default=False, - action='store_true', - ) - parser.add_option( - '--pass_option', - dest='pass_option', - action='append', - default=[], - type=str, - help='pass secret path, like --pass_option google.com/login/password/v1', - ) - parser.add_option( - '--clipboard_copy', - dest='clipboard_copy', - default=None, - type=str, - ) + assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + parser = optparse.OptionParser() + parser.add_option( + '--list', + dest='list', + default=False, + action='store_true', + ) + parser.add_option( + '--pass_option', + dest='pass_option', + action='append', + default=[], + type=str, + help='pass secret path, like --pass_option google.com/login/password/v1', + ) + parser.add_option( + '--clipboard_copy', + dest='clipboard_copy', + default=None, + type=str, + ) - parser.add_option( - '--mode', - dest='_mode', - choices=[ - o.value - for o in pass_ssh_osx_t.kwargs_t.Mode - ], - default=None, - help='a mode to retrieve the password', - ) - parser.add_option( - '--debug', - dest='debug', - action='store_true', - default=False, - ) - assert sys.platform in ['darwin', 'linux'] - options, args = parser.parse_args(argv) + parser.add_option( + '--mode', + dest='_mode', + choices=[o.value for o in pass_ssh_osx_t.kwargs_t.Mode], + default=None, + help='a mode to retrieve the password', + ) + parser.add_option( + '--debug', + dest='debug', + action='store_true', + default=False, + ) + assert sys.platform in ['darwin', 'linux'] + options, args = parser.parse_args(argv) - if options._mode is None: - options._mode = pass_ssh_osx_t.kwargs_t.Mode.clipboard.value + if options._mode is None: + options._mode = pass_ssh_osx_t.kwargs_t.Mode.clipboard.value - options.mode = pass_ssh_osx_t.kwargs_t.Mode(options._mode) + options.mode = pass_ssh_osx_t.kwargs_t.Mode(options._mode) - if options.clipboard_copy is None: - if sys.platform 
== 'linux': - options.clipboard_copy = 'wl-copy' - elif sys.platform == 'darwin': - options.clipboard_copy = 'pbcopy' - else: - raise NotImplementedError + if options.clipboard_copy is None: + if sys.platform == 'linux': + options.clipboard_copy = 'wl-copy' + elif sys.platform == 'darwin': + options.clipboard_copy = 'pbcopy' + else: + raise NotImplementedError - if len(args) == 0: - raise RuntimeError('ssh_command is required') + if len(args) == 0: + raise RuntimeError('ssh_command is required') - if options.debug: - print(options.pass_option) - pprint.pprint(args) + if options.debug: + print(options.pass_option) + pprint.pprint(args) - - - reset_gpg_agent = r''' + reset_gpg_agent = r""" gpgconf --kill gpg-agent && \ gpgconf --reload gpg-agent - ''' + """ - if not options.list: - t1 = options.pass_option - assert len(t1) > 0 + if not options.list: + t1 = options.pass_option + assert len(t1) > 0 - print( - 'select on of pass names\n%s' % '\n'.join([ - '%d: %s' % (k, v) - for k, v in enumerate(t1) - ]) - ) + print('select on of pass names\n%s' % '\n'.join(['%d: %s' % (k, v) for k, v in enumerate(t1)])) - while True: - try: - t2 = input() - t3 = int(t2) - assert t3 >= 0 and t3 < len(t1) - break - except AssertionError: - continue + while True: + try: + t2 = input() + t3 = int(t2) + assert t3 >= 0 and t3 < len(t1) + break + except AssertionError: + continue - command = r''' + command = r""" %s gpg \ --pinentry-mode=ask \ @@ -1531,521 +1440,516 @@ def pass_ssh_osx(argv): ~/.password-store/%s.gpg && \ echo -n '['$?']' && \ %s - ''' % ( - reset_gpg_agent, - t1[t3], - reset_gpg_agent, - ) - else: - command = 'pass list | less -R' + """ % ( + reset_gpg_agent, + t1[t3], + reset_gpg_agent, + ) + else: + command = 'pass list | less -R' - ssh_command = [ - 'ssh', '-C', - '-o', 'ConnectTimeout 10', - '-o', 'ServerAliveInterval 1', - *args, - '-t', - command, - ] + ssh_command = [ + 'ssh', + '-C', + '-o', + 'ConnectTimeout 10', + '-o', + 'ServerAliveInterval 1', + *args, + '-t', + command, + ] - if options.debug: - pprint.pprint( - dict( - ssh_command=ssh_command, - ) - ) + if options.debug: + pprint.pprint( + dict( + ssh_command=ssh_command, + ) + ) - if options.list: - subprocess.check_call(ssh_command) - else: - def clipboard_set(text): - with subprocess.Popen([ - options.clipboard_copy, - ], stdin=subprocess.PIPE) as p: - p.stdin.write(text.encode('utf-8')) - p.stdin.flush() - p.stdin.close() - p.wait(1) - assert p.poll() == 0 + if options.list: + subprocess.check_call(ssh_command) + else: - with subprocess.Popen( - ssh_command, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE - ) as p: - password = None - last_chunk = None + def clipboard_set(text): + with subprocess.Popen( + [ + options.clipboard_copy, + ], + stdin=subprocess.PIPE, + ) as p: + p.stdin.write(text.encode('utf-8')) + p.stdin.flush() + p.stdin.close() + p.wait(1) + assert p.poll() == 0 - hide_password = False - pinentry_delimeter = b'\x1b>' + with subprocess.Popen(ssh_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + password = None + last_chunk = None - def transform_callback(data): - nonlocal hide_password - nonlocal pinentry_delimeter + hide_password = False + pinentry_delimeter = b'\x1b>' - data2 = None + def transform_callback(data): + nonlocal hide_password + nonlocal pinentry_delimeter - if not last_chunk is None: - data = last_chunk['data'] + data + data2 = None - if hide_password: - data2 = b'' - elif pinentry_delimeter in data: - hide_password = True - pos = data.rfind(pinentry_delimeter) - if pos == -1: - 
data2 = data - else: - data2 = data[:pos + len(pinentry_delimeter)] - elif data == b'': - #return b'\r\n' - return b'' - else: - data2 = None + if not last_chunk is None: + data = last_chunk['data'] + data - return data2 + if hide_password: + data2 = b'' + elif pinentry_delimeter in data: + hide_password = True + pos = data.rfind(pinentry_delimeter) + if pos == -1: + data2 = data + else: + data2 = data[: pos + len(pinentry_delimeter)] + elif data == b'': + # return b'\r\n' + return b'' + else: + data2 = None - for chunk in intercept_output( - current_subprocess=p, - return_aggregated=True, - transform_callback=transform_callback, - real_time=True, - #timeout=10, - ): - if chunk['aggregated']: - last_chunk = chunk - break + return data2 - assert not p.poll() is None + for chunk in intercept_output( + current_subprocess=p, + return_aggregated=True, + transform_callback=transform_callback, + real_time=True, + # timeout=10, + ): + if chunk['aggregated']: + last_chunk = chunk + break - if p.poll() != 0: - logger.error(p.stderr.read()) - sys.exit(p.poll()) + assert not p.poll() is None - assert not last_chunk is None - assert last_chunk['returncode'] == 0 + if p.poll() != 0: + logger.error(p.stderr.read()) + sys.exit(p.poll()) - if options.debug: - pprint.pprint(last_chunk['data']) + assert not last_chunk is None + assert last_chunk['returncode'] == 0 - if last_chunk['data'].endswith('\r\n[0]'.encode('utf-8')) and \ - last_chunk['data'].rfind(pinentry_delimeter) != -1: - last_line = last_chunk['data'].splitlines()[-2] - else: - raise RuntimeError( - 'gpg failure %s' % str( - last_chunk['data'][ - max(last_chunk['data'].find(pinentry_delimeter), -128): - ] - ) - ) + if options.debug: + pprint.pprint(last_chunk['data']) - pos2 = last_line.rfind(pinentry_delimeter) - if pos2 == -1: - last_line2 = last_line - else: - last_line2 = last_line[ - pos2 + len(pinentry_delimeter): - ] + if last_chunk['data'].endswith('\r\n[0]'.encode('utf-8')) and last_chunk['data'].rfind(pinentry_delimeter) != -1: + last_line = last_chunk['data'].splitlines()[-2] + else: + raise RuntimeError('gpg failure %s' % str(last_chunk['data'][max(last_chunk['data'].find(pinentry_delimeter), -128) :])) - password = last_line2.decode('utf-8').rstrip('\r\n') - assert not password is None + pos2 = last_line.rfind(pinentry_delimeter) + if pos2 == -1: + last_line2 = last_line + else: + last_line2 = last_line[pos2 + len(pinentry_delimeter) :] + password = last_line2.decode('utf-8').rstrip('\r\n') + assert not password is None - if options.mode is pass_ssh_osx_t.kwargs_t.Mode.clipboard: - try: - clipboard_set(password) - get_time = lambda : datetime.datetime.now().timestamp() - start = get_time() - while True: - cur = get_time() - remains = 10 - (cur - start) - if remains <= 1e-8: - break - else: - print('\r%5.2fs remains' % remains, end='') - time.sleep(0.1) - except KeyboardInterrupt: - pass + if options.mode is pass_ssh_osx_t.kwargs_t.Mode.clipboard: + try: + clipboard_set(password) + get_time = lambda: datetime.datetime.now().timestamp() + start = get_time() + while True: + cur = get_time() + remains = 10 - (cur - start) + if remains <= 1e-8: + break + else: + print('\r%5.2fs remains' % remains, end='') + time.sleep(0.1) + except KeyboardInterrupt: + pass - clipboard_set('') - print('\rcleared cliboard\n', end='') - elif options.mode is pass_ssh_osx_t.kwargs_t.Mode.qrcode: - assert subprocess.run(r''' + clipboard_set('') + print('\rcleared cliboard\n', end='') + elif options.mode is pass_ssh_osx_t.kwargs_t.Mode.qrcode: + assert ( + 
subprocess.run( + r""" qrencode -t PNG -o - | feh - - ''', input=password.encode('utf-8'), shell=True).returncode == 0 - else: - raise NotImplementedError + """, + input=password.encode('utf-8'), + shell=True, + ).returncode + == 0 + ) + else: + raise NotImplementedError + def vpn(argv: list[str]) -> None: - python_path : list[str] + python_path: list[str] - if (pathlib.Path(__file__).parent / 'env3').exists(): - python_path = [ - str(pathlib.Path(__file__).parent / 'env3' / 'bin' / 'python3') - ] - elif (pathlib.Path(__file__).parent.parent.parent.parent / 'm').exists(): - python_path = [ - str(pathlib.Path(__file__).parent.parent.parent.parent / 'm'), - 'env', - '--', - ] - else: - python_path = [sys.executable] + if (pathlib.Path(__file__).parent / 'env3').exists(): + python_path = [str(pathlib.Path(__file__).parent / 'env3' / 'bin' / 'python3')] + elif (pathlib.Path(__file__).parent.parent.parent.parent / 'm').exists(): + python_path = [ + str(pathlib.Path(__file__).parent.parent.parent.parent / 'm'), + 'env', + '--', + ] + else: + python_path = [sys.executable] + + subprocess.check_call( + [ + 'sudo', + *python_path, + '-B', + '-Xfrozen_modules=off', + '-m', + 'online_fxreader.vpn.vpn', + *argv, + ] + ) - subprocess.check_call([ - 'sudo', - *python_path, - '-B', - '-Xfrozen_modules=off', - '-m', - 'online_fxreader.vpn.vpn', - *argv, - ]) def player_v1(folder_url, item_id): - #import sys - import urllib.parse - import re - import subprocess - import os - import tqdm - t4 = folder_url - t1 = subprocess.check_output(['curl', '-s', t4]).decode('utf-8') - t2 = re.compile(r"href=\"(.*\.mp3)\""); - t3 = [o.group(1) for o in t2.finditer(t1)]; - t5 = ['%s/%s' % (t4, o) for o in t3] - t6 = item_id - t9 = range(t6, len(t5)) - with tqdm.tqdm( - total=len(t5), - ) as progress_bar: - progress_bar.update(t6) - for k in t9: - t7 = t5[k] - t9 = urllib.parse.unquote(os.path.split(t7)[1]) - progress_bar.set_description('%03d %s' % (k, t9)) - with subprocess.Popen([ - 'ffprobe', - '-hide_banner', - '-i', - t7 - ], stderr=subprocess.PIPE, stdout=subprocess.PIPE) as p: - p.wait() - assert p.returncode == 0 - t8 = p.stderr.read().decode('utf-8') - assert isinstance(t8, str) - #print(t8) - with subprocess.Popen(['ffplay', '-hide_banner', '-nodisp', '-autoexit', '-loop', '1', t7], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) as p: - p.wait() - assert p.returncode == 0 - progress_bar.update(1) + # import sys + import urllib.parse + import re + import subprocess + import os + import tqdm + + t4 = folder_url + t1 = subprocess.check_output(['curl', '-s', t4]).decode('utf-8') + t2 = re.compile(r'href=\"(.*\.mp3)\"') + t3 = [o.group(1) for o in t2.finditer(t1)] + t5 = ['%s/%s' % (t4, o) for o in t3] + t6 = item_id + t9 = range(t6, len(t5)) + with tqdm.tqdm( + total=len(t5), + ) as progress_bar: + progress_bar.update(t6) + for k in t9: + t7 = t5[k] + t9 = urllib.parse.unquote(os.path.split(t7)[1]) + progress_bar.set_description('%03d %s' % (k, t9)) + with subprocess.Popen(['ffprobe', '-hide_banner', '-i', t7], stderr=subprocess.PIPE, stdout=subprocess.PIPE) as p: + p.wait() + assert p.returncode == 0 + t8 = p.stderr.read().decode('utf-8') + assert isinstance(t8, str) + # print(t8) + with subprocess.Popen( + ['ffplay', '-hide_banner', '-nodisp', '-autoexit', '-loop', '1', t7], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) as p: + p.wait() + assert p.returncode == 0 + progress_bar.update(1) + def numpy_linspace(a, b, count): - pos = a - step = (b - a) / count - steps = [] + pos = a + step = (b - a) 
/ count + steps = [] - for i in range(count): - if i == 0: - pos = a - elif i == count - 1: - pos = b - else: - pos = a + i * step - steps.append(pos) + for i in range(count): + if i == 0: + pos = a + elif i == count - 1: + pos = b + else: + pos = a + i * step + steps.append(pos) + + return steps - return steps def pm_service(argv): - parser = optparse.OptionParser() - parser.add_option( - '--events', - dest='events', - default=[], - action='append', - help='pb,tp,kb', - ) - parser.add_option( - '--verbose', - dest='verbose', - type=str, - default=None, - help='true,false', - ) - options, args = parser.parse_args(argv) + parser = optparse.OptionParser() + parser.add_option( + '--events', + dest='events', + default=[], + action='append', + help='pb,tp,kb', + ) + parser.add_option( + '--verbose', + dest='verbose', + type=str, + default=None, + help='true,false', + ) + options, args = parser.parse_args(argv) - if options.verbose is None: - options.verbose = False - else: - val = json.loads(options.verbose) - assert isinstance(val, bool) - options.verbose = val + if options.verbose is None: + options.verbose = False + else: + val = json.loads(options.verbose) + assert isinstance(val, bool) + options.verbose = val - if len(options.events) == 0: - options.events.extend([ - 'pb', - #'tp', 'kb' - ]) + if len(options.events) == 0: + options.events.extend( + [ + 'pb', + #'tp', 'kb' + ] + ) - assert all([ - o in [ - 'pb','tp', 'kb' - ] - for o in options.events - ]) + assert all([o in ['pb', 'tp', 'kb'] for o in options.events]) - assert sys.platform == 'darwin' + assert sys.platform == 'darwin' - wu = 0 + wu = 0 - while True: - subprocess.check_call([ - 'osascript', - '-e', - 'tell application "Finder" to sleep' - ]) - subprocess.check_call( - ['pmset','sleepnow'], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) + while True: + subprocess.check_call(['osascript', '-e', 'tell application "Finder" to sleep']) + subprocess.check_call( + ['pmset', 'sleepnow'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) - wu += 1 + wu += 1 + sample = ( + r'2024-03-19 22:00:36.808589+0300 0x4caa7 Default 0x0 102' + r'0 powerd: [com.apple.powerd:assertions] Process WindowServer.156 TurnedOn ' + r'UserIsActive "com.apple.iohideventsystem.queue.tickle serviceID:10000267f ' + r'service:AppleMultitouchDevice product:Apple Internal Keyboard / Trackpad ' + r'eventType:11" age:00:00:00 id:38654742889 [System: PrevIdle DeclUser kDisp]' + ) + assert isinstance(sample, str) - sample = ( - r'2024-03-19 22:00:36.808589+0300 0x4caa7 Default 0x0 102' - r'0 powerd: [com.apple.powerd:assertions] Process WindowServer.156 TurnedOn ' - r'UserIsActive "com.apple.iohideventsystem.queue.tickle serviceID:10000267f ' - r'service:AppleMultitouchDevice product:Apple Internal Keyboard / Trackpad ' - r'eventType:11" age:00:00:00 id:38654742889 [System: PrevIdle DeclUser kDisp]' - ) - assert isinstance(sample, str) + action = None + with subprocess.Popen(['log', 'stream'], stdout=subprocess.PIPE) as p: + for chunk in intercept_output( + p, + return_aggregated=False, + need_lines=True, + transform_callback=lambda x: b'', + ): + line = chunk['line'].decode('utf-8') + # p.stdout.readline().decode('utf-8') + cmd = None + if 'powerd' in line: + cmd = line + if options.verbose: + logging.error(json.dumps(dict(line=cmd))) - action = None - with subprocess.Popen(['log', 'stream'], stdout=subprocess.PIPE) as p: - for chunk in intercept_output( - p, - return_aggregated=False, - need_lines=True, - transform_callback=lambda x: 
b'', - ): - line = chunk['line'].decode('utf-8') - #p.stdout.readline().decode('utf-8') - cmd = None - if 'powerd' in line: - cmd = line - if options.verbose: - logging.error(json.dumps(dict(line=cmd))) + # cmd = subprocess.check_output(r''' + # log stream | grep --line-buffered -i \ + # -E 'powerd.*TurnedOn.*UserIsActive' | head -n 1 + #''', shell=True).decode('utf-8') - #cmd = subprocess.check_output(r''' - # log stream | grep --line-buffered -i \ - # -E 'powerd.*TurnedOn.*UserIsActive' | head -n 1 - #''', shell=True).decode('utf-8') + if not cmd is None and ('TurnedOn' in cmd or 'PrevIdle' in cmd or 'PMRD: kIOMessageSystemWillPowerOn' in cmd): + if ( + ('AppleMultitouchDevice' in cmd and 'tp' in options.events) + or ('AppleACPIButton' in cmd and 'pb' in options.events) + or ('eventType:29' in cmd and 'kb' in options.events) + ): + action = 'wake-up' + break + else: + action = 'sleep' + break - if not cmd is None and ( - 'TurnedOn' in cmd or - 'PrevIdle' in cmd or - 'PMRD: kIOMessageSystemWillPowerOn' in cmd - ): - if ( - ('AppleMultitouchDevice' in cmd and 'tp' in options.events) or - ('AppleACPIButton' in cmd and 'pb' in options.events) or - ('eventType:29' in cmd and 'kb' in options.events) - ): - action = 'wake-up' - break - else: - action = 'sleep' - break + if options.verbose: + logging.error( + json.dumps( + dict( + cmd=cmd, + action=action, + ) + ) + ) + else: + print('\r%s wu : %d, la : %s' % (datetime.datetime.now().isoformat(), wu, action), end='') + if action == 'wake-up': + break + elif action == 'sleep': + continue + else: + raise NotImplementedError - if options.verbose: - logging.error(json.dumps(dict(cmd=cmd, action=action,))) - else: - print('\r%s wu : %d, la : %s' % ( - datetime.datetime.now().isoformat(), - wu, - action - ), end='') + print('') - if action == 'wake-up': - break - elif action == 'sleep': - continue - else: - raise NotImplementedError - - print('') def scrap_yt_music(argv: list[str]) -> None: - parser = optparse.OptionParser() - parser.add_option( - '--verbose', - dest='verbose', - type=str, - default=None, - help='true,false', - ) - parser.add_option( - '-l', - '--library_path', - dest='library_path', - type=str, - default=None, - ) - options, args = parser.parse_args(argv) + parser = optparse.OptionParser() + parser.add_option( + '--verbose', + dest='verbose', + type=str, + default=None, + help='true,false', + ) + parser.add_option( + '-l', + '--library_path', + dest='library_path', + type=str, + default=None, + ) + options, args = parser.parse_args(argv) - if options.library_path is None: - options.library_path = os.path.abspath(os.path.curdir) + if options.library_path is None: + options.library_path = os.path.abspath(os.path.curdir) - if options.verbose is None: - options.verbose = False - else: - val = json.loads(options.verbose) - assert isinstance(val, bool) - options.verbose = val + if options.verbose is None: + options.verbose = False + else: + val = json.loads(options.verbose) + assert isinstance(val, bool) + options.verbose = val - import aiohttp.web + import aiohttp.web - def http_events(context, res_cb): - data = [] + def http_events(context, res_cb): + data = [] - async def handle(request): - data.append(request.rel_url.query.copy()) + async def handle(request): + data.append(request.rel_url.query.copy()) - res_cb(event=data[-1], events=data) - if len(data) > 128: - del data[:128] + res_cb(event=data[-1], events=data) + if len(data) > 128: + del data[:128] - return aiohttp.web.Response(text='ok') + return aiohttp.web.Response(text='ok') - 
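# Editor's note: a minimal client sketch, not part of the upstream patch,
# for the /status endpoint that handle() serves above; the host and port
# come from serve() just below, and the 'title' query parameter is the one
# http_on_event() consumes further down.
async def _example_client() -> None:
    import aiohttp

    async with aiohttp.ClientSession() as session:
        async with session.get(
            'http://127.0.0.1:8877/status',
            params={'title': 'Artist - Track'},
        ) as resp:
            assert await resp.text() == 'ok'
# usage, with the server running: asyncio.run(_example_client())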
async def serve(): - logging.info('http_events starting') - app = aiohttp.web.Application() - app.add_routes([aiohttp.web.get('/status', handle)]) - runner = aiohttp.web.AppRunner(app, handle_signals=False,) - await runner.setup() - site = aiohttp.web.TCPSite(runner, host='127.0.0.1', port=8877) - await site.start() + async def serve(): + logging.info('http_events starting') + app = aiohttp.web.Application() + app.add_routes([aiohttp.web.get('/status', handle)]) + runner = aiohttp.web.AppRunner( + app, + handle_signals=False, + ) + await runner.setup() + site = aiohttp.web.TCPSite(runner, host='127.0.0.1', port=8877) + await site.start() - logging.info('http_events started') + logging.info('http_events started') - while True: - await asyncio.sleep(1) - if context['shutdown']: - break + while True: + await asyncio.sleep(1) + if context['shutdown']: + break - await runner.cleanup() + await runner.cleanup() - logging.info('http_events done') + logging.info('http_events done') - asyncio.run(serve()) + asyncio.run(serve()) - #aiohttp.web.run_app( - # app, - # host='127.0.0.1', - # port=8877, - # handle_signals=False, - #) + # aiohttp.web.run_app( + # app, + # host='127.0.0.1', + # port=8877, + # handle_signals=False, + # ) - #while True: - # data.append( - # subprocess.check_output(r''' - # nc -w 1 -l 127.0.0.1 8877 | head -n 1 - # ''', shell=True,) - # ) + # while True: + # data.append( + # subprocess.check_output(r''' + # nc -w 1 -l 127.0.0.1 8877 | head -n 1 + # ''', shell=True,) + # ) - def audio_recorder(context): - current_name = None + def audio_recorder(context): + current_name = None - p = None + p = None - try: - while True: - with context['track_cv']: - context['track_cv'].wait(1) + try: + while True: + with context['track_cv']: + context['track_cv'].wait(1) - if context['track_name'] != current_name: - logging.info('audio_record, track changed, started') - if not p is None: - logging.info('audio_record, track changed, terminating') - p.terminate() - p.wait() - p = None - logging.info('audio_record, track changed, terminated') - current_name = context['track_name'] + if context['track_name'] != current_name: + logging.info('audio_record, track changed, started') + if not p is None: + logging.info('audio_record, track changed, terminating') + p.terminate() + p.wait() + p = None + logging.info('audio_record, track changed, terminated') + current_name = context['track_name'] - if context['shutdown']: - if not p is None: - p.terminate() - break + if context['shutdown']: + if not p is None: + p.terminate() + break - if p is None and not current_name is None: - output_name = os.path.join( - options.library_path, - '%s.mp3' % current_name - ) - logging.info('audio_record, new recording') - p = subprocess.Popen( - ['sox', '-d', output_name], - stdout=subprocess.DEVNULL, - stdin=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - ) - logging.info(json.dumps(dict(output_name=output_name))) - except Exception: - logging.error(traceback.format_exc()) - finally: - if not p is None: - p.terminate() + if p is None and not current_name is None: + output_name = os.path.join(options.library_path, '%s.mp3' % current_name) + logging.info('audio_record, new recording') + p = subprocess.Popen( + ['sox', '-d', output_name], + stdout=subprocess.DEVNULL, + stdin=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + logging.info(json.dumps(dict(output_name=output_name))) + except Exception: + logging.error(traceback.format_exc()) + finally: + if not p is None: + p.terminate() - class Context(TypedDict): - 
http_on_event: Callable[..., None] - shutdown: bool - workers: list[threading.Thread] - track_cv: threading.Condition - main_cv: threading.Condition - track_name: Optional[str] + class Context(TypedDict): + http_on_event: Callable[..., None] + shutdown: bool + workers: list[threading.Thread] + track_cv: threading.Condition + main_cv: threading.Condition + track_name: Optional[str] - context: Context = dict( - http_on_event=lambda *args, **kwargs: None, - shutdown=False, - workers=[], - track_cv=threading.Condition(), - main_cv=threading.Condition(), - track_name=None, - ) + context: Context = dict( + http_on_event=lambda *args, **kwargs: None, + shutdown=False, + workers=[], + track_cv=threading.Condition(), + main_cv=threading.Condition(), + track_name=None, + ) - context['workers'].extend([ - threading.Thread( - target=functools.partial( - http_events, - context=context, - res_cb=lambda *args, **kwargs: context['http_on_event'](*args, **kwargs), - ) - ), - threading.Thread( - target=functools.partial( - audio_recorder, - context=context, - ) - ), - ]) + context['workers'].extend( + [ + threading.Thread( + target=functools.partial( + http_events, + context=context, + res_cb=lambda *args, **kwargs: context['http_on_event'](*args, **kwargs), + ) + ), + threading.Thread( + target=functools.partial( + audio_recorder, + context=context, + ) + ), + ] + ) - def http_on_event(event, events): - with context['track_cv']: - if 'title' in event and event['title'].strip() != '': - context['track_name'] = str(event['title'])[:128].replace('\n', '') - else: - context['track_name'] = None + def http_on_event(event, events): + with context['track_cv']: + if 'title' in event and event['title'].strip() != '': + context['track_name'] = str(event['title'])[:128].replace('\n', '') + else: + context['track_name'] = None - logging.info(event) + logging.info(event) - context['http_on_event'] = http_on_event + context['http_on_event'] = http_on_event - print(r''' + print(r""" https://github.com/ExistentialAudio/BlackHole/wiki/Multi-Output-Device#5-set-audio-output-to-multi-output-device Open Youtube Music, @@ -2085,558 +1989,607 @@ def scrap_yt_music(argv: list[str]) -> None: })(); ``` - ''') + """) - for w in context['workers']: - w.start() + for w in context['workers']: + w.start() - #context['main_cv'] = threading.Condition() + # context['main_cv'] = threading.Condition() - def on_interrupt(*args, **kwargs): - logging.info('on_interrupt') - with context['main_cv']: - context['main_cv'].notify() + def on_interrupt(*args, **kwargs): + logging.info('on_interrupt') + with context['main_cv']: + context['main_cv'].notify() - signal.signal( - signal.SIGINT, - on_interrupt, - ) - signal.signal( - signal.SIGTERM, - on_interrupt, - ) + signal.signal( + signal.SIGINT, + on_interrupt, + ) + signal.signal( + signal.SIGTERM, + on_interrupt, + ) - with context['main_cv']: - context['main_cv'].wait() + with context['main_cv']: + context['main_cv'].wait() - with context['main_cv']: - context['shutdown'] = True - context['main_cv'].notify() - with context['track_cv']: - context['track_cv'].notify() + with context['main_cv']: + context['shutdown'] = True + context['main_cv'].notify() + with context['track_cv']: + context['track_cv'].notify() + + for o in context['workers']: + o.join() - for o in context['workers']: - o.join() def loginctl(argv: list[str]) -> None: - parser = argparse.ArgumentParser() - parser.add_argument( - '--action', - choices=[ - 'lock-session', - ], - ) + parser = argparse.ArgumentParser() + 
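# Editor's note: an illustrative pure-Python equivalent, not part of the
# upstream patch, of the shell pipeline used by the lock-session action
# below; like the jq filter, it picks the current user's sessions out of
# `loginctl list-sessions -j` and locks each of them.
def _example_lock_session() -> None:
    import json
    import os
    import subprocess

    sessions = json.loads(
        subprocess.check_output(
            ['loginctl', 'list-sessions', '-j'],
            timeout=1,
        )
    )
    for session in sessions:
        if session['uid'] == os.getuid():
            subprocess.check_call(['loginctl', 'lock-session', str(session['session'])])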
parser.add_argument( + '--action', + choices=[ + 'lock-session', + ], + ) - options = parser.parse_args(argv) + options = parser.parse_args(argv) - if options.action == 'lock-session': - subprocess.check_call(r''' + if options.action == 'lock-session': + subprocess.check_call( + r""" loginctl list-sessions -j | jq -r ".[] | select(.uid==$UID) | \ .session" | loginctl lock-session - ''', shell=True, timeout=1) - else: - raise NotImplementedError + """, + shell=True, + timeout=1, + ) + else: + raise NotImplementedError + def desktop_services(argv): - parser = optparse.OptionParser() - parser.add_option( - '--background_image', - dest='background_image', - default=None, - type=str, - ) - parser.add_option( - '--cpufreq', - dest='cpufreq', - default=None, - type=int, - help='0 - mac book air (no turbo boost, max pct 30, every 4 seconds', - ) - parser.add_option( - '--cpufreq-action', - dest='cpufreq_action', - default=None, - choices=[ - 'performance', - 'powersave', - ], - #type=str, - ) - parser.add_option( - '--battery', - dest='battery', - default=None, - type=int, - help='0 - battery check with sleep <10%, every 10 seconds', - ) - parser.add_option( - '--backlight-increase', - dest='backlight_increase', - default=False, - action='store_true', - help='increase keyboard backlight', - ) - parser.add_option( - '--backlight-type', - dest='backlight_type', - default=[], - action='append', - help='backlight type, like keyboard, output', - ) - parser.add_option( - '--backlight-decrease', - dest='backlight_decrease', - default=False, - action='store_true', - help='decrease keyboard backlight', - ) - parser.add_option( - '--backlight_service', - dest='backlight_service', - action='store_true', - default=False, - help='enable backlight_service', - ) - parser.add_option( - '--polkit_service', - dest='polkit_service', - action='store_true', - default=False, - help='enable polkit_service', - ) + parser = optparse.OptionParser() + parser.add_option( + '--background_image', + dest='background_image', + default=None, + type=str, + ) + parser.add_option( + '--cpufreq', + dest='cpufreq', + default=None, + type=int, + help='0 - mac book air (no turbo boost, max pct 30, every 4 seconds', + ) + parser.add_option( + '--cpufreq-action', + dest='cpufreq_action', + default=None, + choices=[ + 'performance', + 'powersave', + ], + # type=str, + ) + parser.add_option( + '--battery', + dest='battery', + default=None, + type=int, + help='0 - battery check with sleep <10%, every 10 seconds', + ) + parser.add_option( + '--backlight-increase', + dest='backlight_increase', + default=False, + action='store_true', + help='increase keyboard backlight', + ) + parser.add_option( + '--backlight-type', + dest='backlight_type', + default=[], + action='append', + help='backlight type, like keyboard, output', + ) + parser.add_option( + '--backlight-decrease', + dest='backlight_decrease', + default=False, + action='store_true', + help='decrease keyboard backlight', + ) + parser.add_option( + '--backlight_service', + dest='backlight_service', + action='store_true', + default=False, + help='enable backlight_service', + ) + parser.add_option( + '--polkit_service', + dest='polkit_service', + action='store_true', + default=False, + help='enable polkit_service', + ) - options, args = parser.parse_args(argv) + options, args = parser.parse_args(argv) - class VLC: - @classmethod - def vlc_is_playing_fullscreen(cls): - import subprocess - import json - #import sys - #import pprint + class VLC: + @classmethod + def 
vlc_is_playing_fullscreen(cls): + import subprocess + import json + # import sys + # import pprint - t2 = [] - try: - t1 = subprocess.check_output(['swaymsg', '-t', 'get_tree']).decode('utf-8') - t2 = json.loads(t1) - except Exception: - logging.error(traceback.format_exc()) + t2 = [] + try: + t1 = subprocess.check_output(['swaymsg', '-t', 'get_tree']).decode('utf-8') + t2 = json.loads(t1) + except Exception: + logging.error(traceback.format_exc()) - def walk(o, cb): - if isinstance(o, dict): - cb(o) - for k, v in o.items(): - walk(v, cb,) - elif isinstance(o, list): - cb(o) - for o2 in o: - walk(o2, cb,) - else: - cb(o) + def walk(o, cb): + if isinstance(o, dict): + cb(o) + for k, v in o.items(): + walk( + v, + cb, + ) + elif isinstance(o, list): + cb(o) + for o2 in o: + walk( + o2, + cb, + ) + else: + cb(o) - t3 = [] + t3 = [] - walk(t2, lambda o: [ - t3.append(o) - if isinstance(o, dict) and \ - 'fullscreen_mode' in o and \ - o['fullscreen_mode'] == 1 and \ - 'window_properties' in o and \ - 'class' in o['window_properties'] and \ - o['window_properties']['class'] == 'vlc' - else None - ]) + walk( + t2, + lambda o: [ + t3.append(o) + if isinstance(o, dict) + and 'fullscreen_mode' in o + and o['fullscreen_mode'] == 1 + and 'window_properties' in o + and 'class' in o['window_properties'] + and o['window_properties']['class'] == 'vlc' + else None + ], + ) - t4 = False + t4 = False - try: - t4 = subprocess.check_output([ - 'playerctl', '-p', 'vlc', 'status' - ], timeout=1,).decode('utf-8').strip() == 'Playing' - except Exception: - logging.error(traceback.format_exc()) + try: + t4 = ( + subprocess.check_output( + ['playerctl', '-p', 'vlc', 'status'], + timeout=1, + ) + .decode('utf-8') + .strip() + == 'Playing' + ) + except Exception: + logging.error(traceback.format_exc()) - #pprint.pprint(t3) + # pprint.pprint(t3) - return len(t3) > 0 and t4 + return len(t3) > 0 and t4 + class Battery: + def __init__( + self, + should_start=None, + ): + if should_start is None: + should_start = False - class Battery: - def __init__(self, should_start=None,): - if should_start is None: - should_start = False + assert isinstance(should_start, bool) - assert isinstance(should_start, bool) + self.last_check = None + self.period = 10 + self.is_running = should_start - self.last_check = None - self.period = 10 - self.is_running = should_start + def check_is_needed(self): + now = datetime.datetime.now(tz=datetime.timezone.utc) - def check_is_needed(self): - now = datetime.datetime.now(tz=datetime.timezone.utc) + is_needed = None - is_needed = None + if self.last_check is None: + is_needed = True + else: + if (now - self.last_check).total_seconds() >= self.period: + is_needed = True + else: + is_needed = False - if self.last_check is None: - is_needed = True - else: - if ((now - self.last_check).total_seconds() >= self.period): - is_needed = True - else: - is_needed = False + if is_needed: + self.last_check = now - if is_needed: - self.last_check = now + return is_needed - return is_needed + def run(self): + while True: + self.check() - def run(self): - while True: - self.check() + time.sleep(self.period) - time.sleep(self.period) + def terminate(self): + self.is_running = False - def terminate(self): - self.is_running = False + def wait(self, *args, **kwargs): + if self.is_running: + raise NotImplementedError - def wait(self, *args, **kwargs): - if self.is_running: - raise NotImplementedError + def poll(self): + if self.is_running: + return None + else: + return 0 - def poll(self): - if self.is_running: - return 
None - else: - return 0 - - @property - def percentage_low(self) -> int: - try: - return int(subprocess.check_output(r''' + @property + def percentage_low(self) -> int: + try: + return int( + subprocess.check_output( + r""" cat /etc/UPower/UPower.conf | grep -Po '^PercentageLow=\d+' - ''', shell=True,).decode('utf-8').strip().split('=')[1]) - except: - logger.exception('') - return 15 + """, + shell=True, + ) + .decode('utf-8') + .strip() + .split('=')[1] + ) + except: + logger.exception('') + return 15 - @property - def percentage_critical(self) -> int: - try: - return int(subprocess.check_output(r''' + @property + def percentage_critical(self) -> int: + try: + return int( + subprocess.check_output( + r""" cat /etc/UPower/UPower.conf | grep -Po '^PercentageCritical=\d+' - ''', shell=True,).decode('utf-8').strip().split('=')[1]) - except: - logger.exception('') - return 10 + """, + shell=True, + ) + .decode('utf-8') + .strip() + .split('=')[1] + ) + except: + logger.exception('') + return 10 - def check(self): - try: - if not self.check_is_needed(): - return + def check(self): + try: + if not self.check_is_needed(): + return - t1 = subprocess.check_output( - ['upower', '-d'], - timeout=1, - ).decode('utf-8') - t2 = [ - o for o in t1.splitlines() if 'percentage' in o.lower() - ] - t4 = [ - o for o in t1.splitlines() if 'state' in o.lower() - ] - t3 = float(t2[0].split(':')[1].strip()[:-1]) - t5 = any(['discharging' in o.lower() for o in t4]) - if t3 < self.percentage_critical and t5: - logging.error(json.dumps(dict( - msg='too low', t3=t3, t5=t5 - ))) - subprocess.check_call(['systemctl', 'suspend']) - elif t3 < self.percentage_low and t5: - msg = 'battery near low' - logging.error(json.dumps(dict( - msg=msg, t3=t3, t5=t5 - ))) - subprocess.check_call([ - 'notify-send', '-t', '%d' % (5 * 1000), msg, '% 5.2f' % t3, - ]) - else: - pass - print( - '\r%s % 5.2f%% %s' % ( - datetime.datetime.now().isoformat(), t3, str(t5) - ), - end='' - ) - except Exception: - logging.error(traceback.format_exc()) + t1 = subprocess.check_output( + ['upower', '-d'], + timeout=1, + ).decode('utf-8') + t2 = [o for o in t1.splitlines() if 'percentage' in o.lower()] + t4 = [o for o in t1.splitlines() if 'state' in o.lower()] + t3 = float(t2[0].split(':')[1].strip()[:-1]) + t5 = any(['discharging' in o.lower() for o in t4]) + if t3 < self.percentage_critical and t5: + logging.error(json.dumps(dict(msg='too low', t3=t3, t5=t5))) + subprocess.check_call(['systemctl', 'suspend']) + elif t3 < self.percentage_low and t5: + msg = 'battery near low' + logging.error(json.dumps(dict(msg=msg, t3=t3, t5=t5))) + subprocess.check_call( + [ + 'notify-send', + '-t', + '%d' % (5 * 1000), + msg, + '% 5.2f' % t3, + ] + ) + else: + pass + print('\r%s % 5.2f%% %s' % (datetime.datetime.now().isoformat(), t3, str(t5)), end='') + except Exception: + logging.error(traceback.format_exc()) - class Backlight: - class Direction(enum.Enum): - increase = 'increase' - decrease = 'decrease' - absolute = 'absolute' - get_state = 'get_state' + class Backlight: + class Direction(enum.Enum): + increase = 'increase' + decrease = 'decrease' + absolute = 'absolute' + get_state = 'get_state' - class Mode(enum.Enum): - light = 'light' + class Mode(enum.Enum): + light = 'light' - def __init__(self): - self.state = [] - self.dpms = Backlight.dpms_get() + def __init__(self): + self.state = [] + self.dpms = Backlight.dpms_get() - @classmethod - def dpms_get(cls): - try: - t1 = subprocess.check_output( - ['swaymsg', '-r', '-t', 'get_outputs'], - timeout=1 - ) - 
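# Editor's note: a hedged pure-Python sketch, not part of the upstream
# patch, of the UPower threshold lookup that percentage_low and
# percentage_critical implement above via `grep -Po`; it reads the same
# /etc/UPower/UPower.conf file and falls back to the same defaults
# (15 and 10) on any failure.
def _example_upower_threshold(key: str, default: int) -> int:
    import re

    try:
        with open('/etc/UPower/UPower.conf') as f:
            match = re.search(r'^%s=(\d+)' % re.escape(key), f.read(), re.M)
        return int(match.group(1)) if match else default
    except OSError:
        return default
# usage: _example_upower_threshold('PercentageLow', 15)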
t2 = t1.decode('utf-8') - t3 = json.loads(t2) - t4 = [ - dict( - id=o['id'], - name=o['name'], - dpms=o['dpms'], - ) - for o in t3 - ] + @classmethod + def dpms_get(cls): + try: + t1 = subprocess.check_output(['swaymsg', '-r', '-t', 'get_outputs'], timeout=1) + t2 = t1.decode('utf-8') + t3 = json.loads(t2) + t4 = [ + dict( + id=o['id'], + name=o['name'], + dpms=o['dpms'], + ) + for o in t3 + ] - return any([o['dpms'] for o in t4]) - except Exception: - return True + return any([o['dpms'] for o in t4]) + except Exception: + return True - def check(self): - try: - new_dpms = Backlight.dpms_get() - if new_dpms != self.dpms: - logging.info(json.dumps(dict( - module='backlight', - action='new_dpms', - dpms=self.dpms, - new_dpms=new_dpms, - ))) - if new_dpms: - Backlight.enable(self.state) - else: - self.state = Backlight.change( - Backlight.Direction.get_state, - ) - logging.info(json.dumps(dict( - state=pprint.pformat( - self.state, - width=1e+8, - compact=True, - ), - action='disable', - ))) - Backlight.disable() - self.dpms = new_dpms - except Exception: - logging.error(traceback.format_exc()) + def check(self): + try: + new_dpms = Backlight.dpms_get() + if new_dpms != self.dpms: + logging.info( + json.dumps( + dict( + module='backlight', + action='new_dpms', + dpms=self.dpms, + new_dpms=new_dpms, + ) + ) + ) + if new_dpms: + Backlight.enable(self.state) + else: + self.state = Backlight.change( + Backlight.Direction.get_state, + ) + logging.info( + json.dumps( + dict( + state=pprint.pformat( + self.state, + width=1e8, + compact=True, + ), + action='disable', + ) + ) + ) + Backlight.disable() + self.dpms = new_dpms + except Exception: + logging.error(traceback.format_exc()) - @classmethod - def get_state(cls): - raise NotImplementedError + @classmethod + def get_state(cls): + raise NotImplementedError - @classmethod - def set_state(cls): - raise NotImplementedError + @classmethod + def set_state(cls): + raise NotImplementedError - @classmethod - def disable(cls): - return cls.change( - cls.Direction.absolute, - 0, - ) + @classmethod + def disable(cls): + return cls.change( + cls.Direction.absolute, + 0, + ) - @classmethod - def enable(cls, state,): - res = [] - for device_state in state: - res.append( - cls.change( - direction=cls.Direction.absolute, - value=device_state['value'], - device_name=device_state['device_name'], - ) - ) - return res + @classmethod + def enable( + cls, + state, + ): + res = [] + for device_state in state: + res.append( + cls.change( + direction=cls.Direction.absolute, + value=device_state['value'], + device_name=device_state['device_name'], + ) + ) + return res - @classmethod - def change( - cls, - direction, - value=None, - devices=None, - device_name=None, - types=None, - ): - assert isinstance(direction, Backlight.Direction) + @classmethod + def change( + cls, + direction, + value=None, + devices=None, + device_name=None, + types=None, + ): + assert isinstance(direction, Backlight.Direction) - state = [] - devices_all = dict( - smc_kbd=dict( - sysfs_path='sysfs/leds/smc::kbd_backlight', - ), - intel_backlight=dict( - sysfs_path='sysfs/backlight/intel_backlight', - ), - ) + state = [] + devices_all = dict( + smc_kbd=dict( + sysfs_path='sysfs/leds/smc::kbd_backlight', + ), + intel_backlight=dict( + sysfs_path='sysfs/backlight/intel_backlight', + ), + ) - if devices is None: - devices = [] + if devices is None: + devices = [] - if not device_name is None: - devices.append(device_name) + if not device_name is None: + devices.append(device_name) - if len(devices) == 
0: - if types is None: - types = [ - 'keyboard', - 'output', - ] + if len(devices) == 0: + if types is None: + types = [ + 'keyboard', + 'output', + ] - for current_type in types: - if current_type == 'keyboard': - devices.extend([ - 'smc_kbd' - ]) - elif current_type == 'output': - devices.extend([ - 'intel_backlight', - ]) - else: - raise NotImplementedError - else: - assert types is None + for current_type in types: + if current_type == 'keyboard': + devices.extend(['smc_kbd']) + elif current_type == 'output': + devices.extend( + [ + 'intel_backlight', + ] + ) + else: + raise NotImplementedError + else: + assert types is None - devices2 = list(set(devices)) + devices2 = list(set(devices)) - if sys.platform == 'linux': - assert all([ - o in devices_all - for o in devices2 - ]) + if sys.platform == 'linux': + assert all([o in devices_all for o in devices2]) - leds = \ - [ - o.strip() - for o in subprocess.check_output( - ['light', '-L'], - timeout=1, - )\ - .decode('utf-8')\ - .splitlines()[1:] - ] + leds = [ + o.strip() + for o in subprocess.check_output( + ['light', '-L'], + timeout=1, + ) + .decode('utf-8') + .splitlines()[1:] + ] + for current_device_name in devices2: + device = devices_all[current_device_name] - for current_device_name in devices2: - device = devices_all[current_device_name] + sysfs_path = device['sysfs_path'] - sysfs_path = device['sysfs_path'] + if not sysfs_path in leds: + raise NotImplementedError - if not sysfs_path in leds: - raise NotImplementedError + extra_args = [] + if value is None: + value = 20.0 - extra_args = [] - if value is None: - value = 20.0 + value2 = max(float(value), 0.0) - value2 = max(float(value), 0.0) + assert isinstance(value2, float) and value >= -1e-8 - assert isinstance(value2, float) and value >= -1e-8 + if direction == cls.Direction.increase: + extra_args.extend(['-A', '%f' % value2]) + elif direction == cls.Direction.decrease: + extra_args.extend(['-U', '%f' % value2]) + elif direction == cls.Direction.absolute: + extra_args.extend(['-S', '%f' % value2]) + elif direction == cls.Direction.get_state: + pass + else: + raise NotImplementedError - if direction == cls.Direction.increase: - extra_args.extend(['-A', '%f' % value2]) - elif direction == cls.Direction.decrease: - extra_args.extend(['-U', '%f' % value2]) - elif direction == cls.Direction.absolute: - extra_args.extend(['-S', '%f' % value2]) - elif direction == cls.Direction.get_state: - pass - else: - raise NotImplementedError + get_current = lambda: float( + subprocess.check_output( + [ + 'light', + '-G', + '-s', + sysfs_path, + ], + timeout=1, + ).decode('utf-8') + ) - get_current = lambda : float(subprocess.check_output([ - 'light', '-G', - '-s', sysfs_path, - ], timeout=1).decode('utf-8')) + if not (direction == cls.Direction.get_state): + old_value = get_current() - if not (direction == cls.Direction.get_state): - old_value = get_current() + value_steps = None - value_steps = None + if direction == cls.Direction.decrease: + value_steps = numpy_linspace( + old_value, + max(old_value - value2, 0), + 10, + ) + elif direction == cls.Direction.increase: + value_steps = numpy_linspace( + old_value, + min(old_value + value2, 100), + 10, + ) + elif direction == cls.Direction.absolute: + value_steps = numpy_linspace( + old_value, + min( + max( + 0, + value2, + ), + 100, + ), + 10, + ) + else: + raise NotImplementedError - if direction == cls.Direction.decrease: - value_steps = numpy_linspace( - old_value, - max(old_value - value2, 0), - 10, - ) - elif direction == 
cls.Direction.increase: - value_steps = numpy_linspace( - old_value, - min(old_value + value2, 100), - 10, - ) - elif direction == cls.Direction.absolute: - value_steps = numpy_linspace( - old_value, - min( - max( - 0, - value2, - ), - 100 - ), - 10, - ) - else: - raise NotImplementedError + for current_value in value_steps: + subprocess.check_call( + [ + 'light', + '-v', + '3', + '-s', + sysfs_path, + '-S', + '%f' % current_value, + ], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + time.sleep(0.05) - for current_value in value_steps: - subprocess.check_call( - [ - 'light', '-v', '3', - '-s', sysfs_path, - '-S', '%f' % current_value, - ], - stderr=subprocess.PIPE, - stdout=subprocess.PIPE - ) - time.sleep(0.05) + state.append( + dict( + mode=cls.Mode.light, + device_path=sysfs_path, + device_name=current_device_name, + value=get_current(), + ) + ) + else: + raise NotImplementedError - state.append( - dict( - mode=cls.Mode.light, - device_path=sysfs_path, - device_name=current_device_name, - value=get_current(), - ) - ) - else: - raise NotImplementedError + return state - return state + class Cpufreq: + @classmethod + def profile( + cls, + ) -> Literal[ + 'applesmc.768', + 'intel_pstate', + ]: + if os.path.exists('/sys/bus/platform/devices/applesmc.768'): + return 'applesmc.768' + if os.path.exists('/sys/devices/system/cpu/intel_pstate/no_turbo'): + return 'intel_pstate' + else: + raise NotImplementedError - class Cpufreq: - @classmethod - def profile(cls) -> Literal[ - 'applesmc.768', - 'intel_pstate', - ]: - if os.path.exists('/sys/bus/platform/devices/applesmc.768'): - return 'applesmc.768' - if os.path.exists('/sys/devices/system/cpu/intel_pstate/no_turbo'): - return 'intel_pstate' - else: - raise NotImplementedError - - @classmethod - def powersave(cls): - if cls.profile() == 'applesmc.768': - subprocess.check_call(r''' + @classmethod + def powersave(cls): + if cls.profile() == 'applesmc.768': + subprocess.check_call( + r""" #echo performance | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; #echo powersave | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; #echo 1 > /sys/bus/platform/devices/applesmc.768/fan1_manual; @@ -2649,21 +2602,27 @@ echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; echo 1 > /sys/bus/platform/devices/applesmc.768/fan1_manual; echo 2000 > /sys/bus/platform/devices/applesmc.768/fan1_output; - ''', shell=True) - elif cls.profile() == 'intel_pstate': - subprocess.check_call(r''' + """, + shell=True, + ) + elif cls.profile() == 'intel_pstate': + subprocess.check_call( + r""" echo performance | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; echo 40 > /sys/devices/system/cpu/intel_pstate/max_perf_pct; echo 900000 | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq; echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; - ''', shell=True) - else: - raise NotImplementedError + """, + shell=True, + ) + else: + raise NotImplementedError - @classmethod - def performance(cls): - if cls.profile() == 'applesmc.768': - subprocess.check_call(r''' + @classmethod + def performance(cls): + if cls.profile() == 'applesmc.768': + subprocess.check_call( + r""" #echo powersave | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; #echo performance | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; @@ -2674,92 +2633,102 @@ echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; echo 1 > /sys/bus/platform/devices/applesmc.768/fan1_manual; echo 6500 > 
/sys/bus/platform/devices/applesmc.768/fan1_output; - ''', shell=True) - elif cls.profile() == 'intel_pstate': - subprocess.check_call(r''' + """, + shell=True, + ) + elif cls.profile() == 'intel_pstate': + subprocess.check_call( + r""" echo powersave | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; echo 60 > /sys/devices/system/cpu/intel_pstate/max_perf_pct; echo 1200000 | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_max_freq; echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; - ''', shell=True) - else: - raise NotImplementedError + """, + shell=True, + ) + else: + raise NotImplementedError - if options.backlight_increase or options.backlight_decrease: - if options.backlight_increase: - direction = Backlight.Direction.increase - elif options.backlight_decrease: - direction = Backlight.Direction.decrease - else: - raise NotImplementedError + if options.backlight_increase or options.backlight_decrease: + if options.backlight_increase: + direction = Backlight.Direction.increase + elif options.backlight_decrease: + direction = Backlight.Direction.decrease + else: + raise NotImplementedError - Backlight.change( - direction=direction, - types=options.backlight_type, - ) + Backlight.change( + direction=direction, + types=options.backlight_type, + ) - return + return - elif not options.cpufreq_action is None: - if options.cpufreq_action == 'performance': - Cpufreq.performance() - elif options.cpufreq_action == 'powersave': - Cpufreq.powersave() - else: - raise NotImplementedError + elif not options.cpufreq_action is None: + if options.cpufreq_action == 'performance': + Cpufreq.performance() + elif options.cpufreq_action == 'powersave': + Cpufreq.powersave() + else: + raise NotImplementedError - return - else: - pass + return + else: + pass - #logger.info(dict( - # environ=os.environ, - # #session=subprocess.check_output(['loginctl', 'show-session'], timeout=1,), - #)) + # logger.info(dict( + # environ=os.environ, + # #session=subprocess.check_output(['loginctl', 'show-session'], timeout=1,), + # )) - sway_sock_res = sway_sock(True) + sway_sock_res = sway_sock(True) - if sway_sock_res: - os.environ['SWAYSOCK'] = sway_sock_res + if sway_sock_res: + os.environ['SWAYSOCK'] = sway_sock_res - assert all([ - env_name in os.environ - for env_name in [ - 'GTK_IM_MODULE', - 'XMODIFIERS', - 'QT_IM_MODULE', - #'I3SOCK', - #'XDG_SEAT', - 'SWAYSOCK', - 'WAYLAND_DISPLAY', - ] - ]) and os.environ['SWAYSOCK'] == sway_sock() - services = [] + assert ( + all( + [ + env_name in os.environ + for env_name in [ + 'GTK_IM_MODULE', + 'XMODIFIERS', + 'QT_IM_MODULE', + #'I3SOCK', + #'XDG_SEAT', + 'SWAYSOCK', + 'WAYLAND_DISPLAY', + ] + ] + ) + and os.environ['SWAYSOCK'] == sway_sock() + ) + services = [] - shutdown = False + shutdown = False - def on_interrupt(*args, **kwargs): - logging.info('blah') - nonlocal shutdown - shutdown = True + def on_interrupt(*args, **kwargs): + logging.info('blah') + nonlocal shutdown + shutdown = True - signal.signal( - signal.SIGINT, - on_interrupt, - ) - signal.signal( - signal.SIGTERM, - on_interrupt, - ) + signal.signal( + signal.SIGINT, + on_interrupt, + ) + signal.signal( + signal.SIGTERM, + on_interrupt, + ) - try: - if options.cpufreq == 0: - #logging.info('launching cpufreq, need sudo') - #subprocess.check_call(['sudo', 'whoami']) + try: + if options.cpufreq == 0: + # logging.info('launching cpufreq, need sudo') + # subprocess.check_call(['sudo', 'whoami']) - services.append( - subprocess.Popen( - r''' + services.append( + subprocess.Popen( + 
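+					# Descriptive note: the shell heredoc below runs as a watchdog;
+					# it keeps re-applying the cpufreq policy for as long as the
+					# parent process (/proc/<pid>) exists, then exits on its own.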
r""" exec sh -c 'echo cpufreq, user; whoami; while [[ -a /proc/{pid} ]]; do #echo passive > /sys/devices/system/cpu/intel_pstate/status; @@ -2773,46 +2742,44 @@ echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; done; sleep 10; done;' - '''.format(pid=os.getpid()), - shell=True, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - stdin=subprocess.DEVNULL, - ) - ) + """.format(pid=os.getpid()), + shell=True, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + stdin=subprocess.DEVNULL, + ) + ) - class start_swayidle: - def __init__(self): - swaylock_cmd = [ - 'swaylock', '-f', '-d', - ] - if not options.background_image is None: - swaylock_cmd.extend( - [ - '-i', - '"%s"' % options.background_image, - ] - ) + class start_swayidle: + def __init__(self): + swaylock_cmd = [ + 'swaylock', + '-f', + '-d', + ] + if not options.background_image is None: + swaylock_cmd.extend( + [ + '-i', + '"%s"' % options.background_image, + ] + ) - self.commands = dict( - swaylock_cmd2=' '.join(swaylock_cmd), - timeout1='echo timeout1; swaymsg "output * dpms off";', - lock='echo lock; pkill --signal SIGUSR1 swayidle;', - unlock='echo unlock; pkill --signal SIGINT swaylock; swaymsg "output * dpms on";', - unlock2='pkill --signal SIGINT swaylock;', - resume='echo resume; swaymsg "output * dpms on";', - before_sleep='echo before_sleep; commands loginctl lock-session;', - #before_sleep='echo blah;', - after_resume='echo after_resume; pkill --signal SIGUSR1 swayidle;', - ) - self.last_force_idle = None - self.commands.update( - timeout2='echo timeout2; {swaylock_cmd};'.format( - swaylock_cmd=self.commands['swaylock_cmd2'] - ) - ) - self.swayidle = subprocess.Popen( - r''' + self.commands = dict( + swaylock_cmd2=' '.join(swaylock_cmd), + timeout1='echo timeout1; swaymsg "output * dpms off";', + lock='echo lock; pkill --signal SIGUSR1 swayidle;', + unlock='echo unlock; pkill --signal SIGINT swaylock; swaymsg "output * dpms on";', + unlock2='pkill --signal SIGINT swaylock;', + resume='echo resume; swaymsg "output * dpms on";', + before_sleep='echo before_sleep; commands loginctl lock-session;', + # before_sleep='echo blah;', + after_resume='echo after_resume; pkill --signal SIGUSR1 swayidle;', + ) + self.last_force_idle = None + self.commands.update(timeout2='echo timeout2; {swaylock_cmd};'.format(swaylock_cmd=self.commands['swaylock_cmd2'])) + self.swayidle = subprocess.Popen( + r""" exec swayidle -d -w \ timeout 300 'echo t1; read;' \ resume 'echo t5; ' \ @@ -2822,1146 +2789,1121 @@ echo schedutil | tee /sys/devices/system/cpu/cpu*/cpufreq/scaling_governor; unlock 'echo t3;' \ before-sleep 'echo t6; read;' \ after-resume 'echo t7; read;' 2>&1 - ''', - shell=True, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.DEVNULL, - restore_signals=False, - preexec_fn=lambda : os.setpgrp(), - ) - self.output = intercept_output( - self.swayidle, - real_time=True, - transform_callback=lambda x: [logging.info(x), b''][-1], - ) - self.events = [] - self.last_skip_loop = None - self.data = [] - if options.backlight_service: - self.backlight = Backlight() - else: - self.backlight = None - self.bg = None - self.bg_terminate = False + """, + shell=True, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL, + restore_signals=False, + preexec_fn=lambda: os.setpgrp(), + ) + self.output = intercept_output( + self.swayidle, + real_time=True, + transform_callback=lambda x: [logging.info(x), b''][-1], + ) + self.events = [] + self.last_skip_loop = None + 
self.data = [] + if options.backlight_service: + self.backlight = Backlight() + else: + self.backlight = None + self.bg = None + self.bg_terminate = False - def skip_loop_long_ago(self): - if self.last_skip_loop is None or ( - datetime.datetime.now() - self.last_skip_loop - ).total_seconds() >= 30: - self.last_skip_loop = datetime.datetime.now() - return True - else: - return False + def skip_loop_long_ago(self): + if self.last_skip_loop is None or (datetime.datetime.now() - self.last_skip_loop).total_seconds() >= 30: + self.last_skip_loop = datetime.datetime.now() + return True + else: + return False - def background_check(self): - if ( - self.bg is None or \ - not self.bg.poll() is None - ) and not self.bg_terminate: - if not options.background_image is None: - self.bg = subprocess.Popen([ - 'swaybg', - '--output', - '*', - '--image', - options.background_image, - '--mode', - 'fill', - ]) + def background_check(self): + if (self.bg is None or not self.bg.poll() is None) and not self.bg_terminate: + if not options.background_image is None: + self.bg = subprocess.Popen( + [ + 'swaybg', + '--output', + '*', + '--image', + options.background_image, + '--mode', + 'fill', + ] + ) - def background_terminate(self, *args, **kwargs): - if not self.bg is None: - self.bg_terminate = True - self.bg.terminate(*args, **kwargs) + def background_terminate(self, *args, **kwargs): + if not self.bg is None: + self.bg_terminate = True + self.bg.terminate(*args, **kwargs) - def poll(self): - return self.swayidle.poll() + def poll(self): + return self.swayidle.poll() - def release_lock(self): - self.swayidle.stdin.write(b'\n') - self.swayidle.stdin.flush() + def release_lock(self): + self.swayidle.stdin.write(b'\n') + self.swayidle.stdin.flush() - def force_idle(self): - if self.last_force_idle is None or ( - datetime.datetime.now() - self.last_force_idle - ).total_seconds() >= 10: - self.last_force_idle = datetime.datetime.now() - return True - else: - return False + def force_idle(self): + if self.last_force_idle is None or (datetime.datetime.now() - self.last_force_idle).total_seconds() >= 10: + self.last_force_idle = datetime.datetime.now() + return True + else: + return False - def terminate(self, *args, **kwargs): - self.background_terminate() + def terminate(self, *args, **kwargs): + self.background_terminate() - return self.swayidle.terminate(*args, **kwargs) + return self.swayidle.terminate(*args, **kwargs) - def wait(self, *args, **kwargs): - return self.swayidle.wait(*args, **kwargs) + def wait(self, *args, **kwargs): + return self.swayidle.wait(*args, **kwargs) - def kill(self): - return self.swayidle.kill() + def kill(self): + return self.swayidle.kill() - def dpms(self, direction): - assert direction in ['on', 'off'] + def dpms(self, direction): + assert direction in ['on', 'off'] - raise NotImplementedError + raise NotImplementedError - def check(self): - new_events = [] + def check(self): + new_events = [] - class event_t(enum.Enum): - idle_state = 'idle state' - active_state = 'active state' + class event_t(enum.Enum): + idle_state = 'idle state' + active_state = 'active state' - while True: - if self.output is None: - break + while True: + if self.output is None: + break - chunk = next(self.output) + chunk = next(self.output) - if chunk['aggregated']: - self.output = None - continue + if chunk['aggregated']: + self.output = None + continue - if len(chunk['data']) == 0: - break + if len(chunk['data']) == 0: + break - self.data.append(chunk) + self.data.append(chunk) - if b'\n' in 
chunk['data']: - total = b''.join([ - o['data'] - for o in self.data - ]).decode('utf-8') - sep_pos = total.rfind('\n') - lines = total[:sep_pos].splitlines() - self.data = [ - dict( - data=total[sep_pos:].encode('utf-8'), - aggregated=False, - ) - ] - for line in lines: - if event_t.idle_state.value in line: - line = event_t.idle_state.value - elif event_t.active_state.value in line: - line = event_t.active_state.value - else: - pass + if b'\n' in chunk['data']: + total = b''.join([o['data'] for o in self.data]).decode('utf-8') + sep_pos = total.rfind('\n') + lines = total[:sep_pos].splitlines() + self.data = [ + dict( + data=total[sep_pos:].encode('utf-8'), + aggregated=False, + ) + ] + for line in lines: + if event_t.idle_state.value in line: + line = event_t.idle_state.value + elif event_t.active_state.value in line: + line = event_t.active_state.value + else: + pass - if line in [ - 't1', 't2', 't3', 't4', - 't5', 't5', 't6', 't7', - event_t.idle_state.value, - event_t.active_state.value, - ]: - new_events.append(line) + if line in [ + 't1', + 't2', + 't3', + 't4', + 't5', + 't5', + 't6', + 't7', + event_t.idle_state.value, + event_t.active_state.value, + ]: + new_events.append(line) - def retry(cb, cnt=None): - if cnt is None: - cnt = 10 + def retry(cb, cnt=None): + if cnt is None: + cnt = 10 - i = 0 - while True: - logging.info('retry i = %d, cnt = %d' % (i, cnt)) + i = 0 + while True: + logging.info('retry i = %d, cnt = %d' % (i, cnt)) - if not ( - subprocess.call(['swaymsg', '-t', 'get_version']) == 0 - ): - continue + if not (subprocess.call(['swaymsg', '-t', 'get_version']) == 0): + continue - if cb() == 0: - break + if cb() == 0: + break - time.sleep(0.5) + time.sleep(0.5) - i += 1 + i += 1 - if ( - len(new_events) > 0 or \ - len(self.events) > 0 and \ - self.skip_loop_long_ago() - ): - self.events.extend(new_events) + if len(new_events) > 0 or len(self.events) > 0 and self.skip_loop_long_ago(): + self.events.extend(new_events) - skip_loop = False + skip_loop = False - if ( - all([ - o in ['t1', 't4'] - for o in self.events - ]) and \ - VLC.vlc_is_playing_fullscreen() and \ - self.backlight.dpms - ): - skip_loop = True - logging.info( - 'skip loop, %s' % ( - [ - json.dumps(self.events), - self.backlight.dpms, - VLC.vlc_is_playing_fullscreen(), - self.events, - new_events, - ], - ) - ) - elif ( - len(new_events) == 0 and \ - len(self.events) > 1 and \ - all([ - o in ['t1', 't4'] - for o in self.events - ]) - ): - self.events = ['t4'] - elif len(self.events) > 1 and ( - self.events == ['t1', 't4', 't5', 't5'] or \ - self.events == ['t1', 't5', 't5'] or \ - self.events == ['t1', 't5'] - ): - for o in new_events: - self.release_lock() + if all([o in ['t1', 't4'] for o in self.events]) and VLC.vlc_is_playing_fullscreen() and self.backlight.dpms: + skip_loop = True + logging.info( + 'skip loop, %s' + % ( + [ + json.dumps(self.events), + self.backlight.dpms, + VLC.vlc_is_playing_fullscreen(), + self.events, + new_events, + ], + ) + ) + elif len(new_events) == 0 and len(self.events) > 1 and all([o in ['t1', 't4'] for o in self.events]): + self.events = ['t4'] + elif len(self.events) > 1 and (self.events == ['t1', 't4', 't5', 't5'] or self.events == ['t1', 't5', 't5'] or self.events == ['t1', 't5']): + for o in new_events: + self.release_lock() - self.events = [] + self.events = [] - for o in self.events: - if skip_loop: - self.release_lock() - continue + for o in self.events: + if skip_loop: + self.release_lock() + continue - if o == 't1': - #if self.force_idle(): - # 
subprocess.check_call(self.commands['lock'], shell=True) - logging.info('started t1') - if self.force_idle(): - subprocess.check_call(self.commands['timeout1'], shell=True) - logging.info('done t1') - self.release_lock() - elif o == 't2': - logging.info('started lock') - if self.force_idle(): - custom_notify( - title='swayidle', - msg='loginctl lock started', - ) - while True: - if not subprocess.call( - self.commands['lock'], shell=True - ) == 0: - continue - if not subprocess.call( - self.commands['timeout2'], shell=True - ) == 0: - #continue - pass - if not subprocess.call( - self.commands['timeout1'], shell=True - ) == 0: - continue - break - logging.info('done lock') - self.release_lock() - elif o == 't3': - pass - elif o == 't4': - logging.info('started t4') - if self.force_idle(): - subprocess.check_call(self.commands['lock'], shell=True) - subprocess.call(self.commands['timeout2'], shell=True) - subprocess.check_call(self.commands['timeout1'], shell=True) - logging.info('done t4') - self.release_lock() - elif o == 't5': - logging.info('started timeout resume') - if self.force_idle(): - subprocess.check_call(self.commands['lock'], shell=True) - retry( - lambda: subprocess.call(self.commands['resume'], shell=True), - ) - logging.info('done timeout resume') - elif o == 't6': - logging.info('started before-sleep') - if self.force_idle(): - subprocess.call(self.commands['timeout2'], shell=True), - subprocess.check_call(self.commands['timeout1'], shell=True), - logging.info('done before-sleep') - self.release_lock() - elif o == 't7': - logging.info('started after-resume') - #if self.force_idle(): - #subprocess.check_call(self.commands['lock'], shell=True) - while True: - if subprocess.call( - self.commands['resume'], - shell=True - ) == 0: - break - else: - time.sleep(0.5) - logging.info('done after-resume') - self.release_lock() - elif o in [ - event_t.idle_state.value, - event_t.active_state.value, - ]: - logging.info(json.dumps(dict(o=o))) - else: - logging.error(json.dumps(dict(o=o))) - raise NotImplementedError + if o == 't1': + # if self.force_idle(): + # subprocess.check_call(self.commands['lock'], shell=True) + logging.info('started t1') + if self.force_idle(): + subprocess.check_call(self.commands['timeout1'], shell=True) + logging.info('done t1') + self.release_lock() + elif o == 't2': + logging.info('started lock') + if self.force_idle(): + custom_notify( + title='swayidle', + msg='loginctl lock started', + ) + while True: + if not subprocess.call(self.commands['lock'], shell=True) == 0: + continue + if not subprocess.call(self.commands['timeout2'], shell=True) == 0: + # continue + pass + if not subprocess.call(self.commands['timeout1'], shell=True) == 0: + continue + break + logging.info('done lock') + self.release_lock() + elif o == 't3': + pass + elif o == 't4': + logging.info('started t4') + if self.force_idle(): + subprocess.check_call(self.commands['lock'], shell=True) + subprocess.call(self.commands['timeout2'], shell=True) + subprocess.check_call(self.commands['timeout1'], shell=True) + logging.info('done t4') + self.release_lock() + elif o == 't5': + logging.info('started timeout resume') + if self.force_idle(): + subprocess.check_call(self.commands['lock'], shell=True) + retry( + lambda: subprocess.call(self.commands['resume'], shell=True), + ) + logging.info('done timeout resume') + elif o == 't6': + logging.info('started before-sleep') + if self.force_idle(): + (subprocess.call(self.commands['timeout2'], shell=True),) + 
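+								# Note: the trailing commas wrap these calls in throwaway
+								# 1-tuples; the calls still execute, but plain statements
+								# would state the intent more clearly.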
(subprocess.check_call(self.commands['timeout1'], shell=True),) + logging.info('done before-sleep') + self.release_lock() + elif o == 't7': + logging.info('started after-resume') + # if self.force_idle(): + # subprocess.check_call(self.commands['lock'], shell=True) + while True: + if subprocess.call(self.commands['resume'], shell=True) == 0: + break + else: + time.sleep(0.5) + logging.info('done after-resume') + self.release_lock() + elif o in [ + event_t.idle_state.value, + event_t.active_state.value, + ]: + logging.info(json.dumps(dict(o=o))) + else: + logging.error(json.dumps(dict(o=o))) + raise NotImplementedError - if not skip_loop: - pprint.pprint(self.events) - del self.events[:] + if not skip_loop: + pprint.pprint(self.events) + del self.events[:] - if not self.backlight is None: - self.backlight.check() + if not self.backlight is None: + self.backlight.check() - self.background_check() + self.background_check() - if options.polkit_service: - services.extend([ - subprocess.Popen( - ['/usr/lib/polkit-gnome/polkit-gnome-authentication-agent-1'] - ) - ]) + if options.polkit_service: + services.extend([subprocess.Popen(['/usr/lib/polkit-gnome/polkit-gnome-authentication-agent-1'])]) - services.extend([ - subprocess.Popen( - ['ibus-daemon'], - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - stdin=subprocess.DEVNULL, - ), - start_swayidle(), - ]) + services.extend( + [ + subprocess.Popen( + ['ibus-daemon'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + stdin=subprocess.DEVNULL, + ), + start_swayidle(), + ] + ) - if not options.battery is None: - assert options.battery in [0] + if not options.battery is None: + assert options.battery in [0] - logging.info('launching battery') - services.append( - Battery( - should_start=True, - ) - ) + logging.info('launching battery') + services.append( + Battery( + should_start=True, + ) + ) + while True: + if shutdown: + logging.info('shutdown') + break - while True: - if shutdown: - logging.info('shutdown') - break + if all([not o.poll() is None for o in services]): + logging.info('done') + break - if all([not o.poll() is None for o in services]): - logging.info('done') - break + for o in services: + if hasattr(o, 'check'): + o.check() - for o in services: - if hasattr(o, 'check'): - o.check() - - time.sleep(0.1) - except Exception: - logging.error(traceback.format_exc()) - finally: - for o in services: - try: - o.terminate() - o.wait(timeout=10) - except Exception: - logging.error(traceback.format_exc()) - logging.error('killed %s' % str(o.__dict__)) - o.kill() + time.sleep(0.1) + except Exception: + logging.error(traceback.format_exc()) + finally: + for o in services: + try: + o.terminate() + o.wait(timeout=10) + except Exception: + logging.error(traceback.format_exc()) + logging.error('killed %s' % str(o.__dict__)) + o.kill() def suspend_timer(argv): - import datetime; - import subprocess; - import time; - #import sys; - if len(argv) == 0: - print("enter HH:MM"); - t3 = input().strip() - else: - t3 = argv[0] - t2 = datetime.datetime.strptime(t3, "%H:%M").time() - while True: - t1 = datetime.datetime.now() - if ((t1.hour, t1.minute) >= (t2.hour, t2.minute)): - break - else: - t3 = [ - (t2.hour - t1.hour), - t2.minute - t1.minute - ] - if t3[1] < 0: - t3[1] += 60 - t3[0] -= 1 - print("\r%s, %02d:%02d" % ( - t1, - *t3, - ), end="") - time.sleep(1) - print("suspend computer at %s" % t1.isoformat()) - subprocess.check_call(["systemctl", "suspend"]); + import datetime + import subprocess + import time + + # import sys; + if 
len(argv) == 0: + print('enter HH:MM') + t3 = input().strip() + else: + t3 = argv[0] + t2 = datetime.datetime.strptime(t3, '%H:%M').time() + while True: + t1 = datetime.datetime.now() + if (t1.hour, t1.minute) >= (t2.hour, t2.minute): + break + else: + t3 = [(t2.hour - t1.hour), t2.minute - t1.minute] + if t3[1] < 0: + t3[1] += 60 + t3[0] -= 1 + print( + '\r%s, %02d:%02d' + % ( + t1, + *t3, + ), + end='', + ) + time.sleep(1) + print('suspend computer at %s' % t1.isoformat()) + subprocess.check_call(['systemctl', 'suspend']) + def gnome_shortcuts(argv: list[str]) -> None: - parser = optparse.OptionParser() - parser.add_option( - '-a', '--add', - action='store_true', - default=None, - ) - parser.add_option( - '-l', '--list', - action='store_true', - default=None, - ) + parser = optparse.OptionParser() + parser.add_option( + '-a', + '--add', + action='store_true', + default=None, + ) + parser.add_option( + '-l', + '--list', + action='store_true', + default=None, + ) - options, args = parser.parse_args(argv) + options, args = parser.parse_args(argv) - def commands_ids() -> list[str]: - bindings = subprocess.check_output([ - 'gsettings', 'get', 'org.gnome.settings-daemon.plugins.media-keys', - 'custom-keybindings', - ]).decode('utf-8').strip().replace('\'', '"',) - if bindings == '@as []': - t1 = [] - else: - t1 = json.loads(bindings) + def commands_ids() -> list[str]: + bindings = ( + subprocess.check_output( + [ + 'gsettings', + 'get', + 'org.gnome.settings-daemon.plugins.media-keys', + 'custom-keybindings', + ] + ) + .decode('utf-8') + .strip() + .replace( + "'", + '"', + ) + ) + if bindings == '@as []': + t1 = [] + else: + t1 = json.loads(bindings) - return t1 + return t1 - def add_command(name, command, binding): - command_id = len(commands_ids()) + def add_command(name, command, binding): + command_id = len(commands_ids()) - for cmd in [ - ( - 'gsettings', 'set', 'org.gnome.settings-daemon.plugins.media-keys', - 'custom-keybindings', '[%s]' % ','.join([ - "'%s'" % \ - ( - '/org/gnome/settings-daemon/plugins/media-keys' - '/custom-keybindings/custom%d/' - ) % o - for o in range(command_id + 1) - ]), - ), - ( - 'gsettings', 'set', - ( - 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding' - ':/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' - ) % command_id, - 'name', name, - ), - ( - 'gsettings', 'set', - ( - 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding' - ':/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' - ) % command_id, - 'command', command, - ), - ( - 'gsettings', 'set', - ( - 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding' - ':/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' - ) % command_id, - 'binding', binding, - ), - ]: - subprocess.check_call(cmd) + for cmd in [ + ( + 'gsettings', + 'set', + 'org.gnome.settings-daemon.plugins.media-keys', + 'custom-keybindings', + '[%s]' % ','.join(["'%s'" % ('/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') % o for o in range(command_id + 1)]), + ), + ( + 'gsettings', + 'set', + ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + % command_id, + 'name', + name, + ), + ( + 'gsettings', + 'set', + ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + % command_id, + 'command', + command, + ), + ( + 'gsettings', + 
'set', + ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + % command_id, + 'binding', + binding, + ), + ]: + subprocess.check_call(cmd) - if options.list: - t1 = commands_ids() + if options.list: + t1 = commands_ids() - t2 = [ - { - k : json.loads(subprocess.check_output([ - 'gsettings', 'get', - ( - 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:%s' - ) % o, - k, - ]).decode('utf-8').replace('\'', '"',)) - for k in ['name', 'binding', 'command'] - } - for o in t1 - ] + t2 = [ + { + k: json.loads( + subprocess.check_output( + [ + 'gsettings', + 'get', + ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:%s') % o, + k, + ] + ) + .decode('utf-8') + .replace( + "'", + '"', + ) + ) + for k in ['name', 'binding', 'command'] + } + for o in t1 + ] - pprint.pprint(t2) - elif options.add: - add_command(*args) - else: - raise NotImplementedError + pprint.pprint(t2) + elif options.add: + add_command(*args) + else: + raise NotImplementedError def socat_ssh(argv): - parser = optparse.OptionParser() - parser.add_option( - '--local_port', - dest='local_port', - default=None, - type=int, - ) - parser.add_option( - '--ssh_key', - dest='ssh_key', - default=None, - type=str, - ) - parser.add_option( - '--socat_verbose', - dest='socat_verbose', - action='store_true', - default=False, - ) - parser.add_option( - '--ssh_host', - dest='ssh_host', - default=None, - type=str, - ) - parser.add_option( - '--target_port', - dest='target_port', - default=None, - type=int, - ) - parser.add_option( - '--ssh_command', - dest='ssh_command', - default=None, - type=str, - ) - parser.add_option( - '--gateway_command', - dest='gateway_command', - default=None, - type=str, - help=( - 'a shell command that forwards ssh socket data ' - 'somewhere else, like ' - 'busybox nc 127.0.0.1 $(cat remote-ssh.port)' - ), - ) - options, args = parser.parse_args(argv) + parser = optparse.OptionParser() + parser.add_option( + '--local_port', + dest='local_port', + default=None, + type=int, + ) + parser.add_option( + '--ssh_key', + dest='ssh_key', + default=None, + type=str, + ) + parser.add_option( + '--socat_verbose', + dest='socat_verbose', + action='store_true', + default=False, + ) + parser.add_option( + '--ssh_host', + dest='ssh_host', + default=None, + type=str, + ) + parser.add_option( + '--target_port', + dest='target_port', + default=None, + type=int, + ) + parser.add_option( + '--ssh_command', + dest='ssh_command', + default=None, + type=str, + ) + parser.add_option( + '--gateway_command', + dest='gateway_command', + default=None, + type=str, + help=('a shell command that forwards ssh socket data somewhere else, like busybox nc 127.0.0.1 $(cat remote-ssh.port)'), + ) + options, args = parser.parse_args(argv) - if options.ssh_command is None: - ssh_command = ['ssh', '-T', '-C'] - else: - ssh_command = options.ssh_command.split() + if options.ssh_command is None: + ssh_command = ['ssh', '-T', '-C'] + else: + ssh_command = options.ssh_command.split() - if not options.ssh_key is None: - subprocess.check_call(['ssh-add', options.ssh_key]) - ssh_command.extend([ - '-i', options.ssh_key, - ]) + if not options.ssh_key is None: + subprocess.check_call(['ssh-add', options.ssh_key]) + ssh_command.extend( + [ + '-i', + options.ssh_key, + ] + ) - if not options.ssh_host is None: - ssh_command.extend([options.ssh_host]) + if not options.ssh_host is None: + ssh_command.extend([options.ssh_host]) - restart = False + restart = 
False
-    def on_interrupt(*args, **kwargs):
-        nonlocal restart
-        restart = True
+	def on_interrupt(*args, **kwargs):
+		nonlocal restart
+		restart = True
+	socat_command = ['socat']
-    socat_command = ['socat']
+	if options.socat_verbose:
+		socat_command.extend(['-v'])
-    if options.socat_verbose:
-        socat_command.extend(['-v'])
+	socat_command.extend(
+		[
+			'tcp-listen:%d,fork,bind=127.0.0.1' % (options.local_port,),
+		]
+	)
-    socat_command.extend([
-        'tcp-listen:%d,fork,bind=127.0.0.1' % (
-            options.local_port,
-        ),
-    ])
+	signal.signal(
+		signal.SIGINT,
+		on_interrupt,
+	)
+	signal.signal(
+		signal.SIGTERM,
+		on_interrupt,
+	)
-    signal.signal(
-        signal.SIGINT,
-        on_interrupt,
-    )
-    signal.signal(
-        signal.SIGTERM,
-        on_interrupt,
-    )
+	gateway = None
+	p = None
-    gateway = None
-    p = None
-
-    while True:
-        if gateway is None:
-            gateway = tempfile.NamedTemporaryFile(suffix='.sh', mode='w')
-            gateway.write(
-                r'''
+	while True:
+		if gateway is None:
+			gateway = tempfile.NamedTemporaryFile(suffix='.sh', mode='w')
+			gateway.write(
+				r"""
exec %s
-                ''' % ' '.join(
-                    ssh_command + [options.gateway_command]
-                )
-            )
-            gateway.flush()
+				"""
+				% ' '.join(ssh_command + [options.gateway_command])
+			)
+			gateway.flush()
-        if p is None:
-            p = subprocess.Popen(
-                socat_command + [
-                    'EXEC:sh %s' % gateway.name,
-                ]
-            )
+		if p is None:
+			p = subprocess.Popen(
+				socat_command
+				+ [
+					'EXEC:sh %s' % gateway.name,
+				]
+			)
-        time.sleep(1)
+		time.sleep(1)
-        if restart:
-            try:
-                p.terminate()
-                p.wait(10)
-            except Exception:
-                p.kill()
+		if restart:
+			try:
+				p.terminate()
+				p.wait(10)
+			except Exception:
+				p.kill()
-            restart = False
+			restart = False
-        if not p.poll() is None:
-            p = None
+		if not p.poll() is None:
+			p = None
-    if not gateway is None:
-        os.path.unlink(gateway.name)
-    if not p is None:
-        p.terminate()
+	if not gateway is None:
+		os.unlink(gateway.name)
+	if not p is None:
+		p.terminate()

 def share_wifi(argv):
-    parser = optparse.OptionParser()
-    parser.add_option(
-        '--to-wifi',
-        dest='to_wifi',
-        default=None,
-        type=str,
-    )
-    parser.add_option(
-        '--from-eth',
-        dest='from_eth',
-        default=None,
-        type=str,
-    )
-    parser.add_option(
-        '--channel',
-        dest='channel',
-        default=None,
-        type=int,
-    )
-    parser.add_option(
-        '--ap-name',
-        dest='ap_name',
-        default=None,
-        type=str,
-    )
-    parser.add_option(
-        '--restart-delay',
-        dest='restart_delay',
-        default=None,
-        type=int,
-    )
-    options, args = parser.parse_args(argv)
+	parser = optparse.OptionParser()
+	parser.add_option(
+		'--to-wifi',
+		dest='to_wifi',
+		default=None,
+		type=str,
+	)
+	parser.add_option(
+		'--from-eth',
+		dest='from_eth',
+		default=None,
+		type=str,
+	)
+	parser.add_option(
+		'--channel',
+		dest='channel',
+		default=None,
+		type=int,
+	)
+	parser.add_option(
+		'--ap-name',
+		dest='ap_name',
+		default=None,
+		type=str,
+	)
+	parser.add_option(
+		'--restart-delay',
+		dest='restart_delay',
+		default=None,
+		type=int,
+	)
+	options, args = parser.parse_args(argv)
-    if options.restart_delay is None:
-        options.restart_delay = 2
+	if options.restart_delay is None:
+		options.restart_delay = 2
+	assert not options.to_wifi is None
+	assert not options.from_eth is None
+	assert not options.ap_name is None
+	assert options.restart_delay >= 1
-    assert not options.to_wifi is None
-    assert not options.from_eth is None
-    assert not options.ap_name is None
-    assert options.restart_delay >= 1
-    print('enter password:')
+	print('enter password:')
+	pw = subprocess.check_output('read -s PW; echo -n $PW', shell=True).decode('utf-8')
+	if
len(pw) == 0: + pw = subprocess.check_output('pwgen -syn 20 1', shell=True).decode('utf-8').strip() + with subprocess.Popen(['qrencode', '-t', 'UTF8'], stdin=subprocess.PIPE) as p: + p.stdin.write(pw.encode('utf-8')) + p.stdin.flush() + p.stdin.close() + try: + p.wait(5) + except Exception as exception: + p.kill() + raise exception - pw = subprocess.check_output( - 'read -s PW; echo -n $PW', - shell=True - ).decode('utf-8') - if len(pw) == 0: - pw = subprocess.check_output( - 'pwgen -syn 20 1', - shell=True - ).decode('utf-8').strip() - with subprocess.Popen( - ['qrencode', '-t', 'UTF8'], - stdin=subprocess.PIPE - ) as p: - p.stdin.write(pw.encode('utf-8')) - p.stdin.flush() - p.stdin.close() - try: - p.wait(5) - except Exception as exception: - p.kill() - raise exception + last_timestamp = datetime.datetime.now() + hostapd = None + restart = False + shutdown = False - last_timestamp = datetime.datetime.now() - hostapd = None - restart = False - shutdown = False + def on_interrupt(sig, *args, **kwargs): + nonlocal restart + nonlocal shutdown - def on_interrupt(sig, *args, **kwargs): - nonlocal restart - nonlocal shutdown + if sig == signal.SIGINT: + restart = True + elif sig == signal.SIGTERM: + shutdown = True + else: + raise NotImplementedError - if sig == signal.SIGINT: - restart = True - elif sig == signal.SIGTERM: - shutdown = True - else: - raise NotImplementedError + signal.signal( + signal.SIGINT, + on_interrupt, + ) + signal.signal( + signal.SIGTERM, + on_interrupt, + ) - signal.signal( - signal.SIGINT, - on_interrupt, - ) - signal.signal( - signal.SIGTERM, - on_interrupt, - ) + hostapd_args = [ + 'create_ap', + '--hostapd-timestamps', + options.to_wifi, + options.from_eth, + options.ap_name, + pw, + ] + if not options.channel is None: + hostapd_args.extend(['-c', '%d' % options.channel]) - hostapd_args = [ - 'create_ap', - '--hostapd-timestamps', - options.to_wifi, - options.from_eth, - options.ap_name, - pw, - ] - if not options.channel is None: - hostapd_args.extend(['-c', '%d' % options.channel]) + while True: + try: + if hostapd is None: + print('\n%s, start new' % last_timestamp) + hostapd = subprocess.Popen(hostapd_args) + else: + if restart or shutdown: + print('\n%s, shutdown current' % last_timestamp) + os.kill(hostapd.pid, signal.SIGINT) + try: + hostapd.wait(20) + except Exception: + hostapd.terminate() + restart = False - while True: - try: - if hostapd is None: - print('\n%s, start new' % last_timestamp) - hostapd = subprocess.Popen(hostapd_args) - else: - if restart or shutdown: - print('\n%s, shutdown current' % last_timestamp) - os.kill( - hostapd.pid, - signal.SIGINT - ) - try: - hostapd.wait(20) - except Exception: - hostapd.terminate() - restart = False + if not hostapd.poll() is None: + hostapd = None - if not hostapd.poll() is None: - hostapd = None + if shutdown: + break - if shutdown: - break + if (datetime.datetime.now() - last_timestamp).total_seconds() > options.restart_delay: + restart = True - if ( - datetime.datetime.now() - last_timestamp - ).total_seconds() > options.restart_delay: - restart = True + last_timestamp = datetime.datetime.now() + except Exception: + print(traceback.format_exc()) + restart = True + finally: + time.sleep(1) - last_timestamp = datetime.datetime.now() - except Exception: - print(traceback.format_exc()) - restart = True - finally: - time.sleep(1) def status(argv): - import inspect - import textwrap + import inspect + import textwrap - assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + assert 
isinstance(argv, list) and all([isinstance(o, str) for o in argv]) - class c1(optparse.IndentedHelpFormatter): - def format_option(self, *args, **kwargs): - f1 = lambda text, width: '\n'.join([ - textwrap.fill('\t' + o, width, replace_whitespace=False) - for o in text.splitlines() - ]).splitlines() - t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option) - t2 = '\n'.join([o[4:] for o in t1.splitlines()[:]]).replace( - 'textwrap.wrap', 'f1', - ).replace('format_option', 'f2') - exec(t2, dict(f1=f1), locals()) - return locals()['f2'](self, *args, **kwargs) + class c1(optparse.IndentedHelpFormatter): + def format_option(self, *args, **kwargs): + f1 = lambda text, width: '\n'.join([textwrap.fill('\t' + o, width, replace_whitespace=False) for o in text.splitlines()]).splitlines() + t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option) + t2 = ( + '\n'.join([o[4:] for o in t1.splitlines()[:]]) + .replace( + 'textwrap.wrap', + 'f1', + ) + .replace('format_option', 'f2') + ) + exec(t2, dict(f1=f1), locals()) + return locals()['f2'](self, *args, **kwargs) - parser = optparse.OptionParser( - formatter=c1( - width=None, - ), - ) - parser.add_option( - '--sh', - dest='sh', - default=[], - action='append', - type=str, - ) - parser.add_option( - '--timeout', - dest='timeout', - default=None, - type=float, - ) - parser.add_option( - '--config', - dest='config', - default=None, - type=str, - help=''.join([ - '.json file with array of strings, each is a shell command ', - 'that outputs a separate status text value, ', - 'like\n', - r''' + parser = optparse.OptionParser( + formatter=c1( + width=None, + ), + ) + parser.add_option( + '--sh', + dest='sh', + default=[], + action='append', + type=str, + ) + parser.add_option( + '--timeout', + dest='timeout', + default=None, + type=float, + ) + parser.add_option( + '--config', + dest='config', + default=None, + type=str, + help=''.join( + [ + '.json file with array of strings, each is a shell command ', + 'that outputs a separate status text value, ', + 'like\n', + r""" ping -w 1 -i 0.02 -c 3 | tail -n 2| head -n 1 | grep -Po $'time\\s+.*$' sensors -j | jq -r '.\"coretemp-isa-0000\".\"Package id 0\".temp1_input|tostring + \" C\"' printf '%d RPM' $(cat /sys/devices/platform/applesmc.768/fan1_input) printf '% 3.0f%%' $(upower -d | grep -Po 'percentage:\\s+\\d+(\\.\\d+)?%' | grep -Po '\\d+(\\.\\d+)?' 
| head -n 1) - '''.strip() - ]) - ) - options, args = parser.parse_args(argv) + """.strip(), + ] + ), + ) + options, args = parser.parse_args(argv) - if options.timeout is None: - options.timeout = 0.5 + if options.timeout is None: + options.timeout = 0.5 - timeout2 = max(options.timeout, 0.0) + timeout2 = max(options.timeout, 0.0) - assert timeout2 >= 0.0 and timeout2 <= 4 + assert timeout2 >= 0.0 and timeout2 <= 4 - config = dict() - try: - if not options.config is None: - with io.open(options.config, 'r') as f: - config.update( - json.load(f) - ) - except Exception: - logging.error(traceback.format_exc()) - pass + config = dict() + try: + if not options.config is None: + with io.open(options.config, 'r') as f: + config.update(json.load(f)) + except Exception: + logging.error(traceback.format_exc()) + pass - options.sh.extend( - config.get('sh', []) - ) + options.sh.extend(config.get('sh', [])) - t1 = [] + t1 = [] - for sh_index, o in enumerate([ - *options.sh, - *[ - r''' + for sh_index, o in enumerate( + [ + *options.sh, + *[ + r""" A=$(free -h | grep -P Mem: | grep -Po '[\w\.\d]+'); echo -n $A | awk '{print $2, $7}'; - ''', - r''' + """, + r""" date +'%Y-%m-%d %l:%M:%S %p'; - ''', - ], - ]): - try: - t1.append( - subprocess.check_output( - o, - shell=True, - timeout=timeout2, - ).decode('utf-8').strip() - ) - except Exception: - t1.append('fail %d' % sh_index) + """, + ], + ] + ): + try: + t1.append( + subprocess.check_output( + o, + shell=True, + timeout=timeout2, + ) + .decode('utf-8') + .strip() + ) + except Exception: + t1.append('fail %d' % sh_index) - t3 = ' | '.join(t1).replace('\n\r', '') + t3 = ' | '.join(t1).replace('\n\r', '') - sys.stdout.write(t3) - sys.stdout.flush() + sys.stdout.write(t3) + sys.stdout.flush() -def custom_translate(current_string, check, none_char=None,): - if none_char is None: - none_char = '.' - class A: - def __getitem__(self, k): - t1 = chr(k) +def custom_translate( + current_string, + check, + none_char=None, +): + if none_char is None: + none_char = '.' 
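+	# str.translate() accepts any object that maps code points via
+	# __getitem__: returning a str substitutes the character, while
+	# returning the character itself keeps it, so the helper class
+	# below serves as a lazy translation table driven by check().
+	# Hypothetical usage: custom_translate('a\x00b', lambda k, c: c.isprintable())
+	# returns 'a.b'.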
+ + class A: + def __getitem__(self, k): + t1 = chr(k) + + t2 = check(k, t1) + if isinstance(t2, bool): + if t2: + return t1 + else: + return none_char + elif isinstance(t2, str): + return t2 + + return current_string.translate(A()) - t2 = check(k, t1) - if isinstance(t2, bool): - if t2: - return t1 - else: - return none_char - elif isinstance(t2, str): - return t2 - return current_string.translate( - A() - ) def media_keys(argv): - assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) - parser = argparse.ArgumentParser() - parser.add_argument( - #'-c', '--command', - 'command', - #dest='command', - type=str, - default=None, - ) + assert isinstance(argv, list) and all([isinstance(o, str) for o in argv]) + parser = argparse.ArgumentParser() + parser.add_argument( + #'-c', '--command', + 'command', + # dest='command', + type=str, + default=None, + ) - options, args = parser.parse_known_args(argv) + options, args = parser.parse_known_args(argv) - if options.command is None and len(args) > 0: - assert len(args) == 1 - options.command = args[0] + if options.command is None and len(args) > 0: + assert len(args) == 1 + options.command = args[0] - assert options.command in [ - 'media-play-pause', - 'media-next', - 'media-forward-seconds', - 'media-backward-seconds', - 'media-prev', - 'media-lower-volume', - 'media-raise-volume', - 'media-toggle-volume', - ] + assert options.command in [ + 'media-play-pause', + 'media-next', + 'media-forward-seconds', + 'media-backward-seconds', + 'media-prev', + 'media-lower-volume', + 'media-raise-volume', + 'media-toggle-volume', + ] - msg = None + msg = None - mode = None - is_mocp = lambda : \ - subprocess.call([ - 'pgrep', - '-u', os.environ['USER'], - 'mocp', - ], stdout=subprocess.PIPE) == 0 + mode = None + is_mocp = ( + lambda: subprocess.call( + [ + 'pgrep', + '-u', + os.environ['USER'], + 'mocp', + ], + stdout=subprocess.PIPE, + ) + == 0 + ) - def mocp_info() -> str: - t1 = subprocess.check_output(['mocp', '-i']) - t3 = t1.decode('utf-8') - t2 : dict[str, str] = dict([ - (lambda o2: (o2[0], o2[1]))(o.split(':')) - #tuple(o.split(':')[:2]) - for o in t3.splitlines() - ]) + def mocp_info() -> str: + t1 = subprocess.check_output(['mocp', '-i']) + t3 = t1.decode('utf-8') + t2: dict[str, str] = dict( + [ + (lambda o2: (o2[0], o2[1]))(o.split(':')) + # tuple(o.split(':')[:2]) + for o in t3.splitlines() + ] + ) - return t2['Title'].strip()[:128] + return t2['Title'].strip()[:128] - if is_mocp(): - mode = 'mocp' - else: - mode = 'playerctl' + if is_mocp(): + mode = 'mocp' + else: + mode = 'playerctl' - if options.command == 'media-play-pause': - if mode == 'mocp': - subprocess.check_call(['mocp', '--toggle-pause']) - msg = mocp_info() - elif mode == 'playerctl': - subprocess.check_call(['playerctl', 'play-pause']) - msg = player_metadata() - else: - raise NotImplementedError - elif options.command == 'media-next': - if mode == 'mocp': - subprocess.check_call(['mocp', '--next']) - msg = mocp_info() - elif mode == 'playerctl': - subprocess.check_call(['playerctl', 'next']) - msg = player_metadata() - else: - raise NotImplementedError - elif options.command == 'media-backward-seconds': - if mode == 'mocp': - raise NotImplementedError - elif mode == 'playerctl': - pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) - subprocess.check_call([ - 'playerctl', - 'position', - '%f' % (pos - float(args[0])) - ]) - #msg = player_metadata() - else: - raise NotImplementedError - elif options.command == 'media-forward-seconds': - 
if mode == 'mocp': - raise NotImplementedError - elif mode == 'playerctl': - pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) - subprocess.check_call([ - 'playerctl', - 'position', - '%f' % (pos + float(args[0])) - ]) - #msg = player_metadata() - else: - raise NotImplementedError - elif options.command == 'media-prev': - if mode == 'mocp': - subprocess.check_call(['mocp', '--previous']) - msg = mocp_info() - elif mode == 'playerctl': - subprocess.check_call(['playerctl', 'previous']) - msg = player_metadata() - else: - raise NotImplementedError - elif options.command == 'media-lower-volume': - subprocess.check_call([ - 'pactl', - 'set-sink-volume', - '@DEFAULT_SINK@', - '-5%' - ]) - msg = subprocess.check_output([ - 'pactl', - 'get-sink-volume', - '@DEFAULT_SINK@' - ]).decode('utf-8').strip() - elif options.command == 'media-toggle-volume': - subprocess.check_call([ - 'pactl', - 'set-sink-mute', - '@DEFAULT_SINK@', - 'toggle', - ]) - msg = subprocess.check_output([ - 'pactl', - 'get-sink-volume', - '@DEFAULT_SINK@' - ]).decode('utf-8').strip() - elif options.command == 'media-raise-volume': - subprocess.check_call([ - 'pactl', - 'set-sink-volume', - '@DEFAULT_SINK@', - '+5%' - ]) - msg = subprocess.check_output([ - 'pactl', - 'get-sink-volume', - '@DEFAULT_SINK@' - ]).decode('utf-8').strip() - else: - raise NotImplementedError + if options.command == 'media-play-pause': + if mode == 'mocp': + subprocess.check_call(['mocp', '--toggle-pause']) + msg = mocp_info() + elif mode == 'playerctl': + subprocess.check_call(['playerctl', 'play-pause']) + msg = player_metadata() + else: + raise NotImplementedError + elif options.command == 'media-next': + if mode == 'mocp': + subprocess.check_call(['mocp', '--next']) + msg = mocp_info() + elif mode == 'playerctl': + subprocess.check_call(['playerctl', 'next']) + msg = player_metadata() + else: + raise NotImplementedError + elif options.command == 'media-backward-seconds': + if mode == 'mocp': + raise NotImplementedError + elif mode == 'playerctl': + pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) + subprocess.check_call(['playerctl', 'position', '%f' % (pos - float(args[0]))]) + # msg = player_metadata() + else: + raise NotImplementedError + elif options.command == 'media-forward-seconds': + if mode == 'mocp': + raise NotImplementedError + elif mode == 'playerctl': + pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) + subprocess.check_call(['playerctl', 'position', '%f' % (pos + float(args[0]))]) + # msg = player_metadata() + else: + raise NotImplementedError + elif options.command == 'media-prev': + if mode == 'mocp': + subprocess.check_call(['mocp', '--previous']) + msg = mocp_info() + elif mode == 'playerctl': + subprocess.check_call(['playerctl', 'previous']) + msg = player_metadata() + else: + raise NotImplementedError + elif options.command == 'media-lower-volume': + subprocess.check_call(['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '-5%']) + msg = subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip() + elif options.command == 'media-toggle-volume': + subprocess.check_call( + [ + 'pactl', + 'set-sink-mute', + '@DEFAULT_SINK@', + 'toggle', + ] + ) + msg = subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip() + elif options.command == 'media-raise-volume': + subprocess.check_call(['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '+5%']) + msg = 
subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip()
+	else:
+		raise NotImplementedError
-    logging.info(
-        json.dumps(
-            dict(
-                command=options.command,
-                msg=msg,
-                mode=mode,
-            ),
-            ensure_ascii=False
-        )
-    )
+	logging.info(
+		json.dumps(
+			dict(
+				command=options.command,
+				msg=msg,
+				mode=mode,
+			),
+			ensure_ascii=False,
+		)
+	)
+	return dict(
+		msg=msg,
+	)
-    return dict(
-        msg=msg,
-    )

 def install(argv: list[str]) -> None:
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        '-r',
-        dest='recursive',
-    )
-    parser.add_argument(
-        '-s', '--source',
-        help='source file/dir to install (copy with permissions preserving)',
-        dest='source',
-        type=pathlib.Path,
-        required=True,
-    )
-    parser.add_argument(
-        '-p', '--relative',
-        type=pathlib.Path,
-        help='copy source relative to path, allows to strip extra components',
-        dest='relative',
-        default=None,
-    )
-    parser.add_argument(
-        '-t', '--target',
-        type=pathlib.Path,
-        help='target file/dir to install (copy with permissions preserving)',
-        dest='target',
-        required=True,
-    )
-    parser.add_argument(
-        '-f', '--overwrite',
-        help='overwrite if target is present',
-        dest='overwrite',
-        action='store_true',
-        default=False,
-    )
+	parser = argparse.ArgumentParser()
+	parser.add_argument(
+		'-r',
+		dest='recursive',
+	)
+	parser.add_argument(
+		'-s',
+		'--source',
+		help='source file/dir to install (copy with permissions preserving)',
+		dest='source',
+		type=pathlib.Path,
+		required=True,
+	)
+	parser.add_argument(
+		'-p',
+		'--relative',
+		type=pathlib.Path,
+		help='copy source relative to path, allows to strip extra components',
+		dest='relative',
+		default=None,
+	)
+	parser.add_argument(
+		'-t',
+		'--target',
+		type=pathlib.Path,
+		help='target file/dir to install (copy with permissions preserving)',
+		dest='target',
+		required=True,
+	)
+	parser.add_argument(
+		'-f',
+		'--overwrite',
+		help='overwrite if target is present',
+		dest='overwrite',
+		action='store_true',
+		default=False,
+	)
-    options, args = parser.parse_known_args(argv)
+	options, args = parser.parse_known_args(argv)
-    if options.relative:
-        relative_source = options.source.relative_to(options.relative)
-    else:
-        if options.source.is_absolute():
-            relative_source = options.source.relative_to('/')
-        else:
-            relative_source = options.source
+	if options.relative:
+		relative_source = options.source.relative_to(options.relative)
+	else:
+		if options.source.is_absolute():
+			relative_source = options.source.relative_to('/')
+		else:
+			relative_source = options.source
-    logger.info(dict(source=options.source, target=options.source))
+	logger.info(dict(source=options.source, target=options.target))
-    final_target = options.target / relative_source
+	final_target = options.target / relative_source
-    logger.info(dict(final_target=final_target, relative_source=relative_source))
+	logger.info(dict(final_target=final_target, relative_source=relative_source))
-    if final_target.exists():
-        if not options.overwrite:
-            raise NotImplementedError
+	if final_target.exists():
+		if not options.overwrite:
+			raise NotImplementedError
-        if final_target.is_dir():
-            shutil.rmtree(final_target)
-        else:
-            os.unlink(final_target)
+		if final_target.is_dir():
+			shutil.rmtree(final_target)
+		else:
+			os.unlink(final_target)
-    if options.source.is_dir() and not options.recursive:
-        raise NotImplementedError
+	if options.source.is_dir() and not options.recursive:
+		raise NotImplementedError
-    if options.source.is_dir():
-        os.makedirs(final_target,
exist_ok=True,) - else: - os.makedirs(final_target.parent, exist_ok=True,) + if options.source.is_dir(): + os.makedirs( + final_target, + exist_ok=True, + ) + else: + os.makedirs( + final_target.parent, + exist_ok=True, + ) - subprocess.check_call([ - 'cp', '-rp', - str(options.source), - '-T', - str(final_target), - ]) + subprocess.check_call( + [ + 'cp', + '-rp', + str(options.source), + '-T', + str(final_target), + ] + ) - #shutil.copy( - # options.source, - # final_target.parent, - #) + # shutil.copy( + # options.source, + # final_target.parent, + # ) + + logger.info(dict(msg='done')) - logger.info(dict(msg='done')) def backup(argv: list[str]) -> None: - parser = argparse.ArgumentParser() - parser.add_argument( - '-r', '--recipients', - #type=list[str], - action='append', - default=[], - ) - parser.add_argument( - '-o', '--output_dir', - #type=list[str], - required=True, - ) + parser = argparse.ArgumentParser() + parser.add_argument( + '-r', + '--recipients', + # type=list[str], + action='append', + default=[], + ) + parser.add_argument( + '-o', + '--output_dir', + # type=list[str], + required=True, + ) - options, args = parser.parse_known_args(argv) + options, args = parser.parse_known_args(argv) - assert len(options.recipients) > 0 + assert len(options.recipients) > 0 - subprocess.check_call(r''' + subprocess.check_call( + r""" (\ mkdir -p ~/.cache/; \ P=~/.cache/"secrets-$(date -Iseconds).tar.xz.gpg"; \ @@ -3975,204 +3917,199 @@ def backup(argv: list[str]) -> None: ';\ sync; \ ) - '''.replace( - '{GPG_ARGS}', - ' '.join([ - '-r %s' % o - for o in options.recipients - ]), - ).replace( - '{OUTPUT_DIR}', - options.output_dir, - ), shell=True) + """.replace( + '{GPG_ARGS}', + ' '.join(['-r %s' % o for o in options.recipients]), + ).replace( + '{OUTPUT_DIR}', + options.output_dir, + ), + shell=True, + ) class Command(enum.StrEnum): - media = 'media' - status = 'status' - http_server = 'http-server' - pass_ssh_osx = 'pass-ssh-osx' - wl_screenshot = 'wl-screenshot' - chrome = 'chrome' - eternal_oom = 'eternal-oom' - resilient_vlc = 'resilient-vlc' - eternal_firefox = 'eternal-firefox' - install = 'install' - resilient_ethernet = 'resilient-ethernet' - player = 'player' - share_wifi = 'share-wifi' - socat_ssh = 'socat-ssh' - gnome_shortcuts = 'gnome-shortcuts' - sway_sock = 'sway_sock' - loginctl = 'loginctl' - suspend_timer = 'suspend-timer' - desktop_services = 'desktop-services' - pm_service = 'pm-service' - scrap_yt_music = 'scrap-yt-music' - vpn = 'vpn' - backup = 'backup' - pip_resolve = 'pip_resolve' + media = 'media' + status = 'status' + http_server = 'http-server' + pass_ssh_osx = 'pass-ssh-osx' + wl_screenshot = 'wl-screenshot' + chrome = 'chrome' + eternal_oom = 'eternal-oom' + resilient_vlc = 'resilient-vlc' + eternal_firefox = 'eternal-firefox' + install = 'install' + resilient_ethernet = 'resilient-ethernet' + player = 'player' + share_wifi = 'share-wifi' + socat_ssh = 'socat-ssh' + gnome_shortcuts = 'gnome-shortcuts' + sway_sock = 'sway_sock' + loginctl = 'loginctl' + suspend_timer = 'suspend-timer' + desktop_services = 'desktop-services' + pm_service = 'pm-service' + scrap_yt_music = 'scrap-yt-music' + vpn = 'vpn' + backup = 'backup' + pip_resolve = 'pip_resolve' + def pip_resolve( - args: list[str], + args: list[str], ) -> None: - from online.fxreader.pr34.commands_typed.pip import pip_resolve, pip_resolve_t + from online.fxreader.pr34.commands_typed.pip import pip_resolve, pip_resolve_t - parser = argparse.ArgumentParser() - parser.add_argument( - '-m', '--mode', - 
choices=[ - o.value - for o in pip_resolve_t.kwargs_t.mode_t - ], - required=True, - ) - parser.add_argument( - '-r', '--requirement', - default=[], - dest='requirements', - type=str, - action='append', - help=r''' + parser = argparse.ArgumentParser() + parser.add_argument( + '-m', + '--mode', + choices=[o.value for o in pip_resolve_t.kwargs_t.mode_t], + required=True, + ) + parser.add_argument( + '-r', + '--requirement', + default=[], + dest='requirements', + type=str, + action='append', + help=r""" requirement, can be multiple in a single parameter, all of them are to be split by whitespace and printed into a temp file, that is fed into uv pip compile - ''', - ) + """, + ) + + options, argv = parser.parse_known_args(args) + + requirements: Optional[list[str]] = [] + + for o in options.requirements: + requirements.extend(o.split()) + + if len(requirements) == 0: + requirements = None + + options.mode = pip_resolve_t.kwargs_t.mode_t(options.mode) + + resolve_res = pip_resolve( + argv, + mode=options.mode, + requirements=requirements, + ) + + assert not resolve_res.txt is None + + sys.stdout.write(resolve_res.txt) + sys.stdout.flush() - options, argv = parser.parse_known_args(args) +def commands_cli(argv: Optional[list[str]] = None) -> int: + if argv is None: + argv = sys.argv[1:] - requirements : Optional[list[str]] = [] + from online.fxreader.pr34.commands_typed.logging import setup as logging_setup - for o in options.requirements: - requirements.extend(o.split()) + logging_setup() + # logging.getLogger().setLevel(logging.INFO) + # logger.setLevel(logging.INFO) + # handler = logging.StreamHandler(sys.stderr) + # logging.getLogger().addHandler(handler) - if len(requirements) == 0: - requirements = None + msg: Optional[str] = None - options.mode = pip_resolve_t.kwargs_t.mode_t( - options.mode - ) + try: + if len(argv) > 0 and argv[0].startswith('media'): + msg = media_keys(argv).get('msg') + else: + parser = argparse.ArgumentParser( + #'online_fxreader.commands' + ) + parser.add_argument( + '_command', + choices=[o.value for o in Command], + ) - resolve_res = pip_resolve( - argv, - mode=options.mode, - requirements=requirements, - ) + options, args = parser.parse_known_args() + options.command = Command(options._command) - assert not resolve_res.txt is None - - sys.stdout.write(resolve_res.txt) - sys.stdout.flush() - -def commands_cli( - argv: Optional[list[str]] = None -) -> int: - if argv is None: - argv = sys.argv[1:] - - from online.fxreader.pr34.commands_typed.logging import setup as logging_setup - logging_setup() - #logging.getLogger().setLevel(logging.INFO) - #logger.setLevel(logging.INFO) - #handler = logging.StreamHandler(sys.stderr) - #logging.getLogger().addHandler(handler) - - msg : Optional[str] = None - - try: - if len(argv) > 0 and argv[0].startswith('media'): - msg = media_keys(argv).get('msg') - else: - parser = argparse.ArgumentParser( - #'online_fxreader.commands' - ) - parser.add_argument( - '_command', - choices=[ - o.value - for o in Command - ], - ) - - options, args = parser.parse_known_args() - options.command = Command(options._command) - - - if options.command is Command.status: - status(args) - elif options.command is Command.http_server: - http_server(args) - elif options.command is Command.pass_ssh_osx: - pass_ssh_osx(args) - elif options.command is Command.wl_screenshot: - subprocess.check_call(r''' + if options.command is Command.status: + status(args) + elif options.command is Command.http_server: + http_server(args) + elif options.command is 
Command.pass_ssh_osx: + pass_ssh_osx(args) + elif options.command is Command.wl_screenshot: + subprocess.check_call( + r""" grim -g "$(slurp)" - | wl-copy - ''', shell=True) - elif options.command is Command.chrome: - chrome(args) - elif options.command is Command.eternal_oom: - eternal_oom(args) - elif options.command is Command.resilient_vlc: - resilient_vlc(args) - elif options.command is Command.eternal_firefox: - eternal_firefox( - profile=sys.argv[2], - group_name=sys.argv[3], - window_position=json.loads(sys.argv[4]), - debug=json.loads(sys.argv[5]), - tabs=sys.argv[6:], - ) - elif options.command is Command.install: - install(args) - elif options.command is Command.resilient_ethernet: - resilient_ethernet( - ip_addr=sys.argv[2], - ethernet_device=sys.argv[3], - ) - elif options.command is Command.player: - player_v1( - folder_url=sys.argv[2], - item_id=int(sys.argv[3]), - ) - elif options.command is Command.share_wifi: - share_wifi(args) - elif options.command is Command.socat_ssh: - socat_ssh(args) - elif options.command is Command.gnome_shortcuts: - gnome_shortcuts(args) - elif options.command is Command.sway_sock: - print(sway_sock()) - elif options.command is Command.loginctl: - loginctl(args) - elif options.command is Command.suspend_timer: - suspend_timer(args) - elif options.command is Command.desktop_services: - desktop_services(args) - elif options.command is Command.pip_resolve: - pip_resolve(args) - elif options.command is Command.pm_service: - pm_service(args) - elif options.command is Command.backup: - backup(args) - elif options.command is Command.scrap_yt_music: - scrap_yt_music(args) - elif options.command is Command.vpn: - vpn(args) - else: - raise NotImplementedError - except SystemExit: - raise - except Exception: - msg = 'not implemented\n%s' % traceback.format_exc() - logging.error(msg) - finally: - if not msg is None: - custom_notify(msg=msg) + """, + shell=True, + ) + elif options.command is Command.chrome: + chrome(args) + elif options.command is Command.eternal_oom: + eternal_oom(args) + elif options.command is Command.resilient_vlc: + resilient_vlc(args) + elif options.command is Command.eternal_firefox: + eternal_firefox( + profile=sys.argv[2], + group_name=sys.argv[3], + window_position=json.loads(sys.argv[4]), + debug=json.loads(sys.argv[5]), + tabs=sys.argv[6:], + ) + elif options.command is Command.install: + install(args) + elif options.command is Command.resilient_ethernet: + resilient_ethernet( + ip_addr=sys.argv[2], + ethernet_device=sys.argv[3], + ) + elif options.command is Command.player: + player_v1( + folder_url=sys.argv[2], + item_id=int(sys.argv[3]), + ) + elif options.command is Command.share_wifi: + share_wifi(args) + elif options.command is Command.socat_ssh: + socat_ssh(args) + elif options.command is Command.gnome_shortcuts: + gnome_shortcuts(args) + elif options.command is Command.sway_sock: + print(sway_sock()) + elif options.command is Command.loginctl: + loginctl(args) + elif options.command is Command.suspend_timer: + suspend_timer(args) + elif options.command is Command.desktop_services: + desktop_services(args) + elif options.command is Command.pip_resolve: + pip_resolve(args) + elif options.command is Command.pm_service: + pm_service(args) + elif options.command is Command.backup: + backup(args) + elif options.command is Command.scrap_yt_music: + scrap_yt_music(args) + elif options.command is Command.vpn: + vpn(args) + else: + raise NotImplementedError + except SystemExit: + raise + except Exception: + msg = 'not 
implemented\n%s' % traceback.format_exc() + logging.error(msg) + finally: + if not msg is None: + custom_notify(msg=msg) if __name__ == '__main__': - sys.exit(commands_cli()) + sys.exit(commands_cli()) diff --git a/python/online/fxreader/pr34/commands_typed/argparse.py b/python/online/fxreader/pr34/commands_typed/argparse.py index 229cfe0..ee09011 100644 --- a/python/online/fxreader/pr34/commands_typed/argparse.py +++ b/python/online/fxreader/pr34/commands_typed/argparse.py @@ -1,27 +1,28 @@ -__all__ = ( - 'parse_args', -) +__all__ = ('parse_args',) import sys import argparse -from typing import (Optional,) +from typing import ( + Optional, +) + def parse_args( - parser: argparse.ArgumentParser, - args: Optional[list[str]] = None, + parser: argparse.ArgumentParser, + args: Optional[list[str]] = None, ) -> tuple[argparse.Namespace, list[str]]: - if args is None: - args = sys.argv[1:] + if args is None: + args = sys.argv[1:] - argv : list[str] = [] + argv: list[str] = [] - for i, o in enumerate(args): - if o == '--': - argv.extend(args[i + 1:]) + for i, o in enumerate(args): + if o == '--': + argv.extend(args[i + 1 :]) - del args[i:] + del args[i:] - break + break - return parser.parse_args(args), argv + return parser.parse_args(args), argv diff --git a/python/online/fxreader/pr34/commands_typed/asyncio.py b/python/online/fxreader/pr34/commands_typed/asyncio.py index b69718f..9cbd40b 100644 --- a/python/online/fxreader/pr34/commands_typed/asyncio.py +++ b/python/online/fxreader/pr34/commands_typed/asyncio.py @@ -1,14 +1,23 @@ import logging import asyncio -from typing import (Any,) +from typing import ( + Any, +) logger = logging.getLogger(__name__) -def handle_task_result(fut: asyncio.Future[Any]) -> None: - try: - fut.result() - logger.debug(dict(fut=fut, msg='done'), stacklevel=2,) - except: - logger.exception('', stacklevel=2,) +def handle_task_result(fut: asyncio.Future[Any]) -> None: + try: + fut.result() + + logger.debug( + dict(fut=fut, msg='done'), + stacklevel=2, + ) + except: + logger.exception( + '', + stacklevel=2, + ) diff --git a/python/online/fxreader/pr34/commands_typed/cli.py b/python/online/fxreader/pr34/commands_typed/cli.py index b9e1b01..5b1f45c 100644 --- a/python/online/fxreader/pr34/commands_typed/cli.py +++ b/python/online/fxreader/pr34/commands_typed/cli.py @@ -12,467 +12,490 @@ import abc from .os import shutil_which from typing import ( - Optional, - Literal, - Any, + Optional, + Literal, + Any, ) logger = logging.getLogger(__name__) + @dataclasses.dataclass class Project: - source_dir : pathlib.Path - build_dir : pathlib.Path - dest_dir : pathlib.Path - meson_path: Optional[pathlib.Path] = None + source_dir: pathlib.Path + build_dir: pathlib.Path + dest_dir: pathlib.Path + meson_path: Optional[pathlib.Path] = None + @dataclasses.dataclass class Dependency: - name: str - mode : Literal['pyproject', 'meson', 'meson-python', 'm'] - source_path : pathlib.Path - args: Optional[list[str]] = None + name: str + mode: Literal['pyproject', 'meson', 'meson-python', 'm'] + source_path: pathlib.Path + args: Optional[list[str]] = None + @dataclasses.dataclass class DistSettings: - wheel_dir : pathlib.Path - python_path: pathlib.Path - env_path: pathlib.Path + wheel_dir: pathlib.Path + python_path: pathlib.Path + env_path: pathlib.Path + class CLI(abc.ABC): - @property - @abc.abstractmethod - def dist_settings(self) -> DistSettings: - raise NotImplementedError - - @property - @abc.abstractmethod - def projects(self) -> dict[str, Project]: - raise NotImplementedError - - 
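# A concrete CLI only has to fill in the three abstract properties
# (dist_settings, projects, dependencies); the ruff/pyright/meson/deploy
# helpers are inherited. A minimal sketch, with an illustrative layout that
# is not part of this repository:
#
#   class MyCLI(CLI):
#       @property
#       def dist_settings(self) -> DistSettings:
#           return DistSettings(
#               wheel_dir=pathlib.Path('deps/dist'),
#               python_path=pathlib.Path('.venv/bin/python3'),
#               env_path=pathlib.Path('.venv'),
#           )
#
#       @property
#       def projects(self) -> dict[str, Project]:
#           return dict(pr34=Project(
#               source_dir=pathlib.Path('python'),
#               build_dir=pathlib.Path('tmp/build'),
#               dest_dir=pathlib.Path('tmp/install'),
#           ))
#
#       @property
#       def dependencies(self) -> dict[str, Dependency]:
#           return dict()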
@property - @abc.abstractmethod - def dependencies(self) -> dict[str, Dependency]: - raise NotImplementedError - - def mypy( - self, - argv: list[str] - ) -> None: - from . import mypy as _mypy - - _mypy.run( - argv, - ) - - def ruff( - self, - project_name: str, - argv: list[str], - ) -> None: - project = self.projects[project_name] - - if len(argv) == 0: - argv = ['check', '.',] - - subprocess.check_call([ - self.dist_settings.python_path, - '-m', - 'ruff', - '--config', str(project.source_dir / 'pyproject.toml'), - *argv, - ]) - - def pyright( - self, - project_name: str, - argv: list[str], - ) -> None: - project = self.projects[project_name] - - if len(argv) == 0: - argv = ['--threads', '3'] - - cmd = [ - str(self.dist_settings.python_path), - '-m', - 'pyright', - '--pythonpath', str(self.dist_settings.python_path), - '-p', str(project.source_dir / 'pyproject.toml'), - *argv, - ] - - logger.info(cmd) - - subprocess.check_call(cmd) - - def pip_sync( - self, - project: str, - features: list[str], - ) -> None: - from . import cli_bootstrap - - pyproject = cli_bootstrap.pyproject_load( - self.projects[project].source_dir / 'pyproject.toml' - ) - - dependencies = sum([ - pyproject.dependencies[o] - for o in features - ], []) - - pip_find_links : list[pathlib.Path] = [] - - if not pyproject.pip_find_links is None: - pip_find_links.extend(pyproject.pip_find_links) - - - logger.info(dict( - dependencies=dependencies, - )) - - if len(dependencies) > 0: - subprocess.check_call([ - self.dist_settings.python_path, - '-m', - 'uv', 'pip', 'install', - *sum([ - ['-f', str(o),] - for o in pip_find_links - ], []), - # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'), - '--offline', - *dependencies, - ]) - - def deploy_fetch_dist( - self, - force: bool, - ) -> None: - for k, d in self.dependencies.items(): - whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_')) - if len(glob.glob( - str(whl_glob) - )) == 0 or force: - if d.source_path.exists(): - def whl_files_get() -> list[dict[str, Any]]: - return [ - dict( - path=o, - stat=os.stat(o).st_mtime, - ) - for o in glob.glob( - str(whl_glob) - ) - ] - - present_files = whl_files_get() - - if d.mode == 'm': - if (d.source_path / 'm.py').exists(): - cmd = [ - sys.executable, - str(d.source_path / 'm.py'), - 'deploy:wheel', - '-o', - str(self.dist_settings.wheel_dir), - ] - - if not d.args is None: - cmd.extend(d.args) - - subprocess.check_call( - cmd, - cwd=d.source_path, - ) - else: - raise NotImplementedError - - updated_files = whl_files_get() - - def index_get(o: dict[str, Any]) -> tuple[Any, ...]: - return (o['path'], o['stat']) - - present_files_index = { - index_get(o) : o - for o in present_files - } - - new_files : list[dict[str, Any]] = [] - - for o in updated_files: - entry_index = index_get(o) - - if not entry_index in present_files_index: - new_files.append(o) - - if len(new_files) == 0: - raise NotImplementedError - - latest_file = sorted( - new_files, - key=lambda x: x['stat'] - )[-1] - - subprocess.check_call([ - self.dist_settings.python_path, - '-m', 'pip', - 'install', - latest_file['path'], - ]) - - @property - def pkg_config_path(self,) -> set[pathlib.Path]: - return { - pathlib.Path(o) - for o in glob.glob( - str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'), - recursive=True, - ) - } - - def deploy_wheel( - self, - project_name: str, - argv: Optional[list[str]] = None, - output_dir: Optional[pathlib.Path] = None, - force: Optional[bool] = None, - env: Optional[dict[str, str]] 
= None, - mypy: bool = False, - tests: bool = False, - ) -> None: - project = self.projects[project_name] - - # subprocess.check_call([ - # sys.argv[0], - # # sys.executable, - # '-p', options.project, - # Command.meson_setup.value, - # ]) - - if argv is None: - argv = [] - - # assert argv is None or len(argv) == 0 - - if not project.meson_path is None: - if tests: - self.meson_test( - project_name=project_name, - ) - - self.meson_install( - project_name=project_name, - force=force, - ) - - if mypy: - self.mypy([]) - - if env is None: - env = dict() - - extra_args: list[str] = [] - - if len(self.third_party_roots) > 0: - extra_args.extend([ - '-Csetup-args=%s' % ( - '-Dthird_party_roots=%s' % str(o.absolute()) - ) - for o in self.third_party_roots - ]) - - cmd = [ - sys.executable, - '-m', - 'build', - '-w', '-n', - *extra_args, - '-Csetup-args=-Dmodes=pyproject', - '-Cbuild-dir=%s' % str(project.build_dir / 'pyproject'), - '-Csetup-args=-Dinstall_path=%s' % str(project.dest_dir), - # '-Cbuild-dir=%s' % str(project.build_dir), - str(project.source_dir), - *argv, - ] - - if not output_dir is None: - cmd.extend(['-o', str(output_dir)]) - - logger.info(dict(env=env)) - - subprocess.check_call( - cmd, - env=dict(list(os.environ.items())) | env, - ) - - if not project.meson_path is None: - if tests: - subprocess.check_call( - [ - 'ninja', - '-C', - str(project.build_dir / 'pyproject'), - 'test', - ] - ) - - def meson_install( - self, - project_name: str, - force: Optional[bool] = None, - argv: Optional[list[str]] = None, - ) -> None: - project = self.projects[project_name] - - if force is None: - force = False - - if argv is None: - argv = [] - - if force and project.dest_dir.exists(): - shutil.rmtree(project.dest_dir) - - subprocess.check_call([ - shutil_which('meson', True,), - 'install', - '-C', - project.build_dir / 'meson', - '--destdir', project.dest_dir, - *argv, - ]) - - for o in glob.glob( - str(project.dest_dir / 'lib' / 'pkgconfig' / '*.pc'), - recursive=True, - ): - logger.info(dict( - path=o, - action='patch prefix', - )) - - with io.open(o, 'r') as f: - content = f.read() - - with io.open(o, 'w') as f: - f.write( - content.replace('prefix=/', 'prefix=${pcfiledir}/../../') - ) - def ninja( - self, - project_name: str, - argv: Optional[list[str]] = None, - env: Optional[dict[str, str]] = None, - ) -> None: - project = self.projects[project_name] - - if argv is None: - argv = [] - - if env is None: - env = dict() - - logger.info(dict(env=env)) - - subprocess.check_call( - [ - shutil_which('ninja', True), - '-C', - str(project.build_dir / 'meson'), - *argv, - ], - env=dict(list(os.environ.items())) | env, - ) - - def meson_test( - self, - project_name: str, - argv: Optional[list[str]] = None, - ) -> None: - project = self.projects[project_name] - - if argv is None: - argv = [] - - subprocess.check_call([ - shutil_which('meson', True,), - 'test', - '-C', - project.build_dir / 'meson', - *argv, - ]) - - - def meson_compile( - self, - project_name: str, - argv: Optional[list[str]] = None, - ) -> None: - project = self.projects[project_name] - - if argv is None: - argv = [] - - subprocess.check_call([ - shutil_which('meson', True,), - 'compile', - '-C', - project.build_dir / 'meson', - *argv, - ]) - - @property - def third_party_roots(self) -> list[pathlib.Path]: - return [] - - def meson_setup( - self, - project_name: str, - force: bool, - argv: Optional[list[str]] = None, - env: Optional[dict[str, str]] = None, - # third_party_roots: Optional[list[pathlib.Path]] = None, - ) -> None: - 
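# meson_setup(): with force=True it removes <build_dir>/meson before
# configuring, then invokes `meson setup` with '-Dmodes=["meson"]' plus
# '-Dprefix=/'; meson_install() later rewrites the generated pkg-config
# files so the prefix stays relocatable. A hypothetical call, assuming a
# registered 'pr34' project on a `cli` instance:
#
#   cli.meson_setup(project_name='pr34', force=True, argv=['-Dbuildtype=release'])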
project = self.projects[project_name] - - if argv is None: - argv = [] - - if env is None: - env = dict() - - logger.info(dict(env=env)) - - if force: - if (project.build_dir / 'meson').exists(): - logger.info(dict(action='removing build dir', path=project.build_dir / 'meson')) - shutil.rmtree(project.build_dir / 'meson') - - extra_args : list[str] = [] - - if len(self.third_party_roots) > 0: - extra_args.extend([ - '-Dthird_party_roots=%s' % str(o.absolute()) - for o in self.third_party_roots - ]) - - cmd = [ - shutil_which('meson', True,), - 'setup', - str(project.source_dir), - str(project.build_dir / 'meson'), - '-Dmodes=["meson"]', - *extra_args, - # '-Dpkgconfig.relocatable=true', - '-Dprefix=/', - *argv, - ] - - logger.info(dict(cmd=cmd)) - - subprocess.check_call( - cmd, - env=dict(list(os.environ.items())) | env, - ) + @property + @abc.abstractmethod + def dist_settings(self) -> DistSettings: + raise NotImplementedError + + @property + @abc.abstractmethod + def projects(self) -> dict[str, Project]: + raise NotImplementedError + + @property + @abc.abstractmethod + def dependencies(self) -> dict[str, Dependency]: + raise NotImplementedError + + def mypy(self, argv: list[str]) -> None: + from . import mypy as _mypy + + _mypy.run( + argv, + ) + + def ruff( + self, + project_name: str, + argv: list[str], + ) -> None: + project = self.projects[project_name] + + if len(argv) == 0: + argv = [ + 'check', + '.', + ] + + subprocess.check_call( + [ + self.dist_settings.python_path, + '-m', + 'ruff', + '--config', + str(project.source_dir / 'pyproject.toml'), + *argv, + ] + ) + + def pyright( + self, + project_name: str, + argv: list[str], + ) -> None: + project = self.projects[project_name] + + if len(argv) == 0: + argv = ['--threads', '3'] + + cmd = [ + str(self.dist_settings.python_path), + '-m', + 'pyright', + '--pythonpath', + str(self.dist_settings.python_path), + '-p', + str(project.source_dir / 'pyproject.toml'), + *argv, + ] + + logger.info(cmd) + + subprocess.check_call(cmd) + + def pip_sync( + self, + project: str, + features: list[str], + ) -> None: + from . 
import cli_bootstrap + + pyproject = cli_bootstrap.pyproject_load(self.projects[project].source_dir / 'pyproject.toml') + + dependencies = sum([pyproject.dependencies[o] for o in features], []) + + pip_find_links: list[pathlib.Path] = [] + + if not pyproject.pip_find_links is None: + pip_find_links.extend(pyproject.pip_find_links) + + logger.info( + dict( + dependencies=dependencies, + ) + ) + + if len(dependencies) > 0: + subprocess.check_call( + [ + self.dist_settings.python_path, + '-m', + 'uv', + 'pip', + 'install', + *sum( + [ + [ + '-f', + str(o), + ] + for o in pip_find_links + ], + [], + ), + # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'), + '--offline', + *dependencies, + ] + ) + + def deploy_fetch_dist( + self, + force: bool, + ) -> None: + for k, d in self.dependencies.items(): + whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_')) + if len(glob.glob(str(whl_glob))) == 0 or force: + if d.source_path.exists(): + + def whl_files_get() -> list[dict[str, Any]]: + return [ + dict( + path=o, + stat=os.stat(o).st_mtime, + ) + for o in glob.glob(str(whl_glob)) + ] + + present_files = whl_files_get() + + if d.mode == 'm': + if (d.source_path / 'm.py').exists(): + cmd = [ + sys.executable, + str(d.source_path / 'm.py'), + 'deploy:wheel', + '-o', + str(self.dist_settings.wheel_dir), + ] + + if not d.args is None: + cmd.extend(d.args) + + subprocess.check_call( + cmd, + cwd=d.source_path, + ) + else: + raise NotImplementedError + + updated_files = whl_files_get() + + def index_get(o: dict[str, Any]) -> tuple[Any, ...]: + return (o['path'], o['stat']) + + present_files_index = {index_get(o): o for o in present_files} + + new_files: list[dict[str, Any]] = [] + + for o in updated_files: + entry_index = index_get(o) + + if not entry_index in present_files_index: + new_files.append(o) + + if len(new_files) == 0: + raise NotImplementedError + + latest_file = sorted(new_files, key=lambda x: x['stat'])[-1] + + subprocess.check_call( + [ + self.dist_settings.python_path, + '-m', + 'pip', + 'install', + latest_file['path'], + ] + ) + + @property + def pkg_config_path( + self, + ) -> set[pathlib.Path]: + return { + pathlib.Path(o) + for o in glob.glob( + str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'), + recursive=True, + ) + } + + def deploy_wheel( + self, + project_name: str, + argv: Optional[list[str]] = None, + output_dir: Optional[pathlib.Path] = None, + force: Optional[bool] = None, + env: Optional[dict[str, str]] = None, + mypy: bool = False, + tests: bool = False, + ) -> None: + project = self.projects[project_name] + + # subprocess.check_call([ + # sys.argv[0], + # # sys.executable, + # '-p', options.project, + # Command.meson_setup.value, + # ]) + + if argv is None: + argv = [] + + # assert argv is None or len(argv) == 0 + + if not project.meson_path is None: + if tests: + self.meson_test( + project_name=project_name, + ) + + self.meson_install( + project_name=project_name, + force=force, + ) + + if mypy: + self.mypy([]) + + if env is None: + env = dict() + + extra_args: list[str] = [] + + if len(self.third_party_roots) > 0: + extra_args.extend(['-Csetup-args=%s' % ('-Dthird_party_roots=%s' % str(o.absolute())) for o in self.third_party_roots]) + + cmd = [ + sys.executable, + '-m', + 'build', + '-w', + '-n', + *extra_args, + '-Csetup-args=-Dmodes=pyproject', + '-Cbuild-dir=%s' % str(project.build_dir / 'pyproject'), + '-Csetup-args=-Dinstall_path=%s' % str(project.dest_dir), + # '-Cbuild-dir=%s' % str(project.build_dir), + 
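# Each '-Csetup-args=...' element in this command is a config-setting that
# `python -m build` forwards to the configured PEP 517 backend
# (meson-python here), which relays it to `meson setup`; '-w' requests a
# wheel and '-n' disables build isolation so the already-bootstrapped
# environment is reused.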
str(project.source_dir), + *argv, + ] + + if not output_dir is None: + cmd.extend(['-o', str(output_dir)]) + + logger.info(dict(env=env)) + + subprocess.check_call( + cmd, + env=dict(list(os.environ.items())) | env, + ) + + if not project.meson_path is None: + if tests: + subprocess.check_call( + [ + 'ninja', + '-C', + str(project.build_dir / 'pyproject'), + 'test', + ] + ) + + def meson_install( + self, + project_name: str, + force: Optional[bool] = None, + argv: Optional[list[str]] = None, + ) -> None: + project = self.projects[project_name] + + if force is None: + force = False + + if argv is None: + argv = [] + + if force and project.dest_dir.exists(): + shutil.rmtree(project.dest_dir) + + subprocess.check_call( + [ + shutil_which( + 'meson', + True, + ), + 'install', + '-C', + project.build_dir / 'meson', + '--destdir', + project.dest_dir, + *argv, + ] + ) + + for o in glob.glob( + str(project.dest_dir / 'lib' / 'pkgconfig' / '*.pc'), + recursive=True, + ): + logger.info( + dict( + path=o, + action='patch prefix', + ) + ) + + with io.open(o, 'r') as f: + content = f.read() + + with io.open(o, 'w') as f: + f.write(content.replace('prefix=/', 'prefix=${pcfiledir}/../../')) + + def ninja( + self, + project_name: str, + argv: Optional[list[str]] = None, + env: Optional[dict[str, str]] = None, + ) -> None: + project = self.projects[project_name] + + if argv is None: + argv = [] + + if env is None: + env = dict() + + logger.info(dict(env=env)) + + subprocess.check_call( + [ + shutil_which('ninja', True), + '-C', + str(project.build_dir / 'meson'), + *argv, + ], + env=dict(list(os.environ.items())) | env, + ) + + def meson_test( + self, + project_name: str, + argv: Optional[list[str]] = None, + ) -> None: + project = self.projects[project_name] + + if argv is None: + argv = [] + + subprocess.check_call( + [ + shutil_which( + 'meson', + True, + ), + 'test', + '-C', + project.build_dir / 'meson', + *argv, + ] + ) + + def meson_compile( + self, + project_name: str, + argv: Optional[list[str]] = None, + ) -> None: + project = self.projects[project_name] + + if argv is None: + argv = [] + + subprocess.check_call( + [ + shutil_which( + 'meson', + True, + ), + 'compile', + '-C', + project.build_dir / 'meson', + *argv, + ] + ) + + @property + def third_party_roots(self) -> list[pathlib.Path]: + return [] + + def meson_setup( + self, + project_name: str, + force: bool, + argv: Optional[list[str]] = None, + env: Optional[dict[str, str]] = None, + # third_party_roots: Optional[list[pathlib.Path]] = None, + ) -> None: + project = self.projects[project_name] + + if argv is None: + argv = [] + + if env is None: + env = dict() + + logger.info(dict(env=env)) + + if force: + if (project.build_dir / 'meson').exists(): + logger.info(dict(action='removing build dir', path=project.build_dir / 'meson')) + shutil.rmtree(project.build_dir / 'meson') + + extra_args: list[str] = [] + + if len(self.third_party_roots) > 0: + extra_args.extend(['-Dthird_party_roots=%s' % str(o.absolute()) for o in self.third_party_roots]) + + cmd = [ + shutil_which( + 'meson', + True, + ), + 'setup', + str(project.source_dir), + str(project.build_dir / 'meson'), + '-Dmodes=["meson"]', + *extra_args, + # '-Dpkgconfig.relocatable=true', + '-Dprefix=/', + *argv, + ] + + logger.info(dict(cmd=cmd)) + + subprocess.check_call( + cmd, + env=dict(list(os.environ.items())) | env, + ) diff --git a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py index 7ac5eb5..f282d75 100644 
--- a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py +++ b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py @@ -10,327 +10,324 @@ import os import logging -from typing import (Optional, Any,) +from typing import ( + Optional, + Any, +) from typing_extensions import ( - Self, BinaryIO, + Self, + BinaryIO, ) logger = logging.getLogger(__name__) + def toml_load(f: BinaryIO) -> Any: - try: - import tomllib - return tomllib.load(f) - except: - pass + try: + import tomllib - try: - import tomli - return tomli.load(f) - except: - pass + return tomllib.load(f) + except: + pass + + try: + import tomli + + return tomli.load(f) + except: + pass + + raise NotImplementedError - raise NotImplementedError @dataclasses.dataclass class PyProject: - path: pathlib.Path - dependencies: dict[str, list[str]] - early_features: Optional[list[str]] = None - pip_find_links: Optional[list[pathlib.Path]] = None - runtime_libdirs: Optional[list[pathlib.Path]] = None - runtime_preload: Optional[list[pathlib.Path]] = None - requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda : dict()) + path: pathlib.Path + dependencies: dict[str, list[str]] + early_features: Optional[list[str]] = None + pip_find_links: Optional[list[pathlib.Path]] = None + runtime_libdirs: Optional[list[pathlib.Path]] = None + runtime_preload: Optional[list[pathlib.Path]] = None + requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict()) + def pyproject_load( - d: pathlib.Path, + d: pathlib.Path, ) -> PyProject: - with io.open(d, 'rb') as f: - content = toml_load(f) + with io.open(d, 'rb') as f: + content = toml_load(f) - assert isinstance(content, dict) + assert isinstance(content, dict) - dependencies : dict[str, list[str]] = dict() + dependencies: dict[str, list[str]] = dict() - dependencies['default'] = content['project']['dependencies'] + dependencies['default'] = content['project']['dependencies'] - if ( - 'optional-dependencies' in content['project'] - ): - assert isinstance( - content['project']['optional-dependencies'], - dict - ) + if 'optional-dependencies' in content['project']: + assert isinstance(content['project']['optional-dependencies'], dict) - for k, v in content['project']['optional-dependencies'].items(): - assert isinstance(v, list) - assert isinstance(k, str) + for k, v in content['project']['optional-dependencies'].items(): + assert isinstance(v, list) + assert isinstance(k, str) - dependencies[k] = v + dependencies[k] = v + res = PyProject( + path=d, + dependencies=dependencies, + ) - res = PyProject( - path=d, - dependencies=dependencies, - ) + tool_name = 'online.fxreader.pr34'.replace('.', '-') - tool_name = 'online.fxreader.pr34'.replace('.', '-') + if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict): + if 'early_features' in content['tool'][tool_name]: + res.early_features = content['tool'][tool_name]['early_features'] - if ( - 'tool' in content and - isinstance( - content['tool'], dict - ) and - tool_name in content['tool'] and - isinstance( - content['tool'][tool_name], - dict - ) - ): - if 'early_features' in content['tool'][tool_name]: - res.early_features = content['tool'][tool_name]['early_features'] + if 'pip_find_links' in content['tool'][tool_name]: + res.pip_find_links = [d.parent / pathlib.Path(o) for o in content['tool'][tool_name]['pip_find_links']] - if 'pip_find_links' in content['tool'][tool_name]: - res.pip_find_links = [ - d.parent / 
pathlib.Path(o) - for o in content['tool'][tool_name]['pip_find_links'] - ] + if 'runtime_libdirs' in content['tool'][tool_name]: + res.runtime_libdirs = [ + d.parent / pathlib.Path(o) + # pathlib.Path(o) + for o in content['tool'][tool_name]['runtime_libdirs'] + ] - if 'runtime_libdirs' in content['tool'][tool_name]: - res.runtime_libdirs = [ - d.parent / pathlib.Path(o) - # pathlib.Path(o) - for o in content['tool'][tool_name]['runtime_libdirs'] - ] + if 'runtime_preload' in content['tool'][tool_name]: + res.runtime_preload = [ + d.parent / pathlib.Path(o) + # pathlib.Path(o) + for o in content['tool'][tool_name]['runtime_preload'] + ] - if 'runtime_preload' in content['tool'][tool_name]: - res.runtime_preload = [ - d.parent / pathlib.Path(o) - # pathlib.Path(o) - for o in content['tool'][tool_name]['runtime_preload'] - ] + if 'requirements' in content['tool'][tool_name]: + assert isinstance(content['tool'][tool_name]['requirements'], dict) - if 'requirements' in content['tool'][tool_name]: - assert isinstance(content['tool'][tool_name]['requirements'], dict) + res.requirements = { + k: d.parent / pathlib.Path(v) + # pathlib.Path(o) + for k, v in content['tool'][tool_name]['requirements'].items() + } - res.requirements = { - k : d.parent / pathlib.Path(v) - # pathlib.Path(o) - for k, v in content['tool'][tool_name]['requirements'].items() - } + return res - return res @dataclasses.dataclass class BootstrapSettings: - env_path: pathlib.Path - python_path: pathlib.Path - base_dir: pathlib.Path - python_version: Optional[str] = dataclasses.field( - default_factory=lambda : os.environ.get( - 'PYTHON_VERSION', - '%d.%d' % ( - sys.version_info.major, - sys.version_info.minor, - ), - ).strip() - ) - uv_args: list[str] = dataclasses.field( - default_factory=lambda : os.environ.get( - 'UV_ARGS', - '--offline', - ).split(), - ) + env_path: pathlib.Path + python_path: pathlib.Path + base_dir: pathlib.Path + python_version: Optional[str] = dataclasses.field( + default_factory=lambda: os.environ.get( + 'PYTHON_VERSION', + '%d.%d' + % ( + sys.version_info.major, + sys.version_info.minor, + ), + ).strip() + ) + uv_args: list[str] = dataclasses.field( + default_factory=lambda: os.environ.get( + 'UV_ARGS', + '--offline', + ).split(), + ) - @classmethod - def get( - cls, - base_dir: Optional[pathlib.Path] = None, - ) -> Self: - if base_dir is None: - base_dir = pathlib.Path.cwd() + @classmethod + def get( + cls, + base_dir: Optional[pathlib.Path] = None, + ) -> Self: + if base_dir is None: + base_dir = pathlib.Path.cwd() - env_path = base_dir / '.venv' - python_path = env_path / 'bin' / 'python3' + env_path = base_dir / '.venv' + python_path = env_path / 'bin' / 'python3' + + return cls( + base_dir=base_dir, + env_path=env_path, + python_path=python_path, + ) - return cls( - base_dir=base_dir, - env_path=env_path, - python_path=python_path, - ) def env_bootstrap( - bootstrap_settings: BootstrapSettings, - pyproject: PyProject, + bootstrap_settings: BootstrapSettings, + pyproject: PyProject, ) -> None: - pip_find_links : list[pathlib.Path] = [] + pip_find_links: list[pathlib.Path] = [] - if not pyproject.pip_find_links is None: - pip_find_links.extend(pyproject.pip_find_links) + if not pyproject.pip_find_links is None: + pip_find_links.extend(pyproject.pip_find_links) - pip_find_links_args = sum([ - ['-f', str(o),] - for o in pip_find_links - ], []) + pip_find_links_args = sum( + [ + [ + '-f', + str(o), + ] + for o in pip_find_links + ], + [], + ) - features : list[str] = [] + features: list[str] = [] - if 
pyproject.early_features: - features.extend(pyproject.early_features) + if pyproject.early_features: + features.extend(pyproject.early_features) - requirements_python_version: Optional[str] = None - if not bootstrap_settings.python_version is None: - requirements_python_version = bootstrap_settings.python_version.replace('.', '_') + requirements_python_version: Optional[str] = None + if not bootstrap_settings.python_version is None: + requirements_python_version = bootstrap_settings.python_version.replace('.', '_') + + requirements_name = '_'.join(sorted(features)) + + if requirements_python_version: + requirements_name += '_' + requirements_python_version + + requirements_path: Optional[pathlib.Path] = None + + if requirements_name in pyproject.requirements: + requirements_path = pyproject.requirements[requirements_name] + else: + requirements_path = pyproject.path.parent / 'requirements.txt' + + requirements_in: list[str] = [] + + requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']) + + if pyproject.early_features: + early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], []) + + logger.info( + dict( + requirements_name=requirements_name, + early_dependencies=early_dependencies, + ) + ) + + requirements_in.extend(early_dependencies) + # if len(early_dependencies) > 0: + # subprocess.check_call([ + # bootstrap_settings.python_path, + # '-m', + # 'uv', 'pip', 'install', + # *pip_find_links_args, + # # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'), + # *bootstrap_settings.uv_args, + # *early_dependencies, + # ]) + + if not requirements_path.exists(): + with tempfile.NamedTemporaryFile( + mode='w', + prefix='requirements', + suffix='.in', + ) as f: + f.write('\n'.join(requirements_in)) + f.flush() + + subprocess.check_call( + [ + 'uv', + 'pip', + 'compile', + '--generate-hashes', + *pip_find_links_args, + # '-p', + # bootstrap_settings.python_path, + *bootstrap_settings.uv_args, + '-o', + str(requirements_path), + f.name, + ] + ) + + uv_python_version: list[str] = [] + + if not bootstrap_settings.python_version is None: + uv_python_version.extend( + [ + '-p', + bootstrap_settings.python_version, + ] + ) + + subprocess.check_call( + [ + 'uv', + 'venv', + *uv_python_version, + *pip_find_links_args, + # '--seed', + *bootstrap_settings.uv_args, + str(bootstrap_settings.env_path), + ] + ) + + subprocess.check_call( + [ + 'uv', + 'pip', + 'install', + *pip_find_links_args, + '-p', + bootstrap_settings.python_path, + '--require-hashes', + *bootstrap_settings.uv_args, + '-r', + str(requirements_path), + ] + ) - requirements_name = '_'.join(sorted(features)) +def paths_equal(a: pathlib.Path | str, b: pathlib.Path | str) -> bool: + return os.path.abspath(str(a)) == os.path.abspath(str(b)) - if requirements_python_version: - requirements_name += '_' + requirements_python_version - - requirements_path : Optional[pathlib.Path] = None - - if requirements_name in pyproject.requirements: - requirements_path = pyproject.requirements[requirements_name] - else: - requirements_path = pyproject.path.parent / 'requirements.txt' - - requirements_in : list[str] = [] - - requirements_in.extend([ - 'uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11' - ]) - - if pyproject.early_features: - early_dependencies = sum([ - pyproject.dependencies[o] - for o in pyproject.early_features - ], []) - - logger.info(dict( - requirements_name=requirements_name, - early_dependencies=early_dependencies, - )) - - 
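# The lock-file key is derived from the sorted early-feature names plus the
# Python version, then looked up in [tool.online-fxreader-pr34.requirements];
# when no entry matches, requirements.txt next to pyproject.toml is used,
# and a missing file is generated through `uv pip compile --generate-hashes`.
# The derivation, under assumed feature names ['early', 'lint'] on CPython
# 3.12:
#
#   name = '_'.join(sorted(['early', 'lint']))   # 'early_lint'
#   name += '_' + '3.12'.replace('.', '_')       # 'early_lint_3_12'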
requirements_in.extend(early_dependencies) - # if len(early_dependencies) > 0: - # subprocess.check_call([ - # bootstrap_settings.python_path, - # '-m', - # 'uv', 'pip', 'install', - # *pip_find_links_args, - # # '-f', str(pathlib.Path(__file__).parent / 'deps' / 'dist'), - # *bootstrap_settings.uv_args, - # *early_dependencies, - # ]) - - if not requirements_path.exists(): - with tempfile.NamedTemporaryFile( - mode='w', - prefix='requirements', - suffix='.in', - ) as f: - f.write( - '\n'.join(requirements_in) - ) - f.flush() - - subprocess.check_call([ - 'uv', - 'pip', - 'compile', - '--generate-hashes', - *pip_find_links_args, - # '-p', - # bootstrap_settings.python_path, - *bootstrap_settings.uv_args, - '-o', str(requirements_path), - f.name, - ]) - - uv_python_version: list[str] = [] - - if not bootstrap_settings.python_version is None: - uv_python_version.extend([ - '-p', bootstrap_settings.python_version, - ]) - - subprocess.check_call([ - 'uv', 'venv', - *uv_python_version, - *pip_find_links_args, - # '--seed', - *bootstrap_settings.uv_args, - str(bootstrap_settings.env_path) - ]) - - subprocess.check_call([ - 'uv', - 'pip', - 'install', - *pip_find_links_args, - '-p', - bootstrap_settings.python_path, - '--require-hashes', - *bootstrap_settings.uv_args, - '-r', str(requirements_path), - ]) - - -def paths_equal( - a: pathlib.Path | str, - b: pathlib.Path | str -) -> bool: - return ( - os.path.abspath(str(a)) == - os.path.abspath(str(b)) - ) def run( - d: Optional[pathlib.Path] = None, - cli_path: Optional[pathlib.Path] = None, + d: Optional[pathlib.Path] = None, + cli_path: Optional[pathlib.Path] = None, ) -> None: - if cli_path is None: - cli_path = pathlib.Path(__file__).parent / 'cli.py' + if cli_path is None: + cli_path = pathlib.Path(__file__).parent / 'cli.py' - if d is None: - d = pathlib.Path(__file__).parent / 'pyproject.toml' + if d is None: + d = pathlib.Path(__file__).parent / 'pyproject.toml' - bootstrap_settings = BootstrapSettings.get() + bootstrap_settings = BootstrapSettings.get() - pyproject : PyProject = pyproject_load( - d - ) + pyproject: PyProject = pyproject_load(d) - logging.basicConfig(level=logging.INFO) + logging.basicConfig(level=logging.INFO) - if not bootstrap_settings.env_path.exists(): - env_bootstrap( - bootstrap_settings=bootstrap_settings, - pyproject=pyproject, - ) + if not bootstrap_settings.env_path.exists(): + env_bootstrap( + bootstrap_settings=bootstrap_settings, + pyproject=pyproject, + ) - logger.info([sys.executable, sys.argv, bootstrap_settings.python_path]) + logger.info([sys.executable, sys.argv, bootstrap_settings.python_path]) - if not paths_equal(sys.executable, bootstrap_settings.python_path): - os.execv( - str(bootstrap_settings.python_path), - [ - str(bootstrap_settings.python_path), - *sys.argv, - ] - ) + if not paths_equal(sys.executable, bootstrap_settings.python_path): + os.execv( + str(bootstrap_settings.python_path), + [ + str(bootstrap_settings.python_path), + *sys.argv, + ], + ) + + os.execv( + str(bootstrap_settings.python_path), + [ + str(bootstrap_settings.python_path), + str(cli_path), + *sys.argv[1:], + ], + ) - os.execv( - str(bootstrap_settings.python_path), - [ - str(bootstrap_settings.python_path), - str( - cli_path - ), - *sys.argv[1:], - ] - ) if __name__ == '__main__': - run() + run() diff --git a/python/online/fxreader/pr34/commands_typed/crypto.py b/python/online/fxreader/pr34/commands_typed/crypto.py index 827fb0d..9efbd8c 100644 --- a/python/online/fxreader/pr34/commands_typed/crypto.py +++ 
b/python/online/fxreader/pr34/commands_typed/crypto.py @@ -3,88 +3,95 @@ import os import cryptography.hazmat.primitives.kdf.scrypt -from typing import (Literal, overload, Optional,) +from typing import ( + Literal, + overload, + Optional, +) + class PasswordUtils: - @overload - @classmethod - def secret_hash( - cls, - secret: str | bytes, - mode: Literal['base64'], - salt: Optional[bytes] = None, - ) -> tuple[str, str]: ... + @overload + @classmethod + def secret_hash( + cls, + secret: str | bytes, + mode: Literal['base64'], + salt: Optional[bytes] = None, + ) -> tuple[str, str]: ... - @overload - @classmethod - def secret_hash( - cls, - secret: str | bytes, - mode: Literal['bytes'], - salt: Optional[bytes] = None, - ) -> tuple[bytes, bytes]: ... + @overload + @classmethod + def secret_hash( + cls, + secret: str | bytes, + mode: Literal['bytes'], + salt: Optional[bytes] = None, + ) -> tuple[bytes, bytes]: ... - @classmethod - def secret_hash( - cls, - secret: str | bytes, - mode: Literal['bytes', 'base64'], - salt: Optional[bytes] = None, - ) -> tuple[str, str] | tuple[bytes, bytes]: - if salt is None: - salt = os.urandom(16) + @classmethod + def secret_hash( + cls, + secret: str | bytes, + mode: Literal['bytes', 'base64'], + salt: Optional[bytes] = None, + ) -> tuple[str, str] | tuple[bytes, bytes]: + if salt is None: + salt = os.urandom(16) - if isinstance(secret, str): - secret = secret.encode('utf-8') - # derive - kdf = cls._scrypt_init(salt=salt) + if isinstance(secret, str): + secret = secret.encode('utf-8') + # derive + kdf = cls._scrypt_init(salt=salt) - hashed_secret = kdf.derive(secret) + hashed_secret = kdf.derive(secret) - if mode == 'bytes': - return (salt, hashed_secret) - elif mode == 'base64': - res_tuple = tuple(( - base64.b64encode(o).decode('utf-8') - for o in (salt, hashed_secret,) - )) - return (res_tuple[0], res_tuple[1]) - else: - raise NotImplementedError + if mode == 'bytes': + return (salt, hashed_secret) + elif mode == 'base64': + res_tuple = tuple( + ( + base64.b64encode(o).decode('utf-8') + for o in ( + salt, + hashed_secret, + ) + ) + ) + return (res_tuple[0], res_tuple[1]) + else: + raise NotImplementedError - @classmethod - def _scrypt_init( - cls, - salt: bytes - ) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt: - return cryptography.hazmat.primitives.kdf.scrypt.Scrypt( - salt=salt, - length=32, - n=2**14, - r=8, - p=1, - ) + @classmethod + def _scrypt_init(cls, salt: bytes) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt: + return cryptography.hazmat.primitives.kdf.scrypt.Scrypt( + salt=salt, + length=32, + n=2**14, + r=8, + p=1, + ) - @classmethod - def secret_check( - cls, - secret: str | bytes, - salt: str | bytes, - hashed_secret: str | bytes, - ) -> bool: - if isinstance(salt, str): - salt = base64.b64decode(salt) + @classmethod + def secret_check( + cls, + secret: str | bytes, + salt: str | bytes, + hashed_secret: str | bytes, + ) -> bool: + if isinstance(salt, str): + salt = base64.b64decode(salt) - if isinstance(secret, str): - secret = secret.encode('utf-8') + if isinstance(secret, str): + secret = secret.encode('utf-8') - if isinstance(hashed_secret, str): - hashed_secret = base64.b64decode(hashed_secret) + if isinstance(hashed_secret, str): + hashed_secret = base64.b64decode(hashed_secret) - kdf = cls._scrypt_init(salt=salt) + kdf = cls._scrypt_init(salt=salt) - try: - kdf.verify(secret, hashed_secret) - return True - except cryptography.exceptions.InvalidKey: - return False + try: + kdf.verify(secret, hashed_secret) + return True + 
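# secret_hash() and secret_check() round-trip as follows (a sketch; the
# literal secret is illustrative only):
#
#   salt, hashed = PasswordUtils.secret_hash('hunter2', mode='base64')
#   assert PasswordUtils.secret_check('hunter2', salt, hashed)
#   assert not PasswordUtils.secret_check('wrong', salt, hashed)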
except cryptography.exceptions.InvalidKey: + return False diff --git a/python/online/fxreader/pr34/commands_typed/debug.py b/python/online/fxreader/pr34/commands_typed/debug.py index 045407e..0f37e4f 100644 --- a/python/online/fxreader/pr34/commands_typed/debug.py +++ b/python/online/fxreader/pr34/commands_typed/debug.py @@ -1,35 +1,39 @@ import os import logging -from typing import (Optional,) +from typing import ( + Optional, +) logger = logging.getLogger(__name__) + class DebugPy: - @classmethod - def set_trace( - cls, - host: Optional[str] = None, - port: Optional[int] = None, - wait: Optional[bool] = None, - ) -> None: - if host is None: - host = '127.0.0.1' - if port is None: - port = 4444 - if wait is None: - wait = True + @classmethod + def set_trace( + cls, + host: Optional[str] = None, + port: Optional[int] = None, + wait: Optional[bool] = None, + ) -> None: + if host is None: + host = '127.0.0.1' + if port is None: + port = 4444 + if wait is None: + wait = True - import debugpy + import debugpy - if os.environ.get('DEBUGPY_RUNNING') != 'true': - logger.info('debugpy init') - import debugpy - debugpy.listen((host, port)) - os.environ['DEBUGPY_RUNNING'] = 'true' + if os.environ.get('DEBUGPY_RUNNING') != 'true': + logger.info('debugpy init') + import debugpy - if wait: - debugpy.wait_for_client() - debugpy.breakpoint() + debugpy.listen((host, port)) + os.environ['DEBUGPY_RUNNING'] = 'true' - logger.info('debugpy done') \ No newline at end of file + if wait: + debugpy.wait_for_client() + debugpy.breakpoint() + + logger.info('debugpy done') diff --git a/python/online/fxreader/pr34/commands_typed/logging.py b/python/online/fxreader/pr34/commands_typed/logging.py index eb76c6d..afdd4c1 100644 --- a/python/online/fxreader/pr34/commands_typed/logging.py +++ b/python/online/fxreader/pr34/commands_typed/logging.py @@ -1,16 +1,14 @@ import logging -from typing import (Optional,) +from typing import ( + Optional, +) + def setup(level: Optional[int] = None) -> None: - if level is None: - level = logging.INFO + if level is None: + level = logging.INFO - logging.basicConfig( - level=level, - format=( - '%(levelname)s:%(name)s:%(message)s' - ':%(process)d' - ':%(asctime)s' - ':%(pathname)s:%(funcName)s:%(lineno)s' - ), - ) \ No newline at end of file + logging.basicConfig( + level=level, + format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'), + ) diff --git a/python/online/fxreader/pr34/commands_typed/mypy.py b/python/online/fxreader/pr34/commands_typed/mypy.py index 9abd9eb..6defea2 100644 --- a/python/online/fxreader/pr34/commands_typed/mypy.py +++ b/python/online/fxreader/pr34/commands_typed/mypy.py @@ -9,208 +9,232 @@ import logging import sys import argparse -from pydantic import (Field,) +from pydantic import ( + Field, +) -from typing import (ClassVar, Generator, Annotated, Optional, Any,) +from typing import ( + ClassVar, + Generator, + Annotated, + Optional, + Any, +) logger = logging.getLogger(__name__) + @pydantic.dataclasses.dataclass class MypyFormatEntry: - name : str - value : str + name: str + value: str - def __eq__(self, other: object) -> bool: - if not isinstance(other, type(self)): - raise NotImplementedError + def __eq__(self, other: object) -> bool: + if not isinstance(other, type(self)): + raise NotImplementedError + + return self.value == other.value - return self.value == other.value class MypyFormat: - vscode : ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode') - json : 
ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json') + vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode') + json: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json') + @classmethod + def from_value(cls, value: str) -> MypyFormatEntry: + for e in cls.entries(): + if value == e.value: + return e - @classmethod - def from_value(cls, value: str) -> MypyFormatEntry: - for e in cls.entries(): - if value == e.value: - return e + raise NotImplementedError - raise NotImplementedError + @classmethod + def entries( + cls, + ) -> Generator[ + MypyFormatEntry, + None, + None, + ]: + for o in dir(cls): + e = getattr(cls, o) + if not isinstance(e, MypyFormatEntry): + continue - @classmethod - def entries(cls) -> Generator[MypyFormatEntry, None, None,]: - for o in dir(cls): - e = getattr(cls, o) - if not isinstance(e, MypyFormatEntry): - continue + yield e - yield e class MypySettings(pydantic_settings.BaseSettings): - model_config = pydantic_settings.SettingsConfigDict( - env_prefix='online_fxreader_pr34_mypy_', - case_sensitive=False, - ) + model_config = pydantic_settings.SettingsConfigDict( + env_prefix='online_fxreader_pr34_mypy_', + case_sensitive=False, + ) + + config_path: pathlib.Path = pathlib.Path.cwd() / '.mypy.ini' + max_errors: dict[str, int] = dict() + paths: Annotated[list[pathlib.Path], Field(default_factory=lambda: ['.'])] - config_path : pathlib.Path = pathlib.Path.cwd() / '.mypy.ini' - max_errors : dict[str, int] = dict() - paths : Annotated[list[pathlib.Path], Field(default_factory=lambda : ['.'])] def run( - argv: Optional[list[str]] = None, - settings: Optional[MypySettings] = None, + argv: Optional[list[str]] = None, + settings: Optional[MypySettings] = None, ) -> None: - if argv is None: - argv = [] + if argv is None: + argv = [] - if settings is None: - settings = MypySettings() + if settings is None: + settings = MypySettings() - parser = argparse.ArgumentParser() - parser.add_argument( - '-q', '--quiet', - dest='quiet', - action='store_true', - help='do not print anything if the program is correct according to max_errors limits', - default=False, - ) - parser.add_argument( - '-i', - dest='paths', - help='specify paths to check', - default=[], - action='append', - ) - parser.add_argument( - '-f', '--format', - dest='_format', - help='output format of errors', - default=MypyFormat.json.value, - choices=[ - o.value - for o in MypyFormat.entries() - ], - ) - options, args = parser.parse_known_args(argv) + parser = argparse.ArgumentParser() + parser.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help='do not print anything if the program is correct according to max_errors limits', + default=False, + ) + parser.add_argument( + '-i', + dest='paths', + help='specify paths to check', + default=[], + action='append', + ) + parser.add_argument( + '-f', + '--format', + dest='_format', + help='output format of errors', + default=MypyFormat.json.value, + choices=[o.value for o in MypyFormat.entries()], + ) + options, args = parser.parse_known_args(argv) - if len(args) > 0 and args[0] == '--': - del args[0] + if len(args) > 0 and args[0] == '--': + del args[0] - options.format = MypyFormat.from_value(options._format) + options.format = MypyFormat.from_value(options._format) - if len(options.paths) == 0: - options.paths.extend(settings.paths) + if len(options.paths) == 0: + options.paths.extend(settings.paths) - started_at = datetime.datetime.now() + started_at = datetime.datetime.now() - mypy_cmd 
= [ - sys.executable, - '-m', - 'mypy', - '--config-file', str(settings.config_path), - '--strict', - '-O', - 'json', - *args, - *options.paths, - ] + mypy_cmd = [ + sys.executable, + '-m', + 'mypy', + '--config-file', + str(settings.config_path), + '--strict', + '-O', + 'json', + *args, + *options.paths, + ] + logger.info(dict(cmd=mypy_cmd)) - logger.info(dict(cmd=mypy_cmd)) + res = subprocess.run( + mypy_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) - res = subprocess.run( - mypy_cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) + done_at = datetime.datetime.now() - done_at = datetime.datetime.now() + try: + assert not res.returncode is None - try: - assert not res.returncode is None + errors = sorted( + [json.loads(o) for o in res.stdout.decode('utf-8').splitlines() if not o.strip() == ''], + key=lambda x: ( + x.get('file', ''), + x.get('line', 0), + ), + ) - errors = sorted([ - json.loads(o) - for o in res.stdout.decode('utf-8').splitlines() - if not o.strip() == '' - ], key=lambda x: ( - x.get('file', ''), - x.get('line', 0), - )) + if not options.quiet: + if (len(res.stderr)) > 0: + logger.error(res.stderr.decode('utf-8')) + except: + logger.exception('') + logger.error(res.stdout.decode('utf-8')) + logger.error(res.stderr.decode('utf-8')) + sys.exit(res.returncode) - if not options.quiet: - if (len(res.stderr)) > 0: - logger.error(res.stderr.decode('utf-8')) - except: - logger.exception('') - logger.error(res.stdout.decode('utf-8')) - logger.error(res.stderr.decode('utf-8')) - sys.exit(res.returncode) + g: dict[str, Any] = dict() + for o in errors: + if not o['file'] in g: + g[o['file']] = [] + g[o['file']].append(o) + h = { + k: len(v) + for k, v in sorted( + list(g.items()), + key=lambda x: x[0], + ) + } - g : dict[str, Any] = dict() - for o in errors: - if not o['file'] in g: - g[o['file']] = [] - g[o['file']].append(o) + mentioned_paths = marisa_trie.Trie(list(h)) - h = { - k : len(v) - for k, v in sorted( - list(g.items()), - key=lambda x: x[0], - ) - } + violated_limits: dict[str, str] = dict() - mentioned_paths = marisa_trie.Trie(list(h)) + for k, v in settings.max_errors.items(): + matching_paths = mentioned_paths.keys(k) + total_errors = sum([h[o] for o in matching_paths], 0) - violated_limits : dict[str, str] = dict() + if total_errors > v: + violated_limits[k] = '%s - [%s]: has %d errors > %d' % ( + k, + ', '.join(matching_paths), + total_errors, + v, + ) - for k, v in settings.max_errors.items(): - matching_paths = mentioned_paths.keys(k) - total_errors = sum([ - h[o] - for o in matching_paths - ], 0) + if len(violated_limits) > 0 or not options.quiet: + if options.format == MypyFormat.vscode: + for o in errors: + sys.stdout.write( + '[%s] %s:%d,%d %s - %s - %s\n' + % ( + o['severity'], + o['file'], + o['line'], + o['column'], + o['message'], + o['hint'], + o['code'], + ) + ) + sys.stdout.flush() + # logger.info(json.dumps(errors, indent=4)) + else: + logger.info(json.dumps(errors, indent=4)) - if total_errors > v: - violated_limits[k] = '%s - [%s]: has %d errors > %d' % ( - k, ', '.join(matching_paths), total_errors, v, - ) + # if len(violated_limits) > 0: + # logger.info(json.dumps(violated_limits, indent=4)) + logger.info( + json.dumps( + dict( + max_errors=settings.max_errors, + violated_limits=violated_limits, + histogram=h, + elapsed=(done_at - started_at).total_seconds(), + ), + indent=4, + ) + ) - if len(violated_limits) > 0 or not options.quiet: - if options.format == MypyFormat.vscode: - for o in errors: - sys.stdout.write('[%s] 
%s:%d,%d %s - %s - %s\n' % ( - o['severity'], - o['file'], - o['line'], - o['column'], - o['message'], - o['hint'], - o['code'], - )) - sys.stdout.flush() - #logger.info(json.dumps(errors, indent=4)) - else: - logger.info(json.dumps(errors, indent=4)) + if len(violated_limits) > 0: + sys.exit(1) - #if len(violated_limits) > 0: - # logger.info(json.dumps(violated_limits, indent=4)) - logger.info(json.dumps(dict( - max_errors=settings.max_errors, - violated_limits=violated_limits, - histogram=h, - elapsed=(done_at - started_at).total_seconds(), - ), indent=4)) - - if len(violated_limits) > 0: - sys.exit(1) if __name__ == '__main__': - from . import logging as _logging - _logging.setup() - run(sys.argv[1:]) \ No newline at end of file + from . import logging as _logging + + _logging.setup() + run(sys.argv[1:]) diff --git a/python/online/fxreader/pr34/commands_typed/os.py b/python/online/fxreader/pr34/commands_typed/os.py index ee2dfbe..68fcd9c 100644 --- a/python/online/fxreader/pr34/commands_typed/os.py +++ b/python/online/fxreader/pr34/commands_typed/os.py @@ -11,112 +11,115 @@ import dataclasses logger = logging.getLogger(__name__) -from typing import (overload, Optional, Literal, Any, Annotated,) +from typing import ( + overload, + Optional, + Literal, + Any, + Annotated, +) from .cli_bootstrap import PyProject + @overload def shutil_which( - name: str, - raise_on_failure: Literal[True], + name: str, + raise_on_failure: Literal[True], ) -> str: ... + @overload def shutil_which( - name: str, - raise_on_failure: bool, + name: str, + raise_on_failure: bool, ) -> Optional[str]: ... + def shutil_which( - name: str, - raise_on_failure: bool, + name: str, + raise_on_failure: bool, ) -> Optional[str]: - res = shutil.which(name) - if res is None and raise_on_failure: - raise NotImplementedError - else: - return res + res = shutil.which(name) + if res is None and raise_on_failure: + raise NotImplementedError + else: + return res + def runtime_libdirs_init( - project: PyProject, + project: PyProject, ) -> None: - if sys.platform == 'linux': - ld_library_path : list[pathlib.Path] = [ - o - for o in [ - *[ - o.absolute() - for o in ( - project.runtime_libdirs - if project.runtime_libdirs - else [] - ) - ], - *[ - pathlib.Path(o) - for o in os.environ.get( - 'LD_LIBRARY_PATH', - '' - ).split(os.path.pathsep) - if o != '' - ] - ] - ] + if sys.platform == 'linux': + ld_library_path: list[pathlib.Path] = [ + o + for o in [ + *[o.absolute() for o in (project.runtime_libdirs if project.runtime_libdirs else [])], + *[pathlib.Path(o) for o in os.environ.get('LD_LIBRARY_PATH', '').split(os.path.pathsep) if o != ''], + ] + ] - ld_library_path_present : list[pathlib.Path] = [] + ld_library_path_present: list[pathlib.Path] = [] - for o in ld_library_path: - if not o.exists(): - logger.warning(dict( - ld_library_path=o, - msg='not found', - )) + for o in ld_library_path: + if not o.exists(): + logger.warning( + dict( + ld_library_path=o, + msg='not found', + ) + ) - ld_library_path_present.append(o) + ld_library_path_present.append(o) - os.environ.update( - LD_LIBRARY_PATH=os.path.pathsep.join([ - str(o) for o in ld_library_path_present - ]) - ) + os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present])) - for preload_path in (project.runtime_preload or []): - for preload_found in glob.glob(str( - preload_path.parent / ('lib%s.so' % preload_path.name) - )): - logger.info(dict( - preload_path=preload_path, preload_found=preload_found, - # lib_path=o, - msg='load_library', - 
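# runtime_libdirs_init() is Linux-only: it merges the project's
# runtime_libdirs with the inherited LD_LIBRARY_PATH (missing directories
# are warned about but kept), then eagerly loads every runtime_preload
# entry by globbing lib<name>.so next to the configured path, logging each
# hit as shown here before handing it to ctypes.cdll.LoadLibrary().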
)) + for preload_path in project.runtime_preload or []: + for preload_found in glob.glob(str(preload_path.parent / ('lib%s.so' % preload_path.name))): + logger.info( + dict( + preload_path=preload_path, + preload_found=preload_found, + # lib_path=o, + msg='load_library', + ) + ) + + ctypes.cdll.LoadLibrary(preload_found) + else: + raise NotImplementedError - ctypes.cdll.LoadLibrary(preload_found) - else: - raise NotImplementedError class interfaces_index_t: - @dataclasses.dataclass - class Interface: - @dataclasses.dataclass - class AddrInfo: - family: str - local: str + @dataclasses.dataclass + class Interface: + @dataclasses.dataclass + class AddrInfo: + family: str + local: str + + name: Annotated[ + str, + pydantic.Field( + alias='ifname', + ), + ] + addr_info: list[AddrInfo] - name: Annotated[ - str, - pydantic.Field( - alias='ifname', - ) - ] - addr_info: list[AddrInfo] def interfaces_index() -> list[interfaces_index_t.Interface]: - res = pydantic.RootModel[ - list[interfaces_index_t.Interface] - ].model_validate_json( - subprocess.check_output([ - 'ip', '-j', 'addr', - ]).decode('utf-8') - ).root + res = ( + pydantic.RootModel[list[interfaces_index_t.Interface]] + .model_validate_json( + subprocess.check_output( + [ + 'ip', + '-j', + 'addr', + ] + ).decode('utf-8') + ) + .root + ) - return res + return res diff --git a/python/online/fxreader/pr34/commands_typed/pip.py b/python/online/fxreader/pr34/commands_typed/pip.py index 53419dd..079f434 100644 --- a/python/online/fxreader/pr34/commands_typed/pip.py +++ b/python/online/fxreader/pr34/commands_typed/pip.py @@ -11,514 +11,470 @@ import logging import typing if typing.TYPE_CHECKING: - import pip._internal.commands.show - import pip._internal.commands.download - import pip._internal.cli.main_parser - import pip._internal.models.index - import pip._internal.utils.temp_dir - import pip._internal.cli.main - import pip._internal.network.download - import pip._internal.resolution.base - import pip._internal.resolution.resolvelib.resolver - import pip._internal.operations.prepare + import pip._internal.commands.show + import pip._internal.commands.download + import pip._internal.cli.main_parser + import pip._internal.models.index + import pip._internal.utils.temp_dir + import pip._internal.cli.main + import pip._internal.network.download + import pip._internal.resolution.base + import pip._internal.resolution.resolvelib.resolver + import pip._internal.operations.prepare from typing import ( - Literal, Optional, Iterable, Any, + Literal, + Optional, + Iterable, + Any, ) logger = logging.getLogger(__name__) def pip_show( - argv: list[str], + argv: list[str], ) -> list['pip._internal.commands.show._PackageInfo']: - import pip._internal.commands.show - return list( - pip._internal.commands.show.search_packages_info( - argv, - ) - ) + import pip._internal.commands.show + + return list( + pip._internal.commands.show.search_packages_info( + argv, + ) + ) class pip_resolve_t: - class kwargs_t: - class mode_t(enum.StrEnum): - copy_paste = "copy_paste" - monkey_patch = "monkey_patch" - uv_pip_freeze = "uv_pip_freeze" - uv_pip_compile = "uv_pip_compile" + class kwargs_t: + class mode_t(enum.StrEnum): + copy_paste = 'copy_paste' + monkey_patch = 'monkey_patch' + uv_pip_freeze = 'uv_pip_freeze' + uv_pip_compile = 'uv_pip_compile' - @dataclasses.dataclass - class res_t: - @dataclasses.dataclass - class download_info_t: - url: str - sha256: str - constraint: str + @dataclasses.dataclass + class res_t: + @dataclasses.dataclass + class download_info_t: + 
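# pip_resolve() picks one of the four strategies named in mode_t above:
# 'copy_paste' replays pip's internal download pipeline in-process,
# 'uv_pip_freeze' / 'uv_pip_compile' shell out to uv, and 'monkey_patch'
# (not shown in this hunk) patches pip internals instead. Each resolved
# artifact is reported through the download_info_t record that follows:
# its URL, sha256 hash(es), and pinned constraint line.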
url: str + sha256: str + constraint: str - txt: Optional[str] = None - entries: Optional[list[download_info_t]] = None + txt: Optional[str] = None + entries: Optional[list[download_info_t]] = None -def pip_resolve_entries_to_txt( - entries: list[pip_resolve_t.res_t.download_info_t] -) -> str: - return '\n'.join([ - '#%s\n%s %s' % ( - o.url, - o.constraint, - ' '.join([ - '--hash=sha256:%s' % o2 - for o2 in o.sha256 - ]) - ) - for o in entries - ]) +def pip_resolve_entries_to_txt(entries: list[pip_resolve_t.res_t.download_info_t]) -> str: + return '\n'.join(['#%s\n%s %s' % (o.url, o.constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256])) for o in entries]) + def pip_resolve( - argv: list[str], - mode: pip_resolve_t.kwargs_t.mode_t, - requirements: Optional[list[str]] = None, + argv: list[str], + mode: pip_resolve_t.kwargs_t.mode_t, + requirements: Optional[list[str]] = None, ) -> pip_resolve_t.res_t: - if mode is pip_resolve_t.kwargs_t.mode_t.copy_paste: - import pip._internal.commands.show - import pip._internal.commands.download - import pip._internal.cli.cmdoptions - import pip._internal.cli.main_parser - import pip._internal.models.index - import pip._internal.utils.temp_dir - import pip._internal.cli.main - import pip._internal.network.download - import pip._internal.resolution.base - import pip._internal.req.req_install - import pip._internal.resolution.resolvelib.resolver - import pip._internal.operations.prepare - import pip._internal.utils.temp_dir - import pip._internal.operations.build.build_tracker - import pip._internal.models.direct_url + if mode is pip_resolve_t.kwargs_t.mode_t.copy_paste: + import pip._internal.commands.show + import pip._internal.commands.download + import pip._internal.cli.cmdoptions + import pip._internal.cli.main_parser + import pip._internal.models.index + import pip._internal.utils.temp_dir + import pip._internal.cli.main + import pip._internal.network.download + import pip._internal.resolution.base + import pip._internal.req.req_install + import pip._internal.resolution.resolvelib.resolver + import pip._internal.operations.prepare + import pip._internal.utils.temp_dir + import pip._internal.operations.build.build_tracker + import pip._internal.models.direct_url - with contextlib.ExitStack() as stack: - stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager()) + with contextlib.ExitStack() as stack: + stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager()) - t2 = pip._internal.cli.main_parser.create_main_parser() + t2 = pip._internal.cli.main_parser.create_main_parser() - t3 = t2.parse_args(["download"]) - t1 = pip._internal.commands.download.DownloadCommand("blah", "shit") + t3 = t2.parse_args(['download']) + t1 = pip._internal.commands.download.DownloadCommand('blah', 'shit') - stack.enter_context(t1.main_context()) + stack.enter_context(t1.main_context()) - # options = pip._internal.commands.download.Values() - options = t3[0] - options.python_version = None - options.platforms = [] - options.abis = [] - options.implementation = [] - options.format_control = None - options.ignore_dependencies = None - options.index_url = pip._internal.models.index.PyPI.simple_url - options.extra_index_urls = [] - options.no_index = None - options.find_links = [] - options.pre = None - options.prefer_binary = True - options.only_binary = True - options.constraints = [] - options.use_pep517 = None - options.editables = [] - options.requirements = [] - options.src_dir = str(pathlib.Path(__file__).parent) - 
options.build_isolation = None - options.check_build_deps = None - options.progress_bar = True - options.require_hashes = None - options.ignore_requires_python = False - # options.cache_dir - pip._internal.cli.cmdoptions.check_dist_restriction(options) - # t1._in_main_context = True - session = t1.get_default_session(options) - target_python = pip._internal.cli.cmdoptions.make_target_python(options) - finder = t1._build_package_finder( - options=options, - session=session, - target_python=target_python, - ignore_requires_python=options.ignore_requires_python, - ) - build_tracker = t1.enter_context( - pip._internal.operations.build.build_tracker.get_build_tracker() - ) - reqs = t1.get_requirements( - [ - #'pip', 'uv', 'ipython', - *argv, - ], - options, - finder, - session, - ) - pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs) - directory = pip._internal.utils.temp_dir.TempDirectory( - delete=True, kind="download", globally_managed=True - ) - preparer = t1.make_requirement_preparer( - temp_build_dir=directory, - options=options, - build_tracker=build_tracker, - session=session, - finder=finder, - download_dir=None, - use_user_site=False, - verbosity=False, - ) - resolver = t1.make_resolver( - preparer=preparer, - finder=finder, - options=options, - ignore_requires_python=options.ignore_requires_python, - use_pep517=options.use_pep517, - py_version_info=options.python_version, - ) - t1.trace_basic_info(finder) - requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + # options = pip._internal.commands.download.Values() + options = t3[0] + options.python_version = None + options.platforms = [] + options.abis = [] + options.implementation = [] + options.format_control = None + options.ignore_dependencies = None + options.index_url = pip._internal.models.index.PyPI.simple_url + options.extra_index_urls = [] + options.no_index = None + options.find_links = [] + options.pre = None + options.prefer_binary = True + options.only_binary = True + options.constraints = [] + options.use_pep517 = None + options.editables = [] + options.requirements = [] + options.src_dir = str(pathlib.Path(__file__).parent) + options.build_isolation = None + options.check_build_deps = None + options.progress_bar = True + options.require_hashes = None + options.ignore_requires_python = False + # options.cache_dir + pip._internal.cli.cmdoptions.check_dist_restriction(options) + # t1._in_main_context = True + session = t1.get_default_session(options) + target_python = pip._internal.cli.cmdoptions.make_target_python(options) + finder = t1._build_package_finder( + options=options, + session=session, + target_python=target_python, + ignore_requires_python=options.ignore_requires_python, + ) + build_tracker = t1.enter_context(pip._internal.operations.build.build_tracker.get_build_tracker()) + reqs = t1.get_requirements( + [ + #'pip', 'uv', 'ipython', + *argv, + ], + options, + finder, + session, + ) + pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs) + directory = pip._internal.utils.temp_dir.TempDirectory(delete=True, kind='download', globally_managed=True) + preparer = t1.make_requirement_preparer( + temp_build_dir=directory, + options=options, + build_tracker=build_tracker, + session=session, + finder=finder, + download_dir=None, + use_user_site=False, + verbosity=False, + ) + resolver = t1.make_resolver( + preparer=preparer, + finder=finder, + options=options, + ignore_requires_python=options.ignore_requires_python, + use_pep517=options.use_pep517, + 
py_version_info=options.python_version, + ) + t1.trace_basic_info(finder) + requirement_set = resolver.resolve(reqs, check_supported_wheels=True) - res = pip_resolve_t.res_t() + res = pip_resolve_t.res_t() - res.entries = [] + res.entries = [] - for k, v in requirement_set.requirements.items(): - assert not v.download_info is None - assert isinstance( - v.download_info.info, - pip._internal.models.direct_url.ArchiveInfo, - ) - assert not v.download_info.info.hashes is None + for k, v in requirement_set.requirements.items(): + assert not v.download_info is None + assert isinstance( + v.download_info.info, + pip._internal.models.direct_url.ArchiveInfo, + ) + assert not v.download_info.info.hashes is None - res.entries.append( - pip_resolve_t.res_t.download_info_t( - constraint=k, - sha256=v.download_info.info.hashes["sha256"], - url=v.download_info.url, - ) - ) + res.entries.append( + pip_resolve_t.res_t.download_info_t( + constraint=k, + sha256=v.download_info.info.hashes['sha256'], + url=v.download_info.url, + ) + ) - res.txt = pip_resolve_entries_to_txt( - res.entries - ) + res.txt = pip_resolve_entries_to_txt(res.entries) - return res - elif mode is pip_resolve_t.kwargs_t.mode_t.monkey_patch: - import pip._internal.commands.show - import pip._internal.commands.download - import pip._internal.cli.main_parser - import pip._internal.models.index - import pip._internal.models.link - from pip._internal.models.link import ( - Link, - ) - import pip._internal.utils.temp_dir - from pip._internal.metadata.base import ( - BaseDistribution, - ) - import pip._internal.cli.main - import pip._internal.network.download - import pip._internal.resolution.base - import pip._internal.resolution.resolvelib.resolver - import pip._internal.operations.prepare - from pip._internal.network.download import ( - Downloader, - ) - from pip._internal.operations.prepare import ( - File, - ) - from pip._internal.req.req_set import RequirementSet - from pip._internal.utils.hashes import Hashes - from pip._internal.req.req_install import InstallRequirement + return res + elif mode is pip_resolve_t.kwargs_t.mode_t.monkey_patch: + import pip._internal.commands.show + import pip._internal.commands.download + import pip._internal.cli.main_parser + import pip._internal.models.index + import pip._internal.models.link + from pip._internal.models.link import ( + Link, + ) + import pip._internal.utils.temp_dir + from pip._internal.metadata.base import ( + BaseDistribution, + ) + import pip._internal.cli.main + import pip._internal.network.download + import pip._internal.resolution.base + import pip._internal.resolution.resolvelib.resolver + import pip._internal.operations.prepare + from pip._internal.network.download import ( + Downloader, + ) + from pip._internal.operations.prepare import ( + File, + ) + from pip._internal.req.req_set import RequirementSet + from pip._internal.utils.hashes import Hashes + from pip._internal.req.req_install import InstallRequirement - downloader_call_def = pip._internal.network.download.Downloader.__call__ + downloader_call_def = pip._internal.network.download.Downloader.__call__ - def downloader_call( - _self: pip._internal.network.download.Downloader, - link: pip._internal.models.link.Link, - location: str, - ) -> tuple[str, str]: - logger.info( - dict( - url=link.url, - ) - ) + def downloader_call( + _self: pip._internal.network.download.Downloader, + link: pip._internal.models.link.Link, + location: str, + ) -> tuple[str, str]: + logger.info( + dict( + url=link.url, + ) + ) - return 
downloader_call_def( - _self, - link, location, - ) + return downloader_call_def( + _self, + link, + location, + ) - batch_downloader_call_def = ( - pip._internal.network.download.BatchDownloader.__call__ - ) + batch_downloader_call_def = pip._internal.network.download.BatchDownloader.__call__ - def batch_downloader_call( - _self: pip._internal.network.download.BatchDownloader, - links: Iterable[pip._internal.models.link.Link], - location: str, - ) -> Iterable[ - tuple[ - pip._internal.models.link.Link, - tuple[str, str] - ] - ]: - # print(args) + def batch_downloader_call( + _self: pip._internal.network.download.BatchDownloader, + links: Iterable[pip._internal.models.link.Link], + location: str, + ) -> Iterable[tuple[pip._internal.models.link.Link, tuple[str, str]]]: + # print(args) - logger.info( - dict( - links=links, - location=location, - ) - ) + logger.info( + dict( + links=links, + location=location, + ) + ) - return [ - (o, ("/dev/null", '')) - for o in links - ] + return [(o, ('/dev/null', '')) for o in links] - # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve - base_resolver_resolve_def = ( - pip._internal.resolution.resolvelib.resolver.Resolver.resolve - ) + # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve + base_resolver_resolve_def = pip._internal.resolution.resolvelib.resolver.Resolver.resolve - result_requirements : list[ - RequirementSet | InstallRequirement - ] = [] + result_requirements: list[RequirementSet | InstallRequirement] = [] - def base_resolver_resolve( - _self: pip._internal.resolution.resolvelib.resolver.Resolver, - root_reqs: list[ - InstallRequirement, - ], - check_supported_wheels: bool, - ) -> RequirementSet: - # print(args, kwargs) + def base_resolver_resolve( + _self: pip._internal.resolution.resolvelib.resolver.Resolver, + root_reqs: list[InstallRequirement,], + check_supported_wheels: bool, + ) -> RequirementSet: + # print(args, kwargs) - res = base_resolver_resolve_def( - _self, - root_reqs, - check_supported_wheels - ) + res = base_resolver_resolve_def(_self, root_reqs, check_supported_wheels) - result_requirements.append(res) - raise NotImplementedError - return res + result_requirements.append(res) + raise NotImplementedError + return res - get_http_url_def = pip._internal.operations.prepare.get_http_url + get_http_url_def = pip._internal.operations.prepare.get_http_url - def get_http_url( - link: Link, - download: Downloader, - download_dir: Optional[str] = None, - hashes: Optional[Hashes] = None, - ) -> File: - logger.info( - dict( - url=link.url, - hashes=hashes, - ) - ) + def get_http_url( + link: Link, + download: Downloader, + download_dir: Optional[str] = None, + hashes: Optional[Hashes] = None, + ) -> File: + logger.info( + dict( + url=link.url, + hashes=hashes, + ) + ) - if link.url.endswith(".whl"): - print("blah") - hashes = None + if link.url.endswith('.whl'): + print('blah') + hashes = None - return File( - "/dev/null", - '', - ) - else: - return get_http_url_def( - link, - download, - download_dir, - hashes - ) + return File( + '/dev/null', + '', + ) + else: + return get_http_url_def(link, download, download_dir, hashes) - prepare_linked_requirements_more_def = pip._internal.operations.prepare.RequirementPreparer.prepare_linked_requirements_more + prepare_linked_requirements_more_def = pip._internal.operations.prepare.RequirementPreparer.prepare_linked_requirements_more - def prepare_linked_requirements_more( - _self: pip._internal.resolution.resolvelib.resolver.Resolver, 
- reqs: Iterable[InstallRequirement], - parallel_builds: bool = False, - ) -> None: - result_requirements.extend( - reqs - ) - raise NotImplementedError + def prepare_linked_requirements_more( + _self: pip._internal.resolution.resolvelib.resolver.Resolver, + reqs: Iterable[InstallRequirement], + parallel_builds: bool = False, + ) -> None: + result_requirements.extend(reqs) + raise NotImplementedError - _complete_partial_requirements_def = pip._internal.operations.prepare.RequirementPreparer._complete_partial_requirements + _complete_partial_requirements_def = pip._internal.operations.prepare.RequirementPreparer._complete_partial_requirements - def _complete_partial_requirements( - _self: pip._internal.resolution.resolvelib.resolver.Resolver, - partially_downloaded_reqs: Iterable[InstallRequirement], - parallel_builds: bool = False, - ) -> None: - result_requirements.extend( - partially_downloaded_reqs - ) - raise NotImplementedError + def _complete_partial_requirements( + _self: pip._internal.resolution.resolvelib.resolver.Resolver, + partially_downloaded_reqs: Iterable[InstallRequirement], + parallel_builds: bool = False, + ) -> None: + result_requirements.extend(partially_downloaded_reqs) + raise NotImplementedError - patches : list[Any] = [] + patches: list[Any] = [] - patches.append( - unittest.mock.patch.object( - pip._internal.network.download.Downloader, "__call__", downloader_call - ) - ) - # patches.append( - # unittest.mock.patch.object( - # pip._internal.network.download.BatchDownloader, - # '__call__', - # batch_downloader_call - # ) - # ) - # patches.append( - # unittest.mock.patch.object( - # pip._internal.resolution.base.BaseResolver, 'resolve', base_resolver_resolve)) + patches.append(unittest.mock.patch.object(pip._internal.network.download.Downloader, '__call__', downloader_call)) + # patches.append( + # unittest.mock.patch.object( + # pip._internal.network.download.BatchDownloader, + # '__call__', + # batch_downloader_call + # ) + # ) + # patches.append( + # unittest.mock.patch.object( + # pip._internal.resolution.base.BaseResolver, 'resolve', base_resolver_resolve)) - patches.append( - unittest.mock.patch.object( - pip._internal.resolution.resolvelib.resolver.Resolver, - "resolve", - base_resolver_resolve, - ) - ) - patches.append( - unittest.mock.patch.object( - pip._internal.operations.prepare, - "get_http_url", - get_http_url, - ) - ) - patches.append( - unittest.mock.patch.object( - pip._internal.operations.prepare.RequirementPreparer, - "prepare_linked_requirements_more", - prepare_linked_requirements_more, - ) - ) - # patches.append( - # unittest.mock.patch.object( - # pip._internal.operations.prepare.RequirementPreparer, - # '_complete_partial_requirements', - # _complete_partial_requirements - # ) - # ) + patches.append( + unittest.mock.patch.object( + pip._internal.resolution.resolvelib.resolver.Resolver, + 'resolve', + base_resolver_resolve, + ) + ) + patches.append( + unittest.mock.patch.object( + pip._internal.operations.prepare, + 'get_http_url', + get_http_url, + ) + ) + patches.append( + unittest.mock.patch.object( + pip._internal.operations.prepare.RequirementPreparer, + 'prepare_linked_requirements_more', + prepare_linked_requirements_more, + ) + ) + # patches.append( + # unittest.mock.patch.object( + # pip._internal.operations.prepare.RequirementPreparer, + # '_complete_partial_requirements', + # _complete_partial_requirements + # ) + # ) - with contextlib.ExitStack() as stack: - for p in patches: - stack.enter_context(p) + with contextlib.ExitStack() 
as stack: + for p in patches: + stack.enter_context(p) - pip._internal.cli.main.main( - [ - "download", - "-q", - "--no-cache", - "-d", - "/dev/null", - *argv, - # 'numpy', - ] - ) + pip._internal.cli.main.main( + [ + 'download', + '-q', + '--no-cache', + '-d', + '/dev/null', + *argv, + # 'numpy', + ] + ) - # return sum([ - # [ - # pip_resolve_t.res_t.download_info_t( - # constraint=k, - # sha256=v.download_info.info.hashes['sha256'], - # url=v.download_info.url, - # ) - # for k, v in o.requirements.items() - # ] - # for o in result_requirements - # ], []) - logger.warn(result_requirements) + # return sum([ + # [ + # pip_resolve_t.res_t.download_info_t( + # constraint=k, + # sha256=v.download_info.info.hashes['sha256'], + # url=v.download_info.url, + # ) + # for k, v in o.requirements.items() + # ] + # for o in result_requirements + # ], []) + logger.warn(result_requirements) - res = pip_resolve_t.res_t() + res = pip_resolve_t.res_t() - res.entries = [] + res.entries = [] - for o in result_requirements: - assert isinstance(o, InstallRequirement) + for o in result_requirements: + assert isinstance(o, InstallRequirement) - sha256_hashes = o.hashes()._allowed["sha256"] - assert len(sha256_hashes) == 1 - assert not o.link is None + sha256_hashes = o.hashes()._allowed['sha256'] + assert len(sha256_hashes) == 1 + assert not o.link is None - res.entries.append( - pip_resolve_t.res_t.download_info_t( - constraint=str(o.req), - sha256=sha256_hashes[0], - url=o.link.url, - ) - ) + res.entries.append( + pip_resolve_t.res_t.download_info_t( + constraint=str(o.req), + sha256=sha256_hashes[0], + url=o.link.url, + ) + ) - res.txt = pip_resolve_entries_to_txt( - res.entries - ) + res.txt = pip_resolve_entries_to_txt(res.entries) - return res - elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_freeze: - assert len(argv) == 0 + return res + elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_freeze: + assert len(argv) == 0 - pip_freeze = subprocess.check_output( - [ - sys.executable, - "-m", - "uv", - "pip", - "freeze", - ], - ).decode('utf-8') - pip_compile = subprocess.check_output( - [ - sys.executable, '-m', - 'uv', 'pip', 'compile', - '--generate-hashes', - '-', + pip_freeze = subprocess.check_output( + [ + sys.executable, + '-m', + 'uv', + 'pip', + 'freeze', + ], + ).decode('utf-8') + pip_compile = subprocess.check_output( + [ + sys.executable, + '-m', + 'uv', + 'pip', + 'compile', + '--generate-hashes', + '-', + ], + input=pip_freeze.encode('utf-8'), + ).decode('utf-8') - ], - input=pip_freeze.encode('utf-8') - ).decode('utf-8') + return pip_resolve_t.res_t( + txt=pip_compile, + ) + elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_compile: + with contextlib.ExitStack() as stack: + if not requirements is None: + # assert len(argv) == 0 - return pip_resolve_t.res_t( - txt=pip_compile, - ) - elif mode is pip_resolve_t.kwargs_t.mode_t.uv_pip_compile: - with contextlib.ExitStack() as stack: - if not requirements is None: - # assert len(argv) == 0 + f = stack.enter_context( + tempfile.NamedTemporaryFile( + suffix='.txt', + ) + ) + f.write(('\n'.join(requirements)).encode('utf-8')) + f.flush() - f = stack.enter_context( - tempfile.NamedTemporaryFile( - suffix='.txt', - ) - ) - f.write( - ('\n'.join(requirements)).encode('utf-8') - ) - f.flush() + argv.append(f.name) - argv.append(f.name) + if argv[0] == '--': + del argv[0] - if argv[0] == '--': - del argv[0] + pip_compile = subprocess.check_output( + [ + sys.executable, + '-m', + 'uv', + 'pip', + 'compile', + '--generate-hashes', + *argv, + ], + 
).decode('utf-8') - pip_compile = subprocess.check_output( - [ - sys.executable, '-m', - 'uv', 'pip', 'compile', - '--generate-hashes', - *argv, - ], - ).decode('utf-8') - - return pip_resolve_t.res_t( - txt=pip_compile, - ) - else: - raise NotImplementedError + return pip_resolve_t.res_t( + txt=pip_compile, + ) + else: + raise NotImplementedError diff --git a/python/online/fxreader/pr34/commands_typed/typing.py b/python/online/fxreader/pr34/commands_typed/typing.py index 517898c..66011df 100644 --- a/python/online/fxreader/pr34/commands_typed/typing.py +++ b/python/online/fxreader/pr34/commands_typed/typing.py @@ -6,22 +6,23 @@ from typing import Any from typing_extensions import Protocol from abc import abstractmethod -C = typing.TypeVar("C", bound="Comparable") +C = typing.TypeVar('C', bound='Comparable') + class Comparable(Protocol): - @abstractmethod - def __eq__(self, other: Any) -> bool: - pass + @abstractmethod + def __eq__(self, other: Any) -> bool: + pass - @abstractmethod - def __lt__(self: C, other: C) -> bool: - pass + @abstractmethod + def __lt__(self: C, other: C) -> bool: + pass - def __gt__(self: C, other: C) -> bool: - return (not self < other) and self != other + def __gt__(self: C, other: C) -> bool: + return (not self < other) and self != other - def __le__(self: C, other: C) -> bool: - return self < other or self == other + def __le__(self: C, other: C) -> bool: + return self < other or self == other - def __ge__(self: C, other: C) -> bool: - return (not self < other) + def __ge__(self: C, other: C) -> bool: + return not self < other diff --git a/python/online/fxreader/pr34/tasks/ble.py b/python/online/fxreader/pr34/tasks/ble.py index 8a25a8c..48aa22c 100644 --- a/python/online/fxreader/pr34/tasks/ble.py +++ b/python/online/fxreader/pr34/tasks/ble.py @@ -5,121 +5,107 @@ import pprint async def f1(): - devices = await bleak.BleakScanner.discover() - return devices + devices = await bleak.BleakScanner.discover() + return devices + async def f2(device, timeout=None): - if timeout is None: - timeout = 1.0 + if timeout is None: + timeout = 1.0 - assert isinstance(timeout, float) and timeout >= 1e-8 + assert isinstance(timeout, float) and timeout >= 1e-8 + + p = await bleak.BleakClient( + device, + timeout=timeout, + ).__aenter__() + return p - p = await bleak.BleakClient( - device, - timeout=timeout, - ).__aenter__() - return p async def f3(client): - t1 = [ - dict( - service=o.__dict__, - characteristics=[ - o2.__dict__ - for o2 in o.characteristics - ] - ) - for o in client.services - ] - return t1 + t1 = [dict(service=o.__dict__, characteristics=[o2.__dict__ for o2 in o.characteristics]) for o in client.services] + return t1 + async def f5( - name_check=None, + name_check=None, ): - t2 = [] + t2 = [] - attempt = 0 + attempt = 0 - while True: - t1 = await f1() - pprint.pprint([o.__dict__ for o in t1]) + while True: + t1 = await f1() + pprint.pprint([o.__dict__ for o in t1]) - if not name_check is None: - assert inspect.isfunction(name_check) + if not name_check is None: + assert inspect.isfunction(name_check) - t5 = { - i : o.details[0].name() - for i, o in enumerate(t1) - } + t5 = {i: o.details[0].name() for i, o in enumerate(t1)} - t2.extend( - [ - t1[k] - for k, v in t5.items() - if isinstance(v, str) and name_check(v) - ] - ) - else: - t2.extend(t1) + t2.extend([t1[k] for k, v in t5.items() if isinstance(v, str) and name_check(v)]) + else: + t2.extend(t1) - if len(t2) > 0: - break + if len(t2) > 0: + break - attempt += 1 - print('\rattempt #%d' % attempt, end='') + 
attempt += 1 + print('\rattempt #%d' % attempt, end='') + + return t2 - return t2 async def f4( - timeout=None, - characteristics=None, - operations=None, - name_check=None, + timeout=None, + characteristics=None, + operations=None, + name_check=None, ): - if isinstance(name_check, str): - assert name_check in [ - 'watch fit', - ] - name_check2 = lambda current_name: name_check.lower() in current_name.lower() - else: - name_check2 = name_check + if isinstance(name_check, str): + assert name_check in [ + 'watch fit', + ] + name_check2 = lambda current_name: name_check.lower() in current_name.lower() + else: + name_check2 = name_check - assert not name_check2 is None + assert not name_check2 is None - if characteristics is None: - characteristics = [ - '0000ffd1-0000-1000-8000-00805f9b34fb', - ] + if characteristics is None: + characteristics = [ + '0000ffd1-0000-1000-8000-00805f9b34fb', + ] - t2 = await f5( - name_check=name_check2, - ) + t2 = await f5( + name_check=name_check2, + ) - if len(t2) == 0: - print('not found') - return + if len(t2) == 0: + print('not found') + return - t3 = None - try: - t3 = await f2(t2[0], timeout=timeout) - t4 = await f3(t3) - pprint.pprint(t4) + t3 = None + try: + t3 = await f2(t2[0], timeout=timeout) + t4 = await f3(t3) + pprint.pprint(t4) - if not operations is None and inspect.isfunction(operations): - await operations( - client=t3, - t4=t4, - ) - else: - t6 = {} - for o in characteristics: - try: - t7 = await t3.read_gatt_char(o) - except Exception as exception: - print(traceback.format_exc()) - t7 = None - t6[o] = t7 - pprint.pprint(t6) - finally: - if not t3 is None: - await t3.disconnect() + if not operations is None and inspect.isfunction(operations): + await operations( + client=t3, + t4=t4, + ) + else: + t6 = {} + for o in characteristics: + try: + t7 = await t3.read_gatt_char(o) + except Exception as exception: + print(traceback.format_exc()) + t7 = None + t6[o] = t7 + pprint.pprint(t6) + finally: + if not t3 is None: + await t3.disconnect() diff --git a/python/online/fxreader/pr34/tasks/cython.py b/python/online/fxreader/pr34/tasks/cython.py index 1ad6aa6..a0b4f3f 100644 --- a/python/online/fxreader/pr34/tasks/cython.py +++ b/python/online/fxreader/pr34/tasks/cython.py @@ -10,162 +10,149 @@ import threading import cython import datetime -from typing import (Any, Optional, TypeVar, Type, cast) +from typing import Any, Optional, TypeVar, Type, cast # from scoping import scoping as s -def test( - _id: int, - T: float, - a: numpy.ndarray[Any, numpy.dtype[numpy.int32]], -) -> None: - with cython.nogil: - #if True: - started_at = datetime.datetime.now() - print('started') - def elapsed() -> float: - return (datetime.datetime.now() - started_at).total_seconds() - #a = 0 - while elapsed() < T: - #a += 1 - for k in range(1024 * 1024): - a[_id] += 1 - print(['done', started_at, elapsed(), a[_id]]) +def test( + _id: int, + T: float, + a: numpy.ndarray[Any, numpy.dtype[numpy.int32]], +) -> None: + with cython.nogil: + # if True: + started_at = datetime.datetime.now() + print('started') + + def elapsed() -> float: + return (datetime.datetime.now() - started_at).total_seconds() + + # a = 0 + while elapsed() < T: + # a += 1 + for k in range(1024 * 1024): + a[_id] += 1 + + print(['done', started_at, elapsed(), a[_id]]) + M = TypeVar('M', bound=Type[Any]) + def build(content: str, module: M) -> M: - import pathlib - import tempfile - import hashlib - import Cython.Build.Inline + import pathlib + import tempfile + import hashlib + import Cython.Build.Inline - 
sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex() + sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex() - output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute() + output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute() + if not output_dir.exists() or True: + os.makedirs(str(output_dir), exist_ok=True) - if not output_dir.exists() or True: - os.makedirs(str(output_dir), exist_ok=True) + source_path = output_dir / ('_%s.pyx' % sha256sum) + if not source_path.exists(): + with io.open(str(source_path), 'w') as f: + f.write(content) - source_path = output_dir / ('_%s.pyx' % sha256sum) - if not source_path.exists(): - with io.open(str(source_path), 'w') as f: - f.write(content) + t1 = Cython.Build.Inline._get_build_extension() + t1.extensions = Cython.Build.cythonize(str(source_path)) + t1.build_temp = str(pathlib.Path('/')) + t1.build_lib = str(output_dir) + # t2 = Cython.Build.Inline.Extension( + # name=sha256sum, + # ) + t1.run() - t1 = Cython.Build.Inline._get_build_extension() - t1.extensions = Cython.Build.cythonize(str(source_path)) - t1.build_temp = str(pathlib.Path('/')) - t1.build_lib = str(output_dir) - #t2 = Cython.Build.Inline.Extension( - # name=sha256sum, - #) - t1.run() + return cast(M, Cython.Build.Inline.load_dynamic('_%s' % sha256sum, glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0])) - return cast( - M, - Cython.Build.Inline.load_dynamic( - '_%s' % sha256sum, - glob.glob( - str(output_dir / ('_%s*.so' % sha256sum)) - )[0] - ) - ) + raise NotImplementedError - raise NotImplementedError def mypyc_build(file_path: pathlib.Path) -> Any: - import pathlib - import tempfile - import hashlib - import mypyc.build - import Cython.Build.Inline + import pathlib + import tempfile + import hashlib + import mypyc.build + import Cython.Build.Inline - assert isinstance(file_path, pathlib.Path) + assert isinstance(file_path, pathlib.Path) - #sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex() + # sha256sum = hashlib.sha256(content.encode('utf-8')).digest().hex() - #output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute() - output_dir = pathlib.Path('.') / 'tmp' / 'mypyc' - sha256sum = file_path.stem - lib_pattern = file_path.parent / ('%s.cpython*.so' % sha256sum) - lib_dir = pathlib.Path('.') + # output_dir = (pathlib.Path('.') / 'tmp' / 'cython' / sha256sum).absolute() + output_dir = pathlib.Path('.') / 'tmp' / 'mypyc' + sha256sum = file_path.stem + lib_pattern = file_path.parent / ('%s.cpython*.so' % sha256sum) + lib_dir = pathlib.Path('.') + def lib_path_glob(path: str | pathlib.Path) -> Optional[pathlib.Path]: + res: list[str] = glob.glob(str(path)) - def lib_path_glob(path: str | pathlib.Path) -> Optional[pathlib.Path]: - res : list[str] = glob.glob(str(path)) + if len(res) == 0: + return None + else: + return pathlib.Path(res[0]) - if len(res) == 0: - return None - else: - return pathlib.Path(res[0]) + need_build: bool = False - need_build : bool = False + lib_path: Optional[pathlib.Path] = None - lib_path : Optional[pathlib.Path] = None + lib_path = lib_path_glob(lib_pattern) - lib_path = lib_path_glob(lib_pattern) + if not lib_path is None: + t2 = file_path.stat() + t3 = lib_path.stat() + if t3.st_mtime < t2.st_mtime: + need_build = True - if not lib_path is None: - t2 = file_path.stat() - t3 = lib_path.stat() - if t3.st_mtime < t2.st_mtime: - need_build = True + del t2 + del t3 + else: + need_build = True - del t2 - del t3 - else: - need_build = True + if need_build: + for o 
in [ + output_dir, + output_dir / 'build' / file_path.parent, + ]: + os.makedirs(str(o), exist_ok=True) + # source_path = output_dir / ('_%s.py' % sha256sum) + source_path = file_path + # with io.open(str(source_path), 'w') as f: + # f.write(content) + t1 = Cython.Build.Inline._get_build_extension() + t1.extensions = mypyc.build.mypycify([str(source_path)], target_dir=str(output_dir / 'build')) + t1.build_temp = str(output_dir) + t1.build_lib = str(lib_dir) + # t2 = Cython.Build.Inline.Extension( + # name=sha256sum, + # ) + t1.run() - if need_build: - for o in [ - output_dir, - output_dir / 'build' / file_path.parent, - ]: - os.makedirs( - str(o), - exist_ok=True - ) - #source_path = output_dir / ('_%s.py' % sha256sum) - source_path = file_path - #with io.open(str(source_path), 'w') as f: - # f.write(content) + lib_path = lib_path_glob(lib_pattern) - t1 = Cython.Build.Inline._get_build_extension() - t1.extensions = mypyc.build.mypycify( - [str(source_path)], - target_dir=str(output_dir / 'build') - ) - t1.build_temp = str(output_dir) - t1.build_lib = str(lib_dir) - #t2 = Cython.Build.Inline.Extension( - # name=sha256sum, - #) - t1.run() + return Cython.Build.Inline.load_dynamic( + #'_%s' % sha256sum, + # t1.extensions[0].name, + file_path.stem, + str(lib_path), + ) - lib_path = lib_path_glob(lib_pattern) + raise NotImplementedError - return Cython.Build.Inline.load_dynamic( - #'_%s' % sha256sum, - #t1.extensions[0].name, - file_path.stem, - str(lib_path), - ) - - raise NotImplementedError class Source: - @staticmethod - def test2( - _a : numpy.ndarray[Any, numpy.dtype[numpy.int64]], - _id : numpy.dtype[numpy.int32] | int, - T : float=16 - ) -> int: - raise NotImplementedError + @staticmethod + def test2(_a: numpy.ndarray[Any, numpy.dtype[numpy.int64]], _id: numpy.dtype[numpy.int32] | int, T: float = 16) -> int: + raise NotImplementedError -source = build(r''' +source = build( + r""" cimport cython @cython.boundscheck(False) @@ -226,52 +213,52 @@ def test2(long long [:] _a, int _id, double T=16) -> int: return _a[_id] -''', Source) +""", + Source, +) -def test_cython(N: int=4, T:int=16) -> None: - #a = [0] * N - a = numpy.zeros((N,), dtype=numpy.int64) - t = [ - threading.Thread( - target=functools.partial( - source.test2, - a, - k, - T, - ) - ) - for k in range(N) - ] +def test_cython(N: int = 4, T: int = 16) -> None: + # a = [0] * N + a = numpy.zeros((N,), dtype=numpy.int64) - for o in t: - o.start() - for o in t: - o.join() + t = [ + threading.Thread( + target=functools.partial( + source.test2, + a, + k, + T, + ) + ) + for k in range(N) + ] - #cython_module['test2'](a, 0) + for o in t: + o.start() + for o in t: + o.join() -def test_mypyc(N: int=4, W:int=35) -> None: - cython2 = mypyc_build( - (pathlib.Path(__file__).parent / 'cython2.py').relative_to( - pathlib.Path.cwd() - ) - ) + # cython_module['test2'](a, 0) - # from .cython2 import fib - #a = [0] * N - t = [ - threading.Thread( - target=functools.partial( - cython2.fib, - W, - ) - ) - for k in range(N) - ] +def test_mypyc(N: int = 4, W: int = 35) -> None: + cython2 = mypyc_build((pathlib.Path(__file__).parent / 'cython2.py').relative_to(pathlib.Path.cwd())) - for o in t: - o.start() - for o in t: - o.join() + # from .cython2 import fib + + # a = [0] * N + t = [ + threading.Thread( + target=functools.partial( + cython2.fib, + W, + ) + ) + for k in range(N) + ] + + for o in t: + o.start() + for o in t: + o.join() diff --git a/python/online/fxreader/pr34/tasks/cython2.py b/python/online/fxreader/pr34/tasks/cython2.py index 
7b2fb48..a3baa86 100644 --- a/python/online/fxreader/pr34/tasks/cython2.py +++ b/python/online/fxreader/pr34/tasks/cython2.py @@ -1,10 +1,12 @@ import time + def fib(n: int) -> int: - if n <= 1: - return n - else: - return fib(n - 2) + fib(n - 1) + if n <= 1: + return n + else: + return fib(n - 2) + fib(n - 1) + t0 = time.time() fib(32) diff --git a/python/online/fxreader/pr34/tasks/jigsaw_toxic.py b/python/online/fxreader/pr34/tasks/jigsaw_toxic.py index 8cfe331..2daac22 100644 --- a/python/online/fxreader/pr34/tasks/jigsaw_toxic.py +++ b/python/online/fxreader/pr34/tasks/jigsaw_toxic.py @@ -5,378 +5,334 @@ import os def kernel_1_sample_scrap( - max_articles=None, + max_articles=None, ): - if max_articles is None: - max_articles = 1 + if max_articles is None: + max_articles = 1 - with requests.get( - 'https://dev.to', - ) as p: - t10 = p.content.decode('utf-8') - t11 = pyquery.PyQuery(t10) - t13 = t11('.crayons-story__title > a') - t12 = [ - pyquery.PyQuery(o).attr('href') - for o in t13 - ] - pprint.pprint(t12) - t14 = [ - 'https://dev.to/%s' % o - for o in t12 - ] + with requests.get( + 'https://dev.to', + ) as p: + t10 = p.content.decode('utf-8') + t11 = pyquery.PyQuery(t10) + t13 = t11('.crayons-story__title > a') + t12 = [pyquery.PyQuery(o).attr('href') for o in t13] + pprint.pprint(t12) + t14 = ['https://dev.to/%s' % o for o in t12] - t8 = [] - for t7 in t14[:max_articles]: - with requests.get( - t7, - ) as p: - t1 = p.content.decode('utf-8') - t2 = pyquery.PyQuery(t1) - t3 = t2('.comment__content') - t6 = [] - for o in t3: - t4 = pyquery.PyQuery(o) - t5 = t4('.comment__header > a').attr['href'] - t9 = t4('.comment__body').text() - t6.append( - dict( - author=t5, - text=t9, - ) - ) + t8 = [] + for t7 in t14[:max_articles]: + with requests.get( + t7, + ) as p: + t1 = p.content.decode('utf-8') + t2 = pyquery.PyQuery(t1) + t3 = t2('.comment__content') + t6 = [] + for o in t3: + t4 = pyquery.PyQuery(o) + t5 = t4('.comment__header > a').attr['href'] + t9 = t4('.comment__body').text() + t6.append( + dict( + author=t5, + text=t9, + ) + ) - #pprint.pprint(t3) - pprint.pprint(t6) - t8.append( - dict( - article=t7, - comments=t6, - ) - ) + # pprint.pprint(t3) + pprint.pprint(t6) + t8.append( + dict( + article=t7, + comments=t6, + ) + ) - pprint.pprint(t8) + pprint.pprint(t8) + + return dict( + t1=t1, + t2=t2, + t3=t3, + t6=t6, + t8=t8, + t12=t12, + ) - return dict( - t1=t1, - t2=t2, - t3=t3, - t6=t6, - t8=t8, - t12=t12, - ) def kernel_2(): - import numpy as np # linear algebra - import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv) - from tqdm import tqdm - from sklearn.model_selection import train_test_split - import tensorflow as tf - from keras.models import Sequential - from keras.layers.recurrent import LSTM, GRU,SimpleRNN - from keras.layers.core import Dense, Activation, Dropout - from keras.layers.embeddings import Embedding - from keras.layers.normalization import BatchNormalization - from keras.utils import np_utils - from sklearn import preprocessing, decomposition, model_selection, metrics, pipeline - from keras.layers import GlobalMaxPooling1D, Conv1D, MaxPooling1D, Flatten, Bidirectional, SpatialDropout1D - from keras.preprocessing import sequence, text - from keras.callbacks import EarlyStopping + import numpy as np # linear algebra + import pandas as pd # data processing, CSV file I/O (e.g. 
pd.read_csv) + from tqdm import tqdm + from sklearn.model_selection import train_test_split + import tensorflow as tf + from keras.models import Sequential + from keras.layers.recurrent import LSTM, GRU, SimpleRNN + from keras.layers.core import Dense, Activation, Dropout + from keras.layers.embeddings import Embedding + from keras.layers.normalization import BatchNormalization + from keras.utils import np_utils + from sklearn import preprocessing, decomposition, model_selection, metrics, pipeline + from keras.layers import GlobalMaxPooling1D, Conv1D, MaxPooling1D, Flatten, Bidirectional, SpatialDropout1D + from keras.preprocessing import sequence, text + from keras.callbacks import EarlyStopping + import matplotlib.pyplot as plt + import seaborn as sns - import matplotlib.pyplot as plt - import seaborn as sns - #%matplotlib inline - from plotly import graph_objs as go - import plotly.express as px - import plotly.figure_factory as ff + # %matplotlib inline + from plotly import graph_objs as go + import plotly.express as px + import plotly.figure_factory as ff - # %% [markdown] - # # Configuring TPU's - # - # For this version of Notebook we will be using TPU's as we have to built a BERT Model + # %% [markdown] + # # Configuring TPU's + # + # For this version of Notebook we will be using TPU's as we have to built a BERT Model - # %% [code] - # Detect hardware, return appropriate distribution strategy - try: - # TPU detection. No parameters necessary if TPU_NAME environment variable is - # set: this is always the case on Kaggle. - tpu = tf.distribute.cluster_resolver.TPUClusterResolver() - print('Running on TPU ', tpu.master()) - except ValueError: - tpu = None + # %% [code] + # Detect hardware, return appropriate distribution strategy + try: + # TPU detection. No parameters necessary if TPU_NAME environment variable is + # set: this is always the case on Kaggle. + tpu = tf.distribute.cluster_resolver.TPUClusterResolver() + print('Running on TPU ', tpu.master()) + except ValueError: + tpu = None - if tpu: - tf.config.experimental_connect_to_cluster(tpu) - tf.tpu.experimental.initialize_tpu_system(tpu) - strategy = tf.distribute.experimental.TPUStrategy(tpu) - else: - # Default distribution strategy in Tensorflow. Works on CPU and single GPU. - strategy = tf.distribute.get_strategy() + if tpu: + tf.config.experimental_connect_to_cluster(tpu) + tf.tpu.experimental.initialize_tpu_system(tpu) + strategy = tf.distribute.experimental.TPUStrategy(tpu) + else: + # Default distribution strategy in Tensorflow. Works on CPU and single GPU. 
+ strategy = tf.distribute.get_strategy() - print("REPLICAS: ", strategy.num_replicas_in_sync) + print('REPLICAS: ', strategy.num_replicas_in_sync) - # %% [code] - train = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv') - validation = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv') - test = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv') + # %% [code] + train = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv') + validation = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv') + test = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv') - # %% [markdown] - # We will drop the other columns and approach this problem as a Binary Classification Problem and also we will have our exercise done on a smaller subsection of the dataset(only 12000 data points) to make it easier to train the models + # %% [markdown] + # We will drop the other columns and approach this problem as a Binary Classification Problem and also we will have our exercise done on a smaller subsection of the dataset(only 12000 data points) to make it easier to train the models - # %% [code] - train.drop(['severe_toxic','obscene','threat','insult','identity_hate'],axis=1,inplace=True) + # %% [code] + train.drop(['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'], axis=1, inplace=True) - # %% [code] - train = train.loc[:12000,:] - train.shape + # %% [code] + train = train.loc[:12000, :] + train.shape - # %% [markdown] - # We will check the maximum number of words that can be present in a comment , this will help us in padding later + # %% [markdown] + # We will check the maximum number of words that can be present in a comment , this will help us in padding later - # %% [code] - train['comment_text'].apply(lambda x:len(str(x).split())).max() + # %% [code] + train['comment_text'].apply(lambda x: len(str(x).split())).max() + # %% [markdown] + # ### Data Preparation - # %% [markdown] - # ### Data Preparation + # %% [code] + xtrain, xvalid, ytrain, yvalid = train_test_split( + train.comment_text.values, train.toxic.values, stratify=train.toxic.values, random_state=42, test_size=0.2, shuffle=True + ) - # %% [code] - xtrain, xvalid, ytrain, yvalid = train_test_split(train.comment_text.values, train.toxic.values, - stratify=train.toxic.values, - random_state=42, - test_size=0.2, shuffle=True) + # %% [markdown] + # # Before We Begin + # + # Before we Begin If you are a complete starter with NLP and never worked with text data, I am attaching a few kernels that will serve as a starting point of your journey + # * https://www.kaggle.com/arthurtok/spooky-nlp-and-topic-modelling-tutorial + # * https://www.kaggle.com/abhishek/approaching-almost-any-nlp-problem-on-kaggle + # + # If you want a more basic dataset to practice with here is another kernel which I wrote: + # * https://www.kaggle.com/tanulsingh077/what-s-cooking + # + # Below are some Resources to get started with basic level Neural Networks, It will help us to easily understand the upcoming parts + # * https://www.youtube.com/watch?v=aircAruvnKk&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv + # * https://www.youtube.com/watch?v=IHZwWFHWa-w&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=2 + # * https://www.youtube.com/watch?v=Ilg3gGewQ5U&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=3 + # * 
https://www.youtube.com/watch?v=tIeHLnjs5U8&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=4 + # + # For Learning how to visualize test data and what to use view: + # * https://www.kaggle.com/tanulsingh077/twitter-sentiment-extaction-analysis-eda-and-model + # * https://www.kaggle.com/jagangupta/stop-the-s-toxic-comments-eda - # %% [markdown] - # # Before We Begin - # - # Before we Begin If you are a complete starter with NLP and never worked with text data, I am attaching a few kernels that will serve as a starting point of your journey - # * https://www.kaggle.com/arthurtok/spooky-nlp-and-topic-modelling-tutorial - # * https://www.kaggle.com/abhishek/approaching-almost-any-nlp-problem-on-kaggle - # - # If you want a more basic dataset to practice with here is another kernel which I wrote: - # * https://www.kaggle.com/tanulsingh077/what-s-cooking - # - # Below are some Resources to get started with basic level Neural Networks, It will help us to easily understand the upcoming parts - # * https://www.youtube.com/watch?v=aircAruvnKk&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv - # * https://www.youtube.com/watch?v=IHZwWFHWa-w&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=2 - # * https://www.youtube.com/watch?v=Ilg3gGewQ5U&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=3 - # * https://www.youtube.com/watch?v=tIeHLnjs5U8&list=PL_h2yd2CGtBHEKwEH5iqTZH85wLS-eUzv&index=4 - # - # For Learning how to visualize test data and what to use view: - # * https://www.kaggle.com/tanulsingh077/twitter-sentiment-extaction-analysis-eda-and-model - # * https://www.kaggle.com/jagangupta/stop-the-s-toxic-comments-eda + # %% [markdown] + # # Simple RNN + # + # ## Basic Overview + # + # What is a RNN? + # + # Recurrent Neural Network(RNN) are a type of Neural Network where the output from previous step are fed as input to the current step. In traditional neural networks, all the inputs and outputs are independent of each other, but in cases like when it is required to predict the next word of a sentence, the previous words are required and hence there is a need to remember the previous words. Thus RNN came into existence, which solved this issue with the help of a Hidden Layer. + # + # Why RNN's? + # + # https://www.quora.com/Why-do-we-use-an-RNN-instead-of-a-simple-neural-network + # + # ## In-Depth Understanding + # + # * https://medium.com/mindorks/understanding-the-recurrent-neural-network-44d593f112a2 + # * https://www.youtube.com/watch?v=2E65LDnM2cA&list=PL1F3ABbhcqa3BBWo170U4Ev2wfsF7FN8l + # * https://www.d2l.ai/chapter_recurrent-neural-networks/rnn.html + # + # ## Code Implementation + # + # So first I will implement the and then I will explain the code step by step - # %% [markdown] - # # Simple RNN - # - # ## Basic Overview - # - # What is a RNN? - # - # Recurrent Neural Network(RNN) are a type of Neural Network where the output from previous step are fed as input to the current step. In traditional neural networks, all the inputs and outputs are independent of each other, but in cases like when it is required to predict the next word of a sentence, the previous words are required and hence there is a need to remember the previous words. Thus RNN came into existence, which solved this issue with the help of a Hidden Layer. - # - # Why RNN's? 
- # - # https://www.quora.com/Why-do-we-use-an-RNN-instead-of-a-simple-neural-network - # - # ## In-Depth Understanding - # - # * https://medium.com/mindorks/understanding-the-recurrent-neural-network-44d593f112a2 - # * https://www.youtube.com/watch?v=2E65LDnM2cA&list=PL1F3ABbhcqa3BBWo170U4Ev2wfsF7FN8l - # * https://www.d2l.ai/chapter_recurrent-neural-networks/rnn.html - # - # ## Code Implementation - # - # So first I will implement the and then I will explain the code step by step + # %% [code] + # using keras tokenizer here + token = text.Tokenizer(num_words=None) + max_len = 1500 - # %% [code] - # using keras tokenizer here - token = text.Tokenizer(num_words=None) - max_len = 1500 + token.fit_on_texts(list(xtrain) + list(xvalid)) + xtrain_seq = token.texts_to_sequences(xtrain) + xvalid_seq = token.texts_to_sequences(xvalid) - token.fit_on_texts(list(xtrain) + list(xvalid)) - xtrain_seq = token.texts_to_sequences(xtrain) - xvalid_seq = token.texts_to_sequences(xvalid) + # zero pad the sequences + xtrain_pad = sequence.pad_sequences(xtrain_seq, maxlen=max_len) + xvalid_pad = sequence.pad_sequences(xvalid_seq, maxlen=max_len) - #zero pad the sequences - xtrain_pad = sequence.pad_sequences(xtrain_seq, maxlen=max_len) - xvalid_pad = sequence.pad_sequences(xvalid_seq, maxlen=max_len) + word_index = token.word_index - word_index = token.word_index + # %% [code] + # %%time + with strategy.scope(): + # A simpleRNN without any pretrained embeddings and one dense layer + model = Sequential() + model.add(Embedding(len(word_index) + 1, 300, input_length=max_len)) + model.add(SimpleRNN(100)) + model.add(Dense(1, activation='sigmoid')) + model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) - # %% [code] - #%%time - with strategy.scope(): - # A simpleRNN without any pretrained embeddings and one dense layer - model = Sequential() - model.add(Embedding(len(word_index) + 1, - 300, - input_length=max_len)) - model.add(SimpleRNN(100)) - model.add(Dense(1, activation='sigmoid')) - model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) + model.summary() - model.summary() - - return dict( - model=model, - xtrain_pad=xtrain_pad, - strategy=strategy, - xvalid_pad=xvalid_pad, - xtrain_seq=xtrain_seq, - token=token, - max_len=max_len, - xtrain=xtrain, - xvalid=xvalid, - ytrain=ytrain, - yvalid=yvalid, - ) + return dict( + model=model, + xtrain_pad=xtrain_pad, + strategy=strategy, + xvalid_pad=xvalid_pad, + xtrain_seq=xtrain_seq, + token=token, + max_len=max_len, + xtrain=xtrain, + xvalid=xvalid, + ytrain=ytrain, + yvalid=yvalid, + ) def kernel_3( - o_2, - nb_epochs=None, + o_2, + nb_epochs=None, ): - if nb_epochs is None: - nb_epochs = 5 + if nb_epochs is None: + nb_epochs = 5 - # %% [markdown] - # Writing a function for getting auc score for validation + # %% [markdown] + # Writing a function for getting auc score for validation - # %% [code] - def roc_auc(predictions,target): - import sklearn.metrics - ''' + # %% [code] + def roc_auc(predictions, target): + import sklearn.metrics + + """ This methods returns the AUC Score when given the Predictions and Labels - ''' + """ - fpr, tpr, thresholds = sklearn.metrics.roc_curve(target, predictions) - roc_auc = sklearn.metrics.auc(fpr, tpr) - return roc_auc + fpr, tpr, thresholds = sklearn.metrics.roc_curve(target, predictions) + roc_auc = sklearn.metrics.auc(fpr, tpr) + return roc_auc - # %% [code] - if os.path.exists('model.h5'): - o_2['model'].load_weights('model.h5') - else: - o_2['model'].fit( - 
o_2['xtrain_pad'], - o_2['ytrain'], - nb_epoch=nb_epochs, - batch_size=64*o_2['strategy'].num_replicas_in_sync - ) #Multiplying by Strategy to run on TPU's - o_2['model'].save_weights('model.h5') + # %% [code] + if os.path.exists('model.h5'): + o_2['model'].load_weights('model.h5') + else: + o_2['model'].fit( + o_2['xtrain_pad'], o_2['ytrain'], nb_epoch=nb_epochs, batch_size=64 * o_2['strategy'].num_replicas_in_sync + ) # Multiplying by Strategy to run on TPU's + o_2['model'].save_weights('model.h5') - # %% [code] - scores = o_2['model'].predict(o_2['xvalid_pad']) - print( - "Auc: %.2f%%" % ( - roc_auc( - scores, - o_2['yvalid'] - ) - ) - ) + # %% [code] + scores = o_2['model'].predict(o_2['xvalid_pad']) + print('Auc: %.2f%%' % (roc_auc(scores, o_2['yvalid']))) - # %% [code] - scores_model = [] - scores_model.append( - { - 'Model': 'SimpleRNN', - 'AUC_Score': roc_auc( - scores, - o_2['yvalid'] - ) - } - ) + # %% [code] + scores_model = [] + scores_model.append({'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])}) - # %% [markdown] - # ## Code Explanantion - # * Tokenization

- # So if you have watched the videos and referred to the links, you would know that in an RNN we input a sentence word by word. We represent every word as a one-hot vector of dimension: number of words in the vocab + 1.
- # What the keras Tokenizer does is take all the unique words in the corpus, form a dictionary with words as keys and their number of occurrences as values, and then sort the dictionary in descending order of counts. It then assigns the first entry the value 1, the second the value 2, and so on. So let's suppose the word 'the' occurred the most in the corpus: it would be assigned index 1, and the vector representing 'the' would be a one-hot vector with value 1 at position 1 and the rest zeroes.
- # Try printing first 2 elements of xtrain_seq you will see every word is represented as a digit now + # %% [markdown] + # ## Code Explanantion + # * Tokenization

+ # So if you have watched the videos and referred to the links, you would know that in an RNN we input a sentence word by word. We represent every word as a one-hot vector of dimension: number of words in the vocab + 1.
+ # What the keras Tokenizer does is take all the unique words in the corpus, form a dictionary with words as keys and their number of occurrences as values, and then sort the dictionary in descending order of counts. It then assigns the first entry the value 1, the second the value 2, and so on. So let's suppose the word 'the' occurred the most in the corpus: it would be assigned index 1, and the vector representing 'the' would be a one-hot vector with value 1 at position 1 and the rest zeroes.
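+ #
+ # As a quick illustration of the indexing described above (a minimal sketch on a
+ # hypothetical toy corpus, not data from this competition), the Tokenizer behaves
+ # roughly like this:
+ #
+ #   toy = text.Tokenizer(num_words=None)
+ #   toy.fit_on_texts(['the cat sat', 'the dog sat'])
+ #   toy.word_index                        # {'the': 1, 'sat': 2, 'cat': 3, 'dog': 4}
+ #   toy.texts_to_sequences(['the cat'])   # [[1, 3]]
+ #
+ # 'the' and 'sat' both occur twice, so they take the lowest indices; ties keep
+ # first-seen order. This is why xtrain_seq below is a list of integer sequences.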
+ # Try printing first 2 elements of xtrain_seq you will see every word is represented as a digit now + + # %% [code] + o_2['xtrain_seq'][:1] - # %% [code] - o_2['xtrain_seq'][:1] def kernel_4( - o_2, - input_texts=None, + o_2, + input_texts=None, ): - import keras.preprocessing.sequence + import keras.preprocessing.sequence - if input_texts is None: - input_texts = [ - 'blahb blahb blah', - 'Hello World!', - 'This is very good!', - 'A very non toxic comment! This is so polite and polished one!' - ] + if input_texts is None: + input_texts = ['blahb blahb blah', 'Hello World!', 'This is very good!', 'A very non toxic comment! This is so polite and polished one!'] - t6 = [] - for o in input_texts: - t1 = o - t2 = o_2['token'].texts_to_sequences( - [t1], - ) - t3 = keras.preprocessing.sequence.pad_sequences( - t2, - maxlen=o_2['max_len'] - ) - t4 = o_2['model'].predict( - t3, - ) - t6.append( - dict( - text=o, - score=t4[0][0], - ) - ) - pprint.pprint( - dict( - t1=t1, - t2=t2, - t3=t3, - t4=t4, - ) - ) - pprint.pprint(t6) + t6 = [] + for o in input_texts: + t1 = o + t2 = o_2['token'].texts_to_sequences( + [t1], + ) + t3 = keras.preprocessing.sequence.pad_sequences(t2, maxlen=o_2['max_len']) + t4 = o_2['model'].predict( + t3, + ) + t6.append( + dict( + text=o, + score=t4[0][0], + ) + ) + pprint.pprint( + dict( + t1=t1, + t2=t2, + t3=t3, + t4=t4, + ) + ) + pprint.pprint(t6) + + return dict( + t6=t6, + ) - return dict( - t6=t6, - ) def kernel_5( - o_1=None, - o_2=None, + o_1=None, + o_2=None, ): - if o_1 is None: - o_1 = kernel_1_sample_scrap(max_articles=50) + if o_1 is None: + o_1 = kernel_1_sample_scrap(max_articles=50) - if o_2 is None: - o_2 = kernel_2() - o_3 = kernel_3( - o_2=o_2, - nb_epochs=1 - ) + if o_2 is None: + o_2 = kernel_2() + o_3 = kernel_3(o_2=o_2, nb_epochs=1) - t1 = sum( - [ - [ - o['text'] for o in o2['comments'] - ] for o2 in o_1['t8'] - ], [] - ) + t1 = sum([[o['text'] for o in o2['comments']] for o2 in o_1['t8']], []) - t2 = kernel_4( - o_2=o_2, - input_texts=t1 - ) + t2 = kernel_4(o_2=o_2, input_texts=t1) - t3 = sorted( - t2['t6'], - key=lambda x: x['score'], - ) - pprint.pprint(t3) + t3 = sorted( + t2['t6'], + key=lambda x: x['score'], + ) + pprint.pprint(t3) diff --git a/python/online/fxreader/pr34/tasks/mlb_player.py b/python/online/fxreader/pr34/tasks/mlb_player.py index ff28e08..5a1fd8a 100644 --- a/python/online/fxreader/pr34/tasks/mlb_player.py +++ b/python/online/fxreader/pr34/tasks/mlb_player.py @@ -16,157 +16,119 @@ import pandas import pickle import subprocess + def kernel_1(): - t4 = 'kernel_1-t3.dat' + t4 = 'kernel_1-t3.dat' - def preprocess(t4): - t1 = '/kaggle/input/mlb-player-digital-engagement-forecasting' - t2 = glob.glob( - os.path.join( - t1, - '*.csv' - ) - ) + def preprocess(t4): + t1 = '/kaggle/input/mlb-player-digital-engagement-forecasting' + t2 = glob.glob(os.path.join(t1, '*.csv')) - t3 = { - o : pandas.read_csv(o).to_xarray() - for o in t2 - } + t3 = {o: pandas.read_csv(o).to_xarray() for o in t2} - with io.open(t4, 'wb') as f: - pickle.dump(t3, f) + with io.open(t4, 'wb') as f: + pickle.dump(t3, f) - if not os.path.exists(t4): - preprocess(t4=t4) + if not os.path.exists(t4): + preprocess(t4=t4) - with io.open(t4, 'rb') as f: - t3 = pickle.load(f) + with io.open(t4, 'rb') as f: + t3 = pickle.load(f) + return dict( + t3=t3, + ) - return dict( - t3=t3, - ) def kernel_2( - o_1=None, + o_1=None, ): - t1 = {} + t1 = {} - for k in [ - 'playerTwitterFollowers', - 'teamTwitterFollowers', - 'games', - 'events' - ]: - t4 = '%s.nc' % k - if not 
os.path.exists(t4): - print('started %s' % t4) - t2 = '/kaggle/input/mlb-player-digital-engagement-forecasting/train.csv' - t3 = pandas.DataFrame( - sum( - [ - json.loads(o) - for o in o_1['t3'][t2][k].values - if isinstance(o, str) - ], - [] - ) - ).to_xarray() - t3.to_netcdf(t4) - print('cached %s' % t4) + for k in ['playerTwitterFollowers', 'teamTwitterFollowers', 'games', 'events']: + t4 = '%s.nc' % k + if not os.path.exists(t4): + print('started %s' % t4) + t2 = '/kaggle/input/mlb-player-digital-engagement-forecasting/train.csv' + t3 = pandas.DataFrame(sum([json.loads(o) for o in o_1['t3'][t2][k].values if isinstance(o, str)], [])).to_xarray() + t3.to_netcdf(t4) + print('cached %s' % t4) - if k == 'events': - t5 = '%s-v2.nc' % k - if not os.path.exists(t5): - t2 = xarray.load_dataset(t4) - t3 = t2.sel( - index=numpy.arange( - 2017653 - 10 * 1000, - 2017653 + 1 - ) - ) - t3.to_netcdf(t5) - t1[k] = xarray.load_dataset(t5) - print('loaded %s' % t5) - else: - t1[k] = xarray.load_dataset(t4) - print('loaded %s' % t4) + if k == 'events': + t5 = '%s-v2.nc' % k + if not os.path.exists(t5): + t2 = xarray.load_dataset(t4) + t3 = t2.sel(index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1)) + t3.to_netcdf(t5) + t1[k] = xarray.load_dataset(t5) + print('loaded %s' % t5) + else: + t1[k] = xarray.load_dataset(t4) + print('loaded %s' % t4) + return dict( + t1=t1, + ) - return dict( - t1=t1, - ) def kernel_3(should_exist=None): - if should_exist is None: - should_exist = False + if should_exist is None: + should_exist = False - t3 = [ - ('playerTwitterFollowers', None), - ('teamTwitterFollowers', None), - ('games', None), - ('events', 'events-v2.nc'), - ] + t3 = [ + ('playerTwitterFollowers', None), + ('teamTwitterFollowers', None), + ('games', None), + ('events', 'events-v2.nc'), + ] - o_1 = None - o_2 = None + o_1 = None + o_2 = None - t4 = '/kaggle/input/garbage' - t5 = {} - for k, v in t3: - if v is None: - t1 = os.path.join( - t4, - '%s.nc' % k, - ) - else: - t1 = os.path.join( - t4, - v, - ) + t4 = '/kaggle/input/garbage' + t5 = {} + for k, v in t3: + if v is None: + t1 = os.path.join( + t4, + '%s.nc' % k, + ) + else: + t1 = os.path.join( + t4, + v, + ) - if os.path.exists(t1): - t2 = xarray.load_dataset(t1) - else: - if should_exist: - pprint.pprint([k, v, t1]) - raise NotImplementedError + if os.path.exists(t1): + t2 = xarray.load_dataset(t1) + else: + if should_exist: + pprint.pprint([k, v, t1]) + raise NotImplementedError - if o_1 is None: - o_1 = kernel_1() - if o_2 is None: - o_2 = kernel_2( - o_1=o_1 - ) + if o_1 is None: + o_1 = kernel_1() + if o_2 is None: + o_2 = kernel_2(o_1=o_1) - t2 = o_2['events'] - t5[k] = t2 + t2 = o_2['events'] + t5[k] = t2 + + return dict( + t5=t5, + ) - return dict( - t5=t5, - ) def kernel_4( - o_3=None, + o_3=None, ): - [ - print( - o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4) - ) - for k in range(-10, -1) - ] + [print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)] - [ - print( - o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4) - ) - for k in range(-10, -1) - ] + [print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)] + t4 = 'https://www.youtube.com/watch?v=reaC7BHgL3M' - t4 = 'https://www.youtube.com/watch?v=reaC7BHgL3M' - - r""" + r""" { "gamePk":634280, "gameType":"R", @@ -203,1034 +165,1059 @@ def kernel_4( } """ - t1 = numpy.where(o_3['t5']['events']['gamePk'] == 634280)[0] - t5 = o_3['t5']['events'].index.data - t6 = t5[t1] - t2 = 
o_3['t5']['events'].sel(index=t6) - t3 = o_3['t5']['games'].to_dataframe().iloc[-2].to_dict() - pprint.pprint(t3) - assert t3['gamePk'] == 634280 + t1 = numpy.where(o_3['t5']['events']['gamePk'] == 634280)[0] + t5 = o_3['t5']['events'].index.data + t6 = t5[t1] + t2 = o_3['t5']['events'].sel(index=t6) + t3 = o_3['t5']['games'].to_dataframe().iloc[-2].to_dict() + pprint.pprint(t3) + assert t3['gamePk'] == 634280 - t7 = 'https://www.youtube.com/watch?v=T0MUK91ZWys' + t7 = 'https://www.youtube.com/watch?v=T0MUK91ZWys' + + return dict( + t2=t2, + t3=t3, + t4=t4, + t7=t7, + ) - return dict( - t2=t2, - t3=t3, - t4=t4, - t7=t7, - ) def kernel_5(o_4): - for o in [o_4['t4'], o_4['t7']]: - subprocess.check_call( - [ - 'youtube-dl', - '-f', - '18', - o, - ] - ) + for o in [o_4['t4'], o_4['t7']]: + subprocess.check_call( + [ + 'youtube-dl', + '-f', + '18', + o, + ] + ) + def kernel_12(): - import easyocr - t6 = easyocr.Reader(['en']) + import easyocr + + t6 = easyocr.Reader(['en']) + + return dict( + t6=t6, + ) - return dict( - t6=t6, - ) def kernel_6( - o_7=None, - o_10=None, - o_12=None, - max_frames=None, + o_7=None, + o_10=None, + o_12=None, + max_frames=None, ): - if max_frames is None: - max_frames = 10 + if max_frames is None: + max_frames = 10 - import tqdm - import cv2 + import tqdm + import cv2 - t1 = glob.glob('*.mp4') + t1 = glob.glob('*.mp4') - t8 = [] - for o in t1: - t7 = [] - t2 = None - try: - t2 = cv2.VideoCapture(o) - for k in tqdm.tqdm(range(max_frames)): - t3 = t2.read() - assert t3[0] - t4 = t3[1] - if not o_12 is None: - t5 = o_12['t6'].readtext(t4) - else: - t5 = None + t8 = [] + for o in t1: + t7 = [] + t2 = None + try: + t2 = cv2.VideoCapture(o) + for k in tqdm.tqdm(range(max_frames)): + t3 = t2.read() + assert t3[0] + t4 = t3[1] + if not o_12 is None: + t5 = o_12['t6'].readtext(t4) + else: + t5 = None - if not o_7 is None: - t10 = o_7['estimate_pose'](t4) - else: - t10 = None - if not o_10 is None: - t11 = o_10['model'](t4).pandas().xywhn - else: - t11 = None + if not o_7 is None: + t10 = o_7['estimate_pose'](t4) + else: + t10 = None + if not o_10 is None: + t11 = o_10['model'](t4).pandas().xywhn + else: + t11 = None - t7.append( - dict( - frame_id=k, - t5=t5, - t10=t10, - t11=t11, - ), - ) - finally: - if not t2 is None: - t2.release() - t8.append( - dict( - video_path=o, - frames=t7, - ) - ) + t7.append( + dict( + frame_id=k, + t5=t5, + t10=t10, + t11=t11, + ), + ) + finally: + if not t2 is None: + t2.release() + t8.append( + dict( + video_path=o, + frames=t7, + ) + ) - t9 = [] - for o in t1: - cap = None + t9 = [] + for o in t1: + cap = None - try: - cap = cv2.VideoCapture(o) - fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" - frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) - duration = frame_count/fps - finally: - if not cap is None: - cap.release() + try: + cap = cv2.VideoCapture(o) + fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" + frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) + duration = frame_count / fps + finally: + if not cap is None: + cap.release() - t9.append( - dict( - video_path=o, - fps=fps, - frame_count=frame_count, - duration=duration, - ) - ) + t9.append( + dict( + video_path=o, + fps=fps, + frame_count=frame_count, + duration=duration, + ) + ) + return dict( + t8=t8, + t9=t9, + ) - return dict( - t8=t8, - t9=t9, - ) def kernel_7( - use_gpu=None, + use_gpu=None, ): - #!/usr/bin/env python - # coding: utf-8 - - # - # - # NOTE: Turn on Internet and GPU + #!/usr/bin/env python + # coding: 
utf-8 + + # + # + # NOTE: Turn on Internet and GPU + + # The code hidden below handles all the imports and function definitions (the heavy lifting). If you're a beginner I'd advice you skip this for now. When you are able to understand the rest of the code, come back here and understand each function to get a deeper knowledge. + + # In[1]: + + # !/usr/bin/env python3 + # coding=utf-8 + # author=dave.fang@outlook.com + # create=20171225 + + import os + import pprint + import cv2 + import sys + import math + import time + import tempfile + import numpy as np + import matplotlib.pyplot as plt + + import torch + import torch.nn as nn + import torch.nn.parallel + import torch.backends.cudnn as cudnn + import torch.optim as optim + import torchvision.transforms as transforms + import torchvision.datasets as datasets + import torchvision.models as models + + from torch.autograd import Variable + + from scipy.ndimage.filters import gaussian_filter + + # get_ipython().run_line_magic('matplotlib', 'inline') + # get_ipython().run_line_magic('config', "InlineBackend.figure_format = 'retina'") + + # find connection in the specified sequence, center 29 is in the position 15 + limb_seq = [ + [2, 3], + [2, 6], + [3, 4], + [4, 5], + [6, 7], + [7, 8], + [2, 9], + [9, 10], + [10, 11], + [2, 12], + [12, 13], + [13, 14], + [2, 1], + [1, 15], + [15, 17], + [1, 16], + [16, 18], + [3, 17], + [6, 18], + ] + + # the middle joints heatmap correpondence + map_ids = [ + [31, 32], + [39, 40], + [33, 34], + [35, 36], + [41, 42], + [43, 44], + [19, 20], + [21, 22], + [23, 24], + [25, 26], + [27, 28], + [29, 30], + [47, 48], + [49, 50], + [53, 54], + [51, 52], + [55, 56], + [37, 38], + [45, 46], + ] + + # these are the colours for the 18 body points + colors = [ + [255, 0, 0], + [255, 85, 0], + [255, 170, 0], + [255, 255, 0], + [170, 255, 0], + [85, 255, 0], + [0, 255, 0], + [0, 255, 85], + [0, 255, 170], + [0, 255, 255], + [0, 170, 255], + [0, 85, 255], + [0, 0, 255], + [85, 0, 255], + [170, 0, 255], + [255, 0, 255], + [255, 0, 170], + [255, 0, 85], + ] + + class PoseEstimation(nn.Module): + def __init__(self, model_dict): + super(PoseEstimation, self).__init__() + + self.model0 = model_dict['block_0'] + self.model1_1 = model_dict['block1_1'] + self.model2_1 = model_dict['block2_1'] + self.model3_1 = model_dict['block3_1'] + self.model4_1 = model_dict['block4_1'] + self.model5_1 = model_dict['block5_1'] + self.model6_1 = model_dict['block6_1'] + + self.model1_2 = model_dict['block1_2'] + self.model2_2 = model_dict['block2_2'] + self.model3_2 = model_dict['block3_2'] + self.model4_2 = model_dict['block4_2'] + self.model5_2 = model_dict['block5_2'] + self.model6_2 = model_dict['block6_2'] + + def forward(self, x): + out1 = self.model0(x) + + out1_1 = self.model1_1(out1) + out1_2 = self.model1_2(out1) + out2 = torch.cat([out1_1, out1_2, out1], 1) + + out2_1 = self.model2_1(out2) + out2_2 = self.model2_2(out2) + out3 = torch.cat([out2_1, out2_2, out1], 1) + + out3_1 = self.model3_1(out3) + out3_2 = self.model3_2(out3) + out4 = torch.cat([out3_1, out3_2, out1], 1) + + out4_1 = self.model4_1(out4) + out4_2 = self.model4_2(out4) + out5 = torch.cat([out4_1, out4_2, out1], 1) + + out5_1 = self.model5_1(out5) + out5_2 = self.model5_2(out5) + out6 = torch.cat([out5_1, out5_2, out1], 1) + + out6_1 = self.model6_1(out6) + out6_2 = self.model6_2(out6) + + return out6_1, out6_2 + + def make_layers(layer_dict): + layers = [] + + for i in range(len(layer_dict) - 1): + layer = layer_dict[i] + for k in layer: + v = layer[k] + if 'pool' in 
k: + layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] + else: + conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + layers += [conv2d, nn.ReLU(inplace=True)] + layer = list(layer_dict[-1].keys()) + k = layer[0] + v = layer_dict[-1][k] + + conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + layers += [conv2d] + + return nn.Sequential(*layers) + + def get_pose_model(): + blocks = {} + + block_0 = [ + {'conv1_1': [3, 64, 3, 1, 1]}, + {'conv1_2': [64, 64, 3, 1, 1]}, + {'pool1_stage1': [2, 2, 0]}, + {'conv2_1': [64, 128, 3, 1, 1]}, + {'conv2_2': [128, 128, 3, 1, 1]}, + {'pool2_stage1': [2, 2, 0]}, + {'conv3_1': [128, 256, 3, 1, 1]}, + {'conv3_2': [256, 256, 3, 1, 1]}, + {'conv3_3': [256, 256, 3, 1, 1]}, + {'conv3_4': [256, 256, 3, 1, 1]}, + {'pool3_stage1': [2, 2, 0]}, + {'conv4_1': [256, 512, 3, 1, 1]}, + {'conv4_2': [512, 512, 3, 1, 1]}, + {'conv4_3_CPM': [512, 256, 3, 1, 1]}, + {'conv4_4_CPM': [256, 128, 3, 1, 1]}, + ] + + blocks['block1_1'] = [ + {'conv5_1_CPM_L1': [128, 128, 3, 1, 1]}, + {'conv5_2_CPM_L1': [128, 128, 3, 1, 1]}, + {'conv5_3_CPM_L1': [128, 128, 3, 1, 1]}, + {'conv5_4_CPM_L1': [128, 512, 1, 1, 0]}, + {'conv5_5_CPM_L1': [512, 38, 1, 1, 0]}, + ] + + blocks['block1_2'] = [ + {'conv5_1_CPM_L2': [128, 128, 3, 1, 1]}, + {'conv5_2_CPM_L2': [128, 128, 3, 1, 1]}, + {'conv5_3_CPM_L2': [128, 128, 3, 1, 1]}, + {'conv5_4_CPM_L2': [128, 512, 1, 1, 0]}, + {'conv5_5_CPM_L2': [512, 19, 1, 1, 0]}, + ] + + for i in range(2, 7): + blocks['block%d_1' % i] = [ + {'Mconv1_stage%d_L1' % i: [185, 128, 7, 1, 3]}, + {'Mconv2_stage%d_L1' % i: [128, 128, 7, 1, 3]}, + {'Mconv3_stage%d_L1' % i: [128, 128, 7, 1, 3]}, + {'Mconv4_stage%d_L1' % i: [128, 128, 7, 1, 3]}, + {'Mconv5_stage%d_L1' % i: [128, 128, 7, 1, 3]}, + {'Mconv6_stage%d_L1' % i: [128, 128, 1, 1, 0]}, + {'Mconv7_stage%d_L1' % i: [128, 38, 1, 1, 0]}, + ] + blocks['block%d_2' % i] = [ + {'Mconv1_stage%d_L2' % i: [185, 128, 7, 1, 3]}, + {'Mconv2_stage%d_L2' % i: [128, 128, 7, 1, 3]}, + {'Mconv3_stage%d_L2' % i: [128, 128, 7, 1, 3]}, + {'Mconv4_stage%d_L2' % i: [128, 128, 7, 1, 3]}, + {'Mconv5_stage%d_L2' % i: [128, 128, 7, 1, 3]}, + {'Mconv6_stage%d_L2' % i: [128, 128, 1, 1, 0]}, + {'Mconv7_stage%d_L2' % i: [128, 19, 1, 1, 0]}, + ] + + layers = [] + for block in block_0: + # print(block) + for key in block: + v = block[key] + if 'pool' in key: + layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] + else: + conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + layers += [conv2d, nn.ReLU(inplace=True)] + + models = {'block_0': nn.Sequential(*layers)} + + for k in blocks: + v = blocks[k] + models[k] = make_layers(v) + + return PoseEstimation(models) + + def get_paf_and_heatmap(model, img_raw, scale_search, param_stride=8, box_size=368): + multiplier = [scale * box_size / img_raw.shape[0] for scale in scale_search] + + heatmap_avg = torch.zeros((len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])).cuda() + paf_avg = torch.zeros((len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])).cuda() + + for i, scale in enumerate(multiplier): + img_test = cv2.resize(img_raw, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC) + img_test_pad, pad = pad_right_down_corner(img_test, param_stride, param_stride) + img_test_pad = np.transpose(np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)) / 256 - 0.5 + + feed = 
Variable(torch.from_numpy(img_test_pad)).cuda() + output1, output2 = model(feed) + + # print(output1.size()) + # print(output2.size()) + + heatmap = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output2) + + paf = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output1) + + heatmap_avg[i] = heatmap[0].data + paf_avg[i] = paf[0].data + + heatmap_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), 1, 2).cuda() + heatmap_avg = heatmap_avg.cpu().numpy() + + paf_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2).cuda() + paf_avg = paf_avg.cpu().numpy() + + return paf_avg, heatmap_avg + + def extract_heatmap_info(heatmap_avg, param_thre1=0.1): + all_peaks = [] + peak_counter = 0 + + for part in range(18): + map_ori = heatmap_avg[:, :, part] + map_gau = gaussian_filter(map_ori, sigma=3) + + map_left = np.zeros(map_gau.shape) + map_left[1:, :] = map_gau[:-1, :] + map_right = np.zeros(map_gau.shape) + map_right[:-1, :] = map_gau[1:, :] + map_up = np.zeros(map_gau.shape) + map_up[:, 1:] = map_gau[:, :-1] + map_down = np.zeros(map_gau.shape) + map_down[:, :-1] = map_gau[:, 1:] + + peaks_binary = np.logical_and.reduce((map_gau >= map_left, map_gau >= map_right, map_gau >= map_up, map_gau >= map_down, map_gau > param_thre1)) + + peaks = zip(np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]) # note reverse + peaks = list(peaks) + peaks_with_score = [x + (map_ori[x[1], x[0]],) for x in peaks] + ids = range(peak_counter, peak_counter + len(peaks)) + peaks_with_score_and_id = [peaks_with_score[i] + (ids[i],) for i in range(len(ids))] + + all_peaks.append(peaks_with_score_and_id) + peak_counter += len(peaks) + + return all_peaks + + def extract_paf_info(img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5): + connection_all = [] + special_k = [] + mid_num = 10 + + for k in range(len(map_ids)): + score_mid = paf_avg[:, :, [x - 19 for x in map_ids[k]]] + candA = all_peaks[limb_seq[k][0] - 1] + candB = all_peaks[limb_seq[k][1] - 1] + nA = len(candA) + nB = len(candB) + if nA != 0 and nB != 0: + connection_candidate = [] + for i in range(nA): + for j in range(nB): + vec = np.subtract(candB[j][:2], candA[i][:2]) + norm = math.sqrt(vec[0] * vec[0] + vec[1] * vec[1]) + if norm < 1e-8: + raise ZeroDivisionError + vec = np.divide(vec, norm) + + startend = zip(np.linspace(candA[i][0], candB[j][0], num=mid_num), np.linspace(candA[i][1], candB[j][1], num=mid_num)) + startend = list(startend) + + vec_x = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 0] for I in range(len(startend))]) + vec_y = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 1] for I in range(len(startend))]) + + score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(vec_y, vec[1]) + score_with_dist_prior = sum(score_midpts) / len(score_midpts) + score_with_dist_prior += min(0.5 * img_raw.shape[0] / norm - 1, 0) + + criterion1 = len(np.nonzero(score_midpts > param_thre2)[0]) > 0.8 * len(score_midpts) + criterion2 = score_with_dist_prior > 0 + if criterion1 and criterion2: + connection_candidate.append([i, j, score_with_dist_prior, score_with_dist_prior + candA[i][2] + candB[j][2]]) + + connection_candidate = sorted(connection_candidate, key=lambda x: x[2], reverse=True) + connection = np.zeros((0, 5)) + for c in range(len(connection_candidate)): + i, j, s = connection_candidate[c][0:3] + if i not in connection[:, 3] and j not in connection[:, 4]: + connection = 
np.vstack([connection, [candA[i][3], candB[j][3], s, i, j]]) + if len(connection) >= min(nA, nB): + break + + connection_all.append(connection) + else: + special_k.append(k) + connection_all.append([]) + + return special_k, connection_all + + def get_subsets(connection_all, special_k, all_peaks): + # last number in each row is the total parts number of that person + # the second last number in each row is the score of the overall configuration + subset = -1 * np.ones((0, 20)) + candidate = np.array([item for sublist in all_peaks for item in sublist]) + + for k in range(len(map_ids)): + if k not in special_k: + partAs = connection_all[k][:, 0] + partBs = connection_all[k][:, 1] + indexA, indexB = np.array(limb_seq[k]) - 1 + + for i in range(len(connection_all[k])): # = 1:size(temp,1) + found = 0 + subset_idx = [-1, -1] + for j in range(len(subset)): # 1:size(subset,1): + if subset[j][indexA] == partAs[i] or subset[j][indexB] == partBs[i]: + subset_idx[found] = j + found += 1 + + if found == 1: + j = subset_idx[0] + if subset[j][indexB] != partBs[i]: + subset[j][indexB] = partBs[i] + subset[j][-1] += 1 + subset[j][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] + elif found == 2: # if found 2 and disjoint, merge them + j1, j2 = subset_idx + print('found = 2') + membership = ((subset[j1] >= 0).astype(int) + (subset[j2] >= 0).astype(int))[:-2] + if len(np.nonzero(membership == 2)[0]) == 0: # merge + subset[j1][:-2] += subset[j2][:-2] + 1 + subset[j1][-2:] += subset[j2][-2:] + subset[j1][-2] += connection_all[k][i][2] + subset = np.delete(subset, j2, 0) + else: # as like found == 1 + subset[j1][indexB] = partBs[i] + subset[j1][-1] += 1 + subset[j1][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] + + # if find no partA in the subset, create a new subset + elif not found and k < 17: + row = -1 * np.ones(20) + row[indexA] = partAs[i] + row[indexB] = partBs[i] + row[-1] = 2 + row[-2] = sum(candidate[connection_all[k][i, :2].astype(int), 2]) + connection_all[k][i][2] + subset = np.vstack([subset, row]) + return subset, candidate + + def draw_key_point(subset, all_peaks, img_raw): + del_ids = [] + for i in range(len(subset)): + if subset[i][-1] < 4 or subset[i][-2] / subset[i][-1] < 0.4: + del_ids.append(i) + subset = np.delete(subset, del_ids, axis=0) + + img_canvas = img_raw.copy() # B,G,R order + + for i in range(18): + for j in range(len(all_peaks[i])): + cv2.circle(img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1) + + return subset, img_canvas + + def link_key_point(img_canvas, candidate, subset, stickwidth=4): + for i in range(17): + for n in range(len(subset)): + index = subset[n][np.array(limb_seq[i]) - 1] + if -1 in index: + continue + cur_canvas = img_canvas.copy() + Y = candidate[index.astype(int), 0] + X = candidate[index.astype(int), 1] + mX = np.mean(X) + mY = np.mean(Y) + length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5 + angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1])) + polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1) + cv2.fillConvexPoly(cur_canvas, polygon, colors[i]) + img_canvas = cv2.addWeighted(img_canvas, 0.4, cur_canvas, 0.6, 0) + + return img_canvas + + def pad_right_down_corner(img, stride, pad_value): + h = img.shape[0] + w = img.shape[1] + + pad = 4 * [None] + pad[0] = 0 # up + pad[1] = 0 # left + pad[2] = 0 if (h % stride == 0) else stride - (h % stride) # down + pad[3] = 0 if (w % stride == 0) else stride - (w % stride) # right + + img_padded = 
img + pad_up = np.tile(img_padded[0:1, :, :] * 0 + pad_value, (pad[0], 1, 1)) + img_padded = np.concatenate((pad_up, img_padded), axis=0) + pad_left = np.tile(img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1)) + img_padded = np.concatenate((pad_left, img_padded), axis=1) + pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1)) + img_padded = np.concatenate((img_padded, pad_down), axis=0) + pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1)) + img_padded = np.concatenate((img_padded, pad_right), axis=1) + + return img_padded, pad + + if __name__ == '__main__': + print(get_pose_model()) + + # First let's download the pre-trained model. + + # In[2]: + + # Using gdown to download the model directly from Google Drive + + # assert os.system(' conda install -y gdown') == 0 + import gdown + + # In[3]: + + model = 'coco_pose_iter_440000.pth.tar' + if not os.path.exists(model): + url = 'https://drive.google.com/u/0/uc?export=download&confirm=f_Ix&id=0B1asvDK18cu_MmY1ZkpaOUhhRHM' + gdown.download(url, model, quiet=False) + + # In[4]: + + state_dict = torch.load(model)['state_dict'] # getting the pre-trained model's parameters + # A state_dict is simply a Python dictionary object that maps each layer to its parameter tensor. + + model_pose = get_pose_model() # building the model (see fn. defn. above). To see the architecture, see below cell. + model_pose.load_state_dict(state_dict) # Loading the parameters (weights, biases) into the model. + + model_pose.float() # I'm not sure why this is used. No difference if you remove it. + + if use_gpu is None: + use_gpu = True + + if use_gpu: + model_pose.cuda() + model_pose = torch.nn.DataParallel(model_pose, device_ids=range(torch.cuda.device_count())) + cudnn.benchmark = True + + def estimate_pose( + img_ori, + name=None, + scale_param=None, + display=None, + ): + if display is None: + display = True + + if scale_param is None: + scale_param = [0.5, 1.0, 1.5, 2.0] + + if display: + if name is None: + name = tempfile.mktemp( + dir='/kaggle/working', + suffix='.png', + ) + pprint.pprint( + ['estimate_pose', dict(name=name)], + ) + + # People might be at different scales in the image, perform inference at multiple scales to boost results + + # Predict Heatmaps for approximate joint position + # Use Part Affinity Fields (PAF's) as guidance to link joints to form skeleton + # PAF's are just unit vectors along the limb encoding the direction of the limb + # A dot product of possible joint connection will be high if actual limb else low - # The code hidden below handles all the imports and function definitions (the heavy lifting). If you're a beginner I'd advice you skip this for now. When you are able to understand the rest of the code, come back here and understand each function to get a deeper knowledge. 
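+ # Added note (a rough sketch, not in the original notebook): the PAF link score
+ # computed in extract_paf_info above is, approximately, the mean dot product between
+ # the predicted PAF vectors sampled along the candidate limb and the unit vector
+ # joining the two candidate joints (plus a distance prior):
+ #
+ #   u = (B - A) / numpy.linalg.norm(B - A)      # A, B: joint (x, y) positions
+ #   pts = numpy.linspace(A, B, num=10)          # sample points along the limb
+ #   vecs = [paf[int(round(y)), int(round(x))] for x, y in pts]
+ #   score = numpy.mean([numpy.dot(v, u) for v in vecs])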
+ img_canvas = None + img_points = None - # In[1]: + try: + paf_info, heatmap_info = get_paf_and_heatmap(model_pose, img_ori, scale_param) + peaks = extract_heatmap_info(heatmap_info) + sp_k, con_all = extract_paf_info(img_ori, paf_info, peaks) + subsets, candidates = get_subsets(con_all, sp_k, peaks) - # !/usr/bin/env python3 - # coding=utf-8 - # author=dave.fang@outlook.com - # create=20171225 - - import os - import pprint - import cv2 - import sys - import math - import time - import tempfile - import numpy as np - import matplotlib.pyplot as plt - - import torch - import torch.nn as nn - import torch.nn.parallel - import torch.backends.cudnn as cudnn - import torch.optim as optim - import torchvision.transforms as transforms - import torchvision.datasets as datasets - import torchvision.models as models - - from torch.autograd import Variable - - from scipy.ndimage.filters import gaussian_filter - - #get_ipython().run_line_magic('matplotlib', 'inline') - #get_ipython().run_line_magic('config', "InlineBackend.figure_format = 'retina'") + subsets, img_points = draw_key_point(subsets, peaks, img_ori) + img_canvas = link_key_point(img_points, candidates, subsets) + except ZeroDivisionError: + pprint.pprint('zero de') - # find connection in the specified sequence, center 29 is in the position 15 - limb_seq = [[2, 3], [2, 6], [3, 4], [4, 5], [6, 7], [7, 8], [2, 9], [9, 10], - [10, 11], [2, 12], [12, 13], [13, 14], [2, 1], [1, 15], [15, 17], - [1, 16], [16, 18], [3, 17], [6, 18]] - - # the middle joints heatmap correpondence - map_ids = [[31, 32], [39, 40], [33, 34], [35, 36], [41, 42], [43, 44], [19, 20], [21, 22], - [23, 24], [25, 26], [27, 28], [29, 30], [47, 48], [49, 50], [53, 54], [51, 52], - [55, 56], [37, 38], [45, 46]] - - # these are the colours for the 18 body points - colors = [[255, 0, 0], [255, 85, 0], [255, 170, 0], [255, 255, 0], [170, 255, 0], [85, 255, 0], [0, 255, 0], - [0, 255, 85], [0, 255, 170], [0, 255, 255], [0, 170, 255], [0, 85, 255], [0, 0, 255], [85, 0, 255], - [170, 0, 255], [255, 0, 255], [255, 0, 170], [255, 0, 85]] - - - class PoseEstimation(nn.Module): - def __init__(self, model_dict): - super(PoseEstimation, self).__init__() - - self.model0 = model_dict['block_0'] - self.model1_1 = model_dict['block1_1'] - self.model2_1 = model_dict['block2_1'] - self.model3_1 = model_dict['block3_1'] - self.model4_1 = model_dict['block4_1'] - self.model5_1 = model_dict['block5_1'] - self.model6_1 = model_dict['block6_1'] - - self.model1_2 = model_dict['block1_2'] - self.model2_2 = model_dict['block2_2'] - self.model3_2 = model_dict['block3_2'] - self.model4_2 = model_dict['block4_2'] - self.model5_2 = model_dict['block5_2'] - self.model6_2 = model_dict['block6_2'] - - def forward(self, x): - out1 = self.model0(x) - - out1_1 = self.model1_1(out1) - out1_2 = self.model1_2(out1) - out2 = torch.cat([out1_1, out1_2, out1], 1) - - out2_1 = self.model2_1(out2) - out2_2 = self.model2_2(out2) - out3 = torch.cat([out2_1, out2_2, out1], 1) - - out3_1 = self.model3_1(out3) - out3_2 = self.model3_2(out3) - out4 = torch.cat([out3_1, out3_2, out1], 1) - - out4_1 = self.model4_1(out4) - out4_2 = self.model4_2(out4) - out5 = torch.cat([out4_1, out4_2, out1], 1) - - out5_1 = self.model5_1(out5) - out5_2 = self.model5_2(out5) - out6 = torch.cat([out5_1, out5_2, out1], 1) - - out6_1 = self.model6_1(out6) - out6_2 = self.model6_2(out6) - - return out6_1, out6_2 - - - def make_layers(layer_dict): - layers = [] - - for i in range(len(layer_dict) - 1): - layer = layer_dict[i] - for k in layer: - v 
= layer[k] - if 'pool' in k: - layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] - else: - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) - layers += [conv2d, nn.ReLU(inplace=True)] - layer = list(layer_dict[-1].keys()) - k = layer[0] - v = layer_dict[-1][k] - - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) - layers += [conv2d] + img_points = img_ori.copy() + img_canvas = img_ori.copy() - return nn.Sequential(*layers) - - - def get_pose_model(): - blocks = {} - - block_0 = [{'conv1_1': [3, 64, 3, 1, 1]}, {'conv1_2': [64, 64, 3, 1, 1]}, {'pool1_stage1': [2, 2, 0]}, - {'conv2_1': [64, 128, 3, 1, 1]}, {'conv2_2': [128, 128, 3, 1, 1]}, {'pool2_stage1': [2, 2, 0]}, - {'conv3_1': [128, 256, 3, 1, 1]}, {'conv3_2': [256, 256, 3, 1, 1]}, {'conv3_3': [256, 256, 3, 1, 1]}, - {'conv3_4': [256, 256, 3, 1, 1]}, {'pool3_stage1': [2, 2, 0]}, {'conv4_1': [256, 512, 3, 1, 1]}, - {'conv4_2': [512, 512, 3, 1, 1]}, {'conv4_3_CPM': [512, 256, 3, 1, 1]}, - {'conv4_4_CPM': [256, 128, 3, 1, 1]}] - - blocks['block1_1'] = [{'conv5_1_CPM_L1': [128, 128, 3, 1, 1]}, {'conv5_2_CPM_L1': [128, 128, 3, 1, 1]}, - {'conv5_3_CPM_L1': [128, 128, 3, 1, 1]}, {'conv5_4_CPM_L1': [128, 512, 1, 1, 0]}, - {'conv5_5_CPM_L1': [512, 38, 1, 1, 0]}] - - blocks['block1_2'] = [{'conv5_1_CPM_L2': [128, 128, 3, 1, 1]}, {'conv5_2_CPM_L2': [128, 128, 3, 1, 1]}, - {'conv5_3_CPM_L2': [128, 128, 3, 1, 1]}, {'conv5_4_CPM_L2': [128, 512, 1, 1, 0]}, - {'conv5_5_CPM_L2': [512, 19, 1, 1, 0]}] - - for i in range(2, 7): - blocks['block%d_1' % i] = [{'Mconv1_stage%d_L1' % i: [185, 128, 7, 1, 3]}, - {'Mconv2_stage%d_L1' % i: [128, 128, 7, 1, 3]}, - {'Mconv3_stage%d_L1' % i: [128, 128, 7, 1, 3]}, - {'Mconv4_stage%d_L1' % i: [128, 128, 7, 1, 3]}, - {'Mconv5_stage%d_L1' % i: [128, 128, 7, 1, 3]}, - {'Mconv6_stage%d_L1' % i: [128, 128, 1, 1, 0]}, - {'Mconv7_stage%d_L1' % i: [128, 38, 1, 1, 0]}] - blocks['block%d_2' % i] = [{'Mconv1_stage%d_L2' % i: [185, 128, 7, 1, 3]}, - {'Mconv2_stage%d_L2' % i: [128, 128, 7, 1, 3]}, - {'Mconv3_stage%d_L2' % i: [128, 128, 7, 1, 3]}, - {'Mconv4_stage%d_L2' % i: [128, 128, 7, 1, 3]}, - {'Mconv5_stage%d_L2' % i: [128, 128, 7, 1, 3]}, - {'Mconv6_stage%d_L2' % i: [128, 128, 1, 1, 0]}, - {'Mconv7_stage%d_L2' % i: [128, 19, 1, 1, 0]}] - - layers = [] - for block in block_0: - # print(block) - for key in block: - v = block[key] - if 'pool' in key: - layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] - else: - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) - layers += [conv2d, nn.ReLU(inplace=True)] - - models = { - 'block_0': nn.Sequential(*layers) - } - - for k in blocks: - v = blocks[k] - models[k] = make_layers(v) - - return PoseEstimation(models) - - - def get_paf_and_heatmap(model, img_raw, scale_search, param_stride=8, box_size=368): - multiplier = [scale * box_size / img_raw.shape[0] for scale in scale_search] - - heatmap_avg = torch.zeros((len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])).cuda() - paf_avg = torch.zeros((len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])).cuda() - - for i, scale in enumerate(multiplier): - img_test = cv2.resize(img_raw, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC) - img_test_pad, pad = pad_right_down_corner(img_test, param_stride, param_stride) - img_test_pad = np.transpose(np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)) / 256 - 0.5 - - feed = 
Variable(torch.from_numpy(img_test_pad)).cuda() - output1, output2 = model(feed) - - #print(output1.size()) - #print(output2.size()) - - heatmap = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output2) - - paf = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output1) - - heatmap_avg[i] = heatmap[0].data - paf_avg[i] = paf[0].data - - heatmap_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), 1, 2).cuda() - heatmap_avg = heatmap_avg.cpu().numpy() - - paf_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2).cuda() - paf_avg = paf_avg.cpu().numpy() - - return paf_avg, heatmap_avg - - - def extract_heatmap_info(heatmap_avg, param_thre1=0.1): - all_peaks = [] - peak_counter = 0 - - for part in range(18): - map_ori = heatmap_avg[:, :, part] - map_gau = gaussian_filter(map_ori, sigma=3) - - map_left = np.zeros(map_gau.shape) - map_left[1:, :] = map_gau[:-1, :] - map_right = np.zeros(map_gau.shape) - map_right[:-1, :] = map_gau[1:, :] - map_up = np.zeros(map_gau.shape) - map_up[:, 1:] = map_gau[:, :-1] - map_down = np.zeros(map_gau.shape) - map_down[:, :-1] = map_gau[:, 1:] - - peaks_binary = np.logical_and.reduce( - (map_gau >= map_left, map_gau >= map_right, map_gau >= map_up, - map_gau >= map_down, map_gau > param_thre1)) - - peaks = zip(np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]) # note reverse - peaks = list(peaks) - peaks_with_score = [x + (map_ori[x[1], x[0]],) for x in peaks] - ids = range(peak_counter, peak_counter + len(peaks)) - peaks_with_score_and_id = [peaks_with_score[i] + (ids[i],) for i in range(len(ids))] - - all_peaks.append(peaks_with_score_and_id) - peak_counter += len(peaks) - - return all_peaks - - - def extract_paf_info(img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5): - connection_all = [] - special_k = [] - mid_num = 10 - - for k in range(len(map_ids)): - score_mid = paf_avg[:, :, [x - 19 for x in map_ids[k]]] - candA = all_peaks[limb_seq[k][0] - 1] - candB = all_peaks[limb_seq[k][1] - 1] - nA = len(candA) - nB = len(candB) - if nA != 0 and nB != 0: - connection_candidate = [] - for i in range(nA): - for j in range(nB): - vec = np.subtract(candB[j][:2], candA[i][:2]) - norm = math.sqrt(vec[0] * vec[0] + vec[1] * vec[1]) - if norm < 1e-8: - raise ZeroDivisionError - vec = np.divide(vec, norm) - - startend = zip(np.linspace(candA[i][0], candB[j][0], num=mid_num), - np.linspace(candA[i][1], candB[j][1], num=mid_num)) - startend = list(startend) - - vec_x = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 0] - for I in range(len(startend))]) - vec_y = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 1] - for I in range(len(startend))]) - - score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(vec_y, vec[1]) - score_with_dist_prior = sum(score_midpts) / len(score_midpts) - score_with_dist_prior += min(0.5 * img_raw.shape[0] / norm - 1, 0) - - criterion1 = len(np.nonzero(score_midpts > param_thre2)[0]) > 0.8 * len(score_midpts) - criterion2 = score_with_dist_prior > 0 - if criterion1 and criterion2: - connection_candidate.append( - [i, j, score_with_dist_prior, score_with_dist_prior + candA[i][2] + candB[j][2]]) - - connection_candidate = sorted(connection_candidate, key=lambda x: x[2], reverse=True) - connection = np.zeros((0, 5)) - for c in range(len(connection_candidate)): - i, j, s = connection_candidate[c][0:3] - if i not in connection[:, 3] and j not in connection[:, 4]: 
- connection = np.vstack([connection, [candA[i][3], candB[j][3], s, i, j]]) - if len(connection) >= min(nA, nB): - break - - connection_all.append(connection) - else: - special_k.append(k) - connection_all.append([]) - - return special_k, connection_all - - - def get_subsets(connection_all, special_k, all_peaks): - # last number in each row is the total parts number of that person - # the second last number in each row is the score of the overall configuration - subset = -1 * np.ones((0, 20)) - candidate = np.array([item for sublist in all_peaks for item in sublist]) - - for k in range(len(map_ids)): - if k not in special_k: - partAs = connection_all[k][:, 0] - partBs = connection_all[k][:, 1] - indexA, indexB = np.array(limb_seq[k]) - 1 - - for i in range(len(connection_all[k])): # = 1:size(temp,1) - found = 0 - subset_idx = [-1, -1] - for j in range(len(subset)): # 1:size(subset,1): - if subset[j][indexA] == partAs[i] or subset[j][indexB] == partBs[i]: - subset_idx[found] = j - found += 1 - - if found == 1: - j = subset_idx[0] - if (subset[j][indexB] != partBs[i]): - subset[j][indexB] = partBs[i] - subset[j][-1] += 1 - subset[j][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] - elif found == 2: # if found 2 and disjoint, merge them - j1, j2 = subset_idx - print("found = 2") - membership = ((subset[j1] >= 0).astype(int) + (subset[j2] >= 0).astype(int))[:-2] - if len(np.nonzero(membership == 2)[0]) == 0: # merge - subset[j1][:-2] += (subset[j2][:-2] + 1) - subset[j1][-2:] += subset[j2][-2:] - subset[j1][-2] += connection_all[k][i][2] - subset = np.delete(subset, j2, 0) - else: # as like found == 1 - subset[j1][indexB] = partBs[i] - subset[j1][-1] += 1 - subset[j1][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] - - # if find no partA in the subset, create a new subset - elif not found and k < 17: - row = -1 * np.ones(20) - row[indexA] = partAs[i] - row[indexB] = partBs[i] - row[-1] = 2 - row[-2] = sum(candidate[connection_all[k][i, :2].astype(int), 2]) + connection_all[k][i][2] - subset = np.vstack([subset, row]) - return subset, candidate - - - def draw_key_point(subset, all_peaks, img_raw): - del_ids = [] - for i in range(len(subset)): - if subset[i][-1] < 4 or subset[i][-2] / subset[i][-1] < 0.4: - del_ids.append(i) - subset = np.delete(subset, del_ids, axis=0) - - img_canvas = img_raw.copy() # B,G,R order - - for i in range(18): - for j in range(len(all_peaks[i])): - cv2.circle(img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1) - - return subset, img_canvas - - - def link_key_point(img_canvas, candidate, subset, stickwidth=4): - for i in range(17): - for n in range(len(subset)): - index = subset[n][np.array(limb_seq[i]) - 1] - if -1 in index: - continue - cur_canvas = img_canvas.copy() - Y = candidate[index.astype(int), 0] - X = candidate[index.astype(int), 1] - mX = np.mean(X) - mY = np.mean(Y) - length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5 - angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1])) - polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1) - cv2.fillConvexPoly(cur_canvas, polygon, colors[i]) - img_canvas = cv2.addWeighted(img_canvas, 0.4, cur_canvas, 0.6, 0) - - return img_canvas - - def pad_right_down_corner(img, stride, pad_value): - h = img.shape[0] - w = img.shape[1] - - pad = 4 * [None] - pad[0] = 0 # up - pad[1] = 0 # left - pad[2] = 0 if (h % stride == 0) else stride - (h % stride) # down - pad[3] = 0 if (w % stride == 0) else stride - (w % stride) # 
right - - img_padded = img - pad_up = np.tile(img_padded[0:1, :, :] * 0 + pad_value, (pad[0], 1, 1)) - img_padded = np.concatenate((pad_up, img_padded), axis=0) - pad_left = np.tile(img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1)) - img_padded = np.concatenate((pad_left, img_padded), axis=1) - pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1)) - img_padded = np.concatenate((img_padded, pad_down), axis=0) - pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1)) - img_padded = np.concatenate((img_padded, pad_right), axis=1) - - return img_padded, pad - - - if __name__ == '__main__': - print(get_pose_model()) - - - # First let's download the pre-trained model. - - # In[2]: - - - # Using gdown to download the model directly from Google Drive + # After predicting Heatmaps and PAF's, proceeed to link joints correctly + if display: + f = plt.figure(figsize=(15, 10)) - #assert os.system(' conda install -y gdown') == 0 - import gdown + plt.subplot(1, 2, 1) + plt.imshow(img_points[..., ::-1]) + plt.subplot(1, 2, 2) + plt.imshow(img_canvas[..., ::-1]) - # In[3]: + f.savefig(name) + return dict( + img_points=img_points, + img_canvas=img_canvas, + ) - model = 'coco_pose_iter_440000.pth.tar' - if not os.path.exists(model): - url = 'https://drive.google.com/u/0/uc?export=download&confirm=f_Ix&id=0B1asvDK18cu_MmY1ZkpaOUhhRHM' - gdown.download( - url, - model, - quiet=False - ) + # In[5]: - - # In[4]: - - - state_dict = torch.load(model)['state_dict'] # getting the pre-trained model's parameters - # A state_dict is simply a Python dictionary object that maps each layer to its parameter tensor. - - model_pose = get_pose_model() # building the model (see fn. defn. above). To see the architecture, see below cell. - model_pose.load_state_dict(state_dict) # Loading the parameters (weights, biases) into the model. - - model_pose.float() # I'm not sure why this is used. No difference if you remove it. 
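+ # Added note: torch.nn.Module.float() casts all floating-point parameters and
+ # buffers to torch.float32; presumably the checkpoint weights here are already
+ # stored as float32, which would explain the author's observation that removing
+ # the call makes no observable difference.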
- - if use_gpu is None: - use_gpu = True - - if use_gpu: - model_pose.cuda() - model_pose = torch.nn.DataParallel(model_pose, device_ids=range(torch.cuda.device_count())) - cudnn.benchmark = True - - def estimate_pose( - img_ori, - name=None, - scale_param=None, - display=None, - ): - if display is None: - display = True - - if scale_param is None: - scale_param = [0.5, 1.0, 1.5, 2.0] - - if display: - if name is None: - name = tempfile.mktemp( - dir='/kaggle/working', - suffix='.png', - ) - pprint.pprint( - ['estimate_pose', dict(name=name)], - ) - - # People might be at different scales in the image, perform inference at multiple scales to boost results - - # Predict Heatmaps for approximate joint position - # Use Part Affinity Fields (PAF's) as guidance to link joints to form skeleton - # PAF's are just unit vectors along the limb encoding the direction of the limb - # A dot product of possible joint connection will be high if actual limb else low - - img_canvas = None - img_points = None - - try: - paf_info, heatmap_info = get_paf_and_heatmap(model_pose, img_ori, scale_param) - peaks = extract_heatmap_info(heatmap_info) - sp_k, con_all = extract_paf_info(img_ori, paf_info, peaks) - - subsets, candidates = get_subsets(con_all, sp_k, peaks) - - subsets, img_points = draw_key_point(subsets, peaks, img_ori) - img_canvas = link_key_point(img_points, candidates, subsets) - except ZeroDivisionError: - pprint.pprint('zero de') - - img_points = img_ori.copy() - img_canvas = img_ori.copy() - - # After predicting Heatmaps and PAF's, proceeed to link joints correctly - if display: - f = plt.figure(figsize=(15, 10)) - - plt.subplot(1, 2, 1) - plt.imshow(img_points[...,::-1]) - - plt.subplot(1, 2, 2) - plt.imshow(img_canvas[...,::-1]) - - f.savefig(name) - - return dict( - img_points=img_points, - img_canvas=img_canvas, - ) - - - # In[5]: - - return dict( - cv2=cv2, - estimate_pose=estimate_pose, - model_pose=model_pose, - ) + return dict( + cv2=cv2, + estimate_pose=estimate_pose, + model_pose=model_pose, + ) def kernel_8( - o_7, + o_7, ): - for i, o in enumerate([ - '../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' % k - for k in range(6) - ]): - arch_image = o - img_ori = o_7['cv2'].imread(arch_image) - o_7['estimate_pose'](img_ori) + for i, o in enumerate(['../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' % k for k in range(6)]): + arch_image = o + img_ori = o_7['cv2'].imread(arch_image) + o_7['estimate_pose'](img_ori) + def kernel_9_benchmark( - o_7, + o_7, ): - import datetime + import datetime + + t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') + t5 = 10 + t2 = datetime.datetime.now() + for k in range(t5): + o_7['estimate_pose']( + img_ori=t1, + scale_param=[1.0], + display=False, + ) + t3 = datetime.datetime.now() + t4 = (t3 - t2).total_seconds() / t5 + pprint.pprint(['kernel_9_benchmark', dict(t4=t4, t5=t5)]) - t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') - t5 = 10 - t2 = datetime.datetime.now() - for k in range(t5): - o_7['estimate_pose']( - img_ori=t1, - scale_param=[1.0], - display=False, - ) - t3 = datetime.datetime.now() - t4 = (t3 - t2).total_seconds() / t5 - pprint.pprint( - ['kernel_9_benchmark', dict(t4=t4, t5=t5)] - ) def kernel_10(): - import torch + import torch - # Model - model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5x, custom + # Model + model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5x, custom - # 
Images - img = 'https://ultralytics.com/images/zidane.jpg' # or file, PIL, OpenCV, numpy, multiple + # Images + img = 'https://ultralytics.com/images/zidane.jpg' # or file, PIL, OpenCV, numpy, multiple - # Inference - results = model(img) + # Inference + results = model(img) - # Results - results.print() # or .show(), .save(), .crop(), .pandas(), etc. + # Results + results.print() # or .show(), .save(), .crop(), .pandas(), etc. + + return dict( + model=model, + ) - return dict( - model=model, - ) def kernel_11_benchmark( - o_7, - o_10, + o_7, + o_10, ): - import datetime + import datetime - t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') - t5 = 10 - t2 = datetime.datetime.now() - for k in range(t5): - t6 = o_10['model'](t1) - t7 = t6.pandas().xywhn + t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') + t5 = 10 + t2 = datetime.datetime.now() + for k in range(t5): + t6 = o_10['model'](t1) + t7 = t6.pandas().xywhn + + t3 = datetime.datetime.now() + t4 = (t3 - t2).total_seconds() / t5 + pprint.pprint(['kernel_11_benchmark', dict(t4=t4, t5=t5)]) - t3 = datetime.datetime.now() - t4 = (t3 - t2).total_seconds() / t5 - pprint.pprint( - ['kernel_11_benchmark', dict(t4=t4, t5=t5)] - ) def kernel_13( - o_6=None, + o_6=None, ): - t2 = [ - '/kaggle/working', - '/kaggle/input/garbage' - ] + t2 = ['/kaggle/working', '/kaggle/input/garbage'] - t3 = [ - os.path.join( - o, - 'kernel_13-object-detection.nc', - ) - for o in t2 - ] + t3 = [ + os.path.join( + o, + 'kernel_13-object-detection.nc', + ) + for o in t2 + ] - t4 = [ - o - for o in t3 - if os.path.exists(o) - ] + t4 = [o for o in t3 if os.path.exists(o)] + if not len(t4) > 0 or not o_6 is None: + t1 = pandas.concat( + sum([[o2['t11'][0].assign(frame_id=k, video_path=o['video_path']) for k, o2 in enumerate(o['frames'])] for o in o_6['t8']], []) + ).to_xarray() + t5 = t3[0] + t1.to_netcdf(t5) + del t1 + elif len(t4) > 0: + t5 = t4[0] + else: + raise NotImplementedError - if not len(t4) > 0 or not o_6 is None: - t1 = pandas.concat( - sum( - [ - [ - o2['t11'][0].assign( - frame_id=k, - video_path=o['video_path'] - ) - for k, o2 in enumerate(o['frames']) - ] for o in o_6['t8'] - ], - [] - ) - ).to_xarray() - t5 = t3[0] - t1.to_netcdf(t5) - del t1 - elif len(t4) > 0: - t5 = t4[0] - else: - raise NotImplementedError + t1 = xarray.load_dataset(t5) - t1 = xarray.load_dataset(t5) + return dict( + t1=t1, + ) - return dict( - t1=t1, - ) def kernel_14( - skip_o_6=None, - skip_o_7_10_12_13=None, - run_benchmark=None, + skip_o_6=None, + skip_o_7_10_12_13=None, + run_benchmark=None, ): - if skip_o_6 is None: - skip_o_6 = True + if skip_o_6 is None: + skip_o_6 = True - if skip_o_7_10_12_13 is None: - skip_o_7_10_12_13 = True + if skip_o_7_10_12_13 is None: + skip_o_7_10_12_13 = True - if run_benchmark is None: - run_benchmark = False + if run_benchmark is None: + run_benchmark = False - o_3 = kernel_3(should_exist=True) - o_4 = kernel_4(o_3=o_3) - o_5 = kernel_5(o_4=o_4) + o_3 = kernel_3(should_exist=True) + o_4 = kernel_4(o_3=o_3) + o_5 = kernel_5(o_4=o_4) - if not skip_o_7_10_12_13: - o_7 = kernel_7() + if not skip_o_7_10_12_13: + o_7 = kernel_7() - o_10 = kernel_10() - o_12 = kernel_12() - else: - o_7 = None - o_10 = None - o_12 = None + o_10 = kernel_10() + o_12 = kernel_12() + else: + o_7 = None + o_10 = None + o_12 = None - if not skip_o_6: - o_6 = kernel_6( - o_7=None, - o_10=o_10, - o_12=None, - max_frames=10000 - ) - else: - o_6 = None + if not skip_o_6: + o_6 = kernel_6(o_7=None, 
o_10=o_10, o_12=None, max_frames=10000) + else: + o_6 = None - if not skip_o_7_10_12_13: - o_13 = kernel_13( - o_6=o_6, - ) - else: - o_13 = None + if not skip_o_7_10_12_13: + o_13 = kernel_13( + o_6=o_6, + ) + else: + o_13 = None - if run_benchmark: - o_11 = kernel_11_benchmark(o_7=o_7, o_10=o_10) - o_9 = kernel_9_benchmark(o_7=o_7) - o_8 = kernel_8(o_7=o_7) + if run_benchmark: + o_11 = kernel_11_benchmark(o_7=o_7, o_10=o_10) + o_9 = kernel_9_benchmark(o_7=o_7) + o_8 = kernel_8(o_7=o_7) + + return dict( + o_3=o_3, + o_13=o_13, + o_7=o_7, + ) - return dict( - o_3=o_3, - o_13=o_13, - o_7=o_7, - ) def kernel_15( - o_14, + o_14, ): - t1 = pandas.DataFrame( - numpy.unique( - o_14['o_13']['t1']['name'].data, - return_counts=True - ) - ).T - pprint.pprint( - dict( - t1=t1, - ) - ) + t1 = pandas.DataFrame(numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)).T + pprint.pprint( + dict( + t1=t1, + ) + ) - t29 = 'output-gif' - if not os.path.exists(t29): - os.makedirs(t29, exist_ok=True) + t29 = 'output-gif' + if not os.path.exists(t29): + os.makedirs(t29, exist_ok=True) - for t2 in [ - 'baseball glove', - 'baseball bat', - 'sports ball', - 'person', - ]: - t28 = t2.replace(' ', '-') - t3 = o_14['o_13']['t1'] - t4 = numpy.where(t3.name.data == t2)[0] + for t2 in [ + 'baseball glove', + 'baseball bat', + 'sports ball', + 'person', + ]: + t28 = t2.replace(' ', '-') + t3 = o_14['o_13']['t1'] + t4 = numpy.where(t3.name.data == t2)[0] - t30 = 'output-png/%s' % t28 - if not os.path.exists(t30): - os.makedirs(t30, exist_ok=True) + t30 = 'output-png/%s' % t28 + if not os.path.exists(t30): + os.makedirs(t30, exist_ok=True) - numpy.random.seed(0) - t22 = numpy.random.choice(t4, 10) - pprint.pprint(t22) - import tqdm - t24 = [] - t27 = [] - for t5 in tqdm.tqdm(t22): - t6 = t3.video_path.data[t5] - t7 = t3.frame_id.data[t5] - t8 = t3.to_dataframe().iloc[t5] - #pprint.pprint([t6, t7]) - #pprint.pprint(t8) + numpy.random.seed(0) + t22 = numpy.random.choice(t4, 10) + pprint.pprint(t22) + import tqdm - import cv2 - import matplotlib.pyplot + t24 = [] + t27 = [] + for t5 in tqdm.tqdm(t22): + t6 = t3.video_path.data[t5] + t7 = t3.frame_id.data[t5] + t8 = t3.to_dataframe().iloc[t5] + # pprint.pprint([t6, t7]) + # pprint.pprint(t8) - t9 = cv2.VideoCapture(t6) - t9.set(cv2.CAP_PROP_POS_FRAMES, t7) - t10 = t9.read() - t9.release() - t11 = t10[1] - t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB) - t13 = t12.copy() - t15 = numpy.array([t8.xcenter, t8.ycenter, t8.width, t8.height]) - t16 = numpy.array([t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]]) - t17 = t15 * t16 - t18 = t17[:2] - t17[2:] / 2 - t19 = t17[:2] + t17[2:] / 2 - t20 = numpy.array([ - t18[0], t18[1], - t19[0], t19[1], - ]) - t21 = numpy.round(t20).astype(numpy.int32) - t14 = cv2.rectangle( - t13, - tuple(t21[:2]), - tuple(t21[2:]), - (0, 255, 0), - 1, - ) + import cv2 + import matplotlib.pyplot - if False: - t32 = o_14['o_7']['estimate_pose']( - t12, - scale_param=[1.0], - display=False, - )['img_canvas'] - else: - t32 = kernel_16([t12])['t6'][0] + t9 = cv2.VideoCapture(t6) + t9.set(cv2.CAP_PROP_POS_FRAMES, t7) + t10 = t9.read() + t9.release() + t11 = t10[1] + t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB) + t13 = t12.copy() + t15 = numpy.array([t8.xcenter, t8.ycenter, t8.width, t8.height]) + t16 = numpy.array([t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]]) + t17 = t15 * t16 + t18 = t17[:2] - t17[2:] / 2 + t19 = t17[:2] + t17[2:] / 2 + t20 = numpy.array( + [ + t18[0], + t18[1], + t19[0], + t19[1], + ] + ) + t21 = 
numpy.round(t20).astype(numpy.int32) + t14 = cv2.rectangle( + t13, + tuple(t21[:2]), + tuple(t21[2:]), + (0, 255, 0), + 1, + ) - f = matplotlib.pyplot.figure(figsize=(8, 12)) - f.suptitle( - 'name %s, frame_id %d\nvideo_path %s' % ( - t8['name'], - t8.frame_id, - t8.video_path, - ) - ) - matplotlib.pyplot.subplot(2, 1, 1) - matplotlib.pyplot.title( - 'score %s' % ( - t8.confidence, - ) - ) - matplotlib.pyplot.imshow(t14) - matplotlib.pyplot.subplot(2, 1, 2) - matplotlib.pyplot.imshow(t32) - t25 = os.path.join( - t30, - 'kernel_15-%s-%05d.jpg' % ( - t28, - t7, - ) - ) - f.savefig(t25) - t24.append(t25) - matplotlib.pyplot.close(f) + if False: + t32 = o_14['o_7']['estimate_pose']( + t12, + scale_param=[1.0], + display=False, + )['img_canvas'] + else: + t32 = kernel_16([t12])['t6'][0] - t27.append([t8, t21]) - pprint.pprint( - pandas.concat([ - o[0] - for o in t27 - ], axis=1).T - ) + f = matplotlib.pyplot.figure(figsize=(8, 12)) + f.suptitle( + 'name %s, frame_id %d\nvideo_path %s' + % ( + t8['name'], + t8.frame_id, + t8.video_path, + ) + ) + matplotlib.pyplot.subplot(2, 1, 1) + matplotlib.pyplot.title('score %s' % (t8.confidence,)) + matplotlib.pyplot.imshow(t14) + matplotlib.pyplot.subplot(2, 1, 2) + matplotlib.pyplot.imshow(t32) + t25 = os.path.join( + t30, + 'kernel_15-%s-%05d.jpg' + % ( + t28, + t7, + ), + ) + f.savefig(t25) + t24.append(t25) + matplotlib.pyplot.close(f) - t23 = os.path.join( - t29, - 'output-%s.gif' % t28 - ) - if os.path.exists(t23): - subprocess.check_call(['rm', t23]) + t27.append([t8, t21]) + pprint.pprint(pandas.concat([o[0] for o in t27], axis=1).T) + + t23 = os.path.join(t29, 'output-%s.gif' % t28) + if os.path.exists(t23): + subprocess.check_call(['rm', t23]) + + subprocess.check_call( + [ + 'convert', + '-delay', + '100', + '-loop', + '0', + *t24, + t23, + ] + ) - subprocess.check_call( - [ - 'convert', - '-delay', - '100', - '-loop', - '0', - *t24, - t23, - ] - ) def kernel_16(images): - assert isinstance(images, list) + assert isinstance(images, list) - import cv2 - import subprocess - import os + import cv2 + import subprocess + import os - t2 = '/kaggle/working/test-input' - subprocess.check_call([ - 'rm', - '-fr', - t2, - ]) + t2 = '/kaggle/working/test-input' + subprocess.check_call( + [ + 'rm', + '-fr', + t2, + ] + ) - subprocess.check_call([ - 'mkdir', - '-p', - t2, - ]) + subprocess.check_call( + [ + 'mkdir', + '-p', + t2, + ] + ) - t1 = [] - for i, o in enumerate(images): - t5 = cv2.cvtColor(o, cv2.COLOR_RGB2BGR) - t8 = 'image-%d.jpg' % i - t3 = os.path.join( - t2, - t8, - ) - cv2.imwrite(t3, t5) - t1.append( - dict( - image_name=t8, - image_path=t3, - image_canvas=o, - image_index=i, - ) - ) + t1 = [] + for i, o in enumerate(images): + t5 = cv2.cvtColor(o, cv2.COLOR_RGB2BGR) + t8 = 'image-%d.jpg' % i + t3 = os.path.join( + t2, + t8, + ) + cv2.imwrite(t3, t5) + t1.append( + dict( + image_name=t8, + image_path=t3, + image_canvas=o, + image_index=i, + ) + ) - t4 = '/kaggle/working/test-output' - subprocess.check_call([ - 'rm', - '-fr', - t4, - ]) + t4 = '/kaggle/working/test-output' + subprocess.check_call( + [ + 'rm', + '-fr', + t4, + ] + ) - subprocess.check_call([ - 'mkdir', - '-p', - t4, - ]) + subprocess.check_call( + [ + 'mkdir', + '-p', + t4, + ] + ) - with subprocess.Popen(''' + with subprocess.Popen( + """ cd /kaggle/working/AlphaPose && python3 \ scripts/demo_inference.py \ @@ -1239,398 +1226,414 @@ def kernel_16(images): --indir %s \ --outdir %s \ --save_img - ''' % (t2, t4), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: 
+ """ + % (t2, t4), + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as p: + pprint.pprint(p.communicate()) + p.wait() + assert p.returncode == 0 - pprint.pprint(p.communicate()) - p.wait() - assert p.returncode == 0 + t6 = [] + with io.open(os.path.join(t4, 'alphapose-results.json'), 'r') as f: + t7 = json.load(f) + for o in t1: + t9 = os.path.join(t4, 'vis', o['image_name']) + assert os.path.exists(t9) + t10 = cv2.imread(t9, cv2.IMREAD_COLOR) + t11 = cv2.cvtColor(t10, cv2.COLOR_BGR2RGB) + t6.append(t11) + return dict( + images=images, + t1=t1, + t6=t6, + t7=t7, + ) - t6 = [] - with io.open( - os.path.join( - t4, - 'alphapose-results.json' - ), - 'r' - ) as f: - t7 = json.load(f) - for o in t1: - t9 = os.path.join( - t4, - 'vis', - o['image_name'] - ) - assert os.path.exists(t9) - t10 = cv2.imread(t9, cv2.IMREAD_COLOR) - t11 = cv2.cvtColor(t10, cv2.COLOR_BGR2RGB) - t6.append(t11) - return dict( - images=images, - t1=t1, - t6=t6, - t7=t7, - ) def kernel_17( - o_14, - max_images=None, + o_14, + max_images=None, ): - if max_images is None: - max_images = 10 + if max_images is None: + max_images = 10 - t50 = [] - for t2 in [ - 'baseball glove', - 'baseball bat', - 'sports ball', - 'person', - ]: - t28 = t2.replace(' ', '-') - t3 = o_14['o_13']['t1'] - t4 = numpy.where(t3.name.data == t2)[0] + t50 = [] + for t2 in [ + 'baseball glove', + 'baseball bat', + 'sports ball', + 'person', + ]: + t28 = t2.replace(' ', '-') + t3 = o_14['o_13']['t1'] + t4 = numpy.where(t3.name.data == t2)[0] - t30 = 'output-png/%s' % t28 - if not os.path.exists(t30): - os.makedirs(t30, exist_ok=True) + t30 = 'output-png/%s' % t28 + if not os.path.exists(t30): + os.makedirs(t30, exist_ok=True) - numpy.random.seed(0) - t22 = numpy.random.choice(t4, max_images) - pprint.pprint(t22) - import tqdm - t24 = [] - t27 = [] - for t5 in tqdm.tqdm(t22): - t6 = t3.video_path.data[t5] - t7 = t3.frame_id.data[t5] - t8 = t3.to_dataframe().iloc[t5] - #pprint.pprint([t6, t7]) - #pprint.pprint(t8) + numpy.random.seed(0) + t22 = numpy.random.choice(t4, max_images) + pprint.pprint(t22) + import tqdm - import cv2 - import matplotlib.pyplot + t24 = [] + t27 = [] + for t5 in tqdm.tqdm(t22): + t6 = t3.video_path.data[t5] + t7 = t3.frame_id.data[t5] + t8 = t3.to_dataframe().iloc[t5] + # pprint.pprint([t6, t7]) + # pprint.pprint(t8) - t9 = cv2.VideoCapture(t6) - t9.set(cv2.CAP_PROP_POS_FRAMES, t7) - t10 = t9.read() - t9.release() - t11 = t10[1] - t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB) - t13 = t12.copy() + import cv2 + import matplotlib.pyplot - t50.append( - dict( - t2=t2, - t28=t28, - t6=t6, - t5=t5, - t7=t7, - t12=t12, - ) - ) + t9 = cv2.VideoCapture(t6) + t9.set(cv2.CAP_PROP_POS_FRAMES, t7) + t10 = t9.read() + t9.release() + t11 = t10[1] + t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB) + t13 = t12.copy() + + t50.append( + dict( + t2=t2, + t28=t28, + t6=t6, + t5=t5, + t7=t7, + t12=t12, + ) + ) + + return dict( + t50=t50, + ) - return dict( - t50=t50, - ) def kernel_18(o_17): - t1 = [o['t12'] for o in o_17['t50']] - t2 = kernel_16(t1) + t1 = [o['t12'] for o in o_17['t50']] + t2 = kernel_16(t1) + + return dict( + t2=t2, + ) - return dict( - t2=t2, - ) def kernel_19(o_18): - kernel_25( - o_18['t2']['t6'] - ) + kernel_25(o_18['t2']['t6']) + def kernel_20( - o_18, - o_21=None, + o_18, + o_21=None, ): - if o_21 is None: - o_21 = kernel_21() + if o_21 is None: + o_21 = kernel_21() - import cv2 - import numpy - import os + import cv2 + import numpy + import os + t1 = numpy.array(o_18['t2']['t7'][0]['keypoints']).reshape(17, -1) + 
t2 = o_18['t2']['t6'][0] + t3 = o_18['t2']['t1'][0]['image_canvas'].copy() + assert o_18['t2']['t7'][0]['image_id'] == os.path.split(o_18['t2']['t1'][0]['image_name'])[1] - t1 = numpy.array(o_18['t2']['t7'][0]['keypoints']).reshape(17, -1) - t2 = o_18['t2']['t6'][0] - t3 = o_18['t2']['t1'][0]['image_canvas'].copy() - assert o_18['t2']['t7'][0]['image_id'] == os.path.split(o_18['t2']['t1'][0]['image_name'])[1] + for i, o2 in enumerate(o_21['p_color']): + if i >= 17: + print('fuck %d' % i) + continue + o1 = t1[i, :] + cv2.circle(t3, tuple(o1[:2].astype(numpy.int32)), 3, o2, -1) + cv2.imwrite('output.jpg', cv2.cvtColor(t3, cv2.COLOR_RGB2BGR)) + cv2.imwrite('output-v2.jpg', cv2.cvtColor(t2, cv2.COLOR_RGB2BGR)) - for i, o2 in enumerate(o_21['p_color']): - if i >= 17: - print('fuck %d' % i) - continue - o1 = t1[i, :] - cv2.circle(t3, tuple(o1[:2].astype(numpy.int32)), 3, o2, -1) - cv2.imwrite('output.jpg', cv2.cvtColor(t3, cv2.COLOR_RGB2BGR)) - cv2.imwrite('output-v2.jpg', cv2.cvtColor(t2, cv2.COLOR_RGB2BGR)) def kernel_21(): - l_pair = [ - (0, 1), (0, 2), (1, 3), (2, 4), # Head - (5, 6), (5, 7), (7, 9), (6, 8), (8, 10), - (17, 11), (17, 12), # Body - (11, 13), (12, 14), (13, 15), (14, 16) - ] - p_color = [ - # Nose, LEye, REye, LEar, REar - (0, 255, 255), (0, 191, 255), (0, 255, 102), (0, 77, 255), (0, 255, 0), - # LShoulder, RShoulder, LElbow, RElbow, LWrist, RWrist - (77, 255, 255), (77, 255, 204), (77, 204, 255), (191, 255, 77), (77, 191, 255), (191, 255, 77), - # LHip, RHip, LKnee, Rknee, LAnkle, RAnkle - (204, 77, 255), (77, 255, 204), (191, 77, 255), (77, 255, 191), (127, 77, 255), (77, 255, 127), - # Not Present - ## Neck - #(0, 255, 255), - ] + l_pair = [ + (0, 1), + (0, 2), + (1, 3), + (2, 4), # Head + (5, 6), + (5, 7), + (7, 9), + (6, 8), + (8, 10), + (17, 11), + (17, 12), # Body + (11, 13), + (12, 14), + (13, 15), + (14, 16), + ] + p_color = [ + # Nose, LEye, REye, LEar, REar + (0, 255, 255), + (0, 191, 255), + (0, 255, 102), + (0, 77, 255), + (0, 255, 0), + # LShoulder, RShoulder, LElbow, RElbow, LWrist, RWrist + (77, 255, 255), + (77, 255, 204), + (77, 204, 255), + (191, 255, 77), + (77, 191, 255), + (191, 255, 77), + # LHip, RHip, LKnee, Rknee, LAnkle, RAnkle + (204, 77, 255), + (77, 255, 204), + (191, 77, 255), + (77, 255, 191), + (127, 77, 255), + (77, 255, 127), + # Not Present + ## Neck + # (0, 255, 255), + ] - line_color = [(0, 215, 255), (0, 255, 204), (0, 134, 255), (0, 255, 50), - (77, 255, 222), (77, 196, 255), (77, 135, 255), (191, 255, 77), (77, 255, 77), - (77, 222, 255), (255, 156, 127), - (0, 127, 255), (255, 127, 77), (0, 77, 255), (255, 77, 36)] - #https://debuggercafe.com/wp-content/uploads/2020/10/keypoint_exmp.jpg - # Seems like No Neck actually - title = [ - 'Nose', - 'LEye', - 'REye', - 'LEar', - 'REar', - 'LShoulder', - 'RShoulder', - 'LElbow', - 'RElbow', - 'LWrist', - 'RWrist', - 'LHip', - 'RHip', - 'LKnee', - 'RKnee', - 'LAnkle', - 'RAnkle', - #'Neck', - ] + line_color = [ + (0, 215, 255), + (0, 255, 204), + (0, 134, 255), + (0, 255, 50), + (77, 255, 222), + (77, 196, 255), + (77, 135, 255), + (191, 255, 77), + (77, 255, 77), + (77, 222, 255), + (255, 156, 127), + (0, 127, 255), + (255, 127, 77), + (0, 77, 255), + (255, 77, 36), + ] + # https://debuggercafe.com/wp-content/uploads/2020/10/keypoint_exmp.jpg + # Seems like No Neck actually + title = [ + 'Nose', + 'LEye', + 'REye', + 'LEar', + 'REar', + 'LShoulder', + 'RShoulder', + 'LElbow', + 'RElbow', + 'LWrist', + 'RWrist', + 'LHip', + 'RHip', + 'LKnee', + 'RKnee', + 'LAnkle', + 'RAnkle', + #'Neck', + ] + 
+ return dict( + l_pair=l_pair, + p_color=p_color, + line_color=line_color, + ) - return dict( - l_pair=l_pair, - p_color=p_color, - line_color=line_color, - ) def kernel_22(o_18): + t1 = o_18['t2']['t7'] + t2 = [numpy.array(o['keypoints']).reshape(17, 3) for o in t1] - t1 = o_18['t2']['t7'] - t2 = [ - numpy.array(o['keypoints']).reshape(17, 3) - for o in t1 - ] + o_31 = kernel_31( + image_id=[o['image_id'] for o in t1], + image_size=numpy.array([[list(o['image_canvas'].shape) for o in o_18['t2']['t1'] if o['image_name'] == t1[i]['image_id']][0] for i in range(len(t2))]), + keypoints=numpy.stack(t2, axis=0), + ) + t12 = o_31['t12'] - o_31 = kernel_31( - image_id=[ - o['image_id'] - for o in t1 - ], - image_size=numpy.array([ - [ - list(o['image_canvas'].shape) - for o in o_18['t2']['t1'] - if o['image_name'] == t1[i]['image_id'] - ][0] - for i in range(len(t2)) - ]), - keypoints=numpy.stack(t2, axis=0), - ) - t12 = o_31['t12'] + return dict( + t4=t12, + ) - return dict( - t4=t12, - ) def kernel_24( - img, - keypoints, - o_21=None, + img, + keypoints, + o_21=None, ): - t3 = img.copy() + t3 = img.copy() - if o_21 is None: - o_21 = kernel_21() + if o_21 is None: + o_21 = kernel_21() - import cv2 - import numpy - import os + import cv2 + import numpy + import os - for i, o2 in enumerate(o_21['p_color']): - if i >= 17: - print('fuck %d' % i) - continue - o1 = keypoints[i, :] - cv2.circle(t3, tuple(o1[:2].astype(numpy.int32)), 3, o2, -1) + for i, o2 in enumerate(o_21['p_color']): + if i >= 17: + print('fuck %d' % i) + continue + o1 = keypoints[i, :] + cv2.circle(t3, tuple(o1[:2].astype(numpy.int32)), 3, o2, -1) + + return dict( + t3=t3, + ) - return dict( - t3=t3, - ) def kernel_25(images, delay=None): - if delay is None: - delay = 100 + if delay is None: + delay = 100 - import tqdm - import os - import cv2 - import subprocess + import tqdm + import os + import cv2 + import subprocess - if len(images) == 0: - pprint.pprint(['kernel_25', 'no images']) - return + if len(images) == 0: + pprint.pprint(['kernel_25', 'no images']) + return - t3 = 'kernel_25-output.dir' - os.makedirs(t3, exist_ok=True) - t6 = [] - for i, o in tqdm.tqdm(enumerate(images)): - t4 = 'image-%03d.jpg' % i - t5 = os.path.join(t3, t4) - t8 = cv2.cvtColor(o, cv2.COLOR_RGB2BGR) - cv2.imwrite(t5, t8) - t6.append(t5) + t3 = 'kernel_25-output.dir' + os.makedirs(t3, exist_ok=True) + t6 = [] + for i, o in tqdm.tqdm(enumerate(images)): + t4 = 'image-%03d.jpg' % i + t5 = os.path.join(t3, t4) + t8 = cv2.cvtColor(o, cv2.COLOR_RGB2BGR) + cv2.imwrite(t5, t8) + t6.append(t5) - t7 = os.path.join(t3, 'output.gif') + t7 = os.path.join(t3, 'output.gif') - if False: - subprocess.check_call( - [ - 'convert', - '-delay', - '%d' % delay, - '-loop', - '0', - *t6, - t7, - ] - ) + if False: + subprocess.check_call( + [ + 'convert', + '-delay', + '%d' % delay, + '-loop', + '0', + *t6, + t7, + ] + ) - t8 = os.path.join( - t3, - 'output.mp4', - ) - if os.path.exists(t8): - os.unlink(t8) + t8 = os.path.join( + t3, + 'output.mp4', + ) + if os.path.exists(t8): + os.unlink(t8) - if False: - subprocess.check_call( - [ - 'ffmpeg', - '-i', - t7, - t8, - ] - ) - t9 = os.path.join(t3, 'input.txt') + if False: + subprocess.check_call( + [ + 'ffmpeg', + '-i', + t7, + t8, + ] + ) + t9 = os.path.join(t3, 'input.txt') + + with io.open(t9, 'w') as f: + f.write('\n'.join(['file %s' % os.path.split(o)[1] for o in t6])) + subprocess.check_call( + [ + 'ffmpeg', + '-r', + '%d' % int(100 / delay), + '-f', + 'concat', + '-i', + '%s' % t9, + t8, + ] + ) - with io.open(t9, 'w') 
as f: - f.write( - '\n'.join( - [ - 'file %s' % os.path.split(o)[1] - for o in t6 - ] - ) - ) - subprocess.check_call( - [ - 'ffmpeg', - '-r', - '%d' % int(100 / delay), - '-f', - 'concat', - '-i', - '%s' % t9, - t8, - ] - ) def kernel_26(o_18, image_name): - t1 = [ - i - for i, o in enumerate(o_18['t2']['t1']) - if o['image_name'] == image_name - ] - assert len(t1) == 1 - return t1[0] + t1 = [i for i, o in enumerate(o_18['t2']['t1']) if o['image_name'] == image_name] + assert len(t1) == 1 + return t1[0] + def kernel_23(o_18, o_22, ids=None): - import numpy - import tqdm + import numpy + import tqdm - if ids is None: - ids = numpy.s_[:] + if ids is None: + ids = numpy.s_[:] - t1 = numpy.arange(len(o_22['t4'])) - t2 = t1[ids] + t1 = numpy.arange(len(o_22['t4'])) + t2 = t1[ids] - pprint.pprint(t2[:5]) - t7 = [] - for o in tqdm.tqdm(t2): - t3 = o_22['t4'][o] - t9 = kernel_26(o_18=o_18, image_name=t3['image_name']) - t4 = o_18['t2']['t1'][t9]['image_canvas'] - t10 = o_18['t2']['t6'][t9] - t4 = [ - o['image_canvas'] - for o in o_18['t2']['t1'] - if o['image_name'] == t3['image_name'] - ] - assert len(t4) == 1 - t5 = t4[0] - t6 = kernel_24(t5, t3['keypoints']) - t7.append(t6['t3']) - t7.append(t10) + pprint.pprint(t2[:5]) + t7 = [] + for o in tqdm.tqdm(t2): + t3 = o_22['t4'][o] + t9 = kernel_26(o_18=o_18, image_name=t3['image_name']) + t4 = o_18['t2']['t1'][t9]['image_canvas'] + t10 = o_18['t2']['t6'][t9] + t4 = [o['image_canvas'] for o in o_18['t2']['t1'] if o['image_name'] == t3['image_name']] + assert len(t4) == 1 + t5 = t4[0] + t6 = kernel_24(t5, t3['keypoints']) + t7.append(t6['t3']) + t7.append(t10) + + kernel_25(t7) - kernel_25(t7) def kernel_27(): - import tqdm - import os - import subprocess - import pprint + import tqdm + import os + import subprocess + import pprint - t5 = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' - t3 = '/kaggle/working/kernel_27-output.dir' - os.makedirs(t3, exist_ok=True) - #R = int(round(30 / 5)) - FRAMERATE = 4 - SLICE_LENGTH = 5 * 6 - for i in tqdm.tqdm(range(100)): - t1 = [SLICE_LENGTH * i, SLICE_LENGTH * (i + 1)] - t2 = os.path.join(t3, 'slice-%d' % i) - os.makedirs(t2, exist_ok=True) - t4 = os.path.join(t2, 'output.mp4') - if os.path.exists(t4): - os.unlink(t4) - with subprocess.Popen([ - 'ffmpeg', - '-i', - '%s' % t5, - '-filter:v', - 'fps=fps=%d' % FRAMERATE, - '-ss', - '%d' % t1[0], - '-t', - '%d' % (t1[1] - t1[0]), - '%s' % t4, - ], stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: - if False: - pprint.pprint(p.communicate()) - p.wait() - assert p.returncode == 0 + t5 = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + t3 = '/kaggle/working/kernel_27-output.dir' + os.makedirs(t3, exist_ok=True) + # R = int(round(30 / 5)) + FRAMERATE = 4 + SLICE_LENGTH = 5 * 6 + for i in tqdm.tqdm(range(100)): + t1 = [SLICE_LENGTH * i, SLICE_LENGTH * (i + 1)] + t2 = os.path.join(t3, 'slice-%d' % i) + os.makedirs(t2, exist_ok=True) + t4 = os.path.join(t2, 'output.mp4') + if os.path.exists(t4): + os.unlink(t4) + with subprocess.Popen( + [ + 'ffmpeg', + '-i', + '%s' % t5, + '-filter:v', + 'fps=fps=%d' % FRAMERATE, + '-ss', + '%d' % t1[0], + '-t', + '%d' % (t1[1] - t1[0]), + '%s' % t4, + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) as p: + if False: + pprint.pprint(p.communicate()) + p.wait() + assert p.returncode == 0 - t6 = ''' + t6 = """ cd /kaggle/working/AlphaPose && \ python3 \ scripts/demo_inference.py \ @@ -1638,90 +1641,91 @@ def kernel_27(): --checkpoint pretrained_models/fast_res50_256x192.pth \ --video %s \ 
--outdir %s - ''' % (t4, t2) - if False: - pprint.pprint([t4, t2, t6]) - with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: - if False: - pprint.pprint(p.communicate()) - p.wait() - assert p.returncode == 0 + """ % (t4, t2) + if False: + pprint.pprint([t4, t2, t6]) + with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + if False: + pprint.pprint(p.communicate()) + p.wait() + assert p.returncode == 0 + def kernel_28( - video_path=None, - framerate=None, - max_seconds=None, - video_id=None, + video_path=None, + framerate=None, + max_seconds=None, + video_id=None, ): - if video_id is None: - video_id = '' + if video_id is None: + video_id = '' - import cv2 - import tqdm - import os - import subprocess - import pprint + import cv2 + import tqdm + import os + import subprocess + import pprint - if framerate is None: - framerate = 4 - if max_seconds is None: - max_seconds = 999999 + if framerate is None: + framerate = 4 + if max_seconds is None: + max_seconds = 999999 - if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' - t5 = video_path - t3 = '/kaggle/working/kernel_28-output%s.dir' % video_id - t13 = '/root/kernel_28-output.dir/tmp-slice' - os.makedirs(t3, exist_ok=True) + if video_path is None: + video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + t5 = video_path + t3 = '/kaggle/working/kernel_28-output%s.dir' % video_id + t13 = '/root/kernel_28-output.dir/tmp-slice' + os.makedirs(t3, exist_ok=True) - cap = None + cap = None - try: - cap = cv2.VideoCapture(t5) - fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" - frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) - real_duration = frame_count/fps - duration = min(real_duration, max_seconds) - pprint.pprint( - dict( - fps=fps, - frame_count=frame_count, - duration=duration, - real_duration=real_duration, - ) - ) + try: + cap = cv2.VideoCapture(t5) + fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" + frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) + real_duration = frame_count / fps + duration = min(real_duration, max_seconds) + pprint.pprint( + dict( + fps=fps, + frame_count=frame_count, + duration=duration, + real_duration=real_duration, + ) + ) - #R = int(round(30 / 5)) - FRAMERATE = framerate - SLICE_LENGTH = 5 * 6 - for i in tqdm.tqdm(range(int(duration / SLICE_LENGTH + 1e-8))): - t2 = os.path.join(t3, 'slice-%d' % i) - if os.path.exists(t2): - pprint.pprint(['t2', t2, 'exists', 'continue']) - continue + # R = int(round(30 / 5)) + FRAMERATE = framerate + SLICE_LENGTH = 5 * 6 + for i in tqdm.tqdm(range(int(duration / SLICE_LENGTH + 1e-8))): + t2 = os.path.join(t3, 'slice-%d' % i) + if os.path.exists(t2): + pprint.pprint(['t2', t2, 'exists', 'continue']) + continue - t1 = [SLICE_LENGTH * i, SLICE_LENGTH * (i + 1)] + t1 = [SLICE_LENGTH * i, SLICE_LENGTH * (i + 1)] - if os.path.exists(t13): - subprocess.check_call(['rm', '-fr', t13]) - os.makedirs(t13, exist_ok=True) + if os.path.exists(t13): + subprocess.check_call(['rm', '-fr', t13]) + os.makedirs(t13, exist_ok=True) - t8 = numpy.array([t1[0] * fps, t1[1] * fps]).astype(numpy.int32) - cap.set(cv2.CAP_PROP_POS_FRAMES, t8[0]) - t12 = numpy.arange(t8[0], t8[1], SLICE_LENGTH * FRAMERATE) - for k in range(t8[1] - t8[0]): - ret, frame = cap.read() - t11 = cap.get(cv2.CAP_PROP_POS_FRAMES) - if numpy.isin(t11, t12): - t10 = os.path.join( - t13, - 'frame-%d.jpg' % t11, - ) - 
cv2.imwrite(t10, frame) + t8 = numpy.array([t1[0] * fps, t1[1] * fps]).astype(numpy.int32) + cap.set(cv2.CAP_PROP_POS_FRAMES, t8[0]) + t12 = numpy.arange(t8[0], t8[1], SLICE_LENGTH * FRAMERATE) + for k in range(t8[1] - t8[0]): + ret, frame = cap.read() + t11 = cap.get(cv2.CAP_PROP_POS_FRAMES) + if numpy.isin(t11, t12): + t10 = os.path.join( + t13, + 'frame-%d.jpg' % t11, + ) + cv2.imwrite(t10, frame) - os.makedirs(t2, exist_ok=True) + os.makedirs(t2, exist_ok=True) - t6 = ''' + t6 = """ cd /kaggle/working/AlphaPose && \ python3 \ scripts/demo_inference.py \ @@ -1729,530 +1733,494 @@ def kernel_28( --checkpoint pretrained_models/fast_res50_256x192.pth \ --indir %s \ --outdir %s - ''' % (t13, t2) - if False: - pprint.pprint([t4, t2, t6]) - with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: - if False: - pprint.pprint(p.communicate()) - p.wait() - assert p.returncode == 0 - finally: - if not cap is None: - cap.release() + """ % (t13, t2) + if False: + pprint.pprint([t4, t2, t6]) + with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + if False: + pprint.pprint(p.communicate()) + p.wait() + assert p.returncode == 0 + finally: + if not cap is None: + cap.release() -def kernel_29(video_path=None, video_id=None,): - if video_id is None: - video_id = '' - if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' +def kernel_29( + video_path=None, + video_id=None, +): + if video_id is None: + video_id = '' - assert os.path.exists(video_path) + if video_path is None: + video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' - t40 = 'kernel_29-poses%s.json' % video_id - t4 = os.path.join('/kaggle/working', t40) - t6 = [ - t4, - os.path.join('/kaggle/input/garbage', t40), - ] + assert os.path.exists(video_path) - t7 = [ - o - for o in t6 - if os.path.exists(o) - ] + t40 = 'kernel_29-poses%s.json' % video_id + t4 = os.path.join('/kaggle/working', t40) + t6 = [ + t4, + os.path.join('/kaggle/input/garbage', t40), + ] - if len(t7) == 0: + t7 = [o for o in t6 if os.path.exists(o)] - t1 = [ - dict( - data=json.load( - io.open( - o, - 'r' - ) - ), - input_path=o - ) - for o in glob.glob( - '/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' % video_id - ) - ] + if len(t7) == 0: + t1 = [dict(data=json.load(io.open(o, 'r')), input_path=o) for o in glob.glob('/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' % video_id)] - assert len(t1) > 0 + assert len(t1) > 0 - with io.open(t4, 'w') as f: - f.write(json.dumps(t1)) + with io.open(t4, 'w') as f: + f.write(json.dumps(t1)) - t7.append(t4) + t7.append(t4) - with io.open(t7[0], 'r') as f: - t1 = json.load(f) + with io.open(t7[0], 'r') as f: + t1 = json.load(f) - t8 = sum([ - o['data'] - for o in t1 - ], []) - t10 = re.compile('frame-(\d+)\.jpg') + t8 = sum([o['data'] for o in t1], []) + t10 = re.compile('frame-(\d+)\.jpg') - for i, o in enumerate(t8): - o['frame_id'] = int(t10.match(o['image_id'])[1]) + for i, o in enumerate(t8): + o['frame_id'] = int(t10.match(o['image_id'])[1]) - t9 = sorted( - t8, - key=lambda o: o['frame_id'] - ) + t9 = sorted(t8, key=lambda o: o['frame_id']) - t2 = pandas.DataFrame(t9) + t2 = pandas.DataFrame(t9) - t5 = t2.to_xarray() - t5['keypoints'] = xarray.DataArray( - [ - numpy.array(o).reshape(17, 3) - for o in t5['keypoints'].data - ], - dims=['index', 'joint', 'feature'], - ) - o_31 = kernel_31( - image_id=t5.image_id.data, - image_size=[ - kernel_32(video_path) - ] * t5.index.shape[0], 
- keypoints=t5.keypoints.data, - ) - for k, v in o_31['t13'].items(): - #pprint.pprint([k, v]) - t5[k] = v + t5 = t2.to_xarray() + t5['keypoints'] = xarray.DataArray( + [numpy.array(o).reshape(17, 3) for o in t5['keypoints'].data], + dims=['index', 'joint', 'feature'], + ) + o_31 = kernel_31( + image_id=t5.image_id.data, + image_size=[kernel_32(video_path)] * t5.index.shape[0], + keypoints=t5.keypoints.data, + ) + for k, v in o_31['t13'].items(): + # pprint.pprint([k, v]) + t5[k] = v + + return dict( + t5=t5, + t2=t2, + t9=t9, + ) - return dict( - t5=t5, - t2=t2, - t9=t9, - ) def kernel_30( - o_29, - ids=None, - delay=None, - prev_frames=None, - max_frames=None, - max_ids=None, - video_path=None, - low_portion=None, - low_mean_conf=None, - no_dots=None, + o_29, + ids=None, + delay=None, + prev_frames=None, + max_frames=None, + max_ids=None, + video_path=None, + low_portion=None, + low_mean_conf=None, + no_dots=None, ): - if no_dots is None: - no_dots = False + if no_dots is None: + no_dots = False - if low_portion is None: - low_portion = 0.1 - if low_mean_conf is None: - low_mean_conf = 0.6 + if low_portion is None: + low_portion = 0.1 + if low_mean_conf is None: + low_mean_conf = 0.6 - if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + if video_path is None: + video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' - if max_frames is None: - max_frames = 9999 - if max_ids is None: - max_ids = 70 + if max_frames is None: + max_frames = 9999 + if max_ids is None: + max_ids = 70 - if prev_frames is None: - prev_frames = 0 + if prev_frames is None: + prev_frames = 0 - t5 = o_29['t5'] + t5 = o_29['t5'] - if delay is None: - delay = 200 + if delay is None: + delay = 200 - if ids is None: - numpy.random.seed(0) - ids = numpy.random.choice( - t5.index.shape[0], - 10 - ) - elif ids == 'v1': - t8 = numpy.where( - numpy.logical_and( - o_29['t5'].portion > low_portion, - o_29['t5'].mean_conf > low_mean_conf, - ) - )[0] - ids = numpy.random.choice( - t8, - min(len(t8), 70), - replace=False, - ) - elif ids == 'v2': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.1, - o_29['t5'].mean_conf > 0.6, - o_29['t5']['t11'].data, - ], - axis=0 - ).prod(axis=0) - pprint.pprint([t8.sum(), t8.mean()]) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - ) - elif ids == 'v3': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.1, - o_29['t5'].mean_conf > 0.6, - o_29['t5']['t10'].data, - ], - axis=0 - ).prod(axis=0) - pprint.pprint([t8.sum(), t8.mean()]) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - ) - elif ids == 'v4': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.1, - o_29['t5'].min_conf > 0.6, - o_29['t5']['t9'].data > 0, - ], - axis=0 - ).prod(axis=0) - pprint.pprint([t8.sum(), t8.mean()]) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - ) - elif ids == 'v5': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.1, - o_29['t5'].min_conf > 0.5, - o_29['t5']['t8'].data > 0, - ], - axis=0 - ).prod(axis=0) - pprint.pprint([t8.sum(), t8.mean()]) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - ) - elif ids == 'v6': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.02, - o_29['t5'].min_conf > 0.4, - o_29['t5']['t8'].data > 0, - ], - axis=0 - ).prod(axis=0) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - replace=False, - ) - pprint.pprint([ - t8.sum(), - t8.mean(), - ids, - 
o_29['t5'].sel(index=o_29['t5'].index.data[ids[:5]]).to_dict(), - ]) - elif ids == 'v7': - t8 = numpy.stack( - [ - o_29['t5'].portion > 0.02, - o_29['t5'].min_conf > 0.4, - numpy.stack( - [ - o_29['t5']['t4'].data > 0, - o_29['t5']['t5'].data > 0, - ], - axis=0 - ).sum(axis=0) > 0, - ], - axis=0 - ).prod(axis=0) - ids = numpy.random.choice( - numpy.where(t8)[0], - min(70, t8.sum()), - replace=False, - ) - pprint.pprint([ - t8.sum(), - t8.mean(), - ids, - o_29['t5'].sel(index=o_29['t5'].index.data[ids[:5]]).to_dict(), - ]) - else: - assert isinstance(ids, numpy.ndarray) + if ids is None: + numpy.random.seed(0) + ids = numpy.random.choice(t5.index.shape[0], 10) + elif ids == 'v1': + t8 = numpy.where( + numpy.logical_and( + o_29['t5'].portion > low_portion, + o_29['t5'].mean_conf > low_mean_conf, + ) + )[0] + ids = numpy.random.choice( + t8, + min(len(t8), 70), + replace=False, + ) + elif ids == 'v2': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.1, + o_29['t5'].mean_conf > 0.6, + o_29['t5']['t11'].data, + ], + axis=0, + ).prod(axis=0) + pprint.pprint([t8.sum(), t8.mean()]) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + ) + elif ids == 'v3': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.1, + o_29['t5'].mean_conf > 0.6, + o_29['t5']['t10'].data, + ], + axis=0, + ).prod(axis=0) + pprint.pprint([t8.sum(), t8.mean()]) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + ) + elif ids == 'v4': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.1, + o_29['t5'].min_conf > 0.6, + o_29['t5']['t9'].data > 0, + ], + axis=0, + ).prod(axis=0) + pprint.pprint([t8.sum(), t8.mean()]) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + ) + elif ids == 'v5': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.1, + o_29['t5'].min_conf > 0.5, + o_29['t5']['t8'].data > 0, + ], + axis=0, + ).prod(axis=0) + pprint.pprint([t8.sum(), t8.mean()]) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + ) + elif ids == 'v6': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.02, + o_29['t5'].min_conf > 0.4, + o_29['t5']['t8'].data > 0, + ], + axis=0, + ).prod(axis=0) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + replace=False, + ) + pprint.pprint( + [ + t8.sum(), + t8.mean(), + ids, + o_29['t5'].sel(index=o_29['t5'].index.data[ids[:5]]).to_dict(), + ] + ) + elif ids == 'v7': + t8 = numpy.stack( + [ + o_29['t5'].portion > 0.02, + o_29['t5'].min_conf > 0.4, + numpy.stack( + [ + o_29['t5']['t4'].data > 0, + o_29['t5']['t5'].data > 0, + ], + axis=0, + ).sum(axis=0) + > 0, + ], + axis=0, + ).prod(axis=0) + ids = numpy.random.choice( + numpy.where(t8)[0], + min(70, t8.sum()), + replace=False, + ) + pprint.pprint( + [ + t8.sum(), + t8.mean(), + ids, + o_29['t5'].sel(index=o_29['t5'].index.data[ids[:5]]).to_dict(), + ] + ) + else: + assert isinstance(ids, numpy.ndarray) - ids = ids[:max_ids] - pprint.pprint(['ids', ids]) + ids = ids[:max_ids] + pprint.pprint(['ids', ids]) - t7 = [] + t7 = [] - t16 = video_path - t15 = kernel_32(t16) + t16 = video_path + t15 = kernel_32(t16) - for o in tqdm.tqdm(ids): - t2 = t5.keypoints.data[o] - t3 = t5.frame_id.data[o] + for o in tqdm.tqdm(ids): + t2 = t5.keypoints.data[o] + t3 = t5.frame_id.data[o] - cap = None - try: - cap = cv2.VideoCapture(t16) - if prev_frames > 0: - t14 = kernel_24( - numpy.zeros(t15, dtype=numpy.uint8), - t2, - )['t3'] - t17 = (t14 == 0).astype(numpy.uint8) + cap = None + try: + cap = cv2.VideoCapture(t16) + if prev_frames > 0: + t14 = kernel_24( + 
numpy.zeros(t15, dtype=numpy.uint8), + t2, + )['t3'] + t17 = (t14 == 0).astype(numpy.uint8) - for k in range(prev_frames): - cap.set(cv2.CAP_PROP_POS_FRAMES, t3 - prev_frames + k) - ret, frame = cap.read() - t12 = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) - #t13 = kernel_24(t12, t2)['t3'] - t13 = t12 * t17 + t14 - t7.append(t13) + for k in range(prev_frames): + cap.set(cv2.CAP_PROP_POS_FRAMES, t3 - prev_frames + k) + ret, frame = cap.read() + t12 = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + # t13 = kernel_24(t12, t2)['t3'] + t13 = t12 * t17 + t14 + t7.append(t13) + + cap.set(cv2.CAP_PROP_POS_FRAMES, t3) + ret, frame = cap.read() + t4 = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) + if not no_dots: + t6 = kernel_24(t4, t2)['t3'] + else: + t6 = t4 + t7.append(t6) + finally: + if not cap is None: + cap.release() + kernel_25(t7[:max_frames], delay=delay) - cap.set(cv2.CAP_PROP_POS_FRAMES, t3) - ret, frame = cap.read() - t4 = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB) - if not no_dots: - t6 = kernel_24(t4, t2)['t3'] - else: - t6 = t4 - t7.append(t6) - finally: - if not cap is None: - cap.release() - kernel_25(t7[:max_frames], delay=delay) def kernel_31(image_id, image_size, keypoints): - def get_angle(a,b): - from math import sqrt, acos, degrees, atan, degrees - #print(a) - #print(b) - del_y = a[1]-b[1] - del_x = b[0]-a[0] - if del_x == 0: - del_x = 0.1 - #print("Del_X : "+str(del_x)+"-----Del_Y: "+str(del_y)) - angle = 0 + def get_angle(a, b): + from math import sqrt, acos, degrees, atan, degrees - if del_x > 0 and del_y > 0: - angle = degrees(atan(del_y / del_x)) - elif del_x < 0 and del_y > 0: - angle = degrees(atan(del_y / del_x)) + 180 + # print(a) + # print(b) + del_y = a[1] - b[1] + del_x = b[0] - a[0] + if del_x == 0: + del_x = 0.1 + # print("Del_X : "+str(del_x)+"-----Del_Y: "+str(del_y)) + angle = 0 - return angle + if del_x > 0 and del_y > 0: + angle = degrees(atan(del_y / del_x)) + elif del_x < 0 and del_y > 0: + angle = degrees(atan(del_y / del_x)) + 180 - def angle_gor(a,b,c,d): - import numpy as np - from math import sqrt, acos, degrees, atan, degrees - ab=[a[0]-b[0],a[1]-b[1]] - ab1=[c[0]-d[0],c[1]-d[1]] - cos = \ - abs( - ab[0] * ab1[0] + ab[1] * ab1[1] - ) / ( - sqrt( - ab[0] ** 2 + ab[1] ** 2 - ) * \ - sqrt( - ab1[0] ** 2 + ab1[1] ** 2 - ) + 1e-8 - ) - ang = acos(cos) - return ang*180/np.pi + return angle + def angle_gor(a, b, c, d): + import numpy as np + from math import sqrt, acos, degrees, atan, degrees - def sit_ang(a,b,c,d): - ang=angle_gor(a,b,c,d) - s1=0 - if ang != None: - #print("Angle",ang) - if ang < 120 and ang>40: - s1=1 - return s1 + ab = [a[0] - b[0], a[1] - b[1]] + ab1 = [c[0] - d[0], c[1] - d[1]] + cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2) + 1e-8) + ang = acos(cos) + return ang * 180 / np.pi - def sit_rec(a,b,c,d): - from math import sqrt, acos, degrees, atan, degrees + def sit_ang(a, b, c, d): + ang = angle_gor(a, b, c, d) + s1 = 0 + if ang != None: + # print("Angle",ang) + if ang < 120 and ang > 40: + s1 = 1 + return s1 - ab = [a[0] - b[0], a[1] - b[1]] - ab1 = [c[0] - d[0], c[1] - d[1]] - l1=sqrt(ab[0]**2+ab[1]**2) - l2=sqrt(ab1[0]**2+ab1[1]**2) - s=0 - if l1!=0 and l2!=0: - #print(l1,l2, "---------->>>") - if l2/l1>=1.5: - s=1 - return s + def sit_rec(a, b, c, d): + from math import sqrt, acos, degrees, atan, degrees - t3 = [] - for i, o in enumerate(keypoints): - t4 = numpy.min(o[:, 0]) - t5 = numpy.max(o[:, 0]) - t6 = numpy.min(o[:, 1]) - t7 = numpy.max(o[:, 1]) + ab = [a[0] - b[0], a[1] - b[1]] + 
ab1 = [c[0] - d[0], c[1] - d[1]] + l1 = sqrt(ab[0] ** 2 + ab[1] ** 2) + l2 = sqrt(ab1[0] ** 2 + ab1[1] ** 2) + s = 0 + if l1 != 0 and l2 != 0: + # print(l1,l2, "---------->>>") + if l2 / l1 >= 1.5: + s = 1 + return s - t8 = (t5 - t4) * (t7 - t6) + t3 = [] + for i, o in enumerate(keypoints): + t4 = numpy.min(o[:, 0]) + t5 = numpy.max(o[:, 0]) + t6 = numpy.min(o[:, 1]) + t7 = numpy.max(o[:, 1]) - t9 = image_size[i] - t10 = t9[0] * t9[1] - t3.append( - dict( - t8=t8, - t9=t9, - t10=t10, - ) - ) - t12 = [ - dict( - square=t3[i]['t8'], - total=t3[i]['t10'], - portion=t3[i]['t8'] / (t3[i]['t10'] + 1e-8), - keypoints=keypoints[i], - image_name=image_id[i], - ) - for i in range(len(t3)) - ] + t8 = (t5 - t4) * (t7 - t6) - for i, o in enumerate(t12): - t1 = o['keypoints'] - t2 = get_angle( - t1[3, :2], - t1[4, :2], - ) - t3 = get_angle( - t1[6, :2], - t1[7, :2], - ) - t4 = 0 - if 30 < t2 and t2 < 150: - t4 = 1 - t5 = 0 - if 30 < t3 and t3 < 150: - t5 = 1 - t6 = t4 + t6 - t7 = 0 - if t6 == 1: - t7 = 1 - t8 = 0 - t8 += sit_rec( - t1[9, :2], - t1[10, :2], - t1[10, :2], - t1[11, :2], - ) - t8 += sit_rec( - t1[12, :2], - t1[13, :2], - t1[13, :2], - t1[14, :2], - ) - t9 = 0 - t9 += sit_ang( - t1[9, :2], - t1[10, :2], - t1[10, :2], - t1[11, :2], - ) - t9 += sit_ang( - t1[12, :2], - t1[13, :2], - t1[13, :2], - t1[14, :2], - ) - t10 = 0 - if t8 > 0 or t9 > 0: - t10 = 1 - t11 = 0 - if t8 == 0 and t9 == 0: - t11 = 1 - o.update( - dict( - t2=t2, - t3=t3, - t4=t4, - t5=t5, - t6=t6, - t7=t7, - t8=t8, - t9=t9, - t10=t10, - t11=t11, - mean_conf=t1[:, 2].mean(), - min_conf=t1[:, 2].min(), - ) - ) + t9 = image_size[i] + t10 = t9[0] * t9[1] + t3.append( + dict( + t8=t8, + t9=t9, + t10=t10, + ) + ) + t12 = [ + dict( + square=t3[i]['t8'], + total=t3[i]['t10'], + portion=t3[i]['t8'] / (t3[i]['t10'] + 1e-8), + keypoints=keypoints[i], + image_name=image_id[i], + ) + for i in range(len(t3)) + ] - t13 = pandas.DataFrame(t12).drop(columns=['keypoints']).to_xarray() + for i, o in enumerate(t12): + t1 = o['keypoints'] + t2 = get_angle( + t1[3, :2], + t1[4, :2], + ) + t3 = get_angle( + t1[6, :2], + t1[7, :2], + ) + t4 = 0 + if 30 < t2 and t2 < 150: + t4 = 1 + t5 = 0 + if 30 < t3 and t3 < 150: + t5 = 1 + t6 = t4 + t6 + t7 = 0 + if t6 == 1: + t7 = 1 + t8 = 0 + t8 += sit_rec( + t1[9, :2], + t1[10, :2], + t1[10, :2], + t1[11, :2], + ) + t8 += sit_rec( + t1[12, :2], + t1[13, :2], + t1[13, :2], + t1[14, :2], + ) + t9 = 0 + t9 += sit_ang( + t1[9, :2], + t1[10, :2], + t1[10, :2], + t1[11, :2], + ) + t9 += sit_ang( + t1[12, :2], + t1[13, :2], + t1[13, :2], + t1[14, :2], + ) + t10 = 0 + if t8 > 0 or t9 > 0: + t10 = 1 + t11 = 0 + if t8 == 0 and t9 == 0: + t11 = 1 + o.update( + dict( + t2=t2, + t3=t3, + t4=t4, + t5=t5, + t6=t6, + t7=t7, + t8=t8, + t9=t9, + t10=t10, + t11=t11, + mean_conf=t1[:, 2].mean(), + min_conf=t1[:, 2].min(), + ) + ) + + t13 = pandas.DataFrame(t12).drop(columns=['keypoints']).to_xarray() + + return dict( + t12=t12, + t13=t13, + ) - return dict( - t12=t12, - t13=t13, - ) def kernel_32(video_path): - assert os.path.exists(video_path) + assert os.path.exists(video_path) - cap = None - try: - cap = cv2.VideoCapture(video_path) - ret, frame = cap.read() - return frame.shape - finally: - if not cap is None: - cap.release() + cap = None + try: + cap = cv2.VideoCapture(video_path) + ret, frame = cap.read() + return frame.shape + finally: + if not cap is None: + cap.release() def kernel_33(): - o_14 = kernel_14() - o_15 = kernel_15(o_14=o_14) + o_14 = kernel_14() + o_15 = kernel_15(o_14=o_14) - assert os.system(r''' python 
repo/d1/mlb_player_v5.py ''') == 0 - o_17 = kernel_17(o_14=o_14, max_images=100) + assert os.system(r""" python repo/d1/mlb_player_v5.py """) == 0 + o_17 = kernel_17(o_14=o_14, max_images=100) + + o_18 = kernel_18(o_17=o_17) + o_19 = kernel_19(o_18=o_18) + o_20 = kernel_20(o_18=o_18) + o_22 = kernel_22(o_18=o_18) + import pandas + + o_23 = kernel_23(o_18=o_18, o_22=o_22, ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values) + o_27 = kernel_27() + o_28 = kernel_28() + o_29 = kernel_29() + import numpy + + o_30 = kernel_30(o_29=o_29, ids='v1') - o_18 = kernel_18(o_17=o_17) - o_19 = kernel_19(o_18=o_18) - o_20 = kernel_20(o_18=o_18) - o_22 = kernel_22(o_18=o_18) - import pandas - o_23 = kernel_23( - o_18=o_18, - o_22=o_22, - ids=pandas.DataFrame( - o_22['t4'] - ).query('portion > 0.1').index.values - ) - o_27 = kernel_27() - o_28 = kernel_28() - o_29 = kernel_29() - import numpy - o_30 = kernel_30(o_29=o_29, ids='v1') def kernel_34(o_14): - t1 = numpy.unique( - o_14['o_3']['t5']['events']['event'], - return_counts=True, - ) - t2 = [ - [o, o2] - for o, o2 in zip(*t1) - ] - pprint.pprint( - t2 - ) + t1 = numpy.unique( + o_14['o_3']['t5']['events']['event'], + return_counts=True, + ) + t2 = [[o, o2] for o, o2 in zip(*t1)] + pprint.pprint(t2) + def kernel_35(): - return r''' + return r""" 1. https://posetrack.net/leaderboard.php 2. https://posetrack.net/users/view_entry_details.php?entry=CF_chl2 3. https://posetrack.net/users/view_entry_details.php?entry=DCPose_chl2 @@ -2262,23 +2230,27 @@ def kernel_35(): 7. https://github.com/leoxiaobin/deep-high-resolution-net.pytorch 8. https://github.com/CMU-Perceptual-Computing-Lab/openpose https://github.com/CMU-Perceptual-Computing-Lab/openpose/issues/1736#issuecomment-736846794 - ''' - -def kernel_36(): - # -*- coding: utf-8 -*- - """OpenPose.ipynb - - Automatically generated by Colaboratory. - - Original file is located at - https://colab.research.google.com/drive/1VDjRLKAu9KLQky0gv4RLJjeH6NRPiWXy """ - assert os.system(r''' - cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` - ''') == 0 - """# Pose Detection with OpenPose +def kernel_36(): + # -*- coding: utf-8 -*- + """OpenPose.ipynb + + Automatically generated by Colaboratory. + + Original file is located at + https://colab.research.google.com/drive/1VDjRLKAu9KLQky0gv4RLJjeH6NRPiWXy + """ + + assert ( + os.system(r""" + cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. 
&& make -j`nproc` + """) + == 0 + ) + + """# Pose Detection with OpenPose This notebook uses an open source project [CMU-Perceptual-Computing-Lab/openpose](https://github.com/CMU-Perceptual-Computing-Lab/openpose.git) to detect/track multi person poses on a video from your Google Drive @@ -2287,153 +2259,178 @@ def kernel_36(): ## Choose a video from your Google Drive """ - from google.colab import drive - drive.mount('/content/drive') + from google.colab import drive - """## Install OpenPose on Google Colab + drive.mount('/content/drive') + + """## Install OpenPose on Google Colab """ - import os - from os.path import exists, join, basename, splitext + import os + from os.path import exists, join, basename, splitext - git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git' - project_name = splitext(basename(git_repo_url))[0] + git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git' + project_name = splitext(basename(git_repo_url))[0] - if 1 or not exists(project_name): - assert os.system(r'''!rm -rf openpose''') == 0 - # see: https://github.com/CMU-Perceptual-Computing-Lab/openpose/issues/949 - print("install new CMake becaue of CUDA10") - cmake_version = 'cmake-3.20.2-linux-x86_64.tar.gz' - if not exists(cmake_version): - assert os.system(r'''!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' ''') == 0 - assert os.system(r'''!tar xfz {cmake_version} --strip-components=1 -C /usr/local ''') == 0 + if 1 or not exists(project_name): + assert os.system(r"""!rm -rf openpose""") == 0 + # see: https://github.com/CMU-Perceptual-Computing-Lab/openpose/issues/949 + print('install new CMake becaue of CUDA10') + cmake_version = 'cmake-3.20.2-linux-x86_64.tar.gz' + if not exists(cmake_version): + assert os.system(r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """) == 0 + assert os.system(r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """) == 0 - print("clone openpose") - assert os.system(r'''!git clone -q --depth 1 $git_repo_url ''') == 0 - print("install system dependencies") - assert os.system(r'''!apt-get -qq install -y libatlas-base-dev libprotobuf-dev libleveldb-dev libsnappy-dev libhdf5-serial-dev protobuf-compiler libgflags-dev libgoogle-glog-dev liblmdb-dev opencl-headers ocl-icd-opencl-dev libviennacl-dev ''') == 0 - print("build openpose") - assert os.system(r'''!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` ''') == 0 + print('clone openpose') + assert os.system(r"""!git clone -q --depth 1 $git_repo_url """) == 0 + print('install system dependencies') + assert ( + os.system( + r"""!apt-get -qq install -y libatlas-base-dev libprotobuf-dev libleveldb-dev libsnappy-dev libhdf5-serial-dev protobuf-compiler libgflags-dev libgoogle-glog-dev liblmdb-dev opencl-headers ocl-icd-opencl-dev libviennacl-dev """ + ) + == 0 + ) + print('build openpose') + assert os.system(r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. 
&& make -j`nproc` """) == 0 - """## From a Google Drive's folder""" + """## From a Google Drive's folder""" - import os - from os.path import exists, join, basename, splitext + import os + from os.path import exists, join, basename, splitext - folder_path = '/content/drive/My Drive/openpose/' - files = os.listdir(folder_path) - files.reverse() - for filename in files: - if filename.endswith('.mp4') and not filename.endswith('-openpose.mp4'): - print(filename) - colab_video_path = folder_path + filename - print(colab_video_path) - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' - print(colab_openpose_video_path) - if not exists(colab_openpose_video_path): - assert os.system(r'''!cd openpose && ./build/examples/openpose/openpose.bin --hand --face --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 ''') == 0 + folder_path = '/content/drive/My Drive/openpose/' + files = os.listdir(folder_path) + files.reverse() + for filename in files: + if filename.endswith('.mp4') and not filename.endswith('-openpose.mp4'): + print(filename) + colab_video_path = folder_path + filename + print(colab_video_path) + colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + print(colab_openpose_video_path) + if not exists(colab_openpose_video_path): + assert ( + os.system( + r"""!cd openpose && ./build/examples/openpose/openpose.bin --hand --face --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 """ + ) + == 0 + ) - """## From Youtube (Downloaded to your Drive)""" + """## From Youtube (Downloaded to your Drive)""" - assert os.system(r'''!pip install youtube-dl ''') == 0 + assert os.system(r"""!pip install youtube-dl """) == 0 - youtube_id = '2021-05-07_22-00-55_UTC' - assert os.system(r'''!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} ''') == 0 - colab_video_path = '/content/drive/My Drive/openpose/' + youtube_id + '.mp4' - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + youtube_id = '2021-05-07_22-00-55_UTC' + assert os.system(r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """) == 0 + colab_video_path = '/content/drive/My Drive/openpose/' + youtube_id + '.mp4' + colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' - assert os.system(r'''!cd openpose && ./build/examples/openpose/openpose.bin --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 ''') == 0 + assert ( + os.system( + r"""!cd openpose && ./build/examples/openpose/openpose.bin --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 """ + ) + == 0 + ) + """## Run OpenPose""" + assert ( + os.system( + r"""!wget 
'https://cdn-eu-hz-4.ufile.io/get/z5yes0he?token=ZWUyZjMyNDBmNTRlMWY0NjRkZTUzYzQzNjg2OWY4ODgzMzA1ODFjNzY0YzYxYWFjNzM1ZTU2OWYwNzQyNzVkOWUwYjY3MjdiZmI1MDg0NmIyMzA3ODhiZmQ2YmI3ZWYyOTE3NDBlNDEwY2JlZTc5N2MwMmIxYjYzZGJjYTZjYzl3K3dtK25xdXE2M1ZUZXZMUU5CYzI0WjBtUEk4OTJQYzFvaTJXa1huRkdtNlhjZGxlTHkvTEJVNVlucnBOZFdHMXBiVkszeHBKWEFaTzRKK1h4Zy9BRDNvbkttNDhJVXRKZjJuSUIyTDVaR0UydjM3amtVdE93aFQ4ZXFGTlZYeVZiTXQwT3pkOEdLTklQRUhZNmR1MHRPeVdMb3ZHRWV3R3BYWStQcElkeUdDTkVvUU1IRTUxaE1acDlCM3htRDBaS2ZjWEpLMXlpNXRWUWdLWEoreld3N3VvLy9ocXE2RUxPS2pDdENtVjB0Zi9qWGh3WEIwK3RzU0xZekloK3k2' -O output.mp4 """ + ) + == 0 + ) - """## Run OpenPose""" + # video_folder = os.path.dirname(colab_video_path) + # video_base_name = os.path.basename(colab_video_path) + # print(video_base_name) + import os + from os.path import exists, join, basename, splitext - assert os.system(r'''!wget 'https://cdn-eu-hz-4.ufile.io/get/z5yes0he?token=ZWUyZjMyNDBmNTRlMWY0NjRkZTUzYzQzNjg2OWY4ODgzMzA1ODFjNzY0YzYxYWFjNzM1ZTU2OWYwNzQyNzVkOWUwYjY3MjdiZmI1MDg0NmIyMzA3ODhiZmQ2YmI3ZWYyOTE3NDBlNDEwY2JlZTc5N2MwMmIxYjYzZGJjYTZjYzl3K3dtK25xdXE2M1ZUZXZMUU5CYzI0WjBtUEk4OTJQYzFvaTJXa1huRkdtNlhjZGxlTHkvTEJVNVlucnBOZFdHMXBiVkszeHBKWEFaTzRKK1h4Zy9BRDNvbkttNDhJVXRKZjJuSUIyTDVaR0UydjM3amtVdE93aFQ4ZXFGTlZYeVZiTXQwT3pkOEdLTklQRUhZNmR1MHRPeVdMb3ZHRWV3R3BYWStQcElkeUdDTkVvUU1IRTUxaE1acDlCM3htRDBaS2ZjWEpLMXlpNXRWUWdLWEoreld3N3VvLy9ocXE2RUxPS2pDdENtVjB0Zi9qWGh3WEIwK3RzU0xZekloK3k2' -O output.mp4 ''') == 0 + # colab_video_path = '/content/drive/My Drive/bachata.mp4' + colab_video_path = '/content/output.mp4' + colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' - # video_folder = os.path.dirname(colab_video_path) - # video_base_name = os.path.basename(colab_video_path) - # print(video_base_name) - import os - from os.path import exists, join, basename, splitext - #colab_video_path = '/content/drive/My Drive/bachata.mp4' - colab_video_path = '/content/output.mp4' - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + assert ( + os.system( + r"""!cd openpose && ./build/examples/openpose/openpose.bin --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 """ + ) + == 0 + ) - assert os.system(r'''!cd openpose && ./build/examples/openpose/openpose.bin --number_people_max 12 --video '{colab_video_path}' --display 0 --write_video_with_audio --write_video '{colab_openpose_video_path}' # --net_resolution "-1x736" --scale_number 4 --scale_gap 0.25 ''') == 0 + assert os.system(r"""!cmake -version """) == 0 - assert os.system(r'''!cmake -version ''') == 0 def kernel_37(): - return r''' + return r""" 1. https://www.youtube.com/channel/UCcTy8yBARva3WzrmIcaaWPA/about 2. 
https://www.play-cricket.com/website/results/4721802 - ''' + """ + def kernel_38( - video_path, - framerate=None, - max_seconds=None, - ids=None, - prev_frames=None, - delay=None, - low_mean_conf=None, - low_portion=None, - no_dots=None, + video_path, + framerate=None, + max_seconds=None, + ids=None, + prev_frames=None, + delay=None, + low_mean_conf=None, + low_portion=None, + no_dots=None, ): - if ids is None: - ids = 'v7' + if ids is None: + ids = 'v7' - if prev_frames is None: - prev_frames = 59 + if prev_frames is None: + prev_frames = 59 - if delay is None: - delay = 3 + if delay is None: + delay = 3 - t2 = hashlib.sha256( - video_path.encode('utf-8') - ).hexdigest() + t2 = hashlib.sha256(video_path.encode('utf-8')).hexdigest() - t1 = '/kaggle/working/video%s.mp4' % t2 + t1 = '/kaggle/working/video%s.mp4' % t2 - if not os.path.exists(t1): - subprocess.check_call( - [ - 'youtube-dl', - '-f', - '18', - video_path, - '-o', - t1, - ] - ) + if not os.path.exists(t1): + subprocess.check_call( + [ + 'youtube-dl', + '-f', + '18', + video_path, + '-o', + t1, + ] + ) - kernel_28( - t1, - framerate=framerate, - max_seconds=max_seconds, - video_id=t2, - ) + kernel_28( + t1, + framerate=framerate, + max_seconds=max_seconds, + video_id=t2, + ) - o_29 = kernel_29( - t1, - video_id=t2, - ) + o_29 = kernel_29( + t1, + video_id=t2, + ) - o_30 = kernel_30( - o_29=o_29, - ids=ids, - delay=delay, - prev_frames=prev_frames, - max_frames=9999, - max_ids=999, - video_path=t1, - low_mean_conf=low_mean_conf, - low_portion=low_portion, - no_dots=no_dots, - ) + o_30 = kernel_30( + o_29=o_29, + ids=ids, + delay=delay, + prev_frames=prev_frames, + max_frames=9999, + max_ids=999, + video_path=t1, + low_mean_conf=low_mean_conf, + low_portion=low_portion, + no_dots=no_dots, + ) - return dict( - o_29=o_29, - o_30=o_30, - ) + return dict( + o_29=o_29, + o_30=o_30, + ) diff --git a/python/online/fxreader/pr34/tests/test_crypto.py b/python/online/fxreader/pr34/tests/test_crypto.py index 1c39c70..f886ca0 100644 --- a/python/online/fxreader/pr34/tests/test_crypto.py +++ b/python/online/fxreader/pr34/tests/test_crypto.py @@ -3,34 +3,34 @@ import unittest class TestCrypto(unittest.TestCase): - def test_password_utils(self) -> None: - salt = b'asdfasdfasdf' + def test_password_utils(self) -> None: + salt = b'asdfasdfasdf' - secret = 'blah' + secret = 'blah' - hash_res = crypto.PasswordUtils.secret_hash( - secret, - mode='bytes', - salt=salt, - ) - self.assertEqual( - hash_res, - ( - salt, - b'\xdak\xd15\xfa\x8e\xc8\r\xc3\xd2c\xf1m\xb0\xbf\xe6\x98\x01$!j\xc8\xc0Hh\x84\xea,\x91\x8b\x08\xce', - ), - ) + hash_res = crypto.PasswordUtils.secret_hash( + secret, + mode='bytes', + salt=salt, + ) + self.assertEqual( + hash_res, + ( + salt, + b'\xdak\xd15\xfa\x8e\xc8\r\xc3\xd2c\xf1m\xb0\xbf\xe6\x98\x01$!j\xc8\xc0Hh\x84\xea,\x91\x8b\x08\xce', + ), + ) - check_res = crypto.PasswordUtils.secret_check( - secret, - *hash_res, - ) + check_res = crypto.PasswordUtils.secret_check( + secret, + *hash_res, + ) - self.assertTrue(check_res) + self.assertTrue(check_res) - self.assertFalse( - crypto.PasswordUtils.secret_check( - secret + 'asdfasdfsdf', - *hash_res, - ) - ) + self.assertFalse( + crypto.PasswordUtils.secret_check( + secret + 'asdfasdfsdf', + *hash_res, + ) + ) From 794b86d90e7adffe706ae8bca29c6454a1412ed4 Mon Sep 17 00:00:00 2001 From: Siarhei Siniak Date: Tue, 20 May 2025 12:03:08 +0300 Subject: [PATCH 3/4] [+] add .whl 1. fix type errors; 2. 
release new .whl; --- python/cli.py | 2 +- python/meson.build | 2 +- .../fxreader/pr34/commands_typed/cli_bootstrap.py | 11 ++++++++++- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/python/cli.py b/python/cli.py index 95a470e..cd38ef3 100644 --- a/python/cli.py +++ b/python/cli.py @@ -156,7 +156,7 @@ class CLI(_cli.CLI): project_name=options.project, argv=args, output_dir=options.output_dir, - mypy=True, + # mypy=True, ruff=True, pyright=True, ) diff --git a/python/meson.build b/python/meson.build index 23cd8ba..536ce9f 100644 --- a/python/meson.build +++ b/python/meson.build @@ -5,7 +5,7 @@ project( ).stdout().strip('\n'), # 'online.fxreader.uv', # ['c', 'cpp'], - version: '0.1.5.16+27.21', + version: '0.1.5.17', # default_options: [ # 'cpp_std=c++23', # # 'prefer_static=true', diff --git a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py index 087fcd5..06f837e 100644 --- a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py +++ b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py @@ -9,6 +9,7 @@ import sys import subprocess import os import logging +import typing from typing import ( @@ -97,7 +98,15 @@ def check_dict( assert isinstance(value, dict) value2 = cast(dict[Any, Any], value) - assert all([isinstance(k, KT) and (VT is None or isinstance(v, VT)) for k, v in value2.items()]) + VT_class: Optional[type[Any]] = None + + if not VT is None: + if not typing.get_origin(VT) is None: + VT_class = cast(type[Any], typing.get_origin(VT)) + else: + VT_class = VT + + assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()]) if VT is None: return cast( From 964c4131994fb8447b47cc6dfbcc5b44000e04ae Mon Sep 17 00:00:00 2001 From: Siarhei Siniak Date: Tue, 20 May 2025 12:06:36 +0300 Subject: [PATCH 4/4] [+] add .whl --- releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl diff --git a/releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl b/releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl new file mode 100644 index 0000000..efd3ec3 --- /dev/null +++ b/releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9d3ad280571a224906096fecde3007407f84114e2a2e7b9ca7853243a50beebc +size 69699
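
Note on PATCH 2/4: the whole-tree sweep above is formatter output rather than hand edits. The double-to-single quote conversions on plain strings, the triple-quoted strings normalized to """, and the re-wrapped call sites are consistent with `ruff format` configured for single quotes; the series itself does not show the project's ruff configuration, so the exact settings are an assumption. A minimal sketch of reproducing such a sweep in a checkout where ruff is installed:

import subprocess

# format the python tree in place, then summarize what changed
subprocess.check_call(['ruff', 'format', 'python/'])
subprocess.check_call(['git', 'diff', '--stat'])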
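
Note on PATCH 3/4: the `check_dict` change works around the fact that `isinstance()` raises `TypeError: isinstance() argument 2 cannot be a parameterized generic` when handed a type such as `list[str]`, so value checking previously only worked for plain classes. The patch unwraps the generic with `typing.get_origin()` and checks values against the bare runtime class instead. A self-contained sketch of that behavior, assuming Python 3.9+ for the builtin generic syntax (the helper name is illustrative, not part of the patch):

import typing
from typing import Any, Optional, cast

def runtime_check_class(VT: Optional[type[Any]]) -> Optional[type[Any]]:
	# unwrap parameterized generics: list[str] -> list, dict[str, int] -> dict;
	# plain classes (int, str, ...) pass through unchanged
	if VT is None:
		return None
	origin = typing.get_origin(VT)
	if not origin is None:
		return cast(type[Any], origin)
	return VT

assert runtime_check_class(list[str]) is list
assert runtime_check_class(dict[str, int]) is dict
assert runtime_check_class(int) is int
assert isinstance(['a'], runtime_check_class(list[str]))

Note that the type parameters themselves are not validated at runtime; `['a']` and `[1]` both pass an `isinstance(..., list)` check, which matches what the patched `check_dict` does for values.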
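
Note on PATCH 4/4: the wheel is stored via Git LFS, so the blob committed to git is only the three-line pointer above; `oid` is the SHA-256 of the real file content and `size` its length in bytes, while the actual 69699-byte wheel lives in LFS storage. A quick sketch for checking that a locally materialized artifact matches its pointer (path taken from the patch; adjust if the checkout differs):

import hashlib
import pathlib

wheel = pathlib.Path('releases/whl/online_fxreader_pr34-0.1.5.17-py3-none-any.whl')
data = wheel.read_bytes()

# should reproduce the pointer's oid and size fields verbatim
print('oid sha256:%s' % hashlib.sha256(data).hexdigest())
print('size %d' % len(data))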