From 67fcefbce0adba0b4124d8605120ccb509f36f55 Mon Sep 17 00:00:00 2001 From: Siarhei Siniak Date: Thu, 4 Dec 2025 11:25:58 +0300 Subject: [PATCH] [+] reformat ruff 1. column width 80; --- python/_m.py | 19 +- python/cli.py | 18 +- python/m.py | 54 +- python/online/fxreader/pr34/commands.py | 554 ++++++++++++++---- .../pr34/commands_typed/async_api/fastapi.py | 5 +- .../fxreader/pr34/commands_typed/cli.py | 148 ++++- .../pr34/commands_typed/cli_bootstrap.py | 54 +- .../pr34/commands_typed/color_scheme.py | 20 +- .../fxreader/pr34/commands_typed/crypto.py | 4 +- .../fxreader/pr34/commands_typed/logging.py | 4 +- .../fxreader/pr34/commands_typed/metrics.py | 32 +- .../fxreader/pr34/commands_typed/mypy.py | 10 +- .../online/fxreader/pr34/commands_typed/os.py | 27 +- .../fxreader/pr34/commands_typed/pip.py | 69 ++- .../fxreader/pr34/commands_typed/pydantic.py | 30 +- .../fxreader/pr34/commands_typed/status.py | 7 +- python/online/fxreader/pr34/oom_firefox.py | 111 +++- python/online/fxreader/pr34/tasks/ble.py | 20 +- python/online/fxreader/pr34/tasks/cython.py | 24 +- .../fxreader/pr34/tasks/jigsaw_toxic.py | 66 ++- .../online/fxreader/pr34/tasks/mlb_player.py | 444 +++++++++++--- 21 files changed, 1405 insertions(+), 315 deletions(-) diff --git a/python/_m.py b/python/_m.py index b63426e..7f327ab 100644 --- a/python/_m.py +++ b/python/_m.py @@ -53,7 +53,10 @@ def js(argv: list[str]) -> int: '--project-directory', Settings.settings().project_root, '-f', - Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml', + Settings.settings().project_root + / 'docker' + / 'js' + / 'docker-compose.yml', *argv, ] ) @@ -67,7 +70,15 @@ def env( env_path = Settings.settings().env_path if not env_path.exists(): - subprocess.check_call([sys.executable, '-m', 'venv', '--system-site-packages', str(env_path)]) + subprocess.check_call( + [ + sys.executable, + '-m', + 'venv', + '--system-site-packages', + str(env_path), + ] + ) subprocess.check_call( [ @@ -233,7 +244,9 @@ Command: TypeAlias = Literal[ def run(argv: Optional[list[str]] = None) -> None: logging.basicConfig( level=logging.INFO, - format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'), + format=( + '%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s' + ), ) if argv is None: diff --git a/python/cli.py b/python/cli.py index 9a41347..81e16b5 100644 --- a/python/cli.py +++ b/python/cli.py @@ -56,8 +56,18 @@ class CLI(_cli.CLI): self._projects: dict[str, _cli.Project] = { 'online.fxreader.pr34': _cli.Project( source_dir=self.settings.base_dir / 'python', - build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build', - dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install', + build_dir=self.settings.base_dir + / 'tmp' + / 'online' + / 'fxreader' + / 'pr34' + / 'build', + dest_dir=self.settings.base_dir + / 'tmp' + / 'online' + / 'fxreader' + / 'pr34' + / 'install', meson_path=self.settings.base_dir / 'python' / 'meson.build', ) } @@ -117,7 +127,9 @@ class CLI(_cli.CLI): parser = argparse.ArgumentParser() parser.add_argument('command', choices=[o.value for o in Command]) - parser.add_argument('-p', '--project', choices=[o for o in self.projects]) + parser.add_argument( + '-p', '--project', choices=[o for o in self.projects] + ) parser.add_argument( '-o', '--output_dir', diff --git a/python/m.py b/python/m.py index a98e0c8..82a9d0b 100755 --- a/python/m.py +++ b/python/m.py @@ -78,7 +78,9 @@ 
class PyProject: third_party_roots: list[ThirdPartyRoot] = dataclasses.field( default_factory=lambda: [], ) - requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict()) + requirements: dict[str, pathlib.Path] = dataclasses.field( + default_factory=lambda: dict() + ) modules: list[Module] = dataclasses.field( default_factory=lambda: [], @@ -124,7 +126,12 @@ def check_dict( else: VT_class = VT - assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()]) + assert all( + [ + isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) + for k, v in value2.items() + ] + ) if VT is None: return cast( @@ -233,7 +240,12 @@ def pyproject_load( str, ) - if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict): + if ( + 'tool' in content + and isinstance(content['tool'], dict) + and tool_name in content['tool'] + and isinstance(content['tool'][tool_name], dict) + ): pr34_tool = check_dict( check_dict( content['tool'], @@ -246,7 +258,9 @@ def pyproject_load( res.early_features = pr34_tool['early_features'] if 'pip_find_links' in pr34_tool: - res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']] + res.pip_find_links = [ + d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links'] + ] if 'runtime_libdirs' in pr34_tool: res.runtime_libdirs = [ @@ -265,7 +279,9 @@ def pyproject_load( if 'third_party_roots' in pr34_tool: for o in check_list(pr34_tool['third_party_roots']): o2 = check_dict(o, str, str) - assert all([k in {'package', 'module_root', 'path'} for k in o2]) + assert all( + [k in {'package', 'module_root', 'path'} for k in o2] + ) res.third_party_roots.append( PyProject.ThirdPartyRoot( @@ -279,7 +295,9 @@ def pyproject_load( res.requirements = { k: d.parent / pathlib.Path(v) # pathlib.Path(o) - for k, v in check_dict(pr34_tool['requirements'], str, str).items() + for k, v in check_dict( + pr34_tool['requirements'], str, str + ).items() } if 'modules' in pr34_tool: @@ -328,7 +346,10 @@ class BootstrapSettings: ).strip() ) pip_check_conflicts: Optional[bool] = dataclasses.field( - default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)], + default_factory=lambda: os.environ.get( + 'PIP_CHECK_CONFLICTS', json.dumps(True) + ) + in [json.dumps(True)], ) uv_args: list[str] = dataclasses.field( default_factory=lambda: os.environ.get( @@ -390,7 +411,9 @@ def requirements_name_get( else: requirements_path = source_dir / 'requirements.txt' - requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in') + requirements_path_in = requirements_path.parent / ( + requirements_path.stem + '.in' + ) requirements_in: list[str] = [] @@ -436,10 +459,15 @@ def env_bootstrap( requirements_in: list[str] = [] - requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']) + requirements_in.extend( + ['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'] + ) if pyproject.early_features: - early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], [])) + early_dependencies = sum( + [pyproject.dependencies[o] for o in pyproject.early_features], + cast(list[str], []), + ) logger.info( dict( @@ -508,7 +536,11 @@ def env_bootstrap( subprocess.check_call( [ 'uv', - *[o for o in bootstrap_settings.uv_args if not o in ['-U', '--upgrade']], + *[ + o + for o in 
bootstrap_settings.uv_args + if not o in ['-U', '--upgrade'] + ], 'venv', *venv_python_version, *pip_find_links_args, diff --git a/python/online/fxreader/pr34/commands.py b/python/online/fxreader/pr34/commands.py index 3cfdb49..8b0d354 100644 --- a/python/online/fxreader/pr34/commands.py +++ b/python/online/fxreader/pr34/commands.py @@ -60,7 +60,10 @@ def custom_notify( if sys.platform == 'darwin': osascript_translate = functools.partial( custom_translate, - check=lambda a, b: not re.compile(r'^[a-zA-Z0-9\<\>\/\(\)\s\.\,\:]*$').match(b) is None, + check=lambda a, b: not re.compile( + r'^[a-zA-Z0-9\<\>\/\(\)\s\.\,\:]*$' + ).match(b) + is None, ) subprocess.check_call( @@ -75,7 +78,9 @@ def custom_notify( ] ) else: - subprocess.check_call(['notify-send', '-t', '%d' % timeout2, title, msg[-128:]]) + subprocess.check_call( + ['notify-send', '-t', '%d' % timeout2, title, msg[-128:]] + ) class intercept_output_t: @@ -130,7 +135,11 @@ def intercept_output( last_data = None while not (not current_subprocess.poll() is None and not last_data is None): - if not timeout is None and (datetime.datetime.now() - start_timestamp).total_seconds() > timeout: + if ( + not timeout is None + and (datetime.datetime.now() - start_timestamp).total_seconds() + > timeout + ): break t2 = t1.poll(100) @@ -199,7 +208,12 @@ def intercept_output( def player_metadata() -> Optional[str]: for k in range(20): try: - metadata = {k: subprocess.check_output(['playerctl', 'metadata', k]).decode('utf-8').strip() for k in ['artist', 'title']} + metadata = { + k: subprocess.check_output(['playerctl', 'metadata', k]) + .decode('utf-8') + .strip() + for k in ['artist', 'title'] + } return '%s - %s' % (metadata['artist'], metadata['title']) time.sleep(1.0) except Exception: @@ -219,7 +233,9 @@ def memory_stats() -> memory_stats_t.res_t: with io.BytesIO(subprocess.check_output('free', shell=True)) as f: t1 = f.read().decode('utf-8').splitlines() mem_total = int(t1[1].strip().split()[1]) - mem_used = int(t1[1].strip().split()[2]) + int(t1[1].strip().split()[4]) + mem_used = int(t1[1].strip().split()[2]) + int( + t1[1].strip().split()[4] + ) return dict( mem_total=mem_total, @@ -241,7 +257,9 @@ def memory_stats() -> memory_stats_t.res_t: t1 = subprocess.check_output('vm_stat').decode('utf-8') t2 = [o.split(':') for o in t1.splitlines() if ':' in o] t3 = { - o[0].replace(' ', '_').replace('-', '_').lower(): int(o[1].strip().rstrip('.')) + o[0].replace(' ', '_').replace('-', '_').lower(): int( + o[1].strip().rstrip('.') + ) for o in t2 if len(o) == 2 and len(o[0]) > 0 @@ -382,7 +400,9 @@ def eternal_oom(argv: list[str]) -> None: p = config[app] try: - t1 = subprocess.check_output(['pgrep', '-a', '-f', p[0]]).decode('utf-8') + t1 = subprocess.check_output( + ['pgrep', '-a', '-f', p[0]] + ).decode('utf-8') except Exception: continue t2 = t1.splitlines() @@ -429,9 +449,17 @@ def eternal_oom(argv: list[str]) -> None: if isinstance(options.memory_limit, float): options.memory_limit = int(options.memory_limit) - assert isinstance(options.memory_limit, int) and options.memory_limit < memory_stats()['mem_total'] * 0.95 and options.memory_limit > 512 * 1024 + assert ( + isinstance(options.memory_limit, int) + and options.memory_limit < memory_stats()['mem_total'] * 0.95 + and options.memory_limit > 512 * 1024 + ) - assert isinstance(options.cpu_limit, float) and options.cpu_limit > 0.2 * cpu_count and options.cpu_limit < cpu_count * 0.95 + assert ( + isinstance(options.cpu_limit, float) + and options.cpu_limit > 0.2 * cpu_count + and 
options.cpu_limit < cpu_count * 0.95 + ) assert options.period >= 1 @@ -459,7 +487,10 @@ def eternal_oom(argv: list[str]) -> None: for value, column in zip(row, header): columns[column].append(value) for column, transformation in extra_columns.items(): - columns[column] = [transformation({k: v[index] for k, v in columns.items()}) for index in range(len(rows))] + columns[column] = [ + transformation({k: v[index] for k, v in columns.items()}) + for index in range(len(rows)) + ] return columns @@ -488,14 +519,19 @@ def eternal_oom(argv: list[str]) -> None: columns: dict[str, list[Any]] merged_data_frame: MergedDataFrame = dict( - header=[column + '_x' for column in left] + [column + '_y' for column in right], + header=[column + '_x' for column in left] + + [column + '_y' for column in right], columns={}, ) for column in merged_data_frame['header']: merged_data_frame['columns'][column] = [] - common_values: set[Any] = {left_value for left_value in index['left'] if left_value in index['right']} + common_values: set[Any] = { + left_value + for left_value in index['left'] + if left_value in index['right'] + } class RowMatch(TypedDict): left_row_index: int @@ -518,8 +554,14 @@ def eternal_oom(argv: list[str]) -> None: values[ common_row[ cast( - Literal['left_row_index' | 'right_row_index'], - 'left_row_index' if index_name == 'left' else 'right_row_index' if index_name == 'right' else raise_not_implemented(), + Literal[ + 'left_row_index', 'right_row_index' + ], + 'left_row_index' + if index_name == 'left' + else 'right_row_index' + if index_name == 'right' + else raise_not_implemented(), ) ] ] @@ -539,9 +581,18 @@ def eternal_oom(argv: list[str]) -> None: assert ascending is False t1 = [ o['row_index'] - for o in sorted([dict(row_index=row_index, value=value) for row_index, value in enumerate(data_frame[by[0]])], key=lambda x: x['value'])[::-1] + for o in sorted( + [ + dict(row_index=row_index, value=value) + for row_index, value in enumerate(data_frame[by[0]]) + ], + key=lambda x: x['value'], + )[::-1] ] - return {column: [values[row_index] for row_index in t1] for column, values in data_frame.items()} + return { + column: [values[row_index] for row_index in t1] + for column, values in data_frame.items() + } def pandas_filter_values(data_frame, condition): shape = [ len(data_frame), ] if shape[0] > 0: shape.append(len(list(data_frame.values())[0])) - t1 = [row_index for row_index in range(shape[1]) if condition({column: values[row_index] for column, values in data_frame.items()})] - return {column: [values[row_index] for row_index in t1] for column, values in data_frame.items()} + t1 = [ + row_index + for row_index in range(shape[1]) + if condition( + { + column: values[row_index] + for column, values in data_frame.items() + } + ) + ] + return { + column: [values[row_index] for row_index in t1] + for column, values in data_frame.items() + } def pandas_row(data_frame, row_index): - return {column: values[row_index] for column, values in data_frame.items()} + return { + column: values[row_index] for column, values in data_frame.items() + } def pandas_shape(data_frame): columns_count = len(data_frame) @@ -580,7 +645,9 @@ def eternal_oom(argv: list[str]) -> None: def oom_get_processes( extra_filter=None, ): - with io.BytesIO(subprocess.check_output('ps -e -o pid,rss,user,%cpu', shell=True)) as f: + with io.BytesIO( + subprocess.check_output('ps -e -o pid,rss,user,%cpu', shell=True) + ) as f: t1 = pandas_data_frame(
f.read().decode('utf-8').splitlines(), ps_regex(4), @@ -594,7 +661,11 @@ def eternal_oom(argv: list[str]) -> None: del t1['%CPU'] assert set(t1.keys()) == set(['PID', 'RSS', 'USER', 'CPU']) - t5 = subprocess.check_output('ps -e -o pid,args', shell=True).decode('utf-8').splitlines() + t5 = ( + subprocess.check_output('ps -e -o pid,args', shell=True) + .decode('utf-8') + .splitlines() + ) t6 = pandas_data_frame( t5, r'^\s*(\d+)\s(.*)$', @@ -614,7 +685,12 @@ def eternal_oom(argv: list[str]) -> None: if extra_filter is None: extra_filter = lambda *args: True - t7 = pandas_filter_values(t11, lambda row: row['PID_x'] != self_pid and not 'freelancer' in row['COMMAND_y'] and extra_filter(row)) + t7 = pandas_filter_values( + t11, + lambda row: row['PID_x'] != self_pid + and not 'freelancer' in row['COMMAND_y'] + and extra_filter(row), + ) t8 = pandas_sort_values(t7, by=['RSS_x'], ascending=False) t9 = pandas_sort_values(t7, by=['CPU_x'], ascending=False) @@ -694,7 +770,9 @@ def eternal_oom(argv: list[str]) -> None: if t11['total_cpu'] > options.cpu_limit: oom_display_rows(t11['by_cpu']) - free_before_oom = options.memory_limit - current_memory_stats['mem_used'] + free_before_oom = ( + options.memory_limit - current_memory_stats['mem_used'] + ) print( 'available %5.2f %% out of %5.2f %% of cpu limit before OOC' @@ -746,7 +824,10 @@ def eternal_oom(argv: list[str]) -> None: if last_cpu_high is None: last_cpu_high = datetime.datetime.now().timestamp() - if datetime.datetime.now().timestamp() - last_cpu_high > options.cpu_wait: + if ( + datetime.datetime.now().timestamp() - last_cpu_high + > options.cpu_wait + ): last_cpu_high = None del last_total_cpu[:] return True @@ -768,8 +849,15 @@ def eternal_oom(argv: list[str]) -> None: mem_stat = memory_stats() mem_used = mem_stat['mem_used'] - if options.memory_limit < mem_stat['mem_total'] and not oom_mem_high(mem_stat['mem_total'] - (mem_stat['mem_total'] - options.memory_limit) / 2): - extra_filters = lambda row: ('chrome' in row['COMMAND_y'] and '--type=renderer' in row['COMMAND_y'] or not 'chrome' in row['COMMAND_y']) + if options.memory_limit < mem_stat['mem_total'] and not oom_mem_high( + mem_stat['mem_total'] + - (mem_stat['mem_total'] - options.memory_limit) / 2 + ): + extra_filters = lambda row: ( + 'chrome' in row['COMMAND_y'] + and '--type=renderer' in row['COMMAND_y'] + or not 'chrome' in row['COMMAND_y'] + ) else: extra_filters = None @@ -811,7 +899,9 @@ def eternal_oom(argv: list[str]) -> None: def resilient_vlc(stream=None): if stream is None: - streams_path = os.path.join(os.environ['CACHE_PATH'], 'resilient-vlc-streams.json') + streams_path = os.path.join( + os.environ['CACHE_PATH'], 'resilient-vlc-streams.json' + ) if os.path.exists(streams_path): with io.open(streams_path, 'r') as f: @@ -852,7 +942,9 @@ def resilient_vlc(stream=None): 'main interface error', ] ] - ) and any([o in t1 for o in ['pulse audio output debug: underflow']]): + ) and any( + [o in t1 for o in ['pulse audio output debug: underflow']] + ): print('shit') p.kill() while True: @@ -930,7 +1022,12 @@ def eternal_firefox( ) as p: try: if debug: - assert subprocess.check_call(['notify-send', '%s:Starting' % group_name]) == 0 + assert ( + subprocess.check_call( + ['notify-send', '%s:Starting' % group_name] + ) + == 0 + ) # t3 = '' for k in range(300): @@ -1015,10 +1112,18 @@ def eternal_firefox( reposition() if debug: - assert subprocess.check_call(['notify-send', '%s:Started' % group_name]) == 0 + assert ( + subprocess.check_call( + ['notify-send', '%s:Started' % 
group_name] + ) + == 0 + ) start = datetime.datetime.now() - is_to_restart = lambda: (datetime.datetime.now() - start).total_seconds() >= 900 * 4 + is_to_restart = ( + lambda: (datetime.datetime.now() - start).total_seconds() + >= 900 * 4 + ) polling_count = 0 while not is_to_restart(): @@ -1031,7 +1136,12 @@ def eternal_firefox( polling_count += 1 if debug: - assert subprocess.check_call(['notify-send', '%s:Closing' % group_name]) == 0 + assert ( + subprocess.check_call( + ['notify-send', '%s:Closing' % group_name] + ) + == 0 + ) # assert os.system('wmctrl -i -c %s' % t2) == 0 assert ( @@ -1067,7 +1177,12 @@ def eternal_firefox( pprint.pprint([p.pid, '20 seconds timeout', 'kill']) p.kill() if debug: - assert subprocess.check_call(['notify-send', '%s:Closed' % group_name]) == 0 + assert ( + subprocess.check_call( + ['notify-send', '%s:Closed' % group_name] + ) + == 0 + ) def resilient_ethernet(ip_addr, ethernet_device): @@ -1082,7 +1197,9 @@ do ping -c 3 -w 3 -W 1 {{IP_ADDR}} || (\ ); \ sleep 10; clear; date; \ done' - """.replace('{{IP_ADDR}}', ip_addr).replace('{{ETHERNET_DEVICE}}}', ethernet_device), + """.replace('{{IP_ADDR}}', ip_addr).replace( + '{{ETHERNET_DEVICE}}', ethernet_device + ), shell=True, ) @@ -1169,7 +1286,13 @@ def http_server(argv): # 'ping', '-w', '1', # options.host # ]) - assert options.host in sum([[o2.local for o2 in o.addr_info] for o in commands_os.interfaces_index()], []) + assert options.host in sum( + [ + [o2.local for o2 in o.addr_info] + for o in commands_os.interfaces_index() + ], + [], + ) except Exception: raise RuntimeError('invalid ip address %s' % options.host) @@ -1210,9 +1333,16 @@ def http_server(argv): ) ) - assert all([not re.compile(r'^[A-Za-z-]+ [a-z0-9A-Z-\.]+$').match(o) is None for o in options.response_headers]) + assert all( + [ + not re.compile(r'^[A-Za-z-]+ [a-z0-9A-Z-\.]+$').match(o) is None + for o in options.response_headers + ] + ) - location_section = ('location / {deny all;}location /%s/ {alias %s/;%s%s}') % ( + location_section = ( + 'location / {deny all;}location /%s/ {alias %s/;%s%s}' + ) % ( path, APP_DIR, '\n'.join(['add_header %s;' % o for o in options.response_headers]), @@ -1420,7 +1550,10 @@ def pass_ssh_osx(argv): t1 = options.pass_option assert len(t1) > 0 - print('select on of pass names\n%s' % '\n'.join(['%d: %s' % (k, v) for k, v in enumerate(t1)])) + print( + 'select one of pass names\n%s' + % '\n'.join(['%d: %s' % (k, v) for k, v in enumerate(t1)]) + ) while True: try: @@ -1484,7 +1617,9 @@ def pass_ssh_osx(argv): p.wait(1) assert p.poll() == 0 - with subprocess.Popen(ssh_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + with subprocess.Popen( + ssh_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as p: password = None last_chunk = None @@ -1540,10 +1675,23 @@ def pass_ssh_osx(argv): if options.debug: pprint.pprint(last_chunk['data']) - if last_chunk['data'].endswith('\r\n[0]'.encode('utf-8')) and last_chunk['data'].rfind(pinentry_delimeter) != -1: + if ( + last_chunk['data'].endswith('\r\n[0]'.encode('utf-8')) + and last_chunk['data'].rfind(pinentry_delimeter) != -1 + ): last_line = last_chunk['data'].splitlines()[-2] else: - raise RuntimeError('gpg failure %s' % str(last_chunk['data'][max(last_chunk['data'].find(pinentry_delimeter), -128) :])) + raise RuntimeError( + 'gpg failure %s' + % str( + last_chunk['data'][ + max( + last_chunk['data'].find(pinentry_delimeter), + -128, + ) : + ] + ) + ) pos2 = last_line.rfind(pinentry_delimeter) if pos2 == -1: @@ -1591,7 +1739,9 @@ def vpn(argv: 
list[str]) -> None: python_path: list[str] if (pathlib.Path(__file__).parent / 'env3').exists(): - python_path = [str(pathlib.Path(__file__).parent / 'env3' / 'bin' / 'python3')] + python_path = [ + str(pathlib.Path(__file__).parent / 'env3' / 'bin' / 'python3') + ] elif (pathlib.Path(__file__).parent.parent.parent.parent / 'm').exists(): python_path = [ str(pathlib.Path(__file__).parent.parent.parent.parent / 'm'), @@ -1637,14 +1787,28 @@ def player_v1(folder_url, item_id): t7 = t5[k] t9 = urllib.parse.unquote(os.path.split(t7)[1]) progress_bar.set_description('%03d %s' % (k, t9)) - with subprocess.Popen(['ffprobe', '-hide_banner', '-i', t7], stderr=subprocess.PIPE, stdout=subprocess.PIPE) as p: + with subprocess.Popen( + ['ffprobe', '-hide_banner', '-i', t7], + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) as p: p.wait() assert p.returncode == 0 t8 = p.stderr.read().decode('utf-8') assert isinstance(t8, str) # print(t8) with subprocess.Popen( - ['ffplay', '-hide_banner', '-nodisp', '-autoexit', '-loop', '1', t7], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + [ + 'ffplay', + '-hide_banner', + '-nodisp', + '-autoexit', + '-loop', + '1', + t7, + ], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, ) as p: p.wait() assert p.returncode == 0 @@ -1708,7 +1872,9 @@ def pm_service(argv): wu = 0 while True: - subprocess.check_call(['osascript', '-e', 'tell application "Finder" to sleep']) + subprocess.check_call( + ['osascript', '-e', 'tell application "Finder" to sleep'] + ) subprocess.check_call( ['pmset', 'sleepnow'], stdout=subprocess.DEVNULL, @@ -1747,9 +1913,16 @@ def pm_service(argv): # -E 'powerd.*TurnedOn.*UserIsActive' | head -n 1 #''', shell=True).decode('utf-8') - if not cmd is None and ('TurnedOn' in cmd or 'PrevIdle' in cmd or 'PMRD: kIOMessageSystemWillPowerOn' in cmd): + if not cmd is None and ( + 'TurnedOn' in cmd + or 'PrevIdle' in cmd + or 'PMRD: kIOMessageSystemWillPowerOn' in cmd + ): if ( - ('AppleMultitouchDevice' in cmd and 'tp' in options.events) + ( + 'AppleMultitouchDevice' in cmd + and 'tp' in options.events + ) or ('AppleACPIButton' in cmd and 'pb' in options.events) or ('eventType:29' in cmd and 'kb' in options.events) ): @@ -1769,7 +1942,11 @@ def pm_service(argv): ) ) else: - print('\r%s wu : %d, la : %s' % (datetime.datetime.now().isoformat(), wu, action), end='') + print( + '\r%s wu : %d, la : %s' + % (datetime.datetime.now().isoformat(), wu, action), + end='', + ) if action == 'wake-up': break @@ -1888,7 +2065,9 @@ def scrap_yt_music(argv: list[str]) -> None: break if p is None and not current_name is None: - output_name = os.path.join(options.library_path, '%s.mp3' % current_name) + output_name = os.path.join( + options.library_path, '%s.mp3' % current_name + ) logging.info('audio_record, new recording') p = subprocess.Popen( ['sox', '-d', output_name], @@ -1926,7 +2105,9 @@ def scrap_yt_music(argv: list[str]) -> None: target=functools.partial( http_events, context=context, - res_cb=lambda *args, **kwargs: context['http_on_event'](*args, **kwargs), + res_cb=lambda *args, **kwargs: context['http_on_event']( + *args, **kwargs + ), ) ), threading.Thread( @@ -1941,7 +2122,9 @@ def scrap_yt_music(argv: list[str]) -> None: def http_on_event(event, events): with context['track_cv']: if 'title' in event and event['title'].strip() != '': - context['track_name'] = str(event['title'])[:128].replace('\n', '') + context['track_name'] = str(event['title'])[:128].replace( + '\n', '' + ) else: context['track_name'] = None @@ -2188,7 +2371,11 @@ class 
Battery: ) else: pass - print('\r%s % 5.2f%% %s' % (datetime.datetime.now().isoformat(), t3, str(t5)), end='') + print( + '\r%s % 5.2f%% %s' + % (datetime.datetime.now().isoformat(), t3, str(t5)), + end='', + ) except Exception: logging.error(traceback.format_exc()) @@ -2273,7 +2460,9 @@ def desktop_services(argv): t2 = [] try: - t1 = subprocess.check_output(['swaymsg', '-t', 'get_tree']).decode('utf-8') + t1 = subprocess.check_output( + ['swaymsg', '-t', 'get_tree'] + ).decode('utf-8') t2 = json.loads(t1) except Exception: logging.error(traceback.format_exc()) @@ -2348,7 +2537,9 @@ def desktop_services(argv): @classmethod def dpms_get(cls): try: - t1 = subprocess.check_output(['swaymsg', '-r', '-t', 'get_outputs'], timeout=1) + t1 = subprocess.check_output( + ['swaymsg', '-r', '-t', 'get_outputs'], timeout=1 + ) t2 = t1.decode('utf-8') t3 = json.loads(t2) t4 = [ @@ -2605,7 +2796,9 @@ def desktop_services(argv): ]: if os.path.exists('/sys/bus/platform/devices/applesmc.768'): return 'applesmc.768' - elif os.path.exists('/sys/devices/system/cpu/intel_pstate/no_turbo'): + elif os.path.exists( + '/sys/devices/system/cpu/intel_pstate/no_turbo' + ): return 'intel_pstate' elif os.path.exists('/sys/devices/system/cpu/amd_pstate'): return 'amd_pstate' @@ -2822,7 +3015,11 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost after_resume='echo after_resume; pkill --signal SIGUSR1 swayidle;', ) self.last_force_idle = None - self.commands.update(timeout2='echo timeout2; {swaylock_cmd};'.format(swaylock_cmd=self.commands['swaylock_cmd2'])) + self.commands.update( + timeout2='echo timeout2; {swaylock_cmd};'.format( + swaylock_cmd=self.commands['swaylock_cmd2'] + ) + ) self.swayidle = subprocess.Popen( r""" exec swayidle -d -w \ @@ -2858,14 +3055,22 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost self.bg_terminate = False def skip_loop_long_ago(self): - if self.last_skip_loop is None or (datetime.datetime.now() - self.last_skip_loop).total_seconds() >= 30: + if ( + self.last_skip_loop is None + or ( + datetime.datetime.now() - self.last_skip_loop + ).total_seconds() + >= 30 + ): self.last_skip_loop = datetime.datetime.now() return True else: return False def background_check(self): - if (self.bg is None or not self.bg.poll() is None) and not self.bg_terminate: + if ( + self.bg is None or not self.bg.poll() is None + ) and not self.bg_terminate: if not options.background_image is None: self.bg = subprocess.Popen( [ @@ -2892,7 +3097,13 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost self.swayidle.stdin.flush() def force_idle(self): - if self.last_force_idle is None or (datetime.datetime.now() - self.last_force_idle).total_seconds() >= 10: + if ( + self.last_force_idle is None + or ( + datetime.datetime.now() - self.last_force_idle + ).total_seconds() + >= 10 + ): self.last_force_idle = datetime.datetime.now() return True else: @@ -2937,7 +3148,9 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost self.data.append(chunk) if b'\n' in chunk['data']: - total = b''.join([o['data'] for o in self.data]).decode('utf-8') + total = b''.join([o['data'] for o in self.data]).decode( + 'utf-8' + ) sep_pos = total.rfind('\n') lines = total[:sep_pos].splitlines() self.data = [ @@ -2976,7 +3189,10 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost while True: logging.info('retry i = %d, cnt = %d' % (i, cnt)) - if not (subprocess.call(['swaymsg', '-t', 'get_version']) == 0): + if not ( + subprocess.call(['swaymsg', '-t', 'get_version']) + == 0 + ): continue if cb() == 0: @@ -2986,12 
+3202,20 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost i += 1 - if len(new_events) > 0 or len(self.events) > 0 and self.skip_loop_long_ago(): + if ( + len(new_events) > 0 + or len(self.events) > 0 + and self.skip_loop_long_ago() + ): self.events.extend(new_events) skip_loop = False - if all([o in ['t1', 't4'] for o in self.events]) and VLC.vlc_is_playing_fullscreen() and self.backlight.dpms: + if ( + all([o in ['t1', 't4'] for o in self.events]) + and VLC.vlc_is_playing_fullscreen() + and self.backlight.dpms + ): skip_loop = True logging.info( 'skip loop, %s' @@ -3005,9 +3229,17 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost ], ) ) - elif len(new_events) == 0 and len(self.events) > 1 and all([o in ['t1', 't4'] for o in self.events]): + elif ( + len(new_events) == 0 + and len(self.events) > 1 + and all([o in ['t1', 't4'] for o in self.events]) + ): self.events = ['t4'] - elif len(self.events) > 1 and (self.events == ['t1', 't4', 't5', 't5'] or self.events == ['t1', 't5', 't5'] or self.events == ['t1', 't5']): + elif len(self.events) > 1 and ( + self.events == ['t1', 't4', 't5', 't5'] + or self.events == ['t1', 't5', 't5'] + or self.events == ['t1', 't5'] + ): for o in new_events: self.release_lock() @@ -3023,7 +3255,9 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost # subprocess.check_call(self.commands['lock'], shell=True) logging.info('started t1') if self.force_idle(): - subprocess.check_call(self.commands['timeout1'], shell=True) + subprocess.check_call( + self.commands['timeout1'], shell=True + ) logging.info('done t1') self.release_lock() elif o == 't2': @@ -3034,12 +3268,29 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost msg='loginctl lock started', ) while True: - if not subprocess.call(self.commands['lock'], shell=True) == 0: + if ( + not subprocess.call( + self.commands['lock'], shell=True + ) + == 0 + ): continue - if not subprocess.call(self.commands['timeout2'], shell=True) == 0: + if ( + not subprocess.call( + self.commands['timeout2'], + shell=True, + ) + == 0 + ): # continue pass - if not subprocess.call(self.commands['timeout1'], shell=True) == 0: + if ( + not subprocess.call( + self.commands['timeout1'], + shell=True, + ) + == 0 + ): continue break logging.info('done lock') @@ -3049,24 +3300,42 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost elif o == 't4': logging.info('started t4') if self.force_idle(): - subprocess.check_call(self.commands['lock'], shell=True) - subprocess.call(self.commands['timeout2'], shell=True) - subprocess.check_call(self.commands['timeout1'], shell=True) + subprocess.check_call( + self.commands['lock'], shell=True + ) + subprocess.call( + self.commands['timeout2'], shell=True + ) + subprocess.check_call( + self.commands['timeout1'], shell=True + ) logging.info('done t4') self.release_lock() elif o == 't5': logging.info('started timeout resume') if self.force_idle(): - subprocess.check_call(self.commands['lock'], shell=True) + subprocess.check_call( + self.commands['lock'], shell=True + ) retry( - lambda: subprocess.call(self.commands['resume'], shell=True), + lambda: subprocess.call( + self.commands['resume'], shell=True + ), ) logging.info('done timeout resume') elif o == 't6': logging.info('started before-sleep') if self.force_idle(): - (subprocess.call(self.commands['timeout2'], shell=True),) - (subprocess.check_call(self.commands['timeout1'], shell=True),) + ( + subprocess.call( + self.commands['timeout2'], shell=True + ), + ) + ( + subprocess.check_call( + self.commands['timeout1'], 
shell=True + ), + ) logging.info('done before-sleep') self.release_lock() elif o == 't7': @@ -3074,7 +3343,12 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost # if self.force_idle(): # subprocess.check_call(self.commands['lock'], shell=True) while True: - if subprocess.call(self.commands['resume'], shell=True) == 0: + if ( + subprocess.call( + self.commands['resume'], shell=True + ) + == 0 + ): break else: time.sleep(0.5) @@ -3099,7 +3373,15 @@ echo 1 | tee /sys/devices/system/cpu/cpu*/cpufreq/boost self.background_check() if options.polkit_service: - services.extend([subprocess.Popen(['/usr/lib/polkit-gnome/polkit-gnome-authentication-agent-1'])]) + services.extend( + [ + subprocess.Popen( + [ + '/usr/lib/polkit-gnome/polkit-gnome-authentication-agent-1' + ] + ) + ] + ) services.extend( [ @@ -3234,12 +3516,24 @@ def gnome_shortcuts(argv: list[str]) -> None: 'set', 'org.gnome.settings-daemon.plugins.media-keys', 'custom-keybindings', - '[%s]' % ','.join(["'%s'" % ('/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') % o for o in range(command_id + 1)]), + '[%s]' + % ','.join( + [ + "'%s'" + % ( + '/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' + ) + % o + for o in range(command_id + 1) + ] + ), ), ( 'gsettings', 'set', - ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + ( + 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' + ) % command_id, 'name', name, @@ -3247,7 +3541,9 @@ def gnome_shortcuts(argv: list[str]) -> None: ( 'gsettings', 'set', - ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + ( + 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' + ) % command_id, 'command', command, @@ -3255,7 +3551,9 @@ def gnome_shortcuts(argv: list[str]) -> None: ( 'gsettings', 'set', - ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/') + ( + 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:/org/gnome/settings-daemon/plugins/media-keys/custom-keybindings/custom%d/' + ) % command_id, 'binding', binding, @@ -3273,7 +3571,10 @@ def gnome_shortcuts(argv: list[str]) -> None: [ 'gsettings', 'get', - ('org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:%s') % o, + ( + 'org.gnome.settings-daemon.plugins.media-keys.custom-keybinding:%s' + ) + % o, k, ] ) @@ -3338,7 +3639,9 @@ def socat_ssh(argv): dest='gateway_command', default=None, type=str, - help=('a shell command that forwards ssh socket data somewhere else, like busybox nc 127.0.0.1 $(cat remote-ssh.port)'), + help=( + 'a shell command that forwards ssh socket data somewhere else, like busybox nc 127.0.0.1 $(cat remote-ssh.port)' + ), ) options, args = parser.parse_args(argv) @@ -3471,10 +3774,18 @@ def share_wifi(argv): print('enter password:') - pw = subprocess.check_output('read -s PW; echo -n $PW', shell=True).decode('utf-8') + pw = subprocess.check_output('read -s PW; echo -n $PW', shell=True).decode( + 'utf-8' + ) if len(pw) == 0: - pw = subprocess.check_output('pwgen -syn 20 1', shell=True).decode('utf-8').strip() - with subprocess.Popen(['qrencode', '-t', 'UTF8'], stdin=subprocess.PIPE) as p: + pw = ( + 
subprocess.check_output('pwgen -syn 20 1', shell=True) + .decode('utf-8') + .strip() + ) + with subprocess.Popen( + ['qrencode', '-t', 'UTF8'], stdin=subprocess.PIPE + ) as p: p.stdin.write(pw.encode('utf-8')) p.stdin.flush() p.stdin.close() @@ -3541,7 +3852,9 @@ def share_wifi(argv): if shutdown: break - if (datetime.datetime.now() - last_timestamp).total_seconds() > options.restart_delay: + if ( + datetime.datetime.now() - last_timestamp + ).total_seconds() > options.restart_delay: restart = True last_timestamp = datetime.datetime.now() @@ -3660,8 +3973,14 @@ def media_keys(argv): if mode == 'mocp': raise NotImplementedError elif mode == 'playerctl': - pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) - subprocess.check_call(['playerctl', 'position', '%f' % (pos - float(args[0]))]) + pos = float( + subprocess.check_output(['playerctl', 'position']).decode( + 'utf-8' + ) + ) + subprocess.check_call( + ['playerctl', 'position', '%f' % (pos - float(args[0]))] + ) # msg = player_metadata() else: raise NotImplementedError @@ -3669,8 +3988,14 @@ def media_keys(argv): if mode == 'mocp': raise NotImplementedError elif mode == 'playerctl': - pos = float(subprocess.check_output(['playerctl', 'position']).decode('utf-8')) - subprocess.check_call(['playerctl', 'position', '%f' % (pos + float(args[0]))]) + pos = float( + subprocess.check_output(['playerctl', 'position']).decode( + 'utf-8' + ) + ) + subprocess.check_call( + ['playerctl', 'position', '%f' % (pos + float(args[0]))] + ) # msg = player_metadata() else: raise NotImplementedError @@ -3684,8 +4009,16 @@ def media_keys(argv): else: raise NotImplementedError elif options.command == 'media-lower-volume': - subprocess.check_call(['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '-5%']) - msg = subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip() + subprocess.check_call( + ['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '-5%'] + ) + msg = ( + subprocess.check_output( + ['pactl', 'get-sink-volume', '@DEFAULT_SINK@'] + ) + .decode('utf-8') + .strip() + ) elif options.command == 'media-toggle-volume': subprocess.check_call( [ @@ -3695,10 +4028,24 @@ def media_keys(argv): 'toggle', ] ) - msg = subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip() + msg = ( + subprocess.check_output( + ['pactl', 'get-sink-volume', '@DEFAULT_SINK@'] + ) + .decode('utf-8') + .strip() + ) elif options.command == 'media-raise-volume': - subprocess.check_call(['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '+5%']) - msg = subprocess.check_output(['pactl', 'get-sink-volume', '@DEFAULT_SINK@']).decode('utf-8').strip() + subprocess.check_call( + ['pactl', 'set-sink-volume', '@DEFAULT_SINK@', '+5%'] + ) + msg = ( + subprocess.check_output( + ['pactl', 'get-sink-volume', '@DEFAULT_SINK@'] + ) + .decode('utf-8') + .strip() + ) else: raise NotImplementedError @@ -3771,7 +4118,9 @@ def install(argv: list[str]) -> None: final_target = options.target / relative_source - logger.info(dict(final_target=final_target, relative_source=relative_source)) + logger.info( + dict(final_target=final_target, relative_source=relative_source) + ) if final_target.exists(): if not options.overwrite: @@ -3915,7 +4264,10 @@ def pip_check_conflicts( def pip_resolve( args: list[str], ) -> None: - from online.fxreader.pr34.commands_typed.pip import pip_resolve, pip_resolve_t + from online.fxreader.pr34.commands_typed.pip import ( + pip_resolve, + pip_resolve_t, + ) parser = 
argparse.ArgumentParser() parser.add_argument( @@ -3968,7 +4320,9 @@ def commands_cli(argv: Optional[list[str]] = None) -> int: if argv is None: argv = sys.argv[1:] - from online.fxreader.pr34.commands_typed.logging import setup as logging_setup + from online.fxreader.pr34.commands_typed.logging import ( + setup as logging_setup, + ) logging_setup() # logging.getLogger().setLevel(logging.INFO) diff --git a/python/online/fxreader/pr34/commands_typed/async_api/fastapi.py b/python/online/fxreader/pr34/commands_typed/async_api/fastapi.py index b75cc47..9ff821d 100644 --- a/python/online/fxreader/pr34/commands_typed/async_api/fastapi.py +++ b/python/online/fxreader/pr34/commands_typed/async_api/fastapi.py @@ -31,7 +31,10 @@ def create_app() -> fastapi.FastAPI: logger.info(dict(msg='start loading app = {}'.format(app_config))) app_module, app_method, app_prefix = app_config.split(':') - app_router = cast(Callable[[], Any], getattr(importlib.import_module(app_module), app_method))() + app_router = cast( + Callable[[], Any], + getattr(importlib.import_module(app_module), app_method), + )() assert isinstance(app_router, fastapi.APIRouter) diff --git a/python/online/fxreader/pr34/commands_typed/cli.py b/python/online/fxreader/pr34/commands_typed/cli.py index 912be7e..766bcd2 100644 --- a/python/online/fxreader/pr34/commands_typed/cli.py +++ b/python/online/fxreader/pr34/commands_typed/cli.py @@ -172,9 +172,13 @@ class CLI(abc.ABC): ) -> None: from . import cli_bootstrap - pyproject = cli_bootstrap.pyproject_load(self.projects[project].source_dir / 'pyproject.toml') + pyproject = cli_bootstrap.pyproject_load( + self.projects[project].source_dir / 'pyproject.toml' + ) - dependencies = sum([pyproject.dependencies[o] for o in features], cast(list[str], [])) + dependencies = sum( + [pyproject.dependencies[o] for o in features], cast(list[str], []) + ) pip_find_links: list[pathlib.Path] = [] @@ -216,7 +220,9 @@ class CLI(abc.ABC): force: bool, ) -> None: for k, d in self.dependencies.items(): - whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_')) + whl_glob = self.dist_settings.wheel_dir / ( + '*%s*.whl' % d.name.replace('.', '_') + ) if len(glob.glob(str(whl_glob))) == 0 or force: if d.source_path.exists(): @@ -256,7 +262,9 @@ class CLI(abc.ABC): def index_get(o: dict[str, Any]) -> tuple[Any, ...]: return (o['path'], o['stat']) - present_files_index = {index_get(o): o for o in present_files} + present_files_index = { + index_get(o): o for o in present_files + } new_files: list[dict[str, Any]] = [] @@ -295,7 +303,13 @@ class CLI(abc.ABC): [ pathlib.Path(o) for o in glob.glob( - str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'), + str( + self.dist_settings.env_path + / 'lib' + / 'python*' + / '**' + / 'pkgconfig' + ), recursive=True, ) ] @@ -388,7 +402,18 @@ class CLI(abc.ABC): shutil.rmtree(pyproject_build_dir) if len(self.third_party_roots(project_name)) > 0: - extra_args.append('-Csetup-args=%s' % ('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)]))) + extra_args.append( + '-Csetup-args=%s' + % ( + '-Dthird_party_roots=%s' + % json.dumps( + [ + str(o.absolute()) + for o in self.third_party_roots(project_name) + ] + ) + ) + ) cmd = [ sys.executable, @@ -449,11 +474,21 @@ class CLI(abc.ABC): preserve_top_path=True, ) - pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml') + pyproject = cli_bootstrap.pyproject_load( + project.source_dir / 'pyproject.toml' + ) - pyproject_tool 
= pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root + pyproject_tool = ( + pydantic.RootModel[PyProject.Tool] + .model_validate(pyproject.tool) + .root + ) - if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.install: + if ( + pyproject_tool.meson + and pyproject_tool.meson.args + and pyproject_tool.meson.args.install + ): argv = pyproject_tool.meson.args.install + argv cmd = [ @@ -495,7 +530,9 @@ class CLI(abc.ABC): content = f.read() with io.open(o, 'w') as f: - f.write(content.replace('prefix=/', 'prefix=${pcfiledir}/../../')) + f.write( + content.replace('prefix=/', 'prefix=${pcfiledir}/../../') + ) def ninja( self, @@ -589,18 +626,30 @@ class CLI(abc.ABC): res: list[pathlib.Path] = [] if not project_name is None: - pyproject = cli_bootstrap.pyproject_load(self.projects[project_name].source_dir / 'pyproject.toml') + pyproject = cli_bootstrap.pyproject_load( + self.projects[project_name].source_dir / 'pyproject.toml' + ) for third_party_root in pyproject.third_party_roots: if third_party_root.package: if not third_party_root.module_root: - third_party_root.module_root = third_party_root.package.replace('.', os.path.sep) + third_party_root.module_root = ( + third_party_root.package.replace('.', os.path.sep) + ) if not third_party_root.path: packages = pip_show([third_party_root.package]) assert len(packages) == 1 - third_party_root.path = str(pathlib.Path(packages[0].location) / third_party_root.module_root / 'lib') + third_party_root.path = str( + pathlib.Path(packages[0].location) + / third_party_root.module_root + / 'lib' + ) else: - assert not third_party_root.package and not third_party_root.module_root and third_party_root.path + assert ( + not third_party_root.package + and not third_party_root.module_root + and third_party_root.path + ) res.append(pathlib.Path(third_party_root.path)) @@ -616,8 +665,12 @@ class CLI(abc.ABC): path: Optional[pathlib.Path] = None @property - def meson_toolchains(self) -> dict[str, meson_toolchains_t.res_t.toolchain_t]: - t1 = pathlib.Path(importlib.import_module('online.fxreader.pr34').__path__[0]) + def meson_toolchains( + self, + ) -> dict[str, meson_toolchains_t.res_t.toolchain_t]: + t1 = pathlib.Path( + importlib.import_module('online.fxreader.pr34').__path__[0] + ) toolchains = glob.glob(str(t1 / 'meson' / 'toolchains' / '*')) res: dict[str, CLI.meson_toolchains_t.res_t.toolchain_t] = dict() @@ -642,7 +695,11 @@ class CLI(abc.ABC): ) -> list[str]: from . 
import argparse as pr34_argparse - if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.setup: + if ( + pyproject_tool.meson + and pyproject_tool.meson.args + and pyproject_tool.meson.args.setup + ): extra_args = pyproject_tool.meson.args.setup + extra_args parser = argparse.ArgumentParser() @@ -657,8 +714,13 @@ class CLI(abc.ABC): options, args = pr34_argparse.parse_args(parser, extra_args) if not options.cross_file is None: - if not options.cross_file.exists() and (not options.cross_file.is_absolute() and options.cross_file.stem in self.meson_toolchains): - options.cross_file = self.meson_toolchains[options.cross_file.stem].path + if not options.cross_file.exists() and ( + not options.cross_file.is_absolute() + and options.cross_file.stem in self.meson_toolchains + ): + options.cross_file = self.meson_toolchains[ + options.cross_file.stem + ].path extra_args = ['--cross-file', str(options.cross_file)] + args @@ -687,15 +749,26 @@ class CLI(abc.ABC): if env is None: env = dict() - pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml') + pyproject = cli_bootstrap.pyproject_load( + project.source_dir / 'pyproject.toml' + ) - pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root + pyproject_tool = ( + pydantic.RootModel[PyProject.Tool] + .model_validate(pyproject.tool) + .root + ) logger.info(dict(env=env)) if force: if (project.build_dir / mode).exists(): - logger.info(dict(action='removing build dir', path=project.build_dir / mode)) + logger.info( + dict( + action='removing build dir', + path=project.build_dir / mode, + ) + ) shutil.rmtree(project.build_dir / mode) extra_args: list[str] = [] @@ -706,7 +779,15 @@ class CLI(abc.ABC): ) if len(self.third_party_roots(project_name)) > 0: - extra_args.append('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)])) + extra_args.append( + '-Dthird_party_roots=%s' + % json.dumps( + [ + str(o.absolute()) + for o in self.third_party_roots(project_name) + ] + ) + ) cmd = [ # shutil_which( @@ -719,7 +800,9 @@ class CLI(abc.ABC): 'setup', str(project.source_dir), str(project.build_dir / mode), - '--pkg-config-path={}'.format(json.dumps([str(o) for o in self.pkg_config_path(project_name)])), + '--pkg-config-path={}'.format( + json.dumps([str(o) for o in self.pkg_config_path(project_name)]) + ), '-Dmodes=["{}"]'.format(mode), *extra_args, # '-Dpkgconfig.relocatable=true', @@ -769,14 +852,21 @@ class CLI(abc.ABC): argv, ) - pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml') + pyproject = cli_bootstrap.pyproject_load( + project.source_dir / 'pyproject.toml' + ) - dependencies = sum([pyproject.dependencies[o] for o in options.features], cast(list[str], [])) + dependencies = sum( + [pyproject.dependencies[o] for o in options.features], + cast(list[str], []), + ) pip_find_links: list[pathlib.Path] = [] if not pyproject.pip_find_links is None: - pip_find_links.extend([o for o in pyproject.pip_find_links if o.exists()]) + pip_find_links.extend( + [o for o in pyproject.pip_find_links if o.exists()] + ) requirements_name_get_res = cli_bootstrap.requirements_name_get( source_dir=project.source_dir, @@ -885,7 +975,9 @@ class CLI(abc.ABC): assert options.module in [o.name for o in pyproject.modules] - modules: dict[str, cli_bootstrap.PyProject.Module] = {o.name: o for o in pyproject.modules} + modules: dict[str, cli_bootstrap.PyProject.Module] = { + o.name: o for o in pyproject.modules + } module = 
modules[options.module] diff --git a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py index c1b4374..3b30e4a 100644 --- a/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py +++ b/python/online/fxreader/pr34/commands_typed/cli_bootstrap.py @@ -78,7 +78,9 @@ class PyProject: third_party_roots: list[ThirdPartyRoot] = dataclasses.field( default_factory=lambda: [], ) - requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict()) + requirements: dict[str, pathlib.Path] = dataclasses.field( + default_factory=lambda: dict() + ) modules: list[Module] = dataclasses.field( default_factory=lambda: [], @@ -124,7 +126,12 @@ def check_dict( else: VT_class = VT - assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()]) + assert all( + [ + isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) + for k, v in value2.items() + ] + ) if VT is None: return cast( @@ -233,7 +240,12 @@ def pyproject_load( str, ) - if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict): + if ( + 'tool' in content + and isinstance(content['tool'], dict) + and tool_name in content['tool'] + and isinstance(content['tool'][tool_name], dict) + ): pr34_tool = check_dict( check_dict( content['tool'], @@ -246,7 +258,9 @@ def pyproject_load( res.early_features = pr34_tool['early_features'] if 'pip_find_links' in pr34_tool: - res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']] + res.pip_find_links = [ + d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links'] + ] if 'runtime_libdirs' in pr34_tool: res.runtime_libdirs = [ @@ -265,7 +279,9 @@ def pyproject_load( if 'third_party_roots' in pr34_tool: for o in check_list(pr34_tool['third_party_roots']): o2 = check_dict(o, str, str) - assert all([k in {'package', 'module_root', 'path'} for k in o2]) + assert all( + [k in {'package', 'module_root', 'path'} for k in o2] + ) res.third_party_roots.append( PyProject.ThirdPartyRoot( @@ -279,7 +295,9 @@ def pyproject_load( res.requirements = { k: d.parent / pathlib.Path(v) # pathlib.Path(o) - for k, v in check_dict(pr34_tool['requirements'], str, str).items() + for k, v in check_dict( + pr34_tool['requirements'], str, str + ).items() } if 'modules' in pr34_tool: @@ -329,7 +347,10 @@ class BootstrapSettings: ).strip() ) pip_check_conflicts: Optional[bool] = dataclasses.field( - default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)], + default_factory=lambda: os.environ.get( + 'PIP_CHECK_CONFLICTS', json.dumps(True) + ) + in [json.dumps(True)], ) uv_args: list[str] = dataclasses.field( default_factory=lambda: os.environ.get( @@ -394,7 +415,9 @@ def requirements_name_get( else: requirements_path = source_dir / 'requirements.txt' - requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in') + requirements_path_in = requirements_path.parent / ( + requirements_path.stem + '.in' + ) requirements_in: list[str] = [] @@ -440,10 +463,15 @@ def env_bootstrap( requirements_in: list[str] = [] - requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']) + requirements_in.extend( + ['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'] + ) if pyproject.early_features: - early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], 
cast(list[str], [])) + early_dependencies = sum( + [pyproject.dependencies[o] for o in pyproject.early_features], + cast(list[str], []), + ) logger.info( dict( @@ -532,7 +560,11 @@ def env_bootstrap( subprocess.check_call( [ 'uv', - *[o for o in bootstrap_settings.uv_args if not o in ['-U', '--upgrade']], + *[ + o + for o in bootstrap_settings.uv_args + if not o in ['-U', '--upgrade'] + ], 'venv', *venv_python_version, *cache_find_links_args, diff --git a/python/online/fxreader/pr34/commands_typed/color_scheme.py b/python/online/fxreader/pr34/commands_typed/color_scheme.py index a71f612..13106f5 100644 --- a/python/online/fxreader/pr34/commands_typed/color_scheme.py +++ b/python/online/fxreader/pr34/commands_typed/color_scheme.py @@ -58,9 +58,25 @@ def run(argv: list[str]) -> None: def set_theme(theme: Literal['light', 'dark', 'default']) -> None: if theme == 'light': - subprocess.check_call(['gsettings', 'set', 'org.gnome.desktop.interface', 'color-scheme', 'prefer-light']) + subprocess.check_call( + [ + 'gsettings', + 'set', + 'org.gnome.desktop.interface', + 'color-scheme', + 'prefer-light', + ] + ) elif theme == 'dark': - subprocess.check_call(['gsettings', 'set', 'org.gnome.desktop.interface', 'color-scheme', 'prefer-dark']) + subprocess.check_call( + [ + 'gsettings', + 'set', + 'org.gnome.desktop.interface', + 'color-scheme', + 'prefer-dark', + ] + ) elif theme == 'default': subprocess.check_call( [ diff --git a/python/online/fxreader/pr34/commands_typed/crypto.py b/python/online/fxreader/pr34/commands_typed/crypto.py index 327d1cb..aeac872 100644 --- a/python/online/fxreader/pr34/commands_typed/crypto.py +++ b/python/online/fxreader/pr34/commands_typed/crypto.py @@ -64,7 +64,9 @@ class PasswordUtils: raise NotImplementedError @classmethod - def _scrypt_init(cls, salt: bytes) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt: + def _scrypt_init( + cls, salt: bytes + ) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt: return cryptography.hazmat.primitives.kdf.scrypt.Scrypt( salt=salt, length=32, diff --git a/python/online/fxreader/pr34/commands_typed/logging.py b/python/online/fxreader/pr34/commands_typed/logging.py index afdd4c1..abc4bf8 100644 --- a/python/online/fxreader/pr34/commands_typed/logging.py +++ b/python/online/fxreader/pr34/commands_typed/logging.py @@ -10,5 +10,7 @@ def setup(level: Optional[int] = None) -> None: logging.basicConfig( level=level, - format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'), + format=( + '%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s' + ), ) diff --git a/python/online/fxreader/pr34/commands_typed/metrics.py b/python/online/fxreader/pr34/commands_typed/metrics.py index 7c6881e..c98cae1 100644 --- a/python/online/fxreader/pr34/commands_typed/metrics.py +++ b/python/online/fxreader/pr34/commands_typed/metrics.py @@ -47,7 +47,15 @@ class Metric(pydantic.BaseModel): if o.type == 'gauge': samples.append( - Metric.Sample(parameters=s.parameters, value='NaN', timestamp=(s.timestamp + datetime.timedelta(seconds=15) if s.timestamp else None)) + Metric.Sample( + parameters=s.parameters, + value='NaN', + timestamp=( + s.timestamp + datetime.timedelta(seconds=15) + if s.timestamp + else None + ), + ) ) return ''.join( @@ -65,7 +73,11 @@ class Metric(pydantic.BaseModel): ] ), value=s2.value, - timestamp=('%.f' % (s2.timestamp.timestamp() * 1000,) if s2.timestamp else ''), + timestamp=( + '%.f' % (s2.timestamp.timestamp() * 1000,) + if s2.timestamp + 
else '' + ), ) for s2 in samples ] @@ -87,9 +99,19 @@ def serialize( '{help}{type}{samples}'.format( # help='# HELP %s some metric' % o.name, # type='# TYPE %s counter' % o.name, - help=('# HELP {0} {1}\n'.format(o.name, o.help) if o.help else ''), - type=('# TYPE {0} {1}\n'.format(o.name, o.type) if o.type else ''), - samples=''.join([Metric.sample_serialize(o, s) for s in o.samples]), + help=( + '# HELP {0} {1}\n'.format(o.name, o.help) + if o.help + else '' + ), + type=( + '# TYPE {0} {1}\n'.format(o.name, o.type) + if o.type + else '' + ), + samples=''.join( + [Metric.sample_serialize(o, s) for s in o.samples] + ), ) for o in metrics if len(o.samples) > 0 diff --git a/python/online/fxreader/pr34/commands_typed/mypy.py b/python/online/fxreader/pr34/commands_typed/mypy.py index 82bb7d8..91de680 100644 --- a/python/online/fxreader/pr34/commands_typed/mypy.py +++ b/python/online/fxreader/pr34/commands_typed/mypy.py @@ -38,7 +38,9 @@ class MypyFormatEntry: class MypyFormat: - vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode') + vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry( + name='vscode', value='vscode' + ) json: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json') @classmethod @@ -149,7 +151,11 @@ def run( assert not res.returncode is None errors = sorted( - [json.loads(o) for o in res.stdout.decode('utf-8').splitlines() if not o.strip() == ''], + [ + json.loads(o) + for o in res.stdout.decode('utf-8').splitlines() + if not o.strip() == '' + ], key=lambda x: ( x.get('file', ''), x.get('line', 0), diff --git a/python/online/fxreader/pr34/commands_typed/os.py b/python/online/fxreader/pr34/commands_typed/os.py index 4dc2b9b..13a646d 100644 --- a/python/online/fxreader/pr34/commands_typed/os.py +++ b/python/online/fxreader/pr34/commands_typed/os.py @@ -54,8 +54,21 @@ def runtime_libdirs_init( ld_library_path: list[pathlib.Path] = [ o for o in [ - *[o.absolute() for o in (project.runtime_libdirs if project.runtime_libdirs else [])], - *[pathlib.Path(o) for o in os.environ.get('LD_LIBRARY_PATH', '').split(os.path.pathsep) if o != ''], + *[ + o.absolute() + for o in ( + project.runtime_libdirs + if project.runtime_libdirs + else [] + ) + ], + *[ + pathlib.Path(o) + for o in os.environ.get('LD_LIBRARY_PATH', '').split( + os.path.pathsep + ) + if o != '' + ], ] ] @@ -72,10 +85,16 @@ def runtime_libdirs_init( ld_library_path_present.append(o) - os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present])) + os.environ.update( + LD_LIBRARY_PATH=os.path.pathsep.join( + [str(o) for o in ld_library_path_present] + ) + ) for preload_path in project.runtime_preload or []: - for preload_found in glob.glob(str(preload_path.parent / ('lib%s.so' % preload_path.name))): + for preload_found in glob.glob( + str(preload_path.parent / ('lib%s.so' % preload_path.name)) + ): logger.info( dict( preload_path=preload_path, diff --git a/python/online/fxreader/pr34/commands_typed/pip.py b/python/online/fxreader/pr34/commands_typed/pip.py index c958fb9..9fd1ab3 100644 --- a/python/online/fxreader/pr34/commands_typed/pip.py +++ b/python/online/fxreader/pr34/commands_typed/pip.py @@ -101,8 +101,20 @@ class pip_resolve_t: entries: Optional[list[download_info_t]] = None -def pip_resolve_entries_to_txt(entries: list[pip_resolve_t.res_t.download_info_t]) -> str: - return '\n'.join(['#%s\n%s %s' % (o.url, o.constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256])) for o in entries]) +def pip_resolve_entries_to_txt( + entries: 
list[pip_resolve_t.res_t.download_info_t], +) -> str: + return '\n'.join( + [ + '#%s\n%s %s' + % ( + o.url, + o.constraint, + ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256]), + ) + for o in entries + ] + ) def pip_resolve( @@ -128,7 +140,9 @@ def pip_resolve( import pip._internal.models.direct_url with contextlib.ExitStack() as stack: - stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager()) + stack.enter_context( + pip._internal.utils.temp_dir.global_tempdir_manager() + ) t2 = pip._internal.cli.main_parser.create_main_parser() @@ -166,15 +180,22 @@ def pip_resolve( pip._internal.cli.cmdoptions.check_dist_restriction(options) # t1._in_main_context = True session = t1.get_default_session(options) - target_python = pip._internal.cli.cmdoptions.make_target_python(options) - finder = cast(pip_resolve_t.build_package_finder_t, getattr(t1, '_build_package_finder'))( + target_python = pip._internal.cli.cmdoptions.make_target_python( + options + ) + finder = cast( + pip_resolve_t.build_package_finder_t, + getattr(t1, '_build_package_finder'), + )( options=options, session=session, target_python=target_python, ignore_requires_python=options.ignore_requires_python, ) - build_tracker = t1.enter_context(pip._internal.operations.build.build_tracker.get_build_tracker()) + build_tracker = t1.enter_context( + pip._internal.operations.build.build_tracker.get_build_tracker() + ) reqs = t1.get_requirements( [ #'pip', 'uv', 'ipython', @@ -184,8 +205,12 @@ def pip_resolve( finder, session, ) - pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs) - directory = pip._internal.utils.temp_dir.TempDirectory(delete=True, kind='download', globally_managed=True) + pip._internal.req.req_install.check_legacy_setup_py_options( + options, reqs + ) + directory = pip._internal.utils.temp_dir.TempDirectory( + delete=True, kind='download', globally_managed=True + ) preparer = t1.make_requirement_preparer( temp_build_dir=directory, options=options, @@ -205,7 +230,9 @@ def pip_resolve( py_version_info=options.python_version, ) t1.trace_basic_info(finder) - requirement_set = resolver.resolve(reqs, check_supported_wheels=True) + requirement_set = resolver.resolve( + reqs, check_supported_wheels=True + ) res = pip_resolve_t.res_t() @@ -279,7 +306,9 @@ def pip_resolve( location, ) - batch_downloader_call_def = pip._internal.network.download.BatchDownloader.__call__ + batch_downloader_call_def = ( + pip._internal.network.download.BatchDownloader.__call__ + ) def batch_downloader_call( _self: pip._internal.network.download.BatchDownloader, @@ -298,7 +327,9 @@ def pip_resolve( return [(o, ('/dev/null', '')) for o in links] # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve - base_resolver_resolve_def = pip._internal.resolution.resolvelib.resolver.Resolver.resolve + base_resolver_resolve_def = ( + pip._internal.resolution.resolvelib.resolver.Resolver.resolve + ) result_requirements: list[RequirementSet | InstallRequirement] = [] @@ -309,7 +340,9 @@ def pip_resolve( ) -> RequirementSet: # print(args, kwargs) - res = base_resolver_resolve_def(_self, root_reqs, check_supported_wheels) + res = base_resolver_resolve_def( + _self, root_reqs, check_supported_wheels + ) result_requirements.append(res) raise NotImplementedError @@ -369,7 +402,13 @@ def pip_resolve( patches: list[Any] = [] - patches.append(unittest.mock.patch.object(pip._internal.network.download.Downloader, '__call__', downloader_call)) + patches.append( + unittest.mock.patch.object( + 
pip._internal.network.download.Downloader, + '__call__', + downloader_call, + ) + ) # patches.append( # unittest.mock.patch.object( # pip._internal.network.download.BatchDownloader, @@ -574,4 +613,6 @@ def pip_check_conflicts( if line.strip() != '' ] - return pip_check_conflicts_t.res_t(status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates) + return pip_check_conflicts_t.res_t( + status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates + ) diff --git a/python/online/fxreader/pr34/commands_typed/pydantic.py b/python/online/fxreader/pr34/commands_typed/pydantic.py index c2f4291..6d86c9c 100644 --- a/python/online/fxreader/pr34/commands_typed/pydantic.py +++ b/python/online/fxreader/pr34/commands_typed/pydantic.py @@ -21,28 +21,36 @@ R = TypeVar('R') @overload -def validate_params(view: Callable[..., Awaitable[R]]) -> Callable[..., Awaitable[R]]: ... +def validate_params( + view: Callable[..., Awaitable[R]], +) -> Callable[..., Awaitable[R]]: ... @overload def validate_params(view: Callable[..., R]) -> Callable[..., R]: ... -def validate_params(view: Callable[..., Awaitable[R]] | Callable[..., R]) -> Callable[..., Awaitable[R]] | Callable[..., R]: +def validate_params( + view: Callable[..., Awaitable[R]] | Callable[..., R], +) -> Callable[..., Awaitable[R]] | Callable[..., R]: class Parameter: kind: Any annotation: Any - parameters = cast(Mapping[str, Parameter], inspect.signature(view).parameters) + parameters = cast( + Mapping[str, Parameter], inspect.signature(view).parameters + ) - positional_parameters: collections.OrderedDict[str, type[Any]] = collections.OrderedDict( - ( - (k, v.annotation) - for k, v in parameters.items() - if v.kind - in ( - inspect.Parameter.POSITIONAL_ONLY, - inspect.Parameter.POSITIONAL_OR_KEYWORD, + positional_parameters: collections.OrderedDict[str, type[Any]] = ( + collections.OrderedDict( + ( + (k, v.annotation) + for k, v in parameters.items() + if v.kind + in ( + inspect.Parameter.POSITIONAL_ONLY, + inspect.Parameter.POSITIONAL_OR_KEYWORD, + ) ) ) ) diff --git a/python/online/fxreader/pr34/commands_typed/status.py b/python/online/fxreader/pr34/commands_typed/status.py index 3ae2246..f3043e4 100644 --- a/python/online/fxreader/pr34/commands_typed/status.py +++ b/python/online/fxreader/pr34/commands_typed/status.py @@ -23,7 +23,12 @@ def run(argv: list[str]): def format_option(self, *args: Any, **kwargs: Any) -> Any: def f1(text: str, width: Optional[int]) -> list[str]: width = None - return '\n'.join([textwrap.fill('\t' + o, width, replace_whitespace=False) for o in text.splitlines()]).splitlines() + return '\n'.join( + [ + textwrap.fill('\t' + o, width, replace_whitespace=False) + for o in text.splitlines() + ] + ).splitlines() t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option) t2 = ( diff --git a/python/online/fxreader/pr34/oom_firefox.py b/python/online/fxreader/pr34/oom_firefox.py index d50eca6..5e6733c 100644 --- a/python/online/fxreader/pr34/oom_firefox.py +++ b/python/online/fxreader/pr34/oom_firefox.py @@ -74,7 +74,9 @@ class get_firefox_procs_ps_t: cmd: str -def get_firefox_procs_ps(slice_name=None) -> list[get_firefox_procs_ps_t.res_t.entry_t]: +def get_firefox_procs_ps( + slice_name=None, +) -> list[get_firefox_procs_ps_t.res_t.entry_t]: entries: dict[int, dict[str, Any]] = dict() for regex, columns in [ @@ -182,7 +184,11 @@ def is_main_firefox(p): return False -def kill_prioritized(procs: list['get_firefox_procs_ps_t.res_t.entry_t'], to_free_mb, low_priority_pids): +def kill_prioritized( + 
procs: list['get_firefox_procs_ps_t.res_t.entry_t'], + to_free_mb, + low_priority_pids, +): candidates = [] for p in procs: if is_main_firefox(p): @@ -224,7 +230,9 @@ def kill_prioritized(procs: list['get_firefox_procs_ps_t.res_t.entry_t'], to_fre # — systemd-run logic — -def launch_firefox_with_limits(base_cmd, memory_high, swap_max, extra_args, unit_name): +def launch_firefox_with_limits( + base_cmd, memory_high, swap_max, extra_args, unit_name +): cmd = [ 'systemd-run', '--user', @@ -250,7 +258,9 @@ def launch_firefox_with_limits(base_cmd, memory_high, swap_max, extra_args, unit def main(): - os.makedirs(pathlib.Path('~/.cache/oom_firefox/').expanduser(), exist_ok=True) + os.makedirs( + pathlib.Path('~/.cache/oom_firefox/').expanduser(), exist_ok=True + ) logging.basicConfig( level=logging.INFO, @@ -263,14 +273,50 @@ def main(): ], ) - parser = argparse.ArgumentParser(description='Firefox memory manager with slice + graceful shutdown') - parser.add_argument('--max-mb', type=float, required=True, help='Memory threshold in MB (used for killing logic & MemoryHigh)') - parser.add_argument('--kill-percent', type=float, default=70.0, help='If over max, kill until usage ≤ this percent of max') - parser.add_argument('--swap-max-mb', type=float, default=None, help='MemorySwapMax (MB) for the systemd scope') - parser.add_argument('--interval', type=float, default=1.0, help='Monitoring interval in seconds') - parser.add_argument('--unit-name', type=str, default='firefox-limited', help='Name for systemd transient unit') - parser.add_argument('--firefox-extra', action='append', default=[], help='Extra CLI args to pass to Firefox (can repeat)') - parser.add_argument('firefox_cmd', nargs=argparse.REMAINDER, help='Firefox command + args (if launching it)') + parser = argparse.ArgumentParser( + description='Firefox memory manager with slice + graceful shutdown' + ) + parser.add_argument( + '--max-mb', + type=float, + required=True, + help='Memory threshold in MB (used for killing logic & MemoryHigh)', + ) + parser.add_argument( + '--kill-percent', + type=float, + default=70.0, + help='If over max, kill until usage ≤ this percent of max', + ) + parser.add_argument( + '--swap-max-mb', + type=float, + default=None, + help='MemorySwapMax (MB) for the systemd scope', + ) + parser.add_argument( + '--interval', + type=float, + default=1.0, + help='Monitoring interval in seconds', + ) + parser.add_argument( + '--unit-name', + type=str, + default='firefox-limited', + help='Name for systemd transient unit', + ) + parser.add_argument( + '--firefox-extra', + action='append', + default=[], + help='Extra CLI args to pass to Firefox (can repeat)', + ) + parser.add_argument( + 'firefox_cmd', + nargs=argparse.REMAINDER, + help='Firefox command + args (if launching it)', + ) args = parser.parse_args() @@ -323,7 +369,9 @@ def main(): if total > limit: to_free = total - kill_to - killed, freed = kill_prioritized(procs, to_free, low_priority_pids) + killed, freed = kill_prioritized( + procs, to_free, low_priority_pids + ) lines.append(f'Killed: {killed}') lines.append(f'Freed ≈ {freed:.1f} MB') else: @@ -332,7 +380,11 @@ def main(): if firefox_proc and firefox_proc.poll() is not None: print('Firefox died — restarting …', file=sys.stderr) firefox_proc = launch_firefox_with_limits( - args.firefox_cmd, memory_high=args.max_mb, swap_max=args.swap_max_mb, extra_args=args.firefox_extra, unit_name=args.unit_name + args.firefox_cmd, + memory_high=args.max_mb, + swap_max=args.swap_max_mb, + extra_args=args.firefox_extra, + 
unit_name=args.unit_name, ) body.text = '\n'.join(lines) @@ -354,7 +406,14 @@ def main(): close_dialog() dialog = Dialog( - title='Enter low‑priority PIDs', body=ta, buttons=[Button(text='OK', handler=on_ok), Button(text='Cancel', handler=on_cancel)], width=60, modal=True + title='Enter low‑priority PIDs', + body=ta, + buttons=[ + Button(text='OK', handler=on_ok), + Button(text='Cancel', handler=on_cancel), + ], + width=60, + modal=True, ) f = Float(content=dialog, left=2, top=2) dialog_float[0] = f @@ -365,7 +424,13 @@ def main(): def on_close(): close_dialog() - dialog = Dialog(title=title, body=Label(text=message), buttons=[Button(text='Close', handler=on_close)], width=50, modal=True) + dialog = Dialog( + title=title, + body=Label(text=message), + buttons=[Button(text='Close', handler=on_close)], + width=50, + modal=True, + ) f = Float(content=dialog, left=4, top=4) dialog_float[0] = f root_floats.append(f) @@ -409,7 +474,15 @@ def main(): root = FloatContainer( content=HSplit( - [Frame(body, title='Firefox Memory Manager'), Window(height=1, content=FormattedTextControl('q=quit, m=PID, h=help, s=setting, a=about'))] + [ + Frame(body, title='Firefox Memory Manager'), + Window( + height=1, + content=FormattedTextControl( + 'q=quit, m=PID, h=help, s=setting, a=about' + ), + ), + ] ), floats=root_floats, modal=True, @@ -457,7 +530,9 @@ def main(): t.start() # refresh_body() - app.run(handle_sigint=True) # from prompt‑toolkit API :contentReference[oaicite:0]{index=0} + app.run( + handle_sigint=True + ) # from prompt‑toolkit API :contentReference[oaicite:0]{index=0} t.join() diff --git a/python/online/fxreader/pr34/tasks/ble.py b/python/online/fxreader/pr34/tasks/ble.py index 48aa22c..b7ba7e5 100644 --- a/python/online/fxreader/pr34/tasks/ble.py +++ b/python/online/fxreader/pr34/tasks/ble.py @@ -23,7 +23,13 @@ async def f2(device, timeout=None): async def f3(client): - t1 = [dict(service=o.__dict__, characteristics=[o2.__dict__ for o2 in o.characteristics]) for o in client.services] + t1 = [ + dict( + service=o.__dict__, + characteristics=[o2.__dict__ for o2 in o.characteristics], + ) + for o in client.services + ] return t1 @@ -43,7 +49,13 @@ async def f5( t5 = {i: o.details[0].name() for i, o in enumerate(t1)} - t2.extend([t1[k] for k, v in t5.items() if isinstance(v, str) and name_check(v)]) + t2.extend( + [ + t1[k] + for k, v in t5.items() + if isinstance(v, str) and name_check(v) + ] + ) else: t2.extend(t1) @@ -66,7 +78,9 @@ async def f4( assert name_check in [ 'watch fit', ] - name_check2 = lambda current_name: name_check.lower() in current_name.lower() + name_check2 = ( + lambda current_name: name_check.lower() in current_name.lower() + ) else: name_check2 = name_check diff --git a/python/online/fxreader/pr34/tasks/cython.py b/python/online/fxreader/pr34/tasks/cython.py index a0b4f3f..58b63d2 100644 --- a/python/online/fxreader/pr34/tasks/cython.py +++ b/python/online/fxreader/pr34/tasks/cython.py @@ -66,7 +66,13 @@ def build(content: str, module: M) -> M: # ) t1.run() - return cast(M, Cython.Build.Inline.load_dynamic('_%s' % sha256sum, glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0])) + return cast( + M, + Cython.Build.Inline.load_dynamic( + '_%s' % sha256sum, + glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0], + ), + ) raise NotImplementedError @@ -125,7 +131,9 @@ def mypyc_build(file_path: pathlib.Path) -> Any: # f.write(content) t1 = Cython.Build.Inline._get_build_extension() - t1.extensions = mypyc.build.mypycify([str(source_path)], target_dir=str(output_dir / 
'build')) + t1.extensions = mypyc.build.mypycify( + [str(source_path)], target_dir=str(output_dir / 'build') + ) t1.build_temp = str(output_dir) t1.build_lib = str(lib_dir) # t2 = Cython.Build.Inline.Extension( @@ -147,7 +155,11 @@ def mypyc_build(file_path: pathlib.Path) -> Any: class Source: @staticmethod - def test2(_a: numpy.ndarray[Any, numpy.dtype[numpy.int64]], _id: numpy.dtype[numpy.int32] | int, T: float = 16) -> int: + def test2( + _a: numpy.ndarray[Any, numpy.dtype[numpy.int64]], + _id: numpy.dtype[numpy.int32] | int, + T: float = 16, + ) -> int: raise NotImplementedError @@ -243,7 +255,11 @@ def test_cython(N: int = 4, T: int = 16) -> None: def test_mypyc(N: int = 4, W: int = 35) -> None: - cython2 = mypyc_build((pathlib.Path(__file__).parent / 'cython2.py').relative_to(pathlib.Path.cwd())) + cython2 = mypyc_build( + (pathlib.Path(__file__).parent / 'cython2.py').relative_to( + pathlib.Path.cwd() + ) + ) # from .cython2 import fib diff --git a/python/online/fxreader/pr34/tasks/jigsaw_toxic.py b/python/online/fxreader/pr34/tasks/jigsaw_toxic.py index 2daac22..ea16158 100644 --- a/python/online/fxreader/pr34/tasks/jigsaw_toxic.py +++ b/python/online/fxreader/pr34/tasks/jigsaw_toxic.py @@ -73,8 +73,21 @@ def kernel_2(): from keras.layers.embeddings import Embedding from keras.layers.normalization import BatchNormalization from keras.utils import np_utils - from sklearn import preprocessing, decomposition, model_selection, metrics, pipeline - from keras.layers import GlobalMaxPooling1D, Conv1D, MaxPooling1D, Flatten, Bidirectional, SpatialDropout1D + from sklearn import ( + preprocessing, + decomposition, + model_selection, + metrics, + pipeline, + ) + from keras.layers import ( + GlobalMaxPooling1D, + Conv1D, + MaxPooling1D, + Flatten, + Bidirectional, + SpatialDropout1D, + ) from keras.preprocessing import sequence, text from keras.callbacks import EarlyStopping @@ -112,15 +125,25 @@ def kernel_2(): print('REPLICAS: ', strategy.num_replicas_in_sync) # %% [code] - train = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv') - validation = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv') - test = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv') + train = pd.read_csv( + '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv' + ) + validation = pd.read_csv( + '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv' + ) + test = pd.read_csv( + '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv' + ) # %% [markdown] # We will drop the other columns and approach this problem as a Binary Classification Problem and also we will have our exercise done on a smaller subsection of the dataset(only 12000 data points) to make it easier to train the models # %% [code] - train.drop(['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'], axis=1, inplace=True) + train.drop( + ['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'], + axis=1, + inplace=True, + ) # %% [code] train = train.loc[:12000, :] @@ -137,7 +160,12 @@ def kernel_2(): # %% [code] xtrain, xvalid, ytrain, yvalid = train_test_split( - train.comment_text.values, train.toxic.values, stratify=train.toxic.values, random_state=42, test_size=0.2, shuffle=True + train.comment_text.values, + train.toxic.values, + stratify=train.toxic.values, + random_state=42, + test_size=0.2, + shuffle=True, ) # %% 
[markdown] @@ -206,7 +234,9 @@ def kernel_2(): model.add(Embedding(len(word_index) + 1, 300, input_length=max_len)) model.add(SimpleRNN(100)) model.add(Dense(1, activation='sigmoid')) - model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']) + model.compile( + loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'] + ) model.summary() @@ -253,7 +283,10 @@ def kernel_3( o_2['model'].load_weights('model.h5') else: o_2['model'].fit( - o_2['xtrain_pad'], o_2['ytrain'], nb_epoch=nb_epochs, batch_size=64 * o_2['strategy'].num_replicas_in_sync + o_2['xtrain_pad'], + o_2['ytrain'], + nb_epoch=nb_epochs, + batch_size=64 * o_2['strategy'].num_replicas_in_sync, ) # Multiplying by Strategy to run on TPU's o_2['model'].save_weights('model.h5') @@ -263,7 +296,9 @@ def kernel_3( # %% [code] scores_model = [] - scores_model.append({'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])}) + scores_model.append( + {'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])} + ) # %% [markdown] # ## Code Explanantion @@ -283,7 +318,12 @@ def kernel_4( import keras.preprocessing.sequence if input_texts is None: - input_texts = ['blahb blahb blah', 'Hello World!', 'This is very good!', 'A very non toxic comment! This is so polite and polished one!'] + input_texts = [ + 'blahb blahb blah', + 'Hello World!', + 'This is very good!', + 'A very non toxic comment! This is so polite and polished one!', + ] t6 = [] for o in input_texts: @@ -291,7 +331,9 @@ def kernel_4( t2 = o_2['token'].texts_to_sequences( [t1], ) - t3 = keras.preprocessing.sequence.pad_sequences(t2, maxlen=o_2['max_len']) + t3 = keras.preprocessing.sequence.pad_sequences( + t2, maxlen=o_2['max_len'] + ) t4 = o_2['model'].predict( t3, ) diff --git a/python/online/fxreader/pr34/tasks/mlb_player.py b/python/online/fxreader/pr34/tasks/mlb_player.py index 35701ef..a2d8fa3 100644 --- a/python/online/fxreader/pr34/tasks/mlb_player.py +++ b/python/online/fxreader/pr34/tasks/mlb_player.py @@ -42,12 +42,26 @@ def kernel_2( ): t1 = {} - for k in ['playerTwitterFollowers', 'teamTwitterFollowers', 'games', 'events']: + for k in [ + 'playerTwitterFollowers', + 'teamTwitterFollowers', + 'games', + 'events', + ]: t4 = '%s.nc' % k if not os.path.exists(t4): print('started %s' % t4) t2 = '/kaggle/input/mlb-player-digital-engagement-forecasting/train.csv' - t3 = pandas.DataFrame(sum([json.loads(o) for o in o_1['t3'][t2][k].values if isinstance(o, str)], [])).to_xarray() + t3 = pandas.DataFrame( + sum( + [ + json.loads(o) + for o in o_1['t3'][t2][k].values + if isinstance(o, str) + ], + [], + ) + ).to_xarray() t3.to_netcdf(t4) print('cached %s' % t4) @@ -55,7 +69,9 @@ def kernel_2( t5 = '%s-v2.nc' % k if not os.path.exists(t5): t2 = xarray.load_dataset(t4) - t3 = t2.sel(index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1)) + t3 = t2.sel( + index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1) + ) t3.to_netcdf(t5) t1[k] = xarray.load_dataset(t5) print('loaded %s' % t5) @@ -119,9 +135,15 @@ def kernel_3(should_exist=None): def kernel_4( o_3=None, ): - [print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)] + [ + print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4)) + for k in range(-10, -1) + ] - [print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)] + [ + print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4)) + for k in range(-10, -1) + ] t4 = 'https://www.youtube.com/watch?v=reaC7BHgL3M' @@ -264,7 +286,9 @@ def 
kernel_6( try: cap = cv2.VideoCapture(o) - fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" + fps = cap.get( + cv2.CAP_PROP_FPS + ) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) duration = frame_count / fps finally: @@ -454,15 +478,31 @@ def kernel_7( for k in layer: v = layer[k] if 'pool' in k: - layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] + layers += [ + nn.MaxPool2d( + kernel_size=v[0], stride=v[1], padding=v[2] + ) + ] else: - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + conv2d = nn.Conv2d( + in_channels=v[0], + out_channels=v[1], + kernel_size=v[2], + stride=v[3], + padding=v[4], + ) layers += [conv2d, nn.ReLU(inplace=True)] layer = list(layer_dict[-1].keys()) k = layer[0] v = layer_dict[-1][k] - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + conv2d = nn.Conv2d( + in_channels=v[0], + out_channels=v[1], + kernel_size=v[2], + stride=v[3], + padding=v[4], + ) layers += [conv2d] return nn.Sequential(*layers) @@ -530,9 +570,19 @@ def kernel_7( for key in block: v = block[key] if 'pool' in key: - layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])] + layers += [ + nn.MaxPool2d( + kernel_size=v[0], stride=v[1], padding=v[2] + ) + ] else: - conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4]) + conv2d = nn.Conv2d( + in_channels=v[0], + out_channels=v[1], + kernel_size=v[2], + stride=v[3], + padding=v[4], + ) layers += [conv2d, nn.ReLU(inplace=True)] models = {'block_0': nn.Sequential(*layers)} @@ -543,16 +593,38 @@ def kernel_7( return PoseEstimation(models) - def get_paf_and_heatmap(model, img_raw, scale_search, param_stride=8, box_size=368): - multiplier = [scale * box_size / img_raw.shape[0] for scale in scale_search] + def get_paf_and_heatmap( + model, img_raw, scale_search, param_stride=8, box_size=368 + ): + multiplier = [ + scale * box_size / img_raw.shape[0] for scale in scale_search + ] - heatmap_avg = torch.zeros((len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])).cuda() - paf_avg = torch.zeros((len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])).cuda() + heatmap_avg = torch.zeros( + (len(multiplier), 19, img_raw.shape[0], img_raw.shape[1]) + ).cuda() + paf_avg = torch.zeros( + (len(multiplier), 38, img_raw.shape[0], img_raw.shape[1]) + ).cuda() for i, scale in enumerate(multiplier): - img_test = cv2.resize(img_raw, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC) - img_test_pad, pad = pad_right_down_corner(img_test, param_stride, param_stride) - img_test_pad = np.transpose(np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)) / 256 - 0.5 + img_test = cv2.resize( + img_raw, + (0, 0), + fx=scale, + fy=scale, + interpolation=cv2.INTER_CUBIC, + ) + img_test_pad, pad = pad_right_down_corner( + img_test, param_stride, param_stride + ) + img_test_pad = ( + np.transpose( + np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1) + ) + / 256 + - 0.5 + ) feed = Variable(torch.from_numpy(img_test_pad)).cuda() output1, output2 = model(feed) @@ -560,17 +632,27 @@ def kernel_7( # print(output1.size()) # print(output2.size()) - heatmap = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output2) + heatmap = nn.UpsamplingBilinear2d( + (img_raw.shape[0], img_raw.shape[1]) + ).cuda()(output2) - paf = nn.UpsamplingBilinear2d((img_raw.shape[0], 
img_raw.shape[1])).cuda()(output1) + paf = nn.UpsamplingBilinear2d( + (img_raw.shape[0], img_raw.shape[1]) + ).cuda()(output1) heatmap_avg[i] = heatmap[0].data paf_avg[i] = paf[0].data - heatmap_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), 1, 2).cuda() + heatmap_avg = torch.transpose( + torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), + 1, + 2, + ).cuda() heatmap_avg = heatmap_avg.cpu().numpy() - paf_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2).cuda() + paf_avg = torch.transpose( + torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2 + ).cuda() paf_avg = paf_avg.cpu().numpy() return paf_avg, heatmap_avg @@ -592,20 +674,34 @@ def kernel_7( map_down = np.zeros(map_gau.shape) map_down[:, :-1] = map_gau[:, 1:] - peaks_binary = np.logical_and.reduce((map_gau >= map_left, map_gau >= map_right, map_gau >= map_up, map_gau >= map_down, map_gau > param_thre1)) + peaks_binary = np.logical_and.reduce( + ( + map_gau >= map_left, + map_gau >= map_right, + map_gau >= map_up, + map_gau >= map_down, + map_gau > param_thre1, + ) + ) - peaks = zip(np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]) # note reverse + peaks = zip( + np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0] + ) # note reverse peaks = list(peaks) peaks_with_score = [x + (map_ori[x[1], x[0]],) for x in peaks] ids = range(peak_counter, peak_counter + len(peaks)) - peaks_with_score_and_id = [peaks_with_score[i] + (ids[i],) for i in range(len(ids))] + peaks_with_score_and_id = [ + peaks_with_score[i] + (ids[i],) for i in range(len(ids)) + ] all_peaks.append(peaks_with_score_and_id) peak_counter += len(peaks) return all_peaks - def extract_paf_info(img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5): + def extract_paf_info( + img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5 + ): connection_all = [] special_k = [] mid_num = 10 @@ -626,27 +722,69 @@ def kernel_7( raise ZeroDivisionError vec = np.divide(vec, norm) - startend = zip(np.linspace(candA[i][0], candB[j][0], num=mid_num), np.linspace(candA[i][1], candB[j][1], num=mid_num)) + startend = zip( + np.linspace(candA[i][0], candB[j][0], num=mid_num), + np.linspace(candA[i][1], candB[j][1], num=mid_num), + ) startend = list(startend) - vec_x = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 0] for I in range(len(startend))]) - vec_y = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 1] for I in range(len(startend))]) + vec_x = np.array( + [ + score_mid[ + int(round(startend[I][1])), + int(round(startend[I][0])), + 0, + ] + for I in range(len(startend)) + ] + ) + vec_y = np.array( + [ + score_mid[ + int(round(startend[I][1])), + int(round(startend[I][0])), + 1, + ] + for I in range(len(startend)) + ] + ) - score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(vec_y, vec[1]) - score_with_dist_prior = sum(score_midpts) / len(score_midpts) - score_with_dist_prior += min(0.5 * img_raw.shape[0] / norm - 1, 0) + score_midpts = np.multiply(vec_x, vec[0]) + np.multiply( + vec_y, vec[1] + ) + score_with_dist_prior = sum(score_midpts) / len( + score_midpts + ) + score_with_dist_prior += min( + 0.5 * img_raw.shape[0] / norm - 1, 0 + ) - criterion1 = len(np.nonzero(score_midpts > param_thre2)[0]) > 0.8 * len(score_midpts) + criterion1 = len( + np.nonzero(score_midpts > param_thre2)[0] + ) > 0.8 * len(score_midpts) criterion2 = score_with_dist_prior > 0 if criterion1 and criterion2: - 
connection_candidate.append([i, j, score_with_dist_prior, score_with_dist_prior + candA[i][2] + candB[j][2]]) + connection_candidate.append( + [ + i, + j, + score_with_dist_prior, + score_with_dist_prior + + candA[i][2] + + candB[j][2], + ] + ) - connection_candidate = sorted(connection_candidate, key=lambda x: x[2], reverse=True) + connection_candidate = sorted( + connection_candidate, key=lambda x: x[2], reverse=True + ) connection = np.zeros((0, 5)) for c in range(len(connection_candidate)): i, j, s = connection_candidate[c][0:3] if i not in connection[:, 3] and j not in connection[:, 4]: - connection = np.vstack([connection, [candA[i][3], candB[j][3], s, i, j]]) + connection = np.vstack( + [connection, [candA[i][3], candB[j][3], s, i, j]] + ) if len(connection) >= min(nA, nB): break @@ -661,7 +799,9 @@ def kernel_7( # last number in each row is the total parts number of that person # the second last number in each row is the score of the overall configuration subset = -1 * np.ones((0, 20)) - candidate = np.array([item for sublist in all_peaks for item in sublist]) + candidate = np.array( + [item for sublist in all_peaks for item in sublist] + ) for k in range(len(map_ids)): if k not in special_k: @@ -673,7 +813,10 @@ def kernel_7( found = 0 subset_idx = [-1, -1] for j in range(len(subset)): # 1:size(subset,1): - if subset[j][indexA] == partAs[i] or subset[j][indexB] == partBs[i]: + if ( + subset[j][indexA] == partAs[i] + or subset[j][indexB] == partBs[i] + ): subset_idx[found] = j found += 1 @@ -682,11 +825,17 @@ def kernel_7( if subset[j][indexB] != partBs[i]: subset[j][indexB] = partBs[i] subset[j][-1] += 1 - subset[j][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] + subset[j][-2] += ( + candidate[partBs[i].astype(int), 2] + + connection_all[k][i][2] + ) elif found == 2: # if found 2 and disjoint, merge them j1, j2 = subset_idx print('found = 2') - membership = ((subset[j1] >= 0).astype(int) + (subset[j2] >= 0).astype(int))[:-2] + membership = ( + (subset[j1] >= 0).astype(int) + + (subset[j2] >= 0).astype(int) + )[:-2] if len(np.nonzero(membership == 2)[0]) == 0: # merge subset[j1][:-2] += subset[j2][:-2] + 1 subset[j1][-2:] += subset[j2][-2:] @@ -695,7 +844,10 @@ def kernel_7( else: # as like found == 1 subset[j1][indexB] = partBs[i] subset[j1][-1] += 1 - subset[j1][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2] + subset[j1][-2] += ( + candidate[partBs[i].astype(int), 2] + + connection_all[k][i][2] + ) # if find no partA in the subset, create a new subset elif not found and k < 17: @@ -703,7 +855,14 @@ def kernel_7( row[indexA] = partAs[i] row[indexB] = partBs[i] row[-1] = 2 - row[-2] = sum(candidate[connection_all[k][i, :2].astype(int), 2]) + connection_all[k][i][2] + row[-2] = ( + sum( + candidate[ + connection_all[k][i, :2].astype(int), 2 + ] + ) + + connection_all[k][i][2] + ) subset = np.vstack([subset, row]) return subset, candidate @@ -718,7 +877,9 @@ def kernel_7( for i in range(18): for j in range(len(all_peaks[i])): - cv2.circle(img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1) + cv2.circle( + img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1 + ) return subset, img_canvas @@ -735,9 +896,18 @@ def kernel_7( mY = np.mean(Y) length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5 angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1])) - polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1) + polygon = cv2.ellipse2Poly( + (int(mY), int(mX)), + (int(length / 
2), stickwidth), + int(angle), + 0, + 360, + 1, + ) cv2.fillConvexPoly(cur_canvas, polygon, colors[i]) - img_canvas = cv2.addWeighted(img_canvas, 0.4, cur_canvas, 0.6, 0) + img_canvas = cv2.addWeighted( + img_canvas, 0.4, cur_canvas, 0.6, 0 + ) return img_canvas @@ -754,11 +924,17 @@ def kernel_7( img_padded = img pad_up = np.tile(img_padded[0:1, :, :] * 0 + pad_value, (pad[0], 1, 1)) img_padded = np.concatenate((pad_up, img_padded), axis=0) - pad_left = np.tile(img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1)) + pad_left = np.tile( + img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1) + ) img_padded = np.concatenate((pad_left, img_padded), axis=1) - pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1)) + pad_down = np.tile( + img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1) + ) img_padded = np.concatenate((img_padded, pad_down), axis=0) - pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1)) + pad_right = np.tile( + img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1) + ) img_padded = np.concatenate((img_padded, pad_right), axis=1) return img_padded, pad @@ -784,11 +960,15 @@ def kernel_7( # In[4]: - state_dict = torch.load(model)['state_dict'] # getting the pre-trained model's parameters + state_dict = torch.load(model)[ + 'state_dict' + ] # getting the pre-trained model's parameters # A state_dict is simply a Python dictionary object that maps each layer to its parameter tensor. model_pose = get_pose_model() # building the model (see fn. defn. above). To see the architecture, see below cell. - model_pose.load_state_dict(state_dict) # Loading the parameters (weights, biases) into the model. + model_pose.load_state_dict( + state_dict + ) # Loading the parameters (weights, biases) into the model. model_pose.float() # I'm not sure why this is used. No difference if you remove it. 
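
A hedged aside on the checkpoint-loading idiom in the hunk above, since the
inline comment wonders what `model_pose.float()` is for: `torch.load(...)`
here returns a dict saved as `{'state_dict': ...}`, `load_state_dict` copies
those tensors into the module, and `.float()` casts all floating-point
parameters and buffers to float32, which is a no-op when the weights are
already fp32 (hence "no difference if you remove it"). A minimal sketch under
those assumptions; the checkpoint path is hypothetical and `get_pose_model`
is the builder defined earlier in this file:

    import torch

    ckpt = torch.load('pose_model.pth', map_location='cpu')  # hypothetical path
    model_pose = get_pose_model()                  # assumed from the code above
    model_pose.load_state_dict(ckpt['state_dict'])
    model_pose.float()  # casts params/buffers to fp32; no-op if already fp32
    model_pose.eval()   # not in the original hunk; typical before inference
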
@@ -797,7 +977,9 @@ def kernel_7( if use_gpu: model_pose.cuda() - model_pose = torch.nn.DataParallel(model_pose, device_ids=range(torch.cuda.device_count())) + model_pose = torch.nn.DataParallel( + model_pose, device_ids=range(torch.cuda.device_count()) + ) cudnn.benchmark = True def estimate_pose( @@ -833,7 +1015,9 @@ def kernel_7( img_points = None try: - paf_info, heatmap_info = get_paf_and_heatmap(model_pose, img_ori, scale_param) + paf_info, heatmap_info = get_paf_and_heatmap( + model_pose, img_ori, scale_param + ) peaks = extract_heatmap_info(heatmap_info) sp_k, con_all = extract_paf_info(img_ori, paf_info, peaks) @@ -876,7 +1060,13 @@ def kernel_7( def kernel_8( o_7, ): - for i, o in enumerate(['../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' % k for k in range(6)]): + for i, o in enumerate( + [ + '../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' + % k + for k in range(6) + ] + ): arch_image = o img_ori = o_7['cv2'].imread(arch_image) o_7['estimate_pose'](img_ori) @@ -887,7 +1077,9 @@ def kernel_9_benchmark( ): import datetime - t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') + t1 = o_7['cv2'].imread( + '../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg' + ) t5 = 10 t2 = datetime.datetime.now() for k in range(t5): @@ -905,7 +1097,9 @@ def kernel_10(): import torch # Model - model = torch.hub.load('ultralytics/yolov5', 'yolov5s') # or yolov5m, yolov5x, custom + model = torch.hub.load( + 'ultralytics/yolov5', 'yolov5s' + ) # or yolov5m, yolov5x, custom # Images img = 'https://ultralytics.com/images/zidane.jpg' # or file, PIL, OpenCV, numpy, multiple @@ -927,7 +1121,9 @@ def kernel_11_benchmark( ): import datetime - t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg') + t1 = o_7['cv2'].imread( + '../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg' + ) t5 = 10 t2 = datetime.datetime.now() for k in range(t5): @@ -956,7 +1152,18 @@ def kernel_13( if not len(t4) > 0 or not o_6 is None: t1 = pandas.concat( - sum([[o2['t11'][0].assign(frame_id=k, video_path=o['video_path']) for k, o2 in enumerate(o['frames'])] for o in o_6['t8']], []) + sum( + [ + [ + o2['t11'][0].assign( + frame_id=k, video_path=o['video_path'] + ) + for k, o2 in enumerate(o['frames']) + ] + for o in o_6['t8'] + ], + [], + ) ).to_xarray() t5 = t3[0] t1.to_netcdf(t5) @@ -1028,7 +1235,9 @@ def kernel_14( def kernel_15( o_14, ): - t1 = pandas.DataFrame(numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)).T + t1 = pandas.DataFrame( + numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True) + ).T pprint.pprint( dict( t1=t1, @@ -1078,7 +1287,9 @@ def kernel_15( t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB) t13 = t12.copy() t15 = numpy.array([t8.xcenter, t8.ycenter, t8.width, t8.height]) - t16 = numpy.array([t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]]) + t16 = numpy.array( + [t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]] + ) t17 = t15 * t16 t18 = t17[:2] - t17[2:] / 2 t19 = t17[:2] + t17[2:] / 2 @@ -1340,7 +1551,10 @@ def kernel_20( t1 = numpy.array(o_18['t2']['t7'][0]['keypoints']).reshape(17, -1) t2 = o_18['t2']['t6'][0] t3 = o_18['t2']['t1'][0]['image_canvas'].copy() - assert o_18['t2']['t7'][0]['image_id'] == os.path.split(o_18['t2']['t1'][0]['image_name'])[1] + assert ( + o_18['t2']['t7'][0]['image_id'] + == os.path.split(o_18['t2']['t1'][0]['image_name'])[1] + ) for i, o2 in enumerate(o_21['p_color']): if i >= 17: @@ -1449,7 +1663,16 @@ def 
kernel_22(o_18): o_31 = kernel_31( image_id=[o['image_id'] for o in t1], - image_size=numpy.array([[list(o['image_canvas'].shape) for o in o_18['t2']['t1'] if o['image_name'] == t1[i]['image_id']][0] for i in range(len(t2))]), + image_size=numpy.array( + [ + [ + list(o['image_canvas'].shape) + for o in o_18['t2']['t1'] + if o['image_name'] == t1[i]['image_id'] + ][0] + for i in range(len(t2)) + ] + ), keypoints=numpy.stack(t2, axis=0), ) t12 = o_31['t12'] @@ -1558,7 +1781,11 @@ def kernel_25(images, delay=None): def kernel_26(o_18, image_name): - t1 = [i for i, o in enumerate(o_18['t2']['t1']) if o['image_name'] == image_name] + t1 = [ + i + for i, o in enumerate(o_18['t2']['t1']) + if o['image_name'] == image_name + ] assert len(t1) == 1 return t1[0] @@ -1580,7 +1807,11 @@ def kernel_23(o_18, o_22, ids=None): t9 = kernel_26(o_18=o_18, image_name=t3['image_name']) t4 = o_18['t2']['t1'][t9]['image_canvas'] t10 = o_18['t2']['t6'][t9] - t4 = [o['image_canvas'] for o in o_18['t2']['t1'] if o['image_name'] == t3['image_name']] + t4 = [ + o['image_canvas'] + for o in o_18['t2']['t1'] + if o['image_name'] == t3['image_name'] + ] assert len(t4) == 1 t5 = t4[0] t6 = kernel_24(t5, t3['keypoints']) @@ -1641,7 +1872,9 @@ def kernel_27(): """ % (t4, t2) if False: pprint.pprint([t4, t2, t6]) - with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + with subprocess.Popen( + t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as p: if False: pprint.pprint(p.communicate()) p.wait() @@ -1669,7 +1902,9 @@ def kernel_28( max_seconds = 999999 if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + video_path = ( + '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + ) t5 = video_path t3 = '/kaggle/working/kernel_28-output%s.dir' % video_id t13 = '/root/kernel_28-output.dir/tmp-slice' @@ -1679,7 +1914,9 @@ def kernel_28( try: cap = cv2.VideoCapture(t5) - fps = cap.get(cv2.CAP_PROP_FPS) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" + fps = cap.get( + cv2.CAP_PROP_FPS + ) # OpenCV2 version 2 used "CV_CAP_PROP_FPS" frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT)) real_duration = frame_count / fps duration = min(real_duration, max_seconds) @@ -1739,7 +1976,9 @@ def kernel_28( t6, ] ) - with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p: + with subprocess.Popen( + t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) as p: if False: pprint.pprint(p.communicate()) p.wait() @@ -1757,7 +1996,9 @@ def kernel_29( video_id = '' if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + video_path = ( + '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + ) assert os.path.exists(video_path) @@ -1771,7 +2012,13 @@ def kernel_29( t7 = [o for o in t6 if os.path.exists(o)] if len(t7) == 0: - t1 = [dict(data=json.load(io.open(o, 'r')), input_path=o) for o in glob.glob('/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' % video_id)] + t1 = [ + dict(data=json.load(io.open(o, 'r')), input_path=o) + for o in glob.glob( + '/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' + % video_id + ) + ] assert len(t1) > 0 @@ -1835,7 +2082,9 @@ def kernel_30( low_mean_conf = 0.6 if video_path is None: - video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + video_path = ( + '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4' + ) if max_frames is None: max_frames = 9999 @@ -2045,7 +2294,10 
@@ def kernel_31(image_id, image_size, keypoints): ab = [a[0] - b[0], a[1] - b[1]] ab1 = [c[0] - d[0], c[1] - d[1]] - cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2) + 1e-8) + cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / ( + sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2) + + 1e-8 + ) ang = acos(cos) return ang * 180 / np.pi @@ -2204,7 +2456,11 @@ def kernel_33(): o_22 = kernel_22(o_18=o_18) import pandas - o_23 = kernel_23(o_18=o_18, o_22=o_22, ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values) + o_23 = kernel_23( + o_18=o_18, + o_22=o_22, + ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values, + ) o_27 = kernel_27() o_28 = kernel_28() o_29 = kernel_29() @@ -2273,7 +2529,9 @@ def kernel_36(): # import os from os.path import exists, join, basename, splitext - git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git' + git_repo_url = ( + 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git' + ) project_name = splitext(basename(git_repo_url))[0] if 1 or not exists(project_name): @@ -2282,8 +2540,18 @@ def kernel_36(): print('install new CMake becaue of CUDA10') cmake_version = 'cmake-3.20.2-linux-x86_64.tar.gz' if not exists(cmake_version): - assert os.system(r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """) == 0 - assert os.system(r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """) == 0 + assert ( + os.system( + r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """ + ) + == 0 + ) + assert ( + os.system( + r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """ + ) + == 0 + ) print('clone openpose') assert os.system(r"""!git clone -q --depth 1 $git_repo_url """) == 0 @@ -2295,7 +2563,12 @@ def kernel_36(): == 0 ) print('build openpose') - assert os.system(r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` """) == 0 + assert ( + os.system( + r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. 
&& make -j`nproc` """ + ) + == 0 + ) """## From a Google Drive's folder""" @@ -2310,7 +2583,9 @@ def kernel_36(): print(filename) colab_video_path = folder_path + filename print(colab_video_path) - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + colab_openpose_video_path = ( + colab_video_path.replace('.mp4', '') + '-openpose.mp4' + ) print(colab_openpose_video_path) if not exists(colab_openpose_video_path): assert ( @@ -2325,9 +2600,16 @@ def kernel_36(): assert os.system(r"""!pip install youtube-dl """) == 0 youtube_id = '2021-05-07_22-00-55_UTC' - assert os.system(r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """) == 0 + assert ( + os.system( + r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """ + ) + == 0 + ) colab_video_path = '/content/drive/My Drive/openpose/' + youtube_id + '.mp4' - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + colab_openpose_video_path = ( + colab_video_path.replace('.mp4', '') + '-openpose.mp4' + ) assert ( os.system( @@ -2352,7 +2634,9 @@ def kernel_36(): # from os.path import exists, join, basename, splitext # colab_video_path = '/content/drive/My Drive/bachata.mp4' colab_video_path = '/content/output.mp4' - colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4' + colab_openpose_video_path = ( + colab_video_path.replace('.mp4', '') + '-openpose.mp4' + ) assert ( os.system(