[+] reformat ruff
1. column width 80;
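
The reformat presumably stems from a ruff configuration along these lines; the configuration file itself is not part of this diff, so the section and keys below are an assumption inferred from the commit message and the single-quoted output, not the project's actual settings:

    [tool.ruff]
    line-length = 80            # "column width 80" from the commit message

    [tool.ruff.format]
    quote-style = 'single'      # assumption: the reformatted code keeps single quotes

With such a config, `ruff format` rewraps any statement longer than 80 columns, which is the single kind of change repeated through every hunk below.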
This commit is contained in:
parent 314426c674
commit 67fcefbce0

python/_m.py (19 lines changed)
@@ -53,7 +53,10 @@ def js(argv: list[str]) -> int:
             '--project-directory',
             Settings.settings().project_root,
             '-f',
-            Settings.settings().project_root / 'docker' / 'js' / 'docker-compose.yml',
+            Settings.settings().project_root
+            / 'docker'
+            / 'js'
+            / 'docker-compose.yml',
             *argv,
         ]
     )
@@ -67,7 +70,15 @@ def env(
     env_path = Settings.settings().env_path

     if not env_path.exists():
-        subprocess.check_call([sys.executable, '-m', 'venv', '--system-site-packages', str(env_path)])
+        subprocess.check_call(
+            [
+                sys.executable,
+                '-m',
+                'venv',
+                '--system-site-packages',
+                str(env_path),
+            ]
+        )

     subprocess.check_call(
         [
@@ -233,7 +244,9 @@ Command: TypeAlias = Literal[
 def run(argv: Optional[list[str]] = None) -> None:
     logging.basicConfig(
         level=logging.INFO,
-        format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
+        format=(
+            '%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
+        ),
     )

     if argv is None:

@@ -56,8 +56,18 @@ class CLI(_cli.CLI):
         self._projects: dict[str, _cli.Project] = {
             'online.fxreader.pr34': _cli.Project(
                 source_dir=self.settings.base_dir / 'python',
-                build_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'build',
-                dest_dir=self.settings.base_dir / 'tmp' / 'online' / 'fxreader' / 'pr34' / 'install',
+                build_dir=self.settings.base_dir
+                / 'tmp'
+                / 'online'
+                / 'fxreader'
+                / 'pr34'
+                / 'build',
+                dest_dir=self.settings.base_dir
+                / 'tmp'
+                / 'online'
+                / 'fxreader'
+                / 'pr34'
+                / 'install',
                 meson_path=self.settings.base_dir / 'python' / 'meson.build',
             )
         }
@@ -117,7 +127,9 @@ class CLI(_cli.CLI):

         parser = argparse.ArgumentParser()
         parser.add_argument('command', choices=[o.value for o in Command])
-        parser.add_argument('-p', '--project', choices=[o for o in self.projects])
+        parser.add_argument(
+            '-p', '--project', choices=[o for o in self.projects]
+        )
         parser.add_argument(
             '-o',
             '--output_dir',

python/m.py (54 lines changed)
@@ -78,7 +78,9 @@ class PyProject:
     third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
         default_factory=lambda: [],
     )
-    requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
+    requirements: dict[str, pathlib.Path] = dataclasses.field(
+        default_factory=lambda: dict()
+    )

     modules: list[Module] = dataclasses.field(
         default_factory=lambda: [],
@@ -124,7 +126,12 @@ def check_dict(
     else:
         VT_class = VT

-    assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()])
+    assert all(
+        [
+            isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
+            for k, v in value2.items()
+        ]
+    )

     if VT is None:
         return cast(
@@ -233,7 +240,12 @@ def pyproject_load(
         str,
     )

-    if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
+    if (
+        'tool' in content
+        and isinstance(content['tool'], dict)
+        and tool_name in content['tool']
+        and isinstance(content['tool'][tool_name], dict)
+    ):
         pr34_tool = check_dict(
             check_dict(
                 content['tool'],
@@ -246,7 +258,9 @@ def pyproject_load(
             res.early_features = pr34_tool['early_features']

         if 'pip_find_links' in pr34_tool:
-            res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
+            res.pip_find_links = [
+                d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
+            ]

         if 'runtime_libdirs' in pr34_tool:
             res.runtime_libdirs = [
@@ -265,7 +279,9 @@ def pyproject_load(
         if 'third_party_roots' in pr34_tool:
             for o in check_list(pr34_tool['third_party_roots']):
                 o2 = check_dict(o, str, str)
-                assert all([k in {'package', 'module_root', 'path'} for k in o2])
+                assert all(
+                    [k in {'package', 'module_root', 'path'} for k in o2]
+                )

                 res.third_party_roots.append(
                     PyProject.ThirdPartyRoot(
@@ -279,7 +295,9 @@ def pyproject_load(
             res.requirements = {
                 k: d.parent / pathlib.Path(v)
                 # pathlib.Path(o)
-                for k, v in check_dict(pr34_tool['requirements'], str, str).items()
+                for k, v in check_dict(
+                    pr34_tool['requirements'], str, str
+                ).items()
             }

         if 'modules' in pr34_tool:
@@ -328,7 +346,10 @@ class BootstrapSettings:
         ).strip()
     )
     pip_check_conflicts: Optional[bool] = dataclasses.field(
-        default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
+        default_factory=lambda: os.environ.get(
+            'PIP_CHECK_CONFLICTS', json.dumps(True)
+        )
+        in [json.dumps(True)],
     )
     uv_args: list[str] = dataclasses.field(
         default_factory=lambda: os.environ.get(
@@ -390,7 +411,9 @@ def requirements_name_get(
     else:
         requirements_path = source_dir / 'requirements.txt'

-    requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')
+    requirements_path_in = requirements_path.parent / (
+        requirements_path.stem + '.in'
+    )

     requirements_in: list[str] = []

@@ -436,10 +459,15 @@ def env_bootstrap(

     requirements_in: list[str] = []

-    requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
+    requirements_in.extend(
+        ['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
+    )

     if pyproject.early_features:
-        early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))
+        early_dependencies = sum(
+            [pyproject.dependencies[o] for o in pyproject.early_features],
+            cast(list[str], []),
+        )

         logger.info(
             dict(
@@ -508,7 +536,11 @@ def env_bootstrap(
     subprocess.check_call(
         [
             'uv',
-            *[o for o in bootstrap_settings.uv_args if not o in ['-U', '--upgrade']],
+            *[
+                o
+                for o in bootstrap_settings.uv_args
+                if not o in ['-U', '--upgrade']
+            ],
             'venv',
             *venv_python_version,
             *pip_find_links_args,

File diff suppressed because it is too large
@@ -31,7 +31,10 @@ def create_app() -> fastapi.FastAPI:
         logger.info(dict(msg='start loading app = {}'.format(app_config)))
         app_module, app_method, app_prefix = app_config.split(':')

-        app_router = cast(Callable[[], Any], getattr(importlib.import_module(app_module), app_method))()
+        app_router = cast(
+            Callable[[], Any],
+            getattr(importlib.import_module(app_module), app_method),
+        )()

         assert isinstance(app_router, fastapi.APIRouter)

@@ -172,9 +172,13 @@ class CLI(abc.ABC):
     ) -> None:
         from . import cli_bootstrap

-        pyproject = cli_bootstrap.pyproject_load(self.projects[project].source_dir / 'pyproject.toml')
+        pyproject = cli_bootstrap.pyproject_load(
+            self.projects[project].source_dir / 'pyproject.toml'
+        )

-        dependencies = sum([pyproject.dependencies[o] for o in features], cast(list[str], []))
+        dependencies = sum(
+            [pyproject.dependencies[o] for o in features], cast(list[str], [])
+        )

         pip_find_links: list[pathlib.Path] = []

@@ -216,7 +220,9 @@ class CLI(abc.ABC):
         force: bool,
     ) -> None:
         for k, d in self.dependencies.items():
-            whl_glob = self.dist_settings.wheel_dir / ('*%s*.whl' % d.name.replace('.', '_'))
+            whl_glob = self.dist_settings.wheel_dir / (
+                '*%s*.whl' % d.name.replace('.', '_')
+            )
             if len(glob.glob(str(whl_glob))) == 0 or force:
                 if d.source_path.exists():

@@ -256,7 +262,9 @@ class CLI(abc.ABC):
         def index_get(o: dict[str, Any]) -> tuple[Any, ...]:
             return (o['path'], o['stat'])

-        present_files_index = {index_get(o): o for o in present_files}
+        present_files_index = {
+            index_get(o): o for o in present_files
+        }

         new_files: list[dict[str, Any]] = []

@@ -295,7 +303,13 @@ class CLI(abc.ABC):
             [
                 pathlib.Path(o)
                 for o in glob.glob(
-                    str(self.dist_settings.env_path / 'lib' / 'python*' / '**' / 'pkgconfig'),
+                    str(
+                        self.dist_settings.env_path
+                        / 'lib'
+                        / 'python*'
+                        / '**'
+                        / 'pkgconfig'
+                    ),
                     recursive=True,
                 )
             ]
@@ -388,7 +402,18 @@ class CLI(abc.ABC):
             shutil.rmtree(pyproject_build_dir)

         if len(self.third_party_roots(project_name)) > 0:
-            extra_args.append('-Csetup-args=%s' % ('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)])))
+            extra_args.append(
+                '-Csetup-args=%s'
+                % (
+                    '-Dthird_party_roots=%s'
+                    % json.dumps(
+                        [
+                            str(o.absolute())
+                            for o in self.third_party_roots(project_name)
+                        ]
+                    )
+                )
+            )

         cmd = [
             sys.executable,
@@ -449,11 +474,21 @@ class CLI(abc.ABC):
             preserve_top_path=True,
         )

-        pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
+        pyproject = cli_bootstrap.pyproject_load(
+            project.source_dir / 'pyproject.toml'
+        )

-        pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root
+        pyproject_tool = (
+            pydantic.RootModel[PyProject.Tool]
+            .model_validate(pyproject.tool)
+            .root
+        )

-        if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.install:
+        if (
+            pyproject_tool.meson
+            and pyproject_tool.meson.args
+            and pyproject_tool.meson.args.install
+        ):
             argv = pyproject_tool.meson.args.install + argv

         cmd = [
@@ -495,7 +530,9 @@ class CLI(abc.ABC):
                 content = f.read()

             with io.open(o, 'w') as f:
-                f.write(content.replace('prefix=/', 'prefix=${pcfiledir}/../../'))
+                f.write(
+                    content.replace('prefix=/', 'prefix=${pcfiledir}/../../')
+                )

     def ninja(
         self,
@@ -589,18 +626,30 @@ class CLI(abc.ABC):
         res: list[pathlib.Path] = []

         if not project_name is None:
-            pyproject = cli_bootstrap.pyproject_load(self.projects[project_name].source_dir / 'pyproject.toml')
+            pyproject = cli_bootstrap.pyproject_load(
+                self.projects[project_name].source_dir / 'pyproject.toml'
+            )

             for third_party_root in pyproject.third_party_roots:
                 if third_party_root.package:
                     if not third_party_root.module_root:
-                        third_party_root.module_root = third_party_root.package.replace('.', os.path.sep)
+                        third_party_root.module_root = (
+                            third_party_root.package.replace('.', os.path.sep)
+                        )
                     if not third_party_root.path:
                         packages = pip_show([third_party_root.package])
                         assert len(packages) == 1
-                        third_party_root.path = str(pathlib.Path(packages[0].location) / third_party_root.module_root / 'lib')
+                        third_party_root.path = str(
+                            pathlib.Path(packages[0].location)
+                            / third_party_root.module_root
+                            / 'lib'
+                        )
                 else:
-                    assert not third_party_root.package and not third_party_root.module_root and third_party_root.path
+                    assert (
+                        not third_party_root.package
+                        and not third_party_root.module_root
+                        and third_party_root.path
+                    )

                 res.append(pathlib.Path(third_party_root.path))

@@ -616,8 +665,12 @@ class CLI(abc.ABC):
                 path: Optional[pathlib.Path] = None

     @property
-    def meson_toolchains(self) -> dict[str, meson_toolchains_t.res_t.toolchain_t]:
-        t1 = pathlib.Path(importlib.import_module('online.fxreader.pr34').__path__[0])
+    def meson_toolchains(
+        self,
+    ) -> dict[str, meson_toolchains_t.res_t.toolchain_t]:
+        t1 = pathlib.Path(
+            importlib.import_module('online.fxreader.pr34').__path__[0]
+        )
         toolchains = glob.glob(str(t1 / 'meson' / 'toolchains' / '*'))

         res: dict[str, CLI.meson_toolchains_t.res_t.toolchain_t] = dict()
@@ -642,7 +695,11 @@ class CLI(abc.ABC):
     ) -> list[str]:
         from . import argparse as pr34_argparse

-        if pyproject_tool.meson and pyproject_tool.meson.args and pyproject_tool.meson.args.setup:
+        if (
+            pyproject_tool.meson
+            and pyproject_tool.meson.args
+            and pyproject_tool.meson.args.setup
+        ):
             extra_args = pyproject_tool.meson.args.setup + extra_args

         parser = argparse.ArgumentParser()
@@ -657,8 +714,13 @@ class CLI(abc.ABC):
         options, args = pr34_argparse.parse_args(parser, extra_args)

         if not options.cross_file is None:
-            if not options.cross_file.exists() and (not options.cross_file.is_absolute() and options.cross_file.stem in self.meson_toolchains):
-                options.cross_file = self.meson_toolchains[options.cross_file.stem].path
+            if not options.cross_file.exists() and (
+                not options.cross_file.is_absolute()
+                and options.cross_file.stem in self.meson_toolchains
+            ):
+                options.cross_file = self.meson_toolchains[
+                    options.cross_file.stem
+                ].path

             extra_args = ['--cross-file', str(options.cross_file)] + args

@@ -687,15 +749,26 @@ class CLI(abc.ABC):
         if env is None:
             env = dict()

-        pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
+        pyproject = cli_bootstrap.pyproject_load(
+            project.source_dir / 'pyproject.toml'
+        )

-        pyproject_tool = pydantic.RootModel[PyProject.Tool].model_validate(pyproject.tool).root
+        pyproject_tool = (
+            pydantic.RootModel[PyProject.Tool]
+            .model_validate(pyproject.tool)
+            .root
+        )

         logger.info(dict(env=env))

         if force:
             if (project.build_dir / mode).exists():
-                logger.info(dict(action='removing build dir', path=project.build_dir / mode))
+                logger.info(
+                    dict(
+                        action='removing build dir',
+                        path=project.build_dir / mode,
+                    )
+                )
                 shutil.rmtree(project.build_dir / mode)

         extra_args: list[str] = []
@@ -706,7 +779,15 @@ class CLI(abc.ABC):
         )

         if len(self.third_party_roots(project_name)) > 0:
-            extra_args.append('-Dthird_party_roots=%s' % json.dumps([str(o.absolute()) for o in self.third_party_roots(project_name)]))
+            extra_args.append(
+                '-Dthird_party_roots=%s'
+                % json.dumps(
+                    [
+                        str(o.absolute())
+                        for o in self.third_party_roots(project_name)
+                    ]
+                )
+            )

         cmd = [
             # shutil_which(
@@ -719,7 +800,9 @@ class CLI(abc.ABC):
             'setup',
             str(project.source_dir),
             str(project.build_dir / mode),
-            '--pkg-config-path={}'.format(json.dumps([str(o) for o in self.pkg_config_path(project_name)])),
+            '--pkg-config-path={}'.format(
+                json.dumps([str(o) for o in self.pkg_config_path(project_name)])
+            ),
             '-Dmodes=["{}"]'.format(mode),
             *extra_args,
             # '-Dpkgconfig.relocatable=true',
@@ -769,14 +852,21 @@ class CLI(abc.ABC):
             argv,
         )

-        pyproject = cli_bootstrap.pyproject_load(project.source_dir / 'pyproject.toml')
+        pyproject = cli_bootstrap.pyproject_load(
+            project.source_dir / 'pyproject.toml'
+        )

-        dependencies = sum([pyproject.dependencies[o] for o in options.features], cast(list[str], []))
+        dependencies = sum(
+            [pyproject.dependencies[o] for o in options.features],
+            cast(list[str], []),
+        )

         pip_find_links: list[pathlib.Path] = []

         if not pyproject.pip_find_links is None:
-            pip_find_links.extend([o for o in pyproject.pip_find_links if o.exists()])
+            pip_find_links.extend(
+                [o for o in pyproject.pip_find_links if o.exists()]
+            )

         requirements_name_get_res = cli_bootstrap.requirements_name_get(
             source_dir=project.source_dir,
@@ -885,7 +975,9 @@ class CLI(abc.ABC):

         assert options.module in [o.name for o in pyproject.modules]

-        modules: dict[str, cli_bootstrap.PyProject.Module] = {o.name: o for o in pyproject.modules}
+        modules: dict[str, cli_bootstrap.PyProject.Module] = {
+            o.name: o for o in pyproject.modules
+        }

         module = modules[options.module]

@@ -78,7 +78,9 @@ class PyProject:
     third_party_roots: list[ThirdPartyRoot] = dataclasses.field(
         default_factory=lambda: [],
     )
-    requirements: dict[str, pathlib.Path] = dataclasses.field(default_factory=lambda: dict())
+    requirements: dict[str, pathlib.Path] = dataclasses.field(
+        default_factory=lambda: dict()
+    )

     modules: list[Module] = dataclasses.field(
         default_factory=lambda: [],
@@ -124,7 +126,12 @@ def check_dict(
     else:
         VT_class = VT

-    assert all([isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class)) for k, v in value2.items()])
+    assert all(
+        [
+            isinstance(k, KT) and (VT_class is None or isinstance(v, VT_class))
+            for k, v in value2.items()
+        ]
+    )

     if VT is None:
         return cast(
@@ -233,7 +240,12 @@ def pyproject_load(
         str,
     )

-    if 'tool' in content and isinstance(content['tool'], dict) and tool_name in content['tool'] and isinstance(content['tool'][tool_name], dict):
+    if (
+        'tool' in content
+        and isinstance(content['tool'], dict)
+        and tool_name in content['tool']
+        and isinstance(content['tool'][tool_name], dict)
+    ):
         pr34_tool = check_dict(
             check_dict(
                 content['tool'],
@@ -246,7 +258,9 @@ def pyproject_load(
             res.early_features = pr34_tool['early_features']

         if 'pip_find_links' in pr34_tool:
-            res.pip_find_links = [d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']]
+            res.pip_find_links = [
+                d.parent / pathlib.Path(o) for o in pr34_tool['pip_find_links']
+            ]

         if 'runtime_libdirs' in pr34_tool:
             res.runtime_libdirs = [
@@ -265,7 +279,9 @@ def pyproject_load(
         if 'third_party_roots' in pr34_tool:
             for o in check_list(pr34_tool['third_party_roots']):
                 o2 = check_dict(o, str, str)
-                assert all([k in {'package', 'module_root', 'path'} for k in o2])
+                assert all(
+                    [k in {'package', 'module_root', 'path'} for k in o2]
+                )

                 res.third_party_roots.append(
                     PyProject.ThirdPartyRoot(
@@ -279,7 +295,9 @@ def pyproject_load(
             res.requirements = {
                 k: d.parent / pathlib.Path(v)
                 # pathlib.Path(o)
-                for k, v in check_dict(pr34_tool['requirements'], str, str).items()
+                for k, v in check_dict(
+                    pr34_tool['requirements'], str, str
+                ).items()
             }

         if 'modules' in pr34_tool:
@@ -329,7 +347,10 @@ class BootstrapSettings:
         ).strip()
     )
     pip_check_conflicts: Optional[bool] = dataclasses.field(
-        default_factory=lambda: os.environ.get('PIP_CHECK_CONFLICTS', json.dumps(True)) in [json.dumps(True)],
+        default_factory=lambda: os.environ.get(
+            'PIP_CHECK_CONFLICTS', json.dumps(True)
+        )
+        in [json.dumps(True)],
     )
     uv_args: list[str] = dataclasses.field(
         default_factory=lambda: os.environ.get(
@@ -394,7 +415,9 @@ def requirements_name_get(
     else:
         requirements_path = source_dir / 'requirements.txt'

-    requirements_path_in = requirements_path.parent / (requirements_path.stem + '.in')
+    requirements_path_in = requirements_path.parent / (
+        requirements_path.stem + '.in'
+    )

     requirements_in: list[str] = []

@@ -440,10 +463,15 @@ def env_bootstrap(

     requirements_in: list[str] = []

-    requirements_in.extend(['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11'])
+    requirements_in.extend(
+        ['uv', 'pip', 'build', 'setuptools', 'meson-python', 'pybind11']
+    )

     if pyproject.early_features:
-        early_dependencies = sum([pyproject.dependencies[o] for o in pyproject.early_features], cast(list[str], []))
+        early_dependencies = sum(
+            [pyproject.dependencies[o] for o in pyproject.early_features],
+            cast(list[str], []),
+        )

         logger.info(
             dict(
@@ -532,7 +560,11 @@ def env_bootstrap(
     subprocess.check_call(
         [
             'uv',
-            *[o for o in bootstrap_settings.uv_args if not o in ['-U', '--upgrade']],
+            *[
+                o
+                for o in bootstrap_settings.uv_args
+                if not o in ['-U', '--upgrade']
+            ],
             'venv',
             *venv_python_version,
             *cache_find_links_args,

@@ -58,9 +58,25 @@ def run(argv: list[str]) -> None:

 def set_theme(theme: Literal['light', 'dark', 'default']) -> None:
     if theme == 'light':
-        subprocess.check_call(['gsettings', 'set', 'org.gnome.desktop.interface', 'color-scheme', 'prefer-light'])
+        subprocess.check_call(
+            [
+                'gsettings',
+                'set',
+                'org.gnome.desktop.interface',
+                'color-scheme',
+                'prefer-light',
+            ]
+        )
     elif theme == 'dark':
-        subprocess.check_call(['gsettings', 'set', 'org.gnome.desktop.interface', 'color-scheme', 'prefer-dark'])
+        subprocess.check_call(
+            [
+                'gsettings',
+                'set',
+                'org.gnome.desktop.interface',
+                'color-scheme',
+                'prefer-dark',
+            ]
+        )
     elif theme == 'default':
         subprocess.check_call(
             [

@@ -64,7 +64,9 @@ class PasswordUtils:
         raise NotImplementedError

     @classmethod
-    def _scrypt_init(cls, salt: bytes) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
+    def _scrypt_init(
+        cls, salt: bytes
+    ) -> cryptography.hazmat.primitives.kdf.scrypt.Scrypt:
         return cryptography.hazmat.primitives.kdf.scrypt.Scrypt(
             salt=salt,
             length=32,

@@ -10,5 +10,7 @@ def setup(level: Optional[int] = None) -> None:

     logging.basicConfig(
         level=level,
-        format=('%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'),
+        format=(
+            '%(levelname)s:%(name)s:%(message)s:%(process)d:%(asctime)s:%(pathname)s:%(funcName)s:%(lineno)s'
+        ),
     )

@@ -47,7 +47,15 @@ class Metric(pydantic.BaseModel):

         if o.type == 'gauge':
             samples.append(
-                Metric.Sample(parameters=s.parameters, value='NaN', timestamp=(s.timestamp + datetime.timedelta(seconds=15) if s.timestamp else None))
+                Metric.Sample(
+                    parameters=s.parameters,
+                    value='NaN',
+                    timestamp=(
+                        s.timestamp + datetime.timedelta(seconds=15)
+                        if s.timestamp
+                        else None
+                    ),
+                )
             )

         return ''.join(
@@ -65,7 +73,11 @@ class Metric(pydantic.BaseModel):
                     ]
                 ),
                 value=s2.value,
-                timestamp=('%.f' % (s2.timestamp.timestamp() * 1000,) if s2.timestamp else ''),
+                timestamp=(
+                    '%.f' % (s2.timestamp.timestamp() * 1000,)
+                    if s2.timestamp
+                    else ''
+                ),
             )
             for s2 in samples
         ]
@@ -87,9 +99,19 @@ def serialize(
             '{help}{type}{samples}'.format(
                 # help='# HELP %s some metric' % o.name,
                 # type='# TYPE %s counter' % o.name,
-                help=('# HELP {0} {1}\n'.format(o.name, o.help) if o.help else ''),
-                type=('# TYPE {0} {1}\n'.format(o.name, o.type) if o.type else ''),
-                samples=''.join([Metric.sample_serialize(o, s) for s in o.samples]),
+                help=(
+                    '# HELP {0} {1}\n'.format(o.name, o.help)
+                    if o.help
+                    else ''
+                ),
+                type=(
+                    '# TYPE {0} {1}\n'.format(o.name, o.type)
+                    if o.type
+                    else ''
+                ),
+                samples=''.join(
+                    [Metric.sample_serialize(o, s) for s in o.samples]
+                ),
             )
             for o in metrics
             if len(o.samples) > 0

@@ -38,7 +38,9 @@ class MypyFormatEntry:


 class MypyFormat:
-    vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='vscode', value='vscode')
+    vscode: ClassVar[MypyFormatEntry] = MypyFormatEntry(
+        name='vscode', value='vscode'
+    )
     json: ClassVar[MypyFormatEntry] = MypyFormatEntry(name='json', value='json')

     @classmethod
@@ -149,7 +151,11 @@ def run(
     assert not res.returncode is None

     errors = sorted(
-        [json.loads(o) for o in res.stdout.decode('utf-8').splitlines() if not o.strip() == ''],
+        [
+            json.loads(o)
+            for o in res.stdout.decode('utf-8').splitlines()
+            if not o.strip() == ''
+        ],
         key=lambda x: (
             x.get('file', ''),
             x.get('line', 0),

@@ -54,8 +54,21 @@ def runtime_libdirs_init(
     ld_library_path: list[pathlib.Path] = [
         o
         for o in [
-            *[o.absolute() for o in (project.runtime_libdirs if project.runtime_libdirs else [])],
-            *[pathlib.Path(o) for o in os.environ.get('LD_LIBRARY_PATH', '').split(os.path.pathsep) if o != ''],
+            *[
+                o.absolute()
+                for o in (
+                    project.runtime_libdirs
+                    if project.runtime_libdirs
+                    else []
+                )
+            ],
+            *[
+                pathlib.Path(o)
+                for o in os.environ.get('LD_LIBRARY_PATH', '').split(
+                    os.path.pathsep
+                )
+                if o != ''
+            ],
         ]
     ]

@@ -72,10 +85,16 @@ def runtime_libdirs_init(

         ld_library_path_present.append(o)

-    os.environ.update(LD_LIBRARY_PATH=os.path.pathsep.join([str(o) for o in ld_library_path_present]))
+    os.environ.update(
+        LD_LIBRARY_PATH=os.path.pathsep.join(
+            [str(o) for o in ld_library_path_present]
+        )
+    )

     for preload_path in project.runtime_preload or []:
-        for preload_found in glob.glob(str(preload_path.parent / ('lib%s.so' % preload_path.name))):
+        for preload_found in glob.glob(
+            str(preload_path.parent / ('lib%s.so' % preload_path.name))
+        ):
             logger.info(
                 dict(
                     preload_path=preload_path,

@@ -101,8 +101,20 @@ class pip_resolve_t:
         entries: Optional[list[download_info_t]] = None


-def pip_resolve_entries_to_txt(entries: list[pip_resolve_t.res_t.download_info_t]) -> str:
-    return '\n'.join(['#%s\n%s %s' % (o.url, o.constraint, ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256])) for o in entries])
+def pip_resolve_entries_to_txt(
+    entries: list[pip_resolve_t.res_t.download_info_t],
+) -> str:
+    return '\n'.join(
+        [
+            '#%s\n%s %s'
+            % (
+                o.url,
+                o.constraint,
+                ' '.join(['--hash=sha256:%s' % o2 for o2 in o.sha256]),
+            )
+            for o in entries
+        ]
+    )


 def pip_resolve(
@@ -128,7 +140,9 @@ def pip_resolve(
     import pip._internal.models.direct_url

     with contextlib.ExitStack() as stack:
-        stack.enter_context(pip._internal.utils.temp_dir.global_tempdir_manager())
+        stack.enter_context(
+            pip._internal.utils.temp_dir.global_tempdir_manager()
+        )

         t2 = pip._internal.cli.main_parser.create_main_parser()

@@ -166,15 +180,22 @@ def pip_resolve(
         pip._internal.cli.cmdoptions.check_dist_restriction(options)
         # t1._in_main_context = True
         session = t1.get_default_session(options)
-        target_python = pip._internal.cli.cmdoptions.make_target_python(options)
-        finder = cast(pip_resolve_t.build_package_finder_t, getattr(t1, '_build_package_finder'))(
+        target_python = pip._internal.cli.cmdoptions.make_target_python(
+            options
+        )
+        finder = cast(
+            pip_resolve_t.build_package_finder_t,
+            getattr(t1, '_build_package_finder'),
+        )(
             options=options,
             session=session,
             target_python=target_python,
             ignore_requires_python=options.ignore_requires_python,
         )

-        build_tracker = t1.enter_context(pip._internal.operations.build.build_tracker.get_build_tracker())
+        build_tracker = t1.enter_context(
+            pip._internal.operations.build.build_tracker.get_build_tracker()
+        )
         reqs = t1.get_requirements(
             [
                 #'pip', 'uv', 'ipython',
@@ -184,8 +205,12 @@ def pip_resolve(
             finder,
             session,
         )
-        pip._internal.req.req_install.check_legacy_setup_py_options(options, reqs)
-        directory = pip._internal.utils.temp_dir.TempDirectory(delete=True, kind='download', globally_managed=True)
+        pip._internal.req.req_install.check_legacy_setup_py_options(
+            options, reqs
+        )
+        directory = pip._internal.utils.temp_dir.TempDirectory(
+            delete=True, kind='download', globally_managed=True
+        )
         preparer = t1.make_requirement_preparer(
             temp_build_dir=directory,
             options=options,
@@ -205,7 +230,9 @@ def pip_resolve(
             py_version_info=options.python_version,
         )
         t1.trace_basic_info(finder)
-        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
+        requirement_set = resolver.resolve(
+            reqs, check_supported_wheels=True
+        )

         res = pip_resolve_t.res_t()

@@ -279,7 +306,9 @@ def pip_resolve(
             location,
         )

-        batch_downloader_call_def = pip._internal.network.download.BatchDownloader.__call__
+        batch_downloader_call_def = (
+            pip._internal.network.download.BatchDownloader.__call__
+        )

         def batch_downloader_call(
             _self: pip._internal.network.download.BatchDownloader,
@@ -298,7 +327,9 @@ def pip_resolve(
             return [(o, ('/dev/null', '')) for o in links]

         # base_resolver_resolve_def = pip._internal.resolution.base.BaseResolver.resolve
-        base_resolver_resolve_def = pip._internal.resolution.resolvelib.resolver.Resolver.resolve
+        base_resolver_resolve_def = (
+            pip._internal.resolution.resolvelib.resolver.Resolver.resolve
+        )

         result_requirements: list[RequirementSet | InstallRequirement] = []

@@ -309,7 +340,9 @@ def pip_resolve(
         ) -> RequirementSet:
             # print(args, kwargs)

-            res = base_resolver_resolve_def(_self, root_reqs, check_supported_wheels)
+            res = base_resolver_resolve_def(
+                _self, root_reqs, check_supported_wheels
+            )

             result_requirements.append(res)
             raise NotImplementedError
@@ -369,7 +402,13 @@ def pip_resolve(

         patches: list[Any] = []

-        patches.append(unittest.mock.patch.object(pip._internal.network.download.Downloader, '__call__', downloader_call))
+        patches.append(
+            unittest.mock.patch.object(
+                pip._internal.network.download.Downloader,
+                '__call__',
+                downloader_call,
+            )
+        )
         # patches.append(
         #     unittest.mock.patch.object(
         #         pip._internal.network.download.BatchDownloader,
@@ -574,4 +613,6 @@ def pip_check_conflicts(
         if line.strip() != ''
     ]

-    return pip_check_conflicts_t.res_t(status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates)
+    return pip_check_conflicts_t.res_t(
+        status=('error' if len(duplicates) > 0 else 'ok'), duplicates=duplicates
+    )

@@ -21,21 +21,28 @@ R = TypeVar('R')


 @overload
-def validate_params(view: Callable[..., Awaitable[R]]) -> Callable[..., Awaitable[R]]: ...
+def validate_params(
+    view: Callable[..., Awaitable[R]],
+) -> Callable[..., Awaitable[R]]: ...


 @overload
 def validate_params(view: Callable[..., R]) -> Callable[..., R]: ...


-def validate_params(view: Callable[..., Awaitable[R]] | Callable[..., R]) -> Callable[..., Awaitable[R]] | Callable[..., R]:
+def validate_params(
+    view: Callable[..., Awaitable[R]] | Callable[..., R],
+) -> Callable[..., Awaitable[R]] | Callable[..., R]:
     class Parameter:
         kind: Any
         annotation: Any

-    parameters = cast(Mapping[str, Parameter], inspect.signature(view).parameters)
+    parameters = cast(
+        Mapping[str, Parameter], inspect.signature(view).parameters
+    )

-    positional_parameters: collections.OrderedDict[str, type[Any]] = collections.OrderedDict(
+    positional_parameters: collections.OrderedDict[str, type[Any]] = (
+        collections.OrderedDict(
         (
             (k, v.annotation)
             for k, v in parameters.items()
@@ -46,6 +53,7 @@ def validate_params(view: Callable[..., Awaitable[R]] | Callable[..., R]) -> Cal
                 )
             )
         )
+    )
     positional_names = list(positional_parameters)

     model = pydantic.create_model(

@@ -23,7 +23,12 @@ def run(argv: list[str]):
         def format_option(self, *args: Any, **kwargs: Any) -> Any:
             def f1(text: str, width: Optional[int]) -> list[str]:
                 width = None
-                return '\n'.join([textwrap.fill('\t' + o, width, replace_whitespace=False) for o in text.splitlines()]).splitlines()
+                return '\n'.join(
+                    [
+                        textwrap.fill('\t' + o, width, replace_whitespace=False)
+                        for o in text.splitlines()
+                    ]
+                ).splitlines()

             t1 = inspect.getsource(optparse.IndentedHelpFormatter.format_option)
             t2 = (

@@ -74,7 +74,9 @@ class get_firefox_procs_ps_t:
         cmd: str


-def get_firefox_procs_ps(slice_name=None) -> list[get_firefox_procs_ps_t.res_t.entry_t]:
+def get_firefox_procs_ps(
+    slice_name=None,
+) -> list[get_firefox_procs_ps_t.res_t.entry_t]:
     entries: dict[int, dict[str, Any]] = dict()

     for regex, columns in [
@@ -182,7 +184,11 @@ def is_main_firefox(p):
     return False


-def kill_prioritized(procs: list['get_firefox_procs_ps_t.res_t.entry_t'], to_free_mb, low_priority_pids):
+def kill_prioritized(
+    procs: list['get_firefox_procs_ps_t.res_t.entry_t'],
+    to_free_mb,
+    low_priority_pids,
+):
     candidates = []
     for p in procs:
         if is_main_firefox(p):
@@ -224,7 +230,9 @@ def kill_prioritized(procs: list['get_firefox_procs_ps_t.res_t.entry_t'], to_fre
 # — systemd-run logic —


-def launch_firefox_with_limits(base_cmd, memory_high, swap_max, extra_args, unit_name):
+def launch_firefox_with_limits(
+    base_cmd, memory_high, swap_max, extra_args, unit_name
+):
     cmd = [
         'systemd-run',
         '--user',
@@ -250,7 +258,9 @@ def launch_firefox_with_limits(base_cmd, memory_high, swap_max, extra_args, unit


 def main():
-    os.makedirs(pathlib.Path('~/.cache/oom_firefox/').expanduser(), exist_ok=True)
+    os.makedirs(
+        pathlib.Path('~/.cache/oom_firefox/').expanduser(), exist_ok=True
+    )

     logging.basicConfig(
         level=logging.INFO,
@@ -263,14 +273,50 @@ def main():
         ],
     )

-    parser = argparse.ArgumentParser(description='Firefox memory manager with slice + graceful shutdown')
-    parser.add_argument('--max-mb', type=float, required=True, help='Memory threshold in MB (used for killing logic & MemoryHigh)')
-    parser.add_argument('--kill-percent', type=float, default=70.0, help='If over max, kill until usage ≤ this percent of max')
-    parser.add_argument('--swap-max-mb', type=float, default=None, help='MemorySwapMax (MB) for the systemd scope')
-    parser.add_argument('--interval', type=float, default=1.0, help='Monitoring interval in seconds')
-    parser.add_argument('--unit-name', type=str, default='firefox-limited', help='Name for systemd transient unit')
-    parser.add_argument('--firefox-extra', action='append', default=[], help='Extra CLI args to pass to Firefox (can repeat)')
-    parser.add_argument('firefox_cmd', nargs=argparse.REMAINDER, help='Firefox command + args (if launching it)')
+    parser = argparse.ArgumentParser(
+        description='Firefox memory manager with slice + graceful shutdown'
+    )
+    parser.add_argument(
+        '--max-mb',
+        type=float,
+        required=True,
+        help='Memory threshold in MB (used for killing logic & MemoryHigh)',
+    )
+    parser.add_argument(
+        '--kill-percent',
+        type=float,
+        default=70.0,
+        help='If over max, kill until usage ≤ this percent of max',
+    )
+    parser.add_argument(
+        '--swap-max-mb',
+        type=float,
+        default=None,
+        help='MemorySwapMax (MB) for the systemd scope',
+    )
+    parser.add_argument(
+        '--interval',
+        type=float,
+        default=1.0,
+        help='Monitoring interval in seconds',
+    )
+    parser.add_argument(
+        '--unit-name',
+        type=str,
+        default='firefox-limited',
+        help='Name for systemd transient unit',
+    )
+    parser.add_argument(
+        '--firefox-extra',
+        action='append',
+        default=[],
+        help='Extra CLI args to pass to Firefox (can repeat)',
+    )
+    parser.add_argument(
+        'firefox_cmd',
+        nargs=argparse.REMAINDER,
+        help='Firefox command + args (if launching it)',
+    )

     args = parser.parse_args()

@@ -323,7 +369,9 @@ def main():

         if total > limit:
             to_free = total - kill_to
-            killed, freed = kill_prioritized(procs, to_free, low_priority_pids)
+            killed, freed = kill_prioritized(
+                procs, to_free, low_priority_pids
+            )
             lines.append(f'Killed: {killed}')
             lines.append(f'Freed ≈ {freed:.1f} MB')
         else:
@@ -332,7 +380,11 @@ def main():
         if firefox_proc and firefox_proc.poll() is not None:
             print('Firefox died — restarting …', file=sys.stderr)
             firefox_proc = launch_firefox_with_limits(
-                args.firefox_cmd, memory_high=args.max_mb, swap_max=args.swap_max_mb, extra_args=args.firefox_extra, unit_name=args.unit_name
+                args.firefox_cmd,
+                memory_high=args.max_mb,
+                swap_max=args.swap_max_mb,
+                extra_args=args.firefox_extra,
+                unit_name=args.unit_name,
             )

         body.text = '\n'.join(lines)
@@ -354,7 +406,14 @@ def main():
             close_dialog()

         dialog = Dialog(
-            title='Enter low‑priority PIDs', body=ta, buttons=[Button(text='OK', handler=on_ok), Button(text='Cancel', handler=on_cancel)], width=60, modal=True
+            title='Enter low‑priority PIDs',
+            body=ta,
+            buttons=[
+                Button(text='OK', handler=on_ok),
+                Button(text='Cancel', handler=on_cancel),
+            ],
+            width=60,
+            modal=True,
         )
         f = Float(content=dialog, left=2, top=2)
         dialog_float[0] = f
@@ -365,7 +424,13 @@ def main():
         def on_close():
             close_dialog()

-        dialog = Dialog(title=title, body=Label(text=message), buttons=[Button(text='Close', handler=on_close)], width=50, modal=True)
+        dialog = Dialog(
+            title=title,
+            body=Label(text=message),
+            buttons=[Button(text='Close', handler=on_close)],
+            width=50,
+            modal=True,
+        )
         f = Float(content=dialog, left=4, top=4)
         dialog_float[0] = f
         root_floats.append(f)
@@ -409,7 +474,15 @@ def main():

     root = FloatContainer(
         content=HSplit(
-            [Frame(body, title='Firefox Memory Manager'), Window(height=1, content=FormattedTextControl('q=quit, m=PID, h=help, s=setting, a=about'))]
+            [
+                Frame(body, title='Firefox Memory Manager'),
+                Window(
+                    height=1,
+                    content=FormattedTextControl(
+                        'q=quit, m=PID, h=help, s=setting, a=about'
+                    ),
+                ),
+            ]
         ),
         floats=root_floats,
         modal=True,
@@ -457,7 +530,9 @@ def main():
     t.start()

     # refresh_body()
-    app.run(handle_sigint=True)  # from prompt‑toolkit API :contentReference[oaicite:0]{index=0}
+    app.run(
+        handle_sigint=True
+    )  # from prompt‑toolkit API :contentReference[oaicite:0]{index=0}

     t.join()

@@ -23,7 +23,13 @@ async def f2(device, timeout=None):


 async def f3(client):
-    t1 = [dict(service=o.__dict__, characteristics=[o2.__dict__ for o2 in o.characteristics]) for o in client.services]
+    t1 = [
+        dict(
+            service=o.__dict__,
+            characteristics=[o2.__dict__ for o2 in o.characteristics],
+        )
+        for o in client.services
+    ]
     return t1


@@ -43,7 +49,13 @@ async def f5(

         t5 = {i: o.details[0].name() for i, o in enumerate(t1)}

-        t2.extend([t1[k] for k, v in t5.items() if isinstance(v, str) and name_check(v)])
+        t2.extend(
+            [
+                t1[k]
+                for k, v in t5.items()
+                if isinstance(v, str) and name_check(v)
+            ]
+        )
     else:
         t2.extend(t1)

@@ -66,7 +78,9 @@ async def f4(
         assert name_check in [
             'watch fit',
         ]
-        name_check2 = lambda current_name: name_check.lower() in current_name.lower()
+        name_check2 = (
+            lambda current_name: name_check.lower() in current_name.lower()
+        )
     else:
         name_check2 = name_check

@@ -66,7 +66,13 @@ def build(content: str, module: M) -> M:
     # )
     t1.run()

-    return cast(M, Cython.Build.Inline.load_dynamic('_%s' % sha256sum, glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0]))
+    return cast(
+        M,
+        Cython.Build.Inline.load_dynamic(
+            '_%s' % sha256sum,
+            glob.glob(str(output_dir / ('_%s*.so' % sha256sum)))[0],
+        ),
+    )

     raise NotImplementedError

@@ -125,7 +131,9 @@ def mypyc_build(file_path: pathlib.Path) -> Any:
     # f.write(content)

     t1 = Cython.Build.Inline._get_build_extension()
-    t1.extensions = mypyc.build.mypycify([str(source_path)], target_dir=str(output_dir / 'build'))
+    t1.extensions = mypyc.build.mypycify(
+        [str(source_path)], target_dir=str(output_dir / 'build')
+    )
     t1.build_temp = str(output_dir)
     t1.build_lib = str(lib_dir)
     # t2 = Cython.Build.Inline.Extension(
@@ -147,7 +155,11 @@ def mypyc_build(file_path: pathlib.Path) -> Any:

 class Source:
     @staticmethod
-    def test2(_a: numpy.ndarray[Any, numpy.dtype[numpy.int64]], _id: numpy.dtype[numpy.int32] | int, T: float = 16) -> int:
+    def test2(
+        _a: numpy.ndarray[Any, numpy.dtype[numpy.int64]],
+        _id: numpy.dtype[numpy.int32] | int,
+        T: float = 16,
+    ) -> int:
         raise NotImplementedError


@@ -243,7 +255,11 @@ def test_cython(N: int = 4, T: int = 16) -> None:


 def test_mypyc(N: int = 4, W: int = 35) -> None:
-    cython2 = mypyc_build((pathlib.Path(__file__).parent / 'cython2.py').relative_to(pathlib.Path.cwd()))
+    cython2 = mypyc_build(
+        (pathlib.Path(__file__).parent / 'cython2.py').relative_to(
+            pathlib.Path.cwd()
+        )
+    )

     # from .cython2 import fib

@ -73,8 +73,21 @@ def kernel_2():
     from keras.layers.embeddings import Embedding
     from keras.layers.normalization import BatchNormalization
     from keras.utils import np_utils
-    from sklearn import preprocessing, decomposition, model_selection, metrics, pipeline
-    from keras.layers import GlobalMaxPooling1D, Conv1D, MaxPooling1D, Flatten, Bidirectional, SpatialDropout1D
+    from sklearn import (
+        preprocessing,
+        decomposition,
+        model_selection,
+        metrics,
+        pipeline,
+    )
+    from keras.layers import (
+        GlobalMaxPooling1D,
+        Conv1D,
+        MaxPooling1D,
+        Flatten,
+        Bidirectional,
+        SpatialDropout1D,
+    )
     from keras.preprocessing import sequence, text
     from keras.callbacks import EarlyStopping

@ -112,15 +125,25 @@ def kernel_2():
     print('REPLICAS: ', strategy.num_replicas_in_sync)

     # %% [code]
-    train = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv')
-    validation = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv')
-    test = pd.read_csv('/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv')
+    train = pd.read_csv(
+        '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/jigsaw-toxic-comment-train.csv'
+    )
+    validation = pd.read_csv(
+        '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/validation.csv'
+    )
+    test = pd.read_csv(
+        '/kaggle/input/jigsaw-multilingual-toxic-comment-classification/test.csv'
+    )

     # %% [markdown]
     # We will drop the other columns and approach this problem as a Binary Classification Problem and also we will have our exercise done on a smaller subsection of the dataset(only 12000 data points) to make it easier to train the models

     # %% [code]
-    train.drop(['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'], axis=1, inplace=True)
+    train.drop(
+        ['severe_toxic', 'obscene', 'threat', 'insult', 'identity_hate'],
+        axis=1,
+        inplace=True,
+    )

     # %% [code]
     train = train.loc[:12000, :]
@ -137,7 +160,12 @@ def kernel_2():

     # %% [code]
     xtrain, xvalid, ytrain, yvalid = train_test_split(
-        train.comment_text.values, train.toxic.values, stratify=train.toxic.values, random_state=42, test_size=0.2, shuffle=True
+        train.comment_text.values,
+        train.toxic.values,
+        stratify=train.toxic.values,
+        random_state=42,
+        test_size=0.2,
+        shuffle=True,
     )

     # %% [markdown]
@ -206,7 +234,9 @@ def kernel_2():
     model.add(Embedding(len(word_index) + 1, 300, input_length=max_len))
     model.add(SimpleRNN(100))
     model.add(Dense(1, activation='sigmoid'))
-    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
+    model.compile(
+        loss='binary_crossentropy', optimizer='adam', metrics=['accuracy']
+    )

     model.summary()

@ -253,7 +283,10 @@ def kernel_3(
         o_2['model'].load_weights('model.h5')
     else:
         o_2['model'].fit(
-            o_2['xtrain_pad'], o_2['ytrain'], nb_epoch=nb_epochs, batch_size=64 * o_2['strategy'].num_replicas_in_sync
+            o_2['xtrain_pad'],
+            o_2['ytrain'],
+            nb_epoch=nb_epochs,
+            batch_size=64 * o_2['strategy'].num_replicas_in_sync,
         )  # Multiplying by Strategy to run on TPU's
         o_2['model'].save_weights('model.h5')

@ -263,7 +296,9 @@ def kernel_3(

     # %% [code]
     scores_model = []
-    scores_model.append({'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])})
+    scores_model.append(
+        {'Model': 'SimpleRNN', 'AUC_Score': roc_auc(scores, o_2['yvalid'])}
+    )

     # %% [markdown]
     # ## Code Explanantion
@ -283,7 +318,12 @@ def kernel_4(
     import keras.preprocessing.sequence

     if input_texts is None:
-        input_texts = ['blahb blahb blah', 'Hello World!', 'This is very good!', 'A very non toxic comment! This is so polite and polished one!']
+        input_texts = [
+            'blahb blahb blah',
+            'Hello World!',
+            'This is very good!',
+            'A very non toxic comment! This is so polite and polished one!',
+        ]

     t6 = []
     for o in input_texts:
@ -291,7 +331,9 @@ def kernel_4(
         t2 = o_2['token'].texts_to_sequences(
             [t1],
         )
-        t3 = keras.preprocessing.sequence.pad_sequences(t2, maxlen=o_2['max_len'])
+        t3 = keras.preprocessing.sequence.pad_sequences(
+            t2, maxlen=o_2['max_len']
+        )
         t4 = o_2['model'].predict(
             t3,
         )
@ -42,12 +42,26 @@ def kernel_2(
 ):
     t1 = {}

-    for k in ['playerTwitterFollowers', 'teamTwitterFollowers', 'games', 'events']:
+    for k in [
+        'playerTwitterFollowers',
+        'teamTwitterFollowers',
+        'games',
+        'events',
+    ]:
         t4 = '%s.nc' % k
         if not os.path.exists(t4):
             print('started %s' % t4)
             t2 = '/kaggle/input/mlb-player-digital-engagement-forecasting/train.csv'
-            t3 = pandas.DataFrame(sum([json.loads(o) for o in o_1['t3'][t2][k].values if isinstance(o, str)], [])).to_xarray()
+            t3 = pandas.DataFrame(
+                sum(
+                    [
+                        json.loads(o)
+                        for o in o_1['t3'][t2][k].values
+                        if isinstance(o, str)
+                    ],
+                    [],
+                )
+            ).to_xarray()
             t3.to_netcdf(t4)
             print('cached %s' % t4)

@ -55,7 +69,9 @@ def kernel_2(
         t5 = '%s-v2.nc' % k
         if not os.path.exists(t5):
             t2 = xarray.load_dataset(t4)
-            t3 = t2.sel(index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1))
+            t3 = t2.sel(
+                index=numpy.arange(2017653 - 10 * 1000, 2017653 + 1)
+            )
             t3.to_netcdf(t5)
         t1[k] = xarray.load_dataset(t5)
         print('loaded %s' % t5)
@ -119,9 +135,15 @@ def kernel_3(should_exist=None):
 def kernel_4(
     o_3=None,
 ):
-    [print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)]
+    [
+        print(o_3['t5']['events'].to_dataframe().iloc[k].to_json(indent=4))
+        for k in range(-10, -1)
+    ]

-    [print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4)) for k in range(-10, -1)]
+    [
+        print(o_3['t5']['games'].to_dataframe().iloc[k].to_json(indent=4))
+        for k in range(-10, -1)
+    ]

     t4 = 'https://www.youtube.com/watch?v=reaC7BHgL3M'

@ -264,7 +286,9 @@ def kernel_6(

         try:
             cap = cv2.VideoCapture(o)
-            fps = cap.get(cv2.CAP_PROP_FPS)  # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
+            fps = cap.get(
+                cv2.CAP_PROP_FPS
+            )  # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
             frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
             duration = frame_count / fps
         finally:
@ -454,15 +478,31 @@ def kernel_7(
         for k in layer:
             v = layer[k]
             if 'pool' in k:
-                layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])]
+                layers += [
+                    nn.MaxPool2d(
+                        kernel_size=v[0], stride=v[1], padding=v[2]
+                    )
+                ]
             else:
-                conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
+                conv2d = nn.Conv2d(
+                    in_channels=v[0],
+                    out_channels=v[1],
+                    kernel_size=v[2],
+                    stride=v[3],
+                    padding=v[4],
+                )
                 layers += [conv2d, nn.ReLU(inplace=True)]
         layer = list(layer_dict[-1].keys())
         k = layer[0]
         v = layer_dict[-1][k]

-        conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
+        conv2d = nn.Conv2d(
+            in_channels=v[0],
+            out_channels=v[1],
+            kernel_size=v[2],
+            stride=v[3],
+            padding=v[4],
+        )
         layers += [conv2d]

         return nn.Sequential(*layers)
@ -530,9 +570,19 @@ def kernel_7(
         for key in block:
             v = block[key]
             if 'pool' in key:
-                layers += [nn.MaxPool2d(kernel_size=v[0], stride=v[1], padding=v[2])]
+                layers += [
+                    nn.MaxPool2d(
+                        kernel_size=v[0], stride=v[1], padding=v[2]
+                    )
+                ]
             else:
-                conv2d = nn.Conv2d(in_channels=v[0], out_channels=v[1], kernel_size=v[2], stride=v[3], padding=v[4])
+                conv2d = nn.Conv2d(
+                    in_channels=v[0],
+                    out_channels=v[1],
+                    kernel_size=v[2],
+                    stride=v[3],
+                    padding=v[4],
+                )
                 layers += [conv2d, nn.ReLU(inplace=True)]

         models = {'block_0': nn.Sequential(*layers)}
@ -543,16 +593,38 @@ def kernel_7(

         return PoseEstimation(models)

-    def get_paf_and_heatmap(model, img_raw, scale_search, param_stride=8, box_size=368):
-        multiplier = [scale * box_size / img_raw.shape[0] for scale in scale_search]
+    def get_paf_and_heatmap(
+        model, img_raw, scale_search, param_stride=8, box_size=368
+    ):
+        multiplier = [
+            scale * box_size / img_raw.shape[0] for scale in scale_search
+        ]

-        heatmap_avg = torch.zeros((len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])).cuda()
-        paf_avg = torch.zeros((len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])).cuda()
+        heatmap_avg = torch.zeros(
+            (len(multiplier), 19, img_raw.shape[0], img_raw.shape[1])
+        ).cuda()
+        paf_avg = torch.zeros(
+            (len(multiplier), 38, img_raw.shape[0], img_raw.shape[1])
+        ).cuda()

         for i, scale in enumerate(multiplier):
-            img_test = cv2.resize(img_raw, (0, 0), fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC)
-            img_test_pad, pad = pad_right_down_corner(img_test, param_stride, param_stride)
-            img_test_pad = np.transpose(np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)) / 256 - 0.5
+            img_test = cv2.resize(
+                img_raw,
+                (0, 0),
+                fx=scale,
+                fy=scale,
+                interpolation=cv2.INTER_CUBIC,
+            )
+            img_test_pad, pad = pad_right_down_corner(
+                img_test, param_stride, param_stride
+            )
+            img_test_pad = (
+                np.transpose(
+                    np.float32(img_test_pad[:, :, :, np.newaxis]), (3, 2, 0, 1)
+                )
+                / 256
+                - 0.5
+            )

             feed = Variable(torch.from_numpy(img_test_pad)).cuda()
             output1, output2 = model(feed)
@ -560,17 +632,27 @@ def kernel_7(
             # print(output1.size())
             # print(output2.size())

-            heatmap = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output2)
+            heatmap = nn.UpsamplingBilinear2d(
+                (img_raw.shape[0], img_raw.shape[1])
+            ).cuda()(output2)

-            paf = nn.UpsamplingBilinear2d((img_raw.shape[0], img_raw.shape[1])).cuda()(output1)
+            paf = nn.UpsamplingBilinear2d(
+                (img_raw.shape[0], img_raw.shape[1])
+            ).cuda()(output1)

             heatmap_avg[i] = heatmap[0].data
             paf_avg[i] = paf[0].data

-        heatmap_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1), 1, 2).cuda()
+        heatmap_avg = torch.transpose(
+            torch.transpose(torch.squeeze(torch.mean(heatmap_avg, 0)), 0, 1),
+            1,
+            2,
+        ).cuda()
         heatmap_avg = heatmap_avg.cpu().numpy()

-        paf_avg = torch.transpose(torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2).cuda()
+        paf_avg = torch.transpose(
+            torch.transpose(torch.squeeze(torch.mean(paf_avg, 0)), 0, 1), 1, 2
+        ).cuda()
         paf_avg = paf_avg.cpu().numpy()

         return paf_avg, heatmap_avg
@ -592,20 +674,34 @@ def kernel_7(
             map_down = np.zeros(map_gau.shape)
             map_down[:, :-1] = map_gau[:, 1:]

-            peaks_binary = np.logical_and.reduce((map_gau >= map_left, map_gau >= map_right, map_gau >= map_up, map_gau >= map_down, map_gau > param_thre1))
+            peaks_binary = np.logical_and.reduce(
+                (
+                    map_gau >= map_left,
+                    map_gau >= map_right,
+                    map_gau >= map_up,
+                    map_gau >= map_down,
+                    map_gau > param_thre1,
+                )
+            )

-            peaks = zip(np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0])  # note reverse
+            peaks = zip(
+                np.nonzero(peaks_binary)[1], np.nonzero(peaks_binary)[0]
+            )  # note reverse
             peaks = list(peaks)
             peaks_with_score = [x + (map_ori[x[1], x[0]],) for x in peaks]
             ids = range(peak_counter, peak_counter + len(peaks))
-            peaks_with_score_and_id = [peaks_with_score[i] + (ids[i],) for i in range(len(ids))]
+            peaks_with_score_and_id = [
+                peaks_with_score[i] + (ids[i],) for i in range(len(ids))
+            ]

             all_peaks.append(peaks_with_score_and_id)
             peak_counter += len(peaks)

         return all_peaks

-    def extract_paf_info(img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5):
+    def extract_paf_info(
+        img_raw, paf_avg, all_peaks, param_thre2=0.05, param_thre3=0.5
+    ):
         connection_all = []
         special_k = []
         mid_num = 10
@ -626,27 +722,69 @@ def kernel_7(
                             raise ZeroDivisionError
                         vec = np.divide(vec, norm)

-                        startend = zip(np.linspace(candA[i][0], candB[j][0], num=mid_num), np.linspace(candA[i][1], candB[j][1], num=mid_num))
+                        startend = zip(
+                            np.linspace(candA[i][0], candB[j][0], num=mid_num),
+                            np.linspace(candA[i][1], candB[j][1], num=mid_num),
+                        )
                         startend = list(startend)

-                        vec_x = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 0] for I in range(len(startend))])
-                        vec_y = np.array([score_mid[int(round(startend[I][1])), int(round(startend[I][0])), 1] for I in range(len(startend))])
+                        vec_x = np.array(
+                            [
+                                score_mid[
+                                    int(round(startend[I][1])),
+                                    int(round(startend[I][0])),
+                                    0,
+                                ]
+                                for I in range(len(startend))
+                            ]
+                        )
+                        vec_y = np.array(
+                            [
+                                score_mid[
+                                    int(round(startend[I][1])),
+                                    int(round(startend[I][0])),
+                                    1,
+                                ]
+                                for I in range(len(startend))
+                            ]
+                        )

-                        score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(vec_y, vec[1])
-                        score_with_dist_prior = sum(score_midpts) / len(score_midpts)
-                        score_with_dist_prior += min(0.5 * img_raw.shape[0] / norm - 1, 0)
+                        score_midpts = np.multiply(vec_x, vec[0]) + np.multiply(
+                            vec_y, vec[1]
+                        )
+                        score_with_dist_prior = sum(score_midpts) / len(
+                            score_midpts
+                        )
+                        score_with_dist_prior += min(
+                            0.5 * img_raw.shape[0] / norm - 1, 0
+                        )

-                        criterion1 = len(np.nonzero(score_midpts > param_thre2)[0]) > 0.8 * len(score_midpts)
+                        criterion1 = len(
+                            np.nonzero(score_midpts > param_thre2)[0]
+                        ) > 0.8 * len(score_midpts)
                         criterion2 = score_with_dist_prior > 0
                         if criterion1 and criterion2:
-                            connection_candidate.append([i, j, score_with_dist_prior, score_with_dist_prior + candA[i][2] + candB[j][2]])
+                            connection_candidate.append(
+                                [
+                                    i,
+                                    j,
+                                    score_with_dist_prior,
+                                    score_with_dist_prior
+                                    + candA[i][2]
+                                    + candB[j][2],
+                                ]
+                            )

-                connection_candidate = sorted(connection_candidate, key=lambda x: x[2], reverse=True)
+                connection_candidate = sorted(
+                    connection_candidate, key=lambda x: x[2], reverse=True
+                )
                 connection = np.zeros((0, 5))
                 for c in range(len(connection_candidate)):
                     i, j, s = connection_candidate[c][0:3]
                     if i not in connection[:, 3] and j not in connection[:, 4]:
-                        connection = np.vstack([connection, [candA[i][3], candB[j][3], s, i, j]])
+                        connection = np.vstack(
+                            [connection, [candA[i][3], candB[j][3], s, i, j]]
+                        )
                         if len(connection) >= min(nA, nB):
                             break

@ -661,7 +799,9 @@ def kernel_7(
         # last number in each row is the total parts number of that person
         # the second last number in each row is the score of the overall configuration
         subset = -1 * np.ones((0, 20))
-        candidate = np.array([item for sublist in all_peaks for item in sublist])
+        candidate = np.array(
+            [item for sublist in all_peaks for item in sublist]
+        )

         for k in range(len(map_ids)):
             if k not in special_k:
@ -673,7 +813,10 @@ def kernel_7(
                     found = 0
                     subset_idx = [-1, -1]
                     for j in range(len(subset)):  # 1:size(subset,1):
-                        if subset[j][indexA] == partAs[i] or subset[j][indexB] == partBs[i]:
+                        if (
+                            subset[j][indexA] == partAs[i]
+                            or subset[j][indexB] == partBs[i]
+                        ):
                             subset_idx[found] = j
                             found += 1

@ -682,11 +825,17 @@ def kernel_7(
                         if subset[j][indexB] != partBs[i]:
                             subset[j][indexB] = partBs[i]
                             subset[j][-1] += 1
-                            subset[j][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2]
+                            subset[j][-2] += (
+                                candidate[partBs[i].astype(int), 2]
+                                + connection_all[k][i][2]
+                            )
                     elif found == 2:  # if found 2 and disjoint, merge them
                         j1, j2 = subset_idx
                         print('found = 2')
-                        membership = ((subset[j1] >= 0).astype(int) + (subset[j2] >= 0).astype(int))[:-2]
+                        membership = (
+                            (subset[j1] >= 0).astype(int)
+                            + (subset[j2] >= 0).astype(int)
+                        )[:-2]
                         if len(np.nonzero(membership == 2)[0]) == 0:  # merge
                             subset[j1][:-2] += subset[j2][:-2] + 1
                             subset[j1][-2:] += subset[j2][-2:]
@ -695,7 +844,10 @@ def kernel_7(
                         else:  # as like found == 1
                             subset[j1][indexB] = partBs[i]
                             subset[j1][-1] += 1
-                            subset[j1][-2] += candidate[partBs[i].astype(int), 2] + connection_all[k][i][2]
+                            subset[j1][-2] += (
+                                candidate[partBs[i].astype(int), 2]
+                                + connection_all[k][i][2]
+                            )

                     # if find no partA in the subset, create a new subset
                     elif not found and k < 17:
@ -703,7 +855,14 @@ def kernel_7(
                         row[indexA] = partAs[i]
                         row[indexB] = partBs[i]
                         row[-1] = 2
-                        row[-2] = sum(candidate[connection_all[k][i, :2].astype(int), 2]) + connection_all[k][i][2]
+                        row[-2] = (
+                            sum(
+                                candidate[
+                                    connection_all[k][i, :2].astype(int), 2
+                                ]
+                            )
+                            + connection_all[k][i][2]
+                        )
                         subset = np.vstack([subset, row])
         return subset, candidate

@ -718,7 +877,9 @@ def kernel_7(

         for i in range(18):
             for j in range(len(all_peaks[i])):
-                cv2.circle(img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1)
+                cv2.circle(
+                    img_canvas, all_peaks[i][j][0:2], 4, colors[i], thickness=-1
+                )

         return subset, img_canvas

@ -735,9 +896,18 @@ def kernel_7(
             mY = np.mean(Y)
             length = ((X[0] - X[1]) ** 2 + (Y[0] - Y[1]) ** 2) ** 0.5
             angle = math.degrees(math.atan2(X[0] - X[1], Y[0] - Y[1]))
-            polygon = cv2.ellipse2Poly((int(mY), int(mX)), (int(length / 2), stickwidth), int(angle), 0, 360, 1)
+            polygon = cv2.ellipse2Poly(
+                (int(mY), int(mX)),
+                (int(length / 2), stickwidth),
+                int(angle),
+                0,
+                360,
+                1,
+            )
             cv2.fillConvexPoly(cur_canvas, polygon, colors[i])
-            img_canvas = cv2.addWeighted(img_canvas, 0.4, cur_canvas, 0.6, 0)
+            img_canvas = cv2.addWeighted(
+                img_canvas, 0.4, cur_canvas, 0.6, 0
+            )

         return img_canvas

@ -754,11 +924,17 @@ def kernel_7(
             img_padded = img
         pad_up = np.tile(img_padded[0:1, :, :] * 0 + pad_value, (pad[0], 1, 1))
         img_padded = np.concatenate((pad_up, img_padded), axis=0)
-        pad_left = np.tile(img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1))
+        pad_left = np.tile(
+            img_padded[:, 0:1, :] * 0 + pad_value, (1, pad[1], 1)
+        )
         img_padded = np.concatenate((pad_left, img_padded), axis=1)
-        pad_down = np.tile(img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1))
+        pad_down = np.tile(
+            img_padded[-2:-1, :, :] * 0 + pad_value, (pad[2], 1, 1)
+        )
         img_padded = np.concatenate((img_padded, pad_down), axis=0)
-        pad_right = np.tile(img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1))
+        pad_right = np.tile(
+            img_padded[:, -2:-1, :] * 0 + pad_value, (1, pad[3], 1)
+        )
         img_padded = np.concatenate((img_padded, pad_right), axis=1)

         return img_padded, pad
@ -784,11 +960,15 @@ def kernel_7(

     # In[4]:

-    state_dict = torch.load(model)['state_dict']  # getting the pre-trained model's parameters
+    state_dict = torch.load(model)[
+        'state_dict'
+    ]  # getting the pre-trained model's parameters
     # A state_dict is simply a Python dictionary object that maps each layer to its parameter tensor.

     model_pose = get_pose_model()  # building the model (see fn. defn. above). To see the architecture, see below cell.
-    model_pose.load_state_dict(state_dict)  # Loading the parameters (weights, biases) into the model.
+    model_pose.load_state_dict(
+        state_dict
+    )  # Loading the parameters (weights, biases) into the model.

     model_pose.float()  # I'm not sure why this is used. No difference if you remove it.

@ -797,7 +977,9 @@ def kernel_7(

     if use_gpu:
         model_pose.cuda()
-        model_pose = torch.nn.DataParallel(model_pose, device_ids=range(torch.cuda.device_count()))
+        model_pose = torch.nn.DataParallel(
+            model_pose, device_ids=range(torch.cuda.device_count())
+        )
         cudnn.benchmark = True

     def estimate_pose(
@ -833,7 +1015,9 @@ def kernel_7(
         img_points = None

         try:
-            paf_info, heatmap_info = get_paf_and_heatmap(model_pose, img_ori, scale_param)
+            paf_info, heatmap_info = get_paf_and_heatmap(
+                model_pose, img_ori, scale_param
+            )
             peaks = extract_heatmap_info(heatmap_info)
             sp_k, con_all = extract_paf_info(img_ori, paf_info, peaks)

@ -876,7 +1060,13 @@ def kernel_7(
 def kernel_8(
     o_7,
 ):
-    for i, o in enumerate(['../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg' % k for k in range(6)]):
+    for i, o in enumerate(
+        [
+            '../input/indonesian-traditional-dance/tgagrakanyar/tga_00%d0.jpg'
+            % k
+            for k in range(6)
+        ]
+    ):
         arch_image = o
         img_ori = o_7['cv2'].imread(arch_image)
         o_7['estimate_pose'](img_ori)
@ -887,7 +1077,9 @@ def kernel_9_benchmark(
 ):
     import datetime

-    t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg')
+    t1 = o_7['cv2'].imread(
+        '../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg'
+    )
     t5 = 10
     t2 = datetime.datetime.now()
     for k in range(t5):
@ -905,7 +1097,9 @@ def kernel_10():
     import torch

     # Model
-    model = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # or yolov5m, yolov5x, custom
+    model = torch.hub.load(
+        'ultralytics/yolov5', 'yolov5s'
+    )  # or yolov5m, yolov5x, custom

     # Images
     img = 'https://ultralytics.com/images/zidane.jpg'  # or file, PIL, OpenCV, numpy, multiple
@ -927,7 +1121,9 @@ def kernel_11_benchmark(
 ):
     import datetime

-    t1 = o_7['cv2'].imread('../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg')
+    t1 = o_7['cv2'].imread(
+        '../input/indonesian-traditional-dance/tgagrakanyar/tga_0000.jpg'
+    )
     t5 = 10
     t2 = datetime.datetime.now()
     for k in range(t5):
@ -956,7 +1152,18 @@ def kernel_13(

     if not len(t4) > 0 or not o_6 is None:
         t1 = pandas.concat(
-            sum([[o2['t11'][0].assign(frame_id=k, video_path=o['video_path']) for k, o2 in enumerate(o['frames'])] for o in o_6['t8']], [])
+            sum(
+                [
+                    [
+                        o2['t11'][0].assign(
+                            frame_id=k, video_path=o['video_path']
+                        )
+                        for k, o2 in enumerate(o['frames'])
+                    ]
+                    for o in o_6['t8']
+                ],
+                [],
+            )
         ).to_xarray()
         t5 = t3[0]
         t1.to_netcdf(t5)
@ -1028,7 +1235,9 @@ def kernel_14(
 def kernel_15(
     o_14,
 ):
-    t1 = pandas.DataFrame(numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)).T
+    t1 = pandas.DataFrame(
+        numpy.unique(o_14['o_13']['t1']['name'].data, return_counts=True)
+    ).T
     pprint.pprint(
         dict(
             t1=t1,
@ -1078,7 +1287,9 @@ def kernel_15(
         t12 = cv2.cvtColor(t11, cv2.COLOR_BGR2RGB)
         t13 = t12.copy()
         t15 = numpy.array([t8.xcenter, t8.ycenter, t8.width, t8.height])
-        t16 = numpy.array([t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]])
+        t16 = numpy.array(
+            [t13.shape[1], t13.shape[0], t13.shape[1], t13.shape[0]]
+        )
         t17 = t15 * t16
         t18 = t17[:2] - t17[2:] / 2
         t19 = t17[:2] + t17[2:] / 2
@ -1340,7 +1551,10 @@ def kernel_20(
     t1 = numpy.array(o_18['t2']['t7'][0]['keypoints']).reshape(17, -1)
     t2 = o_18['t2']['t6'][0]
     t3 = o_18['t2']['t1'][0]['image_canvas'].copy()
-    assert o_18['t2']['t7'][0]['image_id'] == os.path.split(o_18['t2']['t1'][0]['image_name'])[1]
+    assert (
+        o_18['t2']['t7'][0]['image_id']
+        == os.path.split(o_18['t2']['t1'][0]['image_name'])[1]
+    )

     for i, o2 in enumerate(o_21['p_color']):
         if i >= 17:
@ -1449,7 +1663,16 @@ def kernel_22(o_18):

     o_31 = kernel_31(
         image_id=[o['image_id'] for o in t1],
-        image_size=numpy.array([[list(o['image_canvas'].shape) for o in o_18['t2']['t1'] if o['image_name'] == t1[i]['image_id']][0] for i in range(len(t2))]),
+        image_size=numpy.array(
+            [
+                [
+                    list(o['image_canvas'].shape)
+                    for o in o_18['t2']['t1']
+                    if o['image_name'] == t1[i]['image_id']
+                ][0]
+                for i in range(len(t2))
+            ]
+        ),
         keypoints=numpy.stack(t2, axis=0),
     )
     t12 = o_31['t12']
@ -1558,7 +1781,11 @@ def kernel_25(images, delay=None):


 def kernel_26(o_18, image_name):
-    t1 = [i for i, o in enumerate(o_18['t2']['t1']) if o['image_name'] == image_name]
+    t1 = [
+        i
+        for i, o in enumerate(o_18['t2']['t1'])
+        if o['image_name'] == image_name
+    ]
     assert len(t1) == 1
     return t1[0]

@ -1580,7 +1807,11 @@ def kernel_23(o_18, o_22, ids=None):
         t9 = kernel_26(o_18=o_18, image_name=t3['image_name'])
         t4 = o_18['t2']['t1'][t9]['image_canvas']
         t10 = o_18['t2']['t6'][t9]
-        t4 = [o['image_canvas'] for o in o_18['t2']['t1'] if o['image_name'] == t3['image_name']]
+        t4 = [
+            o['image_canvas']
+            for o in o_18['t2']['t1']
+            if o['image_name'] == t3['image_name']
+        ]
         assert len(t4) == 1
         t5 = t4[0]
         t6 = kernel_24(t5, t3['keypoints'])
@ -1641,7 +1872,9 @@ def kernel_27():
     """ % (t4, t2)
     if False:
         pprint.pprint([t4, t2, t6])
-    with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
+    with subprocess.Popen(
+        t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+    ) as p:
         if False:
             pprint.pprint(p.communicate())
         p.wait()
@ -1669,7 +1902,9 @@ def kernel_28(
         max_seconds = 999999

     if video_path is None:
-        video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        video_path = (
+            '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        )
     t5 = video_path
     t3 = '/kaggle/working/kernel_28-output%s.dir' % video_id
     t13 = '/root/kernel_28-output.dir/tmp-slice'
@ -1679,7 +1914,9 @@ def kernel_28(

     try:
         cap = cv2.VideoCapture(t5)
-        fps = cap.get(cv2.CAP_PROP_FPS)  # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
+        fps = cap.get(
+            cv2.CAP_PROP_FPS
+        )  # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
         frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
         real_duration = frame_count / fps
         duration = min(real_duration, max_seconds)
@ -1739,7 +1976,9 @@ def kernel_28(
             t6,
         ]
     )
-    with subprocess.Popen(t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as p:
+    with subprocess.Popen(
+        t6, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+    ) as p:
         if False:
             pprint.pprint(p.communicate())
         p.wait()
@ -1757,7 +1996,9 @@ def kernel_29(
         video_id = ''

     if video_path is None:
-        video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        video_path = (
+            '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        )

     assert os.path.exists(video_path)

@ -1771,7 +2012,13 @@ def kernel_29(
     t7 = [o for o in t6 if os.path.exists(o)]

     if len(t7) == 0:
-        t1 = [dict(data=json.load(io.open(o, 'r')), input_path=o) for o in glob.glob('/kaggle/working/kernel_28-output%s.dir/slice-*/*.json' % video_id)]
+        t1 = [
+            dict(data=json.load(io.open(o, 'r')), input_path=o)
+            for o in glob.glob(
+                '/kaggle/working/kernel_28-output%s.dir/slice-*/*.json'
+                % video_id
+            )
+        ]

         assert len(t1) > 0

@ -1835,7 +2082,9 @@ def kernel_30(
         low_mean_conf = 0.6

     if video_path is None:
-        video_path = '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        video_path = (
+            '/kaggle/working/ATL AT TOR - April 19, 2015-T0MUK91ZWys.mp4'
+        )

     if max_frames is None:
         max_frames = 9999
@ -2045,7 +2294,10 @@ def kernel_31(image_id, image_size, keypoints):

         ab = [a[0] - b[0], a[1] - b[1]]
         ab1 = [c[0] - d[0], c[1] - d[1]]
-        cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2) + 1e-8)
+        cos = abs(ab[0] * ab1[0] + ab[1] * ab1[1]) / (
+            sqrt(ab[0] ** 2 + ab[1] ** 2) * sqrt(ab1[0] ** 2 + ab1[1] ** 2)
+            + 1e-8
+        )
         ang = acos(cos)
         return ang * 180 / np.pi

@ -2204,7 +2456,11 @@ def kernel_33():
     o_22 = kernel_22(o_18=o_18)
     import pandas

-    o_23 = kernel_23(o_18=o_18, o_22=o_22, ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values)
+    o_23 = kernel_23(
+        o_18=o_18,
+        o_22=o_22,
+        ids=pandas.DataFrame(o_22['t4']).query('portion > 0.1').index.values,
+    )
     o_27 = kernel_27()
     o_28 = kernel_28()
     o_29 = kernel_29()
@ -2273,7 +2529,9 @@ def kernel_36():
     # import os
     from os.path import exists, join, basename, splitext

-    git_repo_url = 'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git'
+    git_repo_url = (
+        'https://github.com/CMU-Perceptual-Computing-Lab/openpose.git'
+    )
     project_name = splitext(basename(git_repo_url))[0]

     if 1 or not exists(project_name):
@ -2282,8 +2540,18 @@ def kernel_36():
         print('install new CMake becaue of CUDA10')
         cmake_version = 'cmake-3.20.2-linux-x86_64.tar.gz'
         if not exists(cmake_version):
-            assert os.system(r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """) == 0
-            assert os.system(r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """) == 0
+            assert (
+                os.system(
+                    r"""!wget -q 'https://cmake.org/files/v3.20/{cmake_version}' """
+                )
+                == 0
+            )
+            assert (
+                os.system(
+                    r"""!tar xfz {cmake_version} --strip-components=1 -C /usr/local """
+                )
+                == 0
+            )

         print('clone openpose')
         assert os.system(r"""!git clone -q --depth 1 $git_repo_url """) == 0
@ -2295,7 +2563,12 @@ def kernel_36():
             == 0
         )
         print('build openpose')
-        assert os.system(r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` """) == 0
+        assert (
+            os.system(
+                r"""!cd openpose && rm -rf build || true && mkdir build && cd build && cmake .. && make -j`nproc` """
+            )
+            == 0
+        )

     """## From a Google Drive's folder"""

@ -2310,7 +2583,9 @@ def kernel_36():
         print(filename)
         colab_video_path = folder_path + filename
         print(colab_video_path)
-        colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+        colab_openpose_video_path = (
+            colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+        )
         print(colab_openpose_video_path)
         if not exists(colab_openpose_video_path):
             assert (
@ -2325,9 +2600,16 @@ def kernel_36():
         assert os.system(r"""!pip install youtube-dl """) == 0

         youtube_id = '2021-05-07_22-00-55_UTC'
-        assert os.system(r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """) == 0
+        assert (
+            os.system(
+                r"""!youtube-dl -f mp4 -o '/content/drive/My Drive/openpose/%(id)s.mp4' {youtube_id} """
+            )
+            == 0
+        )
         colab_video_path = '/content/drive/My Drive/openpose/' + youtube_id + '.mp4'
-        colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+        colab_openpose_video_path = (
+            colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+        )

         assert (
             os.system(
@ -2352,7 +2634,9 @@ def kernel_36():
     # from os.path import exists, join, basename, splitext
     # colab_video_path = '/content/drive/My Drive/bachata.mp4'
     colab_video_path = '/content/output.mp4'
-    colab_openpose_video_path = colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+    colab_openpose_video_path = (
+        colab_video_path.replace('.mp4', '') + '-openpose.mp4'
+    )

     assert (
         os.system(
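Note on the changes above: every hunk in this commit is a mechanical rewrap to an 80-column limit; no runtime behavior changes. For reference, a minimal sketch of a ruff configuration that would produce this style, given as an assumption for illustration only, since the repository's actual ruff settings are not part of this diff:

    # pyproject.toml (hypothetical fragment; the repo's real config is not shown in this commit)
    [tool.ruff]
    # hard wrap at 80 columns
    line-length = 80

    [tool.ruff.format]
    # keep the single-quoted strings seen throughout the reformatted code
    quote-style = "single"

Running "ruff format ." from the project root with a config like this rewraps over-long calls the way the hunks above show: arguments are exploded one per line, and the trailing comma the formatter emits keeps them exploded on later runs.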