[+] archlinux: restructure into apps/, cli/, resolver/, tests/ with 2-level packages dict
1. separate pacman-specific types (apps/pacman/types.py) from general types (models.py); 2. split CLI into cli/main.py dispatcher with compile, archive, list_installed, download subcommands; 3. move cache_db into apps/cache/db.py, pacman client/db into apps/pacman/; 4. add pacman_manager_t (apps/pacman/manager.py) implementing manager_t ABC; 5. move resolver into resolver/ with general.py (pure python) and solv.py (libsolv); 6. add solv_types.py for solv-internal types, separate from general models; 7. change package_index_t.packages to 2-level dict (name -> version -> pkg); 8. add .add() and .iter_all() helpers to package_index_t and solv_index_t; 9. fix solver: SELECTION_NAME only, SOLVER_SOLVABLE for exact name+version match; 10. default expand_groups=False to prevent group bleed-through; 11. add dedupe=True mode to cache_db.load_indices via SQL GROUP BY; 12. fix provides hijack: vim==X no longer resolves to gvim; 13. add comprehensive unit tests: test_cli, test_compile, test_db, test_models, test_pacman, test_resolver, test_solv_backend, test_integration; 14. add test fixtures in tests/res/ with core/extra .db snapshots; 15. add libsolv experiments in tests/experiments/libsolv/;
This commit is contained in:
parent
9ab5a03899
commit
bf3fd46953
0
python/online/fxreader/pr34/commands_typed/archlinux/apps/cache/__init__.py
vendored
Normal file
0
python/online/fxreader/pr34/commands_typed/archlinux/apps/cache/__init__.py
vendored
Normal file
778
python/online/fxreader/pr34/commands_typed/archlinux/apps/cache/db.py
vendored
Normal file
778
python/online/fxreader/pr34/commands_typed/archlinux/apps/cache/db.py
vendored
Normal file
@ -0,0 +1,778 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import io
|
||||
import logging
|
||||
import pathlib
|
||||
import sqlite3
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Generator,
|
||||
Optional,
|
||||
TypeVar,
|
||||
)
|
||||
|
||||
import pydantic
|
||||
|
||||
from ..pacman.types import (
|
||||
package_desc_t,
|
||||
repo_index_t,
|
||||
)
|
||||
|
||||
from ...models import (
|
||||
package_t,
|
||||
package_index_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_T = TypeVar('_T', bound=pydantic.BaseModel)
|
||||
|
||||
|
||||
class snapshot_row_t(pydantic.BaseModel):
    """One row of the ``snapshots`` table: a synced repo db per (date, repo, arch)."""

    id: int
    date: str  # archive snapshot date string
    repo: str  # repo name, e.g. 'core' / 'extra'
    arch: str  # target architecture (schema default 'x86_64')
    db_sha256: str  # sha256 of the .db file this snapshot was parsed from
    db_rel_path: str  # relative path of the .db file (may be '')
    synced_at: str  # ISO-8601 timestamp of when the snapshot was synced
|
||||
|
||||
|
||||
class package_row_t(pydantic.BaseModel):
    """One row of the ``packages`` table (scalar fields only; relations live in
    ``package_relations``)."""

    id: int
    snapshot_id: int  # FK into snapshots(id)
    name: str
    version: str
    base: str = ''  # pkgbase, when different from name
    desc: str = ''
    filename: str = ''  # archive filename on the mirror
    csize: int = 0  # compressed size in bytes
    isize: int = 0  # installed size in bytes
    md5sum: str = ''
    sha256sum: str = ''
    url: str = ''
    arch: str = ''
    builddate: int = 0  # unix timestamp
    packager: str = ''
|
||||
|
||||
|
||||
class package_version_row_t(pydantic.BaseModel):
    """Projection row for get_package_versions: one (snapshot, package) version."""

    date: str  # snapshot date the version was seen in
    repo: str  # repo the version came from
    name: str
    version: str
|
||||
|
||||
|
||||
class date_count_row_t(pydantic.BaseModel):
    """Projection row for package_count_per_date: packages per snapshot date."""

    date: str
    count: int
|
||||
|
||||
|
||||
class package_hash_row_t(pydantic.BaseModel):
    """Projection row for find_package_hash: just the sha256 of a package file."""

    sha256sum: str
|
||||
|
||||
|
||||
class local_package_row_t(pydantic.BaseModel):
    """One row of the ``local_packages`` table: a package file downloaded to disk."""

    id: int
    name: str
    version: str
    filename: str  # archive filename as downloaded
    sha256sum: str
    local_path: str  # filesystem path of the downloaded file
    downloaded_at: str  # ISO-8601 timestamp
|
||||
|
||||
|
||||
class signature_row_t(pydantic.BaseModel):
    """One row of the ``local_signatures`` table: a .sig for a local package."""

    id: int
    local_package_id: int  # FK into local_packages(id)
    sig_path: str  # filesystem path of the detached signature
    keyring_package_version: Optional[str] = None  # archlinux-keyring version used to verify
    gpg_key_id: Optional[str] = None  # signing key id, when known
    verified_at: Optional[str] = None  # ISO-8601 timestamp of verification
|
||||
|
||||
|
||||
class trusted_entry_t(pydantic.BaseModel, frozen=True):
    """Hashable (name, version) pair — frozen so it can live in a set."""

    name: str
    version: str
|
||||
|
||||
|
||||
def _stream_rows(
|
||||
cur: sqlite3.Cursor,
|
||||
model: type[_T],
|
||||
) -> Generator[_T, None, None]:
|
||||
columns = [desc[0] for desc in cur.description]
|
||||
for raw in cur:
|
||||
yield model.model_validate(dict(zip(columns, raw)))
|
||||
|
||||
|
||||
def _fetch_one(
|
||||
cur: sqlite3.Cursor,
|
||||
model: type[_T],
|
||||
) -> Optional[_T]:
|
||||
columns = [desc[0] for desc in cur.description]
|
||||
raw = cur.fetchone()
|
||||
if raw is None:
|
||||
return None
|
||||
return model.model_validate(dict(zip(columns, raw)))
|
||||
|
||||
|
||||
class cache_db_t:
    """SQLite-backed cache of pacman repo snapshots, their packages/relations,
    and locally downloaded package files plus their signatures."""

    class constants_t:
        # bump when the schema changes; _ensure_schema re-runs the DDL on mismatch
        schema_version: ClassVar[int] = 1

        # relation_type value stored in package_relations -> name of the list
        # attribute on package_desc_t (identity map, kept explicit)
        list_relation_types: ClassVar[dict[str, str]] = {
            'license': 'license',
            'depends': 'depends',
            'optdepends': 'optdepends',
            'makedepends': 'makedepends',
            'checkdepends': 'checkdepends',
            'provides': 'provides',
            'conflicts': 'conflicts',
            'replaces': 'replaces',
            'groups': 'groups',
        }
|
||||
|
||||
    def __init__(self, db_path: pathlib.Path) -> None:
        """Open (or create) the cache database at *db_path* and ensure the schema."""
        self._db_path = db_path
        self._conn = sqlite3.connect(str(db_path))
        # WAL for concurrent readers; foreign_keys so ON DELETE CASCADE works
        self._conn.execute('PRAGMA journal_mode=WAL')
        self._conn.execute('PRAGMA foreign_keys=ON')
        self._ensure_schema()
|
||||
|
||||
    def close(self) -> None:
        """Close the underlying sqlite connection."""
        self._conn.close()
|
||||
|
||||
    def _ensure_schema(self) -> None:
        """Create the schema when missing, or when the stored version is older.

        NOTE(review): on a version mismatch this re-runs the idempotent
        'CREATE ... IF NOT EXISTS' script — it does not migrate existing
        tables to a new layout. Confirm intent before bumping schema_version
        with an incompatible change.
        """
        cur = self._conn.cursor()

        cur.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='schema_meta'")
        if cur.fetchone() is None:
            # fresh database: no meta table yet
            self._create_schema(cur)
            self._conn.commit()
            return

        cur.execute('SELECT version FROM schema_meta LIMIT 1')
        row = cur.fetchone()
        if row is None or row[0] < cache_db_t.constants_t.schema_version:
            self._create_schema(cur)
            self._conn.commit()
|
||||
|
||||
    def _create_schema(self, cur: sqlite3.Cursor) -> None:
        """Run the idempotent DDL script and stamp schema_meta with the version."""
        cur.executescript("""
        CREATE TABLE IF NOT EXISTS schema_meta (
            version INTEGER NOT NULL
        );

        CREATE TABLE IF NOT EXISTS snapshots (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            date TEXT NOT NULL,
            repo TEXT NOT NULL,
            arch TEXT NOT NULL DEFAULT 'x86_64',
            db_sha256 TEXT NOT NULL,
            db_rel_path TEXT NOT NULL DEFAULT '',
            synced_at TEXT NOT NULL,
            UNIQUE(date, repo, arch)
        );

        CREATE TABLE IF NOT EXISTS packages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            snapshot_id INTEGER NOT NULL REFERENCES snapshots(id) ON DELETE CASCADE,
            name TEXT NOT NULL,
            version TEXT NOT NULL,
            base TEXT NOT NULL DEFAULT '',
            desc TEXT NOT NULL DEFAULT '',
            filename TEXT NOT NULL DEFAULT '',
            csize INTEGER NOT NULL DEFAULT 0,
            isize INTEGER NOT NULL DEFAULT 0,
            md5sum TEXT NOT NULL DEFAULT '',
            sha256sum TEXT NOT NULL DEFAULT '',
            url TEXT NOT NULL DEFAULT '',
            arch TEXT NOT NULL DEFAULT '',
            builddate INTEGER NOT NULL DEFAULT 0,
            packager TEXT NOT NULL DEFAULT '',
            UNIQUE(snapshot_id, name)
        );

        CREATE TABLE IF NOT EXISTS package_relations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            package_id INTEGER NOT NULL REFERENCES packages(id) ON DELETE CASCADE,
            relation_type TEXT NOT NULL,
            value TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS local_packages (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL,
            version TEXT NOT NULL,
            filename TEXT NOT NULL,
            sha256sum TEXT NOT NULL DEFAULT '',
            local_path TEXT NOT NULL,
            downloaded_at TEXT NOT NULL,
            UNIQUE(name, version, filename)
        );

        CREATE TABLE IF NOT EXISTS local_signatures (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            local_package_id INTEGER NOT NULL REFERENCES local_packages(id) ON DELETE CASCADE,
            sig_path TEXT NOT NULL,
            keyring_package_version TEXT DEFAULT NULL,
            gpg_key_id TEXT DEFAULT NULL,
            verified_at TEXT DEFAULT NULL,
            UNIQUE(local_package_id)
        );

        CREATE INDEX IF NOT EXISTS idx_packages_snapshot ON packages(snapshot_id);
        CREATE INDEX IF NOT EXISTS idx_packages_name ON packages(name);
        CREATE INDEX IF NOT EXISTS idx_packages_name_version ON packages(name, version);
        CREATE INDEX IF NOT EXISTS idx_snapshots_date ON snapshots(date);
        CREATE INDEX IF NOT EXISTS idx_package_relations_pkg
            ON package_relations(package_id, relation_type);
        CREATE INDEX IF NOT EXISTS idx_local_packages_name_version
            ON local_packages(name, version);
        """)

        # keep schema_meta a single-row version stamp
        cur.execute('DELETE FROM schema_meta')
        cur.execute(
            'INSERT INTO schema_meta (version) VALUES (?)',
            (cache_db_t.constants_t.schema_version,),
        )
|
||||
|
||||
# ── helpers ──
|
||||
|
||||
@staticmethod
|
||||
def file_sha256(path: pathlib.Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with io.open(path, 'rb') as f:
|
||||
while True:
|
||||
chunk = f.read(65536)
|
||||
if not chunk:
|
||||
break
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
# ── snapshot CRUD ──
|
||||
|
||||
    def upsert_snapshot(
        self,
        date: str,
        repo: str,
        arch: str,
        db_sha256: str,
        db_rel_path: str = '',
    ) -> int:
        """Insert or refresh the snapshot row for (date, repo, arch); return its id.

        An existing row with the same db_sha256 is returned unchanged. When
        the hash differs, the snapshot's packages are deleted (relations go
        with them via ON DELETE CASCADE) and the row is updated, so the
        caller can re-store the index.
        """
        now = datetime.datetime.now(datetime.timezone.utc).isoformat()
        cur = self._conn.cursor()

        cur.execute(
            'SELECT id, db_sha256 FROM snapshots WHERE date=? AND repo=? AND arch=?',
            (date, repo, arch),
        )
        row = cur.fetchone()

        if row is not None:
            snapshot_id: int = row[0]
            if row[1] == db_sha256:
                # unchanged db file: nothing to invalidate
                return snapshot_id

            cur.execute(
                'DELETE FROM packages WHERE snapshot_id=?',
                (snapshot_id,),
            )
            cur.execute(
                'UPDATE snapshots SET db_sha256=?, db_rel_path=?, synced_at=? WHERE id=?',
                (db_sha256, db_rel_path, now, snapshot_id),
            )
            self._conn.commit()
            return snapshot_id

        cur.execute(
            'INSERT INTO snapshots (date, repo, arch, db_sha256, db_rel_path, synced_at) VALUES (?, ?, ?, ?, ?, ?)',
            (date, repo, arch, db_sha256, db_rel_path, now),
        )
        self._conn.commit()
        assert cur.lastrowid is not None
        return cur.lastrowid
|
||||
|
||||
    def get_snapshot(
        self,
        date: str,
        repo: str,
        arch: str,
    ) -> Optional[snapshot_row_t]:
        """Return the snapshot row for (date, repo, arch), or None when absent."""
        cur = self._conn.cursor()
        cur.execute(
            'SELECT * FROM snapshots WHERE date=? AND repo=? AND arch=?',
            (date, repo, arch),
        )
        return _fetch_one(cur, snapshot_row_t)
|
||||
|
||||
    def get_snapshot_by_id(
        self,
        snapshot_id: int,
    ) -> Optional[snapshot_row_t]:
        """Return the snapshot row with primary key *snapshot_id*, or None."""
        cur = self._conn.cursor()
        cur.execute(
            'SELECT * FROM snapshots WHERE id=?',
            (snapshot_id,),
        )
        return _fetch_one(cur, snapshot_row_t)
|
||||
|
||||
    def list_snapshots(self) -> Generator[snapshot_row_t, None, None]:
        """Yield all snapshots, newest date first, then by repo name."""
        cur = self._conn.cursor()
        cur.execute('SELECT * FROM snapshots ORDER BY date DESC, repo')
        yield from _stream_rows(cur, snapshot_row_t)
|
||||
|
||||
    def list_dates(self) -> list[str]:
        """Return the distinct snapshot dates, newest first."""
        cur = self._conn.cursor()
        cur.execute('SELECT DISTINCT date FROM snapshots ORDER BY date DESC')
        return [row[0] for row in cur.fetchall()]
|
||||
|
||||
    def snapshot_package_count(self, snapshot_id: int) -> int:
        """Return how many package rows belong to *snapshot_id* (0 when none)."""
        cur = self._conn.cursor()
        cur.execute(
            'SELECT COUNT(*) FROM packages WHERE snapshot_id=?',
            (snapshot_id,),
        )
        row = cur.fetchone()
        return row[0] if row is not None else 0
|
||||
|
||||
# ── package CRUD ──
|
||||
|
||||
    def store_index(
        self,
        snapshot_id: int,
        index: repo_index_t,
    ) -> None:
        """Persist all packages of *index* (and their relations) under *snapshot_id*.

        Scalar fields go into ``packages`` via INSERT OR REPLACE (unique on
        snapshot_id+name); list fields are exploded into ``package_relations``
        rows keyed by the freshly assigned package ids.
        """
        cur = self._conn.cursor()

        pkg_rows: list[tuple[int, str, str, str, str, str, int, int, str, str, str, str, int, str]] = []
        for pkg in index.packages.values():
            pkg_rows.append(
                (
                    snapshot_id,
                    pkg.name,
                    pkg.version,
                    pkg.base,
                    pkg.desc,
                    pkg.filename,
                    pkg.csize,
                    pkg.isize,
                    pkg.md5sum,
                    pkg.sha256sum,
                    pkg.url,
                    pkg.arch,
                    pkg.builddate,
                    pkg.packager,
                )
            )

        # OR REPLACE: a replaced row is deleted first, so its old relations
        # are removed by ON DELETE CASCADE before new ones are inserted below
        cur.executemany(
            'INSERT OR REPLACE INTO packages '
            '(snapshot_id, name, version, base, desc, filename, csize, isize, '
            'md5sum, sha256sum, url, arch, builddate, packager) '
            'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
            pkg_rows,
        )

        # map package name -> row id so relations can reference the new rows
        cur.execute(
            'SELECT id, name FROM packages WHERE snapshot_id=?',
            (snapshot_id,),
        )
        pkg_id_map: dict[str, int] = {}
        for row_raw in cur.fetchall():
            pkg_id_map[row_raw[1]] = row_raw[0]

        rel_rows: list[tuple[int, str, str]] = []
        for pkg in index.packages.values():
            pkg_id = pkg_id_map.get(pkg.name)
            if pkg_id is None:
                continue

            for rel_type, attr_name in cache_db_t.constants_t.list_relation_types.items():
                values: list[str] = getattr(pkg, attr_name)
                for v in values:
                    rel_rows.append((pkg_id, rel_type, v))

        if len(rel_rows) > 0:
            cur.executemany(
                'INSERT INTO package_relations (package_id, relation_type, value) VALUES (?, ?, ?)',
                rel_rows,
            )

        self._conn.commit()

        logger.info(
            dict(
                msg='stored index',
                snapshot_id=snapshot_id,
                packages=len(pkg_rows),
                relations=len(rel_rows),
            )
        )
|
||||
|
||||
    def package_count_per_date(self) -> Generator[date_count_row_t, None, None]:
        """Yield (date, package count) aggregated over all snapshots, newest first."""
        cur = self._conn.cursor()
        cur.execute('SELECT s.date AS date, COUNT(p.id) AS count FROM snapshots s JOIN packages p ON p.snapshot_id = s.id GROUP BY s.date ORDER BY s.date DESC')
        yield from _stream_rows(cur, date_count_row_t)
|
||||
|
||||
    def get_package_versions(
        self,
        names: list[str],
    ) -> Generator[package_version_row_t, None, None]:
        """Yield every (date, repo, name, version) row for the given package *names*.

        Yields nothing when *names* is empty.
        """
        if len(names) == 0:
            yield from ()
            return

        cur = self._conn.cursor()
        # interpolation builds only '?' placeholders; values stay parameterized
        placeholders = ','.join('?' for _ in names)
        cur.execute(
            'SELECT s.date AS date, s.repo AS repo, p.name AS name, p.version AS version '
            'FROM packages p '
            'JOIN snapshots s ON s.id = p.snapshot_id '
            'WHERE p.name IN (%s) '
            'ORDER BY p.name, s.date DESC' % placeholders,
            names,
        )
        yield from _stream_rows(cur, package_version_row_t)
|
||||
|
||||
    def find_package_hash(
        self,
        name: str,
        version: str,
    ) -> Optional[package_hash_row_t]:
        """Return the most recent non-empty sha256 recorded for (name, version)."""
        cur = self._conn.cursor()
        cur.execute(
            "SELECT sha256sum FROM packages WHERE name=? AND version=? AND sha256sum != '' ORDER BY snapshot_id DESC LIMIT 1",
            (name, version),
        )
        return _fetch_one(cur, package_hash_row_t)
|
||||
|
||||
# ── repo_index_t loading ──
|
||||
|
||||
    def load_repo_index(
        self,
        snapshot_id: int,
        repo_name: str,
    ) -> repo_index_t:
        """Rebuild a repo_index_t named *repo_name* from one snapshot's rows.

        Loads all scalar package fields, attaches relations in bulk, then
        rebuilds the provides index before returning.
        """
        cur = self._conn.cursor()

        cur.execute(
            'SELECT * FROM packages WHERE snapshot_id=?',
            (snapshot_id,),
        )

        index = repo_index_t(name=repo_name)

        pkg_ids: list[int] = []
        pkg_by_id: dict[int, package_desc_t] = {}

        columns = [desc[0] for desc in cur.description]
        for raw in cur.fetchall():
            row_dict = dict(zip(columns, raw))
            pkg = package_desc_t(
                name=row_dict['name'],
                version=row_dict['version'],
                base=row_dict['base'],
                desc=row_dict['desc'],
                filename=row_dict['filename'],
                csize=row_dict['csize'],
                isize=row_dict['isize'],
                md5sum=row_dict['md5sum'],
                sha256sum=row_dict['sha256sum'],
                url=row_dict['url'],
                arch=row_dict['arch'],
                builddate=row_dict['builddate'],
                packager=row_dict['packager'],
            )
            # repo_index_t.packages is keyed by name only (one version per snapshot)
            index.packages[pkg.name] = pkg
            pkg_ids.append(row_dict['id'])
            pkg_by_id[row_dict['id']] = pkg

        if len(pkg_ids) > 0:
            self._load_relations(cur, pkg_ids, pkg_by_id)

        index.build_provides_index()
        return index
|
||||
|
||||
    def load_all_indices(self) -> list[repo_index_t]:
        """Load all snapshots as repo_index_t objects via bulk queries.

        Returns one index per (snapshot_id, repo) so the solver sees all
        package versions across all synced dates. Uses two bulk queries
        instead of per-snapshot loading for performance.
        """
        cur = self._conn.cursor()

        cur.execute('SELECT * FROM snapshots ORDER BY date ASC')
        snap_columns = [desc[0] for desc in cur.description]
        snapshots = [dict(zip(snap_columns, raw)) for raw in cur.fetchall()]

        # one query for every package row across all snapshots
        cur.execute(
            'SELECT id, snapshot_id, name, version, base, desc, filename, csize, isize, md5sum, sha256sum, url, arch, builddate, packager FROM packages'
        )
        pkg_columns = [desc[0] for desc in cur.description]

        pkgs_by_snapshot: dict[int, dict[str, package_desc_t]] = {}
        all_pkg_ids: list[int] = []
        pkg_by_id: dict[int, package_desc_t] = {}

        for raw in cur.fetchall():
            rd = dict(zip(pkg_columns, raw))
            pkg = package_desc_t(
                name=rd['name'],
                version=rd['version'],
                base=rd['base'],
                desc=rd['desc'],
                filename=rd['filename'],
                csize=rd['csize'],
                isize=rd['isize'],
                md5sum=rd['md5sum'],
                sha256sum=rd['sha256sum'],
                url=rd['url'],
                arch=rd['arch'],
                builddate=rd['builddate'],
                packager=rd['packager'],
            )
            snap_id: int = rd['snapshot_id']
            if snap_id not in pkgs_by_snapshot:
                pkgs_by_snapshot[snap_id] = {}
            pkgs_by_snapshot[snap_id][pkg.name] = pkg
            all_pkg_ids.append(rd['id'])
            pkg_by_id[rd['id']] = pkg

        # second bulk query: attach relations to the shared pkg objects
        if len(all_pkg_ids) > 0:
            self._load_relations(cur, all_pkg_ids, pkg_by_id)

        indices: list[repo_index_t] = []
        for snap in snapshots:
            pkgs = pkgs_by_snapshot.get(snap['id'])
            if pkgs is None or len(pkgs) == 0:
                # skip snapshots with no packages (e.g. failed/empty syncs)
                continue
            idx = repo_index_t(name=snap['repo'], packages=pkgs)
            idx.build_provides_index()
            indices.append(idx)

        return indices
|
||||
|
||||
    def load_indices(self, dedupe: bool = False) -> list[package_index_t]:
        """Load all snapshots as general package_index_t with parsed constraints.

        When dedupe=True, only unique (name, version) pairs are kept across
        all snapshots, since content is identified by name+version+arch.
        All packages land in a single package_index_t whose 2-level
        packages map (name -> version -> package_t) holds every unique
        version naturally.
        """
        # local import avoids a module-level cycle between cache and pacman
        from ..pacman.types import pacman_constraint_t

        if not dedupe:
            pacman_indices = self.load_all_indices()

            # one general index per pacman repo index, constraints parsed
            indices: list[package_index_t] = []
            for pidx in pacman_indices:
                idx = package_index_t(name=pidx.name)

                for ppkg in pidx.packages.values():
                    idx.add(
                        package_t(
                            name=ppkg.name,
                            version=ppkg.version,
                            filename=ppkg.filename,
                            repo=pidx.name,
                            sha256sum=ppkg.sha256sum,
                            depends=[pacman_constraint_t.parse(d) for d in ppkg.depends],
                            provides=[pacman_constraint_t.parse(p) for p in ppkg.provides],
                            conflicts=[pacman_constraint_t.parse(c) for c in ppkg.conflicts],
                            groups=ppkg.groups,
                        )
                    )

                idx.build_provides_index()
                indices.append(idx)

            return indices

        # dedupe=True: one SQL query selects canonical (name, version) rows
        # via MIN(id), plus the source repo. Relations are loaded in one bulk
        # query by the standard helper.
        cur = self._conn.cursor()
        cur.execute(
            '''
            SELECT p.id, p.name, p.version, p.filename, p.sha256sum, s.repo
            FROM packages p
            JOIN snapshots s ON s.id = p.snapshot_id
            WHERE p.id IN (SELECT MIN(id) FROM packages GROUP BY name, version)
            '''
        )

        pkg_by_id: dict[int, package_desc_t] = {}
        repo_of: dict[int, str] = {}
        for pid, name, version, filename, sha256sum, repo in cur.fetchall():
            pkg_by_id[pid] = package_desc_t(
                name=name,
                version=version,
                filename=filename,
                sha256sum=sha256sum,
            )
            repo_of[pid] = repo

        if len(pkg_by_id) > 0:
            self._load_relations(cur, list(pkg_by_id.keys()), pkg_by_id)

        # single merged index: the 2-level map keeps every unique version
        merged = package_index_t(name='dedup')
        for pid, ppkg in pkg_by_id.items():
            merged.add(
                package_t(
                    name=ppkg.name,
                    version=ppkg.version,
                    filename=ppkg.filename,
                    repo=repo_of[pid],
                    sha256sum=ppkg.sha256sum,
                    depends=[pacman_constraint_t.parse(d) for d in ppkg.depends],
                    provides=[pacman_constraint_t.parse(p) for p in ppkg.provides],
                    conflicts=[pacman_constraint_t.parse(c) for c in ppkg.conflicts],
                    groups=ppkg.groups,
                )
            )
        merged.build_provides_index()

        return [merged]
|
||||
|
||||
    def _load_relations(
        self,
        cur: sqlite3.Cursor,
        pkg_ids: list[int],
        pkg_by_id: dict[int, package_desc_t],
    ) -> None:
        """Bulk-load package_relations for *pkg_ids* and append onto the packages.

        Queries in batches of 500 ids to stay within sqlite's bound-variable
        limit; unknown relation types are silently skipped.
        """
        batch_size = 500
        for i in range(0, len(pkg_ids), batch_size):
            batch = pkg_ids[i : i + batch_size]
            # only '?' marks are interpolated; values stay parameterized
            placeholders = ','.join('?' for _ in batch)
            cur.execute(
                'SELECT package_id, relation_type, value FROM package_relations WHERE package_id IN (%s)' % placeholders,
                batch,
            )
            for row_raw in cur.fetchall():
                pkg = pkg_by_id.get(row_raw[0])
                if pkg is None:
                    continue

                attr_name = cache_db_t.constants_t.list_relation_types.get(row_raw[1])
                if attr_name is None:
                    continue

                target_list: list[str] = getattr(pkg, attr_name)
                target_list.append(row_raw[2])
|
||||
|
||||
# ── local packages & signatures ──
|
||||
|
||||
    def record_local_package(
        self,
        name: str,
        version: str,
        filename: str,
        sha256sum: str,
        local_path: str,
    ) -> int:
        """Record (or refresh) a downloaded package file; return its row id.

        UNIQUE(name, version, filename) + OR REPLACE makes re-downloads
        idempotent.
        """
        now = datetime.datetime.now(datetime.timezone.utc).isoformat()
        cur = self._conn.cursor()
        cur.execute(
            'INSERT OR REPLACE INTO local_packages (name, version, filename, sha256sum, local_path, downloaded_at) VALUES (?, ?, ?, ?, ?, ?)',
            (name, version, filename, sha256sum, local_path, now),
        )
        self._conn.commit()
        assert cur.lastrowid is not None
        return cur.lastrowid
|
||||
|
||||
    def record_signature(
        self,
        local_package_id: int,
        sig_path: str,
        keyring_package_version: Optional[str] = None,
        gpg_key_id: Optional[str] = None,
    ) -> None:
        """Record (or refresh) the verified signature of a local package.

        UNIQUE(local_package_id) + OR REPLACE keeps one signature row per
        package; verified_at is stamped with the current UTC time.
        """
        now = datetime.datetime.now(datetime.timezone.utc).isoformat()
        cur = self._conn.cursor()
        cur.execute(
            'INSERT OR REPLACE INTO local_signatures (local_package_id, sig_path, keyring_package_version, gpg_key_id, verified_at) VALUES (?, ?, ?, ?, ?)',
            (local_package_id, sig_path, keyring_package_version, gpg_key_id, now),
        )
        self._conn.commit()
|
||||
|
||||
    def get_signature_info(
        self,
        name: str,
        version: str,
    ) -> Optional[signature_row_t]:
        """Return the signature row for local package (name, version), or None."""
        cur = self._conn.cursor()
        cur.execute(
            'SELECT ls.id, ls.local_package_id, ls.sig_path, '
            'ls.keyring_package_version, ls.gpg_key_id, ls.verified_at '
            'FROM local_signatures ls '
            'JOIN local_packages lp ON lp.id = ls.local_package_id '
            'WHERE lp.name=? AND lp.version=?',
            (name, version),
        )
        return _fetch_one(cur, signature_row_t)
|
||||
|
||||
    def get_trusted_package_set(
        self,
        trust_keyring_versions: Optional[list[str]] = None,
        trust_gpg_keys: Optional[list[str]] = None,
        exclude_keyring_versions: Optional[list[str]] = None,
        exclude_gpg_keys: Optional[list[str]] = None,
    ) -> Optional[set[trusted_entry_t]]:
        """Return set of trusted (name, version) entries that pass trust filters.

        Returns None if no trust filters are set (meaning all packages pass).
        Exclude filters take precedence over trust filters; when only exclude
        filters are given, everything not excluded is trusted.
        """
        has_filters = (
            (trust_keyring_versions is not None and len(trust_keyring_versions) > 0)
            or (trust_gpg_keys is not None and len(trust_gpg_keys) > 0)
            or (exclude_keyring_versions is not None and len(exclude_keyring_versions) > 0)
            or (exclude_gpg_keys is not None and len(exclude_gpg_keys) > 0)
        )
        if not has_filters:
            return None

        cur = self._conn.cursor()
        cur.execute(
            'SELECT lp.name, lp.version, ls.keyring_package_version, ls.gpg_key_id '
            'FROM local_packages lp '
            'JOIN local_signatures ls ON ls.local_package_id = lp.id'
        )

        trusted: set[trusted_entry_t] = set()

        for row_raw in cur.fetchall():
            keyring_ver = row_raw[2]
            gpg_key = row_raw[3]

            # exclusions win unconditionally
            if exclude_keyring_versions and keyring_ver in exclude_keyring_versions:
                continue
            if exclude_gpg_keys and gpg_key in exclude_gpg_keys:
                continue

            is_trusted = False

            # either allow-list match is sufficient
            if trust_keyring_versions and keyring_ver in trust_keyring_versions:
                is_trusted = True
            if trust_gpg_keys and gpg_key in trust_gpg_keys:
                is_trusted = True

            # no allow-lists at all: exclude-only mode trusts the remainder
            if not trust_keyring_versions and not trust_gpg_keys:
                is_trusted = True

            if is_trusted:
                trusted.add(trusted_entry_t(name=row_raw[0], version=row_raw[1]))

        return trusted
|
||||
|
||||
# ── status ──
|
||||
|
||||
    def has_data(self) -> bool:
        """Return True when at least one snapshot has been synced."""
        cur = self._conn.cursor()
        cur.execute('SELECT COUNT(*) FROM snapshots')
        row = cur.fetchone()
        return row is not None and row[0] > 0
|
||||
@ -0,0 +1,265 @@
|
||||
import re
|
||||
import subprocess
|
||||
import pathlib
|
||||
import tempfile
|
||||
import dataclasses
|
||||
import logging
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Optional,
|
||||
Any,
|
||||
)
|
||||
|
||||
from .types import (
|
||||
package_desc_t,
|
||||
mirror_config_t,
|
||||
compile_options_t,
|
||||
repo_index_t,
|
||||
)
|
||||
from .db import db_parser_t
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class pacman_t:
    """Wrapper around the `pacman` CLI plus mirror/db download helpers."""

    class constants_t:
        # default locations of a stock Arch installation
        default_db_path: ClassVar[pathlib.Path] = pathlib.Path('/var/lib/pacman')
        default_cache_dir: ClassVar[pathlib.Path] = pathlib.Path('/var/cache/pacman/pkg')
        # matches 'Field Name     : value' lines of `pacman -Qi` output
        field_re: ClassVar[re.Pattern[str]] = re.compile(r'^([A-Za-z ]+?)\s*:\s*(.*)$')
|
||||
|
||||
    @dataclasses.dataclass
    class query_entry_t:
        """One installed package as parsed from a `pacman -Qi` info block."""

        name: str
        version: str
        description: str = ''
        architecture: str = ''
        url: str = ''
        # multi-valued -Qi fields; each element is one whitespace-joined value line
        depends_on: list[str] = dataclasses.field(default_factory=lambda: list[str]())
        provides: list[str] = dataclasses.field(default_factory=lambda: list[str]())
        conflicts_with: list[str] = dataclasses.field(default_factory=lambda: list[str]())
        replaces: list[str] = dataclasses.field(default_factory=lambda: list[str]())
        install_size: str = ''  # human-readable, as printed by pacman (e.g. '12.34 MiB')
        packager: str = ''
        groups: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
|
||||
    class list_installed_t:
        """Namespace holding the result type of pacman_t.list_installed."""

        @dataclasses.dataclass
        class res_t:
            # parsed entries, one per installed package
            packages: list['pacman_t.query_entry_t'] = dataclasses.field(default_factory=lambda: list[pacman_t.query_entry_t]())
|
||||
|
||||
@staticmethod
|
||||
def parse_info_block(block: str) -> 'pacman_t.query_entry_t':
|
||||
fields: dict[str, list[str]] = {}
|
||||
current_key: Optional[str] = None
|
||||
|
||||
for line in block.split('\n'):
|
||||
m = pacman_t.constants_t.field_re.match(line)
|
||||
if m:
|
||||
current_key = m.group(1).strip()
|
||||
value = m.group(2).strip()
|
||||
assert isinstance(current_key, str)
|
||||
if current_key not in fields:
|
||||
fields[current_key] = []
|
||||
if value and value != 'None':
|
||||
fields[current_key].append(value)
|
||||
elif current_key and line.startswith(' '):
|
||||
value = line.strip()
|
||||
if value and value != 'None':
|
||||
fields[current_key].append(value)
|
||||
|
||||
name = fields.get('Name', [''])[0]
|
||||
version = fields.get('Version', [''])[0]
|
||||
|
||||
if not name or not version:
|
||||
raise ValueError('missing Name or Version in block')
|
||||
|
||||
return pacman_t.query_entry_t(
|
||||
name=name,
|
||||
version=version,
|
||||
description=fields.get('Description', [''])[0] if fields.get('Description') else '',
|
||||
architecture=fields.get('Architecture', [''])[0] if fields.get('Architecture') else '',
|
||||
url=fields.get('URL', [''])[0] if fields.get('URL') else '',
|
||||
depends_on=fields.get('Depends On', []),
|
||||
provides=fields.get('Provides', []),
|
||||
conflicts_with=fields.get('Conflicts With', []),
|
||||
replaces=fields.get('Replaces', []),
|
||||
install_size=fields.get('Installed Size', [''])[0] if fields.get('Installed Size') else '',
|
||||
packager=fields.get('Packager', [''])[0] if fields.get('Packager') else '',
|
||||
groups=fields.get('Groups', []),
|
||||
)
|
||||
|
||||
    @staticmethod
    def list_installed(
        db_path: Optional[pathlib.Path] = None,
    ) -> 'pacman_t.list_installed_t.res_t':
        """Run `pacman -Qi` and parse each blank-line-separated info block.

        Blocks that fail to parse are logged and skipped rather than failing
        the whole listing. stderr is suppressed.
        """
        cmd: list[str] = ['pacman', '-Qi']

        if db_path is not None:
            cmd.extend(['--dbpath', str(db_path)])

        output = subprocess.check_output(
            cmd,
            stderr=subprocess.DEVNULL,
        ).decode('utf-8')

        # `pacman -Qi` separates package blocks by a blank line
        blocks = output.split('\n\n')
        result = pacman_t.list_installed_t.res_t()

        for block in blocks:
            block = block.strip()
            if not block:
                continue

            try:
                entry = pacman_t.parse_info_block(block)
                result.packages.append(entry)
            except ValueError:
                # best-effort: skip malformed blocks, keep the rest
                logger.warning(
                    dict(
                        msg='failed to parse pacman info block',
                    )
                )

        return result
|
||||
|
||||
    @staticmethod
    def list_installed_simple(
        db_path: Optional[pathlib.Path] = None,
    ) -> list[tuple[str, str]]:
        """Run `pacman -Q` and return (name, version) pairs.

        Lines that do not split into exactly two fields are skipped.
        """
        cmd: list[str] = ['pacman', '-Q']

        if db_path is not None:
            cmd.extend(['--dbpath', str(db_path)])

        output = subprocess.check_output(
            cmd,
            stderr=subprocess.DEVNULL,
        ).decode('utf-8')

        result: list[tuple[str, str]] = []

        for line in output.strip().split('\n'):
            # `pacman -Q` prints 'name version' per line
            parts = line.strip().split(None, 1)
            if len(parts) == 2:
                result.append((parts[0], parts[1]))

        return result
|
||||
|
||||
    @staticmethod
    def sync_db(
        mirror_url: str,
        db_path: pathlib.Path,
        repos: Optional[list[str]] = None,
    ) -> None:
        """Refresh the sync databases under *db_path* via `pacman -Sy`.

        NOTE(review): *mirror_url* is currently unused, and *repos* is
        defaulted but never passed to pacman — the repo set actually comes
        from the system pacman.conf. Confirm whether these parameters should
        be wired through (e.g. via a generated config file) or removed.
        """
        if repos is None:
            repos = ['core', 'extra', 'multilib']

        cmd: list[str] = [
            'pacman',
            '-Sy',
            '--dbpath',
            str(db_path),
        ]

        subprocess.check_call(cmd)
|
||||
|
||||
    @staticmethod
    def download_db(
        url: str,
        output_path: pathlib.Path,
    ) -> None:
        """Download a repo .db file from *url* to *output_path*.

        Parent directories are created as needed; any existing file at
        *output_path* is overwritten.
        """
        # local import keeps module import time free of urllib setup
        import urllib.request

        logger.info(
            dict(
                url=url,
                output_path=str(output_path),
                msg='downloading db',
            )
        )

        output_path.parent.mkdir(parents=True, exist_ok=True)

        urllib.request.urlretrieve(
            url,
            str(output_path),
        )
|
||||
|
||||
    @staticmethod
    def build_mirror_config(options: compile_options_t) -> mirror_config_t:
        """Build a mirror_config_t from compile options.

        Precedence: archive_date (pinned archive snapshot) over index_url
        (explicit mirror) over the default 'last' archive mirror.
        """
        if options.archive_date is not None:
            return mirror_config_t.from_archive_date(
                date=options.archive_date,
                repos=options.repos,
                arch=options.arch,
            )
        elif options.index_url is not None:
            return mirror_config_t.from_mirror_url(
                mirror_url=options.index_url,
                repos=options.repos,
                arch=options.arch,
            )
        else:
            # fall back to the most recent Arch archive snapshot
            return mirror_config_t.from_mirror_url(
                mirror_url='https://archive.archlinux.org/repos/last',
                repos=options.repos,
                arch=options.arch,
            )
|
||||
|
||||
    @staticmethod
    def fetch_indices(
        mirror: mirror_config_t,
        cache_dir: Optional[pathlib.Path] = None,
        no_cache: bool = False,
        offline: bool = False,
    ) -> list[repo_index_t]:
        """Fetch and parse one '<repo>.db' per configured repo.

        With cache_dir set (and no_cache False) an existing on-disk copy is
        reused; otherwise the db is downloaded into the cache. offline=True
        forbids any download and raises FileNotFoundError on a cache miss.
        Without a usable cache, each db goes through a temporary file.
        """
        indices: list[repo_index_t] = []

        for repo in mirror.repos:
            db_url = '%s/%s.db' % (repo.url, repo.name)

            if cache_dir is not None and not no_cache:
                cached_path = cache_dir / ('%s.db' % repo.name)

                if cached_path.exists():
                    # Cache hit: parse the on-disk copy, no network touched.
                    logger.info(
                        dict(
                            repo=repo.name,
                            msg='using cached db',
                            path=str(cached_path),
                        )
                    )
                    index = db_parser_t.parse_db_path(
                        cached_path, repo_name=repo.name
                    )
                    indices.append(index)
                    continue

                if offline:
                    raise FileNotFoundError(
                        'offline mode: cached db not found for %s at %s'
                        % (repo.name, str(cached_path))
                    )

                # Cache miss: download into the cache, then parse from there.
                pacman_t.download_db(db_url, cached_path)
                index = db_parser_t.parse_db_path(
                    cached_path, repo_name=repo.name
                )
                indices.append(index)
            else:
                if offline:
                    raise FileNotFoundError(
                        'offline mode requires --cache-dir with pre-fetched db files'
                    )

                # No usable cache: download into a throwaway temp file.
                # NOTE(review): download_db writes over the path of the open
                # NamedTemporaryFile — works on POSIX; confirm if Windows
                # support is ever needed.
                with tempfile.NamedTemporaryFile(suffix='.db') as tmp:
                    pacman_t.download_db(
                        db_url, pathlib.Path(tmp.name)
                    )
                    index = db_parser_t.parse_db_path(
                        pathlib.Path(tmp.name), repo_name=repo.name
                    )
                    indices.append(index)

        return indices
|
||||
@ -0,0 +1,157 @@
|
||||
import io
|
||||
import re
|
||||
import tarfile
|
||||
import logging
|
||||
import pathlib
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Optional,
|
||||
Any,
|
||||
BinaryIO,
|
||||
)
|
||||
|
||||
from .types import (
|
||||
package_desc_t,
|
||||
repo_index_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class db_parser_t:
    """Parser for pacman sync databases (tar archives of '<pkg>/desc' files)."""

    class constants_t:
        # Matches a '%FIELD%' header line inside a desc file.
        field_re: ClassVar[re.Pattern[str]] = re.compile(r'^%([A-Z0-9]+)%$')

        # Fields whose values are lists (one value per line until a blank line).
        list_fields: ClassVar[set[str]] = {
            'LICENSE',
            'DEPENDS',
            'OPTDEPENDS',
            'MAKEDEPENDS',
            'CHECKDEPENDS',
            'PROVIDES',
            'CONFLICTS',
            'REPLACES',
            'GROUPS',
        }

        # desc field name -> package_desc_t attribute name.
        field_map: ClassVar[dict[str, str]] = {
            'FILENAME': 'filename',
            'NAME': 'name',
            'VERSION': 'version',
            'DESC': 'desc',
            'CSIZE': 'csize',
            'ISIZE': 'isize',
            'MD5SUM': 'md5sum',
            'SHA256SUM': 'sha256sum',
            'URL': 'url',
            'ARCH': 'arch',
            'BUILDDATE': 'builddate',
            'PACKAGER': 'packager',
            'LICENSE': 'license',
            'DEPENDS': 'depends',
            'OPTDEPENDS': 'optdepends',
            'MAKEDEPENDS': 'makedepends',
            'CHECKDEPENDS': 'checkdepends',
            'PROVIDES': 'provides',
            'CONFLICTS': 'conflicts',
            'REPLACES': 'replaces',
            'GROUPS': 'groups',
            'BASE': 'base',
        }

        # Fields converted to int (0 when the section is empty).
        int_fields: ClassVar[set[str]] = {
            'CSIZE',
            'ISIZE',
            'BUILDDATE',
        }

    @staticmethod
    def parse_desc(content: str) -> package_desc_t:
        """Parse one desc file's text into a package_desc_t.

        Raises ValueError when the mandatory NAME or VERSION field is absent.
        """
        fields: dict[str, Any] = {}
        lines = content.split('\n')
        i = 0

        while i < len(lines):
            line = lines[i].strip()

            if line == '':
                i += 1
                continue

            m = db_parser_t.constants_t.field_re.match(line)
            if not m:
                # Stray value outside a %FIELD% section; skip it.
                i += 1
                continue

            field_name = m.group(1)
            i += 1

            # Collect the values that follow, up to the next blank line.
            values: list[str] = []
            while i < len(lines) and lines[i].strip() != '':
                values.append(lines[i].strip())
                i += 1

            attr_name = db_parser_t.constants_t.field_map.get(field_name)
            if attr_name is None:
                # Unknown field; ignore it entirely.
                continue

            if field_name in db_parser_t.constants_t.list_fields:
                fields[attr_name] = values
            elif field_name in db_parser_t.constants_t.int_fields:
                fields[attr_name] = int(values[0]) if len(values) > 0 else 0
            else:
                # Scalar field: first value wins, empty string when absent.
                fields[attr_name] = values[0] if len(values) > 0 else ''

        if 'name' not in fields or 'version' not in fields:
            raise ValueError('desc missing NAME or VERSION')

        return package_desc_t(**fields)

    @staticmethod
    def parse_db(
        f: BinaryIO,
        repo_name: str = '',
    ) -> repo_index_t:
        """Parse a pacman .db tar stream into a repo_index_t.

        Broken desc entries are logged and skipped; the provides/groups
        indices are rebuilt before returning.
        """
        index = repo_index_t(name=repo_name)

        with tarfile.open(fileobj=f, mode='r:*') as tar:
            desc_members: list[tarfile.TarInfo] = []

            for member in tar.getmembers():
                if member.name.endswith('/desc') and member.isfile():
                    desc_members.append(member)

            for member in desc_members:
                extracted = tar.extractfile(member)
                if extracted is None:
                    continue

                content = extracted.read().decode('utf-8')
                extracted.close()

                try:
                    pkg = db_parser_t.parse_desc(content)
                    index.packages[pkg.name] = pkg
                except ValueError:
                    # Broken entry; keep going with the rest of the db.
                    logger.warning(
                        dict(
                            member=member.name,
                            msg='failed to parse desc',
                        )
                    )

        index.build_provides_index()

        return index

    @staticmethod
    def parse_db_path(
        path: pathlib.Path,
        repo_name: Optional[str] = None,
    ) -> repo_index_t:
        """Parse a .db file on disk; repo name defaults to the file stem."""
        if repo_name is None:
            # 'core.db' -> stem 'core'; the extra split guards dotted stems.
            repo_name = path.stem.split('.')[0]

        with io.open(path, 'rb') as f:
            return db_parser_t.parse_db(f, repo_name=repo_name)
|
||||
@ -0,0 +1,164 @@
|
||||
"""Pacman implementation of the archive manager interface."""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import pathlib
|
||||
import re
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ...cli.archive_types import manager_t
|
||||
from ..cache.db import cache_db_t
|
||||
from .client import pacman_t
|
||||
from .db import db_parser_t
|
||||
from .types import mirror_config_t
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class pacman_manager_t(manager_t):
    """pacman backend of manager_t, working against archive.archlinux.org."""

    class constants_t:
        # Root of the date-indexed archive.
        base_url: ClassVar[str] = 'https://archive.archlinux.org/repos/'
        # Matches 'YYYY/MM/DD' directory links in the archive index page.
        href_re: ClassVar[re.Pattern[str]] = re.compile(
            r'href="(\d{4}/\d{2}/\d{2})/"'
        )
        default_repos: ClassVar[list[str]] = ['core', 'extra', 'multilib']

    def list_remote_dates(self) -> list[str]:
        """Scrape available snapshot dates from the archive index, newest first."""
        import urllib.request

        base_url = pacman_manager_t.constants_t.base_url

        logger.info(dict(msg='fetching archive index', url=base_url))

        with urllib.request.urlopen(base_url) as resp:
            html = resp.read().decode('utf-8')

        dates: list[str] = []
        for m in pacman_manager_t.constants_t.href_re.finditer(html):
            dates.append(m.group(1))

        dates.sort(reverse=True)
        return dates

    def sync_date(
        self,
        date: str,
        cache_dir: pathlib.Path,
        cache_db: cache_db_t,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
    ) -> None:
        """Download each repo's .db for *date* (unless already on disk), parse
        it, and store the packages in sqlite.

        A snapshot that already has packages in sqlite AND whose stored db
        hash matches the on-disk file is skipped.
        """
        if repos is None:
            # Copy so callers never share the class-level default list.
            repos = list(pacman_manager_t.constants_t.default_repos)

        mirror = mirror_config_t.from_archive_date(
            date=date,
            repos=repos,
            arch=arch,
        )

        db_dir = cache_dir / date
        db_dir.mkdir(parents=True, exist_ok=True)

        for repo_cfg in mirror.repos:
            db_url = '%s/%s.db' % (repo_cfg.url, repo_cfg.name)
            db_path = db_dir / ('%s.db' % repo_cfg.name)
            # Path relative to cache_dir, as recorded in sqlite.
            db_rel_path = '%s/%s.db' % (date, repo_cfg.name)

            if not db_path.exists():
                logger.info(
                    dict(msg='downloading db', url=db_url, dest=str(db_path))
                )
                pacman_t.download_db(db_url, db_path)
            else:
                logger.info(
                    dict(msg='db already cached on disk', path=str(db_path))
                )

            db_sha256 = cache_db_t.file_sha256(db_path)

            # Register (or refresh) the snapshot row first, then decide
            # whether its packages still need importing.
            snapshot_id = cache_db.upsert_snapshot(
                date=date,
                repo=repo_cfg.name,
                arch=arch,
                db_sha256=db_sha256,
                db_rel_path=db_rel_path,
            )

            if cache_db.snapshot_package_count(snapshot_id) > 0:
                snap = cache_db.get_snapshot_by_id(snapshot_id)
                if snap is not None and snap.db_sha256 == db_sha256:
                    logger.info(
                        dict(
                            msg='snapshot already in sqlite',
                            date=date,
                            repo=repo_cfg.name,
                            snapshot_id=snapshot_id,
                        )
                    )
                    continue

            index = db_parser_t.parse_db_path(db_path, repo_name=repo_cfg.name)

            cache_db.store_index(
                snapshot_id=snapshot_id,
                index=index,
            )

            logger.info(
                dict(
                    msg='synced',
                    date=date,
                    repo=repo_cfg.name,
                    packages=len(index.packages),
                )
            )

    @staticmethod
    def _parse_date(s: str) -> datetime.date:
        """Accept either the archive's 'YYYY/MM/DD' or ISO 'YYYY-MM-DD'."""
        parts = s.split('/')
        if len(parts) == 3:
            return datetime.date(int(parts[0]), int(parts[1]), int(parts[2]))
        return datetime.date.fromisoformat(s)

    @staticmethod
    def _format_date(d: datetime.date) -> str:
        """Render a date in the archive's zero-padded 'YYYY/MM/DD' layout."""
        return '%04d/%02d/%02d' % (d.year, d.month, d.day)

    def sync_date_range(
        self,
        start_date: str,
        end_date: str,
        cache_dir: pathlib.Path,
        cache_db: cache_db_t,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
        step_days: int = 1,
    ) -> None:
        """Sync every *step_days*-th date from end_date down to start_date
        (both inclusive). Failed dates are logged and skipped (best effort).
        """
        start = pacman_manager_t._parse_date(start_date)
        end = pacman_manager_t._parse_date(end_date)
        step = datetime.timedelta(days=step_days)

        # Walk backwards so the most recent snapshots are synced first.
        current = end
        while current >= start:
            date_str = pacman_manager_t._format_date(current)

            try:
                self.sync_date(
                    date=date_str,
                    cache_dir=cache_dir,
                    cache_db=cache_db,
                    repos=repos,
                    arch=arch,
                )
            except Exception:
                # Best effort: one missing/broken snapshot must not abort
                # the rest of the range.
                logger.warning(
                    dict(msg='failed to sync date, skipping', date=date_str),
                    exc_info=True,
                )

            current -= step
|
||||
@ -0,0 +1,196 @@
|
||||
import dataclasses
|
||||
import enum
|
||||
import re
|
||||
import logging
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ...models import (
|
||||
constraint_op_t,
|
||||
package_constraint_t,
|
||||
vercmp_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class pacman_constraint_t:
    """Parses pacman-format constraints (single = for equality) and converts to package_constraint_t."""

    class constants_t:
        # 'name', optionally followed by an operator and a version string.
        constraint_re: ClassVar[re.Pattern[str]] = re.compile(
            r'^([a-zA-Z0-9@._+\-]+?)(?:(>=|<=|>|<|=)(.+))?$'
        )
        # pacman operator token -> structured operator.
        pacman_op_map: ClassVar[dict[str, constraint_op_t]] = {
            '=': constraint_op_t.eq,
            '>=': constraint_op_t.ge,
            '<=': constraint_op_t.le,
            '>': constraint_op_t.gt,
            '<': constraint_op_t.lt,
        }

    @staticmethod
    def parse(s: str) -> package_constraint_t:
        """Parse 'name', 'name=ver', 'name>=ver', ... into a package_constraint_t.

        Raises ValueError on unparseable input or an unknown operator.
        """
        match = pacman_constraint_t.constants_t.constraint_re.match(s.strip())
        if match is None:
            raise ValueError('invalid pacman constraint: %s' % s)

        name, op_str, version = match.group(1, 2, 3)

        # A bare name has neither operator nor version.
        op: Optional[constraint_op_t] = None
        if op_str:
            op = pacman_constraint_t.constants_t.pacman_op_map.get(op_str)
            if op is None:
                raise ValueError('unknown pacman operator: %s' % op_str)

        return package_constraint_t(name=name, op=op, version=version)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class package_desc_t:
    """One package record parsed from a pacman sync-db desc file."""

    name: str
    version: str
    desc: str = ''
    filename: str = ''
    csize: int = 0
    isize: int = 0
    md5sum: str = ''
    sha256sum: str = ''
    url: str = ''
    arch: str = ''
    builddate: int = 0
    packager: str = ''
    license: list[str] = dataclasses.field(default_factory=list)
    depends: list[str] = dataclasses.field(default_factory=list)
    optdepends: list[str] = dataclasses.field(default_factory=list)
    makedepends: list[str] = dataclasses.field(default_factory=list)
    checkdepends: list[str] = dataclasses.field(default_factory=list)
    provides: list[str] = dataclasses.field(default_factory=list)
    conflicts: list[str] = dataclasses.field(default_factory=list)
    replaces: list[str] = dataclasses.field(default_factory=list)
    groups: list[str] = dataclasses.field(default_factory=list)
    base: str = ''

    def parsed_depends(self) -> list[package_constraint_t]:
        """DEPENDS entries parsed into structured constraints."""
        return [pacman_constraint_t.parse(spec) for spec in self.depends]

    def parsed_provides(self) -> list[package_constraint_t]:
        """PROVIDES entries parsed into structured constraints."""
        return [pacman_constraint_t.parse(spec) for spec in self.provides]

    def parsed_conflicts(self) -> list[package_constraint_t]:
        """CONFLICTS entries parsed into structured constraints."""
        return [pacman_constraint_t.parse(spec) for spec in self.conflicts]
|
||||
|
||||
|
||||
@dataclasses.dataclass
class repo_index_t:
    """Parsed contents of one repo .db: packages plus derived lookup tables."""

    name: str
    packages: dict[str, package_desc_t] = dataclasses.field(default_factory=dict)
    provides_index: dict[str, list[str]] = dataclasses.field(default_factory=dict)
    groups_index: dict[str, list[str]] = dataclasses.field(default_factory=dict)

    def build_provides_index(self) -> None:
        """Rebuild provides_index (provided name -> provider package names)
        and groups_index (group -> member package names) from packages.
        """
        self.provides_index = {}
        self.groups_index = {}

        for pkg_name, pkg in self.packages.items():
            for prov in pkg.provides:
                # Only the provided name matters for the reverse index.
                provided_name = pacman_constraint_t.parse(prov).name
                self.provides_index.setdefault(provided_name, []).append(pkg_name)

            for group in pkg.groups:
                self.groups_index.setdefault(group, []).append(pkg_name)
|
||||
|
||||
|
||||
@dataclasses.dataclass
class repo_config_t:
    """A single repository endpoint."""

    # repo name as pacman knows it, e.g. 'core' or 'extra'
    name: str
    # base URL of the directory containing '<name>.db'
    url: str
|
||||
|
||||
|
||||
@dataclasses.dataclass
class mirror_config_t:
    """A set of repo endpoints (one per repo name) plus the target architecture."""

    repos: list[repo_config_t] = dataclasses.field(default_factory=list)
    arch: str = 'x86_64'

    @staticmethod
    def from_archive_date(
        date: str,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
    ) -> 'mirror_config_t':
        """Config for the archive.archlinux.org snapshot at *date* (YYYY/MM/DD)."""
        # Same URL layout as any mirror, just rooted at the dated snapshot.
        return mirror_config_t.from_mirror_url(
            mirror_url='https://archive.archlinux.org/repos/%s' % date,
            repos=repos,
            arch=arch,
        )

    @staticmethod
    def from_mirror_url(
        mirror_url: str,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
    ) -> 'mirror_config_t':
        """Config for an arbitrary mirror; repo URLs are '<mirror>/<repo>/os/<arch>'."""
        if repos is None:
            repos = ['core', 'extra', 'multilib']

        base = mirror_url.rstrip('/')
        return mirror_config_t(
            repos=[
                repo_config_t(
                    name=repo_name,
                    url='%s/%s/os/%s' % (base, repo_name, arch),
                )
                for repo_name in repos
            ],
            arch=arch,
        )
|
||||
|
||||
|
||||
class resolution_strategy_t(enum.Enum):
    """How the resolver treats versions coming from a reference file."""

    # resolve every package to the latest available version
    upgrade_all = 'upgrade-all'
    # keep referenced versions; only upgrade explicitly requested packages
    pin_referenced = 'pin-referenced'
|
||||
|
||||
|
||||
@dataclasses.dataclass
class compile_options_t:
    """Options controlling a compile (package resolution) run."""

    # package specs to resolve, e.g. ['bash', 'glibc>=2.39']
    packages: list[str] = dataclasses.field(default_factory=lambda: list[str]())
    # explicit mirror URL; used when archive_date is not given
    index_url: Optional[str] = None
    # archive.archlinux.org snapshot date, e.g. '2024/01/15'; takes precedence
    archive_date: Optional[str] = None
    # never hit the network; rely on cached .db files only
    offline: bool = False
    # ignore cached .db files even when present
    no_cache: bool = False
    # include sha256 hashes in the compiled output
    generate_hashes: bool = False
    repos: list[str] = dataclasses.field(default_factory=lambda: ['core', 'extra', 'multilib'])
    arch: str = 'x86_64'
    # directory for cached .db files and the sqlite database
    cache_dir: Optional[str] = None
    # previously compiled requirements file providing version pins
    reference: Optional[str] = None
    resolution_strategy: resolution_strategy_t = resolution_strategy_t.upgrade_all
|
||||
|
||||
|
||||
@ -0,0 +1,168 @@
|
||||
"""Archive CLI: sync repos into sqlite cache, query cached data.
|
||||
|
||||
Manager-agnostic. Uses --manager to select backend (default: pacman).
|
||||
Query actions (list-dates, list-packages, show-versions) use only cache_db.
|
||||
Sync actions delegate to the selected manager implementation.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import enum
|
||||
import logging
|
||||
import pathlib
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ..apps.cache.db import cache_db_t
|
||||
from .archive_types import manager_t
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ArchiveAction(enum.Enum):
    """Subcommands of the 'archive' CLI."""

    # query actions: read the sqlite cache (list_dates also asks the remote)
    list_dates = 'list-dates'
    list_packages = 'list-packages'
    show_versions = 'show-versions'
    # sync action: download .db files and import them into sqlite
    sync = 'sync'
|
||||
|
||||
|
||||
def _get_manager(name: str) -> manager_t:
    """Instantiate the backend manager for *name*; only 'pacman' exists today."""
    if name != 'pacman':
        raise ValueError('unknown manager: %s' % name)

    # Imported lazily so query-only actions don't pay for the backend import.
    from ..apps.pacman.manager import pacman_manager_t

    return pacman_manager_t()
|
||||
|
||||
|
||||
def main(args: list[str]) -> int:
    """Entry point for the 'archive' subcommand; returns a process exit code.

    Query actions (list-dates, list-packages, show-versions) read the sqlite
    cache; sync delegates to the selected manager backend.
    """
    archive_parser = argparse.ArgumentParser(
        prog='online-fxreader-pr34-archlinux archive',
    )
    archive_parser.add_argument(
        'action',
        choices=[o.value for o in ArchiveAction],
    )
    archive_parser.add_argument(
        '--cache-dir',
        dest='cache_dir',
        required=True,
        help='directory for cached .db files and sqlite database',
    )
    archive_parser.add_argument(
        '--manager',
        default='pacman',
        help='package manager backend (default: pacman)',
    )
    archive_parser.add_argument(
        '--repos',
        nargs='*',
        default=['core', 'extra', 'multilib'],
    )
    archive_parser.add_argument(
        '--arch',
        default='x86_64',
    )
    archive_parser.add_argument(
        '--date',
        default=None,
        help='single date (e.g. 2024/01/15) for sync',
    )
    archive_parser.add_argument(
        '--date-range',
        dest='date_range',
        nargs=2,
        metavar=('START', 'END'),
        default=None,
        help='date range for sync (e.g. 2024/01/01 2024/06/30)',
    )
    archive_parser.add_argument(
        '--date-step',
        dest='date_step',
        type=int,
        default=1,
        help='step in days when iterating date range, default 1',
    )
    archive_parser.add_argument(
        '--packages',
        default=None,
        help='package names for show-versions, comma-separated',
    )

    archive_options = archive_parser.parse_args(args)
    # Re-wrap the raw action string as its enum member.
    archive_options.action = ArchiveAction(archive_options.action)

    # '--packages a, b' -> ['a', 'b'] (empty items dropped).
    if archive_options.packages is not None:
        archive_options.packages = [
            p.strip()
            for p in archive_options.packages.split(',')
            if p.strip()
        ]

    cache_dir = pathlib.Path(archive_options.cache_dir)
    cache_dir.mkdir(parents=True, exist_ok=True)

    db = cache_db_t(cache_dir / 'archlinux_cache.db')

    try:
        if archive_options.action is ArchiveAction.list_dates:
            mgr = _get_manager(archive_options.manager)

            # Show what is already synced locally before asking the remote.
            if db.has_data():
                print('=== cached dates ===')
                for date_str in db.list_dates():
                    print(date_str)

            print('=== remote dates ===')
            for date_str in mgr.list_remote_dates():
                print(date_str)

        elif archive_options.action is ArchiveAction.list_packages:
            # Per-date package counts from the sqlite cache only.
            for row in db.package_count_per_date():
                print('%s %d' % (row.date, row.count))

        elif archive_options.action is ArchiveAction.show_versions:
            if (
                archive_options.packages is None
                or len(archive_options.packages) == 0
            ):
                logger.error('--packages required for show-versions')
                return 1

            for row in db.get_package_versions(archive_options.packages):
                print(
                    '%s %s %s %s'
                    % (row.date, row.repo, row.name, row.version)
                )

        elif archive_options.action is ArchiveAction.sync:
            mgr = _get_manager(archive_options.manager)

            # A single --date wins over --date-range.
            if archive_options.date is not None:
                mgr.sync_date(
                    date=archive_options.date,
                    cache_dir=cache_dir,
                    cache_db=db,
                    repos=archive_options.repos,
                    arch=archive_options.arch,
                )
            elif archive_options.date_range is not None:
                mgr.sync_date_range(
                    start_date=archive_options.date_range[0],
                    end_date=archive_options.date_range[1],
                    cache_dir=cache_dir,
                    cache_db=db,
                    repos=archive_options.repos,
                    arch=archive_options.arch,
                    step_days=archive_options.date_step,
                )
            else:
                logger.error('sync requires --date or --date-range')
                return 1
        else:
            # Unreachable: every ArchiveAction member is handled above.
            raise NotImplementedError
    finally:
        db.close()

    return 0
|
||||
@ -0,0 +1,48 @@
|
||||
"""General interfaces for archive management.
|
||||
|
||||
Package managers implement manager_t to provide sync/fetch capabilities.
|
||||
"""
|
||||
|
||||
import abc
|
||||
import pathlib
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ..apps.cache.db import cache_db_t
|
||||
|
||||
|
||||
class manager_t(abc.ABC):
    """Abstract package manager interface for archive operations."""

    @abc.abstractmethod
    def list_remote_dates(self) -> list[str]:
        """List available date snapshots from the remote archive."""
        ...

    @abc.abstractmethod
    def sync_date(
        self,
        date: str,
        cache_dir: pathlib.Path,
        cache_db: cache_db_t,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
    ) -> None:
        """Download repo .db files for a date, parse, store in sqlite.

        repos=None means the backend's default repo set.
        """
        ...

    @abc.abstractmethod
    def sync_date_range(
        self,
        start_date: str,
        end_date: str,
        cache_dir: pathlib.Path,
        cache_db: cache_db_t,
        repos: Optional[list[str]] = None,
        arch: str = 'x86_64',
        step_days: int = 1,
    ) -> None:
        """Sync a range of dates, stepping *step_days* at a time."""
        ...
|
||||
@ -0,0 +1,193 @@
|
||||
"""Compile CLI: parse args, resolve packages from cached data, output results.
|
||||
|
||||
Uses cache_db for package data and general resolver interface.
|
||||
No pacman-specific imports in the main flow.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import pathlib
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ..apps.cache.db import cache_db_t
|
||||
from ..models import (
|
||||
compile_entry_t,
|
||||
compile_result_t,
|
||||
package_index_t,
|
||||
package_t,
|
||||
resolve_result_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main(args: list[str]) -> int:
    """Entry point for the 'compile' subcommand; returns a process exit code.

    Loads package indices from the sqlite cache, resolves the requested
    specs with the chosen backend, and prints the compiled requirements.
    """
    compile_parser = argparse.ArgumentParser(
        prog='online-fxreader-pr34-archlinux compile',
        description='Resolve package versions from archive repos and output compiled requirements.',
    )
    compile_parser.add_argument(
        'packages',
        nargs='*',
        help='package specs to resolve (e.g. bash glibc>=2.39)',
    )
    compile_parser.add_argument(
        '-r',
        dest='requirements_file',
        default=None,
        help='path to file with package specs, one per line (like pip -r)',
    )
    compile_parser.add_argument(
        '--generate-hashes',
        action='store_true',
        default=False,
        help='include sha256 hashes in compiled output',
    )
    compile_parser.add_argument(
        '--backend',
        choices=['python', 'solv'],
        default='solv',
        help='resolver backend: solv (libsolv, fast) or python (pure python, slow)',
    )
    compile_parser.add_argument(
        '--cache-dir',
        dest='cache_dir',
        default=None,
        help='directory for cached .db files and sqlite database (created by "archive sync")',
    )
    compile_parser.add_argument(
        '--reference',
        default=None,
        help='path to previously compiled requirements file; '
        'versions from this file are used as pins with --resolution-strategy pin-referenced',
    )
    compile_parser.add_argument(
        '--resolution-strategy',
        dest='resolution_strategy',
        choices=['upgrade-all', 'pin-referenced'],
        default='upgrade-all',
        help='upgrade-all: resolve latest versions; '
        'pin-referenced: keep --reference versions, only upgrade explicitly requested packages',
    )

    compile_options = compile_parser.parse_args(args)

    packages: list[str] = list(compile_options.packages)

    # Merge in specs from the -r file (blank lines and comments skipped).
    if compile_options.requirements_file is not None:
        for line in pathlib.Path(compile_options.requirements_file).read_text().splitlines():
            line = line.strip()
            if line != '' and not line.startswith('#'):
                packages.append(line)

    if compile_options.cache_dir is None:
        logger.error('--cache-dir is required')
        return 1

    cache_dir = pathlib.Path(compile_options.cache_dir)
    db_path = cache_dir / 'archlinux_cache.db'
    if not db_path.exists():
        logger.error('cache db not found: %s' % db_path)
        return 1

    cache_db = cache_db_t(db_path)

    # Indices are loaded fully into memory; the db handle is not needed
    # afterwards.
    try:
        indices = cache_db.load_indices(dedupe=True)
    finally:
        cache_db.close()

    pinned: Optional[dict[str, str]] = None
    upgrade_packages: Optional[list[str]] = None

    if compile_options.reference is not None:
        ref_txt = pathlib.Path(compile_options.reference).read_text()
        pinned = _parse_reference(ref_txt)

        if compile_options.resolution_strategy == 'pin-referenced':
            # Everything from the reference is requested as pinned; only the
            # explicitly given specs may move.
            # NOTE(review): 'p not in pinned' compares full specs (e.g.
            # 'bash>=5') against bare names, so a constrained spec for a
            # pinned package is appended alongside its pin — confirm the
            # resolver copes with the duplicate request.
            upgrade_packages = packages
            packages = list(pinned.keys()) + [
                p for p in packages if p not in pinned
            ]

    try:
        if compile_options.backend == 'solv':
            # Lazy import: libsolv is only needed for this backend.
            from ..resolver.solv import resolve as solv_resolve

            resolved = solv_resolve(
                indices=indices,
                packages=packages,
                pinned=pinned,
                upgrade_packages=upgrade_packages,
            )
        else:
            from ..resolver.general import resolver_t

            resolved = resolver_t.resolve(
                packages=packages,
                indices=indices,
            )
    except RuntimeError as e:
        logger.error(str(e))
        return 1

    if len(resolved.problems) > 0:
        logger.error(
            'resolution failed with %d problem(s):\n%s'
            % (len(resolved.problems), '\n'.join(resolved.problems))
        )
        return 1

    result = compile_result_t.res_t()

    for pkg_name in resolved.resolution_order:
        pkg = resolved.resolved[pkg_name]

        # archive.archlinux.org shards package files by first letter.
        url = ''
        if pkg.filename:
            url = 'https://archive.archlinux.org/packages/%s/%s/%s' % (
                pkg.name[0],
                pkg.name,
                pkg.filename,
            )

        # Mark the entry as pinned when the resolved version equals the
        # referenced one.
        is_pinned = (
            pinned is not None
            and pkg.name in pinned
            and pinned[pkg.name] == pkg.version
        )

        entry = compile_entry_t(
            name=pkg.name,
            version=pkg.version,
            filename=pkg.filename,
            repo=pkg.repo,
            url=url,
            sha256=pkg.sha256sum if compile_options.generate_hashes else '',
            pinned=is_pinned,
            depends=pkg.depends,
        )

        result.entries.append(entry)

    result.txt = result.to_txt()
    print(result.txt)

    return 0
|
||||
|
||||
|
||||
def _parse_reference(txt: str) -> dict[str, str]:
|
||||
pinned: dict[str, str] = {}
|
||||
for line in txt.splitlines():
|
||||
line = line.strip()
|
||||
if line == '' or line.startswith('#'):
|
||||
continue
|
||||
parts = line.split()
|
||||
pkg_spec = parts[0]
|
||||
if '==' in pkg_spec:
|
||||
name, version = pkg_spec.split('==', 1)
|
||||
pinned[name] = version
|
||||
return pinned
|
||||
@ -0,0 +1,109 @@
|
||||
"""List installed pacman packages."""
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import logging
|
||||
import pathlib
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ..apps.pacman.client import pacman_t
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _find_cached_pkg(
|
||||
cache_dir: pathlib.Path,
|
||||
name: str,
|
||||
version: str,
|
||||
) -> Optional[pathlib.Path]:
|
||||
"""Find a cached .pkg.tar.* file for a given package name and version."""
|
||||
for suffix in ['.pkg.tar.zst', '.pkg.tar.xz', '.pkg.tar.gz', '.pkg.tar.bz2', '.pkg.tar']:
|
||||
for arch in ['x86_64', 'any']:
|
||||
candidate = cache_dir / ('%s-%s-%s%s' % (name, version, arch, suffix))
|
||||
if candidate.exists():
|
||||
return candidate
|
||||
return None
|
||||
|
||||
|
||||
def main(args: list[str]) -> int:
    """Entry point for 'list-installed'; returns a process exit code.

    Prints installed packages in one of three formats; with
    --generate-hashes every package must have a file in the local pacman
    package cache, otherwise the command fails.
    """
    list_parser = argparse.ArgumentParser(
        prog='online-fxreader-pr34-archlinux list-installed',
    )
    list_parser.add_argument(
        '--format',
        choices=['plain', 'constraints', 'compiled'],
        default='plain',
        help='plain: name version; constraints: name>=version; compiled: name==version with optional hashes',
    )
    list_parser.add_argument(
        '--generate-hashes',
        action='store_true',
        default=False,
        help='include sha256 from local /var/cache/pacman/pkg/ files; fails if file not found for any package',
    )
    list_parser.add_argument(
        '--db-path',
        dest='db_path',
        default='/var/lib/pacman',
        help='pacman db path, default /var/lib/pacman',
    )
    list_parser.add_argument(
        '--pkg-cache-dir',
        dest='pkg_cache_dir',
        default='/var/cache/pacman/pkg',
        help='local pacman package cache directory, default /var/cache/pacman/pkg',
    )

    list_options = list_parser.parse_args(args)

    # Hashes only make sense in the 'compiled' (name==version) format.
    if list_options.generate_hashes and list_options.format != 'compiled':
        logger.error('--generate-hashes requires --format compiled')
        return 1

    installed = pacman_t.list_installed_simple(
        db_path=pathlib.Path(list_options.db_path),
    )

    pkg_cache_dir = pathlib.Path(list_options.pkg_cache_dir)

    if list_options.format == 'plain':
        for name, version in installed:
            print('%s %s' % (name, version))
    elif list_options.format == 'constraints':
        for name, version in installed:
            print('%s>=%s' % (name, version))
    elif list_options.format == 'compiled':
        missing_hashes: list[str] = []

        for name, version in installed:
            line = '%s==%s' % (name, version)

            if list_options.generate_hashes:
                pkg_file = _find_cached_pkg(pkg_cache_dir, name, version)

                if pkg_file is not None:
                    # Hash the cached package file in 64 KiB chunks.
                    h = hashlib.sha256()
                    with open(pkg_file, 'rb') as fh:
                        while True:
                            chunk = fh.read(65536)
                            if not chunk:
                                break
                            h.update(chunk)
                    line += ' --hash=sha256:%s' % h.hexdigest()
                else:
                    # Remember the failure but keep printing the rest.
                    missing_hashes.append(name)

            print(line)

        if len(missing_hashes) > 0:
            logger.error(
                "can't determine checksum of installed package(s) - "
                'no cached file found for %d package(s): %s'
                % (len(missing_hashes), missing_hashes)
            )
            return 1

    return 0
|
||||
@ -0,0 +1,79 @@
|
||||
"""Main CLI entry point. Dispatches to subcommands."""
|
||||
|
||||
import argparse
|
||||
import enum
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from online.fxreader.pr34.commands_typed import logging as pr34_logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(enum.Enum):
|
||||
list_installed = 'list-installed'
|
||||
compile = 'compile'
|
||||
download = 'download'
|
||||
archive = 'archive'
|
||||
diff = 'diff'
|
||||
|
||||
|
||||
def main(argv: Optional[list[str]] = None) -> int:
|
||||
if argv is None:
|
||||
argv = sys.argv[1:]
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
prog='online-fxreader-pr34-archlinux',
|
||||
description='Arch Linux package management tools',
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-level',
|
||||
dest='log_level',
|
||||
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
|
||||
default='INFO',
|
||||
help='log level (default: INFO)',
|
||||
)
|
||||
parser.add_argument(
|
||||
'command',
|
||||
choices=[o.value for o in Command],
|
||||
)
|
||||
|
||||
options, args = parser.parse_known_args(argv)
|
||||
options.command = Command(options.command)
|
||||
|
||||
pr34_logging.setup(
|
||||
level=getattr(logging, options.log_level),
|
||||
format=pr34_logging.format_t.json,
|
||||
use_console=True,
|
||||
)
|
||||
|
||||
if options.command is Command.list_installed:
|
||||
from . import list_installed
|
||||
|
||||
return list_installed.main(args)
|
||||
elif options.command is Command.compile:
|
||||
from . import compile
|
||||
|
||||
return compile.main(args)
|
||||
elif options.command is Command.download:
|
||||
from . import download
|
||||
|
||||
return download.main(args)
|
||||
elif options.command is Command.archive:
|
||||
from . import archive
|
||||
|
||||
return archive.main(args)
|
||||
elif options.command is Command.diff:
|
||||
from . import diff
|
||||
|
||||
return diff.main(args)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
@ -5,8 +5,8 @@ import logging
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Iterator,
|
||||
Optional,
|
||||
Literal,
|
||||
Any,
|
||||
)
|
||||
|
||||
@ -80,7 +80,7 @@ class vercmp_t:
|
||||
|
||||
|
||||
class constraint_op_t(enum.Enum):
|
||||
eq = '='
|
||||
eq = '=='
|
||||
ge = '>='
|
||||
le = '<='
|
||||
gt = '>'
|
||||
@ -89,7 +89,9 @@ class constraint_op_t(enum.Enum):
|
||||
|
||||
class package_constraint_t:
|
||||
class constants_t:
|
||||
constraint_re: ClassVar[re.Pattern[str]] = re.compile(r'^([a-zA-Z0-9@._+\-]+?)(?:(>=|<=|>|<|=)(.+))?$')
|
||||
constraint_re: ClassVar[re.Pattern[str]] = re.compile(
|
||||
r'^([a-zA-Z0-9@._+\-]+?)(?:(==|>=|<=|>|<)(.+))?$'
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@ -166,134 +168,70 @@ class package_constraint_t:
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class package_desc_t:
|
||||
class package_t:
|
||||
"""General package with parsed constraints. Not pacman-specific."""
|
||||
|
||||
name: str
|
||||
version: str
|
||||
desc: str = ''
|
||||
filename: str = ''
|
||||
csize: int = 0
|
||||
isize: int = 0
|
||||
md5sum: str = ''
|
||||
repo: str = ''
|
||||
sha256sum: str = ''
|
||||
url: str = ''
|
||||
arch: str = ''
|
||||
builddate: int = 0
|
||||
packager: str = ''
|
||||
license: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
depends: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
optdepends: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
makedepends: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
checkdepends: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
provides: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
conflicts: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
replaces: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
groups: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
base: str = ''
|
||||
|
||||
def parsed_depends(self) -> list[package_constraint_t]:
|
||||
return [package_constraint_t.parse(d) for d in self.depends]
|
||||
|
||||
def parsed_provides(self) -> list[package_constraint_t]:
|
||||
return [package_constraint_t.parse(p) for p in self.provides]
|
||||
|
||||
def parsed_conflicts(self) -> list[package_constraint_t]:
|
||||
return [package_constraint_t.parse(c) for c in self.conflicts]
|
||||
depends: list[package_constraint_t] = dataclasses.field(default_factory=list)
|
||||
provides: list[package_constraint_t] = dataclasses.field(default_factory=list)
|
||||
conflicts: list[package_constraint_t] = dataclasses.field(default_factory=list)
|
||||
groups: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class repo_config_t:
|
||||
class package_index_t:
|
||||
"""General package index with parsed constraints.
|
||||
|
||||
packages is a 2-level map: name -> version -> package_t. Multiple
|
||||
versions of the same name are first-class.
|
||||
"""
|
||||
|
||||
name: str
|
||||
url: str
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class mirror_config_t:
|
||||
repos: list[repo_config_t] = dataclasses.field(default_factory=lambda: list[repo_config_t]())
|
||||
arch: str = 'x86_64'
|
||||
|
||||
@staticmethod
|
||||
def from_archive_date(
|
||||
date: str,
|
||||
repos: Optional[list[str]] = None,
|
||||
arch: str = 'x86_64',
|
||||
) -> 'mirror_config_t':
|
||||
if repos is None:
|
||||
repos = ['core', 'extra', 'multilib']
|
||||
|
||||
base_url = 'https://archive.archlinux.org/repos/%s' % date
|
||||
|
||||
return mirror_config_t(
|
||||
repos=[
|
||||
repo_config_t(
|
||||
name=r,
|
||||
url='%s/%s/os/%s' % (base_url, r, arch),
|
||||
packages: dict[str, dict[str, package_t]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, dict[str, package_t]]()
|
||||
)
|
||||
for r in repos
|
||||
],
|
||||
arch=arch,
|
||||
provides_index: dict[str, list[str]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, list[str]]()
|
||||
)
|
||||
groups_index: dict[str, list[str]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, list[str]]()
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def from_mirror_url(
|
||||
mirror_url: str,
|
||||
repos: Optional[list[str]] = None,
|
||||
arch: str = 'x86_64',
|
||||
) -> 'mirror_config_t':
|
||||
if repos is None:
|
||||
repos = ['core', 'extra', 'multilib']
|
||||
def add(self, pkg: package_t) -> None:
|
||||
self.packages.setdefault(pkg.name, {})[pkg.version] = pkg
|
||||
|
||||
return mirror_config_t(
|
||||
repos=[
|
||||
repo_config_t(
|
||||
name=r,
|
||||
url='%s/%s/os/%s' % (mirror_url.rstrip('/'), r, arch),
|
||||
)
|
||||
for r in repos
|
||||
],
|
||||
arch=arch,
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class repo_index_t:
|
||||
name: str
|
||||
packages: dict[str, package_desc_t] = dataclasses.field(default_factory=lambda: dict[str, package_desc_t]())
|
||||
provides_index: dict[str, list[str]] = dataclasses.field(default_factory=lambda: dict[str, list[str]]())
|
||||
groups_index: dict[str, list[str]] = dataclasses.field(default_factory=lambda: dict[str, list[str]]())
|
||||
def iter_all(self) -> 'Iterator[package_t]':
|
||||
for version_map in self.packages.values():
|
||||
for pkg in version_map.values():
|
||||
yield pkg
|
||||
|
||||
def build_provides_index(self) -> None:
|
||||
self.provides_index = {}
|
||||
self.groups_index = {}
|
||||
for pkg_name, pkg in self.packages.items():
|
||||
for pkg in self.iter_all():
|
||||
for prov in pkg.provides:
|
||||
prov_constraint = package_constraint_t.parse(prov)
|
||||
if prov_constraint.name not in self.provides_index:
|
||||
self.provides_index[prov_constraint.name] = []
|
||||
self.provides_index[prov_constraint.name].append(pkg_name)
|
||||
if prov.name not in self.provides_index:
|
||||
self.provides_index[prov.name] = []
|
||||
self.provides_index[prov.name].append(pkg.name)
|
||||
for group in pkg.groups:
|
||||
if group not in self.groups_index:
|
||||
self.groups_index[group] = []
|
||||
self.groups_index[group].append(pkg_name)
|
||||
|
||||
|
||||
class resolution_strategy_t(enum.Enum):
|
||||
upgrade_all = 'upgrade-all'
|
||||
pin_referenced = 'pin-referenced'
|
||||
self.groups_index[group].append(pkg.name)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class compile_options_t:
|
||||
packages: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
index_url: Optional[str] = None
|
||||
archive_date: Optional[str] = None
|
||||
offline: bool = False
|
||||
no_cache: bool = False
|
||||
generate_hashes: bool = False
|
||||
repos: list[str] = dataclasses.field(default_factory=lambda: ['core', 'extra', 'multilib'])
|
||||
arch: str = 'x86_64'
|
||||
cache_dir: Optional[str] = None
|
||||
reference: Optional[str] = None
|
||||
resolution_strategy: resolution_strategy_t = resolution_strategy_t.upgrade_all
|
||||
class resolve_result_t:
|
||||
"""Result of dependency resolution."""
|
||||
|
||||
resolved: dict[str, package_t] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, package_t]()
|
||||
)
|
||||
resolution_order: list[str] = dataclasses.field(default_factory=list)
|
||||
problems: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
@ -304,13 +242,18 @@ class compile_entry_t:
|
||||
repo: str
|
||||
url: str
|
||||
sha256: str = ''
|
||||
depends: list[str] = dataclasses.field(default_factory=lambda: list[str]())
|
||||
pinned: bool = False
|
||||
depends: list[package_constraint_t] = dataclasses.field(
|
||||
default_factory=list
|
||||
)
|
||||
|
||||
|
||||
class compile_result_t:
|
||||
@dataclasses.dataclass
|
||||
class res_t:
|
||||
entries: list[compile_entry_t] = dataclasses.field(default_factory=lambda: list[compile_entry_t]())
|
||||
entries: list[compile_entry_t] = dataclasses.field(
|
||||
default_factory=lambda: list[compile_entry_t]()
|
||||
)
|
||||
txt: str = ''
|
||||
|
||||
def to_txt(self) -> str:
|
||||
@ -319,6 +262,9 @@ class compile_result_t:
|
||||
line = '%s==%s' % (e.name, e.version)
|
||||
if e.sha256:
|
||||
line += ' --hash=sha256:%s' % e.sha256
|
||||
lines.append('# %s' % e.url if e.url else '# %s/%s' % (e.repo, e.filename))
|
||||
if e.pinned:
|
||||
line += ' # pinned'
|
||||
comment = '# %s' % e.url if e.url else '# %s/%s' % (e.repo, e.filename)
|
||||
lines.append(comment)
|
||||
lines.append(line)
|
||||
return '\n'.join(lines)
|
||||
|
||||
@ -0,0 +1,158 @@
|
||||
import dataclasses
|
||||
import logging
|
||||
|
||||
from typing import (
|
||||
Optional,
|
||||
)
|
||||
|
||||
from ..models import (
|
||||
package_t,
|
||||
package_constraint_t,
|
||||
package_index_t,
|
||||
resolve_result_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class resolver_t:
|
||||
class error_t:
|
||||
class not_found_t(Exception):
|
||||
def __init__(self, name: str) -> None:
|
||||
self.name = name
|
||||
super().__init__('package not found: %s' % name)
|
||||
|
||||
class conflict_t(Exception):
|
||||
def __init__(self, pkg_a: str, pkg_b: str, constraint: str) -> None:
|
||||
self.pkg_a = pkg_a
|
||||
self.pkg_b = pkg_b
|
||||
self.constraint = constraint
|
||||
super().__init__(
|
||||
'conflict: %s conflicts with %s (%s)' % (pkg_a, pkg_b, constraint)
|
||||
)
|
||||
|
||||
class unsatisfied_t(Exception):
|
||||
def __init__(self, parent: str, dep: str) -> None:
|
||||
self.parent = parent
|
||||
self.dep = dep
|
||||
super().__init__(
|
||||
'unsatisfied dependency: %s requires %s' % (parent, dep)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _find_provider(
|
||||
constraint: package_constraint_t,
|
||||
indices: list[package_index_t],
|
||||
) -> Optional[tuple[package_t, str]]:
|
||||
for index in indices:
|
||||
if constraint.name in index.packages:
|
||||
for pkg in index.packages[constraint.name].values():
|
||||
if constraint.satisfied_by(pkg.version):
|
||||
return (pkg, index.name)
|
||||
|
||||
for index in indices:
|
||||
if constraint.name in index.provides_index:
|
||||
for provider_name in index.provides_index[constraint.name]:
|
||||
for pkg in index.packages[provider_name].values():
|
||||
for prov in pkg.provides:
|
||||
if prov.name == constraint.name:
|
||||
if constraint.version is None or prov.version is None:
|
||||
return (pkg, index.name)
|
||||
if constraint.satisfied_by(prov.version):
|
||||
return (pkg, index.name)
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def resolve(
|
||||
packages: list[str],
|
||||
indices: list[package_index_t],
|
||||
skip_installed: Optional[set[str]] = None,
|
||||
) -> resolve_result_t:
|
||||
if skip_installed is None:
|
||||
skip_installed = set()
|
||||
|
||||
result = resolve_result_t()
|
||||
visited: set[str] = set()
|
||||
stack: list[tuple[package_constraint_t, Optional[str]]] = []
|
||||
|
||||
for pkg_str in packages:
|
||||
constraint = package_constraint_t.parse(pkg_str)
|
||||
stack.append((constraint, None))
|
||||
|
||||
while len(stack) > 0:
|
||||
constraint, parent = stack.pop()
|
||||
|
||||
if constraint.name in visited:
|
||||
if constraint.name in result.resolved:
|
||||
pkg = result.resolved[constraint.name]
|
||||
if not constraint.satisfied_by(pkg.version):
|
||||
result.problems.append(
|
||||
'unsatisfied: %s requires %s'
|
||||
% (parent or '<root>', constraint.to_str())
|
||||
)
|
||||
continue
|
||||
|
||||
if constraint.name in skip_installed:
|
||||
visited.add(constraint.name)
|
||||
continue
|
||||
|
||||
found = resolver_t._find_provider(constraint, indices)
|
||||
|
||||
if found is None:
|
||||
result.problems.append(
|
||||
'nothing provides %s%s'
|
||||
% (
|
||||
constraint.to_str(),
|
||||
' needed by %s' % parent if parent else '',
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
pkg, repo_name = found
|
||||
|
||||
if pkg.name in visited:
|
||||
if pkg.name in result.resolved and constraint.op is not None:
|
||||
resolved_pkg = result.resolved[pkg.name]
|
||||
if constraint.name == resolved_pkg.name:
|
||||
if not constraint.satisfied_by(resolved_pkg.version):
|
||||
result.problems.append(
|
||||
'unsatisfied: %s requires %s'
|
||||
% (parent or '<root>', constraint.to_str())
|
||||
)
|
||||
else:
|
||||
matched = False
|
||||
for prov in resolved_pkg.provides:
|
||||
if prov.name == constraint.name:
|
||||
if prov.version is None or constraint.satisfied_by(
|
||||
prov.version
|
||||
):
|
||||
matched = True
|
||||
break
|
||||
if not matched:
|
||||
result.problems.append(
|
||||
'unsatisfied: %s requires %s'
|
||||
% (parent or '<root>', constraint.to_str())
|
||||
)
|
||||
continue
|
||||
|
||||
visited.add(pkg.name)
|
||||
visited.add(constraint.name)
|
||||
|
||||
result.resolved[pkg.name] = pkg
|
||||
result.resolution_order.append(pkg.name)
|
||||
|
||||
for conflict in pkg.conflicts:
|
||||
if conflict.name in result.resolved:
|
||||
resolved_version = result.resolved[conflict.name].version
|
||||
if conflict.satisfied_by(resolved_version):
|
||||
result.problems.append(
|
||||
'conflict: %s conflicts with %s (%s)'
|
||||
% (pkg.name, conflict.name, conflict.to_str())
|
||||
)
|
||||
|
||||
for dep in pkg.depends:
|
||||
if dep.name not in visited and dep.name not in skip_installed:
|
||||
stack.append((dep, pkg.name))
|
||||
|
||||
return result
|
||||
@ -0,0 +1,422 @@
|
||||
"""Libsolv-based dependency resolver.
|
||||
|
||||
Uses solv_types.py internally for raw pacman-format deps.
|
||||
Exposes resolve() taking general types from models.py.
|
||||
No imports from apps/pacman or apps/cache.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import pathlib
|
||||
import re
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Optional,
|
||||
Any,
|
||||
)
|
||||
|
||||
from ..models import (
|
||||
package_t,
|
||||
package_index_t,
|
||||
package_constraint_t,
|
||||
resolve_result_t,
|
||||
)
|
||||
|
||||
from .solv_types import (
|
||||
solv_package_t,
|
||||
solv_index_t,
|
||||
repo_store_t,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class solv_pool_t:
|
||||
class constants_t:
|
||||
# accepts both == (general) and = (pacman) since solv deps use =
|
||||
dep_re: ClassVar[re.Pattern[str]] = re.compile(
|
||||
r'^([a-zA-Z0-9@._+\-]+?)(?:(==|>=|<=|>|<|=)(.+))?$'
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
stores: Optional[list[repo_store_t]] = None,
|
||||
cache_dir: Optional[pathlib.Path] = None,
|
||||
) -> None:
|
||||
import solv
|
||||
|
||||
self._solv = solv
|
||||
self._pool = solv.Pool()
|
||||
self._pool.setdisttype(solv.Pool.DISTTYPE_ARCH)
|
||||
self._pool.setarch('x86_64')
|
||||
self._rel_map = {
|
||||
'==': solv.REL_EQ,
|
||||
'=': solv.REL_EQ,
|
||||
'>=': solv.REL_GT | solv.REL_EQ,
|
||||
'<=': solv.REL_LT | solv.REL_EQ,
|
||||
'>': solv.REL_GT,
|
||||
'<': solv.REL_LT,
|
||||
}
|
||||
self._stores: list[repo_store_t] = []
|
||||
|
||||
if stores is not None:
|
||||
for store in stores:
|
||||
self.add_store(store, cache_dir=cache_dir)
|
||||
self.finalize()
|
||||
|
||||
def _parse_dep(self, dep_str: str) -> Any:
|
||||
m = solv_pool_t.constants_t.dep_re.match(dep_str.strip())
|
||||
if not m:
|
||||
return self._pool.str2id(dep_str)
|
||||
|
||||
name = m.group(1)
|
||||
op = m.group(2)
|
||||
ver = m.group(3)
|
||||
|
||||
name_id = self._pool.str2id(name)
|
||||
|
||||
if op and ver:
|
||||
ver_id = self._pool.str2id(ver)
|
||||
return self._pool.rel2id(name_id, ver_id, self._rel_map[op])
|
||||
|
||||
return name_id
|
||||
|
||||
def add_store(
|
||||
self,
|
||||
store: repo_store_t,
|
||||
cache_dir: Optional[pathlib.Path] = None,
|
||||
) -> None:
|
||||
solv = self._solv
|
||||
|
||||
self._stores.append(store)
|
||||
|
||||
loaded_from_cache = False
|
||||
|
||||
if cache_dir is not None:
|
||||
solv_cache_path = cache_dir / ('%s.solv' % store.index.name)
|
||||
checksum_path = cache_dir / ('%s.solv.sha256' % store.index.name)
|
||||
|
||||
if solv_cache_path.exists() and checksum_path.exists():
|
||||
stored_checksum = checksum_path.read_text().strip()
|
||||
if stored_checksum == store.db_checksum:
|
||||
repo = self._pool.add_repo(store.index.name)
|
||||
f = solv.xfopen(str(solv_cache_path))
|
||||
repo.add_solv(f)
|
||||
f.close()
|
||||
loaded_from_cache = True
|
||||
|
||||
logger.info(
|
||||
dict(
|
||||
repo=store.index.name,
|
||||
msg='loaded solv from cache',
|
||||
solvables=repo.nsolvables,
|
||||
)
|
||||
)
|
||||
|
||||
if not loaded_from_cache:
|
||||
repo = self._pool.add_repo(store.index.name)
|
||||
for pkg in store.index.iter_all():
|
||||
s = repo.add_solvable()
|
||||
s.name = pkg.name
|
||||
s.evr = pkg.version
|
||||
s.arch = 'noarch' if pkg.arch == 'any' else (pkg.arch or 'x86_64')
|
||||
|
||||
for dep_str in pkg.depends:
|
||||
s.add_requires(self._parse_dep(dep_str))
|
||||
|
||||
for prov_str in pkg.provides:
|
||||
s.add_provides(self._parse_dep(prov_str))
|
||||
|
||||
s.add_provides(
|
||||
self._pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
|
||||
)
|
||||
|
||||
for conf_str in pkg.conflicts:
|
||||
s.add_conflicts(self._parse_dep(conf_str))
|
||||
|
||||
repo.internalize()
|
||||
|
||||
if cache_dir is not None:
|
||||
store.write_solv_cache(cache_dir, repo)
|
||||
|
||||
def add_pinned(self, pinned: dict[str, str], upgrade_set: Optional[set[str]] = None) -> None:
|
||||
"""Add synthetic solvables for pinned versions not already in a repo."""
|
||||
solv = self._solv
|
||||
|
||||
pinned_repo = self._pool.add_repo('@pinned')
|
||||
added = False
|
||||
|
||||
for pkg_name, pkg_ver in pinned.items():
|
||||
if upgrade_set is not None and pkg_name in upgrade_set:
|
||||
continue
|
||||
|
||||
# check if this exact version exists in any repo
|
||||
found_exact = False
|
||||
for store in self._stores:
|
||||
version_map = store.index.packages.get(pkg_name)
|
||||
if version_map is not None and pkg_ver in version_map:
|
||||
found_exact = True
|
||||
break
|
||||
if not found_exact:
|
||||
s = pinned_repo.add_solvable()
|
||||
s.name = pkg_name
|
||||
s.evr = pkg_ver
|
||||
s.arch = 'x86_64'
|
||||
s.add_provides(
|
||||
self._pool.rel2id(s.nameid, s.evrid, solv.REL_EQ)
|
||||
)
|
||||
added = True
|
||||
|
||||
if added:
|
||||
pinned_repo.internalize()
|
||||
|
||||
def finalize(self) -> None:
|
||||
self._pool.createwhatprovides()
|
||||
|
||||
class resolve_t:
|
||||
class res_t:
|
||||
def __init__(self) -> None:
|
||||
self.resolved: dict[str, Any] = {}
|
||||
self.problems: list[str] = []
|
||||
|
||||
def expand_groups(
|
||||
self,
|
||||
packages: list[str],
|
||||
) -> list[str]:
|
||||
expanded: list[str] = []
|
||||
for pkg_name in packages:
|
||||
found_group = False
|
||||
for store in self._stores:
|
||||
if pkg_name in store.index.groups_index:
|
||||
expanded.extend(store.index.groups_index[pkg_name])
|
||||
found_group = True
|
||||
break
|
||||
if not found_group:
|
||||
expanded.append(pkg_name)
|
||||
return expanded
|
||||
|
||||
@staticmethod
|
||||
def parse_reference(txt: str) -> dict[str, str]:
|
||||
pinned: dict[str, str] = {}
|
||||
for line in txt.splitlines():
|
||||
line = line.strip()
|
||||
if line == '' or line.startswith('#'):
|
||||
continue
|
||||
parts = line.split()
|
||||
pkg_spec = parts[0]
|
||||
if '==' in pkg_spec:
|
||||
name, version = pkg_spec.split('==', 1)
|
||||
pinned[name] = version
|
||||
return pinned
|
||||
|
||||
def resolve(
|
||||
self,
|
||||
packages: list[str],
|
||||
expand_groups: bool = False,
|
||||
pinned: Optional[dict[str, str]] = None,
|
||||
upgrade_packages: Optional[list[str]] = None,
|
||||
) -> 'solv_pool_t.resolve_t.res_t':
|
||||
solv = self._solv
|
||||
|
||||
if expand_groups:
|
||||
packages = self.expand_groups(packages)
|
||||
|
||||
result = solv_pool_t.resolve_t.res_t()
|
||||
|
||||
solver = self._pool.Solver()
|
||||
jobs: list[Any] = []
|
||||
|
||||
upgrade_set: set[str] = set()
|
||||
if upgrade_packages is not None:
|
||||
if expand_groups:
|
||||
upgrade_packages = self.expand_groups(upgrade_packages)
|
||||
upgrade_set = set(upgrade_packages)
|
||||
|
||||
for pkg_spec in packages:
|
||||
pkg_name = (
|
||||
pkg_spec.split('>=')[0]
|
||||
.split('<=')[0]
|
||||
.split('>')[0]
|
||||
.split('<')[0]
|
||||
.split('==')[0]
|
||||
.split('=')[0]
|
||||
)
|
||||
|
||||
target_evr: Optional[str] = None
|
||||
if (
|
||||
pinned is not None
|
||||
and pkg_name in pinned
|
||||
and pkg_name not in upgrade_set
|
||||
):
|
||||
target_evr = pinned[pkg_name]
|
||||
elif pkg_name != pkg_spec and '==' in pkg_spec:
|
||||
target_evr = pkg_spec.split('==', 1)[1]
|
||||
|
||||
sel = self._pool.select(
|
||||
pkg_name,
|
||||
solv.Selection.SELECTION_NAME,
|
||||
)
|
||||
if sel.isempty():
|
||||
result.problems.append('package not found: %s' % pkg_spec)
|
||||
continue
|
||||
|
||||
if target_evr is not None:
|
||||
# match by exact name + version, never via provides hijack
|
||||
matching = [s for s in sel.solvables() if s.evr == target_evr]
|
||||
if len(matching) == 0:
|
||||
result.problems.append(
|
||||
'no name match for %s==%s' % (pkg_name, target_evr)
|
||||
)
|
||||
continue
|
||||
# pick exactly one solvable — multiple snapshots may yield
|
||||
# duplicates of the same pkg which libsolv considers conflicting
|
||||
jobs.append(
|
||||
self._pool.Job(
|
||||
solv.Job.SOLVER_INSTALL | solv.Job.SOLVER_SOLVABLE,
|
||||
matching[0].id,
|
||||
)
|
||||
)
|
||||
else:
|
||||
jobs += sel.jobs(solv.Job.SOLVER_INSTALL)
|
||||
|
||||
if len(result.problems) > 0:
|
||||
return result
|
||||
|
||||
problems = solver.solve(jobs)
|
||||
|
||||
if problems:
|
||||
for p in problems:
|
||||
result.problems.append(str(p))
|
||||
return result
|
||||
|
||||
trans = solver.transaction()
|
||||
new_solvables = list(trans.newsolvables())
|
||||
for s in new_solvables:
|
||||
result.resolved[s.name] = s
|
||||
|
||||
if logger.isEnabledFor(logging.DEBUG):
|
||||
self._log_resolution_origins(new_solvables, packages)
|
||||
|
||||
return result
|
||||
|
||||
def _log_resolution_origins(
|
||||
self,
|
||||
new_solvables: list[Any],
|
||||
packages: list[str],
|
||||
) -> None:
|
||||
requested_names: set[str] = set()
|
||||
for pkg_spec in packages:
|
||||
pn = (
|
||||
pkg_spec.split('>=')[0]
|
||||
.split('<=')[0]
|
||||
.split('>')[0]
|
||||
.split('<')[0]
|
||||
.split('==')[0]
|
||||
.split('=')[0]
|
||||
)
|
||||
requested_names.add(pn)
|
||||
|
||||
resolved_names = {s.name for s in new_solvables}
|
||||
|
||||
# name + provides token -> set of resolved pkg names that satisfy it
|
||||
provides_to_pkgs: dict[str, set[str]] = {}
|
||||
deps_of: dict[str, list[str]] = {}
|
||||
|
||||
for store in self._stores:
|
||||
for pkg in store.index.iter_all():
|
||||
if pkg.name not in resolved_names:
|
||||
continue
|
||||
provides_to_pkgs.setdefault(pkg.name, set()).add(pkg.name)
|
||||
for prov in pkg.provides:
|
||||
key = prov.split('=')[0].split('>')[0].split('<')[0]
|
||||
provides_to_pkgs.setdefault(key, set()).add(pkg.name)
|
||||
deps_of[pkg.name] = list(pkg.depends)
|
||||
|
||||
for s in new_solvables:
|
||||
if s.name in requested_names:
|
||||
logger.debug(
|
||||
dict(
|
||||
pkg=s.name,
|
||||
evr=str(s.evr),
|
||||
origin='REQUESTED',
|
||||
)
|
||||
)
|
||||
continue
|
||||
|
||||
parent: Optional[str] = None
|
||||
for cand_parent, dlist in deps_of.items():
|
||||
if cand_parent == s.name:
|
||||
continue
|
||||
for d in dlist:
|
||||
key = d.split('=')[0].split('>')[0].split('<')[0]
|
||||
if s.name in provides_to_pkgs.get(key, set()):
|
||||
parent = cand_parent
|
||||
break
|
||||
if parent is not None:
|
||||
break
|
||||
|
||||
logger.debug(
|
||||
dict(
|
||||
pkg=s.name,
|
||||
evr=str(s.evr),
|
||||
origin='PULLED_BY',
|
||||
parent=parent or '<unknown>',
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def resolve(
|
||||
indices: list[package_index_t],
|
||||
packages: list[str],
|
||||
pinned: Optional[dict[str, str]] = None,
|
||||
upgrade_packages: Optional[list[str]] = None,
|
||||
) -> resolve_result_t:
|
||||
"""Resolve using libsolv. Takes general types, returns general types.
|
||||
|
||||
Converts package_index_t → solv_index_t internally.
|
||||
"""
|
||||
# convert general → solv internal types
|
||||
stores: list[repo_store_t] = []
|
||||
for idx in indices:
|
||||
solv_idx = solv_index_t(name=idx.name)
|
||||
for pkg in idx.iter_all():
|
||||
solv_idx.add(
|
||||
solv_package_t(
|
||||
name=pkg.name,
|
||||
version=pkg.version,
|
||||
filename=pkg.filename,
|
||||
sha256sum=pkg.sha256sum,
|
||||
depends=[d.to_str().replace('==', '=') for d in pkg.depends],
|
||||
provides=[p.to_str().replace('==', '=') for p in pkg.provides],
|
||||
conflicts=[c.to_str().replace('==', '=') for c in pkg.conflicts],
|
||||
groups=pkg.groups,
|
||||
)
|
||||
)
|
||||
solv_idx.build_provides_index()
|
||||
stores.append(repo_store_t(index=solv_idx))
|
||||
|
||||
pool = solv_pool_t(stores=stores)
|
||||
|
||||
solv_result = pool.resolve(
|
||||
packages=packages,
|
||||
pinned=pinned,
|
||||
upgrade_packages=upgrade_packages,
|
||||
)
|
||||
|
||||
# convert solv result → general resolve_result_t
|
||||
result = resolve_result_t()
|
||||
result.problems = list(solv_result.problems)
|
||||
|
||||
for pkg_name, solvable in solv_result.resolved.items():
|
||||
# find the general package_t matching this solvable by name+version
|
||||
for idx in indices:
|
||||
version_map = idx.packages.get(pkg_name)
|
||||
if version_map is None:
|
||||
continue
|
||||
candidate = version_map.get(str(solvable.evr))
|
||||
if candidate is not None:
|
||||
result.resolved[pkg_name] = candidate
|
||||
result.resolution_order.append(pkg_name)
|
||||
break
|
||||
|
||||
return result
|
||||
@ -0,0 +1,130 @@
|
||||
"""Types internal to the libsolv resolver.
|
||||
|
||||
These use raw dependency strings (pacman = format) because libsolv
|
||||
operates on pacman-format constraints internally.
|
||||
"""
|
||||
|
||||
import dataclasses
|
||||
import hashlib
|
||||
import io
|
||||
import logging
|
||||
import pathlib
|
||||
import re
|
||||
|
||||
from typing import (
|
||||
ClassVar,
|
||||
Iterator,
|
||||
Optional,
|
||||
Any,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class solv_package_t:
|
||||
"""Package as understood by the solv resolver. Raw dep strings."""
|
||||
|
||||
name: str
|
||||
version: str
|
||||
filename: str = ''
|
||||
sha256sum: str = ''
|
||||
arch: str = ''
|
||||
depends: list[str] = dataclasses.field(default_factory=list)
|
||||
provides: list[str] = dataclasses.field(default_factory=list)
|
||||
conflicts: list[str] = dataclasses.field(default_factory=list)
|
||||
groups: list[str] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class solv_index_t:
|
||||
"""Package index for the solv resolver. Raw dep strings.
|
||||
|
||||
packages is a 2-level map: name -> version -> solv_package_t.
|
||||
"""
|
||||
|
||||
name: str
|
||||
packages: dict[str, dict[str, solv_package_t]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, dict[str, solv_package_t]]()
|
||||
)
|
||||
provides_index: dict[str, list[str]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, list[str]]()
|
||||
)
|
||||
groups_index: dict[str, list[str]] = dataclasses.field(
|
||||
default_factory=lambda: dict[str, list[str]]()
|
||||
)
|
||||
|
||||
def add(self, pkg: solv_package_t) -> None:
|
||||
self.packages.setdefault(pkg.name, {})[pkg.version] = pkg
|
||||
|
||||
def iter_all(self) -> 'Iterator[solv_package_t]':
|
||||
for version_map in self.packages.values():
|
||||
for pkg in version_map.values():
|
||||
yield pkg
|
||||
|
||||
def build_provides_index(self) -> None:
|
||||
self.provides_index = {}
|
||||
self.groups_index = {}
|
||||
dep_re = re.compile(r'^([a-zA-Z0-9@._+\-]+?)(?:(?:>=|<=|>|<|=).+)?$')
|
||||
|
||||
for pkg in self.iter_all():
|
||||
for prov in pkg.provides:
|
||||
m = dep_re.match(prov)
|
||||
prov_name = m.group(1) if m else prov
|
||||
if prov_name not in self.provides_index:
|
||||
self.provides_index[prov_name] = []
|
||||
self.provides_index[prov_name].append(pkg.name)
|
||||
for group in pkg.groups:
|
||||
if group not in self.groups_index:
|
||||
self.groups_index[group] = []
|
||||
self.groups_index[group].append(pkg.name)
|
||||
|
||||
|
||||
class repo_store_t:
|
||||
class constants_t:
|
||||
checksum_filename: ClassVar[str] = 'checksum.sha256'
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
index: solv_index_t,
|
||||
db_checksum: str = '',
|
||||
) -> None:
|
||||
self.index = index
|
||||
self.db_checksum = db_checksum
|
||||
|
||||
@staticmethod
|
||||
def _file_checksum(path: pathlib.Path) -> str:
|
||||
h = hashlib.sha256()
|
||||
with io.open(path, 'rb') as f:
|
||||
while True:
|
||||
chunk = f.read(65536)
|
||||
if not chunk:
|
||||
break
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
def write_solv_cache(
|
||||
self,
|
||||
cache_dir: pathlib.Path,
|
||||
solv_repo: Any,
|
||||
) -> None:
|
||||
import solv
|
||||
|
||||
cache_dir.mkdir(parents=True, exist_ok=True)
|
||||
solv_cache_path = cache_dir / ('%s.solv' % self.index.name)
|
||||
checksum_path = cache_dir / ('%s.solv.sha256' % self.index.name)
|
||||
|
||||
f = solv.xfopen(str(solv_cache_path), 'w')
|
||||
solv_repo.write(f)
|
||||
f.close()
|
||||
|
||||
checksum_path.write_text(self.db_checksum)
|
||||
|
||||
logger.info(
|
||||
dict(
|
||||
repo=self.index.name,
|
||||
msg='wrote solv cache',
|
||||
path=str(solv_cache_path),
|
||||
size=solv_cache_path.stat().st_size,
|
||||
)
|
||||
)
|
||||
@ -0,0 +1,15 @@
|
||||
"""Re-export general types from models for convenience."""
|
||||
|
||||
from ..models import (
|
||||
package_t,
|
||||
package_index_t,
|
||||
resolve_result_t,
|
||||
package_constraint_t,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'package_t',
|
||||
'package_index_t',
|
||||
'resolve_result_t',
|
||||
'package_constraint_t',
|
||||
]
|
||||
0
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/__init__.py
Normal file
0
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/__init__.py
Normal file
105
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/bench_writer.py
Normal file
105
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/bench_writer.py
Normal file
@ -0,0 +1,105 @@
|
||||
"""Benchmark: write .solv files with 16K, 64K, 128K synthetic packages."""
|
||||
|
||||
import ctypes
import io
import pathlib
import time

from .solv_writer import (
	provide_input_t,
	solvable_input_t,
	write_solv,
)
|
||||
|
||||
_HERE = pathlib.Path(__file__).resolve().parent
|
||||
_PROJECT_ROOT = _HERE.parents[8]
|
||||
OUT_DIR = _PROJECT_ROOT / 'tmp' / 'experiments' / 'libsolv'
|
||||
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
|
||||
def build_array(n: int) -> tuple[ctypes.Array, float, list[bytes]]:
	"""Build a ctypes array of *n* synthetic solvable inputs.

	Returns ``(arr, t_build, strings)``: the populated array, the build
	time in seconds, and every ``bytes`` object the array references.
	ctypes ``c_char_p`` fields do not own their memory, so the caller must
	keep *strings* alive for as long as *arr* is used.

	Fix: the previous return annotation (``tuple[type, float]``) described
	a 2-tuple and was silenced with ``# type: ignore[return-value]``.
	"""
	t0 = time.monotonic()

	arr_type = solvable_input_t * n
	arr = arr_type()

	# keep byte string refs alive
	strings: list[bytes] = []

	for i in range(n):
		s = arr[i]
		name = b'pkg-%d' % i
		evr = b'%d.%d-%d' % (i // 1000, i % 1000, 1)
		arch = b'x86_64'
		strings.extend([name, evr, arch])

		s.name = name
		s.evr = evr
		s.arch = arch

		# 1 self-provide
		prov_name = name
		prov_evr = evr
		s.n_provides = 1
		s.provides[0].name = prov_name
		s.provides[0].evr = prov_evr

		# 0-4 requires (depends on i)
		n_req = i % 5
		s.n_requires = n_req
		for j in range(n_req):
			dep_idx = (i + j + 1) % n
			dep_name = b'pkg-%d' % dep_idx
			strings.append(dep_name)
			s.requires[j] = dep_name

		# 0-1 conflicts
		if i % 10 == 0:
			conf_name = b'pkg-%d' % ((i + n // 2) % n)
			strings.append(conf_name)
			s.n_conflicts = 1
			s.conflicts[0] = conf_name
		else:
			s.n_conflicts = 0

	t_build = time.monotonic() - t0
	return arr, t_build, strings
|
||||
|
||||
|
||||
def bench_write(n: int) -> None:
	"""Benchmark writing *n* synthetic packages with our .solv writer.

	Builds the input array, times the write, then re-reads the file
	through libsolv to confirm it parses, printing one summary line.
	"""
	# NOTE: 'strings' must stay referenced until write_solv() returns —
	# the array's c_char_p fields point into these bytes objects.
	arr, t_build, strings = build_array(n)

	out_path = OUT_DIR / ('bench_%dk.solv' % (n // 1000))

	t0 = time.monotonic()
	with open(out_path, 'wb') as fp:
		write_solv(arr, n, fp)
	t_write = time.monotonic() - t0

	size = out_path.stat().st_size

	# verify it loads
	import solv

	t1 = time.monotonic()
	pool = solv.Pool()
	pool.setdisttype(solv.Pool.DISTTYPE_ARCH)
	pool.setarch('x86_64')
	repo = pool.add_repo('bench')
	f = solv.xfopen(str(out_path))
	ret = repo.add_solv(f)
	f.close()
	t_load = time.monotonic() - t1

	print(
		'n=%6d build=%.3fs write=%.3fs load=%.3fs size=%7d bytes (%5.1f MiB) ok=%s solvables=%d'
		% (n, t_build, t_write, t_load, size, size / 1048576, ret, repo.nsolvables)
	)
|
||||
|
||||
|
||||
def main() -> None:
	"""Run the write benchmark for each package-count tier."""
	for package_count in (1000, 4000, 16000, 64000, 128000):
		bench_write(package_count)


if __name__ == '__main__':
	main()
|
||||
122
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/gen_reference.py
Normal file
122
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/gen_reference.py
Normal file
@ -0,0 +1,122 @@
|
||||
"""Generate a reference .solv file using libsolv Python API, then hex-dump it."""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import solv
|
||||
|
||||
# project root: where Makefile lives
|
||||
_HERE = pathlib.Path(__file__).resolve().parent
|
||||
_PROJECT_ROOT = _HERE.parents[8] # archlinux/tests/experiments/libsolv -> ... -> /app
|
||||
OUT_DIR = _PROJECT_ROOT / 'tmp' / 'experiments' / 'libsolv'
|
||||
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
pool = solv.Pool()
|
||||
pool.setdisttype(solv.Pool.DISTTYPE_ARCH)
|
||||
pool.setarch('x86_64')
|
||||
|
||||
repo = pool.add_repo('test')
|
||||
|
||||
pkgs = [
|
||||
{
|
||||
'name': 'bash',
|
||||
'evr': '5.2.26-2',
|
||||
'arch': 'x86_64',
|
||||
'requires': ['glibc', 'readline'],
|
||||
'provides': [('bash', '5.2.26-2')],
|
||||
'conflicts': [],
|
||||
},
|
||||
{
|
||||
'name': 'readline',
|
||||
'evr': '8.2-1',
|
||||
'arch': 'x86_64',
|
||||
'requires': ['glibc', 'ncurses'],
|
||||
'provides': [('readline', '8.2-1'), ('libreadline', None)],
|
||||
'conflicts': [],
|
||||
},
|
||||
{
|
||||
'name': 'glibc',
|
||||
'evr': '2.39-1',
|
||||
'arch': 'x86_64',
|
||||
'requires': ['filesystem'],
|
||||
'provides': [('glibc', '2.39-1')],
|
||||
'conflicts': [],
|
||||
},
|
||||
{
|
||||
'name': 'ncurses',
|
||||
'evr': '6.4-1',
|
||||
'arch': 'x86_64',
|
||||
'requires': ['glibc'],
|
||||
'provides': [('ncurses', '6.4-1')],
|
||||
'conflicts': [],
|
||||
},
|
||||
{
|
||||
'name': 'filesystem',
|
||||
'evr': '2024.01-1',
|
||||
'arch': 'any',
|
||||
'requires': [],
|
||||
'provides': [('filesystem', '2024.01-1')],
|
||||
'conflicts': [],
|
||||
},
|
||||
]
|
||||
|
||||
for p in pkgs:
|
||||
s = repo.add_solvable()
|
||||
s.name = p['name']
|
||||
s.evr = p['evr']
|
||||
s.arch = p['arch']
|
||||
for dep in p['requires']:
|
||||
s.add_deparray(solv.SOLVABLE_REQUIRES, pool.Dep(dep))
|
||||
for prov_name, prov_ver in p['provides']:
|
||||
if prov_ver is not None:
|
||||
dep = pool.rel2id(
|
||||
pool.str2id(prov_name),
|
||||
pool.str2id(prov_ver),
|
||||
solv.REL_EQ,
|
||||
)
|
||||
else:
|
||||
dep = pool.str2id(prov_name)
|
||||
s.add_deparray(solv.SOLVABLE_PROVIDES, dep)
|
||||
for conf in p['conflicts']:
|
||||
s.add_deparray(solv.SOLVABLE_CONFLICTS, pool.Dep(conf))
|
||||
|
||||
repo.internalize()
|
||||
|
||||
solv_path = OUT_DIR / 'reference.solv'
|
||||
f = solv.xfopen(str(solv_path), 'w')
|
||||
repo.write(f)
|
||||
f.close()
|
||||
|
||||
print('wrote %s (%d bytes)' % (solv_path, os.path.getsize(solv_path)))
|
||||
|
||||
# Verify round-trip
|
||||
pool2 = solv.Pool()
|
||||
pool2.setdisttype(solv.Pool.DISTTYPE_ARCH)
|
||||
pool2.setarch('x86_64')
|
||||
repo2 = pool2.add_repo('verify')
|
||||
f2 = solv.xfopen(str(solv_path))
|
||||
repo2.add_solv(f2)
|
||||
f2.close()
|
||||
|
||||
print('read back %d solvables:' % repo2.nsolvables)
|
||||
for s in repo2.solvables_iter():
|
||||
print(' %s-%s.%s' % (s.name, s.evr, s.arch))
|
||||
deps = s.lookup_deparray(solv.SOLVABLE_REQUIRES)
|
||||
for d in deps:
|
||||
print(' requires: %s' % d)
|
||||
provs = s.lookup_deparray(solv.SOLVABLE_PROVIDES)
|
||||
for pr in provs:
|
||||
print(' provides: %s' % pr)
|
||||
|
||||
# Hex dump
|
||||
with open(solv_path, 'rb') as fh:
|
||||
data = fh.read()
|
||||
|
||||
hex_path = OUT_DIR / 'reference.hex'
|
||||
with open(hex_path, 'w') as hf:
|
||||
for i in range(0, len(data), 16):
|
||||
chunk = data[i : i + 16]
|
||||
hex_part = ' '.join('%02x' % b for b in chunk)
|
||||
ascii_part = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
|
||||
hf.write('%04x %-48s %s\n' % (i, hex_part, ascii_part))
|
||||
|
||||
print('hex dump: %s' % hex_path)
|
||||
460
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/solv_writer.py
Normal file
460
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/solv_writer.py
Normal file
@ -0,0 +1,460 @@
|
||||
"""
|
||||
Minimal .solv binary writer using ctypes for input layout.
|
||||
Matches libsolv's repo_write.c output byte-for-byte.
|
||||
"""
|
||||
|
||||
import ctypes
|
||||
import io
|
||||
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# ── constants ──

SOLV_MAGIC = 0x534F4C56  # big-endian bytes spell 'SOLV'
SOLV_VERSION_8 = 8  # .solv on-disk format version written here
SOLV_FLAG_PREFIX_POOL = 4  # string pool stored prefix-compressed
SOLV_FLAG_SIZE_BYTES = 8  # incore data preceded by its byte size

REL_EQ = 2  # relational dependency flag: name == evr

# key storage classes: stored per-solvable vs. in the shared incore blob
KEY_STORAGE_SOLVABLE = 1
KEY_STORAGE_INCORE = 2

# well-known strings (must match libsolv knownid.h)
TYPE_ID = b'repokey:type:id'
TYPE_RELIDARRAY = b'repokey:type:relidarray'
TYPE_FLEXARRAY = b'repokey:type:flexarray'

KEY_REPO_SOLVABLES = b'repository:solvables'
KEY_SOLVABLE_NAME = b'solvable:name'
KEY_SOLVABLE_EVR = b'solvable:evr'
KEY_SOLVABLE_ARCH = b'solvable:arch'
KEY_SOLVABLE_PROVIDES = b'solvable:provides'
KEY_SOLVABLE_REQUIRES = b'solvable:requires'
KEY_SOLVABLE_CONFLICTS = b'solvable:conflicts'
||||
|
||||
# ── ctypes header ──
|
||||
|
||||
|
||||
class solv_header_t(ctypes.BigEndianStructure):
	# Fixed file header: nine big-endian u32 fields, packed (no padding).
	_pack_ = 1
	_fields_ = [
		('magic', ctypes.c_uint32),
		('version', ctypes.c_uint32),
		('nstrings', ctypes.c_uint32),
		('nrels', ctypes.c_uint32),
		('ndirs', ctypes.c_uint32),
		('nsolvables', ctypes.c_uint32),
		('nkeys', ctypes.c_uint32),
		('nschemata', ctypes.c_uint32),
		('flags', ctypes.c_uint32),
	]
|
||||
|
||||
|
||||
class solv_stringpool_sizes_t(ctypes.BigEndianStructure):
	# Written right after the header: sizeid = sum of string lengths plus
	# one NUL terminator each; pfsize = size of the prefix-compressed blob.
	_pack_ = 1
	_fields_ = [
		('sizeid', ctypes.c_uint32),
		('pfsize', ctypes.c_uint32),
	]
|
||||
|
||||
|
||||
# ── ctypes input structures ──

MAX_DEPS = 32  # fixed capacity of the requires/conflicts arrays per solvable
MAX_PROVIDES = 32  # fixed capacity of the provides array per solvable


class provide_input_t(ctypes.Structure):
	# One provide entry: a name plus an optional '=' version (evr may be None
	# for a plain, unversioned provide).
	_fields_ = [
		('name', ctypes.c_char_p),
		('evr', ctypes.c_char_p),
	]
|
||||
|
||||
|
||||
class solvable_input_t(ctypes.Structure):
	# Input record consumed by write_solv(): counted, fixed-capacity dep
	# arrays. c_char_p fields do not own their memory — the caller keeps
	# the referenced bytes objects alive.
	_fields_ = [
		('name', ctypes.c_char_p),
		('evr', ctypes.c_char_p),
		('arch', ctypes.c_char_p),
		('n_requires', ctypes.c_uint32),
		('requires', ctypes.c_char_p * MAX_DEPS),
		('n_provides', ctypes.c_uint32),
		('provides', provide_input_t * MAX_PROVIDES),
		('n_conflicts', ctypes.c_uint32),
		('conflicts', ctypes.c_char_p * MAX_DEPS),
	]
|
||||
|
||||
|
||||
# ── varint encoding ──
|
||||
|
||||
|
||||
def _encode_id(val: int) -> bytes:
|
||||
if val < 0x80:
|
||||
return bytes([val])
|
||||
parts: list[int] = []
|
||||
while val >= 0x80:
|
||||
parts.append(val & 0x7F)
|
||||
val >>= 7
|
||||
parts.append(val)
|
||||
parts.reverse()
|
||||
# set bit 7 on all bytes except the last
|
||||
for i in range(len(parts) - 1):
|
||||
parts[i] |= 0x80
|
||||
return bytes(parts)
|
||||
|
||||
|
||||
def _encode_idarray_val(buf: bytearray, val: int, is_last: bool) -> None:
|
||||
"""Encode one value in an idarray.
|
||||
|
||||
Varint with 7-bit payload per byte, bit 7 = continuation.
|
||||
Final byte of varint: bit 6 = more array elements (set) or last (clear).
|
||||
"""
|
||||
if val < 0x40:
|
||||
buf.append(val if is_last else val | 0x40)
|
||||
return
|
||||
# build varint bytes: 7 bits per byte, MSB first
|
||||
parts: list[int] = []
|
||||
tmp = val
|
||||
# extract 6 bits for the final byte (leaves room for bit 6 flag)
|
||||
parts.append(tmp & 0x3F)
|
||||
tmp >>= 6
|
||||
while tmp > 0:
|
||||
parts.append(tmp & 0x7F)
|
||||
tmp >>= 7
|
||||
parts.reverse()
|
||||
# set bit 7 on all bytes except the last (continuation flag)
|
||||
for i in range(len(parts) - 1):
|
||||
parts[i] |= 0x80
|
||||
# set bit 6 on last byte if not last element
|
||||
if not is_last:
|
||||
parts[-1] |= 0x40
|
||||
buf.extend(parts)
|
||||
|
||||
|
||||
def _encode_idarray(ids: list[int]) -> bytes:
	"""Encode an id array; bit 6 on an element's final byte means more follow."""
	if not ids:
		return b''
	out = bytearray()
	last_pos = len(ids) - 1
	for pos, ident in enumerate(ids):
		_encode_idarray_val(out, ident, pos == last_pos)
	return bytes(out)
|
||||
|
||||
|
||||
def _encode_rel_idarray(ids: list[int]) -> bytes:
	"""Encode a delta-compressed id array (RELIDARRAY storage).

	Ids are sorted internally; each element is stored as
	(delta from the previous value) + 1. Bit 6 on an element's final
	varint byte signals that more elements follow.
	"""
	if not ids:
		return b''
	ordered = sorted(ids)
	out = bytearray()
	prev = 0
	last_pos = len(ordered) - 1
	for pos, ident in enumerate(ordered):
		_encode_idarray_val(out, ident - prev + 1, pos == last_pos)
		prev = ident
	return bytes(out)
|
||||
|
||||
|
||||
# ── writer ──
|
||||
|
||||
|
||||
def write_solv(
	solvables: ctypes.Array,  # type: ignore[type-arg]
	count: int,
	fp: io.BufferedIOBase,
) -> None:
	"""Serialize the first *count* entries of *solvables* to *fp* in .solv format.

	Mirrors libsolv's repo_write.c output: header, prefix-compressed string
	pool, reldeps, keys, schemata, then the incore data blob. *solvables* is
	an array of ``solvable_input_t``.
	"""
	# Phase 1: collect solvable strings and count references (need)
	str_need: dict[bytes, int] = {}

	def ref(s: bytes) -> None:
		str_need[s] = str_need.get(s, 0) + 1

	has_provides = False
	has_requires = False
	has_conflicts = False

	for i in range(count):
		s = solvables[i]
		ref(s.name)
		ref(s.evr)
		ref(s.arch)
		for j in range(s.n_requires):
			ref(s.requires[j])
			has_requires = True
		for j in range(s.n_provides):
			p = s.provides[j]
			ref(p.name)
			if p.evr is not None:
				ref(p.evr)
			has_provides = True
		for j in range(s.n_conflicts):
			ref(s.conflicts[j])
			has_conflicts = True

	# Ref key/type strings for keys that are actually used.
	# Each key references its name string and type string once.
	active_keys: list[tuple[bytes, bytes]] = [
		(KEY_SOLVABLE_NAME, TYPE_ID),
		(KEY_SOLVABLE_ARCH, TYPE_ID),
		(KEY_SOLVABLE_EVR, TYPE_ID),
	]
	if has_provides:
		active_keys.append((KEY_SOLVABLE_PROVIDES, TYPE_RELIDARRAY))
	if has_requires:
		active_keys.append((KEY_SOLVABLE_REQUIRES, TYPE_RELIDARRAY))
	if has_conflicts:
		active_keys.append((KEY_SOLVABLE_CONFLICTS, TYPE_RELIDARRAY))
	active_keys.append((KEY_REPO_SOLVABLES, TYPE_FLEXARRAY))

	for kname, ktype in active_keys:
		ref(kname)
		ref(ktype)

	# Phase 2: sort strings like libsolv
	# Sort by: descending need, then ascending strcmp
	# String index 0 = "" (implicit), index 1 = "" (forced)
	sorted_strings = sorted(str_need.keys(), key=lambda s: (-str_need[s], s))

	# Build final string list: [0]="" [1]="" [2..]=sorted
	final_strings: list[bytes] = [b'', b'']
	final_strings.extend(sorted_strings)

	# Build old->new mapping
	str_to_id: dict[bytes, int] = {b'': 0}
	for i, s in enumerate(final_strings):
		if i == 0:
			continue
		str_to_id[s] = i

	nstrings = len(final_strings)

	# Phase 3: build reldeps (one entry per distinct versioned provide)
	reldeps: list[tuple[int, int, int]] = []
	reldep_map: dict[tuple[int, int, int], int] = {}

	def make_reldep(name_id: int, evr_id: int, flags: int) -> int:
		# dedupe: identical (name, evr, flags) triples share one reldep slot
		key = (name_id, evr_id, flags)
		if key in reldep_map:
			return reldep_map[key]
		idx = len(reldeps)
		reldeps.append(key)
		reldep_map[key] = idx
		return idx

	for i in range(count):
		s = solvables[i]
		for j in range(s.n_provides):
			p = s.provides[j]
			if p.evr is not None:
				make_reldep(str_to_id[p.name], str_to_id[p.evr], REL_EQ)

	nrels = len(reldeps)

	# Sort reldeps like libsolv: descending need, then ascending map
	# For simplicity, reldep need = 1 each (one reference per provide)
	# libsolv sorts by need desc then map asc — with equal needs, original order
	# Since all needs are equal, sort by original index (= keep order)

	# Phase 4: determine which keys are used
	# libsolv only writes keys that are actually used
	# Keys order: name, arch, evr first (SOLVABLE storage), then provides,
	# requires, conflicts (SOLVABLE storage), then repo:solvables (INCORE)
	used_keys: list[tuple[bytes, bytes, int, int]] = [
		(KEY_SOLVABLE_NAME, TYPE_ID, 0, KEY_STORAGE_SOLVABLE),
		(KEY_SOLVABLE_ARCH, TYPE_ID, 0, KEY_STORAGE_SOLVABLE),
		(KEY_SOLVABLE_EVR, TYPE_ID, 0, KEY_STORAGE_SOLVABLE),
	]
	if has_provides:
		used_keys.append((KEY_SOLVABLE_PROVIDES, TYPE_RELIDARRAY, 0, KEY_STORAGE_SOLVABLE))
	if has_requires:
		used_keys.append((KEY_SOLVABLE_REQUIRES, TYPE_RELIDARRAY, 0, KEY_STORAGE_SOLVABLE))
	if has_conflicts:
		used_keys.append((KEY_SOLVABLE_CONFLICTS, TYPE_RELIDARRAY, 0, KEY_STORAGE_SOLVABLE))

	# repo:solvables is last, size=1 (1 flexarray entry at repo level)
	repo_solvables_key_idx = len(used_keys) + 1  # +1 because key 0 is implicit
	used_keys.append((KEY_REPO_SOLVABLES, TYPE_FLEXARRAY, 1, KEY_STORAGE_INCORE))

	nkeys = len(used_keys) + 1  # +1 for key 0

	# Build key name -> key index mapping
	key_name_to_idx: dict[bytes, int] = {}
	for ki, (kname, _, _, _) in enumerate(used_keys):
		key_name_to_idx[kname] = ki + 1  # +1 for key 0

	# Compute provides/requires/conflicts key sizes (count + 1 per solvable for terminator)
	prov_key_size = 0
	req_key_size = 0
	conf_key_size = 0
	for i in range(count):
		s = solvables[i]
		if s.n_provides > 0:
			prov_key_size += s.n_provides + 1
		if s.n_requires > 0:
			req_key_size += s.n_requires + 1
		if s.n_conflicts > 0:
			conf_key_size += s.n_conflicts + 1

	# Update sizes in keys
	for ki in range(len(used_keys)):
		kname = used_keys[ki][0]
		if kname == KEY_SOLVABLE_PROVIDES:
			used_keys[ki] = (kname, used_keys[ki][1], prov_key_size, used_keys[ki][3])
		elif kname == KEY_SOLVABLE_REQUIRES:
			used_keys[ki] = (kname, used_keys[ki][1], req_key_size, used_keys[ki][3])
		elif kname == KEY_SOLVABLE_CONFLICTS:
			used_keys[ki] = (kname, used_keys[ki][1], conf_key_size, used_keys[ki][3])

	# Phase 5: build schemata
	# Each unique combination of keys used by a solvable = one schema
	schema_map: dict[tuple[int, ...], int] = {}
	solvable_schema_ids: list[int] = []

	for i in range(count):
		s = solvables[i]
		key_indices: list[int] = [
			key_name_to_idx[KEY_SOLVABLE_NAME],
			key_name_to_idx[KEY_SOLVABLE_ARCH],
			key_name_to_idx[KEY_SOLVABLE_EVR],
		]
		if s.n_provides > 0:
			key_indices.append(key_name_to_idx[KEY_SOLVABLE_PROVIDES])
		if s.n_requires > 0:
			key_indices.append(key_name_to_idx[KEY_SOLVABLE_REQUIRES])
		if s.n_conflicts > 0:
			key_indices.append(key_name_to_idx[KEY_SOLVABLE_CONFLICTS])

		kt = tuple(key_indices)
		if kt not in schema_map:
			schema_map[kt] = len(schema_map) + 1  # schema IDs start at 1
		solvable_schema_ids.append(schema_map[kt])

	# Solvable schemas first, repo schema last
	all_schemata: list[list[int]] = []
	for kt in sorted(schema_map, key=lambda k: schema_map[k]):
		all_schemata.append(list(kt))
	repo_schema_id = len(all_schemata) + 1
	all_schemata.append([repo_solvables_key_idx])

	nschemata = len(all_schemata) + 1  # +1 for schema 0

	# Phase 6: encode solvable blobs
	solvable_blobs: list[bytes] = []
	for i in range(count):
		s = solvables[i]
		buf = bytearray()
		# schema id
		buf.extend(_encode_id(solvable_schema_ids[i]))
		# name (mapped to new string id)
		buf.extend(_encode_id(str_to_id[s.name]))
		# arch
		buf.extend(_encode_id(str_to_id[s.arch]))
		# evr
		buf.extend(_encode_id(str_to_id[s.evr]))
		# provides (RELIDARRAY: sorted, delta-encoded)
		if s.n_provides > 0:
			prov_ids: list[int] = []
			for j in range(s.n_provides):
				p = s.provides[j]
				if p.evr is not None:
					# versioned provide refers past the string pool into reldep space
					rdx = reldep_map[(str_to_id[p.name], str_to_id[p.evr], REL_EQ)]
					prov_ids.append(nstrings + rdx)
				else:
					prov_ids.append(str_to_id[p.name])
			buf.extend(_encode_rel_idarray(prov_ids))
		# requires (RELIDARRAY: sorted, delta-encoded)
		if s.n_requires > 0:
			req_ids = [str_to_id[s.requires[j]] for j in range(s.n_requires)]
			buf.extend(_encode_rel_idarray(req_ids))
		# conflicts (RELIDARRAY: sorted, delta-encoded)
		if s.n_conflicts > 0:
			conf_ids = [str_to_id[s.conflicts[j]] for j in range(s.n_conflicts)]
			buf.extend(_encode_rel_idarray(conf_ids))
		solvable_blobs.append(bytes(buf))

	# Phase 7: build incore data
	# repo-level: schema_id + nentries + solvable blobs (no length prefix per entry)
	repo_incore = bytearray()
	repo_incore.extend(_encode_id(repo_schema_id))
	repo_incore.extend(_encode_id(count))
	for blob in solvable_blobs:
		repo_incore.extend(blob)

	# maxdata = max size of single solvable entry blob
	maxdata = max((len(b) for b in solvable_blobs), default=0)

	# Phase 8: write file

	# header
	hdr = solv_header_t(
		magic=SOLV_MAGIC,
		version=SOLV_VERSION_8,
		nstrings=nstrings,
		nrels=nrels,
		ndirs=0,
		nsolvables=count,
		nkeys=nkeys,
		nschemata=nschemata,
		flags=SOLV_FLAG_PREFIX_POOL | SOLV_FLAG_SIZE_BYTES,
	)
	fp.write(bytes(hdr))

	# string pool (prefix-encoded)
	sizeid = 0
	for s in final_strings[1:]:
		sizeid += len(s) + 1

	pfbuf = bytearray()
	prev = b''
	for i in range(1, len(final_strings)):
		s = final_strings[i]
		# shared = length of common prefix with the previous string (<= 255)
		shared = 0
		limit = min(len(prev), len(s), 255)
		while shared < limit and prev[shared] == s[shared]:
			shared += 1
		pfbuf.append(shared)
		pfbuf.extend(s[shared:])
		pfbuf.append(0)
		prev = s

	sp_sizes = solv_stringpool_sizes_t(sizeid=sizeid, pfsize=len(pfbuf))
	fp.write(bytes(sp_sizes))
	fp.write(bytes(pfbuf))

	# reldeps
	rdbuf = bytearray()
	for name_id, evr_id, flags in reldeps:
		rdbuf.extend(_encode_id(name_id))
		rdbuf.extend(_encode_id(evr_id))
		rdbuf.append(flags)
	fp.write(bytes(rdbuf))

	# keys (skip key 0)
	kbuf = bytearray()
	for kname, ktype, ksize, kstorage in used_keys:
		kbuf.extend(_encode_id(str_to_id[kname]))
		kbuf.extend(_encode_id(str_to_id[ktype]))
		kbuf.extend(_encode_id(ksize))
		kbuf.extend(_encode_id(kstorage))
	fp.write(bytes(kbuf))

	# schemata: datalen includes 1 for the initial schemadata[0] sentinel
	schemadatalen = 1
	for schema_keys in all_schemata:
		schemadatalen += len(schema_keys) + 1
	schema_blob = bytearray()
	for schema_keys in all_schemata:
		schema_blob.extend(_encode_idarray(schema_keys))
	fp.write(_encode_id(schemadatalen))
	fp.write(bytes(schema_blob))

	# incore data: maxdata + allsize + blob
	fp.write(_encode_id(maxdata))
	fp.write(_encode_id(len(repo_incore)))
	fp.write(bytes(repo_incore))
|
||||
414
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/test_cases.py
Normal file
414
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/test_cases.py
Normal file
@ -0,0 +1,414 @@
|
||||
"""
|
||||
32 corner-case test: generate .solv with libsolv API and our writer,
|
||||
compare binaries byte-for-byte.
|
||||
"""
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
import solv
|
||||
|
||||
from .solv_writer import (
|
||||
provide_input_t,
|
||||
solvable_input_t,
|
||||
write_solv,
|
||||
)
|
||||
|
||||
_HERE = pathlib.Path(__file__).resolve().parent
|
||||
_PROJECT_ROOT = _HERE.parents[8]
|
||||
OUT_DIR = _PROJECT_ROOT / 'tmp' / 'experiments' / 'libsolv' / 'cases'
|
||||
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# type alias for case definition
|
||||
# (name, evr, arch, requires, provides_with_ver, provides_plain, conflicts)
|
||||
CaseDef = list[tuple[str, str, str, list[str], list[tuple[str, str]], list[str], list[str]]]
|
||||
|
||||
|
||||
def define_cases() -> dict[str, CaseDef]:
	"""Return the named corner-case package sets compared byte-for-byte.

	Each case is a list of
	(name, evr, arch, requires, versioned_provides, plain_provides, conflicts)
	tuples; case names are sorted by their numeric prefix.
	"""
	cases: dict[str, CaseDef] = {}

	# 1. single package, no deps
	cases['01_single_nodep'] = [
		('pkg', '1.0-1', 'x86_64', [], [('pkg', '1.0-1')], [], []),
	]

	# 2. single package, arch=any
	cases['02_single_any'] = [
		('pkg', '1.0-1', 'any', [], [('pkg', '1.0-1')], [], []),
	]

	# 3. single with one require
	cases['03_single_1req'] = [
		('a', '1-1', 'x86_64', ['b'], [('a', '1-1')], [], []),
	]

	# 4. two packages, dep chain
	cases['04_two_chain'] = [
		('a', '1-1', 'x86_64', ['b'], [('a', '1-1')], [], []),
		('b', '1-1', 'x86_64', [], [('b', '1-1')], [], []),
	]

	# 5. package with multiple requires
	cases['05_multi_req'] = [
		('a', '1-1', 'x86_64', ['b', 'c', 'd'], [('a', '1-1')], [], []),
	]

	# 6. package with plain provide (no version)
	cases['06_plain_provide'] = [
		('a', '1-1', 'x86_64', [], [('a', '1-1')], ['libfoo'], []),
	]

	# 7. package with multiple provides
	cases['07_multi_provide'] = [
		('a', '1-1', 'x86_64', [], [('a', '1-1')], ['libfoo', 'libbar'], []),
	]

	# 8. package with conflict
	cases['08_conflict'] = [
		('a', '1-1', 'x86_64', [], [('a', '1-1')], [], ['b']),
	]

	# 9. package with req + provide + conflict
	cases['09_all_deps'] = [
		('a', '1-1', 'x86_64', ['c'], [('a', '1-1')], ['libfoo'], ['b']),
	]

	# 10. three package chain
	cases['10_three_chain'] = [
		('a', '2-1', 'x86_64', ['b'], [('a', '2-1')], [], []),
		('b', '3-1', 'x86_64', ['c'], [('b', '3-1')], [], []),
		('c', '1-1', 'x86_64', [], [('c', '1-1')], [], []),
	]

	# 11. diamond dependency
	cases['11_diamond'] = [
		('a', '1-1', 'x86_64', ['b', 'c'], [('a', '1-1')], [], []),
		('b', '1-1', 'x86_64', ['d'], [('b', '1-1')], [], []),
		('c', '1-1', 'x86_64', ['d'], [('c', '1-1')], [], []),
		('d', '1-1', 'x86_64', [], [('d', '1-1')], [], []),
	]

	# 12. same version different arch
	cases['12_diff_arch'] = [
		('pkg', '1-1', 'x86_64', [], [('pkg', '1-1')], [], []),
		('pkg2', '1-1', 'any', [], [('pkg2', '1-1')], [], []),
	]

	# 13. epoch in version
	cases['13_epoch'] = [
		('pkg', '1:2.0-1', 'x86_64', [], [('pkg', '1:2.0-1')], [], []),
	]

	# 14. long package name
	cases['14_long_name'] = [
		('very-long-package-name-for-testing', '1-1', 'x86_64', [], [('very-long-package-name-for-testing', '1-1')], [], []),
	]

	# 15. package with no provides at all
	cases['15_no_provides'] = [
		('pkg', '1-1', 'x86_64', [], [], [], []),
	]

	# 16. only requires, no provides
	cases['16_only_req'] = [
		('pkg', '1-1', 'x86_64', ['dep1', 'dep2'], [], [], []),
	]

	# 17. only conflicts
	cases['17_only_conflict'] = [
		('pkg', '1-1', 'x86_64', [], [], [], ['enemy']),
	]

	# 18. five packages all independent
	cases['18_five_independent'] = [
		('a', '1-1', 'x86_64', [], [('a', '1-1')], [], []),
		('b', '2-1', 'x86_64', [], [('b', '2-1')], [], []),
		('c', '3-1', 'x86_64', [], [('c', '3-1')], [], []),
		('d', '4-1', 'x86_64', [], [('d', '4-1')], [], []),
		('e', '5-1', 'x86_64', [], [('e', '5-1')], [], []),
	]

	# 19. shared dependency
	cases['19_shared_dep'] = [
		('a', '1-1', 'x86_64', ['shared'], [('a', '1-1')], [], []),
		('b', '1-1', 'x86_64', ['shared'], [('b', '1-1')], [], []),
		('shared', '1-1', 'x86_64', [], [('shared', '1-1')], [], []),
	]

	# 20. self-provide only
	cases['20_self_provide'] = [
		('pkg', '1-1', 'x86_64', [], [('pkg', '1-1')], [], []),
	]

	# 21. provide with different name than package
	cases['21_alias_provide'] = [
		('real-name', '1-1', 'x86_64', [], [('real-name', '1-1'), ('alias', '1-1')], [], []),
	]

	# 22. mixed provides (versioned + plain)
	cases['22_mixed_provides'] = [
		('pkg', '1-1', 'x86_64', [], [('pkg', '1-1')], ['libfoo', 'libbar'], []),
	]

	# 23. many requires (8)
	cases['23_many_req'] = [
		('pkg', '1-1', 'x86_64', ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'], [('pkg', '1-1')], [], []),
	]

	# 24. version with dots and underscores
	cases['24_complex_version'] = [
		('pkg', '1.2.3_beta4-1', 'x86_64', [], [('pkg', '1.2.3_beta4-1')], [], []),
	]

	# 25. two packages mutual conflict
	cases['25_mutual_conflict'] = [
		('a', '1-1', 'x86_64', [], [('a', '1-1')], [], ['b']),
		('b', '1-1', 'x86_64', [], [('b', '1-1')], [], ['a']),
	]

	# 26. package requiring itself
	cases['26_self_req'] = [
		('pkg', '1-1', 'x86_64', ['pkg'], [('pkg', '1-1')], [], []),
	]

	# 27. ten packages linear chain
	cases['27_ten_chain'] = [('p%d' % i, '1-1', 'x86_64', ['p%d' % (i + 1)] if i < 9 else [], [('p%d' % i, '1-1')], [], []) for i in range(10)]

	# 28. package with all three dep types
	cases['28_all_three'] = [
		('pkg', '1-1', 'x86_64', ['req1', 'req2'], [('pkg', '1-1')], ['virtual'], ['enemy1', 'enemy2']),
	]

	# 29. identical names different versions (simulating multi-version pool)
	cases['29_multi_version'] = [
		('pkg', '1-1', 'x86_64', [], [('pkg', '1-1')], [], []),
		('pkg2', '2-1', 'x86_64', [], [('pkg2', '2-1')], [], []),
		('pkg3', '3-1', 'x86_64', [], [('pkg3', '3-1')], [], []),
	]

	# 30. package names that share prefix (for prefix encoding)
	cases['30_shared_prefix'] = [
		('libfoo', '1-1', 'x86_64', [], [('libfoo', '1-1')], [], []),
		('libfoo-dev', '1-1', 'x86_64', ['libfoo'], [('libfoo-dev', '1-1')], [], []),
		('libfoo-doc', '1-1', 'x86_64', [], [('libfoo-doc', '1-1')], [], []),
	]

	# 31. empty repo (0 packages)
	cases['31_empty'] = []

	# 32. single package all dep types populated
	cases['32_kitchen_sink'] = [
		(
			'kitchen-sink',
			'99.99-1',
			'x86_64',
			['water', 'drain', 'faucet'],
			[('kitchen-sink', '99.99-1'), ('sink', '99.99-1')],
			['basin', 'tub'],
			['bathtub', 'shower'],
		),
	]

	# 33. 64 packages with deps
	cases['33_scale_64'] = [
		(
			'pkg-%d' % i,
			'%d.0-1' % i,
			'x86_64',
			['pkg-%d' % ((i + 1) % 64)] if i % 3 != 0 else [],
			[('pkg-%d' % i, '%d.0-1' % i)],
			[],
			['pkg-%d' % ((i + 32) % 64)] if i % 7 == 0 else [],
		)
		for i in range(64)
	]

	# 34. 128 packages with mixed deps and provides
	cases['34_scale_128'] = [
		(
			'lib-%d' % i,
			'%d.%d-1' % (i // 10, i % 10),
			'x86_64' if i % 2 == 0 else 'any',
			['lib-%d' % ((i + 1) % 128), 'lib-%d' % ((i + 2) % 128)] if i % 2 == 0 else [],
			[('lib-%d' % i, '%d.%d-1' % (i // 10, i % 10))],
			['virtual-%d' % i] if i % 5 == 0 else [],
			[],
		)
		for i in range(128)
	]

	# 35. 256 packages (crosses varint boundary for string IDs)
	cases['35_scale_256'] = [
		(
			'p%03d' % i,
			'1.%d-1' % i,
			'x86_64',
			['p%03d' % ((i + 1) % 256)] if i < 255 else [],
			[('p%03d' % i, '1.%d-1' % i)],
			[],
			[],
		)
		for i in range(256)
	]

	# 36. 512 packages
	cases['36_scale_512'] = [
		(
			'pkg%04d' % i,
			'%d-1' % (i + 1),
			'x86_64',
			['pkg%04d' % ((i + 1) % 512)] if i % 2 == 0 else [],
			[('pkg%04d' % i, '%d-1' % (i + 1))],
			[],
			[],
		)
		for i in range(512)
	]

	# 37. 1024 packages (tests larger varint IDs)
	cases['37_scale_1024'] = [
		(
			'x%04d' % i,
			'%d.0-1' % i,
			'x86_64',
			['x%04d' % ((i + j) % 1024) for j in range(1, min(i % 4 + 1, 4))],
			[('x%04d' % i, '%d.0-1' % i)],
			[],
			[],
		)
		for i in range(1024)
	]

	return cases
|
||||
|
||||
|
||||
def gen_libsolv(case: CaseDef) -> bytes:
    """Serialize *case* to .solv bytes using the official libsolv bindings.

    This produces the reference output that gen_ours() (our ctypes writer)
    is binary-compared against.
    """
    pool = solv.Pool()
    pool.setdisttype(solv.Pool.DISTTYPE_ARCH)
    pool.setarch('x86_64')
    repo = pool.add_repo('test')

    # Each case entry: (name, evr, arch, requires, versioned provides,
    # plain provides, conflicts).
    for name, evr, arch, reqs, vprovs, pprovs, confs in case:
        s = repo.add_solvable()
        s.name = name
        s.evr = evr
        s.arch = arch
        for r in reqs:
            s.add_deparray(solv.SOLVABLE_REQUIRES, pool.Dep(r))
        for pn, pv in vprovs:
            # versioned provide: "name = evr"
            dep = pool.rel2id(pool.str2id(pn), pool.str2id(pv), solv.REL_EQ)
            s.add_deparray(solv.SOLVABLE_PROVIDES, dep)
        for pp in pprovs:
            # plain (unversioned) provide
            s.add_deparray(solv.SOLVABLE_PROVIDES, pool.str2id(pp))
        for c in confs:
            s.add_deparray(solv.SOLVABLE_CONFLICTS, pool.Dep(c))

    # Flush per-solvable data into the repo's packed form before writing.
    repo.internalize()

    path = OUT_DIR / '_tmp_ref.solv'
    f = solv.xfopen(str(path), 'w')
    repo.write(f)
    f.close()

    with open(path, 'rb') as fh:
        return fh.read()
|
||||
|
||||
|
||||
def gen_ours(case: CaseDef) -> bytes:
    """Serialize *case* to .solv bytes using our ctypes-based writer.

    Mirrors gen_libsolv(): requires, versioned provides, plain provides and
    conflicts are copied into solvable_input_t records, written with
    write_solv(), and the resulting file is read back for comparison.
    """

    def _dump(arr: object, count: int) -> bytes:
        # Shared tail (was duplicated for the empty-case branch): stream the
        # array to disk, then read the bytes back.
        path = OUT_DIR / '_tmp_ours.solv'
        with open(path, 'wb') as fp:
            write_solv(arr, count, fp)
        with open(path, 'rb') as fh:
            return fh.read()

    n = len(case)
    if n == 0:
        # Avoid a zero-length ctypes array: allocate a single dummy element
        # and pass the real count (0) to write_solv().
        return _dump((solvable_input_t * 1)(), 0)

    arr = (solvable_input_t * n)()
    for i, (name, evr, arch, reqs, vprovs, pprovs, confs) in enumerate(case):
        s = arr[i]
        s.name = name.encode()
        s.evr = evr.encode()
        s.arch = arch.encode()

        s.n_requires = len(reqs)
        for j, r in enumerate(reqs):
            s.requires[j] = r.encode()

        # Versioned provides keep their evr; plain provides use evr=None.
        all_provs: list[tuple[bytes, bytes | None]] = []
        for pn, pv in vprovs:
            all_provs.append((pn.encode(), pv.encode()))
        for pp in pprovs:
            all_provs.append((pp.encode(), None))
        s.n_provides = len(all_provs)
        for j, (pn, pv) in enumerate(all_provs):
            s.provides[j].name = pn
            s.provides[j].evr = pv

        s.n_conflicts = len(confs)
        for j, c in enumerate(confs):
            s.conflicts[j] = c.encode()

    return _dump(arr, n)
|
||||
|
||||
|
||||
def compare(name: str, ref: bytes, ours: bytes) -> bool:
    """Return True when *ref* and *ours* are byte-identical.

    On mismatch, print a summary line plus up to five differing 16-byte
    rows side by side, then return False.
    """
    if ref == ours:
        return True

    print(' MISMATCH %s: ref=%d ours=%d bytes' % (name, len(ref), len(ours)))
    total = max(len(ref), len(ours))
    printed = 0
    offset = 0
    while offset < total:
        left = ref[offset : offset + 16]
        right = ours[offset : offset + 16]
        if left != right:
            left_hex = ' '.join('%02x' % b for b in left).ljust(48)
            right_hex = ' '.join('%02x' % b for b in right).ljust(48)
            print(' %04x %s | %s' % (offset, left_hex, right_hex))
            printed += 1
            if printed >= 5:
                # count the remaining differing rows without printing them
                remaining = sum(1 for j in range(offset + 16, total, 16) if ref[j : j + 16] != ours[j : j + 16])
                print(' ... (%d more differing rows)' % (remaining))
                break
        offset += 16
    return False
|
||||
|
||||
|
||||
def main() -> None:
    """Run every case through both writers, binary-compare, report, exit.

    Exit status is 1 when any case failed (exception or byte mismatch).
    """
    cases = define_cases()
    errors: list[str] = []
    passed = 0
    failed = 0

    for name in sorted(cases):
        case = cases[name]
        try:
            ref = gen_libsolv(case)
            ours = gen_ours(case)
        except Exception as e:
            # generation itself blew up; count as failure and move on
            print('FAIL %s: %s' % (name, e))
            failed += 1
            errors.append(name)
            continue

        if not compare(name, ref, ours):
            failed += 1
            errors.append(name)
        else:
            print('OK %s' % name)
            passed += 1

    print()
    print('%d passed, %d failed' % (passed, failed))
    if errors:
        print('failed: %s' % ', '.join(errors))

    sys.exit(1 if failed > 0 else 0)
|
||||
141
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/test_roundtrip.py
Normal file
141
python/online/fxreader/pr34/commands_typed/archlinux/tests/experiments/libsolv/test_roundtrip.py
Normal file
@ -0,0 +1,141 @@
|
||||
"""
|
||||
Test: generate .solv with both libsolv API and our ctypes writer,
|
||||
then binary-compare byte-by-byte.
|
||||
"""
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
import solv
|
||||
|
||||
from .solv_writer import (
|
||||
provide_input_t,
|
||||
solvable_input_t,
|
||||
write_solv,
|
||||
)
|
||||
|
||||
# Resolve the project root relative to this file so the experiment can run
# from any cwd. NOTE(review): parents[8] hard-codes the package depth —
# moving this file breaks the path; confirm if the tree is restructured.
_HERE = pathlib.Path(__file__).resolve().parent
_PROJECT_ROOT = _HERE.parents[8]
OUT_DIR = _PROJECT_ROOT / 'tmp' / 'experiments' / 'libsolv'
# Import-time side effect: ensure the scratch directory exists.
OUT_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Fixed package set for the round-trip test.
# Tuple layout: (name, evr, arch, requires, provides), where provides is a
# list of (name, evr-or-None) — None marks an unversioned provide.
PKGS = [
    ('bash', '5.2.26-2', 'x86_64', ['glibc', 'readline'], [('bash', '5.2.26-2')]),
    ('readline', '8.2-1', 'x86_64', ['glibc', 'ncurses'], [('readline', '8.2-1'), ('libreadline', None)]),
    ('glibc', '2.39-1', 'x86_64', ['filesystem'], [('glibc', '2.39-1')]),
    ('ncurses', '6.4-1', 'x86_64', ['glibc'], [('ncurses', '6.4-1')]),
    ('filesystem', '2024.01-1', 'any', [], [('filesystem', '2024.01-1')]),
]
|
||||
|
||||
|
||||
def gen_reference() -> bytes:
    """Build a .solv file for PKGS with the official libsolv bindings.

    Serves as the byte-exact reference that gen_ours() must reproduce.
    """
    pool = solv.Pool()
    pool.setdisttype(solv.Pool.DISTTYPE_ARCH)
    pool.setarch('x86_64')
    repo = pool.add_repo('test')

    for name, evr, arch, reqs, provs in PKGS:
        s = repo.add_solvable()
        s.name = name
        s.evr = evr
        s.arch = arch
        for r in reqs:
            s.add_deparray(solv.SOLVABLE_REQUIRES, pool.Dep(r))
        for pn, pv in provs:
            if pv is not None:
                # versioned provide: "name = evr"
                dep = pool.rel2id(pool.str2id(pn), pool.str2id(pv), solv.REL_EQ)
            else:
                # unversioned provide
                dep = pool.str2id(pn)
            s.add_deparray(solv.SOLVABLE_PROVIDES, dep)

    # Flush per-solvable data into the repo's packed form before writing.
    repo.internalize()

    ref_path = OUT_DIR / 'ref.solv'
    f = solv.xfopen(str(ref_path), 'w')
    repo.write(f)
    f.close()

    with open(ref_path, 'rb') as fh:
        return fh.read()
|
||||
|
||||
|
||||
def gen_ours() -> bytes:
    """Build the same .solv content as gen_reference() via our ctypes writer."""

    def mk(
        name: bytes,
        evr: bytes,
        arch: bytes,
        reqs: list[bytes],
        provs: list[tuple[bytes, bytes | None]],
    ) -> solvable_input_t:
        # Populate one fixed-size solvable_input_t record; evr=None in
        # *provs* marks an unversioned provide.
        s = solvable_input_t()
        s.name = name
        s.evr = evr
        s.arch = arch
        s.n_requires = len(reqs)
        for i, r in enumerate(reqs):
            s.requires[i] = r
        s.n_provides = len(provs)
        for i, (pn, pv) in enumerate(provs):
            s.provides[i].name = pn
            s.provides[i].evr = pv
        s.n_conflicts = 0
        return s

    # Must mirror PKGS exactly (same packages, same order) for the
    # byte-for-byte comparison to be meaningful.
    arr = (solvable_input_t * 5)(
        mk(b'bash', b'5.2.26-2', b'x86_64', [b'glibc', b'readline'], [(b'bash', b'5.2.26-2')]),
        mk(b'readline', b'8.2-1', b'x86_64', [b'glibc', b'ncurses'], [(b'readline', b'8.2-1'), (b'libreadline', None)]),
        mk(b'glibc', b'2.39-1', b'x86_64', [b'filesystem'], [(b'glibc', b'2.39-1')]),
        mk(b'ncurses', b'6.4-1', b'x86_64', [b'glibc'], [(b'ncurses', b'6.4-1')]),
        mk(b'filesystem', b'2024.01-1', b'any', [], [(b'filesystem', b'2024.01-1')]),
    )

    ours_path = OUT_DIR / 'ours.solv'
    with open(ours_path, 'wb') as fp:
        write_solv(arr, 5, fp)

    with open(ours_path, 'rb') as fh:
        return fh.read()
|
||||
|
||||
|
||||
def hex_dump_compare(ref: bytes, ours: bytes) -> int:
|
||||
maxl = max(len(ref), len(ours))
|
||||
errors = 0
|
||||
for i in range(0, maxl, 16):
|
||||
rc = ref[i : i + 16]
|
||||
oc = ours[i : i + 16]
|
||||
rh = ' '.join('%02x' % b for b in rc).ljust(48)
|
||||
oh = ' '.join('%02x' % b for b in oc).ljust(48)
|
||||
ra = ''.join(chr(b) if 32 <= b < 127 else '.' for b in rc).ljust(16)
|
||||
oa = ''.join(chr(b) if 32 <= b < 127 else '.' for b in oc).ljust(16)
|
||||
if rc != oc:
|
||||
marker = '<<'
|
||||
errors += 1
|
||||
else:
|
||||
marker = ' '
|
||||
print('%04x %s %s | %s %s %s' % (i, rh, ra, oh, oa, marker))
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
def main() -> None:
    """Compare reference and ctypes-writer outputs; exit 1 on any mismatch."""
    ref = gen_reference()
    ours = gen_ours()

    print('ref: %d bytes ours: %d bytes' % (len(ref), len(ours)))
    print()

    errors = hex_dump_compare(ref, ours)

    print()
    size_mismatch = len(ref) != len(ours)
    if size_mismatch:
        print('FAIL: size mismatch ref=%d ours=%d' % (len(ref), len(ours)))
    if errors > 0:
        print('FAIL: %d differing 16-byte rows' % errors)
    elif not size_mismatch:
        # same length and no differing rows: byte-identical
        print('OK: binary identical')

    sys.exit(1 if errors > 0 or size_mismatch else 0)
|
||||
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2025_03_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2025_03_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2025_09_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2025_09_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2026_03_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/core_2026_03_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2025_03_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2025_03_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2025_09_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2025_09_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2026_03_15.db
(Stored with Git LFS)
Normal file
BIN
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/extra_2026_03_15.db
(Stored with Git LFS)
Normal file
Binary file not shown.
195
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/installed_packages.txt
Normal file
195
python/online/fxreader/pr34/commands_typed/archlinux/tests/res/installed_packages.txt
Normal file
@ -0,0 +1,195 @@
|
||||
acl 2.3.2-1
|
||||
archlinux-keyring 20260301-1
|
||||
attr 2.5.2-1
|
||||
audit 4.1.3-1
|
||||
autoconf 2.72-1
|
||||
automake 1.18.1-1
|
||||
base 3-3
|
||||
base-devel 1-2
|
||||
bash 5.3.9-1
|
||||
binutils 2.46-1
|
||||
bison 3.8.2-8
|
||||
brotli 1.2.0-1
|
||||
bzip2 1.0.8-6
|
||||
c-ares 1.34.6-1
|
||||
ca-certificates 20240618-1
|
||||
ca-certificates-mozilla 3.121-1
|
||||
ca-certificates-utils 20240618-1
|
||||
cmake 4.2.3-1
|
||||
coreutils 9.10-1
|
||||
cppdap 1.58.0-2
|
||||
cryptsetup 2.8.4-1
|
||||
curl 8.18.0-3
|
||||
db5.3 5.3.28-7
|
||||
dbus 1.16.2-1
|
||||
dbus-broker 37-3
|
||||
dbus-broker-units 37-3
|
||||
dbus-units 37-3
|
||||
debugedit 5.2-1
|
||||
device-mapper 2.03.38-1
|
||||
diffutils 3.12-2
|
||||
e2fsprogs 1.47.3-2
|
||||
expat 2.7.4-1
|
||||
fakeroot 1.37.2-1
|
||||
file 5.47-1
|
||||
filesystem 2025.10.12-1
|
||||
findutils 4.10.0-3
|
||||
flex 2.6.4-5
|
||||
gawk 5.4.0-1
|
||||
gc 8.2.12-1
|
||||
gcc 15.2.1+r604+g0b99615a8aef-1
|
||||
gcc-libs 15.2.1+r604+g0b99615a8aef-1
|
||||
gdbm 1.26-2
|
||||
gettext 1.0-2
|
||||
git 2.53.0-1
|
||||
git-lfs 3.7.1-1
|
||||
glib2 2.86.4-1
|
||||
glibc 2.43+r5+g856c426a7534-1
|
||||
gmp 6.3.0-3
|
||||
gnulib-l10n 20241231-1
|
||||
gnupg 2.4.9-1
|
||||
gnutls 3.8.12-2
|
||||
gpgme 2.0.1-3
|
||||
gpm 1.20.7.r38.ge82d1a6-6
|
||||
grep 3.12-2
|
||||
groff 1.24.0-2
|
||||
guile 3.0.11-1
|
||||
gzip 1.14-2
|
||||
hicolor-icon-theme 0.18-1
|
||||
htop 3.4.1-1
|
||||
hwdata 0.405-1
|
||||
iana-etc 20260225-2
|
||||
icu 78.2-2
|
||||
iproute2 6.19.0-2
|
||||
iptables 1:1.8.11-2
|
||||
iputils 20250605-1
|
||||
jansson 2.15.0-1
|
||||
json-c 0.18-2
|
||||
jsoncpp 1.9.6-3
|
||||
kbd 2.9.0-1
|
||||
keychain 2.9.8-1
|
||||
keyutils 1.6.3-3
|
||||
kmod 34.2-1
|
||||
krb5 1.21.3-2
|
||||
leancrypto 1.6.0-1
|
||||
less 1:692-1
|
||||
libarchive 3.8.5-1
|
||||
libasan 15.2.1+r604+g0b99615a8aef-1
|
||||
libassuan 3.0.0-1
|
||||
libatomic 15.2.1+r604+g0b99615a8aef-1
|
||||
libbpf 1.6.2-1
|
||||
libcap 2.77-1
|
||||
libcap-ng 0.9-1
|
||||
libedit 20251016_3.1-1
|
||||
libelf 0.194-2
|
||||
libevent 2.1.12-5
|
||||
libffi 3.5.2-1
|
||||
libgcc 15.2.1+r604+g0b99615a8aef-1
|
||||
libgcrypt 1.12.1-1
|
||||
libgfortran 15.2.1+r604+g0b99615a8aef-1
|
||||
libgomp 15.2.1+r604+g0b99615a8aef-1
|
||||
libgpg-error 1.59-1
|
||||
libidn2 2.3.8-1
|
||||
libisl 0.27-1
|
||||
libksba 1.6.8-1
|
||||
libldap 2.6.12-1
|
||||
liblsan 15.2.1+r604+g0b99615a8aef-1
|
||||
libmakepkg-dropins 20-1
|
||||
libmnl 1.0.5-2
|
||||
libmpc 1.3.1-2
|
||||
libnetfilter_conntrack 1.0.9-2
|
||||
libnfnetlink 1.0.2-2
|
||||
libnftnl 1.3.1-1
|
||||
libnghttp2 1.68.0-1
|
||||
libnghttp3 1.15.0-1
|
||||
libngtcp2 1.21.0-1
|
||||
libnl 3.12.0-1
|
||||
libnsl 2.0.1-1
|
||||
libobjc 15.2.1+r604+g0b99615a8aef-1
|
||||
libp11-kit 0.26.2-1
|
||||
libpcap 1.10.6-1
|
||||
libpsl 0.21.5-2
|
||||
libquadmath 15.2.1+r604+g0b99615a8aef-1
|
||||
libsasl 2.1.28-5
|
||||
libseccomp 2.6.0-1
|
||||
libsecret 0.21.7-1
|
||||
libssh2 1.11.1-1
|
||||
libstdc++ 15.2.1+r604+g0b99615a8aef-1
|
||||
libsysprof-capture 49.0-2
|
||||
libtasn1 4.21.0-1
|
||||
libtirpc 1.3.7-1
|
||||
libtool 2.6.0-4
|
||||
libtsan 15.2.1+r604+g0b99615a8aef-1
|
||||
libubsan 15.2.1+r604+g0b99615a8aef-1
|
||||
libunistring 1.4.1-1
|
||||
libusb 1.0.29-1
|
||||
libutempter 1.2.3-1
|
||||
libuv 1.52.1-1
|
||||
libverto 0.3.2-5
|
||||
libxcrypt 4.5.2-1
|
||||
libxml2 2.15.2-1
|
||||
licenses 20240728-1
|
||||
linux-api-headers 6.19-1
|
||||
lmdb 0.9.34-1
|
||||
lz4 1:1.10.0-2
|
||||
m4 1.4.21-1
|
||||
make 4.4.1-2
|
||||
mpdecimal 4.0.1-1
|
||||
mpfr 4.2.2-1
|
||||
ncurses 6.6-1
|
||||
nettle 3.10.2-1
|
||||
nload 0.7.4-9
|
||||
node-gyp 12.2.0-1
|
||||
nodejs 25.7.0-2
|
||||
nodejs-nopt 8.1.0-1
|
||||
npm 11.11.1-1
|
||||
npth 1.8-1
|
||||
openssh 10.2p1-2
|
||||
openssl 3.6.1-1
|
||||
p11-kit 0.26.2-1
|
||||
pacman 7.1.0.r9.g54d9411-1
|
||||
pacman-mirrorlist 20260213-1
|
||||
pam 1.7.2-2
|
||||
pambase 20250719-1
|
||||
patch 2.8-1
|
||||
pciutils 3.14.0-1
|
||||
pcre2 10.47-1
|
||||
perl 5.42.0-1
|
||||
perl-error 0.17030-3
|
||||
perl-mailtools 2.22-3
|
||||
perl-timedate 2.34-1
|
||||
pinentry 1.3.2-2
|
||||
pkgconf 2.5.1-1
|
||||
popt 1.19-2
|
||||
procps-ng 4.0.6-1
|
||||
psmisc 23.7-1
|
||||
python 3.14.3-1
|
||||
readline 8.3.003-1
|
||||
rhash 1.4.6-1
|
||||
sed 4.9-3
|
||||
semver 7.7.4-1
|
||||
shadow 4.18.0-1
|
||||
simdjson 1:4.4.0-1
|
||||
sqlite 3.51.2-1
|
||||
sudo 1.9.17.p2-2
|
||||
systemd 259.3-1
|
||||
systemd-libs 259.3-1
|
||||
systemd-sysvcompat 259.3-1
|
||||
tar 1.35-2
|
||||
texinfo 7.2-1
|
||||
tmux 3.6_a-1
|
||||
tpm2-tss 4.1.3-1
|
||||
tzdata 2026a-1
|
||||
util-linux 2.41.3-2
|
||||
util-linux-libs 2.41.3-2
|
||||
uv 0.10.10-1
|
||||
vim 9.2.0081-1
|
||||
vim-runtime 9.2.0081-1
|
||||
which 2.23-1
|
||||
xxhash 0.8.3-1
|
||||
xz 5.8.2-1
|
||||
yarn 1.22.22-2
|
||||
zlib 1:1.3.2-2
|
||||
zlib-ng 2.3.3-1
|
||||
zsh 5.9-5
|
||||
zstd 1.5.7-3
|
||||
@ -0,0 +1,420 @@
|
||||
import io
|
||||
import pathlib
|
||||
import tarfile
|
||||
import tempfile
|
||||
import unittest
|
||||
import unittest.mock
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from ..cli.download import (
|
||||
parse_rate_t,
|
||||
downloader_t,
|
||||
download_requirements_t,
|
||||
)
|
||||
from ..resolver.solv import (
|
||||
solv_pool_t,
|
||||
)
|
||||
from ..resolver.solv_types import (
|
||||
repo_store_t,
|
||||
solv_index_t,
|
||||
solv_package_t,
|
||||
)
|
||||
|
||||
|
||||
class TestParseRate(unittest.TestCase):
    """parse_rate_t.parse: human-readable rate strings to bytes per second."""

    def _check(self, text: str, expected: int) -> None:
        # All value tests funnel through one helper for a uniform call shape.
        self.assertEqual(parse_rate_t.parse(text), expected)

    def test_bytes(self) -> None:
        self._check('100', 100)

    def test_bytes_b(self) -> None:
        self._check('100b', 100)

    def test_bytes_B(self) -> None:
        self._check('100B', 100)

    def test_kib(self) -> None:
        self._check('128K', 128 * 1024)

    def test_kib_lower(self) -> None:
        self._check('128k', 128 * 1024)

    def test_kib_full(self) -> None:
        self._check('128KiB/s', 128 * 1024)

    def test_kib_kb(self) -> None:
        # 'KB' is treated as binary (1024), same as 'K'/'KiB'
        self._check('128KB', 128 * 1024)

    def test_mib(self) -> None:
        self._check('1M', 1024 * 1024)

    def test_mib_full(self) -> None:
        self._check('1MiB/s', 1024 * 1024)

    def test_gib(self) -> None:
        self._check('1G', 1024**3)

    def test_tib(self) -> None:
        self._check('1T', 1024**4)

    def test_pib(self) -> None:
        self._check('1P', 1024**5)

    def test_float_value(self) -> None:
        self._check('1.5M', int(1.5 * 1024 * 1024))

    def test_float_kib(self) -> None:
        self._check('0.5K', 512)

    def test_whitespace(self) -> None:
        # surrounding whitespace is tolerated
        self._check(' 128K ', 128 * 1024)

    def test_invalid_raises(self) -> None:
        with self.assertRaises(ValueError):
            parse_rate_t.parse('abc')

    def test_empty_raises(self) -> None:
        with self.assertRaises(ValueError):
            parse_rate_t.parse('')

    def test_default_128kib(self) -> None:
        # same expression as test_kib_full; kept as an explicit guard for
        # the documented default rate of 128 KiB/s
        self._check('128KiB/s', 128 * 1024)
|
||||
|
||||
|
||||
class TestDownloadRequirementsParse(unittest.TestCase):
    """download_requirements_t.parse_requirements: requirements text to (url, filename) entries.

    Each entry's URL comes from the '# https://...' comment preceding a
    requirement line; the filename is the URL's last path component.
    """

    def test_simple(self) -> None:
        txt = '# https://example.com/core/bash-5.2-1-x86_64.pkg.tar.zst\nbash==5.2-1 --hash=sha256:abc123\n'
        entries = download_requirements_t.parse_requirements(txt)
        self.assertEqual(len(entries), 1)
        self.assertEqual(entries[0][0], 'https://example.com/core/bash-5.2-1-x86_64.pkg.tar.zst')
        self.assertEqual(entries[0][1], 'bash-5.2-1-x86_64.pkg.tar.zst')

    def test_multiple(self) -> None:
        # two URL-comment/requirement pairs -> two entries, order preserved
        txt = (
            '# https://example.com/core/bash-5.2-1-x86_64.pkg.tar.zst\nbash==5.2-1\n# https://example.com/core/glibc-2.38-1-x86_64.pkg.tar.zst\nglibc==2.38-1\n'
        )
        entries = download_requirements_t.parse_requirements(txt)
        self.assertEqual(len(entries), 2)
        self.assertEqual(entries[0][1], 'bash-5.2-1-x86_64.pkg.tar.zst')
        self.assertEqual(entries[1][1], 'glibc-2.38-1-x86_64.pkg.tar.zst')

    def test_no_url_skipped(self) -> None:
        # a requirement line without a preceding URL comment yields no entry
        txt = 'bash==5.2-1\n'
        entries = download_requirements_t.parse_requirements(txt)
        self.assertEqual(len(entries), 0)

    def test_comment_without_url_ignored(self) -> None:
        # a non-URL comment before the URL comment does not break pairing
        txt = '# just a comment\n# https://example.com/core/bash-5.2-1-x86_64.pkg.tar.zst\nbash==5.2-1\n'
        entries = download_requirements_t.parse_requirements(txt)
        self.assertEqual(len(entries), 1)

    def test_empty_input(self) -> None:
        entries = download_requirements_t.parse_requirements('')
        self.assertEqual(len(entries), 0)

    def test_blank_lines_ignored(self) -> None:
        # blank lines between the URL comment and the requirement are tolerated
        txt = '\n\n# https://example.com/bash.pkg\n\nbash==5.2-1\n\n'
        entries = download_requirements_t.parse_requirements(txt)
        self.assertEqual(len(entries), 1)
|
||||
|
||||
|
||||
class TestDownloader(unittest.TestCase):
    """downloader_t.download dispatches to the selected backend (urllib/curl/aria2c)."""

    @unittest.mock.patch('urllib.request.urlretrieve')
    def test_urllib_backend(self, mock_urlretrieve: unittest.mock.MagicMock) -> None:
        # urllib backend delegates straight to urlretrieve; limit_rate is
        # accepted but not forwarded (urlretrieve has no throttling).
        with tempfile.TemporaryDirectory() as tmpdir:
            dest = pathlib.Path(tmpdir) / 'test.pkg'
            downloader_t.download(
                url='https://example.com/test.pkg',
                dest=dest,
                backend=downloader_t.constants_t.backend_t.urllib,
                limit_rate=128 * 1024,
            )
            mock_urlretrieve.assert_called_once_with('https://example.com/test.pkg', str(dest))

    @unittest.mock.patch('subprocess.check_call')
    def test_curl_backend(self, mock_check_call: unittest.mock.MagicMock) -> None:
        # curl backend shells out with --limit-rate carrying the byte rate
        with tempfile.TemporaryDirectory() as tmpdir:
            dest = pathlib.Path(tmpdir) / 'test.pkg'
            downloader_t.download(
                url='https://example.com/test.pkg',
                dest=dest,
                backend=downloader_t.constants_t.backend_t.curl,
                limit_rate=128 * 1024,
            )
            # first positional arg of check_call is the argv list
            cmd = mock_check_call.call_args[0][0]
            self.assertEqual(cmd[0], 'curl')
            self.assertIn('--limit-rate', cmd)
            self.assertIn(str(128 * 1024), cmd)

    @unittest.mock.patch('subprocess.check_call')
    def test_aria2c_backend(self, mock_check_call: unittest.mock.MagicMock) -> None:
        # aria2c backend passes the rate via --max-download-limit=<bytes>
        with tempfile.TemporaryDirectory() as tmpdir:
            dest = pathlib.Path(tmpdir) / 'test.pkg'
            downloader_t.download(
                url='https://example.com/test.pkg',
                dest=dest,
                backend=downloader_t.constants_t.backend_t.aria2c,
                limit_rate=1024 * 1024,
            )
            cmd = mock_check_call.call_args[0][0]
            self.assertEqual(cmd[0], 'aria2c')
            self.assertIn('--max-download-limit=%d' % (1024 * 1024), cmd)
|
||||
|
||||
|
||||
class TestGroupExpansion(unittest.TestCase):
    """solv_pool_t.expand_groups: pacman group names expand to their member packages."""

    def test_expand_group(self) -> None:
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='autoconf', version='2.72-1', arch='any', groups=['base-devel']))
        idx.add(solv_package_t(name='automake', version='1.17-1', arch='any', groups=['base-devel']))
        idx.add(solv_package_t(name='gcc', version='14.1-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='bash', version='5.2-1', arch='x86_64'))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])

        # only the group's members are returned; unrelated packages are not
        expanded = pool.expand_groups(['base-devel'])
        self.assertIn('autoconf', expanded)
        self.assertIn('automake', expanded)
        self.assertIn('gcc', expanded)
        self.assertNotIn('bash', expanded)

    def test_non_group_passthrough(self) -> None:
        # a plain package name that is not a group passes through unchanged
        idx = solv_index_t(name='core')
        idx.add(solv_package_t(name='bash', version='5.2-1', arch='x86_64'))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])

        expanded = pool.expand_groups(['bash'])
        self.assertEqual(expanded, ['bash'])

    def test_mixed_groups_and_packages(self) -> None:
        # groups and plain names may be mixed in one request
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='gcc', version='14.1-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='make', version='4.4-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='bash', version='5.2-1', arch='x86_64'))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])

        expanded = pool.expand_groups(['bash', 'base-devel'])
        self.assertIn('bash', expanded)
        self.assertIn('gcc', expanded)
        self.assertIn('make', expanded)

    def test_resolve_with_group(self) -> None:
        # resolve() with expand_groups=True pulls in all group members
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='gcc', version='14.1-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='make', version='4.4-1', arch='x86_64', groups=['base-devel']))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])

        result = pool.resolve(['base-devel'], expand_groups=True)
        self.assertIn('gcc', result.resolved)
        self.assertIn('make', result.resolved)
        self.assertEqual(len(result.problems), 0)
|
||||
|
||||
|
||||
class TestNoGroupBleedThrough(unittest.TestCase):
    """Verify that resolving a package does NOT pull in unrelated group members.

    Regression: when group members were also providing the same dep names,
    libsolv used to pick the heavy group route instead of the direct package.
    """

    def test_direct_package_preferred_over_group(self) -> None:
        idx = solv_index_t(name='extra')
        # direct minimal package satisfying 'foo'
        idx.add(solv_package_t(name='foo', version='1.0-1', arch='x86_64'))
        # heavy group members that also "provide" foo via groups
        idx.add(solv_package_t(name='mega1', version='1.0-1', arch='x86_64', groups=['mega']))
        idx.add(solv_package_t(name='mega2', version='1.0-1', arch='x86_64', groups=['mega']))
        idx.add(solv_package_t(name='mega3', version='1.0-1', arch='x86_64', groups=['mega']))
        # user package wants foo
        idx.add(solv_package_t(name='app', version='1.0-1', arch='x86_64', depends=['foo']))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])
        result = pool.resolve(['app'])

        # only the direct dependency chain is resolved; no group members
        self.assertIn('app', result.resolved)
        self.assertIn('foo', result.resolved)
        self.assertNotIn('mega1', result.resolved)
        self.assertNotIn('mega2', result.resolved)
        self.assertNotIn('mega3', result.resolved)

    def test_group_not_expanded_by_default(self) -> None:
        """Asking for a group name without expand_groups must NOT pull members."""
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='gcc', version='14.1-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='make', version='4.4-1', arch='x86_64', groups=['base-devel']))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])
        # default expand_groups=False — 'base-devel' is not a real package
        result = pool.resolve(['base-devel'])
        # nothing should be resolved; it should report a problem
        self.assertGreater(len(result.problems), 0)
        self.assertNotIn('gcc', result.resolved)
        self.assertNotIn('make', result.resolved)

    def test_group_expanded_when_requested(self) -> None:
        """With expand_groups=True, group members ARE pulled in (opt-in)."""
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='gcc', version='14.1-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='make', version='4.4-1', arch='x86_64', groups=['base-devel']))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])
        result = pool.resolve(['base-devel'], expand_groups=True)
        self.assertIn('gcc', result.resolved)
        self.assertIn('make', result.resolved)

    def test_provides_not_used_for_name_match(self) -> None:
        """A package that 'provides foo' must NOT be selected when user asks 'foo'
        if there is also a real package named 'foo'."""
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='foo', version='1.0-1', arch='x86_64'))
        # a heavy package that also provides 'foo' via provides field
        idx.add(
            solv_package_t(
                name='heavy',
                version='1.0-1',
                arch='x86_64',
                provides=['foo=1.0-1'],
                depends=['extra-bloat'],
            )
        )
        idx.add(solv_package_t(name='extra-bloat', version='1.0-1', arch='x86_64'))
        idx.build_provides_index()

        pool = solv_pool_t(stores=[repo_store_t(index=idx)])
        result = pool.resolve(['foo'])

        # the real 'foo' wins; the provider and its deps stay out
        self.assertIn('foo', result.resolved)
        self.assertNotIn('heavy', result.resolved)
        self.assertNotIn('extra-bloat', result.resolved)
|
||||
|
||||
|
||||
class TestParseReference(unittest.TestCase):
    """solv_pool_t.parse_reference: requirements-style text to a {name: version} map."""

    def test_simple(self) -> None:
        text = (
            '# https://example.com/core/bash-5.2-1-x86_64.pkg.tar.zst\n'
            'bash==5.2-1 --hash=sha256:abc\n'
            '# https://example.com/core/glibc-2.38-1-x86_64.pkg.tar.zst\n'
            'glibc==2.38-1\n'
        )
        parsed = solv_pool_t.parse_reference(text)
        self.assertEqual(parsed, {'bash': '5.2-1', 'glibc': '2.38-1'})

    def test_empty(self) -> None:
        self.assertEqual(solv_pool_t.parse_reference(''), {})

    def test_comments_only(self) -> None:
        # comment-only input pins nothing
        parsed = solv_pool_t.parse_reference('# just a comment\n# another\n')
        self.assertEqual(parsed, {})

    def test_no_version_skipped(self) -> None:
        # lines without '==' carry no pin and are dropped
        parsed = solv_pool_t.parse_reference('bash\nglibc==2.38-1\n')
        self.assertEqual(parsed, {'glibc': '2.38-1'})
|
||||
|
||||
|
||||
class TestPinReferenced(unittest.TestCase):
|
||||
def _make_pool(self, pinned: Optional[dict[str, str]] = None) -> tuple[solv_pool_t, solv_index_t]:
|
||||
idx = solv_index_t(name='core')
|
||||
idx.add(solv_package_t(name='bash', version='5.3-1', arch='x86_64'))
|
||||
idx.add(solv_package_t(name='glibc', version='2.40-1', arch='x86_64'))
|
||||
idx.add(solv_package_t(name='openssl', version='3.3-1', arch='x86_64'))
|
||||
idx.add(solv_package_t(name='zlib', version='1.3.1-1', arch='x86_64'))
|
||||
idx.build_provides_index()
|
||||
|
||||
pool = solv_pool_t()
|
||||
pool.add_store(repo_store_t(index=idx))
|
||||
if pinned is not None:
|
||||
pool.add_pinned(pinned)
|
||||
pool.finalize()
|
||||
return pool, idx
|
||||
|
||||
def test_pin_keeps_old_versions(self) -> None:
|
||||
pinned = {'bash': '5.2-1', 'glibc': '2.38-1', 'openssl': '3.2-1'}
|
||||
pool, idx = self._make_pool(pinned=pinned)
|
||||
|
||||
result = pool.resolve(
|
||||
['bash', 'glibc', 'openssl'],
|
||||
pinned=pinned,
|
||||
)
|
||||
|
||||
self.assertEqual(len(result.problems), 0)
|
||||
self.assertIn('bash', result.resolved)
|
||||
self.assertIn('glibc', result.resolved)
|
||||
self.assertIn('openssl', result.resolved)
|
||||
self.assertEqual(result.resolved['bash'].evr, '5.2-1')
|
||||
self.assertEqual(result.resolved['glibc'].evr, '2.38-1')
|
||||
self.assertEqual(result.resolved['openssl'].evr, '3.2-1')
|
||||
|
||||
def test_upgrade_specific_package(self) -> None:
|
||||
pinned = {'bash': '5.2-1', 'glibc': '2.38-1', 'openssl': '3.2-1'}
|
||||
pool, idx = self._make_pool(pinned=pinned)
|
||||
|
||||
result = pool.resolve(
|
||||
['bash', 'glibc', 'openssl'],
|
||||
pinned=pinned,
|
||||
upgrade_packages=['openssl'],
|
||||
)
|
||||
|
||||
self.assertEqual(len(result.problems), 0)
|
||||
self.assertEqual(result.resolved['bash'].evr, '5.2-1')
|
||||
self.assertEqual(result.resolved['glibc'].evr, '2.38-1')
|
||||
self.assertEqual(result.resolved['openssl'].evr, '3.3-1')
|
||||
|
||||
def test_upgrade_all_without_pinning(self) -> None:
|
||||
pool, idx = self._make_pool()
|
||||
|
||||
result = pool.resolve(['bash', 'glibc', 'openssl'])
|
||||
|
||||
self.assertEqual(result.resolved['bash'].evr, '5.3-1')
|
||||
self.assertEqual(result.resolved['glibc'].evr, '2.40-1')
|
||||
self.assertEqual(result.resolved['openssl'].evr, '3.3-1')
|
||||
|
||||
def test_pin_with_new_package(self) -> None:
|
||||
pinned = {'bash': '5.2-1', 'glibc': '2.38-1'}
|
||||
pool, idx = self._make_pool(pinned=pinned)
|
||||
|
||||
result = pool.resolve(
|
||||
['bash', 'glibc', 'zlib'],
|
||||
pinned=pinned,
|
||||
upgrade_packages=['zlib'],
|
||||
)
|
||||
|
||||
self.assertEqual(len(result.problems), 0)
|
||||
self.assertEqual(result.resolved['bash'].evr, '5.2-1')
|
||||
self.assertEqual(result.resolved['glibc'].evr, '2.38-1')
|
||||
self.assertEqual(result.resolved['zlib'].evr, '1.3.1-1')
|
||||
|
||||
    def test_pin_with_group_upgrade(self) -> None:
        """Group names in upgrade_packages expand to their members when
        expand_groups=True; non-members keep their pinned versions.

        'base-devel' expands to gcc+make, which upgrade past their pins,
        while bash (not in the group) stays pinned.
        """
        idx = solv_index_t(name='extra')
        idx.add(solv_package_t(name='gcc', version='14.2-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='make', version='4.5-1', arch='x86_64', groups=['base-devel']))
        idx.add(solv_package_t(name='bash', version='5.3-1', arch='x86_64'))
        idx.build_provides_index()

        pinned = {'gcc': '14.1-1', 'make': '4.4-1', 'bash': '5.2-1'}

        pool = solv_pool_t()
        pool.add_store(repo_store_t(index=idx))
        # upgrade_set whitelists the solvables allowed to leave their pins
        pool.add_pinned(pinned, upgrade_set={'gcc', 'make'})
        pool.finalize()

        result = pool.resolve(
            ['gcc', 'make', 'bash'],
            pinned=pinned,
            # expand_groups defaults to False to prevent group bleed-through;
            # opt in explicitly here since the group name is intentional
            upgrade_packages=['base-devel'],
            expand_groups=True,
        )

        self.assertEqual(len(result.problems), 0)
        self.assertEqual(result.resolved['gcc'].evr, '14.2-1')
        self.assertEqual(result.resolved['make'].evr, '4.5-1')
        # bash is outside the group and must remain at its pin
        self.assertEqual(result.resolved['bash'].evr, '5.2-1')
|
||||
@ -0,0 +1,467 @@
|
||||
import io
|
||||
import tarfile
|
||||
import tempfile
|
||||
import pathlib
|
||||
import unittest
|
||||
import unittest.mock
|
||||
|
||||
from ..apps.pacman.client import pacman_t
|
||||
from ..apps.pacman.db import db_parser_t
|
||||
from ..apps.pacman.types import (
|
||||
compile_options_t,
|
||||
mirror_config_t,
|
||||
pacman_constraint_t,
|
||||
repo_config_t,
|
||||
repo_index_t,
|
||||
)
|
||||
from ..models import (
|
||||
compile_entry_t,
|
||||
compile_result_t,
|
||||
package_index_t,
|
||||
package_t,
|
||||
resolve_result_t,
|
||||
)
|
||||
from ..resolver.general import resolver_t
|
||||
|
||||
|
||||
class TestBuildMirrorConfig(unittest.TestCase):
    """Unit tests for pacman_t.build_mirror_config.

    Verifies how compile_options_t fields (archive_date, index_url, repos,
    arch) translate into the repo URLs of the resulting mirror_config_t.
    """

    def test_with_archive_date(self) -> None:
        """archive_date routes URLs to the Arch archive with the date path."""
        opts = compile_options_t(archive_date='2024/01/15')
        mirror = pacman_t.build_mirror_config(opts)

        self.assertIn('archive.archlinux.org', mirror.repos[0].url)
        self.assertIn('2024/01/15', mirror.repos[0].url)

    def test_with_index_url(self) -> None:
        """An explicit index_url is used as the mirror base."""
        opts = compile_options_t(index_url='https://mirror.example.com')
        mirror = pacman_t.build_mirror_config(opts)

        self.assertIn('mirror.example.com', mirror.repos[0].url)

    def test_default_mirror(self) -> None:
        """With no options at all, the archive mirror is the default."""
        opts = compile_options_t()
        mirror = pacman_t.build_mirror_config(opts)

        self.assertIn('archive.archlinux.org', mirror.repos[0].url)

    def test_custom_repos(self) -> None:
        """One repo_config_t entry is produced per requested repo name."""
        opts = compile_options_t(
            archive_date='2024/01/15',
            repos=['core', 'extra'],
        )
        mirror = pacman_t.build_mirror_config(opts)

        self.assertEqual(len(mirror.repos), 2)

    def test_custom_arch(self) -> None:
        """A non-default architecture appears in the repo URL."""
        opts = compile_options_t(
            archive_date='2024/01/15',
            arch='aarch64',
        )
        mirror = pacman_t.build_mirror_config(opts)

        self.assertIn('aarch64', mirror.repos[0].url)

    def test_archive_date_takes_priority(self) -> None:
        """When both archive_date and index_url are set, archive_date wins."""
        opts = compile_options_t(
            archive_date='2024/01/15',
            index_url='https://mirror.example.com',
        )
        mirror = pacman_t.build_mirror_config(opts)

        self.assertIn('archive.archlinux.org', mirror.repos[0].url)
|
||||
|
||||
|
||||
class TestFetchIndices(unittest.TestCase):
    """Tests pacman_t.fetch_indices caching / offline behavior.

    Synthetic .db archives (gzip tarballs containing per-package ``desc``
    files, mimicking the pacman sync-db layout) are planted in a temp
    cache_dir to exercise cache hits, misses, offline mode and the
    no_cache re-download path without any network access.
    """

    @staticmethod
    def _make_db_bytes(packages: list[tuple[str, str]]) -> bytes:
        """Build an in-memory pacman sync-db: one ``<dir>/desc`` member per
        (dir_name, desc_content) pair, gzip-compressed."""
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            for dir_name, desc_content in packages:
                desc_bytes = desc_content.encode('utf-8')
                desc_info = tarfile.TarInfo(name='%s/desc' % dir_name)
                desc_info.size = len(desc_bytes)
                desc_info.type = tarfile.REGTYPE
                tar.addfile(desc_info, io.BytesIO(desc_bytes))
        return buf.getvalue()

    def test_offline_no_cache_raises(self) -> None:
        """offline=True with no cache_dir at all cannot succeed."""
        mirror = mirror_config_t(
            repos=[repo_config_t(name='core', url='https://example.com/core')],
        )

        with self.assertRaises(FileNotFoundError):
            pacman_t.fetch_indices(
                mirror=mirror,
                offline=True,
            )

    def test_offline_with_cache_missing_raises(self) -> None:
        """offline=True with an empty cache_dir (no .db files) also fails."""
        mirror = mirror_config_t(
            repos=[repo_config_t(name='core', url='https://example.com/core')],
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            with self.assertRaises(FileNotFoundError):
                pacman_t.fetch_indices(
                    mirror=mirror,
                    cache_dir=pathlib.Path(tmpdir),
                    offline=True,
                )

    def test_offline_with_cached_db(self) -> None:
        """offline=True succeeds when <repo>.db exists in cache_dir."""
        mirror = mirror_config_t(
            repos=[repo_config_t(name='core', url='https://example.com/core')],
        )

        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            cached_path = pathlib.Path(tmpdir) / 'core.db'
            cached_path.write_bytes(db_bytes)

            indices = pacman_t.fetch_indices(
                mirror=mirror,
                cache_dir=pathlib.Path(tmpdir),
                offline=True,
            )

            self.assertEqual(len(indices), 1)
            self.assertIn('bash', indices[0].packages)

    def test_cache_hit(self) -> None:
        """no_cache=False serves the cached file without downloading."""
        mirror = mirror_config_t(
            repos=[repo_config_t(name='extra', url='https://example.com/extra')],
        )

        db_bytes = self._make_db_bytes(
            [
                ('python-3.11.5-1', '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n'),
            ]
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            cached_path = pathlib.Path(tmpdir) / 'extra.db'
            cached_path.write_bytes(db_bytes)

            indices = pacman_t.fetch_indices(
                mirror=mirror,
                cache_dir=pathlib.Path(tmpdir),
                no_cache=False,
            )

            self.assertEqual(len(indices), 1)
            self.assertIn('python', indices[0].packages)

    def test_no_cache_flag_redownloads(self) -> None:
        """no_cache=True ignores the stale cached db and downloads afresh.

        download_db is patched so the "download" writes new_db_bytes; the
        returned index must reflect the new version, not the cached one.
        """
        mirror = mirror_config_t(
            repos=[repo_config_t(name='core', url='https://example.com/core')],
        )

        old_db_bytes = self._make_db_bytes(
            [
                ('bash-5.1.000-1', '%NAME%\nbash\n\n%VERSION%\n5.1.000-1\n'),
            ]
        )
        new_db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            cached_path = pathlib.Path(tmpdir) / 'core.db'
            cached_path.write_bytes(old_db_bytes)

            def mock_download(url: str, output_path: pathlib.Path) -> None:
                # stands in for the network fetch; writes the "fresh" db
                output_path.write_bytes(new_db_bytes)

            with unittest.mock.patch.object(
                pacman_t,
                'download_db',
                side_effect=mock_download,
            ):
                indices = pacman_t.fetch_indices(
                    mirror=mirror,
                    cache_dir=pathlib.Path(tmpdir),
                    no_cache=True,
                )

            self.assertEqual(len(indices), 1)
            self.assertEqual(indices[0].packages['bash'].version, '5.2.015-1')

    def test_multiple_repos_cached(self) -> None:
        """One index is returned per configured repo, in mirror order."""
        mirror = mirror_config_t(
            repos=[
                repo_config_t(name='core', url='https://example.com/core'),
                repo_config_t(name='extra', url='https://example.com/extra'),
            ],
        )

        core_db = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )
        extra_db = self._make_db_bytes(
            [
                ('python-3.11.5-1', '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n'),
            ]
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            (pathlib.Path(tmpdir) / 'core.db').write_bytes(core_db)
            (pathlib.Path(tmpdir) / 'extra.db').write_bytes(extra_db)

            indices = pacman_t.fetch_indices(
                mirror=mirror,
                cache_dir=pathlib.Path(tmpdir),
                offline=True,
            )

            self.assertEqual(len(indices), 2)
            self.assertIn('bash', indices[0].packages)
            self.assertIn('python', indices[1].packages)
|
||||
|
||||
|
||||
def _pacman_to_general(idx: repo_index_t) -> package_index_t:
    """Convert a pacman ``repo_index_t`` into the general ``package_index_t``
    used by the resolver, for compile tests.

    Copies per-package metadata, parses the raw constraint strings
    (depends/provides/conflicts) into ``pacman_constraint_t``, tags each
    package with the source repo name, and rebuilds the provides index.

    :param idx: parsed pacman repo index (flat name -> package mapping).
    :return: populated general index with its provides index built.
    """
    result = package_index_t(name=idx.name)
    # iterate values directly: the package already carries its own name,
    # so the dict key was unused (was ``for name, pkg in ...items()``)
    for pkg in idx.packages.values():
        result.add(
            package_t(
                name=pkg.name,
                version=pkg.version,
                filename=pkg.filename,
                sha256sum=pkg.sha256sum,
                repo=idx.name,
                depends=[pacman_constraint_t.parse(d) for d in pkg.depends],
                provides=[pacman_constraint_t.parse(p) for p in pkg.provides],
                conflicts=[pacman_constraint_t.parse(c) for c in pkg.conflicts],
                groups=pkg.groups,
            )
        )
    result.build_provides_index()
    return result
|
||||
|
||||
|
||||
def _compile_resolved(
    resolved: 'resolve_result_t',
    mirror_url: str,
    generate_hashes: bool = False,
) -> compile_result_t.res_t:
    """Turn a resolve result into a compile_result_t, as cli/compile.py does.

    Walks packages in resolution order, builds a download URL from the
    mirror for packages that have a filename, and includes the sha256 only
    when ``generate_hashes`` is set. ``res.txt`` is the rendered text form.
    """
    res = compile_result_t.res_t()
    for resolved_name in resolved.resolution_order:
        pkg = resolved.resolved[resolved_name]
        # no filename means no downloadable artifact, hence empty URL
        pkg_url = '%s/%s' % (mirror_url, pkg.filename) if pkg.filename else ''
        res.entries.append(
            compile_entry_t(
                name=pkg.name,
                version=pkg.version,
                filename=pkg.filename,
                repo=pkg.repo,
                url=pkg_url,
                sha256=pkg.sha256sum if generate_hashes else '',
            )
        )
    res.txt = res.to_txt()
    return res
|
||||
|
||||
|
||||
class TestCompile(unittest.TestCase):
    """Tests the compile flow: fetch_indices → convert → resolve → format.

    Exercises the same logical coverage as the old pacman_t.compile() method,
    but through the current separated API surface.
    """

    @staticmethod
    def _make_db_bytes(packages: list[tuple[str, str]]) -> bytes:
        """Build an in-memory pacman sync-db tarball (one desc per package)."""
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            for dir_name, desc_content in packages:
                desc_bytes = desc_content.encode('utf-8')
                desc_info = tarfile.TarInfo(name='%s/desc' % dir_name)
                desc_info.size = len(desc_bytes)
                desc_info.type = tarfile.REGTYPE
                tar.addfile(desc_info, io.BytesIO(desc_bytes))
        return buf.getvalue()

    def _fetch_and_resolve(
        self,
        db_files: dict[str, bytes],
        packages: list[str],
        mirror_url: str = 'https://archive.archlinux.org/repos/2024/01/15',
        generate_hashes: bool = False,
    ) -> compile_result_t.res_t:
        """Run the full pipeline: write db files, parse, convert, resolve,
        and format into a compile result."""
        with tempfile.TemporaryDirectory() as tmpdir:
            indices: list[package_index_t] = []
            for name, data in db_files.items():
                db_path = pathlib.Path(tmpdir) / ('%s.db' % name)
                db_path.write_bytes(data)
                raw = db_parser_t.parse_db_path(db_path, repo_name=name)
                indices.append(_pacman_to_general(raw))

            resolved = resolver_t.resolve(packages, indices)
            return _compile_resolved(resolved, mirror_url, generate_hashes)

    def test_compile_single_package(self) -> None:
        """A single dependency-free package yields exactly one entry."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'bash-5.2.015-1',
                    '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n\n%FILENAME%\nbash-5.2.015-1-x86_64.pkg.tar.zst\n\n%SHA256SUM%\nabc123\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve(
            {'core': db_bytes},
            ['bash'],
            generate_hashes=True,
        )

        self.assertEqual(len(result.entries), 1)
        self.assertEqual(result.entries[0].name, 'bash')
        self.assertEqual(result.entries[0].version, '5.2.015-1')
        self.assertEqual(result.entries[0].sha256, 'abc123')

    def test_compile_no_hashes(self) -> None:
        """generate_hashes=False leaves sha256 empty even when known."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'bash-5.2.015-1',
                    '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n\n%FILENAME%\nbash-5.2.015-1-x86_64.pkg.tar.zst\n\n%SHA256SUM%\nabc123\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve(
            {'core': db_bytes},
            ['bash'],
            generate_hashes=False,
        )

        self.assertEqual(result.entries[0].sha256, '')

    def test_compile_with_deps(self) -> None:
        """Transitive dependencies appear in the compiled entries."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'python-3.11.5-1',
                    '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%FILENAME%\npython-3.11.5-1-x86_64.pkg.tar.zst\n\n%DEPENDS%\nglibc\n\n',
                ),
                (
                    'glibc-2.38-1',
                    '%NAME%\nglibc\n\n%VERSION%\n2.38-1\n\n%FILENAME%\nglibc-2.38-1-x86_64.pkg.tar.zst\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve({'core': db_bytes}, ['python'])

        names = {e.name for e in result.entries}
        self.assertIn('python', names)
        self.assertIn('glibc', names)

    def test_compile_txt_output(self) -> None:
        """The text rendering uses pip-requirements-style pins and hashes."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'bash-5.2.015-1',
                    '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n\n%FILENAME%\nbash-5.2.015-1-x86_64.pkg.tar.zst\n\n%SHA256SUM%\nabc123\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve(
            {'core': db_bytes},
            ['bash'],
            generate_hashes=True,
        )

        self.assertIn('bash==5.2.015-1', result.txt)
        self.assertIn('--hash=sha256:abc123', result.txt)

    def test_compile_url_construction(self) -> None:
        """Entry URLs are mirror_url + '/' + package filename."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'bash-5.2.015-1',
                    '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n\n%FILENAME%\nbash-5.2.015-1-x86_64.pkg.tar.zst\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve(
            {'core': db_bytes},
            ['bash'],
            mirror_url='https://archive.archlinux.org/repos/2024/01/15',
        )

        self.assertIn('2024/01/15', result.entries[0].url)
        self.assertIn('bash-5.2.015-1-x86_64.pkg.tar.zst', result.entries[0].url)

    def test_compile_multiple_repos(self) -> None:
        """Entries remember which repo each package came from."""
        core_db = self._make_db_bytes(
            [
                (
                    'glibc-2.38-1',
                    '%NAME%\nglibc\n\n%VERSION%\n2.38-1\n\n%FILENAME%\nglibc-2.38-1-x86_64.pkg.tar.zst\n\n',
                ),
            ]
        )
        extra_db = self._make_db_bytes(
            [
                (
                    'python-3.11.5-1',
                    '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%FILENAME%\npython-3.11.5-1-x86_64.pkg.tar.zst\n\n%DEPENDS%\nglibc\n\n',
                ),
            ]
        )

        result = self._fetch_and_resolve(
            {'core': core_db, 'extra': extra_db},
            ['python'],
        )

        names = {e.name for e in result.entries}
        self.assertIn('python', names)
        self.assertIn('glibc', names)

        repos = {e.name: e.repo for e in result.entries}
        self.assertEqual(repos['glibc'], 'core')
        self.assertEqual(repos['python'], 'extra')

    def test_compile_not_found(self) -> None:
        """Resolving an unknown package reports a problem mentioning it."""
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            db_path = pathlib.Path(tmpdir) / 'core.db'
            db_path.write_bytes(db_bytes)
            raw = db_parser_t.parse_db_path(db_path, repo_name='core')
            idx = _pacman_to_general(raw)
            result = resolver_t.resolve(['nonexistent'], [idx])
            self.assertGreater(len(result.problems), 0)
            self.assertTrue(any('nonexistent' in p for p in result.problems))

    def test_compile_empty_packages(self) -> None:
        """An empty request list produces an empty compile result."""
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        result = self._fetch_and_resolve({'core': db_bytes}, [])

        self.assertEqual(len(result.entries), 0)
|
||||
@ -0,0 +1,241 @@
|
||||
import io
|
||||
import tarfile
|
||||
import tempfile
|
||||
import pathlib
|
||||
import unittest
|
||||
|
||||
from ..apps.pacman.db import db_parser_t
|
||||
from ..apps.pacman.types import (
|
||||
package_desc_t,
|
||||
repo_index_t,
|
||||
)
|
||||
|
||||
|
||||
class TestParseDesc(unittest.TestCase):
    """Tests db_parser_t.parse_desc over pacman ``desc`` file contents.

    The desc format is %FIELD% headers each followed by one value per line,
    blocks separated by blank lines.
    """

    def test_minimal_desc(self) -> None:
        """NAME and VERSION alone are sufficient."""
        content = '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.name, 'bash')
        self.assertEqual(pkg.version, '5.2.015-1')

    def test_full_desc(self) -> None:
        """All scalar and single-value fields map onto package attributes."""
        content = (
            '%FILENAME%\nbash-5.2.015-1-x86_64.pkg.tar.zst\n\n'
            '%NAME%\nbash\n\n'
            '%VERSION%\n5.2.015-1\n\n'
            '%DESC%\nThe GNU Bourne Again shell\n\n'
            '%CSIZE%\n1852456\n\n'
            '%ISIZE%\n9503476\n\n'
            '%MD5SUM%\naabbccdd\n\n'
            '%SHA256SUM%\neeff0011223344\n\n'
            '%URL%\nhttps://www.gnu.org/software/bash/\n\n'
            '%ARCH%\nx86_64\n\n'
            '%BUILDDATE%\n1693000000\n\n'
            '%PACKAGER%\nArch Packager\n\n'
            '%LICENSE%\nGPL-3.0-or-later\n\n'
            '%BASE%\nbash\n\n'
        )
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.name, 'bash')
        self.assertEqual(pkg.version, '5.2.015-1')
        self.assertEqual(pkg.filename, 'bash-5.2.015-1-x86_64.pkg.tar.zst')
        self.assertEqual(pkg.desc, 'The GNU Bourne Again shell')
        # size fields are parsed to int
        self.assertEqual(pkg.csize, 1852456)
        self.assertEqual(pkg.isize, 9503476)
        self.assertEqual(pkg.md5sum, 'aabbccdd')
        self.assertEqual(pkg.sha256sum, 'eeff0011223344')
        self.assertEqual(pkg.url, 'https://www.gnu.org/software/bash/')
        self.assertEqual(pkg.arch, 'x86_64')
        self.assertEqual(pkg.builddate, 1693000000)
        self.assertEqual(pkg.packager, 'Arch Packager')
        # LICENSE is a list field even with a single value
        self.assertEqual(pkg.license, ['GPL-3.0-or-later'])
        self.assertEqual(pkg.base, 'bash')

    def test_depends(self) -> None:
        """DEPENDS keeps raw constraint strings (e.g. 'openssl>=1.1')."""
        content = '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%DEPENDS%\nglibc\nexpat\nopenssl>=1.1\nlibffi\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.depends, ['glibc', 'expat', 'openssl>=1.1', 'libffi'])

    def test_provides(self) -> None:
        """PROVIDES entries retain their optional '=version' suffix."""
        content = '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%PROVIDES%\npython3=3.11.5\npython3.11\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.provides, ['python3=3.11.5', 'python3.11'])

    def test_conflicts(self) -> None:
        content = '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%CONFLICTS%\npython2\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.conflicts, ['python2'])

    def test_multiple_licenses(self) -> None:
        content = '%NAME%\ntest\n\n%VERSION%\n1.0\n\n%LICENSE%\nMIT\nApache-2.0\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.license, ['MIT', 'Apache-2.0'])

    def test_optdepends(self) -> None:
        """OPTDEPENDS entries ('pkg: reason') are collected per line."""
        content = '%NAME%\ntest\n\n%VERSION%\n1.0\n\n%OPTDEPENDS%\npython-pip: for pip support\npython-setuptools: for setuptools\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(len(pkg.optdepends), 2)

    def test_missing_name_raises(self) -> None:
        """NAME is mandatory."""
        content = '%VERSION%\n1.0\n'
        with self.assertRaises(ValueError):
            db_parser_t.parse_desc(content)

    def test_missing_version_raises(self) -> None:
        """VERSION is mandatory."""
        content = '%NAME%\ntest\n'
        with self.assertRaises(ValueError):
            db_parser_t.parse_desc(content)

    def test_empty_content_raises(self) -> None:
        with self.assertRaises(ValueError):
            db_parser_t.parse_desc('')

    def test_unknown_fields_ignored(self) -> None:
        """Unknown %FIELD% headers are silently skipped, not errors."""
        content = '%NAME%\ntest\n\n%VERSION%\n1.0\n\n%UNKNOWNFIELD%\nsome_value\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.name, 'test')
        self.assertEqual(pkg.version, '1.0')

    def test_groups(self) -> None:
        content = '%NAME%\nvim\n\n%VERSION%\n9.0-1\n\n%GROUPS%\nbase-devel\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.groups, ['base-devel'])

    def test_replaces(self) -> None:
        content = '%NAME%\ntest\n\n%VERSION%\n2.0\n\n%REPLACES%\ntest-old\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.replaces, ['test-old'])

    def test_makedepends(self) -> None:
        content = '%NAME%\ntest\n\n%VERSION%\n1.0\n\n%MAKEDEPENDS%\ngcc\nmake\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.makedepends, ['gcc', 'make'])

    def test_checkdepends(self) -> None:
        content = '%NAME%\ntest\n\n%VERSION%\n1.0\n\n%CHECKDEPENDS%\npytest\n\n'
        pkg = db_parser_t.parse_desc(content)
        self.assertEqual(pkg.checkdepends, ['pytest'])
|
||||
|
||||
|
||||
class TestParseDb(unittest.TestCase):
    """Tests db_parser_t.parse_db / parse_db_path over whole sync-db tars."""

    @staticmethod
    def _make_db_bytes(packages: list[tuple[str, str]]) -> bytes:
        """Build an in-memory pacman sync-db: ``<dir>/desc`` member per
        (dir_name, desc_content) pair, gzip-compressed."""
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            for dir_name, desc_content in packages:
                desc_bytes = desc_content.encode('utf-8')

                desc_info = tarfile.TarInfo(name='%s/desc' % dir_name)
                desc_info.size = len(desc_bytes)
                desc_info.type = tarfile.REGTYPE
                tar.addfile(desc_info, io.BytesIO(desc_bytes))

        return buf.getvalue()

    def test_single_package(self) -> None:
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        index = db_parser_t.parse_db(io.BytesIO(db_bytes), repo_name='core')

        self.assertEqual(index.name, 'core')
        self.assertEqual(len(index.packages), 1)
        self.assertIn('bash', index.packages)
        self.assertEqual(index.packages['bash'].version, '5.2.015-1')

    def test_multiple_packages(self) -> None:
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
                ('python-3.11.5-1', '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n'),
                ('gcc-13.2.1-1', '%NAME%\ngcc\n\n%VERSION%\n13.2.1-1\n'),
            ]
        )

        index = db_parser_t.parse_db(io.BytesIO(db_bytes), repo_name='core')

        self.assertEqual(len(index.packages), 3)
        self.assertIn('bash', index.packages)
        self.assertIn('python', index.packages)
        self.assertIn('gcc', index.packages)

    def test_provides_index_built(self) -> None:
        """parse_db builds provides_index mapping capability -> providers."""
        db_bytes = self._make_db_bytes(
            [
                (
                    'python-3.11.5-1',
                    '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%PROVIDES%\npython3=3.11.5\n\n',
                ),
            ]
        )

        index = db_parser_t.parse_db(io.BytesIO(db_bytes), repo_name='extra')

        # the '=3.11.5' suffix is stripped for the capability key
        self.assertIn('python3', index.provides_index)
        self.assertEqual(index.provides_index['python3'], ['python'])

    def test_empty_db(self) -> None:
        """A tar with no members parses to an empty index, not an error."""
        buf = io.BytesIO()
        with tarfile.open(fileobj=buf, mode='w:gz') as tar:
            pass

        index = db_parser_t.parse_db(io.BytesIO(buf.getvalue()), repo_name='empty')
        self.assertEqual(len(index.packages), 0)

    def test_malformed_desc_skipped(self) -> None:
        """A desc that fails to parse is dropped; valid siblings survive."""
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
                ('broken-1.0', 'this is not a valid desc file'),
            ]
        )

        index = db_parser_t.parse_db(io.BytesIO(db_bytes), repo_name='core')

        self.assertEqual(len(index.packages), 1)
        self.assertIn('bash', index.packages)

    def test_with_depends(self) -> None:
        db_bytes = self._make_db_bytes(
            [
                (
                    'python-3.11.5-1',
                    '%NAME%\npython\n\n%VERSION%\n3.11.5-1\n\n%DEPENDS%\nglibc\nopenssl>=1.1\nlibffi\n\n',
                ),
            ]
        )

        index = db_parser_t.parse_db(io.BytesIO(db_bytes), repo_name='extra')
        pkg = index.packages['python']
        self.assertEqual(pkg.depends, ['glibc', 'openssl>=1.1', 'libffi'])

    def test_parse_db_path(self) -> None:
        """parse_db_path reads from disk with an explicit repo_name."""
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        with tempfile.NamedTemporaryFile(suffix='.db') as tmp:
            tmp.write(db_bytes)
            tmp.flush()

            index = db_parser_t.parse_db_path(pathlib.Path(tmp.name), repo_name='test')
            self.assertEqual(len(index.packages), 1)

    def test_parse_db_path_auto_name(self) -> None:
        """Without repo_name, the repo name is derived from the filename."""
        db_bytes = self._make_db_bytes(
            [
                ('bash-5.2.015-1', '%NAME%\nbash\n\n%VERSION%\n5.2.015-1\n'),
            ]
        )

        with tempfile.NamedTemporaryFile(suffix='.db', prefix='core.') as tmp:
            tmp.write(db_bytes)
            tmp.flush()

            index = db_parser_t.parse_db_path(pathlib.Path(tmp.name))
            self.assertEqual(len(index.packages), 1)
|
||||
@ -0,0 +1,326 @@
|
||||
import pathlib
|
||||
import unittest
|
||||
|
||||
from ..apps.pacman.db import db_parser_t
|
||||
from ..apps.pacman.types import pacman_constraint_t, repo_index_t
|
||||
from ..resolver.general import resolver_t
|
||||
from ..models import (
|
||||
package_constraint_t,
|
||||
package_t,
|
||||
package_index_t,
|
||||
resolve_result_t,
|
||||
)
|
||||
|
||||
|
||||
def _pacman_to_general(idx: repo_index_t) -> package_index_t:
    """Convert a pacman ``repo_index_t`` into the general ``package_index_t``
    used by the resolver, for the integration tests.

    Parses the raw depends/provides/conflicts strings into
    ``pacman_constraint_t`` and rebuilds the provides index afterwards.

    :param idx: parsed pacman repo index (flat name -> package mapping).
    :return: populated general index with its provides index built.
    """
    result = package_index_t(name=idx.name)
    # iterate values directly: the package already carries its own name,
    # so the dict key was unused (was ``for name, pkg in ...items()``)
    for pkg in idx.packages.values():
        result.add(
            package_t(
                name=pkg.name,
                version=pkg.version,
                filename=pkg.filename,
                sha256sum=pkg.sha256sum,
                depends=[pacman_constraint_t.parse(d) for d in pkg.depends],
                provides=[pacman_constraint_t.parse(p) for p in pkg.provides],
                conflicts=[pacman_constraint_t.parse(c) for c in pkg.conflicts],
                groups=pkg.groups,
            )
        )
    result.build_provides_index()
    return result
|
||||
|
||||
|
||||
class TestIntegrationBase(unittest.TestCase):
    """Shared fixtures for integration tests against real .db snapshots.

    Snapshots (core_<date>.db / extra_<date>.db) and the installed-package
    list live under tests/res/.
    """

    class constants_t:
        # directory holding the committed snapshot fixtures
        res_dir: pathlib.Path = pathlib.Path(__file__).parent / 'res'

        # snapshot dates, in chronological order (used by monotonicity tests)
        dates: list[str] = [
            '2025_03_15',
            '2025_09_15',
            '2026_03_15',
        ]

    @staticmethod
    def _load_indices(date: str) -> list[package_index_t]:
        """Parse the core and extra snapshots for *date* and convert them to
        general indices. Returns [core, extra] in that order."""
        res_dir = TestIntegrationBase.constants_t.res_dir
        core = db_parser_t.parse_db_path(res_dir / ('core_%s.db' % date), repo_name='core')
        extra = db_parser_t.parse_db_path(res_dir / ('extra_%s.db' % date), repo_name='extra')
        return [_pacman_to_general(core), _pacman_to_general(extra)]

    @staticmethod
    def _load_installed() -> list[tuple[str, str]]:
        """Read installed_packages.txt as (name, version) pairs.

        Lines are whitespace-separated 'name version'; anything else
        (e.g. blank lines) is skipped.
        """
        res_dir = TestIntegrationBase.constants_t.res_dir
        result: list[tuple[str, str]] = []
        with open(res_dir / 'installed_packages.txt') as f:
            for line in f:
                parts = line.strip().split(None, 1)
                if len(parts) == 2:
                    result.append((parts[0], parts[1]))
        return result

    @staticmethod
    def _available_packages(indices: list[package_index_t]) -> set[str]:
        """Union of package names across all given indices."""
        available: set[str] = set()
        for idx in indices:
            available.update(idx.packages.keys())
        return available
|
||||
|
||||
|
||||
class TestDbParsing(TestIntegrationBase):
    """Sanity checks on the committed snapshot fixtures themselves."""

    def test_core_2025_03_15_has_packages(self) -> None:
        indices = self._load_indices('2025_03_15')
        self.assertGreater(len(indices[0].packages), 200)

    def test_extra_2025_03_15_has_packages(self) -> None:
        indices = self._load_indices('2025_03_15')
        self.assertGreater(len(indices[1].packages), 14000)

    def test_core_2025_09_15_has_packages(self) -> None:
        indices = self._load_indices('2025_09_15')
        self.assertGreater(len(indices[0].packages), 200)

    def test_extra_2025_09_15_has_packages(self) -> None:
        indices = self._load_indices('2025_09_15')
        self.assertGreater(len(indices[1].packages), 14000)

    def test_core_2026_03_15_has_packages(self) -> None:
        indices = self._load_indices('2026_03_15')
        self.assertGreater(len(indices[0].packages), 200)

    def test_extra_2026_03_15_has_packages(self) -> None:
        indices = self._load_indices('2026_03_15')
        self.assertGreater(len(indices[1].packages), 14000)

    def test_package_count_non_decreasing(self) -> None:
        """Total package count should not shrink across snapshot dates."""
        counts: list[int] = []
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            total = sum(len(idx.packages) for idx in indices)
            counts.append(total)

        # dates list is chronological, so compare each to its predecessor
        for i in range(1, len(counts)):
            self.assertGreaterEqual(counts[i], counts[i - 1])

    def test_core_has_glibc(self) -> None:
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            self.assertIn('glibc', indices[0].packages)

    def test_extra_has_python(self) -> None:
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            self.assertIn('python', indices[1].packages)

    def test_provides_index_populated(self) -> None:
        """Every snapshot index must have a non-empty provides index."""
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            for idx in indices:
                self.assertGreater(len(idx.provides_index), 0)
|
||||
|
||||
|
||||
class TestInstalledPackages(TestIntegrationBase):
    """Checks the installed_packages.txt fixture against the snapshots."""

    def test_installed_list_nonempty(self) -> None:
        installed = self._load_installed()
        self.assertGreater(len(installed), 100)

    def test_installed_have_versions(self) -> None:
        """Every fixture line carries a non-empty name and version."""
        installed = self._load_installed()
        for name, version in installed:
            self.assertGreater(len(name), 0)
            self.assertGreater(len(version), 0)

    def test_all_installed_in_current_snapshot(self) -> None:
        """The newest snapshot must contain every installed package."""
        indices = self._load_indices('2026_03_15')
        available = self._available_packages(indices)
        installed = self._load_installed()

        for name, version in installed:
            self.assertIn(
                name,
                available,
                'installed package %s not found in 2026_03_15 snapshot' % name,
            )
|
||||
|
||||
|
||||
class TestResolveAgainstSnapshots(TestIntegrationBase):
    """Resolves the full installed-package set against each snapshot."""

    def resolve_installed(
        self,
        date: str,
    ) -> tuple[resolve_result_t, list[str], list[str]]:
        """Resolve all installed packages available in the *date* snapshot.

        :return: (resolve result, names found in the snapshot, names missing
            from it).
        """
        indices = self._load_indices(date)
        available = self._available_packages(indices)
        installed = self._load_installed()

        found = [name for name, version in installed if name in available]
        missing = [name for name, version in installed if name not in available]

        result = resolver_t.resolve(found, indices)

        return result, found, missing

    def test_resolve_2025_03_15(self) -> None:
        result, found, missing = self.resolve_installed('2025_03_15')

        # resolution must pull in deps beyond the requested set
        self.assertGreater(len(result.resolved), len(found))
        self.assertGreater(len(result.resolution_order), 0)

        for name in found:
            self.assertIn(name, result.resolved)

    def test_resolve_2025_09_15(self) -> None:
        result, found, missing = self.resolve_installed('2025_09_15')

        self.assertGreater(len(result.resolved), len(found))

        for name in found:
            self.assertIn(name, result.resolved)

    def test_resolve_2026_03_15(self) -> None:
        """Against the newest snapshot nothing is missing and everything
        installed resolves."""
        result, found, missing = self.resolve_installed('2026_03_15')

        self.assertEqual(len(missing), 0)
        self.assertGreater(len(result.resolved), len(found))

        installed = self._load_installed()
        for name, version in installed:
            self.assertIn(name, result.resolved)

    def test_resolve_2026_03_15_no_missing(self) -> None:
        result, found, missing = self.resolve_installed('2026_03_15')
        self.assertEqual(missing, [])

    def test_resolve_2025_03_15_some_missing(self) -> None:
        """Older snapshots predate some installed packages."""
        result, found, missing = self.resolve_installed('2025_03_15')
        self.assertGreater(len(missing), 0)

    def test_resolved_includes_transitive_deps(self) -> None:
        result, found, missing = self.resolve_installed('2026_03_15')

        self.assertGreater(
            len(result.resolved),
            len(found),
            'transitive deps should add extra packages beyond the requested set',
        )

    def test_resolved_packages_have_versions(self) -> None:
        for date in self.constants_t.dates:
            result, found, missing = self.resolve_installed(date)

            for name, pkg in result.resolved.items():
                self.assertGreater(len(pkg.version), 0, 'package %s has empty version' % name)

    def test_resolution_order_no_duplicates(self) -> None:
        for date in self.constants_t.dates:
            result, found, missing = self.resolve_installed(date)

            self.assertEqual(
                len(result.resolution_order),
                len(set(result.resolution_order)),
            )

    def test_glibc_resolved_in_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result, found, missing = self.resolve_installed(date)
            self.assertIn('glibc', result.resolved)

    def test_resolved_versions_match_snapshot(self) -> None:
        """Each resolved version must exist in the first index that carries
        the package name."""
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            result, found, missing = self.resolve_installed(date)

            for name, pkg in result.resolved.items():
                for idx in indices:
                    if name in idx.packages:
                        # package_index_t.packages is a 2-level dict
                        # (name -> version -> pkg), so membership here
                        # checks the version key
                        self.assertIn(
                            pkg.version,
                            idx.packages[name],
                            '%s version mismatch in %s' % (name, date),
                        )
                        break
|
||||
|
||||
|
||||
class TestResolveSinglePackages(TestIntegrationBase):
    """Resolve individual well-known packages against every snapshot."""

    def _resolve(self, date: str, names: list[str]) -> 'resolve_result_t':
        # Shared one-shot resolve against the snapshot for *date*; each test
        # previously repeated this load-then-resolve boilerplate.
        return resolver_t.resolve(names, self._load_indices(date))

    def test_resolve_glibc_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result = self._resolve(date, ['glibc'])
            self.assertIn('glibc', result.resolved)

    def test_resolve_bash_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result = self._resolve(date, ['bash'])
            self.assertIn('bash', result.resolved)
            # bash depends on glibc, which must be pulled in transitively
            self.assertIn('glibc', result.resolved)

    def test_resolve_python_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result = self._resolve(date, ['python'])
            self.assertIn('python', result.resolved)
            self.assertGreater(len(result.resolved), 3)

    def test_resolve_gcc_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result = self._resolve(date, ['gcc'])
            self.assertIn('gcc', result.resolved)

    def test_resolve_openssl_all_snapshots(self) -> None:
        for date in self.constants_t.dates:
            result = self._resolve(date, ['openssl'])
            self.assertIn('openssl', result.resolved)

    def test_resolve_nonexistent_raises(self) -> None:
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            with self.assertRaises(resolver_t.error_t.not_found_t):
                resolver_t.resolve(['this-package-does-not-exist-xyz'], indices)
||||
class TestCrossSnapshotComparison(TestIntegrationBase):
    """Compare resolution results across chronologically ordered snapshots."""

    def _snapshot_versions(self, name: str) -> list[str]:
        # Resolved version of *name* in each snapshot, in date order.
        versions: list[str] = []
        for date in self.constants_t.dates:
            indices = self._load_indices(date)
            result = resolver_t.resolve([name], indices)
            versions.append(result.resolved[name].version)
        return versions

    def _assert_non_decreasing(self, name: str) -> None:
        """Assert the resolved version of *name* never goes backwards in time."""
        from ..models import vercmp_t

        versions = self._snapshot_versions(name)
        for i in range(1, len(versions)):
            self.assertGreaterEqual(
                vercmp_t.vercmp(versions[i], versions[i - 1]),
                0,
                '%s version decreased from %s to %s' % (name, versions[i - 1], versions[i]),
            )

    def test_glibc_version_non_decreasing(self) -> None:
        self._assert_non_decreasing('glibc')

    def test_python_version_non_decreasing(self) -> None:
        self._assert_non_decreasing('python')

    def test_older_snapshot_resolves_fewer_installed(self) -> None:
        counts: list[int] = []
        for date in self.constants_t.dates:
            # BUGFIX: previously this instantiated TestResolveAgainstSnapshots()
            # with no test-method name, which raises ValueError on Python < 3.11.
            # resolve_installed only needs the shared TestIntegrationBase helpers,
            # so calling it unbound with *self* is safe and equivalent.
            _result, found, _missing = TestResolveAgainstSnapshots.resolve_installed(
                self,
                date,
            )
            counts.append(len(found))

        self.assertLessEqual(counts[0], counts[-1])
@ -0,0 +1,479 @@
|
||||
import unittest
|
||||
import dataclasses
|
||||
|
||||
from ..models import (
|
||||
vercmp_t,
|
||||
constraint_op_t,
|
||||
package_constraint_t,
|
||||
compile_entry_t,
|
||||
compile_result_t,
|
||||
)
|
||||
from ..apps.pacman.types import (
|
||||
pacman_constraint_t,
|
||||
package_desc_t,
|
||||
repo_config_t,
|
||||
mirror_config_t,
|
||||
repo_index_t,
|
||||
compile_options_t,
|
||||
)
|
||||
|
||||
|
||||
class TestVercmp(unittest.TestCase):
    """pacman-style version comparison: vercmp_t.vercmp / split_evr / compare_segment."""

    def test_equal_versions(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0', '1.0'), 0)

    def test_simple_greater(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.1', '1.0'), 1)

    def test_simple_less(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0', '1.1'), -1)

    def test_epoch_greater(self) -> None:
        # epoch dominates the version component
        self.assertEqual(vercmp_t.vercmp('2:1.0', '1:2.0'), 1)

    def test_epoch_less(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1:1.0', '2:1.0'), -1)

    def test_epoch_vs_no_epoch(self) -> None:
        # any explicit epoch outranks an arbitrarily large epoch-less version
        self.assertEqual(vercmp_t.vercmp('1:1.0', '999.0'), 1)

    def test_release_compare(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0-2', '1.0-1'), 1)

    def test_release_equal(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0-1', '1.0-1'), 0)

    def test_release_less(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0-1', '1.0-3'), -1)

    def test_alpha_vs_numeric(self) -> None:
        # alphabetic segments sort before numeric ones (pacman semantics)
        self.assertEqual(vercmp_t.vercmp('1.0a', '1.01'), -1)

    def test_numeric_vs_alpha(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.01', '1.0a'), 1)

    def test_longer_version(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0.1', '1.0'), 1)

    def test_shorter_version(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0', '1.0.1'), -1)

    def test_alpha_ordering(self) -> None:
        self.assertEqual(vercmp_t.vercmp('1.0a', '1.0b'), -1)

    def test_complex_version(self) -> None:
        self.assertEqual(vercmp_t.vercmp('2:3.5.1-7', '2:3.5.1-6'), 1)

    # NOTE: renamed testsplit_evr_* -> test_split_evr_* (and testcompare_* ->
    # test_compare_*) for naming consistency with the rest of the suite;
    # unittest discovery matched the old names too, so behavior is unchanged.
    def test_split_evr_no_epoch_no_rel(self) -> None:
        epoch, ver, rel = vercmp_t.split_evr('1.2.3')
        self.assertEqual(epoch, 0)
        self.assertEqual(ver, '1.2.3')
        self.assertEqual(rel, '0')

    def test_split_evr_with_epoch(self) -> None:
        epoch, ver, rel = vercmp_t.split_evr('3:1.2.3')
        self.assertEqual(epoch, 3)
        self.assertEqual(ver, '1.2.3')
        self.assertEqual(rel, '0')

    def test_split_evr_with_rel(self) -> None:
        epoch, ver, rel = vercmp_t.split_evr('1.2.3-5')
        self.assertEqual(epoch, 0)
        self.assertEqual(ver, '1.2.3')
        self.assertEqual(rel, '5')

    def test_split_evr_full(self) -> None:
        epoch, ver, rel = vercmp_t.split_evr('2:1.2.3-5')
        self.assertEqual(epoch, 2)
        self.assertEqual(ver, '1.2.3')
        self.assertEqual(rel, '5')

    def test_identical_strings(self) -> None:
        self.assertEqual(vercmp_t.vercmp('5.15.133.arch1-1', '5.15.133.arch1-1'), 0)

    def test_kernel_versions(self) -> None:
        self.assertGreater(vercmp_t.vercmp('6.1.0.arch1-1', '5.15.133.arch1-1'), 0)

    def test_compare_segment_empty(self) -> None:
        self.assertEqual(vercmp_t.compare_segment('', ''), 0)
class TestConstraintOp(unittest.TestCase):
    """Sanity-check the textual value of every comparison operator."""

    def test_values(self) -> None:
        expected = {
            constraint_op_t.eq: '==',
            constraint_op_t.ge: '>=',
            constraint_op_t.le: '<=',
            constraint_op_t.gt: '>',
            constraint_op_t.lt: '<',
        }
        for op, text in expected.items():
            self.assertEqual(op.value, text)
class TestPackageConstraint(unittest.TestCase):
    """Parsing, matching and formatting of package_constraint_t."""

    def _check_parse(
        self,
        text: str,
        name: str,
        op: 'constraint_op_t | None' = None,
        version: 'str | None' = None,
    ) -> 'package_constraint_t':
        """Parse *text* and assert the resulting (name, op, version) triple."""
        c = package_constraint_t.parse(text)
        self.assertEqual(c.name, name)
        self.assertEqual(c.op, op)
        self.assertEqual(c.version, version)
        return c

    def test_parse_name_only(self) -> None:
        self._check_parse('glibc', 'glibc')

    def test_parse_eq(self) -> None:
        self._check_parse('glibc==2.38-1', 'glibc', constraint_op_t.eq, '2.38-1')

    def test_parse_eq_epoch(self) -> None:
        # epoch prefix ('1:') stays part of the version string
        self._check_parse('less==1:692-1', 'less', constraint_op_t.eq, '1:692-1')

    def test_parse_single_eq_not_supported(self) -> None:
        # single '=' is pacman syntax, not package_constraint_t syntax
        with self.assertRaises(ValueError):
            package_constraint_t.parse('glibc=2.38-1')

    def test_parse_ge(self) -> None:
        self._check_parse('python>=3.11', 'python', constraint_op_t.ge, '3.11')

    def test_parse_le(self) -> None:
        self._check_parse('gcc<=13.2.1', 'gcc', constraint_op_t.le, '13.2.1')

    def test_parse_gt(self) -> None:
        self._check_parse('bash>5.0', 'bash', constraint_op_t.gt, '5.0')

    def test_parse_lt(self) -> None:
        self._check_parse('zlib<1.3', 'zlib', constraint_op_t.lt, '1.3')

    def test_parse_whitespace(self) -> None:
        # surrounding whitespace is stripped
        self._check_parse(' glibc>=2.38 ', 'glibc', constraint_op_t.ge, '2.38')

    def test_parse_invalid(self) -> None:
        with self.assertRaises(ValueError):
            package_constraint_t.parse('')

    def test_parse_name_with_special_chars(self) -> None:
        c = package_constraint_t.parse('lib32-glibc>=2.38')
        self.assertEqual(c.name, 'lib32-glibc')
        self.assertEqual(c.op, constraint_op_t.ge)

    def test_parse_name_with_plus(self) -> None:
        c = package_constraint_t.parse('c++utilities>=5.0')
        self.assertEqual(c.name, 'c++utilities')

    def test_parse_name_with_at(self) -> None:
        # '@' is not an operator, so the whole token is the name
        self._check_parse('python@3.11', 'python@3.11')

    def test_satisfied_by_no_constraint(self) -> None:
        c = package_constraint_t.parse('glibc')
        self.assertTrue(c.satisfied_by('1.0'))
        self.assertTrue(c.satisfied_by('999.0'))

    def test_satisfied_by_eq(self) -> None:
        c = package_constraint_t.parse('glibc==2.38')
        self.assertTrue(c.satisfied_by('2.38'))
        self.assertFalse(c.satisfied_by('2.37'))
        self.assertFalse(c.satisfied_by('2.39'))

    def test_satisfied_by_ge(self) -> None:
        c = package_constraint_t.parse('glibc>=2.38')
        self.assertTrue(c.satisfied_by('2.38'))
        self.assertTrue(c.satisfied_by('2.39'))
        self.assertFalse(c.satisfied_by('2.37'))

    def test_satisfied_by_le(self) -> None:
        c = package_constraint_t.parse('glibc<=2.38')
        self.assertTrue(c.satisfied_by('2.38'))
        self.assertTrue(c.satisfied_by('2.37'))
        self.assertFalse(c.satisfied_by('2.39'))

    def test_satisfied_by_gt(self) -> None:
        c = package_constraint_t.parse('glibc>2.38')
        self.assertFalse(c.satisfied_by('2.38'))
        self.assertTrue(c.satisfied_by('2.39'))

    def test_satisfied_by_lt(self) -> None:
        c = package_constraint_t.parse('glibc<2.38')
        self.assertFalse(c.satisfied_by('2.38'))
        self.assertTrue(c.satisfied_by('2.37'))

    def test_to_str_name_only(self) -> None:
        c = package_constraint_t.parse('glibc')
        self.assertEqual(c.to_str(), 'glibc')

    def test_to_str_with_op(self) -> None:
        c = package_constraint_t.parse('glibc>=2.38')
        self.assertEqual(c.to_str(), 'glibc>=2.38')

    def test_eq_and_hash(self) -> None:
        a = package_constraint_t.parse('glibc>=2.38')
        b = package_constraint_t.parse('glibc>=2.38')
        self.assertEqual(a, b)
        self.assertEqual(hash(a), hash(b))

    def test_neq(self) -> None:
        a = package_constraint_t.parse('glibc>=2.38')
        b = package_constraint_t.parse('glibc>=2.39')
        self.assertNotEqual(a, b)

    def test_repr_name_only(self) -> None:
        c = package_constraint_t.parse('glibc')
        self.assertIn('glibc', repr(c))

    def test_repr_with_op(self) -> None:
        r = repr(package_constraint_t.parse('glibc>=2.38'))
        self.assertIn('glibc', r)
        self.assertIn('>=', r)
        self.assertIn('2.38', r)

    def test_eq_with_non_constraint(self) -> None:
        c = package_constraint_t.parse('glibc')
        self.assertNotEqual(c, 'glibc')
class TestPacmanConstraint(unittest.TestCase):
    """pacman-flavoured constraint syntax (single '=' means exact match)."""

    def test_parse_single_eq(self) -> None:
        constraint = pacman_constraint_t.parse('glibc=2.38-1')
        self.assertEqual(constraint.name, 'glibc')
        self.assertEqual(constraint.op, constraint_op_t.eq)
        self.assertEqual(constraint.version, '2.38-1')

    def test_parse_single_eq_epoch(self) -> None:
        constraint = pacman_constraint_t.parse('less=1:692-1')
        self.assertEqual(constraint.name, 'less')
        self.assertEqual(constraint.op, constraint_op_t.eq)
        self.assertEqual(constraint.version, '1:692-1')

    def test_parse_so_version(self) -> None:
        # shared-object provides like 'libfoo.so=1'
        constraint = pacman_constraint_t.parse('libfoo.so=1')
        self.assertEqual(constraint.name, 'libfoo.so')
        self.assertEqual(constraint.op, constraint_op_t.eq)
        self.assertEqual(constraint.version, '1')

    def test_parse_ge(self) -> None:
        constraint = pacman_constraint_t.parse('linux-api-headers>=4.10')
        self.assertEqual(constraint.name, 'linux-api-headers')
        self.assertEqual(constraint.op, constraint_op_t.ge)
        self.assertEqual(constraint.version, '4.10')

    def test_parse_name_only(self) -> None:
        constraint = pacman_constraint_t.parse('filesystem')
        self.assertEqual(constraint.name, 'filesystem')
        self.assertIsNone(constraint.op)

    def test_to_str_uses_double_eq(self) -> None:
        # formatting normalizes pacman '=' into the general '==' spelling
        constraint = pacman_constraint_t.parse('gcc-libs=13.2.1')
        self.assertEqual(constraint.to_str(), 'gcc-libs==13.2.1')

    def test_converts_to_package_constraint(self) -> None:
        constraint = pacman_constraint_t.parse('glibc=2.38')
        self.assertIsInstance(constraint, package_constraint_t)
        self.assertEqual(constraint.op, constraint_op_t.eq)
        self.assertEqual(constraint.op.value, '==')
class TestPackageDesc(unittest.TestCase):
    """Defaults and constraint parsing on package_desc_t."""

    def test_defaults(self) -> None:
        desc = package_desc_t(name='test', version='1.0')
        self.assertEqual(desc.name, 'test')
        self.assertEqual(desc.version, '1.0')
        # list fields default to empty, not None
        self.assertEqual(desc.depends, [])
        self.assertEqual(desc.provides, [])
        self.assertEqual(desc.conflicts, [])

    def test_parsed_depends(self) -> None:
        desc = package_desc_t(
            name='test',
            version='1.0',
            depends=['glibc>=2.38', 'bash'],
        )
        parsed = desc.parsed_depends()
        self.assertEqual(len(parsed), 2)
        self.assertEqual(parsed[0].name, 'glibc')
        self.assertEqual(parsed[0].op, constraint_op_t.ge)
        self.assertEqual(parsed[1].name, 'bash')

    def test_parsed_provides(self) -> None:
        desc = package_desc_t(
            name='test',
            version='1.0',
            provides=['libfoo.so=1', 'libbar.so'],
        )
        parsed = desc.parsed_provides()
        self.assertEqual(len(parsed), 2)
        self.assertEqual(parsed[0].name, 'libfoo.so')

    def test_parsed_conflicts(self) -> None:
        desc = package_desc_t(
            name='test',
            version='1.0',
            conflicts=['other-test'],
        )
        parsed = desc.parsed_conflicts()
        self.assertEqual(len(parsed), 1)
        self.assertEqual(parsed[0].name, 'other-test')
class TestRepoConfig(unittest.TestCase):
    """Plain construction of repo_config_t."""

    def test_basic(self) -> None:
        repo = repo_config_t(name='core', url='https://example.com/core/os/x86_64')
        self.assertEqual(repo.name, 'core')
        self.assertEqual(repo.url, 'https://example.com/core/os/x86_64')
class TestMirrorConfig(unittest.TestCase):
    """mirror_config_t alternate constructors (archive date / plain mirror URL)."""

    def test_from_archive_date(self) -> None:
        cfg = mirror_config_t.from_archive_date('2024/01/15')
        self.assertEqual(len(cfg.repos), 3)
        self.assertEqual(cfg.repos[0].name, 'core')
        # archive URLs embed the snapshot date and point at archive.archlinux.org
        self.assertIn('2024/01/15', cfg.repos[0].url)
        self.assertIn('archive.archlinux.org', cfg.repos[0].url)

    def test_from_archive_date_custom_repos(self) -> None:
        cfg = mirror_config_t.from_archive_date('2024/01/15', repos=['core', 'extra'])
        self.assertEqual(len(cfg.repos), 2)

    def test_from_archive_date_custom_arch(self) -> None:
        cfg = mirror_config_t.from_archive_date('2024/01/15', arch='aarch64')
        self.assertEqual(cfg.arch, 'aarch64')
        self.assertIn('aarch64', cfg.repos[0].url)

    def test_from_mirror_url(self) -> None:
        cfg = mirror_config_t.from_mirror_url('https://mirror.example.com')
        self.assertEqual(len(cfg.repos), 3)
        self.assertIn('mirror.example.com', cfg.repos[0].url)

    def test_from_mirror_url_trailing_slash(self) -> None:
        # a trailing '/' on the mirror must not produce '//' inside the repo URL
        cfg = mirror_config_t.from_mirror_url('https://mirror.example.com/')
        self.assertNotIn('//', cfg.repos[0].url.replace('https://', ''))

    def test_default_arch(self) -> None:
        cfg = mirror_config_t.from_archive_date('2024/01/15')
        self.assertEqual(cfg.arch, 'x86_64')
class TestRepoIndex(unittest.TestCase):
    """repo_index_t.build_provides_index over the packages mapping."""

    def test_build_provides_index(self) -> None:
        index = repo_index_t(name='core')
        index.packages['gcc'] = package_desc_t(
            name='gcc',
            version='13.2.1-1',
            provides=['gcc-multilib', 'gcc-libs=13.2.1'],
        )
        index.packages['python'] = package_desc_t(
            name='python',
            version='3.11.5-1',
            provides=['python3=3.11.5'],
        )

        index.build_provides_index()

        # provides entries are keyed by the bare provided name (version stripped)
        self.assertIn('gcc-multilib', index.provides_index)
        self.assertEqual(index.provides_index['gcc-multilib'], ['gcc'])
        self.assertIn('python3', index.provides_index)
        self.assertEqual(index.provides_index['python3'], ['python'])

    def test_empty_index(self) -> None:
        index = repo_index_t(name='empty')
        index.build_provides_index()
        self.assertEqual(index.provides_index, {})

    def test_multiple_providers(self) -> None:
        index = repo_index_t(name='core')
        index.packages['java-runtime-common'] = package_desc_t(
            name='java-runtime-common',
            version='3-5',
            provides=['java-runtime'],
        )
        index.packages['jre-openjdk'] = package_desc_t(
            name='jre-openjdk',
            version='17.0.8-1',
            provides=['java-runtime'],
        )

        index.build_provides_index()

        # two packages may legitimately provide the same virtual name
        self.assertIn('java-runtime', index.provides_index)
        self.assertEqual(len(index.provides_index['java-runtime']), 2)
class TestCompileOptions(unittest.TestCase):
    """Default values of compile_options_t."""

    def test_defaults(self) -> None:
        opts = compile_options_t()
        self.assertEqual(opts.packages, [])
        self.assertIsNone(opts.index_url)
        self.assertIsNone(opts.archive_date)
        # all boolean switches default off
        for flag in (opts.offline, opts.no_cache, opts.generate_hashes):
            self.assertFalse(flag)
        self.assertEqual(opts.repos, ['core', 'extra', 'multilib'])
        self.assertEqual(opts.arch, 'x86_64')
class TestCompileResult(unittest.TestCase):
    """Rendering compile_result_t.res_t to the requirements-style text format."""

    def test_to_txt_empty(self) -> None:
        res = compile_result_t.res_t()
        self.assertEqual(res.to_txt(), '')

    def test_to_txt_single(self) -> None:
        res = compile_result_t.res_t(
            entries=[
                compile_entry_t(
                    name='bash',
                    version='5.2.015-1',
                    filename='bash-5.2.015-1-x86_64.pkg.tar.zst',
                    repo='core',
                    url='https://example.com/bash-5.2.015-1-x86_64.pkg.tar.zst',
                    sha256='abc123',
                ),
            ],
        )
        txt = res.to_txt()
        self.assertIn('bash==5.2.015-1', txt)
        self.assertIn('--hash=sha256:abc123', txt)
        self.assertIn('# https://example.com/bash-5.2.015-1-x86_64.pkg.tar.zst', txt)

    def test_to_txt_no_hash(self) -> None:
        res = compile_result_t.res_t(
            entries=[
                compile_entry_t(
                    name='bash',
                    version='5.2.015-1',
                    filename='bash-5.2.015-1-x86_64.pkg.tar.zst',
                    repo='core',
                    url='https://example.com/bash',
                ),
            ],
        )
        txt = res.to_txt()
        self.assertIn('bash==5.2.015-1', txt)
        # without a sha256 no --hash suffix is emitted
        self.assertNotIn('--hash', txt)

    def test_to_txt_sorted(self) -> None:
        res = compile_result_t.res_t(
            entries=[
                compile_entry_t(name='zlib', version='1.3', filename='zlib.pkg', repo='core', url=''),
                compile_entry_t(name='bash', version='5.2', filename='bash.pkg', repo='core', url=''),
                compile_entry_t(name='gcc', version='13.2', filename='gcc.pkg', repo='core', url=''),
            ],
        )
        txt = res.to_txt()
        # FIX: also drop empty lines — a trailing newline (or blank separator)
        # would otherwise inject '' elements and shift the indexed assertions;
        # 'l' renamed to 'line' (ambiguous single-letter name, E741).
        lines = [line for line in txt.split('\n') if line and not line.startswith('#')]
        self.assertTrue(lines[0].startswith('bash'))
        self.assertTrue(lines[1].startswith('gcc'))
        self.assertTrue(lines[2].startswith('zlib'))
@ -0,0 +1,135 @@
|
||||
import unittest
|
||||
import unittest.mock
|
||||
import subprocess
|
||||
|
||||
from ..apps.pacman.client import pacman_t
|
||||
|
||||
|
||||
class TestPacmanParseInfoBlock(unittest.TestCase):
    """Parsing one 'pacman -Qi'-style info block into a structured entry."""

    def test_parse_simple_block(self) -> None:
        raw = (
            'Name            : bash\n'
            'Version         : 5.2.015-1\n'
            'Description     : The GNU Bourne Again shell\n'
            'Architecture    : x86_64\n'
            'URL             : https://www.gnu.org/software/bash/\n'
            'Installed Size  : 9.06 MiB\n'
            'Packager        : Arch Linux\n'
        )
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.name, 'bash')
        self.assertEqual(parsed.version, '5.2.015-1')
        self.assertEqual(parsed.description, 'The GNU Bourne Again shell')
        self.assertEqual(parsed.architecture, 'x86_64')
        self.assertEqual(parsed.url, 'https://www.gnu.org/software/bash/')

    def test_parse_with_depends(self) -> None:
        # continuation lines (leading whitespace, no key) extend the last field
        raw = (
            'Name            : python\n'
            'Version         : 3.11.5-1\n'
            'Description     : Python programming language\n'
            'Architecture    : x86_64\n'
            'Depends On      : glibc\n'
            '                  expat\n'
            '                  openssl\n'
        )
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.name, 'python')
        self.assertEqual(parsed.depends_on, ['glibc', 'expat', 'openssl'])

    def test_parse_with_provides(self) -> None:
        raw = 'Name            : python\nVersion         : 3.11.5-1\nProvides        : python3=3.11.5\n                  python3.11\n'
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.provides, ['python3=3.11.5', 'python3.11'])

    def test_parse_with_conflicts(self) -> None:
        raw = 'Name            : iptables-nft\nVersion         : 1.8.9-1\nConflicts With  : iptables\n'
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.conflicts_with, ['iptables'])

    def test_parse_with_replaces(self) -> None:
        raw = 'Name            : iptables-nft\nVersion         : 1.8.9-1\nReplaces        : iptables\n'
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.replaces, ['iptables'])

    def test_parse_with_groups(self) -> None:
        raw = 'Name            : gcc\nVersion         : 13.2.1-1\nGroups          : base-devel\n'
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.groups, ['base-devel'])

    def test_parse_none_depends(self) -> None:
        # the literal value 'None' means an empty list, not ['None']
        raw = 'Name            : filesystem\nVersion         : 2023.09.18-1\nDepends On      : None\n'
        parsed = pacman_t.parse_info_block(raw)
        self.assertEqual(parsed.depends_on, [])

    def test_parse_missing_name_raises(self) -> None:
        with self.assertRaises(ValueError):
            pacman_t.parse_info_block('Version         : 1.0\n')

    def test_parse_missing_version_raises(self) -> None:
        with self.assertRaises(ValueError):
            pacman_t.parse_info_block('Name            : test\n')
class TestListInstalledSimple(unittest.TestCase):
    """pacman_t.list_installed_simple over a mocked subprocess.check_output."""

    @unittest.mock.patch('subprocess.check_output')
    def test_basic(self, check_output_mock: unittest.mock.MagicMock) -> None:
        check_output_mock.return_value = b'bash 5.2.015-1\npython 3.11.5-1\ngcc 13.2.1-1\n'

        installed = pacman_t.list_installed_simple()

        self.assertEqual(
            installed,
            [
                ('bash', '5.2.015-1'),
                ('python', '3.11.5-1'),
                ('gcc', '13.2.1-1'),
            ],
        )

    @unittest.mock.patch('subprocess.check_output')
    def test_empty(self, check_output_mock: unittest.mock.MagicMock) -> None:
        check_output_mock.return_value = b'\n'

        installed = pacman_t.list_installed_simple()
        self.assertEqual(len(installed), 0)

    @unittest.mock.patch('subprocess.check_output')
    def test_custom_db_path(self, check_output_mock: unittest.mock.MagicMock) -> None:
        import pathlib

        check_output_mock.return_value = b'bash 5.2.015-1\n'

        pacman_t.list_installed_simple(db_path=pathlib.Path('/tmp/testdb'))

        # the custom db path must be forwarded to pacman via --dbpath
        cmd = check_output_mock.call_args[0][0]
        self.assertIn('--dbpath', cmd)
        self.assertIn('/tmp/testdb', cmd)
class TestListInstalled(unittest.TestCase):
    """pacman_t.list_installed parses blank-line-separated info blocks."""

    @unittest.mock.patch('subprocess.check_output')
    def test_basic(self, check_output_mock: unittest.mock.MagicMock) -> None:
        check_output_mock.return_value = (
            b'Name            : bash\n'
            b'Version         : 5.2.015-1\n'
            b'Description     : The GNU Bourne Again shell\n'
            b'Architecture    : x86_64\n'
            b'\n'
            b'Name            : python\n'
            b'Version         : 3.11.5-1\n'
            b'Description     : Python programming language\n'
            b'Architecture    : x86_64\n'
        )

        listing = pacman_t.list_installed()

        self.assertEqual(len(listing.packages), 2)
        self.assertEqual(
            [pkg.name for pkg in listing.packages],
            ['bash', 'python'],
        )

    @unittest.mock.patch('subprocess.check_output')
    def test_empty_output(self, check_output_mock: unittest.mock.MagicMock) -> None:
        check_output_mock.return_value = b'\n'

        listing = pacman_t.list_installed()
        self.assertEqual(len(listing.packages), 0)
@ -0,0 +1,345 @@
|
||||
import unittest
|
||||
from typing import Optional
|
||||
|
||||
from ..resolver.general import resolver_t
|
||||
from ..models import (
|
||||
package_t,
|
||||
package_index_t,
|
||||
package_constraint_t,
|
||||
)
|
||||
|
||||
|
||||
def _parse_deps(deps: list[str]) -> list[package_constraint_t]:
    """Parse each textual constraint spec into a package_constraint_t."""
    return list(map(package_constraint_t.parse, deps))
|
||||
|
||||
|
||||
class TestResolver(unittest.TestCase):
|
||||
def _make_index(self, name: str, packages: list[package_t]) -> package_index_t:
|
||||
idx = package_index_t(name=name)
|
||||
for pkg in packages:
|
||||
idx.add(pkg)
|
||||
idx.build_provides_index()
|
||||
return idx
|
||||
|
||||
def _pkg(
|
||||
self,
|
||||
name: str,
|
||||
version: str,
|
||||
depends: Optional[list[str]] = None,
|
||||
provides: Optional[list[str]] = None,
|
||||
conflicts: Optional[list[str]] = None,
|
||||
) -> package_t:
|
||||
return package_t(
|
||||
name=name,
|
||||
version=version,
|
||||
depends=_parse_deps(depends or []),
|
||||
provides=_parse_deps(provides or []),
|
||||
conflicts=_parse_deps(conflicts or []),
|
||||
)
|
||||
|
||||
def test_resolve_single_no_deps(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='bash', version='5.2.015-1'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['bash'], [idx])
|
||||
|
||||
self.assertIn('bash', result.resolved)
|
||||
self.assertEqual(result.resolved['bash'].version, '5.2.015-1')
|
||||
self.assertEqual(result.resolution_order, ['bash'])
|
||||
|
||||
def test_resolve_with_dependency(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='python', version='3.11.5-1', depends=['glibc']),
|
||||
self._pkg(name='glibc', version='2.38-1'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['python'], [idx])
|
||||
|
||||
self.assertIn('python', result.resolved)
|
||||
self.assertIn('glibc', result.resolved)
|
||||
|
||||
def test_resolve_chain_deps(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='a', version='1.0', depends=['b']),
|
||||
self._pkg(name='b', version='1.0', depends=['c']),
|
||||
self._pkg(name='c', version='1.0'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['a'], [idx])
|
||||
|
||||
self.assertEqual(len(result.resolved), 3)
|
||||
self.assertIn('a', result.resolved)
|
||||
self.assertIn('b', result.resolved)
|
||||
self.assertIn('c', result.resolved)
|
||||
|
||||
def test_resolve_version_constraint_satisfied(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='python', version='3.11.5-1', depends=['glibc>=2.35']),
|
||||
self._pkg(name='glibc', version='2.38-1'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['python'], [idx])
|
||||
|
||||
self.assertIn('glibc', result.resolved)
|
||||
|
||||
def test_resolve_version_constraint_not_satisfied(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='python', version='3.11.5-1', depends=['glibc>=2.40']),
|
||||
self._pkg(name='glibc', version='2.38-1'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['python'], [idx])
|
||||
self.assertGreater(len(result.problems), 0)
|
||||
|
||||
def test_resolve_not_found(self) -> None:
|
||||
idx = self._make_index(
|
||||
'core',
|
||||
[
|
||||
self._pkg(name='bash', version='5.2.015-1'),
|
||||
],
|
||||
)
|
||||
|
||||
result = resolver_t.resolve(['nonexistent'], [idx])
|
||||
self.assertGreater(len(result.problems), 0)
|
||||
self.assertTrue(any('nonexistent' in p for p in result.problems))
|
||||
|
||||
def test_resolve_multiple_packages(self) -> None:
    """Several independent roots resolve together in one call."""
    names = ['bash', 'python', 'gcc']
    idx = self._make_index(
        'core',
        [
            self._pkg(name='bash', version='5.2.015-1'),
            self._pkg(name='python', version='3.11.5-1'),
            self._pkg(name='gcc', version='13.2.1-1'),
        ],
    )

    result = resolver_t.resolve(names, [idx])

    self.assertEqual(len(result.resolved), len(names))
|
||||
|
||||
def test_resolve_shared_dependency(self) -> None:
    """A dependency shared by two roots appears exactly once in the order."""
    pkgs = [
        self._pkg(name='python', version='3.11.5-1', depends=['glibc']),
        self._pkg(name='bash', version='5.2.015-1', depends=['glibc']),
        self._pkg(name='glibc', version='2.38-1'),
    ]
    result = resolver_t.resolve(['python', 'bash'], [self._make_index('core', pkgs)])

    self.assertEqual(len(result.resolved), 3)
    # shared glibc must not be scheduled twice
    self.assertEqual(result.resolution_order.count('glibc'), 1)
|
||||
|
||||
def test_resolve_across_repos(self) -> None:
    """A dependency that lives in a different repo index is still found."""
    core = self._make_index('core', [self._pkg(name='glibc', version='2.38-1')])
    extra = self._make_index(
        'extra',
        [self._pkg(name='python', version='3.11.5-1', depends=['glibc'])],
    )

    result = resolver_t.resolve(['python'], [extra, core])

    for name in ('python', 'glibc'):
        self.assertIn(name, result.resolved)
|
||||
|
||||
def test_resolve_repo_priority(self) -> None:
    """When both repos carry a package, the earlier-listed repo's version wins."""
    core = self._make_index('core', [self._pkg(name='bash', version='5.2.015-1')])
    extra = self._make_index('extra', [self._pkg(name='bash', version='5.1.000-1')])

    resolved = resolver_t.resolve(['bash'], [core, extra]).resolved

    self.assertEqual(resolved['bash'].version, '5.2.015-1')
|
||||
|
||||
def test_resolve_via_provides(self) -> None:
    """A dependency on a virtual name is satisfied via a provides entry."""
    pkgs = [
        self._pkg(
            name='python',
            version='3.11.5-1',
            provides=['python3==3.11.5'],
        ),
        self._pkg(name='app', version='1.0', depends=['python3']),
    ]
    result = resolver_t.resolve(['app'], [self._make_index('core', pkgs)])

    for name in ('python', 'app'):
        self.assertIn(name, result.resolved)
|
||||
|
||||
def test_resolve_provides_with_version(self) -> None:
    """A versioned constraint on a virtual name checks the provided version."""
    pkgs = [
        self._pkg(
            name='python',
            version='3.11.5-1',
            provides=['python3==3.11.5'],
        ),
        self._pkg(name='app', version='1.0', depends=['python3>=3.10']),
    ]
    result = resolver_t.resolve(['app'], [self._make_index('core', pkgs)])

    self.assertIn('python', result.resolved)
|
||||
|
||||
def test_resolve_conflict(self) -> None:
    """Depending on two mutually conflicting packages reports a conflict."""
    pkgs = [
        self._pkg(name='iptables-nft', version='1.8.9-1', conflicts=['iptables']),
        self._pkg(name='iptables', version='1.8.9-1'),
        self._pkg(name='app', version='1.0', depends=['iptables-nft', 'iptables']),
    ]
    result = resolver_t.resolve(['app'], [self._make_index('core', pkgs)])

    self.assertTrue(result.problems)
    self.assertTrue(any('conflict' in problem for problem in result.problems))
|
||||
|
||||
def test_resolve_skip_installed(self) -> None:
    """Dependencies listed in skip_installed are omitted from the result."""
    idx = self._make_index(
        'core',
        [self._pkg(name='python', version='3.11.5-1', depends=['glibc'])],
    )

    result = resolver_t.resolve(['python'], [idx], skip_installed={'glibc'})

    self.assertIn('python', result.resolved)
    self.assertNotIn('glibc', result.resolved)
|
||||
|
||||
def test_resolve_empty_packages(self) -> None:
    """An empty request resolves to an empty result and empty order."""
    idx = self._make_index('core', [self._pkg(name='bash', version='5.2.015-1')])

    result = resolver_t.resolve([], [idx])

    self.assertEqual(len(result.resolved), 0)
    self.assertEqual(result.resolution_order, [])
|
||||
|
||||
def test_resolve_circular_deps_handled(self) -> None:
    """A dependency cycle (a <-> b) terminates and resolves both packages."""
    cycle = [
        self._pkg(name='a', version='1.0', depends=['b']),
        self._pkg(name='b', version='1.0', depends=['a']),
    ]
    result = resolver_t.resolve(['a'], [self._make_index('core', cycle)])

    for name in ('a', 'b'):
        self.assertIn(name, result.resolved)
|
||||
|
||||
def test_resolve_diamond_deps(self) -> None:
    """A diamond graph resolves every node; the shared leaf appears once."""
    diamond = [
        self._pkg(name='app', version='1.0', depends=['liba', 'libb']),
        self._pkg(name='liba', version='1.0', depends=['libcommon']),
        self._pkg(name='libb', version='1.0', depends=['libcommon']),
        self._pkg(name='libcommon', version='1.0'),
    ]
    result = resolver_t.resolve(['app'], [self._make_index('core', diamond)])

    self.assertEqual(len(result.resolved), 4)
    self.assertEqual(result.resolution_order.count('libcommon'), 1)
|
||||
|
||||
def test_resolve_with_constraint_string(self) -> None:
    """A request with an inline >= constraint resolves when satisfied."""
    idx = self._make_index('core', [self._pkg(name='bash', version='5.2.015-1')])

    result = resolver_t.resolve(['bash>=5.0'], [idx])

    self.assertIn('bash', result.resolved)
|
||||
|
||||
def test_resolve_with_eq_constraint(self) -> None:
    """An exact == constraint matching the repo's version resolves."""
    idx = self._make_index('core', [self._pkg(name='bash', version='5.2.015-1')])

    result = resolver_t.resolve(['bash==5.2.015-1'], [idx])

    self.assertIn('bash', result.resolved)
|
||||
|
||||
def test_resolve_with_eq_constraint_mismatch(self) -> None:
    """An exact == constraint for an absent version reports a problem."""
    idx = self._make_index('core', [self._pkg(name='bash', version='5.2.015-1')])

    result = resolver_t.resolve(['bash==5.1.000-1'], [idx])

    self.assertTrue(result.problems)
|
||||
|
||||
def test_error_not_found_message(self) -> None:
    """not_found_t keeps the package name and mentions it in str()."""
    err = resolver_t.error_t.not_found_t('missing-pkg')
    self.assertEqual(err.name, 'missing-pkg')
    self.assertIn('missing-pkg', str(err))
|
||||
|
||||
def test_error_conflict_message(self) -> None:
    """conflict_t mentions both involved package names in its message."""
    err = resolver_t.error_t.conflict_t('a', 'b', 'b>=1.0')
    message = str(err)
    # NOTE(review): single-letter substrings make this a weak assertion;
    # consider longer fixture names to tighten it.
    for name in ('a', 'b'):
        self.assertIn(name, message)
|
||||
|
||||
def test_error_unsatisfied_message(self) -> None:
    """unsatisfied_t mentions the parent package and the failed constraint."""
    err = resolver_t.error_t.unsatisfied_t('parent', 'dep>=2.0')
    message = str(err)
    for fragment in ('parent', 'dep>=2.0'):
        self.assertIn(fragment, message)
|
||||
@ -0,0 +1,328 @@
|
||||
import pathlib
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from ..apps.pacman.db import db_parser_t
|
||||
from ..apps.pacman.types import repo_index_t
|
||||
from ..resolver.solv import (
|
||||
solv_pool_t,
|
||||
)
|
||||
from ..resolver.solv_types import (
|
||||
repo_store_t,
|
||||
solv_index_t,
|
||||
solv_package_t,
|
||||
)
|
||||
|
||||
|
||||
def _repo_index_to_solv(idx: repo_index_t) -> solv_index_t:
    """Convert a pacman ``repo_index_t`` into a ``solv_index_t`` for tests.

    Copies each package's metadata field by field (lists are shallow-copied
    so the two indices do not share mutable state) and finalizes the result
    with ``build_provides_index`` so it is ready for ``solv_pool_t``.
    """
    out = solv_index_t(name=idx.name)
    # NOTE(review): assumes repo_index_t.packages maps name -> package
    # (single level); confirm against apps/pacman/types.py.
    for src in idx.packages.values():
        fields = dict(
            name=src.name,
            version=src.version,
            filename=src.filename,
            sha256sum=src.sha256sum,
            arch=src.arch,
            depends=list(src.depends),
            provides=list(src.provides),
            conflicts=list(src.conflicts),
            groups=list(src.groups),
        )
        out.add(solv_package_t(**fields))
    out.build_provides_index()
    return out
|
||||
|
||||
|
||||
class TestSolvPoolUnit(unittest.TestCase):
    """Unit tests for ``solv_pool_t`` over small hand-built solv indices."""

    @staticmethod
    def _pool(*indices: solv_index_t) -> solv_pool_t:
        """Finalize each index's provides map and wrap them all into a pool."""
        stores = []
        for index in indices:
            index.build_provides_index()
            stores.append(repo_store_t(index=index))
        return solv_pool_t(stores=stores)

    def test_add_single_package(self) -> None:
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='bash', version='5.2-1', arch='x86_64'))

        result = self._pool(idx).resolve(['bash'])
        self.assertIn('bash', result.resolved)
        self.assertEqual(len(result.problems), 0)

    def test_resolve_with_dependency(self) -> None:
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='python', version='3.11-1', arch='x86_64', depends=['glibc']))
        idx.add(solv_package_t(name='glibc', version='2.38-1', arch='x86_64'))

        result = self._pool(idx).resolve(['python'])
        for name in ('python', 'glibc'):
            self.assertIn(name, result.resolved)
        self.assertEqual(len(result.problems), 0)

    def test_resolve_any_arch(self) -> None:
        # arch='any' packages must be installable alongside arch-specific ones
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='ca-certs', version='1.0-1', arch='any'))

        result = self._pool(idx).resolve(['ca-certs'])
        self.assertIn('ca-certs', result.resolved)

    def test_resolve_via_provides(self) -> None:
        # a soname dependency is satisfied through provides, not the package name
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='ncurses', version='6.6-1', arch='x86_64', provides=['libncursesw.so=6-64']))
        idx.add(solv_package_t(name='readline', version='8.3-1', arch='x86_64', depends=['libncursesw.so=6-64']))

        result = self._pool(idx).resolve(['readline'])
        for name in ('readline', 'ncurses'):
            self.assertIn(name, result.resolved)
        self.assertEqual(len(result.problems), 0)

    def test_resolve_not_found(self) -> None:
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='bash', version='5.2-1', arch='x86_64'))

        result = self._pool(idx).resolve(['nonexistent'])
        self.assertNotIn('nonexistent', result.resolved)
        self.assertGreater(len(result.problems), 0)

    def test_resolve_version_constraint(self) -> None:
        idx = solv_index_t(name='test')
        idx.add(solv_package_t(name='app', version='1.0-1', arch='x86_64', depends=['glibc>=2.35']))
        idx.add(solv_package_t(name='glibc', version='2.38-1', arch='x86_64'))

        result = self._pool(idx).resolve(['app'])
        self.assertIn('glibc', result.resolved)
        self.assertEqual(len(result.problems), 0)

    def test_resolve_multiple_repos(self) -> None:
        core = solv_index_t(name='core')
        core.add(solv_package_t(name='glibc', version='2.38-1', arch='x86_64'))

        extra = solv_index_t(name='extra')
        extra.add(solv_package_t(name='python', version='3.11-1', arch='x86_64', depends=['glibc']))

        result = self._pool(core, extra).resolve(['python'])
        for name in ('python', 'glibc'):
            self.assertIn(name, result.resolved)
|
||||
|
||||
|
||||
class TestRepoStoreCache(unittest.TestCase):
    """Tests for the on-disk .solv cache written and reloaded by ``solv_pool_t``."""

    @staticmethod
    def _index(*packages: solv_package_t) -> solv_index_t:
        """Build a finalized test index named 'test' holding *packages*."""
        idx = solv_index_t(name='test')
        for pkg in packages:
            idx.add(pkg)
        idx.build_provides_index()
        return idx

    def test_write_and_load_solv_cache(self) -> None:
        store = repo_store_t(
            index=self._index(
                solv_package_t(name='bash', version='5.2-1', arch='x86_64', depends=['glibc']),
                solv_package_t(name='glibc', version='2.38-1', arch='x86_64'),
            ),
            db_checksum='abc123',
        )

        with tempfile.TemporaryDirectory() as tmpdir:
            cache_dir = pathlib.Path(tmpdir)

            # first pool writes the cache files
            pool1 = solv_pool_t()
            pool1.add_store(store, cache_dir=cache_dir)
            pool1.finalize()

            self.assertTrue((cache_dir / 'test.solv').exists())
            self.assertTrue((cache_dir / 'test.solv.sha256').exists())
            self.assertEqual((cache_dir / 'test.solv.sha256').read_text().strip(), 'abc123')

            # second pool must still resolve after loading from the cache
            pool2 = solv_pool_t()
            pool2.add_store(store, cache_dir=cache_dir)
            pool2.finalize()

            result = pool2.resolve(['bash'])
            for name in ('bash', 'glibc'):
                self.assertIn(name, result.resolved)

    def test_cache_invalidated_on_checksum_mismatch(self) -> None:
        with tempfile.TemporaryDirectory() as tmpdir:
            cache_dir = pathlib.Path(tmpdir)

            store1 = repo_store_t(
                index=self._index(solv_package_t(name='bash', version='5.2-1', arch='x86_64')),
                db_checksum='checksum_v1',
            )
            pool1 = solv_pool_t()
            pool1.add_store(store1, cache_dir=cache_dir)
            pool1.finalize()

            self.assertEqual((cache_dir / 'test.solv.sha256').read_text().strip(), 'checksum_v1')

            # same repo name but new checksum: the cache must be rewritten
            store2 = repo_store_t(
                index=self._index(solv_package_t(name='bash', version='5.3-1', arch='x86_64')),
                db_checksum='checksum_v2',
            )
            pool2 = solv_pool_t()
            pool2.add_store(store2, cache_dir=cache_dir)
            pool2.finalize()

            self.assertEqual((cache_dir / 'test.solv.sha256').read_text().strip(), 'checksum_v2')
            self.assertIn('bash', pool2.resolve(['bash']).resolved)

    def test_from_parsed_db(self) -> None:
        import io
        import tarfile

        with tempfile.TemporaryDirectory() as tmpdir:
            # build a minimal pacman .db archive with a single package entry
            buf = io.BytesIO()
            with tarfile.open(fileobj=buf, mode='w:gz') as tar:
                desc = b'%NAME%\nbash\n\n%VERSION%\n5.2-1\n'
                info = tarfile.TarInfo(name='bash-5.2-1/desc')
                info.size = len(desc)
                info.type = tarfile.REGTYPE
                tar.addfile(info, io.BytesIO(desc))

            db_path = pathlib.Path(tmpdir) / 'test.db'
            db_path.write_bytes(buf.getvalue())

            idx = db_parser_t.parse_db_path(db_path, repo_name='test')
            store = repo_store_t(index=_repo_index_to_solv(idx))
            self.assertIn('bash', store.index.packages)
|
||||
|
||||
|
||||
class _SharedStores:
    """Process-wide cache of parsed repo snapshots and the installed-package list.

    Parsing the .db fixtures is comparatively expensive, so all test classes
    in this module share the same lazily-built data through this holder.
    """

    # date -> parsed [core, extra] stores, filled lazily by get_stores()
    _cache: dict[str, list[repo_store_t]] = {}
    # (name, version) pairs from installed_packages.txt, filled lazily
    _installed: list[tuple[str, str]] = []

    class constants_t:
        res_dir: pathlib.Path = pathlib.Path(__file__).parent / 'res'

        dates: list[str] = [
            '2025_03_15',
            '2025_09_15',
            '2026_03_15',
        ]

    @classmethod
    def get_stores(cls, date: str) -> list[repo_store_t]:
        """Return the [core, extra] stores for *date*, parsing fixtures on first use."""
        if date not in cls._cache:
            res_dir = cls.constants_t.res_dir
            core_idx = db_parser_t.parse_db_path(res_dir / ('core_%s.db' % date), repo_name='core')
            extra_idx = db_parser_t.parse_db_path(res_dir / ('extra_%s.db' % date), repo_name='extra')
            cls._cache[date] = [
                repo_store_t(index=_repo_index_to_solv(core_idx)),
                repo_store_t(index=_repo_index_to_solv(extra_idx)),
            ]
        return cls._cache[date]

    @classmethod
    def get_installed(cls) -> list[tuple[str, str]]:
        """Return (name, version) pairs from the installed-packages fixture."""
        if not cls._installed:
            # explicit encoding so the read does not depend on the locale
            with open(cls.constants_t.res_dir / 'installed_packages.txt', encoding='utf-8') as f:
                for line in f:
                    parts = line.strip().split(None, 1)
                    # skip blank or malformed lines
                    if len(parts) == 2:
                        cls._installed.append((parts[0], parts[1]))
        return cls._installed
|
||||
|
||||
|
||||
class TestSolvResolveInstalled(unittest.TestCase):
    """Resolve a real installed-package set against dated repo snapshots.

    The per-date "which installed packages exist in this snapshot" logic was
    repeated in every test; it now lives in the private helpers below.
    """

    def _make_pool(self, date: str) -> solv_pool_t:
        """Build a pool over the shared [core, extra] stores for *date*."""
        return solv_pool_t(stores=_SharedStores.get_stores(date))

    @staticmethod
    def _available(date: str) -> set[str]:
        """All package names present in the snapshot's repos."""
        names: set[str] = set()
        for store in _SharedStores.get_stores(date):
            names.update(store.index.packages.keys())
        return names

    @classmethod
    def _found_installed(cls, date: str) -> list[str]:
        """Installed package names that exist in the *date* snapshot."""
        available = cls._available(date)
        return [name for name, _ver in _SharedStores.get_installed() if name in available]

    def _check_resolve_all(self, date: str) -> None:
        """Resolving every available installed package succeeds problem-free."""
        found = self._found_installed(date)
        result = self._make_pool(date).resolve(found)
        self.assertEqual(len(result.problems), 0)
        self.assertGreaterEqual(len(result.resolved), len(found))

    def test_resolve_2025_03_15(self) -> None:
        self._check_resolve_all('2025_03_15')

    def test_resolve_2025_09_15(self) -> None:
        self._check_resolve_all('2025_09_15')

    def test_resolve_2026_03_15(self) -> None:
        # newest snapshot is expected to contain every installed package,
        # so resolve the full list without pre-filtering
        installed = _SharedStores.get_installed()
        names = [name for name, _ver in installed]

        result = self._make_pool('2026_03_15').resolve(names)
        self.assertEqual(len(result.problems), 0)
        self.assertGreaterEqual(len(result.resolved), len(names))

        for name, _ver in installed:
            self.assertIn(name, result.resolved)

    def test_resolve_2026_03_15_no_missing(self) -> None:
        available = self._available('2026_03_15')
        missing = [name for name, _ver in _SharedStores.get_installed() if name not in available]
        self.assertEqual(missing, [])

    def test_resolved_no_duplicates(self) -> None:
        for date in _SharedStores.constants_t.dates:
            found = self._found_installed(date)
            result = self._make_pool(date).resolve(found)
            names = list(result.resolved.keys())
            self.assertEqual(len(names), len(set(names)))
|
||||
|
||||
|
||||
class TestSolvResolveSingle(unittest.TestCase):
    """Resolve individual well-known packages across every snapshot date."""

    def _make_pool(self, date: str) -> solv_pool_t:
        """Build a pool over the shared stores for *date*."""
        return solv_pool_t(stores=_SharedStores.get_stores(date))

    def test_resolve_glibc(self) -> None:
        for date in _SharedStores.constants_t.dates:
            resolved = self._make_pool(date).resolve(['glibc']).resolved
            self.assertIn('glibc', resolved)

    def test_resolve_bash(self) -> None:
        for date in _SharedStores.constants_t.dates:
            resolved = self._make_pool(date).resolve(['bash']).resolved
            for name in ('bash', 'glibc'):
                self.assertIn(name, resolved)

    def test_resolve_python(self) -> None:
        for date in _SharedStores.constants_t.dates:
            resolved = self._make_pool(date).resolve(['python']).resolved
            self.assertIn('python', resolved)
            # python pulls in a non-trivial dependency closure
            self.assertGreater(len(resolved), 3)

    def test_resolve_nonexistent(self) -> None:
        for date in _SharedStores.constants_t.dates:
            result = self._make_pool(date).resolve(['this-package-does-not-exist-xyz'])
            self.assertGreater(len(result.problems), 0)
|
||||
Loading…
Reference in New Issue
Block a user