author  bill-auger <mr.j.spam.me@gmail.com>  2022-02-23 15:21:31 -0500
committer  bill-auger <mr.j.spam.me@gmail.com>  2022-02-23 15:24:20 -0500
commit  dadb8dad48d29ec03cbda83c8456643c14a01253 (patch)
tree  f3a98e23416fb91ea6a117399df71c06e3c019f8
parent  5538856cf150207ba1ad9f7879e568e672d074d1 (diff)
wip - mixed concerns (wip-2022-02-22)
-rw-r--r--  parabola_repolint/repocache.py  376
1 file changed, 243 insertions, 133 deletions
diff --git a/parabola_repolint/repocache.py b/parabola_repolint/repocache.py
index e7e6a5a..e721f43 100644
--- a/parabola_repolint/repocache.py
+++ b/parabola_repolint/repocache.py
@@ -150,11 +150,10 @@ class PkgFile():
]
- def __init__(self, repo, path, arch):
+ def __init__(self, repo, path):
''' constructor '''
self._repo = repo
self._path = path
- self._arch = arch
self._pkgfiles_dir = os.path.normpath(RepoCache.PKGFILES_DIR + '/' + self._path)
self._pkginfo = {} # init()
@@ -267,12 +266,23 @@ class PkgFile():
@property
def pkgname(self):
''' produce the name of the package '''
- return self._pkginfo['pkgname']
+
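+ # pacman package file names have the form '<pkgname>-<pkgver>-<pkgrel>-<arch>.pkg.tar.*'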
+ parsed_name = os.path.basename(self._path).rsplit('-',3)[0]
+
+ if 'pkgname' in self._pkginfo:
+ assert self._pkginfo['pkgname'] == parsed_name
+
+ return parsed_name
@property
def arch(self):
''' produce the architecture of the package '''
- return self._arch
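+ # rsplit('-',3) yields [pkgname, pkgver, pkgrel, '<arch>.pkg.tar.*']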
+ parsed_arch = os.path.basename(self._path).rsplit('-',3)[3].split('.',1)[0]
+
+ if 'arch' in self._pkginfo:
+ assert self._pkginfo['arch'] == parsed_arch
+
+ return parsed_arch
@property
def builddate(self):
@@ -349,6 +359,8 @@ class PkgEntry():
else:
self._data[cur] = line
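+ # the repo.db 'VERSION' field holds 'pkgver-pkgrel'; PkgVersion presumably
+ # splits it (see the pkgver/pkgrel accessors below)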
+ self._pkgver = PkgVersion(self._data['VERSION'])
+
self._pkgbuilds = {} # register_pkgbuild()
self._pkgfiles = {} # register_pkgfile()
self._pkgfile = None # register_pkgfile()
@@ -380,7 +392,12 @@ class PkgEntry():
@property
def pkgver(self):
''' produce the pkgver of the package '''
- return PkgVersion(self._data['VERSION'])
+ return self._pkgver.pkgver
+
+ @property
+ def pkgrel(self):
+ ''' produce the pkgrel of the package '''
+ return self._pkgver.pkgrel
@property
def pgpsig(self):
@@ -444,7 +461,7 @@ class Srcinfo():
SRCINFO_VALUE = [
'pkgbase',
- 'pkgname',
+ 'pkgname', # FIXME: 'pkgname' can be a list
'pkgdesc',
'pkgver',
'pkgrel',
@@ -511,10 +528,14 @@ class Srcinfo():
line = line.strip()
if not line:
continue
+ #if line.startswith('#'):
+ #continue
key, value = line.split('=', 1)
key = key.strip()
value = value.strip()
+ if not key or not value:
+ continue
if key == 'pkgname':
self._pkginfo[value] = {}
@@ -555,9 +576,11 @@ class PkgBuild():
self._path = path
self._pkgbuilds_dir = os.path.normpath(RepoCache.PKGBUILDS_DIR + '/' + self._path)
- self._valid = None # _load_metadata()
- self._srcinfo = {} # _load_metadata()
- self._pkglist = {} # _load_metadata()
+ self._valid = None # _load_metadata()
+ self._srcinfo = {} # _load_metadata()
+ self._pkglist = {} # _load_metadata()
+ self._pkgbase = None # _load_metadata()
+ self._pkgver = None # _load_metadata()
self._arches = set()
self._pkgentries = {} # register_pkgentry()
@@ -600,6 +623,21 @@ class PkgBuild():
return self._pkglist
@property
+ def pkgbase(self):
+ ''' produce the base name of the package '''
+ return self._pkgbase
+
+ @property
+ def pkgver(self):
+ ''' produce the pkgver of the package '''
+ return self._pkgver.pkgver
+
+ @property
+ def pkgrel(self):
+ ''' produce the pkgrel of the package '''
+ return self._pkgver.pkgrel
+
+ @property
def srcinfo(self):
''' produce the srcinfo of the PKGBUILD '''
if self._valid is None:
@@ -636,6 +674,8 @@ class PkgBuild():
srcinfo = Srcinfo(si_str)
+ self._pkgbase = srcinfo.pkgbase['pkgbase']
+ self._pkgver = PkgVersion('%s-%s' % (srcinfo.pkgbase['pkgver'], srcinfo.pkgbase['pkgrel']))
+
self._arches = srcinfo.pkgbase['arch']
if 'any' in self._arches:
self._arches = self._arches.difference(['any'])
@@ -680,40 +720,39 @@ class PkgBuild():
class Repo():
''' represent a single pacman repository '''
- def __init__(self, name, has_abs_tree=False):
+ def __init__(self, name, known_pkgfiles=None, has_abs_tree=False):
''' constructor '''
+ # TODO: explain known_pkgfiles,
+ # and how this class is reused/abused for POOLS and SOURCES
self._name = name
+ self._known_pkgfiles = known_pkgfiles
+ self._is_pool = self._known_pkgfiles is not None
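+ # NOTE: when 'known_pkgfiles' is given, this Repo represents a 'pool'
+ # (or 'sources') directory; PkgFiles already created for the regular
+ # repos are reused, so that each pool file has exactly one PkgFile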
self._abs_dir = os.path.join(RepoCache.ABSLIBRE_DIR, name) if has_abs_tree else None
self._pkgbuilds_dir = os.path.join(RepoCache.PKGBUILDS_DIR, name)
self._pkgentries_dir = os.path.join(RepoCache.PKGENTRIES_DIR, name)
self._pkgfiles_dir = os.path.join(RepoCache.PKGFILES_DIR, name)
self._mirror_dir = os.path.join(RepoCache.MIRROR_DIR, name)
- # load abslibre tree into pkgbuilds
- self._pkgbuilds = [] # _load_pkgbuilds()
- self._pkgbuilds_cache = {} # _load_pkgbuilds()
- self._load_pkgbuilds()
-
- # load repo.db files into pkgentries
+ self._pkgbuilds = [] # _load_pkgbuilds()
+ self._pkgbuilds_cache = {} # _load_pkgbuilds()
self._pkgentries = [] # _load_pkgentries()
self._pkgentries_cache = {} # _load_pkgentries()
self._provides_cache = {} # _load_pkgentries()
- #self._load_pkgentries()
-
- # load repo symlinks into pkgfiles
- self._pkgfiles = [] # _load_pkgfiles()
- self._litterfiles = [] # _load_pkgfiles()
- #self._load_pkgfiles()
-
- # load repo symlinks into pkgfiles
- self._poolfiles = [] # _load_poolfiles()
- self._srcfiles = [] # _load_srcfiles()
- #self._load_poolfiles()
- #self._load_srcfiles()
+ self._pkgfiles = [] # _load_pkgbuilds(), _load_pkgentries(), _load_pkgfiles()
+ self._pkgfiles_cache = {} # _load_pkgfiles()
+ self._litterfiles = [] # _load_pkgfiles()
+ self._poolfiles = [] # _load_pkgfiles()
+
+ if not self._is_pool:
+ self._load_pkgbuilds() # load abslibre tree into pkgbuilds and pkgfiles
+ self._load_pkgentries() # load repo.db files into pkgentries and pkgfiles
+ self._load_pkgfiles() # load repo symlinks into pkgfiles
+ else:
+ self._load_pkgfiles_dir(self._name) # load repo pool files into pkgfiles
# correlate the collected PKGBUILDs, DB entries, and repo symlinks
- #self._associate_components()
+ self._associate_components()
## accessors ##
@@ -756,10 +795,48 @@ class Repo():
## helpers ##
- def pkgfiles_cache(self): # TODO: why no _pkgfiles_cache? symmetry would be nice
- return { name: pkgfile.name ** 2 for pkgfile in self._pkgfiles }
+ #def pkgfiles_cache(self): # TODO: why no _pkgfiles_cache? symmetry would be nice
+ #return { name: pkgfile.name ** 2 for pkgfile in self._pkgfiles }
+
+ def _log_scan(self, varname, components):
+ if sys.stdout.isatty():
+ self._clear_log_line()
+ sys.stdout.write('loading [%s] %s: %i' % (self._name, varname, len(components)))
+ sys.stdout.flush()
+
+ def _log_scan_done(self, varname, components):
+ self._clear_log_line()
+ logging.info('loaded [%s] %s: %i', self._name, varname, len(components))
+
+ def _clear_log_line(self):
+ if sys.stdout.isatty(): sys.stdout.write('\33[2K\r') ; sys.stdout.flush() ;
+
+ def _poolfile(self, pkgpath):
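+ ''' resolve a repo symlink to its canonical path in the package pool '''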
+ return os.path.realpath(pkgpath)
+
+ def _is_known_poolfile(self, pkgpath):
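+ ''' 'True', if a PkgFile was already created for this pool path '''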
+ return self._poolfile(pkgpath) in self._known_pkgfiles
+
+ def _find_or_create_pool_pkgfile(self, pkgname, pkgver, pkgrel, arch):
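+ ''' find the PkgFile at the anticipated pool path, creating one if necessary '''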
+ # guess the anticipated path(s) to the pool package file
+ pkgname_xz = "%s-%s-%s-%s.pkg.tar.xz" % (pkgname, pkgver, pkgrel, arch)
+ pkgname_zst = "%s-%s-%s-%s.pkg.tar.zst" % (pkgname, pkgver, pkgrel, arch)
+ pool_name = '' # TODO: determine the pool name for this repo
+ pools_dir = os.path.join(self._mirror_dir, 'pool', pool_name)
+ pkgpath_xz = os.path.join(pools_dir, pkgname_xz)
+ pkgpath_zst = os.path.join(pools_dir, pkgname_zst)
+ if os.path.exists(pkgpath_xz):
+ return self._find_or_create_pkgfile(pkgpath_xz)
+ else:
+ return self._find_or_create_pkgfile(pkgpath_zst)
- def _scan_log(self, fmt, args): logging.info('\33[2K' + fmt % args)
+ def _find_or_create_pkgfile(self, pkgpath):
+ ''' Ensure that all PkgFiles are unique by pool path '''
+ poolfile = self._poolfile(pkgpath)
+ if self._is_known_poolfile(poolfile):
+ return self._known_pkgfiles[poolfile]
+ else:
+ return PkgFile(self, poolfile)
def _is_loaded(self):
''' 'True', if all relevent data has been loaded '''
@@ -778,33 +855,43 @@ class Repo():
logging.info('skipping %s pkgbuilds', self._name)
return
- i = 0
for root, _, files in os.walk(self._abs_dir):
if 'PKGBUILD' in files:
- i += 1
- if sys.stdout.isatty():
- sys.stdout.write('loading [%s] pkgbuilds: %i\r' % (self._name, i))
- sys.stdout.flush()
-
pkgbuild = PkgBuild(self, os.path.join(root, 'PKGBUILD'))
self._pkgbuilds.append(pkgbuild)
for arch in pkgbuild.arches.intersection(CONFIG.parabola.arches):
if arch not in self._pkgbuilds_cache:
self._pkgbuilds_cache[arch] = {}
+ cache = self._pkgbuilds_cache[arch]
+
+ pkgname = pkgbuild.srcinfo[arch].pkgbase['pkgbase']
+ if pkgname not in cache:
+ cache[pkgname] = []
+ cache[pkgname].append(pkgbuild)
dbg_pkgname = '%s-debug' % pkgbuild.srcinfo[arch].pkgbase['pkgbase']
- if dbg_pkgname not in self._pkgbuilds_cache[arch]:
- self._pkgbuilds_cache[arch][dbg_pkgname] = []
- self._pkgbuilds_cache[arch][dbg_pkgname].append(pkgbuild)
+ if dbg_pkgname not in cache:
+ cache[dbg_pkgname] = []
+ cache[dbg_pkgname].append(pkgbuild)
for src_pkgname in pkgbuild.srcinfo[arch].pkginfo:
- if src_pkgname not in self._pkgbuilds_cache[arch]:
- self._pkgbuilds_cache[arch][src_pkgname] = []
- self._pkgbuilds_cache[arch][src_pkgname].append(pkgbuild)
+ if src_pkgname not in cache:
+ cache[src_pkgname] = []
+ cache[src_pkgname].append(pkgbuild)
+
+ # create placeholder PkgFiles
+ pkgver = pkgbuild.pkgver
+ pkgrel = pkgbuild.pkgrel
+ for arch in pkgbuild.pkglist:
+ for pkgname in pkgbuild.pkglist[arch]:
+ pkgfile = self._find_or_create_pool_pkgfile(pkgname, pkgver, pkgrel, arch)
+ self._pkgfiles.append(pkgfile)
+
+ self._log_scan('pkgbuilds', self._pkgbuilds)
# cache results
- self._scan_log('loaded [%s] pkgbuilds: %i', (self._name, len(self._pkgbuilds)))
+ self._log_scan_done('pkgbuilds', self._pkgbuilds)
os.makedirs(self._pkgbuilds_dir, exist_ok=True)
with open(os.path.join(self._pkgbuilds_dir, '.pkgbuilds'), 'w') as out:
out.write(json.dumps(self._pkgbuilds, indent=4, sort_keys=True, default=str))
@@ -832,7 +919,6 @@ class Repo():
os.makedirs(dst, exist_ok=True)
sh.tar('xf', db_file, _cwd=dst)
- i = 0
arches_dir = os.path.join(self._pkgentries_dir, 'os')
arch_dirs = sorted(os.scandir(arches_dir), key=lambda arch: arch.name)
for arch_dir in arch_dirs:
@@ -841,11 +927,6 @@ class Repo():
repofiles = sorted(os.scandir(arch_dir.path), key=lambda repofile: repofile.name)
for repofile in repofiles:
- i += 1
- if sys.stdout.isatty():
- sys.stdout.write('loading [%s] pkgentries: %i\r' % (self._name, i))
- sys.stdout.flush()
-
pkgentry = PkgEntry(self, repofile.path, arch_dir.name)
self._pkgentries.append(pkgentry)
@@ -869,8 +950,16 @@ class Repo():
self._provides_cache[pkgentry.arch][provides] = []
self._provides_cache[pkgentry.arch][provides].append(pkgentry)
+ # create placeholder PkgFiles
+ pkgname = pkgentry.pkgname ; pkgver = pkgentry.pkgver ;
+ pkgrel = pkgentry.pkgrel ; arch = pkgentry.arch ;
+ pkgfile = self._find_or_create_pool_pkgfile(pkgname, pkgver, pkgrel, arch)
+ self._pkgfiles.append(pkgfile)
+
+ self._log_scan('pkgentries', self._pkgentries)
+
# cache results
- self._scan_log('loaded [%s] pkgentries: %i', (self._name, len(self._pkgentries)))
+ self._log_scan_done('pkgentries', self._pkgentries)
with open(os.path.join(self._pkgentries_dir, '.pkgentries'), 'w') as out:
out.write(json.dumps(self._pkgentries, indent=4, sort_keys=True, default=str))
with open(os.path.join(self._pkgentries_dir, '.pkgentries_cache'), 'w') as out:
@@ -881,45 +970,62 @@ class Repo():
def _load_pkgfiles(self):
''' load the pkg.tar.xz files from the repo '''
- i = 0
arches_dir = os.path.join(self._mirror_dir, 'os')
arch_dirs = sorted(os.scandir(arches_dir), key=lambda arch: arch.name)
for arch_dir in arch_dirs:
- arch = arch_dir.name
- if arch not in CONFIG.parabola.arches:
+ if arch_dir.name not in CONFIG.parabola.arches:
continue
-
- # collect valid package file-sets and invalid litter files
- repofiles = sorted(os.scandir(arch_dir.path), key=lambda repofile: repofile.name)
- package_files = set()
- for repofile in repofiles:
- # NOTE: Information is intentionally discarded here,
- # which will be recovered as these are assigned to a PackageSet.
- # See the PkgFile and PackageSet class descriptions.
- pkgpath = repofile.path.removesuffix('.sig').removesuffix('.torrent')
- if repofile.name.endswith(( '.pkg.tar.xz', '.pkg.tar.zst',
- '.pkg.tar.xz.sig', '.pkg.tar.zst.sig',
- '.pkg.tar.xz.torrent', '.pkg.tar.zst.torrent')):
- package_files.add(pkgpath)
- else:
- self._litterfiles.append(repofile.path)
- for pkgpath in package_files:
- i += 1
- if sys.stdout.isatty():
- sys.stdout.write('loading [%s] pkgfiles: %i\r' % (self._name, i))
- sys.stdout.flush()
-
- pkgfile = PkgFile(self, pkgpath, arch)
- self._pkgfiles.append(pkgfile)
+ else:
+ self._load_pkgfiles_dir(arch_dir)
# cache results
- self._scan_log('loaded [%s] pkgfiles: %i', (self._name, len(self._pkgfiles)))
+ self._log_scan_done('pkgfiles', self._pkgfiles)
os.makedirs(self._pkgfiles_dir, exist_ok=True)
with open(os.path.join(self._pkgfiles_dir, '.pkgfiles'), 'w') as out:
out.write(json.dumps(self._pkgfiles, indent=4, sort_keys=True, default=str))
+ with open(os.path.join(self._pkgfiles_dir, '.pkgfiles_cache'), 'w') as out:
+ out.write(json.dumps(self._pkgfiles_cache, indent=4, sort_keys=True, default=str))
with open(os.path.join(self._pkgfiles_dir, '.litterfiles'), 'w') as out:
out.write(json.dumps(self._litterfiles, indent=4, sort_keys=True, default=str))
+ def _load_pkgfiles_dir(self, pool_name):
+ ''' collect valid package-set components and invalid litter files '''
+ # NOTE: Information is intentionally discarded here, which will be recovered later,
+ # when these PkgFiles are each assigned to a PackageSet.
+ # Each PackageSet will have exactly one PkgFile,
+ # and will account for all valid components, including those discarded here.
+ # This is also an optimization; normally,
+ # seven different real files will match this filter,
+ # and correspond to the same DB entry and package file in the pool.
+ # See the PkgFile and PackageSet class descriptions for details.
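+ # e.g. 'foo-1.0-1-x86_64.pkg.tar.zst' plus its '.sig' and '.torrent' companions
+ # (and likewise for '.xz') all collapse to a single pool path here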
+
+ pkgfiles_dir = os.path.join(self._mirror_dir, 'pool', pool_name)
+ repofiles = sorted(os.scandir(pkgfiles_dir), key=lambda repofile: repofile.name)
+ pkgpaths = set()
+ for repofile in repofiles:
+ pkgpath = repofile.path.removesuffix('.sig').removesuffix('.torrent')
+ if repofile.name.endswith(( '.pkg.tar.xz', '.pkg.tar.zst',
+ '.pkg.tar.xz.sig', '.pkg.tar.zst.sig',
+ '.pkg.tar.xz.torrent', '.pkg.tar.zst.torrent',
+ '.src.tar.xz', '.src.tar.zst' )):
+ pkgpaths.add(pkgpath)
+ else:
+ self._litterfiles.append(repofile.path)
+ for pkgpath in pkgpaths:
+ is_known_poolfile = self._is_known_poolfile(pkgpath)
+ pkgfile = self._find_or_create_pkgfile(pkgpath)
+ if not self._is_pool:
+ self._pkgfiles.append(pkgfile)
+ if pkgfile.arch not in self._pkgfiles_cache:
+ self._pkgfiles_cache[pkgfile.arch] = {}
+ if pkgfile.pkgname not in self._pkgfiles_cache[pkgfile.arch]:
+ self._pkgfiles_cache[pkgfile.arch][pkgfile.pkgname] = []
+ self._pkgfiles_cache[pkgfile.arch][pkgfile.pkgname].append(pkgfile)
+ elif not is_known_poolfile:
+ self._poolfiles.append(pkgfile)
+
+ self._log_scan('pkgfiles', self._pkgfiles)
+
def _associate_components(self):
''' correlate the collected PKGBUILDs, DB entries, repo symlinks,
pool files, and source-balls
@@ -933,8 +1039,8 @@ class Repo():
for pkgbuild in self._pkgbuilds:
pkgname = pkgbuild.pkgname ; arch = pkgbuild.arch ;
- pkgentries_cache = self.pkgentries_cache.get(arch, {})
- pkgfiles_cache = self.pkgfiles_cache.get(arch, {})
+ pkgentries_cache = self._pkgentries_cache.get(arch, {})
+ pkgfiles_cache = self._pkgfiles_cache.get(arch, {})
pkgentries = pkgentries_cache.get(pkgname, {})
pkgfiles = pkgfiles_cache.get(pkgname, {})
@@ -954,8 +1060,8 @@ class Repo():
for pkgentry in self._pkgentries:
pkgname = pkgentry.pkgname ; arch = pkgentry.arch ;
- pkgbuilds_cache = self.pkgbuilds_cache.get(arch, {})
- pkgfiles_cache = self.pkgfiles_cache.get(arch, {})
+ pkgbuilds_cache = self._pkgbuilds_cache.get(arch, {})
+ pkgfiles_cache = self._pkgfiles_cache.get(arch, {})
pkgbuilds = pkgfiles_cache.get(pkgname, {})
pkgfiles = pkgfiles_cache.get(pkgname, {})
@@ -975,8 +1081,8 @@ class Repo():
for pkgfile in self._pkgfiles:
pkgname = pkgfile.pkgname ; arch = pkgfile.arch ;
- pkgbuilds_cache = self.pkgbuilds_cache.get(pkgfile, {})
- pkgentries_cache = self.pkgentries_cache.get(pkgfile, {})
+ pkgbuilds_cache = self._pkgbuilds_cache.get(arch, {})
+ pkgentries_cache = self._pkgentries_cache.get(arch, {})
pkgbuilds = pkgbuilds_cache.get(pkgname, {})
pkgentries = pkgentries_cache.get(pkgname, {})
@@ -993,30 +1099,25 @@ class Repo():
# associate each pool file and source-ball
# with PkgBuild(s), PkgEntry(s), and/or PkgFile(s), if also present
- for poolfile in self._poolfiles.union(self._srcfiles):
+ for poolfile in self._poolfiles:
pkgname = poolfile.pkgname ; arch = poolfile.arch ;
- pkgbuilds_cache = self.pkgbuilds_cache.get(poolfile, {})
- pkgentries_cache = self.pkgentries_cache.get(poolfile, {})
- pkgfiles_cache = self.pkgfiles_cache.get(poolfile, {})
- pkgbuilds = pkgbuilds_cache.get(pkgname, {})
- pkgentries = pkgentries_cache.get(pkgname, {})
- pkgfiles = pkgfiles_cache.get(pkgname, {})
-
- for pkgfile in pkgfiles:
- # TODO: find or create PkgFile by pkgname
+ for pkgfile in self._known_pkgfiles:
+ assert not self._is_known_poolfile(pkgfile.path)
- for pkgbuild in pkgbuilds:
- pkgbuild.register_pkgfile(pkgfile, arch)
- pkgfile.register_pkgbuild(pkgbuild)
-
- for pkgentry in pkgentries:
- pkgentry.register_pkgfile(pkgfile, arch)
- pkgfile.register_pkgentry(pkgentry)
- for pkgbuild in pkgbuilds:
- pkgbuild.register_pkgentry(pkgentry, arch)
- pkgentry.register_pkgbuild(pkgbuild, arch)
+ for pkgbuild in self._known_pkgbuilds:
+ if pkgbuild.pkgname == pkgname:
+ pkgbuild.register_pkgfile(pkgfile, arch)
+ pkgfile.register_pkgbuild(pkgbuild)
+ for pkgentry in self._known_pkgentries:
+ if pkgentry.pkgname == pkgname:
+ pkgentry.register_pkgfile(pkgfile, arch)
+ pkgfile.register_pkgentry(pkgentry)
+ for pkgbuild in self._known_pkgbuilds:
+ if pkgbuild.pkgname == pkgname:
+ pkgbuild.register_pkgentry(pkgentry, arch)
+ pkgentry.register_pkgbuild(pkgbuild, arch)
def __repr__(self):
''' produce a string representation of the repo '''
@@ -1041,11 +1142,12 @@ class RepoCache():
MIRROR_DIR = PKGFILES_DIR
# apply user config
- ARCH_REPOS = CONFIG.arch.repos if CONFIG.arch.repos else []
- PARABOLA_REPOS = CONFIG.parabola.repos if CONFIG.parabola.repos else []
- #POOLS = SOURCES = CONFIG.parabola.pools if CONFIG.parabola.pools else [] # WIP:
- POOLS = SOURCES = [] if CONFIG.parabola.pools else [] # WIP:
- ARCHES = CONFIG.parabola.arches if CONFIG.parabola.arches else []
+ ARCH_REPOS = set(CONFIG.arch.repos ) if CONFIG.arch.repos else set()
+ PARABOLA_REPOS = set(CONFIG.parabola.repos ) if CONFIG.parabola.repos else set()
+ #POOLS = SOURCES = set(CONFIG.parabola.pools ) if CONFIG.parabola.pools else set() # WIP:
+ POOLS = SOURCES = set() # WIP:
+ ARCHES = set(CONFIG.parabola.arches) if CONFIG.parabola.arches else set()
+ ALL_REPOS = ARCH_REPOS | PARABOLA_REPOS
if CONFIG.local.abslibre:
if os.path.exists(CONFIG.local.abslibre):
@@ -1059,7 +1161,10 @@ class RepoCache():
raise FileNotFoundError("directory not found: '%s' (CONFIG.local.mirror)" % CONFIG.local.mirror)
# sanity checks
- assert not set(POOLS).intersection(set(PARABOLA_REPOS))
+ assert not set(PARABOLA_REPOS) & set(ARCH_REPOS)
+ assert not set(PARABOLA_REPOS) & set(POOLS )
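+ # n.b. 'community' is seemingly expected in both ARCH_REPOS and POOLS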
+ assert not (set(ARCH_REPOS ) & set(POOLS )) - {'community'}
+ assert not set(ALL_REPOS ) & set(SOURCES )
for repo_name in set(ARCH_REPOS).union(set(PARABOLA_REPOS)):
arch_dirs = os.scandir(os.path.join(MIRROR_DIR, repo_name, 'os'))
repo_arches = [ arch.name for arch in arch_dirs ]
@@ -1080,17 +1185,12 @@ class RepoCache():
os.makedirs(PKGFILES_DIR, exist_ok=True)
- def __init__(self, clean_cache, no_sync):
+ def __init__(self):
''' constructor '''
-
- is_mirror = os.path.exists(os.path.join(RepoCache.MIRROR_DIR, 'lastsync' )) or \
- os.path.exists(os.path.join(RepoCache.MIRROR_DIR, 'lastupdate'))
-
- self._clean_cache = clean_cache
- self._no_sync = no_sync or is_mirror
-
- self._repos = {}
+ self._clean_cache = False
+ self._no_sync = False
self._arch_repos = {}
+ self._repos = {}
self._keyring = []
self._key_cache = {}
self._pkgsets = {}
@@ -1111,17 +1211,22 @@ class RepoCache():
@property
def pkgfiles(self):
''' produce the list of pkg.tar.xz files in all repos '''
- return [p for r in self._repos.values() for p in r.pkgfiles]
+ return [p for r in self.all_repos.values() for p in r.pkgfiles]
+
+ @property
+ def arch_repos(self):
+ ''' produce repo objects for the arch repos '''
+ return self._arch_repos
@property
def repos(self):
- ''' produce repo objects for the parabola repos under test '''
+ ''' produce repo objects for the parabola repos '''
return self._repos
@property
- def arch_repos(self):
- ''' produce repo objects for core, extra and community '''
- return self._arch_repos
+ def all_repos(self):
+ ''' produce repo objects for the arch and parabola repos '''
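+ # note: on name collisions, parabola repos shadow arch repos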
+ return {**self._arch_repos, **self._repos}
@property
def keyring(self):
@@ -1136,14 +1241,19 @@ class RepoCache():
## business ##
- def load_repos(self):
+ def load_repos(self, clean_cache, no_sync):
''' sync repos and load repo data from cache '''
- if self._clean_cache:
+ is_mirror = os.path.exists(os.path.join(RepoCache.MIRROR_DIR, 'lastsync' )) or \
+ os.path.exists(os.path.join(RepoCache.MIRROR_DIR, 'lastupdate'))
+ self._clean_cache = clean_cache
+ self._no_sync = no_sync or is_mirror
+
+ if clean_cache:
shutil.rmtree(RepoCache.CACHE_DIR)
os.makedirs(RepoCache.CACHE_DIR, exist_ok=True)
- if not self._no_sync:
+ if not no_sync:
self._sync_abslibre()
self._sync_packages()
@@ -1154,17 +1264,17 @@ class RepoCache():
self._repos[repo_name] = Repo(repo_name, has_abs_tree=True)
for pool_name in sorted(RepoCache.POOLS):
- self._pools[pool_name] = Repo(os.path.join('pool', pool_name))
+ self._pools[pool_name] = Repo(pool_name, known_pkgfiles=self.pkgfiles)
- for pool_name in sorted(RepoCache.SOURCES):
- self._sources[pool_name] = Repo(os.path.join('sources', pool_name))
+ for sources_name in sorted(RepoCache.SOURCES):
+ self._sources[sources_name] = Repo(sources_name, known_pkgfiles=self.pkgfiles)
self._extract_keyring()
for pkgfile in self.pkgfiles:
pkgfile.link_keyring(self._key_cache)
# sanity checks (see PackageSet class description)
- #self._collect_package_sets()
+ self._collect_package_sets()
def _sync_abslibre(self):
''' update the PKGBUILDs '''
@@ -1189,7 +1299,7 @@ class RepoCache():
def _extract_keyring(self):
''' extract the parabola keyring '''
- assert self._repos['libre'] and self._repos['libre'].pkgentries_cache
+ assert 'libre' in self._repos and self._repos['libre'].pkgentries_cache
cache = next(iter(self._repos['libre'].pkgentries_cache.values()))
try: