
Simplify script so that it queries all repos at once

Move mapping file to JSON
Save a JSON dump of the cache as well (for inspection)
Luca Beltrame 2016-09-21 07:53:10 +02:00
parent 9c98bd83b0
commit 5780e3b6c6
Signed by: einar
GPG key ID: 40C8281493B01C16
3 changed files with 364 additions and 20 deletions


@@ -3,6 +3,7 @@
 import argparse
 import csv
 import logging
+import json
 from pathlib import Path
 import pickle
@@ -26,6 +27,9 @@ class GitHashCache:
     def __setitem__(self, key, value):
         self._data[key] = value
 
+    def get(self, key, *args, **kwargs):
+        return self._data.get(key, *args, **kwargs)
+
     def save(self):
         logging.debug("Saving pickled data")
         with open(self.cache, "wb") as handle:
@@ -41,6 +45,11 @@
         with open(self.cache, "rb") as handle:
             self._data = pickle.load(handle)
 
+    def to_json(self):
+        with Path(self.cache).with_suffix(".json").open("w") as handle:
+            json.dump(self._data, handle, indent=4)
+
 
 def lsremote(url):
@@ -64,6 +73,7 @@ def run_osc(repository, package_name):
     cmd = cmd.format(repository, package_name)
     logging.debug("Running {}".format(cmd))
+    logging.info("Updating package {0}".format(package_name))
     pid = sarge.run(cmd)
@@ -80,56 +90,70 @@ def update_package(hash_data, package_name, remote_name, obs_repository,
     repo_name = "kde:{}".format(remote_name)
     remote_hash = get_remote_hash(repo_name, branch)
-    current_hash = hash_data[remote_name]
+    repo_hashes = hash_data.get(obs_repository)
+
+    if hash_data.get(obs_repository) is None:
+        logging.debug("No prior data - initializing empty")
+        hash_data[obs_repository] = dict()
+
+    current_hash = hash_data[obs_repository].get(remote_name, "")
 
     logging.debug("Package {}, theirs {}, ours {}".format(remote_name,
-                  remote_hash, current_hash))
+                                                          remote_hash,
+                                                          current_hash))
 
     if remote_hash != current_hash:
         logging.debug("Hash doesn't match, updating")
         run_osc(obs_repository, package_name)
-        hash_data[remote_name] = remote_hash
+        hash_data[obs_repository][remote_name] = remote_hash
         hash_data.save()
 
 
-def update_packages(cache_file, obs_repository, repo_mapping_file):
+def update_packages(cache_file, repo_mapping_file):
 
     hash_data = GitHashCache(cache_file)
     hash_data.load()
 
-    logging.info("Updating packages for {}".format(obs_repository))
-
     with open(repo_mapping_file, "r") as mapping:
-        reader = csv.reader(mapping, delimiter="\t")
-        for row in reader:
-            kde_name, obs_name, branch = row
-            branch = "master" if not branch else branch
-            logging.debug("Updating package {} ({})".format(kde_name,
-                                                            obs_name))
-            logging.debug("Using branch {}".format(branch))
-            update_package(hash_data, obs_name, kde_name, obs_repository,
-                           branch)
+        repo_data = json.load(mapping)
+
+    for obs_repository, branch_data in repo_data.items():
+
+        logging.info("Updating packages for {}".format(obs_repository))
+
+        for package in branch_data:
+            kde_name = package["kde"]
+            obs_name = package["obs"]
+            branch = package["branch"]
+            logging.debug("Updating package {} ({})".format(kde_name,
+                                                            obs_name))
+            logging.debug("Using branch {}".format(branch))
+            update_package(hash_data, obs_name, kde_name, obs_repository,
+                           branch)
 
     logging.debug("Saving data")
     hash_data.save()
+    hash_data.to_json()
 
 
 def main():
 
     parser = argparse.ArgumentParser()
-    parser.add_argument("repository", help="OBS repository to use")
     parser.add_argument("mapping_file", help="KDE:OBS repository mapping file")
-    parser.add_argument("--debug", help="Show debugging output", action="store_true")
+    parser.add_argument("--debug", help="Show debugging output",
+                        action="store_true")
 
     options = parser.parse_args()
     level = logging.INFO if not options.debug else logging.DEBUG
-    logging.basicConfig(format='%(levelname)s:%(message)s',
-                        level=level)
+    logging.basicConfig(format='%(levelname)s - %(message)s',
+                        level=level)
 
-    cache_file = Path.home() / ".local/share/obs_{}_cache".format(options.repository.replace(":", "_"))
+    cache_file = Path.home() / ".local/share/obs_repo_cache.cache"
     cache_file = str(cache_file)
-    update_packages(cache_file, options.repository, options.mapping_file)
+    update_packages(cache_file, options.mapping_file)
 
     logging.info("Complete")
 
 
 if __name__ == "__main__":
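
With the diff above, the mapping file consumed by update_packages() is no longer tab-separated but a JSON object keyed by OBS repository, where each repository maps to a list of entries carrying "kde", "obs" and "branch" fields. A minimal sketch of generating such a file; the repository and package names are invented placeholders, not taken from this commit:

import json

# Hypothetical mapping, shaped after what update_packages() reads:
# repo_data.items() yields (obs_repository, branch_data), and each
# package supplies package["kde"], package["obs"], package["branch"].
mapping = {
    "KDE:Extra": [
        {"kde": "krita", "obs": "krita", "branch": "master"},
        {"kde": "digikam", "obs": "digikam", "branch": "frameworks"},
    ]
}

with open("repo_mapping.json", "w") as handle:
    json.dump(mapping, handle, indent=4)

Note that the removed CSV path defaulted an empty branch to "master", while the new loop reads package["branch"] unconditionally, so each entry presumably needs an explicit branch. With the "repository" positional argument gone, invoking the script likewise reduces to passing just the mapping file (plus --debug if desired).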
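
The cache changes shape as well: update_package() now nests hashes per OBS repository instead of keeping one flat remote-to-hash dict, and to_json() mirrors the pickled data to a dump next to the cache for inspection (with the new path, that should land at ~/.local/share/obs_repo_cache.json, since with_suffix() swaps the ".cache" extension). A rough illustration of the two layouts, with invented package names and hashes:

# Before this commit: flat, remote name -> last-seen git hash.
old_cache = {"krita": "abc123...", "digikam": "def456..."}

# After: OBS repository -> {remote name -> git hash}, matching the
# hash_data[obs_repository][remote_name] = remote_hash write above.
new_cache = {
    "KDE:Extra": {
        "krita": "abc123...",
        "digikam": "def456...",
    }
}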