Hello community,

here is the log from the commit of package openSUSE-release-tools for openSUSE:Factory checked in at 2018-10-15 09:43:32
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/openSUSE-release-tools (Old)
 and      /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Package is "openSUSE-release-tools"

Mon Oct 15 09:43:32 2018 rev:136 rq:641762 version:20181011.61e1049

Changes:
--------
--- /work/SRC/openSUSE:Factory/openSUSE-release-tools/openSUSE-release-tools.changes   2018-10-11 11:56:52.897914465 +0200
+++ /work/SRC/openSUSE:Factory/.openSUSE-release-tools.new/openSUSE-release-tools.changes      2018-10-15 09:43:53.127341797 +0200
@@ -1,0 +2,26 @@
+Thu Oct 11 09:47:38 UTC 2018 - opensuse-releaset...@opensuse.org
+
+- Update to version 20181011.61e1049:
+  * Move unlink_list to pkglistgen class too
+  * Move update_repos into Pkglistgen class
+  * Don't make missing repos (per arch) a fatal error
+
+-------------------------------------------------------------------
+Thu Oct 11 09:01:08 UTC 2018 - opensuse-releaset...@opensuse.org
+
+- Update to version 20181011.37ac71d:
+  * [ARM 15.1] Include JeOS images in ToTest
+
+-------------------------------------------------------------------
+Thu Oct 11 05:27:50 UTC 2018 - opensuse-releaset...@opensuse.org
+
+- Update to version 20181011.5d542a8:
+  * Add openSUSE-Tumbleweed-Kubic-dvd5-dvd-aarch64 to openSUSE:Factory:ARM
+
+-------------------------------------------------------------------
+Wed Oct 10 09:05:22 UTC 2018 - opensuse-releaset...@opensuse.org
+
+- Update to version 20181010.e90ac73:
+  * pkglistgen: Fix copy&paste error on uploading results
+
+-------------------------------------------------------------------

Old:
----
  openSUSE-release-tools-20181010.97f1eda.obscpio

New:
----
  openSUSE-release-tools-20181011.61e1049.obscpio

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ openSUSE-release-tools.spec ++++++
--- /var/tmp/diff_new_pack.qWOfV7/_old  2018-10-15 09:43:58.951335262 +0200
+++ /var/tmp/diff_new_pack.qWOfV7/_new  2018-10-15 09:43:58.959335254 +0200
@@ -20,7 +20,7 @@
 %define source_dir openSUSE-release-tools
 %define announcer_filename factory-package-news
 Name:           openSUSE-release-tools
-Version:        20181010.97f1eda
+Version:        20181011.61e1049
 Release:        0
 Summary:        Tools to aid in staging and release work for openSUSE/SUSE
 License:        GPL-2.0-or-later AND MIT

++++++ _servicedata ++++++
--- /var/tmp/diff_new_pack.qWOfV7/_old  2018-10-15 09:43:58.995335213 +0200
+++ /var/tmp/diff_new_pack.qWOfV7/_new  2018-10-15 09:43:58.995335213 +0200
@@ -1,6 +1,6 @@
 <servicedata>
 <service name="tar_scm">
 <param name="url">https://github.com/openSUSE/openSUSE-release-tools.git</param>
-<param name="changesrevision">fb4d63ec3dd4b8d337f03cbac427c71529632c04</param>
+<param name="changesrevision">61e10498957db3e608bd7541f79d51f37945e437</param>
 </service>
 </servicedata>

++++++ openSUSE-release-tools-20181010.97f1eda.obscpio -> openSUSE-release-tools-20181011.61e1049.obscpio ++++++

diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181010.97f1eda/pkglistgen.py new/openSUSE-release-tools-20181011.61e1049/pkglistgen.py
--- old/openSUSE-release-tools-20181010.97f1eda/pkglistgen.py  2018-10-10 07:09:33.000000000 +0200
+++ new/openSUSE-release-tools-20181011.61e1049/pkglistgen.py  2018-10-11 11:38:48.000000000 +0200
@@ -424,6 +424,7 @@
         self.unwanted = set()
         self.output = None
         self.locales = set()
+        self.did_update = False

     def _load_supportstatus(self):
         # XXX
@@ -562,7 +563,10 @@
             s = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, reponame, arch))
             r = repo.add_solv(s)
             if not r:
-                raise Exception("failed to add repo {}/{}/{}. Need to run update first?".format(project, reponame, arch))
+                if not self.did_update:
+                    raise Exception(
+                        "failed to add repo {}/{}/{}. Need to run update first?".format(project, reponame, arch))
+                continue
             for solvable in repo.solvables_iter():
                 if solvable.name in solvables:
                     self.lockjobs[arch].append(pool.Job(solv.Job.SOLVER_SOLVABLE | solv.Job.SOLVER_LOCK, solvable.id))
@@ -674,6 +678,71 @@
             return 'devel package of ' + g.develpkgs[package]
         return None

+    def update_repos(self, opts):
+        # only there to parse the repos
+        bs_mirrorfull = os.path.join(SCRIPT_PATH, 'bs_mirrorfull')
+        global_update = False
+        for project, repo in self.repos:
+            for arch in opts.filtered_architectures:
+                # TODO: refactor to common function with repo_checker.py
+                d = os.path.join(CACHEDIR, project, repo, arch)
+                if not os.path.exists(d):
+                    os.makedirs(d)
+
+                try:
+                    # Fetch state before mirroring in-case it changes during download.
+                    state = repository_arch_state(self.apiurl, project, repo, arch)
+                except HTTPError:
+                    continue
+
+                # Would be preferable to include hash in name, but cumbersome to handle without
+                # reworking a fair bit since the state needs to be tracked.
+                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
+                solv_file_hash = '{}::{}'.format(solv_file, state)
+                if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
+                    # Solve file exists and hash unchanged, skip updating solv.
+                    logger.debug('skipping solv generation for {} due to matching state {}'.format(
+                        '/'.join([project, repo, arch]), state))
+                    continue
+
+                # Either hash changed or new, so remove any old hash files.
+                self.unlink_list(None, glob.glob(solv_file + '::*'))
+                global_update = True
+
+                logger.debug('updating %s', d)
+                args = [bs_mirrorfull]
+                args.append('--nodebug')
+                args.append('{}/public/build/{}/{}/{}'.format(self.apiurl, project, repo, arch))
+                args.append(d)
+                p = subprocess.Popen(args, stdout=subprocess.PIPE)
+                for line in p.stdout:
+                    logger.info(line.rstrip())
+
+                files = [os.path.join(d, f)
+                         for f in os.listdir(d) if f.endswith('.rpm')]
+                fh = open(solv_file, 'w')
+                p = subprocess.Popen(
+                    ['rpms2solv', '-m', '-', '-0'], stdin=subprocess.PIPE, stdout=fh)
+                p.communicate('\0'.join(files))
+                p.wait()
+                fh.close()
+
+                # Create hash file now that solv creation is complete.
+                open(solv_file_hash, 'a').close()
+                self.did_update = True
+
+        return global_update
+
+    def unlink_list(self, path, names):
+        for name in names:
+            if path is None:
+                name_path = name
+            else:
+                name_path = os.path.join(path, name)
+
+            if os.path.isfile(name_path):
+                os.unlink(name_path)
+

 class CommandLineInterface(ToolBase.CommandLineInterface):
     SCOPES = ['all', 'target', 'rings', 'staging', 'arm']
@@ -725,64 +794,12 @@
         self.tool.list_products()

     def do_update(self, subcmd, opts):
-        """${cmd_name}: Solve groups
+        """${cmd_name}: Update groups

         ${cmd_usage}
         ${cmd_option_list}
         """
-
-        # only there to parse the repos
-        bs_mirrorfull = os.path.join(SCRIPT_PATH, 'bs_mirrorfull')
-        global_update = False
-        for project, repo in self.repos:
-            for arch in opts.filtered_architectures:
-                # TODO: refactor to common function with repo_checker.py
-                d = os.path.join(CACHEDIR, project, repo, arch)
-                if not os.path.exists(d):
-                    os.makedirs(d)
-
-                try:
-                    # Fetch state before mirroring in-case it changes during download.
-                    state = repository_arch_state(self.tool.apiurl, project, repo, arch)
-                except HTTPError:
-                    continue
-
-                # Would be preferable to include hash in name, but cumbersome to handle without
-                # reworking a fair bit since the state needs to be tracked.
-                solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
-                solv_file_hash = '{}::{}'.format(solv_file, state)
-                if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
-                    # Solve file exists and hash unchanged, skip updating solv.
-                    logger.debug('skipping solv generation for {} due to matching state {}'.format(
-                        '/'.join([project, repo, arch]), state))
-                    continue
-
-                # Either hash changed or new, so remove any old hash files.
-                self.unlink_list(None, glob.glob(solv_file + '::*'))
-                global_update = True
-
-                logger.debug('updating %s', d)
-                args = [bs_mirrorfull]
-                args.append('--nodebug')
-                args.append('{}/public/build/{}/{}/{}'.format(self.tool.apiurl, project, repo, arch))
-                args.append(d)
-                p = subprocess.Popen(args, stdout=subprocess.PIPE)
-                for line in p.stdout:
-                    logger.info(line.rstrip())
-
-                files = [os.path.join(d, f)
-                         for f in os.listdir(d) if f.endswith('.rpm')]
-                fh = open(solv_file, 'w')
-                p = subprocess.Popen(
-                    ['rpms2solv', '-m', '-', '-0'], stdin=subprocess.PIPE, stdout=fh)
-                p.communicate('\0'.join(files))
-                p.wait()
-                fh.close()
-
-                # Create hash file now that solv creation is complete.
-                open(solv_file_hash, 'a').close()
-
-        return global_update
+        self.tool.update_repos(opts)

     def update_merge(self, nonfree):
         """Merge free and nonfree solv files or copy free to merged"""
@@ -823,7 +840,7 @@
             logger.debug("processing %s", prj)
             self.tool.expand_repos(prj, 'standard')
             opts.project = prj
-            self.do_update('update', opts)
+            self.tool.update_repos(opts)

             drops = dict()
             for arch in self.tool.architectures:
@@ -1299,7 +1316,7 @@
         self.postoptparse()

         print('-> do_update')
-        self.do_update('update', opts)
+        self.tool.update_repos(opts)

         nonfree = target_config.get('nonfree')
         if opts.scope not in ('arm', 'ports') and nonfree and drop_list:
@@ -1310,7 +1327,7 @@
             opts_nonfree = copy.deepcopy(opts)
            opts_nonfree.project = nonfree
             self.repos = self.tool.expand_repos(nonfree, main_repo)
-            self.do_update('update', opts_nonfree)
+            self.update_repos(opts_nonfree)

             # Switch repo back to main target project.
             self.repos = repos_
@@ -1351,7 +1368,7 @@
         self.do_create_droplist('create_droplist', opts, *solv_prior)

         delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
-        self.unlink_list(product_dir, delete_products)
+        self.tool.unlink_list(product_dir, delete_products)

         print('-> product service')
         for product_file in glob.glob(os.path.join(product_dir, '*.product')):
@@ -1359,13 +1376,13 @@
                 [PRODUCT_SERVICE, product_file, product_dir, opts.project]))

         delete_kiwis = target_config.get('pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
-        self.unlink_list(product_dir, delete_kiwis)
+        self.tool.unlink_list(product_dir, delete_kiwis)
         if opts.scope == 'staging':
             self.strip_medium_from_staging(product_dir)

         spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
         if skip_release:
-            self.unlink_list(None, spec_files)
+            self.tool.unlink_list(None, spec_files)
         else:
             self.move_list(spec_files, release_dir)
         inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
@@ -1392,9 +1409,11 @@
             for package in sorted(summary[group]):
                 summary_str += " - " + package + "\n"

-        source_file_ensure(api.apiurl, opts.project, '000product-summary', 'summary.yml', summary_str, 'Updating summary.yml')
+        source_file_ensure(api.apiurl, opts.project, '000product-summary',
+                           'summary.yml', summary_str, 'Updating summary.yml')

         unsorted_yml = open(os.path.join(product_dir, 'unsorted.yml')).read()
-        source_file_ensure(api.apiurl, opts.project, '000product-summary', 'unsorted.yml', summary_str, 'Updating unsorted.yml')
+        source_file_ensure(api.apiurl, opts.project, '000product-summary',
+                           'unsorted.yml', unsorted_yml, 'Updating unsorted.yml')

     def solv_cache_update(self, apiurl, cache_dir_solv, target_project, family_last, family_include, opts):
         """Dump solv files (do_dump_solv) for all products in family."""
@@ -1470,16 +1489,6 @@
         for name in file_list:
             os.rename(name, os.path.join(destination, os.path.basename(name)))

-    def unlink_list(self, path, names):
-        for name in names:
-            if path is None:
-                name_path = name
-            else:
-                name_path = os.path.join(path, name)
-
-            if os.path.isfile(name_path):
-                os.unlink(name_path)
-
     def unlink_all_except(self, path, ignore_list=['_service'], ignore_hidden=True):
         for name in os.listdir(path):
             if name in ignore_list or (ignore_hidden and name.startswith('.')):

diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/openSUSE-release-tools-20181010.97f1eda/totest-manager.py new/openSUSE-release-tools-20181011.61e1049/totest-manager.py
--- old/openSUSE-release-tools-20181010.97f1eda/totest-manager.py      2018-10-10 07:09:33.000000000 +0200
+++ new/openSUSE-release-tools-20181011.61e1049/totest-manager.py      2018-10-11 11:38:48.000000000 +0200
@@ -770,7 +770,8 @@

 class ToTestFactoryARM(ToTestFactory):
     main_products = ['000product:openSUSE-cd-mini-aarch64',
-                     '000product:openSUSE-dvd5-dvd-aarch64']
+                     '000product:openSUSE-dvd5-dvd-aarch64',
+                     '000product:openSUSE-Tumbleweed-Kubic-dvd5-dvd-aarch64']

     ftp_products = ['000product:openSUSE-ftp-ftp-aarch64',
                     '000product:openSUSE-ftp-ftp-armv7hl',
@@ -821,7 +822,8 @@
                     '000product:openSUSE-ftp-ftp-armv7hl',
                     ]

-    livecd_products = []
+    livecd_products = ['JeOS']
+    livecd_archs = ['armv7l']

     # Leap 15.1 ARM still need to update snapshot
     set_snapshot_number = True

++++++ openSUSE-release-tools.obsinfo ++++++
--- /var/tmp/diff_new_pack.qWOfV7/_old  2018-10-15 09:43:59.567334571 +0200
+++ /var/tmp/diff_new_pack.qWOfV7/_new  2018-10-15 09:43:59.567334571 +0200
@@ -1,5 +1,5 @@
 name: openSUSE-release-tools
-version: 20181010.97f1eda
-mtime: 1539148173
-commit: 97f1eda281bb826f418c999a6b3267e930191a5b
+version: 20181011.61e1049
+mtime: 1539250728
+commit: 61e10498957db3e608bd7541f79d51f37945e437
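
The update_repos() hunk above caches solv files keyed on the repository state: after a successful rpms2solv run it touches an empty '<solv_file>::<state>' marker, and a later run skips regeneration while a marker matching the current state still exists. Below is a minimal standalone sketch of that caching pattern; the fetch_state() stub and the cache directory are illustrative stand-ins (not the tool's repository_arch_state() or its real CACHEDIR), and the solv build itself is stubbed out.

import glob
import os

CACHEDIR = '/tmp/pkglistgen-cache'  # illustrative cache location, not the tool's real CACHEDIR


def fetch_state(project, repo, arch):
    # Stand-in for repository_arch_state(); in the tool this queries OBS.
    return 'dummy-state'


def refresh_solv(project, repo, arch):
    """Rebuild the solv file only when the recorded repo state changed; return True if rebuilt."""
    state = fetch_state(project, repo, arch)
    solv_file = os.path.join(CACHEDIR, 'repo-{}-{}-{}.solv'.format(project, repo, arch))
    solv_file_hash = '{}::{}'.format(solv_file, state)

    if os.path.exists(solv_file) and os.path.exists(solv_file_hash):
        # Solv file exists and the state marker matches: skip regeneration.
        return False

    # State changed or first run: drop stale markers before rebuilding.
    for stale in glob.glob(solv_file + '::*'):
        os.unlink(stale)

    os.makedirs(CACHEDIR, exist_ok=True)
    # ... mirror RPMs and run rpms2solv here; stubbed as an empty file ...
    open(solv_file, 'a').close()
    # Create the marker only after the solv build completed.
    open(solv_file_hash, 'a').close()
    return True


if __name__ == '__main__':
    print(refresh_solv('openSUSE:Factory', 'standard', 'x86_64'))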