From 31982d5946fcd3b916bc77dc1708017bd80fd826 Mon Sep 17 00:00:00 2001 From: Neal Gompa Date: Mar 24 2019 22:21:58 +0000 Subject: Update to 0.164.2 - Add proposed patches to build for Python 3 for Fedora 30+ - Add Recommends for obs-build --- diff --git a/.gitignore b/.gitignore index c5576d9..ddc0830 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ /osc-0.161.1.tar.gz /osc-0.162.1.tar.gz /osc-0.163.0.tar.gz +/osc-0.164.2.tar.gz diff --git a/001_helper.patch b/001_helper.patch new file mode 100644 index 0000000..348bab1 --- /dev/null +++ b/001_helper.patch @@ -0,0 +1,159 @@ +From 4b29e1c543b72ca3db6f84c6f9ea6e21bbe9af40 Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Wed, 7 Nov 2018 15:03:43 +0100 +Subject: [PATCH] add helper functions for python3 support + +This functions are used in the whole code and are +mandatory for the python3 support to work. In python2 +case nothing is touched. + +* cmp_to_key: + converts a cmp= into a key= function + +* decode_list: + decodes each element of a list. This is needed if + we have a mixed list with strings and bytes. + +* decode_it: + Takes the input and checks if it is not a string. + Then it uses chardet to get the encoding. +--- + osc/util/helper.py | 65 +++++++++++++++++++++++++++++++++++++++++++ + tests/suite.py | 2 ++ + tests/test_helpers.py | 35 +++++++++++++++++++++++ + 3 files changed, 102 insertions(+) + create mode 100644 osc/util/helper.py + create mode 100644 tests/test_helpers.py + +diff --git a/osc/util/helper.py b/osc/util/helper.py +new file mode 100644 +index 00000000..2e25b73c +--- /dev/null ++++ b/osc/util/helper.py +@@ -0,0 +1,65 @@ ++# Copyright (C) 2018 SUSE Linux. All rights reserved. ++# This program is free software; it may be used, copied, modified ++# and distributed under the terms of the GNU General Public Licence, ++# either version 2, or (at your option) any later version. ++ ++ ++def cmp_to_key(mycmp): ++ """ Converts a cmp= function into a key= function. 
++ """ ++ ++ class K(object): ++ def __init__(self, obj, *args): ++ self.obj = obj ++ ++ def __lt__(self, other): ++ return mycmp(self.obj, other.obj) < 0 ++ ++ def __gt__(self, other): ++ return mycmp(self.obj, other.obj) > 0 ++ ++ def __eq__(self, other): ++ return mycmp(self.obj, other.obj) == 0 ++ ++ def __le__(self, other): ++ return mycmp(self.obj, other.obj) <= 0 ++ ++ def __ge__(self, other): ++ return mycmp(self.obj, other.obj) >= 0 ++ ++ def __ne__(self, other): ++ return mycmp(self.obj, other.obj) != 0 ++ ++ def __hash__(self): ++ raise TypeError('hash not implemented') ++ ++ return K ++ ++ ++def decode_list(ilist): ++ """ Decodes the elements of a list if needed ++ """ ++ ++ dlist = [] ++ for elem in ilist: ++ if not isinstance(elem, str): ++ dlist.append(decode_it(elem)) ++ else: ++ dlist.append(elem) ++ return dlist ++ ++ ++def decode_it(obj): ++ """ Decodes the given object if obj is not a string ++ based on the chardet module if possible ++ """ ++ ++ if isinstance(obj, str): ++ return obj ++ else: ++ try: ++ import chardet ++ return obj.decode(chardet.detect(obj)['encoding']) ++ except: ++ import locale ++ return obj.decode(locale.getlocale()[1]) +diff --git a/tests/suite.py b/tests/suite.py +index 6d94d446..d8fa512b 100644 +--- a/tests/suite.py ++++ b/tests/suite.py +@@ -24,6 +24,7 @@ + import test_prdiff + import test_conf + import test_results ++import test_helpers + + suite = unittest.TestSuite() + suite.addTests(test_addfiles.suite()) +@@ -42,6 +43,7 @@ + suite.addTests(test_prdiff.suite()) + suite.addTests(test_conf.suite()) + suite.addTests(test_results.suite()) ++suite.addTests(test_helpers.suite()) + + if have_xmlrunner: + result = xmlrunner.XMLTestRunner(output=os.path.join(os.getcwd(), 'junit-xml-results')).run(suite) +diff --git a/tests/test_helpers.py b/tests/test_helpers.py +new file mode 100644 +index 00000000..571b828e +--- /dev/null ++++ b/tests/test_helpers.py +@@ -0,0 +1,35 @@ ++import unittest ++from osc.util.helper import decode_it, decode_list ++ ++def suite(): ++ return unittest.makeSuite(TestResults) ++ ++class TestResults(unittest.TestCase): ++ def testDecodeList(self): ++ strlist = ['Test1', 'Test2', 'Test3'] ++ mixlist = ['Test1', b'Test2', 'Test3'] ++ byteslist = [b'Test1', b'Test2', b'Test3'] ++ ++ out = decode_list(strlist) ++ self.assertListEqual(out, strlist) ++ ++ out = decode_list(mixlist) ++ self.assertListEqual(out, strlist) ++ ++ out = decode_list(byteslist) ++ self.assertListEqual(out, strlist) ++ ++ ++ def testDecodeIt(self): ++ bytes_obj = b'Test the decoding' ++ string_obj = 'Test the decoding' ++ ++ out = decode_it(bytes_obj) ++ self.assertEqual(out, string_obj) ++ ++ out = decode_it(string_obj) ++ self.assertEqual(out, string_obj) ++ ++if __name__ == '__main__': ++ unittest.main() ++ diff --git a/002_man_page_building.patch b/002_man_page_building.patch new file mode 100644 index 0000000..1e52747 --- /dev/null +++ b/002_man_page_building.patch @@ -0,0 +1,55 @@ +From 67560887db764757bd2ad52ff6bde137984b4f61 Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 13:33:51 +0100 +Subject: [PATCH] [python3] enable full python3 support for man + +--- + osc/cmdln.py | 20 ++++++++++---------- + 1 file changed, 10 insertions(+), 10 deletions(-) + +diff --git a/osc/cmdln.py b/osc/cmdln.py +index 3d303b09..918a1da1 100644 +--- a/osc/cmdln.py ++++ b/osc/cmdln.py +@@ -621,16 +621,16 @@ def do_man(self, argv): + ${name} man + """ + mandate = datetime.utcfromtimestamp(int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))) +- 
self.stdout.write(bytes( ++ self.stdout.write( + self.man_header % { + 'date': mandate.strftime('%b %Y'), + 'version': self.get_version(), + 'name': self.name, + 'ucname': self.name.upper() +- }, +- "utf-8")) ++ } ++ ) + +- self.stdout.write(bytes(self.man_commands_header, "utf-8")) ++ self.stdout.write(self.man_commands_header) + commands = self._help_get_command_list() + for command, doc in commands: + cmdname = command.split(' ')[0] +@@ -641,14 +641,14 @@ def do_man(self, argv): + line = line[8:] + lines.append(man_escape(line)) + +- self.stdout.write(bytes( +- '.TP\n\\fB%s\\fR\n%s\n' % (command, '\n'.join(lines)), "utf-8")) ++ self.stdout.write( ++ '.TP\n\\fB%s\\fR\n%s\n' % (command, '\n'.join(lines))) + +- self.stdout.write(bytes(self.man_options_header, "utf-8")) +- self.stdout.write(bytes( +- man_escape(self._help_preprocess('${option_list}', None)), "utf-8")) ++ self.stdout.write(self.man_options_header) ++ self.stdout.write( ++ man_escape(self._help_preprocess('${option_list}', None))) + +- self.stdout.write(bytes(self.man_footer, "utf-8")) ++ self.stdout.write(self.man_footer) + + self.stdout.flush() + diff --git a/003_setup.patch b/003_setup.patch new file mode 100644 index 0000000..7ac81a9 --- /dev/null +++ b/003_setup.patch @@ -0,0 +1,75 @@ +From 36ab05c22244890e3442d0e8c658faa938198120 Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 13:15:31 +0100 +Subject: [PATCH] [python3] setup.py now supports python3 + +--- + setup.py | 18 ++++++++++-------- + 1 file changed, 10 insertions(+), 8 deletions(-) + +diff --git a/setup.py b/setup.py +index e306f707..91a7eb85 100755 +--- a/setup.py ++++ b/setup.py +@@ -2,11 +2,13 @@ + + from distutils.core import setup + import distutils.core +-import distutils.command.build +-import distutils.command.install_data ++from distutils.command import build, install_data + import os.path + import osc.core + import sys ++ ++import setuptools ++ + from osc import commandline + from osc import babysitter + # optional support for py2exe +@@ -17,7 +19,7 @@ + HAVE_PY2EXE = False + + +-class build_osc(distutils.command.build.build, object): ++class build_osc(build.build, object): + """ + Custom build command which generates man page. + """ +@@ -28,7 +30,7 @@ def build_man_page(self): + import gzip + man_path = os.path.join(self.build_base, 'osc.1.gz') + distutils.log.info('generating %s' % man_path) +- outfile = gzip.open(man_path, 'w') ++ outfile = gzip.open(man_path, 'wt') + osccli = commandline.Osc(stdout=outfile) + # FIXME: we cannot call the main method because osc expects an ~/.oscrc + # file (this would break builds in environments like the obs) +@@ -60,15 +62,15 @@ def run(self): + src_dir = (self.distribution.package_dir or {'': ''})[''] + src_dir = os.path.join(os.getcwd(), src_dir) + import sphinx +- sphinx.main(['runme', +- '-D', 'version=%s' % metadata.get_version(), ++ sphinx.main(['runme', ++ '-D', 'version=%s' % metadata.get_version(), + os.path.join('docs',), os.path.join(self.built_docs, 'docs')]) + + + # take a potential build-base option into account (for instance, if osc is + # build and installed like this: + # python setup.py build --build-base= ... install ...) 
+-class install_data(distutils.command.install_data.install_data, object): ++class install_data(install_data.install_data, object): + def initialize_options(self): + super(install_data, self).initialize_options() + self.built_data = None +@@ -97,7 +99,7 @@ def finalize_options(self): + if sys.platform[:3] != 'win': + data_files.append((os.path.join('share', 'man', 'man1'), ['osc.1.gz'])) + +-setup(name='osc', ++setuptools.setup(name='osc', + version = osc.core.__version__, + description = 'openSUSE commander', + long_description = 'Command-line client for the openSUSE Build Service, which allows to access repositories in the openSUSE Build Service in similar way as Subversion repositories.', diff --git a/004_babysitter.patch b/004_babysitter.patch new file mode 100644 index 0000000..b77a56d --- /dev/null +++ b/004_babysitter.patch @@ -0,0 +1,38 @@ +From 4269426b28a4d930b0c3b5fa2c69d3fce68560b2 Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 14:32:38 +0100 +Subject: [PATCH] [python3] msg and body are byte strings + +--- + osc/babysitter.py | 11 ++++++----- + 1 file changed, 6 insertions(+), 5 deletions(-) + +diff --git a/osc/babysitter.py b/osc/babysitter.py +index 4b2583ae..6ca92531 100644 +--- a/osc/babysitter.py ++++ b/osc/babysitter.py +@@ -16,6 +16,7 @@ + from .oscsslexcp import NoSecureSSLError + from osc.util.cpio import CpioError + from osc.util.packagequery import PackageError ++from osc.util.helper import decode_it + + try: + from M2Crypto.SSL.Checker import SSLVerificationError +@@ -112,11 +113,11 @@ def run(prg, argv=None): + print(body, file=sys.stderr) + + if e.code in [400, 403, 404, 500]: +- if '' in body: +- msg = body.split('')[1] +- msg = msg.split('')[0] +- msg = msg.replace('<', '<').replace('>' , '>').replace('&', '&') +- print(msg, file=sys.stderr) ++ if b'' in body: ++ msg = body.split(b'')[1] ++ msg = msg.split(b'')[0] ++ msg = msg.replace(b'<', b'<').replace(b'>' , b'>').replace(b'&', b'&') ++ print(decode_it(msg), file=sys.stderr) + if e.code >= 500 and e.code <= 599: + print('\nRequest: %s' % e.filename) + print('Headers:') diff --git a/005_oscssl.patch b/005_oscssl.patch new file mode 100644 index 0000000..e4e876b --- /dev/null +++ b/005_oscssl.patch @@ -0,0 +1,107 @@ +From 6ddf771c9f753cf052d9eb2965000021f8ecd26c Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 14:53:34 +0100 +Subject: [PATCH] [python3] make oscssl.py python3 ready. * works also with + python2 + +--- + osc/oscssl.py | 48 ++++++++++++++++++++++++++++++------------------ + 1 file changed, 30 insertions(+), 18 deletions(-) + +diff --git a/osc/oscssl.py b/osc/oscssl.py +index 186c98db..0e7401b2 100644 +--- a/osc/oscssl.py ++++ b/osc/oscssl.py +@@ -5,9 +5,8 @@ + + from __future__ import print_function + +-import M2Crypto.httpslib + from M2Crypto.SSL.Checker import SSLVerificationError +-from M2Crypto import m2, SSL ++from M2Crypto import m2, SSL, httpslib + import M2Crypto.m2urllib2 + import socket + import sys +@@ -185,22 +184,28 @@ def __init__(self, *args, **kwargs): + # "do_open()" and "https_open()" so that we just need to override + # the small "https_open()" method...) 
+ def https_open(self, req): +- host = req.get_host() ++ # https://docs.python.org/3.3/library/urllib.request.html#urllib.request.Request.get_host ++ try: # up to python-3.2 ++ host = req.get_host() ++ except AttributeError: # from python-3.3 ++ host = req.host + if not host: +- raise M2Crypto.m2urllib2.URLError('no host given: ' + req.get_full_url()) ++ raise M2Crypto.m2urllib2.URLError('no host given') + + # Our change: Check to see if we're using a proxy. + # Then create an appropriate ssl-aware connection. + full_url = req.get_full_url() + target_host = urlparse(full_url)[1] + +- if (target_host != host): +- h = myProxyHTTPSConnection(host = host, appname = self.appname, ssl_context = self.ctx) +- # M2Crypto.ProxyHTTPSConnection.putrequest expects a fullurl +- selector = full_url ++ if target_host != host: ++ request_uri = urldefrag(full_url)[0] ++ h = httpslib.ProxyHTTPSConnection(host=host, ssl_context=self.ctx) + else: +- h = myHTTPSConnection(host = host, appname = self.appname, ssl_context = self.ctx) +- selector = req.get_selector() ++ try: # up to python-3.2 ++ request_uri = req.get_selector() ++ except AttributeError: # from python-3.3 ++ request_uri = req.selector ++ h = httpslib.HTTPSConnection(host=host, ssl_context=self.ctx) + # End our change + h.set_debuglevel(self._debuglevel) + +@@ -214,10 +219,9 @@ def https_open(self, req): + # request. + headers["Connection"] = "close" + try: +- h.request(req.get_method(), selector, req.data, headers) ++ h.request(req.get_method(), request_uri, req.data, headers) + r = h.getresponse() +- except socket.error as err: # XXX what error? +- err.filename = full_url ++ except socket.error as err: # XXX what error? + raise M2Crypto.m2urllib2.URLError(err) + + # Pick apart the HTTPResponse object to get the addinfourl +@@ -227,18 +231,26 @@ def https_open(self, req): + # for Windows. That adapter calls recv(), so delegate recv() + # to read(). This weird wrapping allows the returned object to + # have readline() and readlines() methods. +- +- # XXX It might be better to extract the read buffering code +- # out of socket._fileobject() and into a base class. 
+- + r.recv = r.read +- fp = socket._fileobject(r) ++ if (sys.version_info < (3, 0)): ++ fp = socket._fileobject(r, close=True) ++ else: ++ r._decref_socketios = lambda: None ++ r.ssl = h.sock.ssl ++ r._timeout = -1.0 ++ # hack to bypass python3 bug with 0 buffer size and ++ # http/client.py readinto method for response class ++ if r.length is not None and r.length == 0: ++ r.readinto = lambda b: 0 ++ r.recv_into = r.readinto ++ fp = socket.SocketIO(r, 'rb') + + resp = addinfourl(fp, r.msg, req.get_full_url()) + resp.code = r.status + resp.msg = r.reason + return resp + ++ + class myHTTPSConnection(M2Crypto.httpslib.HTTPSConnection): + def __init__(self, *args, **kwargs): + self.appname = kwargs.pop('appname', 'generic') diff --git a/006_build.patch b/006_build.patch new file mode 100644 index 0000000..3597997 --- /dev/null +++ b/006_build.patch @@ -0,0 +1,179 @@ +From f9001464d7d02a5b308a10a9adf3f17010e41a38 Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 15:09:45 +0100 +Subject: [PATCH] [python3] build with python3 (mostly bytestring) * return of + get_buildinfo is bytestring with python3 * other variables contain + bytestrings as well now + +--- + osc/build.py | 62 +++++++++++++++++++++++++++------------------------- + 1 file changed, 32 insertions(+), 30 deletions(-) + +diff --git a/osc/build.py b/osc/build.py +index b26ae9c9..3caf53e6 100644 +--- a/osc/build.py ++++ b/osc/build.py +@@ -23,6 +23,7 @@ + from osc.core import get_buildinfo, store_read_apiurl, store_read_project, store_read_package, meta_exists, quote_plus, get_buildconfig, is_package_dir, dgst + from osc.core import get_binarylist, get_binary_file, run_external, return_external, raw_input + from osc.util import rpmquery, debquery, archquery ++from osc.util.helper import decode_it + import osc.conf + from . 
import oscerr + import subprocess +@@ -440,11 +441,11 @@ def get_prefer_pkgs(dirs, wanted_arch, type, cpio): + packageQuery = packagequery.PackageQuery.query(path) + packageQueries.add(packageQuery) + +- prefer_pkgs = dict((name, packageQuery.path()) ++ prefer_pkgs = dict((decode_it(name), packageQuery.path()) + for name, packageQuery in packageQueries.items()) + + depfile = create_deps(packageQueries.values()) +- cpio.add('deps', '\n'.join(depfile)) ++ cpio.add(b'deps', b'\n'.join(depfile)) + return prefer_pkgs + + +@@ -455,22 +456,22 @@ def create_deps(pkgqs): + """ + depfile = [] + for p in pkgqs: +- id = '%s.%s-0/0/0: ' % (p.name(), p.arch()) +- depfile.append('P:%s%s' % (id, ' '.join(p.provides()))) +- depfile.append('R:%s%s' % (id, ' '.join(p.requires()))) ++ id = b'%s.%s-0/0/0: ' % (p.name(), p.arch()) ++ depfile.append(b'P:%s%s' % (id, b' '.join(p.provides()))) ++ depfile.append(b'R:%s%s' % (id, b' '.join(p.requires()))) + d = p.conflicts() + if d: +- depfile.append('C:%s%s' % (id, ' '.join(d))) ++ depfile.append(b'C:%s%s' % (id, b' '.join(d))) + d = p.obsoletes() + if d: +- depfile.append('O:%s%s' % (id, ' '.join(d))) ++ depfile.append(b'O:%s%s' % (id, b' '.join(d))) + d = p.recommends() + if d: +- depfile.append('r:%s%s' % (id, ' '.join(d))) ++ depfile.append(b'r:%s%s' % (id, b' '.join(d))) + d = p.supplements() + if d: +- depfile.append('s:%s%s' % (id, ' '.join(d))) +- depfile.append('I:%s%s-%s 0-%s' % (id, p.name(), p.evr(), p.arch())) ++ depfile.append(b's:%s%s' % (id, b' '.join(d))) ++ depfile.append(b'I:%s%s-%s 0-%s' % (id, p.name(), p.evr().encode(), p.arch())) + return depfile + + +@@ -661,24 +662,24 @@ def main(apiurl, opts, argv): + extra_pkgs += xp + + prefer_pkgs = {} +- build_descr_data = open(build_descr).read() ++ build_descr_data = open(build_descr, 'rb').read() + + # XXX: dirty hack but there's no api to provide custom defines + if opts.without: + s = '' + for i in opts.without: + s += "%%define _without_%s 1\n" % i +- build_descr_data = s + build_descr_data ++ build_descr_data = s.encode() + build_descr_data + if opts._with: + s = '' + for i in opts._with: + s += "%%define _with_%s 1\n" % i +- build_descr_data = s + build_descr_data ++ build_descr_data = s.encode() + build_descr_data + if opts.define: + s = '' + for i in opts.define: + s += "%%define %s\n" % i +- build_descr_data = s + build_descr_data ++ build_descr_data = s.encode + build_descr_data + + cpiodata = None + servicefile = os.path.join(os.path.dirname(build_descr), "_service") +@@ -708,12 +709,12 @@ def main(apiurl, opts, argv): + prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, build_type, cpiodata) + + if cpiodata: +- cpiodata.add(os.path.basename(build_descr), build_descr_data) ++ cpiodata.add(os.path.basename(build_descr.encode()), build_descr_data) + # buildenv must come last for compatibility reasons... 
+ if buildenvfile: +- cpiodata.add("buildenv", open(buildenvfile).read()) ++ cpiodata.add(b"buildenv", open(buildenvfile, 'rb').read()) + if servicefile: +- cpiodata.add("_service", open(servicefile).read()) ++ cpiodata.add(b"_service", open(servicefile, 'rb').read()) + build_descr_data = cpiodata.get() + + # special handling for overlay and rsync-src/dest +@@ -767,13 +768,14 @@ def main(apiurl, opts, argv): + raise oscerr.WrongOptions('--offline is not possible, no local buildconfig file') + else: + print('Getting buildinfo from server and store to %s' % bi_filename) +- bi_text = ''.join(get_buildinfo(apiurl, +- prj, +- pac, +- repo, +- arch, +- specfile=build_descr_data, +- addlist=extra_pkgs)) ++ ++ bi_text = decode_it(get_buildinfo(apiurl, ++ prj, ++ pac, ++ repo, ++ arch, ++ specfile=build_descr_data, ++ addlist=extra_pkgs)) + if not bi_file: + bi_file = open(bi_filename, 'w') + # maybe we should check for errors before saving the file +@@ -783,7 +785,7 @@ def main(apiurl, opts, argv): + bc = get_buildconfig(apiurl, prj, repo) + if not bc_file: + bc_file = open(bc_filename, 'w') +- bc_file.write(bc) ++ bc_file.write(decode_it(bc)) + bc_file.flush() + except HTTPError as e: + if e.code == 404: +@@ -814,7 +816,7 @@ def main(apiurl, opts, argv): + # Set default binary type if cannot be detected + binary_type = 'rpm' + if os.path.exists('/usr/lib/build/queryconfig'): +- binary_type = return_external('/usr/lib/build/queryconfig', '--dist', bc_filename, 'binarytype').decode('utf-8').strip() ++ binary_type = decode_it(return_external('/usr/lib/build/queryconfig', '--dist', bc_filename, 'binarytype')).strip() + # If binary type is set to a useless value, reset to 'rpm' + if binary_type == 'UNDEFINED': + binary_type = 'rpm' +@@ -1142,7 +1144,7 @@ def __str__(self): + if bi.installonly_list: + rpmlist.append('installonly: ' + ' '.join(bi.installonly_list) + '\n') + +- rpmlist_file = NamedTemporaryFile(prefix='rpmlist.') ++ rpmlist_file = NamedTemporaryFile(mode='w+t', prefix='rpmlist.') + rpmlist_filename = rpmlist_file.name + rpmlist_file.writelines(rpmlist) + rpmlist_file.flush() +@@ -1242,13 +1244,13 @@ def __str__(self): + (s_built, b_built) = get_built_files(pacdir, bi.buildtype) + + print() +- if s_built: print(s_built) ++ if s_built: print(decode_it(s_built)) + print() +- print(b_built) ++ print(decode_it(b_built)) + + if opts.keep_pkgs: + for i in b_built.splitlines() + s_built.splitlines(): +- shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(i))) ++ shutil.copy2(i, os.path.join(opts.keep_pkgs, os.path.basename(decode_it(i)))) + + if bi_file: + bi_file.close() diff --git a/007_core.patch b/007_core.patch new file mode 100644 index 0000000..986a56b --- /dev/null +++ b/007_core.patch @@ -0,0 +1,504 @@ +diff --git a/osc/core.py b/osc/core.py +index 0a24d08..c3a9eda 100644 +--- a/osc/core.py ++++ b/osc/core.py +@@ -24,6 +24,7 @@ import errno + import shlex + import hashlib + ++ + try: + from urllib.parse import urlsplit, urlunsplit, urlparse, quote_plus, urlencode, unquote + from urllib.error import HTTPError +@@ -49,6 +50,13 @@ except ImportError: + from . import oscerr + from . 
import conf + ++try: ++ from functools import cmp_to_key ++except ImportError: ++ from .util.helper import cmp_to_key ++ ++from osc.util.helper import decode_list, decode_it ++ + try: + # python 2.6 and python 2.7 + unicode +@@ -62,6 +70,11 @@ except: + unicode = lambda x, *args: x + ET_ENCODING = "unicode" + ++def compare(a, b): return cmp(a[1:], b[1:]) ++ ++def cmp(a, b): ++ return (a > b) - (a < b) ++ + DISTURL_RE = re.compile(r"^(?P.*)://(?P.*?)/(?P.*?)/(?P.*?)/(?P.*)-(?P.*)$") + BUILDLOGURL_RE = re.compile(r"^(?Phttps?://.*?)/build/(?P.*?)/(?P.*?)/(?P.*?)/(?P.*?)/_log$") + BUFSIZE = 1024*1024 +@@ -331,7 +344,7 @@ class Serviceinfo: + def addVerifyFile(self, serviceinfo_node, filename): + import hashlib + +- f = open(filename, 'r') ++ f = open(filename, 'rb') + digest = hashlib.sha256(f.read()).hexdigest() + f.close() + +@@ -1849,8 +1862,8 @@ class Package: + meta = show_package_meta(self.apiurl, self.prjname, self.name) + if meta != "": + # is empty for _project for example +- meta = ''.join(meta) +- store_write_string(self.absdir, '_meta', meta + '\n') ++ meta = b''.join(meta) ++ store_write_string(self.absdir, '_meta', meta + b'\n') + + def findfilebyname(self, n): + for i in self.filelist: +@@ -2115,7 +2128,7 @@ rev: %s + argument force supress the confirm question + """ + +- m = ''.join(show_package_meta(self.apiurl, self.prjname, self.name)) ++ m = b''.join(show_package_meta(self.apiurl, self.prjname, self.name)) + + root = ET.fromstring(m) + root.find('title').text = self.summary +@@ -2130,7 +2143,7 @@ rev: %s + + if not force: + print('*' * 36, 'old', '*' * 36) +- print(m) ++ print(decode_it(m)) + print('*' * 36, 'new', '*' * 36) + print(ET.tostring(root, encoding=ET_ENCODING)) + print('*' * 72) +@@ -2979,7 +2992,7 @@ class Request: + lines.append(' *** This request will get automatically accepted after '+self.accept_at+' ! ***\n') + if self.priority in [ 'critical', 'important' ] and self.state.name in [ 'new', 'review' ]: + lines.append(' *** This request has classified as '+self.priority+' ! ***\n') +- if self.state and self.state.approver and self.state.name == 'review': ++ if self.state.approver and self.state.name == 'review': + lines.append(' *** This request got approved by '+self.state.approver+'. It will get automatically accepted after last review got accepted! 
***\n') + + for action in self.actions: +@@ -3565,7 +3578,7 @@ def show_attribute_meta(apiurl, prj, pac, subpac, attribute, with_defaults, with + + def show_devel_project(apiurl, prj, pac): + m = show_package_meta(apiurl, prj, pac) +- node = ET.fromstring(''.join(m)).find('devel') ++ node = ET.fromstring(b''.join(m)).find('devel') + if node is None: + return None, None + else: +@@ -3574,7 +3587,7 @@ def show_devel_project(apiurl, prj, pac): + + def set_devel_project(apiurl, prj, pac, devprj=None, devpac=None): + meta = show_package_meta(apiurl, prj, pac) +- root = ET.fromstring(''.join(meta)) ++ root = ET.fromstring(b''.join(meta)) + node = root.find('devel') + if node is None: + if devprj is None: +@@ -3647,8 +3660,12 @@ class metafile: + self.url = url + self.change_is_required = change_is_required + (fd, self.filename) = tempfile.mkstemp(prefix = 'osc_metafile.', suffix = file_ext) +- f = os.fdopen(fd, 'w') +- f.write(''.join(input)) ++ if not input or isinstance(input[0], str) or isinstance(input, str): ++ f = os.fdopen(fd, 'w') ++ f.write(''.join(input)) ++ else: ++ f = os.fdopen(fd, 'wb') ++ f.write(b''.join(input)) + f.close() + self.hash_orig = dgst(self.filename) + +@@ -3681,8 +3698,8 @@ class metafile: + # examine the error - we can't raise an exception because we might want + # to try again + data = e.read() +- if '' in data: +- print(data.split('')[1].split('')[0], file=sys.stderr) ++ if b'' in data: ++ print(data.split(b'')[1].split(b'')[0], file=sys.stderr) + ri = raw_input('Try again? ([y/N]): ') + if ri not in ['y', 'Y']: + break +@@ -3799,7 +3816,20 @@ def edit_meta(metatype, + if metatype == 'pkg': + # check if the package is a link to a different project + project, package = path_args +- orgprj = ET.fromstring(''.join(data)).get('project') ++ # data can be a bytes object, a list with strings, a list with bytes, just a string. ++ # So we need the following even if it is ugly. 
++ if sys.version_info >= (3, 0): ++ if isinstance(data, bytes): ++ data = decode_it(data) ++ orgprj = ET.fromstring(''.join(data)).get('project') ++ elif isinstance(data, list): ++ decode_data = decode_list(data) ++ orgprj = ET.fromstring(''.join(decode_data)).get('project') ++ else: ++ orgprj = ET.fromstring(''.join(data)).get('project') ++ else: ++ orgprj = ET.fromstring(''.join(data)).get('project') ++ + if orgprj is not None and unquote(project) != orgprj: + print('The package is linked from a different project.') + print('If you want to edit the meta of the package create first a branch.') +@@ -3839,7 +3869,7 @@ def show_files_meta(apiurl, prj, pac, revision=None, expand=False, linkrev=None, + + def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=False, include_service_files=False, deleted=False): + m = show_files_meta(apiurl, prj, pac, expand=expand, revision=revision, meta=meta, deleted=deleted) +- et = ET.fromstring(''.join(m)) ++ et = ET.fromstring(m) + if include_service_files: + try: + sinfo = et.find('serviceinfo') +@@ -3852,7 +3882,7 @@ def show_upstream_srcmd5(apiurl, prj, pac, expand=False, revision=None, meta=Fal + + def show_upstream_xsrcmd5(apiurl, prj, pac, revision=None, linkrev=None, linkrepair=False, meta=False, include_service_files=False): + m = show_files_meta(apiurl, prj, pac, revision=revision, linkrev=linkrev, linkrepair=linkrepair, meta=meta, expand=include_service_files) +- et = ET.fromstring(''.join(m)) ++ et = ET.fromstring(m) + if include_service_files: + return et.get('srcmd5') + +@@ -3891,7 +3921,7 @@ def get_project_sourceinfo(apiurl, project, nofilename, *packages): + raise + if len(packages) == 1: + raise oscerr.APIError('package name too long: %s' % packages[0]) +- n = len(packages) / 2 ++ n = int(len(packages) / 2) + pkgs = packages[:n] + res = get_project_sourceinfo(apiurl, project, nofilename, *pkgs) + pkgs = packages[n:] +@@ -3906,12 +3936,12 @@ def get_project_sourceinfo(apiurl, project, nofilename, *packages): + + def show_upstream_rev_vrev(apiurl, prj, pac, revision=None, expand=False, meta=False): + m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, meta=meta) +- et = ET.fromstring(''.join(m)) ++ et = ET.fromstring(m) + return et.get('rev'), et.get('vrev') + + def show_upstream_rev(apiurl, prj, pac, revision=None, expand=False, linkrev=None, meta=False, include_service_files=False): + m = show_files_meta(apiurl, prj, pac, revision=revision, expand=expand, linkrev=linkrev, meta=meta) +- et = ET.fromstring(''.join(m)) ++ et = ET.fromstring(m) + if include_service_files: + try: + sinfo = et.find('serviceinfo') +@@ -4012,10 +4042,16 @@ def run_pager(message, tmp_suffix=''): + return + + if not sys.stdout.isatty(): +- print(message) ++ if isinstance(message, str): ++ print(message) ++ else: ++ print(decode_it(message)) + else: + tmpfile = tempfile.NamedTemporaryFile(suffix=tmp_suffix) +- tmpfile.write(message) ++ if isinstance(message, str): ++ tmpfile.write(bytes(message, 'utf-8')) ++ else: ++ tmpfile.write(message) + tmpfile.flush() + pager = os.getenv('PAGER', default=get_default_pager()) + cmd = shlex.split(pager) + [tmpfile.name] +@@ -4042,6 +4078,8 @@ def _edit_message_open_editor(filename, data, orig_mtime): + import tempfile + editor = _editor_command() + mtime = os.stat(filename).st_mtime ++ if isinstance(data, str): ++ data = bytes(data, 'utf-8') + if mtime == orig_mtime: + # prepare file for editors + if editor[0] in ('vi', 'vim'): +@@ -4051,7 +4089,7 @@ def 
_edit_message_open_editor(filename, data, orig_mtime): + editor.extend(['-c', ':r %s' % f.name, filename]) + run_external(editor[0], *editor[1:]) + else: +- with open(filename, 'w') as f: ++ with open(filename, 'wb') as f: + f.write(data) + orig_mtime = os.stat(filename).st_mtime + run_editor(filename) +@@ -4543,7 +4581,7 @@ def get_group_meta(apiurl, group): + u = makeurl(apiurl, ['group', quote_plus(group)]) + try: + f = http_GET(u) +- return ''.join(f.readlines()) ++ return b''.join(f.readlines()) + except HTTPError: + print('group \'%s\' not found' % group) + return None +@@ -4552,7 +4590,7 @@ def get_user_meta(apiurl, user): + u = makeurl(apiurl, ['person', quote_plus(user)]) + try: + f = http_GET(u) +- return ''.join(f.readlines()) ++ return b''.join(f.readlines()) + except HTTPError: + print('user \'%s\' not found' % user) + return None +@@ -4594,7 +4632,10 @@ def download(url, filename, progress_obj = None, mtime = None): + try: + o = os.fdopen(fd, 'wb') + for buf in streamfile(url, http_GET, BUFSIZE, progress_obj=progress_obj): +- o.write(bytes(buf, "utf-8")) ++ if isinstance(buf, str): ++ o.write(bytes(buf, "utf-8")) ++ else: ++ o.write(buf) + o.close() + os.rename(tmpfile, filename) + except: +@@ -4807,7 +4848,7 @@ def server_diff_noex(apiurl, + msg = None + body = None + try: +- body = e.read() ++ body = decode_it(e.read()) + if not 'bad link' in body: + return '# diff failed: ' + body + except: +@@ -5027,7 +5068,7 @@ def replace_pkg_meta(pkgmeta, new_name, new_prj, keep_maintainers = False, + only maintainer (unless keep_maintainers is set). Additionally remove the + develproject entry () unless keep_develproject is true. + """ +- root = ET.fromstring(''.join(pkgmeta)) ++ root = ET.fromstring(b''.join(pkgmeta)) + root.set('name', new_name) + root.set('project', new_prj) + # never take releasename, it needs to be explicit +@@ -5512,7 +5553,7 @@ def get_distibutions(apiurl, discon=False): + else: + result_line_templ = '%(name)-25s %(project)-25s %(repository)-25s %(reponame)s' + f = http_GET(makeurl(apiurl, ['distributions'])) +- root = ET.fromstring(''.join(f)) ++ root = ET.fromstring(b''.join(f)) + + for node in root.findall('distribution'): + rmap = {} +@@ -5538,7 +5579,7 @@ def get_platforms_of_project(apiurl, prj): + + def get_repositories_of_project(apiurl, prj): + f = show_project_meta(apiurl, prj) +- root = ET.fromstring(''.join(f)) ++ root = ET.fromstring(b''.join(f)) + + r = [ node.get('name') for node in root.findall('repository')] + return r +@@ -5580,7 +5621,7 @@ class Repo: + + def get_repos_of_project(apiurl, prj): + f = show_project_meta(apiurl, prj) +- root = ET.fromstring(''.join(f)) ++ root = ET.fromstring(b''.join(f)) + + for node in root.findall('repository'): + for node2 in node.findall('arch'): +@@ -5751,7 +5792,7 @@ def get_package_results(apiurl, project, package=None, wait=False, *args, **kwar + while True: + waiting = False + try: +- xml = ''.join(show_results_meta(apiurl, project, package, *args, **kwargs)) ++ xml = b''.join(show_results_meta(apiurl, project, package, *args, **kwargs)) + except HTTPError as e: + # check for simple timeout error and fetch again + if e.code == 502 or e.code == 504: +@@ -5792,7 +5833,7 @@ def get_prj_results(apiurl, prj, hide_legend=False, csv=False, status_filter=Non + r = [] + + f = show_prj_results_meta(apiurl, prj) +- root = ET.fromstring(''.join(f)) ++ root = ET.fromstring(b''.join(f)) + + pacs = [] + # sequence of (repo,arch) tuples +@@ -5972,7 +6013,6 @@ def get_prj_results(apiurl, prj, hide_legend=False, 
csv=False, status_filter=Non + return r + + +- + def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj=None, text=None): + """ + performs http_meth on url and read bufsize bytes from the response +@@ -6032,8 +6072,12 @@ def streamfile(url, http_meth = http_GET, bufsize=8192, data=None, progress_obj= + + def buildlog_strip_time(data): + """Strips the leading build time from the log""" +- time_regex = re.compile('^\[[^\]]*\] ', re.M) +- return time_regex.sub('', data) ++ if isinstance(data, str): ++ time_regex = re.compile('^\[[^\]]*\] ', re.M) ++ return time_regex.sub('', data) ++ else: ++ time_regex = re.compile(b'^\[[^\]]*\] ', re.M) ++ return time_regex.sub(b'', data) + + + def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time=False, last=False): +@@ -6042,11 +6086,14 @@ def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time= + def print_data(data, strip_time=False): + if strip_time: + data = buildlog_strip_time(data) +- sys.stdout.write(data.translate(all_bytes, remove_bytes)) ++ sys.stdout.write(decode_it(data.translate(all_bytes, remove_bytes))) + + # to protect us against control characters + import string +- all_bytes = string.maketrans('', '') ++ if sys.version_info >= (3, 0): ++ all_bytes = bytes.maketrans(b'', b'') ++ else: ++ all_bytes = string.maketrans(b'', b'') + remove_bytes = all_bytes[:8] + all_bytes[14:32] # accept tabs and newlines + + query = {'nostream' : '1', 'start' : '%s' % offset} +@@ -6058,7 +6105,7 @@ def print_buildlog(apiurl, prj, package, repository, arch, offset=0, strip_time= + start_offset = offset + u = makeurl(apiurl, ['build', prj, repository, arch, package, '_log'], query=query) + try: +- for data in streamfile(u, bufsize="line"): ++ for data in streamfile(u): + offset += len(data) + print_data(data, strip_time) + except IncompleteRead as e: +@@ -6115,7 +6162,7 @@ def get_worker_info(apiurl, worker): + u = makeurl(apiurl, ['worker', worker]) + f = http_GET(u) + +- return f.read() ++ return decode_it(f.read()) + + + def check_constraints(apiurl, prj, repository, arch, package, constraintsfile=None): +@@ -6126,7 +6173,7 @@ def check_constraints(apiurl, prj, repository, arch, package, constraintsfile=No + query['arch'] = arch + u = makeurl(apiurl, ['worker'], query) + f = http_POST(u, data=constraintsfile) +- root = ET.fromstring(''.join(f)) ++ root = ET.fromstring(b''.join(f)) + return [node.get('name') for node in root.findall('entry')] + + +@@ -6267,7 +6314,7 @@ def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False, + try: + comment = node.find('comment').text.encode(locale.getpreferredencoding(), 'replace') + except: +- comment = '' ++ comment = b'' + try: + requestid = node.find('requestid').text.encode(locale.getpreferredencoding(), 'replace') + except: +@@ -6290,10 +6337,10 @@ def get_commitlog(apiurl, prj, package, revision, format = 'text', meta = False, + r.append('') + else: + if requestid: +- requestid = "rq" + requestid ++ requestid = decode_it((b"rq" + requestid)) + s = '-' * 76 + \ + '\nr%s | %s | %s | %s | %s | %s\n' % (rev, user, t, srcmd5, version, requestid) + \ +- '\n' + comment ++ '\n' + decode_it(comment) + r.append(s) + + if format not in ['csv', 'xml']: +@@ -6427,6 +6474,8 @@ def store_write_string(dir, file, string, subdir=''): + fname = os.path.join(dir, store, subdir, file) + try: + f = open(fname + '.new', 'w') ++ if not isinstance(string, str): ++ string = decode_it(string) + f.write(string) + f.close() + os.rename(fname + 
'.new', fname) +@@ -6805,7 +6854,11 @@ def is_rpm(f): + except: + return False + +- if h == '\xed\xab\xee\xdb': ++ if isinstance(h, str): ++ isrpmstr = '\xed\xab\xee\xdb' ++ else: ++ isrpmstr = b'\xed\xab\xee\xdb' ++ if h == isrpmstr: + return True + else: + return False +@@ -6821,7 +6874,8 @@ def is_srcrpm(f): + except: + return False + +- if h[7] == '\x01': ++ issrcrpm = bytes(bytearray([h[7]])).decode('utf-8') ++ if issrcrpm == '\x01': + return True + else: + return False +@@ -6843,7 +6897,7 @@ def addPerson(apiurl, prj, pac, user, role="maintainer"): + create_new=False) + + if data and get_user_meta(apiurl, user) != None: +- root = ET.fromstring(''.join(data)) ++ root = ET.fromstring(b''.join(data)) + found = False + for person in root.getiterator('person'): + if person.get('userid') == user and person.get('role') == role: +@@ -7021,7 +7075,7 @@ def addDownloadUrlService(url): + + # for pretty output + xmlindent(s) +- f = open(service_file, 'wb') ++ f = open(service_file, 'w') + f.write(ET.tostring(s, encoding=ET_ENCODING)) + f.close() + if addfile: +@@ -7043,7 +7097,7 @@ def addDownloadUrlService(url): + + # for pretty output + xmlindent(s) +- f = open(service_file, 'wb') ++ f = open(service_file, 'w') + f.write(ET.tostring(s, encoding=ET_ENCODING)) + f.close() + +@@ -7204,7 +7258,7 @@ def get_commit_msg(wc_dir, pacs): + footer = [] + lines = [] + for p in pacs: +- states = sorted(p.get_status(False, ' ', '?'), lambda x, y: cmp(x[1], y[1])) ++ states = sorted(p.get_status(False, ' ', '?'), key=cmp_to_key(compare)) + changed = [statfrmt(st, os.path.normpath(os.path.join(p.dir, filename))) for st, filename in states] + if changed: + footer += changed +@@ -7283,7 +7337,7 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, + except (ValueError, IndexError): + print('Invalid rpmlintlog index. 
Please choose between 0 and %i' % (len(lintlogs)-1)) + try: +- print(get_rpmlint_log(apiurl, **lintlogs[lint_n])) ++ print(decode_it(get_rpmlint_log(apiurl, **lintlogs[lint_n]))) + except HTTPError as e: + if e.code == 404: + print('No rpmlintlog for %s %s' % (lintlogs[lint_n]['repo'], +@@ -7359,12 +7413,12 @@ def request_interactive_review(apiurl, request, initial_cmd='', group=None, + tmpfile.close() + tmpfile = None + if tmpfile is None: +- tmpfile = tempfile.NamedTemporaryFile(suffix='.diff') ++ tmpfile = tempfile.NamedTemporaryFile(suffix='.diff', mode='r+') + tmpfile.write(req_summary) + tmpfile.write(issues) + try: + diff = request_diff(apiurl, request.reqid) +- tmpfile.write(diff) ++ tmpfile.write(decode_it(diff)) + except HTTPError as e: + if e.code != 400: + raise diff --git a/008_commandline.patch b/008_commandline.patch new file mode 100644 index 0000000..51f76ba --- /dev/null +++ b/008_commandline.patch @@ -0,0 +1,385 @@ +From 1772e5fb09f73181bd2250744b6e0a8da1c5ac5d Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Thu, 13 Dec 2018 15:17:29 +0100 +Subject: [PATCH] [python3] python3 support for commandline module + +* use cmp_to_key from functools for python 2.7 and higer +* use self written cmp_to_key for python 2.6 +* a lot of bytestring handling and decoding +--- + osc/commandline.py | 105 +++++++++++++++++++++++++-------------------- + 1 file changed, 58 insertions(+), 47 deletions(-) + +diff --git a/osc/commandline.py b/osc/commandline.py +index e96a1170..67c2ad2a 100644 +--- a/osc/commandline.py ++++ b/osc/commandline.py +@@ -29,6 +29,11 @@ + from .core import * + from .util import safewriter + ++try: ++ from functools import cmp_to_key ++except ImportError: ++ from .util.helper import cmp_to_key ++ + MAN_HEADER = r""".TH %(ucname)s "1" "%(date)s" "%(name)s %(version)s" "User Commands" + .SH NAME + %(name)s \- openSUSE build service command-line tool. 
+@@ -435,7 +440,8 @@ def do_list(self, subcmd, opts, *args): + break + m = show_files_meta(apiurl, project, package) + li = Linkinfo() +- li.read(ET.fromstring(''.join(m)).find('linkinfo')) ++ root = ET.fromstring(m) ++ li.read(root.find('linkinfo')) + if li.haserror(): + raise oscerr.LinkExpandError(project, package, li.error) + project, package, rev = li.project, li.package, li.rev +@@ -753,7 +759,7 @@ def do_token(self, subcmd, opts, *args): + buf = f.read(16384) + if not buf: + break +- sys.stdout.write(buf) ++ sys.stdout.write(decode_it(buf)) + + elif opts.delete: + print("Delete token") +@@ -773,7 +779,7 @@ def do_token(self, subcmd, opts, *args): + raise oscerr.WrongArgs("Did you mean --" + args[0] + "?") + # just list token + for data in streamfile(url, http_GET): +- sys.stdout.write(data) ++ sys.stdout.write(decode_it(data)) + + + @cmdln.option('-a', '--attribute', metavar='ATTRIBUTE', +@@ -938,22 +944,22 @@ def do_meta(self, subcmd, opts, *args): + # show + if not opts.edit and not opts.file and not opts.delete and not opts.create and not opts.set: + if cmd == 'prj': +- sys.stdout.write(''.join(show_project_meta(apiurl, project, rev=opts.revision, blame=opts.blame))) ++ sys.stdout.write(decode_it(b''.join(show_project_meta(apiurl, project, rev=opts.revision, blame=opts.blame)))) + elif cmd == 'pkg': +- sys.stdout.write(''.join(show_package_meta(apiurl, project, package, blame=opts.blame))) ++ sys.stdout.write(decode_it(b''.join(show_package_meta(apiurl, project, package, blame=opts.blame)))) + elif cmd == 'attribute': +- sys.stdout.write(''.join(show_attribute_meta(apiurl, project, package, subpackage, +- opts.attribute, opts.attribute_defaults, opts.attribute_project))) ++ sys.stdout.write(decode_it(b''.join(show_attribute_meta(apiurl, project, package, subpackage, ++ opts.attribute, opts.attribute_defaults, opts.attribute_project)))) + elif cmd == 'prjconf': +- sys.stdout.write(''.join(show_project_conf(apiurl, project, rev=opts.revision, blame=opts.blame))) ++ sys.stdout.write(decode_it(b''.join(show_project_conf(apiurl, project, rev=opts.revision, blame=opts.blame)))) + elif cmd == 'user': + r = get_user_meta(apiurl, user) + if r: +- sys.stdout.write(''.join(r)) ++ sys.stdout.write(decode_it(r)) + elif cmd == 'group': + r = get_group_meta(apiurl, group) + if r: +- sys.stdout.write(''.join(r)) ++ sys.stdout.write(decode_it(r)) + elif cmd == 'pattern': + if pattern: + r = show_pattern_meta(apiurl, project, pattern) +@@ -1390,9 +1396,9 @@ def _check_service(root): + if opts.diff or not opts.message: + try: + rdiff = 'old: %s/%s\nnew: %s/%s rev %s\n' % (dst_project, dst_package, src_project, src_package, rev) +- rdiff += server_diff(apiurl, ++ rdiff += decode_it(server_diff(apiurl, + dst_project, dst_package, None, +- src_project, src_package, rev, True) ++ src_project, src_package, rev, True)) + except: + rdiff = '' + +@@ -2483,7 +2489,7 @@ def do_request(self, subcmd, opts, *args): + action.tgt_project, action.tgt_package) + diff += submit_action_diff(apiurl, action) + diff += '\n\n' +- run_pager(diff, tmp_suffix='') ++ run_pager(decode_it(diff), tmp_suffix='') + + # checkout + elif cmd == 'checkout' or cmd == 'co': +@@ -2983,7 +2989,7 @@ def do_copypac(self, subcmd, opts, *args): + revision=rev, + comment=comment, + keep_link=opts.keep_link) +- print(r) ++ print(decode_it(r)) + + + @cmdln.option('-r', '--repo', metavar='REPO', +@@ -3495,7 +3501,7 @@ def do_branch(self, subcmd, opts, *args): + devloc = None + if not exists and (srcprj != args[0] or srcpkg != args[1]): + try: +- 
root = ET.fromstring(''.join(show_attribute_meta(apiurl, args[0], None, None, ++ root = ET.fromstring(b''.join(show_attribute_meta(apiurl, args[0], None, None, + conf.config['maintained_update_project_attribute'], False, False))) + # this might raise an AttributeError + uproject = root.find('attribute').find('value').text +@@ -3649,7 +3655,7 @@ def do_lock(self, subcmd, opts, project, package=None): + kind = 'pkg' + path_args = (project, package) + meta = meta_exists(kind, path_args, create_new=False, apiurl=apiurl) +- root = ET.fromstring(''.join(meta)) ++ root = ET.fromstring(b''.join(meta)) + if root.find('lock') is not None: + print('Already locked', file=sys.stderr) + sys.exit(1) +@@ -3854,9 +3860,9 @@ def do_diff(self, subcmd, opts, *args): + for i in pac.get_diff(rev1): + diff += ''.join(i) + else: +- diff += server_diff_noex(pac.apiurl, pac.prjname, pac.name, rev1, ++ diff += decode_it(server_diff_noex(pac.apiurl, pac.prjname, pac.name, rev1, + pac.prjname, pac.name, rev2, +- not opts.plain, opts.missingok, opts.meta, not opts.unexpand) ++ not opts.plain, opts.missingok, opts.meta, not opts.unexpand)) + run_pager(diff) + + +@@ -4135,12 +4141,12 @@ def _prdiff_output_diff(self, opts, rdiff): + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + close_fds=True) +- p.stdin.write(rdiff.encode()) ++ p.stdin.write(rdiff) + p.stdin.close() +- print("".join(x.decode() for x in p.stdout.readlines())) ++ print("".join(decode_it(x) for x in p.stdout.readlines())) + elif opts.unified: + print() +- print(rdiff) ++ print(decode_it(rdiff)) + #run_pager(rdiff) + + def _prdiff_output_matching_requests(self, opts, requests, +@@ -4285,7 +4291,7 @@ def do_repourls(self, subcmd, opts, *args): + else: + raise oscerr.WrongArgs('Wrong number of arguments') + +- root = ET.fromstring(''.join(show_configuration(apiurl))) ++ root = ET.fromstring(b''.join(show_configuration(apiurl))) + elm = root.find('download_url') + if elm is None or not elm.text: + raise oscerr.APIError('download_url configuration element expected') +@@ -4530,7 +4536,7 @@ def do_status(self, subcmd, opts, *args): + # don't exclude packages with state ' ' because the packages + # might have modified etc. 
files + prj_excl = [st for st in excl_states if st != ' '] +- for st, pac in sorted(prj.get_status(*prj_excl), lambda x, y: cmp(x[1], y[1])): ++ for st, pac in sorted(prj.get_status(*prj_excl), key=cmp_to_key(compare)): + p = prj.get_pacobj(pac) + if p is None: + # state is != ' ' +@@ -4541,11 +4547,11 @@ def do_status(self, subcmd, opts, *args): + elif st == ' ' and opts.verbose or st != ' ': + lines.append(statfrmt(st, os.path.normpath(os.path.join(prj.dir, pac)))) + states = p.get_status(opts.show_excluded, *excl_states) +- for st, filename in sorted(states, lambda x, y: cmp(x[1], y[1])): ++ for st, filename in sorted(states, key=cmp_to_key(compare)): + lines.append(statfrmt(st, os.path.normpath(os.path.join(p.dir, filename)))) + else: + p = findpacs([arg])[0] +- for st, filename in sorted(p.get_status(opts.show_excluded, *excl_states), lambda x, y: cmp(x[1], y[1])): ++ for st, filename in sorted(p.get_status(opts.show_excluded, *excl_states), key=cmp_to_key(compare)): + lines.append(statfrmt(st, os.path.normpath(os.path.join(p.dir, filename)))) + if lines: + print('\n'.join(lines)) +@@ -5227,7 +5233,7 @@ def do_results(self, subcmd, opts, *args): + del kwargs['showexcl'] + for xml in get_package_results(**kwargs): + if opts.xml: +- print(xml, end='') ++ print(decode_it(xml), end='') + else: + # csv formatting + results = [r for r, _ in result_xml_to_dicts(xml)] +@@ -5290,7 +5296,7 @@ def do_prjresults(self, subcmd, opts, *args): + kwargs['arch'] = opts.arch + kwargs['wait'] = opts.watch + for results in get_package_results(apiurl, project, **kwargs): +- print(results) ++ print(decode_it(results)) + return + + if opts.watch: +@@ -5345,7 +5351,7 @@ def do_rpmlintlog(self, subcmd, opts, *args): + else: + raise oscerr.WrongArgs('please provide project package repository arch.') + +- print(get_rpmlint_log(apiurl, project, package, repository, arch)) ++ print(decode_it(get_rpmlint_log(apiurl, project, package, repository, arch))) + + @cmdln.alias('bl') + @cmdln.alias('blt') +@@ -5791,7 +5797,7 @@ def do_buildinfo(self, subcmd, opts, *args): + + build_descr_data = None + if not build_descr is None: +- build_descr_data = open(build_descr, 'r').read() ++ build_descr_data = open(build_descr, 'rb').read() + if opts.prefer_pkgs and build_descr_data is None: + raise oscerr.WrongArgs('error: a build description is needed if \'--prefer-pkgs\' is used') + elif opts.prefer_pkgs: +@@ -5802,13 +5808,13 @@ def do_buildinfo(self, subcmd, opts, *args): + prefer_pkgs = get_prefer_pkgs(opts.prefer_pkgs, arch, + os.path.splitext(build_descr)[1], + cpiodata) +- cpiodata.add(os.path.basename(build_descr), build_descr_data) ++ cpiodata.add(os.path.basename(build_descr.encode()), build_descr_data) + build_descr_data = cpiodata.get() + + if opts.multibuild_package: + package = package + ":" + opts.multibuild_package + +- print(''.join(get_buildinfo(apiurl, ++ print(decode_it(get_buildinfo(apiurl, + project, package, repository, arch, + specfile=build_descr_data, + debug=opts.debug, +@@ -5855,7 +5861,7 @@ def do_buildconfig(self, subcmd, opts, *args): + else: + raise oscerr.WrongArgs('Wrong number of arguments.') + +- print(''.join(get_buildconfig(apiurl, project, repository))) ++ print(decode_it(get_buildconfig(apiurl, project, repository))) + + + def do_workerinfo(self, subcmd, opts, worker): +@@ -6097,6 +6103,7 @@ def parse_repoarchdescr(self, args, noinit = False, alternative_project = None, + recipe = recipe.strip() + if recipe == 'arch': + recipe = 'PKGBUILD' ++ recipe = decode_it(recipe) + pac = 
os.path.basename(os.getcwd()) + if is_package_dir(os.getcwd()): + pac = store_read_package(os.getcwd()) +@@ -7287,7 +7294,7 @@ def do_getbinaries(self, subcmd, opts, *args): + package = pac, + target_filename = fname, + target_mtime = i.mtime, +- progress_meter = not opts.quiet) ++ progress_meter = opts.quiet) + + + @cmdln.option('-b', '--bugowner', action='store_true', +@@ -7373,7 +7380,7 @@ def do_my(self, subcmd, opts, *args): + what = {'project': ''} + elif type in args_sr: + requests = get_request_collection(apiurl, 'creator', req_who=user) +- for r in sorted(requests): ++ for r in sorted(requests, key=lambda x: x.reqid): + print(r.list_view(), '\n') + return + elif not type in args_pkg: +@@ -7741,8 +7748,9 @@ def build_xpath(attr, what, substr = False): + continue + # construct a sorted, flat list + # Sort by first column, follwed by second column if we have two columns, else sort by first. +- results.sort(lambda x, y: ( cmp(x[0], y[0]) or +- (len(x)>1 and len(y)>1 and cmp(x[1], y[1])) )) ++ # results.sort(lambda x, y: ( cmp(x[0], y[0]) or ++ # (len(x)>1 and len(y)>1 and cmp(x[1], y[1])) )) ++ results.sort(key=cmp_to_key(compare)) + new = [] + for i in results: + new.extend(i) +@@ -7962,7 +7970,7 @@ def do_api(self, subcmd, opts, url): + data=opts.data, + file=opts.file, + headers=opts.headers) +- out = r.read() ++ out = decode_it(r.read()) + + if opts.edit: + text = edit_text(out) +@@ -7970,7 +7978,7 @@ def do_api(self, subcmd, opts, url): + url, + data=text, + headers=opts.headers) +- out = r.read() ++ out = decode_it(r.read()) + + sys.stdout.write(out) + +@@ -8184,7 +8192,7 @@ def setBugownerHelper(apiurl, project, package, bugowner): + else: + if pac: + m = show_package_meta(apiurl, prj, pac) +- metaroot = ET.fromstring(''.join(m)) ++ metaroot = ET.fromstring(b''.join(m)) + if not opts.nodevelproject: + while metaroot.findall('devel'): + d = metaroot.find('devel') +@@ -8193,18 +8201,18 @@ def setBugownerHelper(apiurl, project, package, bugowner): + if opts.verbose: + print("Following to the development space: %s/%s" % (prj, pac)) + m = show_package_meta(apiurl, prj, pac) +- metaroot = ET.fromstring(''.join(m)) ++ metaroot = ET.fromstring(b''.join(m)) + if not metaroot.findall('person') and not metaroot.findall('group'): + if opts.verbose: + print("No dedicated persons in package defined, showing the project persons.") + pac = None + m = show_project_meta(apiurl, prj) +- metaroot = ET.fromstring(''.join(m)) ++ metaroot = ET.fromstring(b''.join(m)) + else: + # fallback to project lookup for old servers + if prj and not searchresult: + m = show_project_meta(apiurl, prj) +- metaroot = ET.fromstring(''.join(m)) ++ metaroot = ET.fromstring(b''.join(m)) + + # extract the maintainers + projects = [] +@@ -8365,10 +8373,13 @@ def do_cat(self, subcmd, opts, *args): + u = makeurl(apiurl, ['source', project, package, filename], query=query) + if subcmd == 'less': + f = http_GET(u) +- run_pager(''.join(f.readlines())) ++ run_pager(b''.join(f.readlines())) + else: + for data in streamfile(u): +- sys.stdout.write(data) ++ if isinstance(data, str): ++ sys.stdout.write(data) ++ else: ++ sys.stdout.write(decode_it(data)) + + + # helper function to download a file from a specific revision +@@ -8617,7 +8628,7 @@ def do_pull(self, subcmd, opts, *args): + u = makeurl(p.apiurl, ['source', p.prjname, p.name], query=query) + f = http_GET(u) + meta = f.readlines() +- root_new = ET.fromstring(''.join(meta)) ++ root_new = ET.fromstring(b''.join(meta)) + linkinfo_new = root_new.find('linkinfo') + if 
linkinfo_new == None: + raise oscerr.APIError('link is not a really a link?') +@@ -8790,7 +8801,7 @@ def do_signkey(self, subcmd, opts, *args): + buf = f.read(16384) + if not buf: + break +- sys.stdout.write(buf) ++ sys.stdout.write(decode_it(buf)) + + @cmdln.option('-m', '--message', + help='add MESSAGE to changes (do not open an editor)') +@@ -8830,7 +8841,7 @@ def do_vc(self, subcmd, opts, *args): + import glob, re + try: + fn_changelog = glob.glob('*.changes')[0] +- fp = file(fn_changelog) ++ fp = open(fn_changelog) + titleline = fp.readline() + fp.close() + if re.match('^\*\W+(.+\W+\d{1,2}\W+20\d{2})\W+(.+)\W+<(.+)>\W+(.+)$', titleline): diff --git a/009_fetch.patch b/009_fetch.patch new file mode 100644 index 0000000..6c73bdb --- /dev/null +++ b/009_fetch.patch @@ -0,0 +1,45 @@ +From 442a2731be9dd9af816b4998efcbd62f0f090b5e Mon Sep 17 00:00:00 2001 +From: lethliel +Date: Tue, 12 Feb 2019 14:04:42 +0100 +Subject: [PATCH] cpio.py is now a bytes only api. + +The decoding of the header needs to be done in fetch.py +--- + osc/fetch.py | 13 +++++++------ + 1 file changed, 7 insertions(+), 6 deletions(-) + +diff --git a/osc/fetch.py b/osc/fetch.py +index 833c8fa5..5a786250 100644 +--- a/osc/fetch.py ++++ b/osc/fetch.py +@@ -23,6 +23,7 @@ + import tempfile + import re + ++from osc.util.helper import decode_it + from .meter import create_text_meter + + class Fetcher: +@@ -77,16 +78,16 @@ def __download_cpio_archive(self, apiurl, project, repo, arch, package, **pkgs): + raise oscerr.APIError('CPIO archive is incomplete ' + '(see .errors file)') + if package == '_repository': +- n = re.sub(r'\.pkg\.tar\..z$', '.arch', hdr.filename) +- if n.startswith('container:'): +- n = re.sub(r'\.tar\..z$', '.tar', hdr.filename) +- pac = pkgs[n.rsplit('.', 1)[0]] ++ n = re.sub(b'\.pkg\.tar\..z$', b'.arch', hdr.filename) ++ if n.startswith(b'container:'): ++ n = re.sub(b'\.tar\..z$', b'.tar', hdr.filename) ++ pac = pkgs[decode_it(n.rsplit(b'.', 1)[0])] + pac.canonname = hdr.filename + else: +- pac = pkgs[n.rsplit('.', 1)[0]] ++ pac = pkgs[decode_it(n.rsplit(b'.', 1)[0])] + else: + # this is a kiwi product +- pac = pkgs[hdr.filename] ++ pac = pkgs[decode_it(hdr.filename)] + + # Extract a single file from the cpio archive + try: diff --git a/010_fix-decoding_in_canonname.patch b/010_fix-decoding_in_canonname.patch new file mode 100644 index 0000000..86c4ffa --- /dev/null +++ b/010_fix-decoding_in_canonname.patch @@ -0,0 +1,53 @@ +diff --git a/osc/util/rpmquery.py b/osc/util/rpmquery.py +index 534636a..535cd37 100644 +--- a/osc/util/rpmquery.py ++++ b/osc/util/rpmquery.py +@@ -5,6 +5,10 @@ import os + import re + import struct + from . 
import packagequery ++from osc.util.helper import decode_it ++ ++def cmp(a, b): ++ return (a > b) - (a < b) + + class RpmError(packagequery.PackageError): + pass +@@ -184,14 +188,14 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult): + continue + # RPMSENSE_SENSEMASK = 15 (see rpmlib.h) but ignore RPMSENSE_SERIAL (= 1 << 0) therefore use 14 + if flags & 14: +- name += ' ' ++ name += b' ' + if flags & self.GREATER: +- name += '>' ++ name += b'>' + elif flags & self.LESS: +- name += '<' ++ name += b'<' + if flags & self.EQUAL: +- name += '=' +- name += ' %s' % ver ++ name += b'=' ++ name += b' %s' % ver + res.append(name) + return res + +@@ -288,7 +292,7 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult): + arch = 'src' + else: + arch = self.arch() +- return RpmQuery.filename(self.name(), None, self.version(), self.release(), arch) ++ return RpmQuery.filename(decode_it(self.name()), None, decode_it(self.version()), decode_it(self.release()), decode_it(arch)) + + @staticmethod + def query(filename): +@@ -318,6 +322,8 @@ class RpmQuery(packagequery.PackageQuery, packagequery.PackageQueryResult): + if ver1 == ver2: + return 0 + res = 0 ++ ver1 = decode_it(ver1) ++ ver2 = decode_it(ver2) + while res == 0: + # remove all leading non alphanumeric or tilde chars + ver1 = re.sub('^[^a-zA-Z0-9~]*', '', ver1) diff --git a/011_repodata.patch b/011_repodata.patch new file mode 100644 index 0000000..2e8b9a0 --- /dev/null +++ b/011_repodata.patch @@ -0,0 +1,21 @@ +diff --git a/osc/util/repodata.py b/osc/util/repodata.py +index cb09113..9203170 100644 +--- a/osc/util/repodata.py ++++ b/osc/util/repodata.py +@@ -177,13 +177,13 @@ class RepoDataQueryResult(osc.util.packagequery.PackageQueryResult): + return None + + def vercmp(self, other): +- res = osc.util.rpmquery.RpmQuery.rpmvercmp(str(self.epoch()), str(other.epoch())) ++ res = osc.util.rpmquery.RpmQuery.rpmvercmp(str(self.epoch()).encode(), str(other.epoch()).encode()) + if res != 0: + return res +- res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.version(), other.version()) ++ res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.version().encode(), other.version().encode()) + if res != 0: + return res +- res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.release(), other.release()) ++ res = osc.util.rpmquery.RpmQuery.rpmvercmp(self.release().encode(), other.release().encode()) + return res + + def version(self): diff --git a/012_packagequery.patch b/012_packagequery.patch new file mode 100644 index 0000000..fc9cb08 --- /dev/null +++ b/012_packagequery.patch @@ -0,0 +1,38 @@ +diff --git a/osc/util/packagequery.py b/osc/util/packagequery.py +index e430744..ac2c74c 100644 +--- a/osc/util/packagequery.py ++++ b/osc/util/packagequery.py +@@ -1,5 +1,6 @@ + + from __future__ import print_function ++from osc.util.helper import decode_it + + class PackageError(Exception): + """base class for all package related errors""" +@@ -30,11 +31,11 @@ class PackageQueries(dict): + self.__setitem__(query.name(), query) + + def __setitem__(self, name, query): +- if name != query.name(): ++ if decode_it(name) != decode_it(query.name()): + raise ValueError("key '%s' does not match " + "package query name '%s'" % (name, query.name())) + +- architecture = query.arch() ++ architecture = decode_it(query.arch()) + + if (architecture in [self.wanted_architecture, 'noarch', 'all', 'any'] + or self.wanted_architecture in self.architectureMap.get(architecture, +@@ -149,10 +150,10 @@ class PackageQueryResult: + raise NotImplementedError + 
+ def evr(self): +- evr = self.version() ++ evr = decode_it(self.version()) + + if self.release(): +- evr += "-" + self.release() ++ evr += "-" + decode_it(self.release()) + + epoch = self.epoch() + if epoch is not None and epoch != 0: diff --git a/osc.spec b/osc.spec index 3cfac20..9e03aa7 100644 --- a/osc.spec +++ b/osc.spec @@ -2,7 +2,7 @@ # when rebasing check what they are using on # http://download.opensuse.org/repositories/openSUSE:/Tools/Fedora_28/src/ # update the obsrel to match the upstream release number -%global obsrel 237.1 +%global obsrel 245.1 # osc plugin support %global osc_plugin_dir %{_prefix}/lib/osc-plugins @@ -11,19 +11,57 @@ %global obsroot %{_prefix}/lib/obs %global obs_srcsvc_dir %{obsroot}/service +# Control building as Python 3 for F30+ +%if 0%{?fedora} >= 30 +%bcond_without python3 +%else +%bcond_with python3 +%endif + +%if %{with python3} +%global __python %{__python3} +%else +%global __python %{__python2} +%endif + # Real release number %global rel 1 Name: osc -Version: 0.163.0 +Summary: Open Build Service Commander +Version: 0.164.2 # Bump the release as necessary to ensure we're one level up from upstream -Release: %{obsrel}.%{rel}%{?dist}.1 +Release: %{obsrel}.%{rel}%{?dist} License: GPLv2+ -Url: https://github.com/openSUSE/osc +URL: https://github.com/openSUSE/osc Source: %{url}/archive/%{version}/%{name}-%{version}.tar.gz -Summary: Open Build Service Commander + +# Python 3 compatibility patches pending upstream merge +# From: https://build.opensuse.org/package/show/home:mstrigl:branches:openSUSE:Tools/osc?rev=11 +Patch001: 001_helper.patch +Patch002: 002_man_page_building.patch +Patch003: 003_setup.patch +Patch004: 004_babysitter.patch +Patch005: 005_oscssl.patch +Patch006: 006_build.patch +Patch007: 007_core.patch +Patch008: 008_commandline.patch +Patch009: 009_fetch.patch +Patch010: 010_fix-decoding_in_canonname.patch +Patch011: 011_repodata.patch +Patch012: 012_packagequery.patch + BuildArch: noarch +%if %{with python3} +BuildRequires: python3-devel +BuildRequires: python3-rpm +BuildRequires: python3-urlgrabber +Requires: python3-rpm +Requires: python3-m2crypto +Requires: python3-lxml +Requires: python3-urlgrabber +%else BuildRequires: python2-devel BuildRequires: python2-rpm BuildRequires: python2-urlgrabber @@ -31,8 +69,10 @@ Requires: python2-rpm Requires: m2crypto Requires: python2-lxml Requires: python2-urlgrabber +%endif %if 0%{?fedora} || 0%{?rhel} >= 8 +Recommends: obs-build Recommends: obs-service-source_validator %else Requires: obs-service-source_validator @@ -50,17 +90,17 @@ introduction. 
%prep -%setup -q +%autosetup -p1 #fixup encoding iconv -f ISO8859-1 -t UTF-8 -o TODO.new TODO mv TODO.new TODO %build -%py2_build +%py_build %install -%py2_install +%py_install %__ln_s osc-wrapper.py %{buildroot}%{_bindir}/osc %__mkdir_p %{buildroot}%{_localstatedir}/lib/osc-plugins @@ -86,7 +126,7 @@ EOM %doc AUTHORS README TODO NEWS %license COPYING %{_bindir}/osc* -%{python2_sitelib}/* +%{python_sitelib}/osc* %{_sysconfdir}/profile.d/osc.csh %{_datadir}/bash-completion/completions/osc %dir %{_localstatedir}/lib/osc-plugins @@ -98,6 +138,11 @@ EOM %dir %{osc_plugin_dir} %changelog +* Sun Mar 24 2019 Neal Gompa - 0.164.2-245.1.1 +- Update to 0.164.2 +- Add proposed patches to build for Python 3 for Fedora 30+ +- Add Recommends for obs-build + * Fri Feb 01 2019 Fedora Release Engineering - 0.163.0-237.1.1.1 - Rebuilt for https://fedoraproject.org/wiki/Fedora_30_Mass_Rebuild diff --git a/sources b/sources index 508c4d8..9dfdc7f 100644 --- a/sources +++ b/sources @@ -1 +1 @@ -SHA512 (osc-0.163.0.tar.gz) = c79d9c85be216372045ab276ee06eac4f666d1788cfecbc840c0e432c11de7472d9b03ce5c8b687d30bfab059b62f0b12ca58c6081473dbf6cb85469279b6d84 +SHA512 (osc-0.164.2.tar.gz) = 7a4178c0ead2c42ec53ca73433ebb7d2ff346f2e42b351f3a658fbe9cff9a8eb01d1ac0e979302fb5ae79d2bbac5c85cfe3834f3c1772f979fbde6eb29c37d9e
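
For reference, a minimal usage sketch of the helper functions introduced by 001_helper.patch above. This is an illustration only, assuming osc 0.164.2 with the patch applied is installed so that osc.util.helper is importable; the expected values follow directly from the decode_it/decode_list definitions and the tests/test_helpers.py cases shown in that patch.

    from osc.util.helper import decode_it, decode_list

    # bytes are decoded via chardet when available, otherwise via the locale encoding
    print(decode_it(b'Test the decoding'))               # 'Test the decoding'

    # str input is returned unchanged
    print(decode_it('Test the decoding'))                # 'Test the decoding'

    # a mixed str/bytes list is normalized to a list of str
    print(decode_list(['Test1', b'Test2', 'Test3']))     # ['Test1', 'Test2', 'Test3']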