diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorycoderetriever.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorycoderetriever.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorycoderetriever.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorycoderetriever.py 2019-03-01 18:38:02.397178507 +0100
@@ -17,7 +17,7 @@ EXAMPLES = '''
 from ansible.module_utils.basic import *
 from gofedlib.providers.providerbuilder import ProviderBuilder
 from gofedlib.urlbuilder.builder import UrlBuilder
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import tempfile
 import tarfile
 import os
@@ -69,15 +69,15 @@ class RepositoryCodeRetriever(object):
         # TODO(jchaloup): catch exceptions: urllib2.URLError, urllib2.HTTPError
         # raise ResourceNotRetrieved instead?
         try:
-            response = urllib2.urlopen(resource_url)
-        except urllib2.URLError as err:
+            response = urllib.request.urlopen(resource_url)
+        except urllib.error.URLError as err:
             # can a user do something about it?
             msg = "Unable to retrieve resource, url = %s, err = %s" % (resource_url, err)
-            raise urllib2.URLError(msg)
-        except urllib2.HTTPError as err:
+            raise urllib.error.URLError(msg)
+        except urllib.error.HTTPError as err:
             # can a user do something about it?
             msg = "Unable to retrieve resource, url = %s, err = %s" % (resource_url, err)
-            raise urllib2.HTTPError(msg)
+            raise urllib.error.HTTPError(msg)
 
         try:
             with tempfile.NamedTemporaryFile(delete=False) as f:
@@ -126,10 +126,10 @@ def main():
     errmsg = ""
     try:
         r.retrieve()
-    except urllib2.URLError as err:
+    except urllib.error.URLError as err:
         failed = True
         errmsg = err
-    except urllib2.HTTPError as err:
+    except urllib.error.HTTPError as err:
         failed = True
         errmsg = err
     except ResourceUnableToRetrieveError as err:
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorydataextractor.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorydataextractor.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorydataextractor.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/repositorydataextractor.py 2019-03-01 18:38:02.472178398 +0100
@@ -157,8 +157,8 @@ class RepositoryDataExtractor(object):
 
         # from all branches (up to master) filter out all commits that are already covered in master branch
         if "master" in branches:
-            for branch in filter(lambda l: l != "master", branches.keys()):
-                for key in branches["master"].keys():
+            for branch in [l for l in list(branches.keys()) if l != "master"]:
+                for key in list(branches["master"].keys()):
                     branches[branch].pop(key, None)
 
         self._info = self._generateGolangProjectRepositoryInfo(branches)
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/rpmretriever.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/rpmretriever.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/rpmretriever.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/ansible/roles/gofedinfra/library/rpmretriever.py 2019-03-01 18:38:02.531178312 +0100
@@ -2,7 +2,7 @@
 
 from ansible.module_utils.basic import *
 
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import tempfile
 import os
 from gofedlib.utils import runCommand
@@ -23,15 +23,15 @@ class RpmRetriever(object):
         # TODO(jchaloup): catch exceptions: urllib2.URLError, urllib2.HTTPError
         # raise ResourceNotRetrieved instead?
         try:
-            response = urllib2.urlopen(resource_url)
-        except urllib2.URLError as err:
+            response = urllib.request.urlopen(resource_url)
+        except urllib.error.URLError as err:
             # can a user do something about it?
             msg = "Unable to retrieve resource, url = %s, err = %s" % (resource_url, err)
-            raise urllib2.URLError(msg)
-        except urllib2.HTTPError as err:
+            raise urllib.error.URLError(msg)
+        except urllib.error.HTTPError as err:
             # can a user do something about it?
             msg = "Unable to retrieve resource, url = %s, err = %s" % (resource_url, err)
-            raise urllib2.HTTPError(msg)
+            raise urllib.error.HTTPError(msg)
 
         try:
             with tempfile.NamedTemporaryFile(delete=False) as f:
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/builddepgraph.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/builddepgraph.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/builddepgraph.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/builddepgraph.py 2019-03-01 18:38:05.259174342 +0100
@@ -52,12 +52,12 @@ if __name__ == "__main__":
     dataset = LocalProjectDatasetBuilder("/home/jchaloup/Packages/etcd/fedora/etcd/etcd-5e6eb7e19d6385adfabb1f1caea03e732f9348ad", "github.com/coreos/etcd").build()
 
     graph = DatasetDependencyGraphBuilder().build(dataset, 2)
-    #print str(graph)
+    #print(str(graph))
 
     # get a subgraph
-    #print str(GraphUtils.truncateGraph(graph, ["kubernetes-devel-1.2.0-0.15.alpha6.gitf0cd09a.fc25.noarch.rpm"]))
+    #print(str(GraphUtils.truncateGraph(graph, ["kubernetes-devel-1.2.0-0.15.alpha6.gitf0cd09a.fc25.noarch.rpm"])))
 
-    print json.dumps(BasicDependencyAnalysis(graph).analyse().getResults())
+    print(json.dumps(BasicDependencyAnalysis(graph).analyse().getResults()))
 
     #DatasetBuilder("DistributionLatestBuild").\
     #    build("rawhide", packages).\
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactkeys.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactkeys.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactkeys.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactkeys.py 2019-03-01 18:38:02.602178209 +0100
@@ -78,4 +78,4 @@ if __name__ == "__main__":
         class_def = generateKeyClass(key)
         with open("gofedinfra/system/helpers/artefactkeygenerator/%s" % obj.class_filename_ext(), "w") as f:
             f.write(class_def)
-        print("gofedinfra/system/helpers/artefactkeygenerator/%s" % obj.class_filename_ext())
+        print("gofedinfra/system/helpers/artefactkeygenerator/%s" % obj.class_filename_ext())
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactreaderschema.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactreaderschema.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactreaderschema.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/artefactreaderschema.py 2019-03-01 18:38:02.625178176 +0100
@@ -59,4 +59,4 @@ if __name__ == "__main__":
         "definitions": definitions
     }
 
-    print json.dumps(schema, indent = 4)
+    print(json.dumps(schema, indent = 4))
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/classhelper.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/classhelper.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/classhelper.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/generators/classhelper.py 2019-03-01 18:38:02.611178196 +0100
@@ -5,7 +5,7 @@ class ClassHelper:
         self._generate()
 
     def _generate(self):
-        self._class_name = "".join(map(lambda i: i.capitalize(), self.key_spec["id"].split("-")))
+        self._class_name = "".join([i.capitalize() for i in self.key_spec["id"].split("-")])
        self._class_keys = '["' + '", "'.join(self.key_spec["keys"]) + '"]'
         self._class_filename_ext = "%s.py" % self.key_spec["id"].replace("-", "")
         self._class_filename = self.key_spec["id"].replace("-", "")
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scandistributionpackage/act.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scandistributionpackage/act.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scandistributionpackage/act.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scandistributionpackage/act.py 2019-03-01 18:38:02.983177654 +0100
@@ -133,7 +133,7 @@ class ScanDistributionPackageAct(MetaAct
             "distribution": self.distribution,
             "package": self.package,
             "coverage": itemsetcache.intervals(),
-            "builds": itemsetcache.items()
+            "builds": list(itemsetcache.items())
         }
 
     def _mergeItemSetInfoArtefacts(self, info1, info2, coverage):
@@ -207,7 +207,7 @@ class ScanDistributionPackageAct(MetaAct
             return {}
 
         items = {}
-        for item in filter(lambda l: l["point"] >= start and l["point"] <= end, cache["builds"]):
+        for item in [l for l in cache["builds"] if l["point"] >= start and l["point"] <= end]:
             # construct a storage request for each item
             data = {
                 "artefact": ARTEFACT_GOLANG_PROJECT_DISTRIBUTION_BUILD,
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/act.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/act.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/act.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/act.py 2019-03-01 18:38:03.109177471 +0100
@@ -197,7 +197,7 @@ class ScanUpstreamRepositoryAct(MetaAct)
             "artefact": ARTEFACT_CACHE_GOLANG_PROJECT_REPOSITORY_COMMITS,
             "repository": self.repository,
             "coverage": itemsetcache.intervals(),
-            "commits": itemsetcache.items()
+            "commits": list(itemsetcache.items())
         }
 
     def _mergeItemSetInfoArtefacts(self, info1, info2, coverage):
@@ -295,7 +295,7 @@ class ScanUpstreamRepositoryAct(MetaAct)
         return items
 
     def _truncateRepositoryInfoArtefact(self, info, branches):
-        info["branches"] = filter(lambda l: l["branch"] in branches, info["branches"])
+        info["branches"] = [l for l in info["branches"] if l["branch"] in branches]
         return info
 
     def _retrieveItemsFromCache(self, cache, info, start, end):
@@ -314,7 +314,7 @@ class ScanUpstreamRepositoryAct(MetaAct)
             return {}
 
         items = {}
-        for item in filter(lambda l: l["point"] >= start and l["point"] <= end, cache["commits"]):
+        for item in [l for l in cache["commits"] if l["point"] >= start and l["point"] <= end]:
             if self.branch != "":
                 # commit in a given branch?
                 if item["item"] not in branch_commits:
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/fakeact.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/fakeact.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/fakeact.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/acts/scanupstreamrepository/fakeact.py 2019-03-01 18:38:03.121177453 +0100
@@ -21,6 +21,6 @@ class FakeScanUpstreamRepositoryAct(Scan
             self._items = json.load(f)
 
         if self.commit != "":
-            self._items[self.commit] = self._items[self._items.keys()[-1]]
+            self._items[self.commit] = self._items[list(self._items.keys())[-1]]
 
         return True
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/core/meta/metaartefactkeygenerator.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/core/meta/metaartefactkeygenerator.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/core/meta/metaartefactkeygenerator.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/core/meta/metaartefactkeygenerator.py 2019-03-01 18:38:03.346177126 +0100
@@ -18,7 +18,7 @@ class MetaArtefactKeyGenerator:
         return re.sub('[^a-zA-Z0-9:-]', '-', key)
 
     def value2key(self, value, delimiter, key, key_order):
-        if type(value) in [type(""), type(u"")]:
+        if type(value) == type(""):
             return [self.truncateKey(value)]
 
         if type(value) != type({}):
@@ -29,7 +29,7 @@ class MetaArtefactKeyGenerator:
             if vkey not in value:
                 raise ValueError("%s key missing" % vkey)
 
-            if type(value[vkey]) != type("") and type(value[vkey]) != type(u""):
+            if type(value[vkey]) != type(""):
                 raise ValueError("Second level value is not string")
 
             keys.append(self.truncateKey(value[vkey]))
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/artefactdecomposer.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/artefactdecomposer.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/artefactdecomposer.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/artefactdecomposer.py 2019-03-01 18:38:03.446176981 +0100
@@ -166,12 +166,12 @@ class ArtefactDecomposer:
             test_classes[key] = [test]
 
         # nonempty list of classes must be the same for all parts
-        classes_len = filter(lambda l: l > 0, [len(pkg_classes.keys()), len(dep_classes.keys()), len(main_classes.keys()), len(test_classes.keys())])
+        classes_len = [l for l in [len(list(pkg_classes.keys())), len(list(dep_classes.keys())), len(list(main_classes.keys())), len(list(test_classes.keys()))] if l > 0]
         if max(classes_len) != min(classes_len):
             raise ValueError("Not every data belongs to the same set of classes")
 
         # collect common classes
-        classes_keys = filter(lambda l: l != [], [pkg_classes.keys(), dep_classes.keys(), main_classes.keys(), test_classes.keys()])
+        classes_keys = [l for l in [list(pkg_classes.keys()), list(dep_classes.keys()), list(main_classes.keys()), list(test_classes.keys())] if l != []]
         if classes_keys == []:
             raise ValueError("No prefix class detected")
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/cacheintervalmerger.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/cacheintervalmerger.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/cacheintervalmerger.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/cacheintervalmerger.py 2019-03-01 18:38:03.645176691 +0100
@@ -8,7 +8,7 @@ class CacheIntervalBreaker(object):
     def decompose(self, stored_commits, not_stored_commits):
         commits = stored_commits + not_stored_commits
         commits = sorted(commits, key = lambda commit: commit["d"])
-        commits = map(lambda l: {"c": "", "d": l["d"]} if l in not_stored_commits else l, commits)
+        commits = [{"c": "", "d": l["d"]} if l in not_stored_commits else l for l in commits]
 
         # decompose
         commits_len = len(commits)
@@ -103,13 +103,13 @@ class CacheIntervalMerger(object):
                 n_intervals[i] = {}
 
             # filter out all empty intervals
-            n_intervals = filter(lambda l: l != {}, n_intervals)
+            n_intervals = [l for l in n_intervals if l != {}]
 
             if not overlap:
                 break
 
        interval_count = len(n_intervals)
-        for i in reversed(range(1, interval_count)):
+        for i in reversed(list(range(1, interval_count))):
             if n_intervals[i] == {}:
                 continue
 
@@ -119,6 +119,6 @@ class CacheIntervalMerger(object):
                 n_intervals[i] = {}
 
         # filter out all empty intervals
-        n_intervals = filter(lambda l: l != {}, n_intervals)
+        n_intervals = [l for l in n_intervals if l != {}]
 
         return n_intervals
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/itemsetcache/itemsetcache.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/itemsetcache/itemsetcache.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/itemsetcache/itemsetcache.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/itemsetcache/itemsetcache.py 2019-03-01 18:38:03.688176628 +0100
@@ -62,7 +62,7 @@ class CoverageIntervalBreaker(object):
     def decompose(self, interval, breakpoints):
         items = interval + breakpoints
         items = sorted(items, key = lambda item: item["point"])
-        items = map(lambda l: {"item": "", "point": l["point"]} if l in breakpoints else l, items)
+        items = [{"item": "", "point": l["point"]} if l in breakpoints else l for l in items]
 
         # decompose
         items_len = len(items)
@@ -154,13 +154,13 @@ class CoverageIntervalMerger(object):
                 n_intervals[i] = {}
 
             # filter out all empty intervals
-            n_intervals = filter(lambda l: l != {}, n_intervals)
+            n_intervals = [l for l in n_intervals if l != {}]
 
             if not overlap:
                 break
 
        interval_count = len(n_intervals)
-        for i in reversed(range(1, interval_count)):
+        for i in reversed(list(range(1, interval_count))):
             if n_intervals[i] == {}:
                 continue
 
@@ -170,7 +170,7 @@ class CoverageIntervalMerger(object):
                 n_intervals[i] = {}
 
         # filter out all empty intervals
-        n_intervals = filter(lambda l: l != {}, n_intervals)
+        n_intervals = [l for l in n_intervals if l != {}]
 
         return n_intervals
 
@@ -191,8 +191,8 @@ if __name__ == "__main__":
 
     cache = ItemSetCache().addItems(intervals1, breakpoints1)
 
-    print cache.intervals()
-    print cache.items()
+    print(cache.intervals())
+    print(list(cache.items()))
 
     intervals2 = [
         {"item": "g", "point": 17},
@@ -208,8 +208,8 @@ if __name__ == "__main__":
 
     cache.addItems(intervals2, breakpoints2)
 
-    print cache.intervals()
-    print cache.items()
+    print(cache.intervals())
+    print(list(cache.items()))
 
-    print ""
-    print cache.cache()
+    print("")
+    print(cache.cache())
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/jsoncomparator.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/jsoncomparator.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/jsoncomparator.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/jsoncomparator.py 2019-03-01 18:38:03.692176622 +0100
@@ -11,7 +11,7 @@ class DirectJSONComparator:
         if left_diff != [] or right_diff != []:
             return False
 
-        for key in json1.keys():
+        for key in list(json1.keys()):
             if json1[key] != json2[key]:
                 return False
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/schema_validator.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/schema_validator.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/schema_validator.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/schema_validator.py 2019-03-01 17:35:11.440202601 +0100
@@ -13,7 +13,7 @@ class SchemaValidator:
         try:
             with open(schema_file, "r") as f:
                 schema = json.load(f)
-        except IOError as e:
+        except IOError as e:
             logging.error("Unable to load schema from %s" % schema_file)
             return False
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/utils.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/utils.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/utils.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/helpers/utils.py 2019-03-01 18:38:03.726176573 +0100
@@ -84,7 +84,7 @@ def format_output(fmt, out, fancy = Fals
 
 def normal_format_str(fmt):
     try:
-        ret = ''.join(['{} ' for num in xrange(len(fmt))])
+        ret = ''.join(['{} ' for num in range(len(fmt))])
     except NameError:
         ret = ''.join(['{} ' for num in range(len(fmt))])
     return ret[:-1] # omit space at the end of line
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/ecosnapshots/distributionsnapshotchecker.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/ecosnapshots/distributionsnapshotchecker.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/ecosnapshots/distributionsnapshotchecker.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/ecosnapshots/distributionsnapshotchecker.py 2019-03-01 18:38:03.812176448 +0100
@@ -60,8 +60,8 @@ class DistributionSnapshotChecker(object
         index = 1
         for package in snapshot.json()["builds"]:
             # scan devel and unit-tests only
-            rpms = filter(lambda l: GolangRpm(package["build"], l).provideSourceCode(), package["rpms"])
-            print(package["rpms"])
+            rpms = [l for l in package["rpms"] if GolangRpm(package["build"], l).provideSourceCode()]
+            print(package["rpms"])
 
             if rpms == []:
                 continue
@@ -71,7 +71,7 @@ class DistributionSnapshotChecker(object
                 "distribution": version,
                 "build": {
                     "name": package["build"],
-                    "rpms": map(lambda l: {"name": l}, rpms)
+                    "rpms": [{"name": l} for l in rpms]
                 }
             }
 
@@ -121,7 +121,7 @@ class DistributionSnapshotChecker(object
             except KeyError:
                 continue
 
-            distro_packages = distro_packages + DistributionSnapshot().read(data).builds().keys()
+            distro_packages = distro_packages + list(DistributionSnapshot().read(data).builds().keys())
 
         known_packages = list(set(distro_packages + custom_packages))
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasetdependencygraphbuilder.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasetdependencygraphbuilder.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasetdependencygraphbuilder.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasetdependencygraphbuilder.py 2019-03-01 18:38:03.997176178 +0100
@@ -29,7 +29,7 @@ class DatasetDependencyGraphBuilder(obje
             for edge in labels[category]:
                 a, b = edge
                 if b == "":
-                    missing = missing + list(set(map(lambda (a,b): b, labels[category][edge])))
+                    missing = missing + list(set([b for (a, b) in labels[category][edge]]))
 
         self._missing_packages = list(set(missing))
 
@@ -39,7 +39,7 @@ class DatasetDependencyGraphBuilder(obje
         # find the corresponding rpm
         if root_package != "":
             root_rpm = ""
-            for rpm_sig in dataset.parents().values():
+            for rpm_sig in list(dataset.parents().values()):
                 if Build(rpm_sig["build"]).name() == root_package:
                     root_rpm = rpm_sig["rpm"]
                     break
@@ -66,7 +66,7 @@ class DatasetDependencyGraphBuilder(obje
             for edge in labels[category]:
                 a, b = edge
                 if b == "":
-                    missing = missing + list(set(map(lambda (a,b): b, labels[category][edge])))
+                    missing = missing + list(set([b for (a, b) in labels[category][edge]]))
                 else:
                     edges = edges + labels[category][edge]
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/datasetbuilder.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/datasetbuilder.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/datasetbuilder.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/datasetbuilder.py 2019-03-01 18:38:03.962176229 +0100
@@ -67,7 +67,7 @@ class DatasetBuilder(object):
         # edges
         edges[node_name]["devel"] = []
         for dependencies in artefact["data"]["dependencies"]:
-            edges[node_name]["devel"] = edges[node_name]["devel"] + map(lambda l: (self._prefixPackage(artefact["ipprefix"], dependencies["package"]), l["name"]), dependencies["dependencies"])
+            edges[node_name]["devel"] = edges[node_name]["devel"] + [(self._prefixPackage(artefact["ipprefix"], dependencies["package"]), l["name"]) for l in dependencies["dependencies"]]
 
         # main packages
         vertices[node_name]["main"] = []
@@ -76,7 +76,7 @@ class DatasetBuilder(object):
             # dirname from filename says in which package the dependencies are required/imported
             pkg = os.path.dirname(main["filename"])
             vertices[node_name]["main"].append("%s/%s" % (artefact["ipprefix"], pkg))
-            edges[node_name]["main"] = edges[node_name]["main"] + map(lambda l: (self._prefixPackage(artefact["ipprefix"], pkg), l), main["dependencies"])
+            edges[node_name]["main"] = edges[node_name]["main"] + [(self._prefixPackage(artefact["ipprefix"], pkg), l) for l in main["dependencies"]]
             # one directory can have multiple filename import the same package
             edges[node_name]["main"] = list(set(edges[node_name]["main"]))
 
@@ -85,7 +85,7 @@ class DatasetBuilder(object):
         edges[node_name]["tests"] = []
         for test in artefact["data"]["tests"]:
             vertices[node_name]["tests"].append("%s/%s" % (artefact["ipprefix"], test["test"]))
-            edges[node_name]["tests"] = edges[node_name]["tests"] + map(lambda l: (self._prefixPackage(artefact["ipprefix"], test["test"]), l), test["dependencies"])
+            edges[node_name]["tests"] = edges[node_name]["tests"] + [(self._prefixPackage(artefact["ipprefix"], test["test"]), l) for l in test["dependencies"]]
 
         return (vertices, edges)
 
@@ -110,7 +110,7 @@ class DatasetBuilder(object):
         # edges
         edges[node_name]["devel"] = []
         for dependencies in prefix_unit["dependencies"]:
-            edges[node_name]["devel"] = edges[node_name]["devel"] + map(lambda l: (dependencies["package"], l["name"]), dependencies["dependencies"])
+            edges[node_name]["devel"] = edges[node_name]["devel"] + [(dependencies["package"], l["name"]) for l in dependencies["dependencies"]]
 
         # main packages
         vertices[node_name]["main"] = []
@@ -119,7 +119,7 @@ class DatasetBuilder(object):
             # dirname from filename says in which package the dependencies are required/imported
             pkg = os.path.dirname(main["filename"])
             vertices[node_name]["main"].append(pkg)
-            edges[node_name]["main"] = edges[node_name]["main"] + map(lambda l: (pkg, l), main["dependencies"])
+            edges[node_name]["main"] = edges[node_name]["main"] + [(pkg, l) for l in main["dependencies"]]
             # one directory can have multiple filename import the same package
             edges[node_name]["main"] = list(set(edges[node_name]["main"]))
 
@@ -128,7 +128,7 @@ class DatasetBuilder(object):
         edges[node_name]["tests"] = []
         for test in prefix_unit["tests"]:
             vertices[node_name]["tests"].append(test["test"])
-            edges[node_name]["tests"] = edges[node_name]["tests"] + map(lambda l: (test["test"], l), test["dependencies"])
+            edges[node_name]["tests"] = edges[node_name]["tests"] + [(test["test"], l) for l in test["dependencies"]]
 
         return (vertices, edges)
 
@@ -187,7 +187,7 @@ class DatasetBuilder(object):
                 target_rpm = parents[b]["rpm"]
                 edges.append((rpm, target_rpm))
             except KeyError:
-                #print "Missing node: %s" % b
+                #print("Missing node: %s" % b)
                 target_rpm = ""
 
         # labels
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/distributionlatestbuilds.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/distributionlatestbuilds.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/distributionlatestbuilds.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/graphs/datasets/distributionlatestbuilds.py 2019-03-01 18:38:03.862176375 +0100
@@ -86,7 +86,7 @@ class DistributionLatestBuildGraphDatase
             "distribution": distribution.version(),
             "build": {
                 "name": builds[pkg]["build"],
-                "rpms": map(lambda l: {"name": l}, builds[pkg]["rpms"])
+                "rpms": [{"name": l} for l in builds[pkg]["rpms"]]
             }
         }
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/checker.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/checker.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/checker.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/checker.py 2019-03-01 18:38:04.137175975 +0100
@@ -177,11 +177,11 @@ class SnapshotChecker(object):
 
         for ipprefix in ipprefixes:
             if ipprefix not in providers:
-                print "%sUnable to find provider for '%s' ipprefix%s" % (WHITE, ipprefix, ENDC)
+                print("%sUnable to find provider for '%s' ipprefix%s" % (WHITE, ipprefix, ENDC))
                 continue
 
             if ipprefix not in rpms:
-                print "%sUnable to find ipprefix2rpm mapping '%s' ipprefix%s" % (WHITE, ipprefix, ENDC)
+                print("%sUnable to find ipprefix2rpm mapping '%s' ipprefix%s" % (WHITE, ipprefix, ENDC))
                 continue
 
             upstream_commit = self._getCommitDate(providers[ipprefix], upstream[ipprefix])
@@ -207,10 +207,10 @@ class SnapshotChecker(object):
                 not_covered = self._checkPackageCoverage(product, distribution, rpms[ipprefix]["build"], rpms[ipprefix]["rpm"], ipprefix, ipprefixes[ipprefix])
             except ActFailedError:
                 logging.error("golang-project-packages artefact for '%s' not retrieved" % ipprefix)
-                print "%s: %scoverage unknown%s" % (comparison, RED, ENDC)
+                print("%s: %scoverage unknown%s" % (comparison, RED, ENDC))
                 continue
 
             if not_covered != []:
-                print "%s: %snot covered: %s%s" % (comparison, RED, not_covered, ENDC)
+                print("%s: %snot covered: %s%s" % (comparison, RED, not_covered, ENDC))
             else:
-                print comparison
+                print(comparison)
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/reconstructor.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/reconstructor.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/reconstructor.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/models/snapshots/reconstructor.py 2019-03-01 18:38:04.092176040 +0100
@@ -61,7 +61,7 @@ class SnapshotReconstructor(object):
 
     def _findYoungestCommits(self, commits):
         # sort commits
-        commits = map(lambda l: {"c": l, "d": commits[l]["cdate"]}, commits)
+        commits = [{"c": l, "d": commits[l]["cdate"]} for l in commits]
         commits = sorted(commits, key = lambda commit: commit["d"])
 
         return commits[-1]
@@ -107,7 +107,7 @@ class SnapshotReconstructor(object):
                 potential_commits[commit] = branch["commits"][commit]
 
         if potential_commits:
-            sorted_commits = sorted(potential_commits.items(), key=operator.itemgetter(1))
+            sorted_commits = sorted(list(potential_commits.items()), key=operator.itemgetter(1))
             c, _ = sorted_commits[-1]
             return c
 
@@ -117,7 +117,7 @@ class SnapshotReconstructor(object):
         dependencies = list(set(dependencies))
         # normalize paths
         normalizer = ImportPathNormalizer()
-        dependencies = map(lambda l: normalizer.normalize(l), dependencies)
+        dependencies = [normalizer.normalize(l) for l in dependencies]
 
         decomposer = ImportPathsDecomposerBuilder().buildLocalDecomposer()
         decomposer.decompose(dependencies)
@@ -172,7 +172,7 @@ class SnapshotReconstructor(object):
             # update packages to scan
             next_projects[prefix] = {
                 "ipprefix": prefix,
-                "paths": map(lambda l: str(l), prefix_classes[prefix]),
+                "paths": [str(l) for l in prefix_classes[prefix]],
                 "provider": provider,
                 "commit": closest_commit,
                 "provider_prefix": provider_prefix
@@ -200,7 +200,7 @@ class SnapshotReconstructor(object):
         # collect dependencies
         direct_dependencies = []
         for package in packages_artefact["data"]["dependencies"]:
-            direct_dependencies = direct_dependencies + map(lambda l: l["name"], package["dependencies"])
+            direct_dependencies = direct_dependencies + [l["name"] for l in package["dependencies"]]
 
         if mains != []:
             paths = {}
@@ -214,7 +214,7 @@ class SnapshotReconstructor(object):
                 direct_dependencies = direct_dependencies + paths[main]
 
         if tests:
-            for dependencies in map(lambda l: l["dependencies"], packages_artefact["data"]["tests"]):
+            for dependencies in [l["dependencies"] for l in packages_artefact["data"]["tests"]]:
                 direct_dependencies = direct_dependencies + dependencies
 
         # remove duplicates
@@ -252,7 +252,7 @@ class SnapshotReconstructor(object):
         subgraph = GraphUtils.truncateGraph(graph, self.unscanned_projects[prefix]["paths"])
 
         # get dependencies from the subgraph
-        package_nodes = filter(lambda l: l.startswith(self.unscanned_projects[prefix]["ipprefix"]), subgraph.nodes())
+        package_nodes = [l for l in subgraph.nodes() if l.startswith(self.unscanned_projects[prefix]["ipprefix"])]
         label_edges = dataset.getLabelEdges()
         for node in package_nodes:
             # package that does not import any other package has no edge -> the label_edges[node] does not exist then
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/distributionpackagebuildsextractor/extractor.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/distributionpackagebuildsextractor/extractor.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/distributionpackagebuildsextractor/extractor.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/distributionpackagebuildsextractor/extractor.py 2019-03-01 18:38:04.227175844 +0100
@@ -104,7 +104,7 @@ class DistributionPackageBuildsExtractor
         artefact["package"] = self.package
         artefact["product"] = self.product
         artefact["distribution"] = self.distribution
-        artefact["builds"] = self.builds.keys()
+        artefact["builds"] = list(self.builds.keys())
 
         start_ts = self.end_ts
         end_ts = self.start_ts
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/repositorydataextractor/extractor.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/repositorydataextractor/extractor.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/repositorydataextractor/extractor.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/repositorydataextractor/extractor.py 2019-03-01 18:38:04.450175519 +0100
@@ -123,7 +123,7 @@ class RepositoryDataExtractor(MetaProces
 
         # from all branches (up to master) filter out all commits that are already covered in master branch
         if "master" in branches:
-            for branch in filter(lambda l: l != "master", branches.keys()):
+            for branch in [l for l in list(branches.keys()) if l != "master"]:
                 branches[branch] = list(set(branches[branch]) - set(branches["master"]))
 
         # TODO(jchaloup): move validation to unit-tests
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/simplefilestorage/artefactdriver.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/simplefilestorage/artefactdriver.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/simplefilestorage/artefactdriver.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/simplefilestorage/artefactdriver.py 2019-03-01 17:58:57.877895472 +0100
@@ -1,4 +1,5 @@
 import logging
+from functools import reduce
 logger = logging.getLogger("simple_file_storage")
 
 from infra.system.core.meta.metaartefactdriver import MetaArtefactDriver
@@ -61,7 +62,7 @@ class ArtefactDriver(object):
         #except IOError as e:
 
         # TODO(jchaloup): check if the file exists, if so, lock it?
-        with file(os.path.join(data_path, "data.json"), "w") as f:
+        with open(os.path.join(data_path, "data.json"), "w") as f:
             json.dump(data, f)
         # it is okay to ignore IOError exception
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/specdataextractor/SpecParser.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/specdataextractor/SpecParser.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/specdataextractor/SpecParser.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/gofedinfra/system/plugins/specdataextractor/SpecParser.py 2019-03-01 18:38:04.780175039 +0100
@@ -85,7 +85,7 @@ class SpecParser:
         # Otherwise return None
         devel = ""
         devel_counter = 0
-        subpkg_keys = self.subpackages.keys()
+        subpkg_keys = list(self.subpackages.keys())
         for key in subpkg_keys:
             if key.endswith('devel'):
                 devel = key
@@ -145,7 +145,7 @@ class SpecParser:
         return self.changelogs[0]
 
     def getSubpackages(self):
-        return self.subpackages.keys()
+        return list(self.subpackages.keys())
 
     def getProvides(self):
         if self.subpackages == {}:
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/runscan.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/runscan.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/runscan.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/runscan.py 2019-03-01 18:38:05.283174307 +0100
@@ -47,7 +47,7 @@ if __name__ == "__main__":
             rpms.append(rpm_obj)
 
         if rpms == []:
-            print "List of rpms empty\n"
+            print("List of rpms empty\n")
             continue
 
         data = {
@@ -59,26 +59,26 @@ if __name__ == "__main__":
             }
         }
 
-        print data
+        print(data)
 
         try:
-            #print "Setting:"
+            #print("Setting:")
            if not act.setData(data):
-                print "setData Error: %s\n" % pkg
+                print("setData Error: %s\n" % pkg)
 
-            #print "Executing:"
+            #print("Executing:")
            if not act.execute():
-                print "execute Error: %s\n" % pkg
+                print("execute Error: %s\n" % pkg)
         except:
             exc_info = sys.exc_info()
             traceback.print_exception(*exc_info)
             del exc_info
 
-        print ""
+        print("")
 
-        #print "Getting:"
+        #print("Getting:")
         #act.getData()
-        #print json.dumps(act.getData())
+        #print(json.dumps(act.getData()))
         #break
     # for each build get a list of devel subpackages (make asumption: pkg-devel.noarch.rpm)
     #rpms = session.getLatestRPMS("rawhide", package="etcd")
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testdriver.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testdriver.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testdriver.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testdriver.py 2019-03-01 17:35:11.443202597 +0100
@@ -15,4 +15,4 @@ if __name__ == "__main__":
 
     driver = EtcdFactory().build(artefacts.ARTEFACT_GOLANG_PROJECT_INFO_FEDORA)
     driver.store(data)
-    #print driver.retrieve(data)
+    #print(driver.retrieve(data))
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testff.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testff.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testff.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testff.py 2019-03-01 18:38:05.297174286 +0100
@@ -13,4 +13,4 @@ data = {
 }
 
 
-print json.dumps(f.call(data))
+print(json.dumps(f.call(data)))
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testplugin.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testplugin.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testplugin.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testplugin.py 2019-03-01 18:38:05.306174273 +0100
@@ -19,21 +19,21 @@ def runRepositoryDataExtractor():
 
     extractor.execute()
     extractor.getData()
-    #print json.dumps(act.getData())
+    #print(json.dumps(act.getData()))
 
 def runDistributionPackageBuildsExtractor():
     extractor = DistributionPackageBuildsExtractor()
-    print extractor.setData({
+    print(extractor.setData({
         #"package": "golang-bitbucket-ww-goautoneg",
         "package": "etcd",
         "product": "Fedora",
         "distribution": "f24",
         "start_timestamp": 1400131190,
         "end_timestamp": 1460131190
-    })
+    }))
 
-    print extractor.execute()
-    print json.dumps(extractor.getData())
+    print(extractor.execute())
+    print(json.dumps(extractor.getData()))
 
 if __name__ == "__main__":
 
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/test.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/test.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/test.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/test.py 2019-03-01 18:38:05.332174235 +0100
@@ -101,11 +101,11 @@ data = {
     "exported_api_2": exported_api2
 }
 
-#print json.dumps(exported_api2)
+#print(json.dumps(exported_api2))
 #exit(1)
 
 ff = FunctionFactory()
 data = ff.bake("goapidiff").call(data)
 ff.bake("etcdstoragewriter").call(data)
 
-print(json.dumps(data))
+print(json.dumps(data))
diff -up infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testsnapshot.py.bak infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testsnapshot.py
--- infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testsnapshot.py.bak 2018-04-01 15:45:07.000000000 +0200
+++ infra-8bf8ce89490dcd0a44bfa9b09a24fd454c20f6b9/testsnapshot.py 2019-03-01 18:38:05.361174193 +0100
@@ -32,10 +32,10 @@ def test_reconstructor():
     snapshot = SnapshotReconstructor().reconstruct(repository, commit, ipprefix, mains = [], tests=True).snapshot()
 
 
-    print ""
-    print json.dumps(snapshot.Godeps())
-    print ""
-    print snapshot.GLOGFILE()
+    print("")
+    print(json.dumps(snapshot.Godeps()))
+    print("")
+    print(snapshot.GLOGFILE())
 
 if __name__ == "__main__":
 
@@ -47,7 +47,7 @@ if __name__ == "__main__":
     s2 = DistributionSnapshot().load("/home/jchaloup/Projects/gofed/infra/snapshot2.json")
 
     data = s2.compare(s1)
-    print data["new_rpms"]
+    print(data["new_rpms"])
 
     exit(1)
     snapshot = DistributionSnapshot("rawhide", "1.5")
@@ -60,11 +60,11 @@ if __name__ == "__main__":
         try:
             data = kojiclient.getLatestRPMS("rawhide", package)
         except KeyError as e:
-            print e
+            print(e)
             continue
         snapshot.setRpms(package, data["rpms"])
 
-    print json.dumps(snapshot.json())
+    print(json.dumps(snapshot.json()))
 
     exit(1)
 
@@ -88,7 +88,7 @@ if __name__ == "__main__":
 
     snapshot = SnapshotReconstructor().reconstruct(repository, commit, ipprefix, mains = ["main.go", "etcdctl/main.go"], tests=True).snapshot()
 
-    print ""
-    print json.dumps(snapshot.Godeps())
-    print ""
-    print snapshot.GLOGFILE()
+    print("")
+    print(json.dumps(snapshot.Godeps()))
+    print("")
+    print(snapshot.GLOGFILE())
This is an attempt to convert the code to Python 3 so it can be used on F30/F31, which have obsoleted Python 2.
I have only tested gofed repo2spec on F29, where it works fine.
Signed-off-by: Robert-André Mauchin <zebob.m@gmail.com>
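
For reference, a minimal standalone sketch of the urllib migration pattern applied above (fetch() and the RuntimeError wrapper are illustrative, not code from the repository): in Python 3, urllib2.urlopen() lives in urllib.request and the exception classes in urllib.error. Since urllib.error.HTTPError is a subclass of URLError, it is only reachable when handled before the URLError clause, and it cannot be re-raised from a message alone (its constructor takes url, code, msg, hdrs, fp), hence the generic wrapper here:

import urllib.request
import urllib.error

def fetch(resource_url):
    # urllib2.urlopen() -> urllib.request.urlopen()
    try:
        return urllib.request.urlopen(resource_url).read()
    except urllib.error.HTTPError as err:
        # HTTPError subclasses URLError, so handle it first;
        # HTTPError(msg) alone is not constructible, so wrap the error.
        raise RuntimeError("Unable to retrieve resource, url = %s, err = %s" % (resource_url, err))
    except urllib.error.URLError as err:
        raise RuntimeError("Unable to retrieve resource, url = %s, err = %s" % (resource_url, err))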
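
Likewise, a small self-contained sketch (with made-up sample data) of the two patterns that recur throughout the patch: map()/filter() becoming lazy iterators, and PEP 3113 removing tuple parameters from lambdas:

# map() and filter() return iterators in Python 3, so results that are
# indexed, reused, or serialized need list() -- or a comprehension.
edges = [("a", "x"), ("b", "y"), ("c", "z")]  # made-up sample data

# Python 2 allowed map(lambda (a, b): b, edges); PEP 3113 removed that
# syntax, and a two-argument lambda a, b: b raises TypeError under
# map(), so unpack inside a comprehension instead:
targets = [b for (a, b) in edges]
assert targets == ["x", "y", "z"]

# filter(lambda l: l != "a", names) becomes:
names = ["a", "b", "c"]
assert [l for l in names if l != "a"] == ["b", "c"]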