Change-Id: I28f49a454df3be7c3275c80465c7a91e9172f241
Reviewed-on: http://photon-jenkins.eng.vmware.com:8082/4413
Tested-by: gerrit-photon <photon-checkins@vmware.com>
Reviewed-by: Xiaolin Li <xiaolinl@vmware.com>
... | ... |
@@ -528,7 +528,7 @@ check-docker-service: |
528 | 528 |
@docker ps >/dev/null 2>&1 || { echo "Docker service is not running. Aborting." >&2; exit 1; } |
529 | 529 |
|
530 | 530 |
check-docker-py: |
531 |
- @python -c "import docker; assert docker.__version__ == '$(PHOTON_DOCKER_PY_VER)'" >/dev/null 2>&1 || { echo "Error: Python package docker-py 2.3.0 not installed.\nPlease use: pip install docker==2.3.0" >&2; exit 1; } |
|
531 |
+ @python3 -c "import docker; assert docker.__version__ == '$(PHOTON_DOCKER_PY_VER)'" >/dev/null 2>&1 || { echo "Error: Python3 package docker $(PHOTON_DOCKER_PY_VER) not installed.\nPlease use: pip3 install docker==$(PHOTON_DOCKER_PY_VER)" >&2; exit 1; } |
|
532 | 532 |
|
533 | 533 |
check-bison: |
534 | 534 |
@command -v bison >/dev/null 2>&1 || { echo "Package bison not installed. Aborting." >&2; exit 1; } |
... | ... |
@@ -1,47 +1,47 @@ |
1 |
+import os.path |
|
1 | 2 |
from Logger import Logger |
2 | 3 |
from CommandUtils import CommandUtils |
3 | 4 |
from constants import constants |
4 |
-import os.path |
|
5 | 5 |
|
6 | 6 |
class ChrootUtils(object): |
7 |
- |
|
8 |
- def __init__(self,logName=None,logPath=None): |
|
7 |
+ |
|
8 |
+ def __init__(self, logName=None, logPath=None): |
|
9 | 9 |
if logName is None: |
10 | 10 |
logName = "ChrootUtils" |
11 | 11 |
if logPath is None: |
12 | 12 |
logPath = constants.logPath |
13 |
- self.logName=logName |
|
14 |
- self.logPath=logPath |
|
15 |
- self.logger=Logger.getLogger(logName,logPath) |
|
16 |
- |
|
17 |
- def createChroot(self,chrootName): |
|
18 |
- chrootID=constants.buildRootPath+"/"+chrootName |
|
13 |
+ self.logName = logName |
|
14 |
+ self.logPath = logPath |
|
15 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
16 |
+ |
|
17 |
+ def createChroot(self, chrootName): |
|
18 |
+ chrootID = constants.buildRootPath + "/" + chrootName |
|
19 | 19 |
if os.path.isdir(chrootID): |
20 | 20 |
if not self.destroyChroot(chrootID): |
21 |
- self.logger.error("Given chroot "+chrootID+" is already exists. unable to destroy it ") |
|
22 |
- return False,None |
|
21 |
+ self.logger.error("Given chroot " + chrootID + |
|
22 |
+ " already exists; unable to destroy it") |
|
23 |
+ return False, None |
|
23 | 24 |
# need to add timeout for this step |
24 | 25 |
# http://stackoverflow.com/questions/1191374/subprocess-with-timeout |
25 |
- cmdUtils=CommandUtils() |
|
26 |
- returnVal=cmdUtils.runCommandInShell("mkdir -p "+chrootID) |
|
26 |
+ cmdUtils = CommandUtils() |
|
27 |
+ returnVal = cmdUtils.runCommandInShell("mkdir -p " + chrootID) |
|
27 | 28 |
if not returnVal: |
28 |
- self.logger.error("Unable to create chroot:"+ chrootID +".Unknown error.") |
|
29 |
- return False,None |
|
30 |
- return True,chrootID |
|
31 |
- |
|
32 |
- def destroyChroot(self,chrootID): |
|
29 |
+ self.logger.error("Unable to create chroot:" + chrootID + ".Unknown error.") |
|
30 |
+ return False, None |
|
31 |
+ return True, chrootID |
|
32 |
+ |
|
33 |
+ def destroyChroot(self, chrootID): |
|
33 | 34 |
# need to add timeout for this step |
34 | 35 |
# http://stackoverflow.com/questions/1191374/subprocess-with-timeout |
35 |
- cmdUtils=CommandUtils() |
|
36 |
- returnVal=cmdUtils.runCommandInShell("./clean-up-chroot.py "+chrootID) |
|
36 |
+ cmdUtils = CommandUtils() |
|
37 |
+ returnVal = cmdUtils.runCommandInShell("./clean-up-chroot.py " + chrootID) |
|
37 | 38 |
if not returnVal: |
38 |
- self.logger.error("Unable to destroy chroot:"+ chrootID +".Unknown error.") |
|
39 |
+ self.logger.error("Unable to destroy chroot:" + chrootID + ".Unknown error.") |
|
39 | 40 |
return False |
40 |
- |
|
41 |
- returnVal=cmdUtils.runCommandInShell("rm -rf "+chrootID) |
|
41 |
+ |
|
42 |
+ returnVal = cmdUtils.runCommandInShell("rm -rf " + chrootID) |
|
42 | 43 |
if not returnVal: |
43 |
- self.logger.error("Unable to destroy chroot:"+ chrootID +".Unknown error.") |
|
44 |
+ self.logger.error("Unable to destroy chroot:" + chrootID + ".Unknown error.") |
|
44 | 45 |
return False |
45 |
- self.logger.info("Successfully destroyed chroot:"+chrootID) |
|
46 |
+ self.logger.info("Successfully destroyed chroot:" + chrootID) |
|
46 | 47 |
return True |
47 |
- |
... | ... |
@@ -5,9 +5,11 @@ class CommandUtils(object): |
5 | 5 |
def __init__(self): |
6 | 6 |
self.findBinary = "find" |
7 | 7 |
|
8 |
- def findFile (self, filename, sourcePath): |
|
9 |
- process = subprocess.Popen([self.findBinary, "-L", sourcePath, "-name", filename, "-not", "-type", "d"], stdout=subprocess.PIPE) |
|
10 |
- # We don't check the return val here because find could return 1 but still be able to find |
|
8 |
+ def findFile(self, filename, sourcePath): |
|
9 |
+ process = subprocess.Popen([self.findBinary, "-L", sourcePath, "-name", filename, |
|
10 |
+ "-not", "-type", "d"], stdout=subprocess.PIPE) |
|
11 |
+ # We don't check the return val here because find could return 1 but still be |
|
12 |
+ # able to find |
|
11 | 13 |
# the result. We shouldn't blindly return None without even checking the result. |
12 | 14 |
# The reason we want to suppress this is because for built RPMs, we first copy it to |
13 | 15 |
# the location with a random name and move it to the real name. find will complain our |
... | ... |
@@ -17,30 +19,30 @@ class CommandUtils(object): |
17 | 17 |
|
18 | 18 |
#if returnVal != 0: |
19 | 19 |
# return None |
20 |
- result=process.communicate()[0] |
|
20 |
+ result = process.communicate()[0] |
|
21 | 21 |
if result is None: |
22 | 22 |
return None |
23 |
- return result.split() |
|
23 |
+ return result.decode().split() |
|
24 | 24 |
|
25 |
- def runCommandInShell(self,cmd,logfilePath=None,chrootCmd=None): |
|
25 |
+ @staticmethod |
|
26 |
+ def runCommandInShell(cmd, logfilePath=None, chrootCmd=None): |
|
26 | 27 |
if chrootCmd is not None: |
27 |
- cmd = chrootCmd+" "+cmd |
|
28 |
+ cmd = chrootCmd + " " + cmd |
|
28 | 29 |
if logfilePath is None: |
29 |
- logfilePath=os.devnull |
|
30 |
- logfile=open(logfilePath,"w") |
|
31 |
- process = subprocess.Popen("%s" %cmd,shell=True,stdout=logfile,stderr=logfile) |
|
30 |
+ logfilePath = os.devnull |
|
31 |
+ logfile = open(logfilePath, "w") |
|
32 |
+ process = subprocess.Popen("%s" %cmd, shell=True, stdout=logfile, stderr=logfile) |
|
32 | 33 |
retval = process.wait() |
33 | 34 |
logfile.close() |
34 |
- if retval==0: |
|
35 |
+ if retval == 0: |
|
35 | 36 |
return True |
36 | 37 |
return False |
37 |
- |
|
38 |
- def runCommandInShell2(self,cmd,chrootCmd=None): |
|
38 |
+ @staticmethod |
|
39 |
+ def runCommandInShell2(cmd, chrootCmd=None): |
|
39 | 40 |
if chrootCmd is not None: |
40 |
- cmd = chrootCmd+" "+cmd |
|
41 |
- process = subprocess.Popen("%s" %cmd,shell=True,stdout=subprocess.PIPE) |
|
41 |
+ cmd = chrootCmd + " " + cmd |
|
42 |
+ process = subprocess.Popen("%s" %cmd, shell=True, stdout=subprocess.PIPE) |
|
42 | 43 |
retval = process.wait() |
43 | 44 |
if retval != 0: |
44 | 45 |
return None |
45 | 46 |
return process.communicate()[0] |
46 |
- |
... | ... |
@@ -1,22 +1,22 @@ |
1 |
-import logging |
|
2 | 1 |
import os |
2 |
+import logging |
|
3 | 3 |
|
4 | 4 |
class Logger(object): |
5 | 5 |
@staticmethod |
6 |
- def getLogger (mymodule, logpath=None, resetFile=False): |
|
7 |
- logfile=mymodule+".log" |
|
6 |
+ def getLogger(mymodule, logpath=None, resetFile=False): |
|
7 |
+ logfile = mymodule + ".log" |
|
8 | 8 |
if logpath is not None: |
9 | 9 |
if not os.path.isdir(logpath): |
10 | 10 |
os.makedirs(logpath) |
11 |
- logfile=logpath+"/"+logfile |
|
11 |
+ logfile = logpath + "/" + logfile |
|
12 | 12 |
if resetFile: |
13 | 13 |
open(logfile, 'w').close() |
14 |
- logger=logging.getLogger(mymodule) |
|
14 |
+ logger = logging.getLogger(mymodule) |
|
15 | 15 |
if len(logger.handlers) == 0: |
16 | 16 |
#creating file handler |
17 |
- fhandler=logging.FileHandler(logfile) |
|
17 |
+ fhandler = logging.FileHandler(logfile) |
|
18 | 18 |
# create console handler |
19 |
- ch=logging.StreamHandler() |
|
19 |
+ ch = logging.StreamHandler() |
|
20 | 20 |
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') |
21 | 21 |
# add formatter to handler |
22 | 22 |
fhandler.setFormatter(formatter) |
... | ... |
@@ -25,15 +25,15 @@ class Logger(object): |
25 | 25 |
logger.addHandler(ch) |
26 | 26 |
logger.addHandler(fhandler) |
27 | 27 |
logger.setLevel(logging.DEBUG) |
28 |
- logger.info("--------------------------------------------------------------------------") |
|
28 |
+ logger.info("-" * 75) |
|
29 | 29 |
logger.info("Starting Log") |
30 |
- logger.info("--------------------------------------------------------------------------") |
|
30 |
+ logger.info("-" * 75) |
|
31 | 31 |
return logger |
32 | 32 |
|
33 | 33 |
if __name__ == "__main__": |
34 | 34 |
#Logger.getLogger("my module") |
35 |
- t1 = Logger.getLogger("my module") |
|
35 |
+ t1 = Logger.getLogger("my module") |
|
36 | 36 |
t1.info("test1") |
37 |
- t2 = Logger.getLogger("my module") |
|
37 |
+ t2 = Logger.getLogger("my module") |
|
38 | 38 |
t2.info("test2") |
39 | 39 |
t1.info("test3") |
... | ... |
@@ -1,36 +1,36 @@ |
1 | 1 |
import os.path |
2 | 2 |
|
3 | 3 |
class MiscUtils(object): |
4 |
- |
|
4 |
+ |
|
5 | 5 |
@staticmethod |
6 |
- def isOutdated(listInputFiles,listOutputFiles): |
|
7 |
- thresholdTimeStamp=None |
|
6 |
+ def isOutdated(listInputFiles, listOutputFiles): |
|
7 |
+ thresholdTimeStamp = None |
|
8 | 8 |
if len(listInputFiles) == 0: |
9 | 9 |
return False |
10 | 10 |
if len(listOutputFiles) == 0: |
11 | 11 |
return True |
12 | 12 |
for f in listOutputFiles: |
13 |
- t=os.path.getmtime(f) |
|
13 |
+ t = os.path.getmtime(f) |
|
14 | 14 |
if thresholdTimeStamp is None: |
15 | 15 |
thresholdTimeStamp = t |
16 | 16 |
if t < thresholdTimeStamp: |
17 |
- thresholdTimeStamp = t |
|
17 |
+ thresholdTimeStamp = t |
|
18 | 18 |
for f in listInputFiles: |
19 |
- t=os.path.getmtime(f) |
|
19 |
+ t = os.path.getmtime(f) |
|
20 | 20 |
if t > thresholdTimeStamp: |
21 | 21 |
return True |
22 | 22 |
return False |
23 |
- |
|
23 |
+ |
|
24 | 24 |
@staticmethod |
25 |
- def getListSpecFiles(listSpecFiles,path): |
|
25 |
+ def getListSpecFiles(listSpecFiles, path): |
|
26 | 26 |
for dirEntry in os.listdir(path): |
27 | 27 |
dirEntryPath = os.path.join(path, dirEntry) |
28 | 28 |
if os.path.isfile(dirEntryPath) and dirEntryPath.endswith(".spec"): |
29 | 29 |
listSpecFiles.append(dirEntryPath) |
30 | 30 |
elif os.path.isdir(dirEntryPath): |
31 |
- MiscUtils.getListSpecFiles(listSpecFiles,dirEntryPath) |
|
32 |
- |
|
33 |
-if __name__=="__main__": |
|
34 |
- listInputFiles=["SpecParser.py","Logger.py"] |
|
35 |
- listOutputFiles=["builder.py"] |
|
36 |
- print MiscUtils.isOutdated(listInputFiles, listOutputFiles) |
|
37 | 31 |
\ No newline at end of file |
32 |
+ MiscUtils.getListSpecFiles(listSpecFiles, dirEntryPath) |
|
33 |
+ |
|
34 |
+if __name__ == "__main__": |
|
35 |
+ listInputFiles = ["SpecParser.py", "Logger.py"] |
|
36 |
+ listOutputFiles = ["builder.py"] |
|
37 |
+ print(MiscUtils.isOutdated(listInputFiles, listOutputFiles)) |
... | ... |
@@ -1,204 +1,205 @@ |
1 |
+import copy |
|
1 | 2 |
from Logger import Logger |
2 | 3 |
from constants import constants |
3 |
-from sets import Set |
|
4 |
-import copy |
|
5 | 4 |
from SpecData import SPECS |
6 | 5 |
|
7 |
- |
|
8 |
- |
|
9 | 6 |
def removeDuplicateEntriesInList(myList): |
10 |
- myListCopy=[] |
|
7 |
+ myListCopy = [] |
|
11 | 8 |
for p in myList: |
12 | 9 |
if p not in myListCopy: |
13 | 10 |
myListCopy.append(p) |
14 | 11 |
return myListCopy |
15 |
- |
|
12 |
+ |
|
16 | 13 |
class PackageBuildDataGenerator(object): |
17 |
- |
|
18 |
- cycleCount=0 |
|
19 |
- |
|
20 |
- def __init__(self,logName=None,logPath=None): |
|
14 |
+ |
|
15 |
+ cycleCount = 0 |
|
16 |
+ |
|
17 |
+ def __init__(self, logName=None, logPath=None): |
|
21 | 18 |
if logName is None: |
22 | 19 |
logName = "PackageBuildDataGenerator" |
23 | 20 |
if logPath is None: |
24 | 21 |
logPath = constants.logPath |
25 |
- self.logName=logName |
|
26 |
- self.logPath=logPath |
|
27 |
- self.logger=Logger.getLogger(logName,logPath) |
|
28 |
- self.__mapCyclesToPackageList={} |
|
29 |
- self.__mapPackageToCycle={} |
|
30 |
- self.__buildDependencyGraph={} |
|
31 |
- self.__runTimeDependencyGraph={} |
|
32 |
- self.__sortedPackageList=[] |
|
33 |
- self.__sortedBuildDependencyGraph={} |
|
34 |
- |
|
35 |
- def getPackageBuildData(self,listPackages): |
|
22 |
+ self.logName = logName |
|
23 |
+ self.logPath = logPath |
|
24 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
25 |
+ self.__mapCyclesToPackageList = {} |
|
26 |
+ self.__mapPackageToCycle = {} |
|
27 |
+ self.__buildDependencyGraph = {} |
|
28 |
+ self.__runTimeDependencyGraph = {} |
|
29 |
+ self.__sortedPackageList = [] |
|
30 |
+ self.__sortedBuildDependencyGraph = {} |
|
31 |
+ |
|
32 |
+ def getPackageBuildData(self, listPackages): |
|
36 | 33 |
self.__readDependencyGraphAndCyclesForGivenPackages(listPackages) |
37 | 34 |
self.__getSortedBuildOrderListForGivenPackages(listPackages) |
38 | 35 |
return self.__mapCyclesToPackageList, self.__mapPackageToCycle, self.__sortedPackageList |
39 | 36 |
|
40 |
- #todo |
|
41 |
- def findCompleteListOfPackagesRequiredToBuildGivenPackages(self,listPackages): |
|
42 |
- return self.__buildDependencyGraph.keys() |
|
43 |
- |
|
44 |
- def createSortListForPkg(self,pkg): |
|
45 |
- runTimeDepPkgList=self.__runTimeDependencyGraph[pkg] |
|
37 |
+ #todo |
|
38 |
+ def findCompleteListOfPackagesRequiredToBuildGivenPackages(self, listPackages): |
|
39 |
+ return list(self.__buildDependencyGraph.keys()) |
|
40 |
+ |
|
41 |
+ def createSortListForPkg(self, pkg): |
|
42 |
+ runTimeDepPkgList = self.__runTimeDependencyGraph[pkg][:] |
|
46 | 43 |
runTimeDepPkgList.append(pkg) |
47 |
- sortListForPkg=[] |
|
48 |
- |
|
44 |
+ sortListForPkg = [] |
|
45 |
+ |
|
49 | 46 |
for p in runTimeDepPkgList: |
50 |
- basePkg=SPECS.getData().getSpecName(p) |
|
47 |
+ basePkg = SPECS.getData().getSpecName(p) |
|
51 | 48 |
for bPkg in self.__sortedBuildDependencyGraph[basePkg]: |
52 | 49 |
if bPkg not in sortListForPkg: |
53 | 50 |
sortListForPkg.append(bPkg) |
54 |
- |
|
51 |
+ |
|
55 | 52 |
return sortListForPkg |
56 |
- |
|
57 |
- def getCircularDependentPackages(self,pkg): |
|
58 |
- circularDependentPackages=[] |
|
59 |
- if self.__mapPackageToCycle.has_key(pkg): |
|
60 |
- circularDependentPackages.extend(self.__mapCyclesToPackageList[self.__mapPackageToCycle[pkg]]) |
|
53 |
+ |
|
54 |
+ def getCircularDependentPackages(self, pkg): |
|
55 |
+ circularDependentPackages = [] |
|
56 |
+ if pkg in self.__mapPackageToCycle: |
|
57 |
+ circularDependentPackages.extend( |
|
58 |
+ self.__mapCyclesToPackageList[self.__mapPackageToCycle[pkg]]) |
|
61 | 59 |
circularDependentPackages.remove(pkg) |
62 | 60 |
return circularDependentPackages |
63 |
- |
|
64 |
- def __getSortedBuildOrderListForGivenPackages(self,listPackages): |
|
65 |
- |
|
66 |
- alreadyProcessedPackages=[] |
|
67 |
- sortedList=[] |
|
68 |
- completeListPackagesToBuild=self.findCompleteListOfPackagesRequiredToBuildGivenPackages(listPackages) |
|
61 |
+ |
|
62 |
+ def __getSortedBuildOrderListForGivenPackages(self, listPackages): |
|
63 |
+ |
|
64 |
+ alreadyProcessedPackages = [] |
|
65 |
+ sortedList = [] |
|
66 |
+ completeListPackagesToBuild = self.findCompleteListOfPackagesRequiredToBuildGivenPackages( |
|
67 |
+ listPackages) |
|
69 | 68 |
packageIndexInSortedList = 0 |
70 |
- prevSortListLen=0 |
|
71 |
- |
|
69 |
+ prevSortListLen = 0 |
|
70 |
+ |
|
72 | 71 |
while completeListPackagesToBuild: |
73 |
- |
|
72 |
+ |
|
74 | 73 |
# find next package to process |
75 |
- pkg=None |
|
76 |
- index=-1 |
|
77 |
- lenList=len(sortedList) |
|
74 |
+ pkg = None |
|
75 |
+ index = -1 |
|
76 |
+ lenList = len(sortedList) |
|
78 | 77 |
for i in range(lenList): |
79 | 78 |
if sortedList[i] in alreadyProcessedPackages: |
80 | 79 |
continue |
81 | 80 |
pkg = sortedList[i] |
82 | 81 |
packageIndexInSortedList = i |
83 | 82 |
break |
84 |
- |
|
83 |
+ |
|
85 | 84 |
if pkg is None: |
86 | 85 |
pkg = completeListPackagesToBuild.pop() |
87 | 86 |
packageIndexInSortedList = len(sortedList) |
88 | 87 |
|
89 | 88 |
#creating sort list for package |
90 |
- sortListForPkg=self.createSortListForPkg(pkg) |
|
91 |
- |
|
89 |
+ sortListForPkg = self.createSortListForPkg(pkg) |
|
90 |
+ |
|
92 | 91 |
#remove any cyclic packages in sortListForPkg if they already exists in sortedList |
93 |
- circularDependentPackages=self.getCircularDependentPackages(pkg) |
|
92 |
+ circularDependentPackages = self.getCircularDependentPackages(pkg) |
|
94 | 93 |
for p in circularDependentPackages: |
95 | 94 |
if p in sortedList and p in sortListForPkg: |
96 | 95 |
sortListForPkg.remove(p) |
97 |
- |
|
96 |
+ |
|
98 | 97 |
# insert sort list of package in global sorted list |
99 |
- index=packageIndexInSortedList |
|
100 |
- subList=[] |
|
98 |
+ index = packageIndexInSortedList |
|
99 |
+ subList = [] |
|
101 | 100 |
if packageIndexInSortedList > 0: |
102 |
- subList=sortedList[:packageIndexInSortedList] |
|
101 |
+ subList = sortedList[:packageIndexInSortedList] |
|
103 | 102 |
for p in sortListForPkg: |
104 | 103 |
if p not in subList: |
105 | 104 |
sortedList.insert(index, p) |
106 | 105 |
index = index + 1 |
107 |
- |
|
106 |
+ |
|
108 | 107 |
alreadyProcessedPackages.append(p) |
109 |
- |
|
108 |
+ |
|
110 | 109 |
# Remove duplicate entries in sorted list in intervals |
111 |
- if (len(sortedList)-prevSortListLen) > 100 : |
|
110 |
+ if (len(sortedList) - prevSortListLen) > 100: |
|
112 | 111 |
self.logger.info("Removing duplicates in sortedList") |
113 | 112 |
sortedList = removeDuplicateEntriesInList(sortedList) |
114 | 113 |
else: |
115 |
- prevSortListLen=len(sortedList) |
|
116 |
- |
|
114 |
+ prevSortListLen = len(sortedList) |
|
115 |
+ |
|
117 | 116 |
self.logger.info("Removing duplicates in sorted list") |
118 | 117 |
sortedList = removeDuplicateEntriesInList(sortedList) |
119 |
- |
|
118 |
+ |
|
120 | 119 |
self.logger.info("Sorted list:") |
121 | 120 |
self.logger.info(sortedList) |
122 |
- self.__sortedPackageList=sortedList |
|
123 |
- |
|
124 |
- def __constructBuildAndRunTimeDependencyGraph(self,package): |
|
125 |
- basePackage=SPECS.getData().getSpecName(package) |
|
126 |
- |
|
127 |
- addBuildTimeGraph=True |
|
128 |
- addRunTimeGraph=True |
|
129 |
- if self.__buildDependencyGraph.has_key(basePackage): |
|
121 |
+ self.__sortedPackageList = sortedList |
|
122 |
+ |
|
123 |
+ def __constructBuildAndRunTimeDependencyGraph(self, package): |
|
124 |
+ basePackage = SPECS.getData().getSpecName(package) |
|
125 |
+ |
|
126 |
+ addBuildTimeGraph = True |
|
127 |
+ addRunTimeGraph = True |
|
128 |
+ if basePackage in self.__buildDependencyGraph: |
|
130 | 129 |
addBuildTimeGraph = False |
131 |
- if self.__runTimeDependencyGraph.has_key(basePackage): |
|
132 |
- addRunTimeGraph=False |
|
133 |
- |
|
134 |
- nextPackagesToConstructGraph=[] |
|
130 |
+ if basePackage in self.__runTimeDependencyGraph: |
|
131 |
+ addRunTimeGraph = False |
|
132 |
+ |
|
133 |
+ nextPackagesToConstructGraph = [] |
|
135 | 134 |
if addBuildTimeGraph: |
136 |
- listDependentRpmPackages=SPECS.getData().getBuildRequiresForPackage(basePackage) |
|
137 |
- listDependentPackages=[] |
|
135 |
+ listDependentRpmPackages = SPECS.getData().getBuildRequiresForPackage(basePackage) |
|
136 |
+ listDependentPackages = [] |
|
138 | 137 |
for rpmPkg in listDependentRpmPackages: |
139 |
- basePkg=SPECS.getData().getSpecName(rpmPkg) |
|
138 |
+ basePkg = SPECS.getData().getSpecName(rpmPkg) |
|
140 | 139 |
if basePkg not in listDependentPackages: |
141 | 140 |
listDependentPackages.append(basePkg) |
142 |
- self.__buildDependencyGraph[basePackage]=listDependentPackages |
|
141 |
+ self.__buildDependencyGraph[basePackage] = listDependentPackages |
|
143 | 142 |
nextPackagesToConstructGraph.extend(listDependentPackages) |
144 |
- |
|
143 |
+ |
|
145 | 144 |
if addRunTimeGraph: |
146 |
- listRpmPackages=SPECS.getData().getPackages(basePackage) |
|
145 |
+ listRpmPackages = SPECS.getData().getPackages(basePackage) |
|
147 | 146 |
for rpmPkg in listRpmPackages: |
148 |
- listDependentRpmPackages=SPECS.getData().getRequiresAllForPackage(rpmPkg) |
|
149 |
- self.__runTimeDependencyGraph[rpmPkg]=listDependentRpmPackages[:] |
|
147 |
+ listDependentRpmPackages = SPECS.getData().getRequiresAllForPackage(rpmPkg) |
|
148 |
+ self.__runTimeDependencyGraph[rpmPkg] = listDependentRpmPackages[:] |
|
150 | 149 |
nextPackagesToConstructGraph.extend(listDependentRpmPackages) |
151 | 150 |
|
152 | 151 |
for pkg in nextPackagesToConstructGraph: |
153 | 152 |
self.__constructBuildAndRunTimeDependencyGraph(pkg) |
154 |
- |
|
155 |
- def __readDependencyGraphAndCyclesForGivenPackages(self,listPackages): |
|
153 |
+ |
|
154 |
+ def __readDependencyGraphAndCyclesForGivenPackages(self, listPackages): |
|
156 | 155 |
self.logger.info("Reading dependency graph to check for cycles") |
157 | 156 |
for pkg in listPackages: |
158 | 157 |
self.__constructBuildAndRunTimeDependencyGraph(pkg) |
159 |
- packagesToBUild=self.__buildDependencyGraph.keys() |
|
160 |
- for pkg in packagesToBUild: |
|
161 |
- sortedPackageList,circularDependentPackages = self.topologicalSortPackages(self.__buildDependencyGraph,pkg) |
|
162 |
- if len(circularDependentPackages) > 0 : |
|
158 |
+ |
|
159 |
+ for pkg in self.__buildDependencyGraph.keys(): |
|
160 |
+ sortedPackageList, circularDependentPackages = self.topologicalSortPackages( |
|
161 |
+ self.__buildDependencyGraph, pkg) |
|
162 |
+ if len(circularDependentPackages) > 0: |
|
163 | 163 |
self.logger.error("Found circular dependency") |
164 | 164 |
self.logger.error(circularDependentPackages) |
165 | 165 |
raise Exception("Build Time Circular Dependency") |
166 |
- self.__sortedBuildDependencyGraph[pkg]=sortedPackageList |
|
167 |
- sortedPackageList,circularDependentPackages = self.topologicalSortPackages(self.__runTimeDependencyGraph) |
|
168 |
- if len(circularDependentPackages) > 0 : |
|
166 |
+ self.__sortedBuildDependencyGraph[pkg] = sortedPackageList |
|
167 |
+ sortedPackageList, circularDependentPackages = self.topologicalSortPackages( |
|
168 |
+ self.__runTimeDependencyGraph) |
|
169 |
+ if len(circularDependentPackages) > 0: |
|
169 | 170 |
self.__findCircularDependencies(circularDependentPackages) |
170 |
- |
|
171 |
+ |
|
171 | 172 |
def topologicalSortPackages(self, dependencyGraph, package=None): |
172 |
- noDepPackages = Set() |
|
173 |
+ noDepPackages = set() |
|
173 | 174 |
sortedPackageList = [] |
174 | 175 |
dependentOfPackage = dict() |
175 |
- |
|
176 |
- dependentPackages={} |
|
176 |
+ |
|
177 |
+ dependentPackages = {} |
|
177 | 178 |
if package is None: |
178 |
- dependentPackages=copy.deepcopy(dependencyGraph) |
|
179 |
+ dependentPackages = copy.deepcopy(dependencyGraph) |
|
179 | 180 |
else: |
180 |
- listDepPkgs= Set() |
|
181 |
+ listDepPkgs = set() |
|
181 | 182 |
listDepPkgs.add(package) |
182 | 183 |
while listDepPkgs: |
183 | 184 |
pkg = listDepPkgs.pop() |
184 |
- if dependentPackages.has_key(pkg): |
|
185 |
+ if pkg in dependentPackages: |
|
185 | 186 |
continue |
186 |
- dependentPackages[pkg]=dependencyGraph[pkg][:] |
|
187 |
+ dependentPackages[pkg] = dependencyGraph[pkg][:] |
|
187 | 188 |
for depPkg in dependencyGraph[pkg]: |
188 | 189 |
listDepPkgs.add(depPkg) |
189 |
- |
|
190 |
+ |
|
190 | 191 |
#Find packages with no dependencies and generate dependentof_package edge list |
191 | 192 |
for pkg in dependentPackages: |
192 | 193 |
if len(dependentPackages[pkg]) == 0: |
193 | 194 |
noDepPackages.add(pkg) |
194 | 195 |
else: |
195 | 196 |
for depPkg in dependentPackages[pkg]: |
196 |
- if not dependentOfPackage.has_key(depPkg): |
|
197 |
- dependentOfPackage[depPkg]=[pkg] |
|
197 |
+ if depPkg not in dependentOfPackage: |
|
198 |
+ dependentOfPackage[depPkg] = [pkg] |
|
198 | 199 |
else: |
199 | 200 |
if pkg not in dependentOfPackage[depPkg]: |
200 | 201 |
dependentOfPackage[depPkg].append(pkg) |
201 |
- |
|
202 |
+ |
|
202 | 203 |
while noDepPackages: |
203 | 204 |
pkg = noDepPackages.pop() |
204 | 205 |
sortedPackageList.append(pkg) |
... | ... |
@@ -206,29 +207,27 @@ class PackageBuildDataGenerator(object): |
206 | 206 |
for childPkg in list(dependentOfPackage.get(pkg)): |
207 | 207 |
dependentOfPackage.get(pkg).remove(childPkg) |
208 | 208 |
dependentPackages[childPkg].remove(pkg) |
209 |
- if len(dependentPackages[childPkg])==0: |
|
209 |
+ if len(dependentPackages[childPkg]) == 0: |
|
210 | 210 |
noDepPackages.add(childPkg) |
211 |
- |
|
211 |
+ |
|
212 | 212 |
# creating circular dependency graph for given dependency graph |
213 |
- circularDependencyGraph={} |
|
214 |
- listCircularPkg = dependentPackages.keys() |
|
215 |
- for pkg in listCircularPkg: |
|
213 |
+ circularDependencyGraph = {} |
|
214 |
+ for pkg in dependentPackages.keys(): |
|
216 | 215 |
if len(dependentPackages[pkg]) != 0: |
217 |
- circularDependencyGraph[pkg]=dependentPackages[pkg] |
|
218 |
- |
|
219 |
- #return (non-circular dependent package in sorted order and circular dependent package list in a dependencyGraph) |
|
220 |
- return sortedPackageList,circularDependencyGraph |
|
221 |
- |
|
222 |
- |
|
223 |
- def __constructDependencyMap(self,cyclicDependencyGraph): |
|
216 |
+ circularDependencyGraph[pkg] = dependentPackages[pkg] |
|
217 |
+ |
|
218 |
+ #return (non-circular dependent package in sorted order and circular dependent |
|
219 |
+ #package list in a dependencyGraph) |
|
220 |
+ return sortedPackageList, circularDependencyGraph |
|
221 |
+ |
|
222 |
+ def __constructDependencyMap(self, cyclicDependencyGraph): |
|
224 | 223 |
self.logger.info("Constructing dependency map from circular dependency graph.....") |
225 |
- constructDependencyMap={} |
|
226 |
- listNodes=cyclicDependencyGraph.keys() |
|
227 |
- for node in listNodes: |
|
228 |
- tmpDepNodeList=[] |
|
224 |
+ constructDependencyMap = {} |
|
225 |
+ for node in cyclicDependencyGraph.keys(): |
|
226 |
+ tmpDepNodeList = [] |
|
229 | 227 |
tmpDepNodeList.append(node) |
230 |
- depNodeList=[] |
|
231 |
- while len(tmpDepNodeList)!=0: |
|
228 |
+ depNodeList = [] |
|
229 |
+ while len(tmpDepNodeList) != 0: |
|
232 | 230 |
currentNode = tmpDepNodeList.pop() |
233 | 231 |
addDepNodeList = cyclicDependencyGraph[currentNode] |
234 | 232 |
depNodeList.append(currentNode) |
... | ... |
@@ -239,50 +238,43 @@ class PackageBuildDataGenerator(object): |
239 | 239 |
if depNode not in tmpDepNodeList: |
240 | 240 |
tmpDepNodeList.append(depNode) |
241 | 241 |
depNodeList.remove(node) |
242 |
- constructDependencyMap[node]=depNodeList |
|
242 |
+ constructDependencyMap[node] = depNodeList |
|
243 | 243 |
self.logger.info("Dependency Map:") |
244 | 244 |
self.logger.info(constructDependencyMap) |
245 | 245 |
return constructDependencyMap |
246 |
- |
|
247 |
- |
|
248 |
- def __findCircularDependencies(self,cyclicDependencyGraph): |
|
246 |
+ |
|
247 |
+ def __findCircularDependencies(self, cyclicDependencyGraph): |
|
249 | 248 |
self.logger.info("Looking for circular dependencies") |
250 | 249 |
if len(cyclicDependencyGraph) == 0: |
251 | 250 |
return |
252 | 251 |
#step1: construct dependency map from dependency graph |
253 |
- constructDependencyMap=self.__constructDependencyMap(cyclicDependencyGraph) |
|
254 |
- |
|
252 |
+ constructDependencyMap = self.__constructDependencyMap(cyclicDependencyGraph) |
|
253 |
+ |
|
255 | 254 |
#step2: find cycles in dependency map |
256 | 255 |
self.logger.info("Finding and adding cycles using constructed dependency map......") |
257 |
- cycleCount=0 |
|
258 |
- listNodes=cyclicDependencyGraph.keys() |
|
259 |
- for node in listNodes: |
|
260 |
- listDepPkg=constructDependencyMap[node] |
|
261 |
- cycPkgs=[] |
|
262 |
- if not self.__mapPackageToCycle.has_key(node): |
|
256 |
+ cycleCount = 0 |
|
257 |
+ for node in cyclicDependencyGraph.keys(): |
|
258 |
+ listDepPkg = constructDependencyMap[node] |
|
259 |
+ cycPkgs = [] |
|
260 |
+ if node not in self.__mapPackageToCycle: |
|
263 | 261 |
for depPkg in listDepPkg: |
264 | 262 |
x = constructDependencyMap[depPkg] |
265 | 263 |
if node in x: |
266 | 264 |
cycPkgs.append(depPkg) |
267 |
- |
|
265 |
+ |
|
268 | 266 |
if len(cycPkgs) != 0: |
269 | 267 |
cycPkgs.append(node) |
270 |
- cycleName="cycle"+str(PackageBuildDataGenerator.cycleCount) |
|
271 |
- PackageBuildDataGenerator.cycleCount=PackageBuildDataGenerator.cycleCount+1 |
|
268 |
+ cycleName = "cycle" + str(PackageBuildDataGenerator.cycleCount) |
|
269 |
+ PackageBuildDataGenerator.cycleCount += 1 |
|
272 | 270 |
for x in cycPkgs: |
273 |
- self.__mapPackageToCycle[x]=cycleName |
|
274 |
- self.__mapCyclesToPackageList[cycleName]=cycPkgs |
|
271 |
+ self.__mapPackageToCycle[x] = cycleName |
|
272 |
+ self.__mapCyclesToPackageList[cycleName] = cycPkgs |
|
275 | 273 |
self.logger.info("New circular dependency found:") |
276 |
- self.logger.info(cycleName+" "+ ",".join(cycPkgs)) |
|
277 |
- cycleCount = cycleCount + 1 |
|
278 |
- |
|
279 |
- if cycleCount > 0 : |
|
280 |
- self.logger.info("Found "+str(cycleCount) + " cycles.") |
|
274 |
+ self.logger.info(cycleName + " " + ",".join(cycPkgs)) |
|
275 |
+ cycleCount += 1 |
|
276 |
+ |
|
277 |
+ if cycleCount > 0: |
|
278 |
+ self.logger.info("Found " + str(cycleCount) + " cycles.") |
|
281 | 279 |
self.logger.info("Successfully added all detected circular dependencies to list.") |
282 | 280 |
else: |
283 | 281 |
self.logger.info("No circular dependencies found.") |
284 |
- |
|
285 |
- |
|
286 |
- |
|
287 |
- |
|
288 |
- |
... | ... |
@@ -1,49 +1,51 @@ |
1 |
+import sys |
|
2 |
+import os.path |
|
1 | 3 |
from PackageUtils import PackageUtils |
2 | 4 |
from Logger import Logger |
3 | 5 |
from ChrootUtils import ChrootUtils |
4 | 6 |
from ToolChainUtils import ToolChainUtils |
5 | 7 |
from CommandUtils import CommandUtils |
6 |
-import os.path |
|
7 | 8 |
from constants import constants |
8 |
-import shutil |
|
9 | 9 |
from SpecData import SPECS |
10 | 10 |
import docker |
11 |
-import sys |
|
12 | 11 |
|
13 | 12 |
class PackageBuilderBase(object): |
14 |
- def __init__(self,mapPackageToCycles,listAvailableCyclicPackages,listBuildOptionPackages,pkgBuildOptionFile, pkgBuildType): |
|
13 |
+ def __init__(self, mapPackageToCycles, listAvailableCyclicPackages, |
|
14 |
+ listBuildOptionPackages, pkgBuildOptionFile, pkgBuildType): |
|
15 | 15 |
# will be initialized in buildPackageThreadAPI() |
16 |
- self.logName=None |
|
17 |
- self.logPath=None |
|
18 |
- self.logger=None |
|
19 |
- self.package=None |
|
16 |
+ self.logName = None |
|
17 |
+ self.logPath = None |
|
18 |
+ self.logger = None |
|
19 |
+ self.package = None |
|
20 | 20 |
self.mapPackageToCycles = mapPackageToCycles |
21 | 21 |
self.listAvailableCyclicPackages = listAvailableCyclicPackages |
22 |
- self.listNodepsPackages = ["glibc","gmp","zlib","file","binutils","mpfr","mpc","gcc","ncurses","util-linux","groff","perl","texinfo","rpm","openssl","go"] |
|
23 |
- self.listBuildOptionPackages=listBuildOptionPackages |
|
24 |
- self.pkgBuildOptionFile=pkgBuildOptionFile |
|
22 |
+ self.listNodepsPackages = ["glibc", "gmp", "zlib", "file", "binutils", "mpfr", |
|
23 |
+ "mpc", "gcc", "ncurses", "util-linux", "groff", "perl", |
|
24 |
+ "texinfo", "rpm", "openssl", "go"] |
|
25 |
+ self.listBuildOptionPackages = listBuildOptionPackages |
|
26 |
+ self.pkgBuildOptionFile = pkgBuildOptionFile |
|
25 | 27 |
self.pkgBuildType = pkgBuildType |
26 | 28 |
|
27 |
- def buildPackageThreadAPIPrepare(self,package,outputMap, threadName): |
|
28 |
- self.package=package |
|
29 |
- self.logName="build-"+package |
|
30 |
- self.logPath=constants.logPath+"/build-"+package |
|
29 |
+ def buildPackageThreadAPIPrepare(self, package, outputMap, threadName): |
|
30 |
+ self.package = package |
|
31 |
+ self.logName = "build-" + package |
|
32 |
+ self.logPath = constants.logPath + "/build-" + package |
|
31 | 33 |
if not os.path.isdir(self.logPath): |
32 | 34 |
cmdUtils = CommandUtils() |
33 |
- cmdUtils.runCommandInShell("mkdir -p "+self.logPath) |
|
34 |
- self.logger=Logger.getLogger(self.logName,self.logPath) |
|
35 |
+ cmdUtils.runCommandInShell("mkdir -p " + self.logPath) |
|
36 |
+ self.logger = Logger.getLogger(self.logName, self.logPath) |
|
35 | 37 |
|
36 |
- def findPackageNameFromRPMFile(self,rpmfile): |
|
37 |
- rpmfile=os.path.basename(rpmfile) |
|
38 |
- releaseindex=rpmfile.rfind("-") |
|
38 |
+ def findPackageNameFromRPMFile(self, rpmfile): |
|
39 |
+ rpmfile = os.path.basename(rpmfile) |
|
40 |
+ releaseindex = rpmfile.rfind("-") |
|
39 | 41 |
if releaseindex == -1: |
40 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
42 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
41 | 43 |
return None |
42 |
- versionindex=rpmfile[0:releaseindex].rfind("-") |
|
44 |
+ versionindex = rpmfile[0:releaseindex].rfind("-") |
|
43 | 45 |
if versionindex == -1: |
44 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
46 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
45 | 47 |
return None |
46 |
- packageName=rpmfile[0:versionindex] |
|
48 |
+ packageName = rpmfile[0:versionindex] |
|
47 | 49 |
return packageName |
48 | 50 |
|
49 | 51 |
def findInstalledPackages(self, instanceID): |
... | ... |
@@ -52,69 +54,76 @@ class PackageBuilderBase(object): |
52 | 52 |
listInstalledRPMs = pkgUtils.findInstalledRPMPackages(instanceID) |
53 | 53 |
elif self.pkgBuildType == "container": |
54 | 54 |
listInstalledRPMs = pkgUtils.findInstalledRPMPackagesInContainer(instanceID) |
55 |
- listInstalledPackages=[] |
|
55 |
+ listInstalledPackages = [] |
|
56 | 56 |
for installedRPM in listInstalledRPMs: |
57 |
- packageName=self.findPackageNameFromRPMFile(installedRPM) |
|
57 |
+ packageName = self.findPackageNameFromRPMFile(installedRPM) |
|
58 | 58 |
if packageName is not None: |
59 | 59 |
listInstalledPackages.append(packageName) |
60 | 60 |
return listInstalledPackages, listInstalledRPMs |
61 | 61 |
|
62 | 62 |
def checkIfPackageIsAlreadyBuilt(self): |
63 |
- basePkg=SPECS.getData().getSpecName(self.package) |
|
64 |
- listRPMPackages=SPECS.getData().getRPMPackages(basePkg) |
|
65 |
- packageIsAlreadyBuilt=True |
|
66 |
- pkgUtils = PackageUtils(self.logName,self.logPath) |
|
63 |
+ basePkg = SPECS.getData().getSpecName(self.package) |
|
64 |
+ listRPMPackages = SPECS.getData().getRPMPackages(basePkg) |
|
65 |
+ packageIsAlreadyBuilt = True |
|
66 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
67 | 67 |
for pkg in listRPMPackages: |
68 | 68 |
if pkgUtils.findRPMFileForGivenPackage(pkg) is None: |
69 |
- packageIsAlreadyBuilt=False |
|
69 |
+ packageIsAlreadyBuilt = False |
|
70 | 70 |
break |
71 | 71 |
return packageIsAlreadyBuilt |
72 | 72 |
|
73 |
- def findRunTimeRequiredRPMPackages(self,rpmPackage): |
|
74 |
- listRequiredPackages=SPECS.getData().getRequiresForPackage(rpmPackage) |
|
73 |
+ def findRunTimeRequiredRPMPackages(self, rpmPackage): |
|
74 |
+ listRequiredPackages = SPECS.getData().getRequiresForPackage(rpmPackage) |
|
75 | 75 |
return listRequiredPackages |
76 | 76 |
|
77 | 77 |
def findBuildTimeRequiredPackages(self): |
78 |
- listRequiredPackages=SPECS.getData().getBuildRequiresForPackage(self.package) |
|
78 |
+ listRequiredPackages = SPECS.getData().getBuildRequiresForPackage(self.package) |
|
79 | 79 |
return listRequiredPackages |
80 | 80 |
|
81 | 81 |
def findBuildTimeCheckRequiredPackages(self): |
82 |
- listRequiredPackages=SPECS.getData().getCheckBuildRequiresForPackage(self.package) |
|
82 |
+ listRequiredPackages = SPECS.getData().getCheckBuildRequiresForPackage(self.package) |
|
83 | 83 |
return listRequiredPackages |
84 | 84 |
|
85 |
- def installPackage(self, pkgUtils, package, instanceID, destLogPath, listInstalledPackages, listInstalledRPMs): |
|
86 |
- latestRPM = os.path.basename(pkgUtils.findRPMFileForGivenPackage(package)).replace(".rpm", "") |
|
85 |
+ def installPackage(self, pkgUtils, package, instanceID, destLogPath, |
|
86 |
+ listInstalledPackages, listInstalledRPMs): |
|
87 |
+ latestRPM = os.path.basename( |
|
88 |
+ pkgUtils.findRPMFileForGivenPackage(package)).replace(".rpm", "") |
|
87 | 89 |
if package in listInstalledPackages and latestRPM in listInstalledRPMs: |
88 | 90 |
return |
89 | 91 |
# mark it as installed - to avoid cyclic recursion |
90 | 92 |
listInstalledPackages.append(package) |
91 | 93 |
listInstalledRPMs.append(latestRPM) |
92 |
- self.installDependentRunTimePackages(pkgUtils,package,instanceID,destLogPath,listInstalledPackages, listInstalledRPMs) |
|
93 |
- noDeps=False |
|
94 |
- if self.mapPackageToCycles.has_key(package): |
|
94 |
+ self.installDependentRunTimePackages(pkgUtils, package, instanceID, destLogPath, |
|
95 |
+ listInstalledPackages, listInstalledRPMs) |
|
96 |
+ noDeps = False |
|
97 |
+ if package in self.mapPackageToCycles: |
|
95 | 98 |
noDeps = True |
96 | 99 |
if package in self.listNodepsPackages: |
97 |
- noDeps=True |
|
100 |
+ noDeps = True |
|
98 | 101 |
if package in constants.noDepsPackageList: |
99 |
- noDeps=True |
|
102 |
+ noDeps = True |
|
100 | 103 |
if self.pkgBuildType == "chroot": |
101 |
- pkgUtils.installRPM(package,instanceID,noDeps,destLogPath) |
|
104 |
+ pkgUtils.installRPM(package, instanceID, noDeps, destLogPath) |
|
102 | 105 |
elif self.pkgBuildType == "container": |
103 | 106 |
pkgUtils.prepRPMforInstallInContainer(package, instanceID, noDeps, destLogPath) |
104 | 107 |
|
105 |
- def installDependentRunTimePackages(self,pkgUtils,package,instanceID,destLogPath,listInstalledPackages, listInstalledRPMs): |
|
106 |
- listRunTimeDependentPackages=self.findRunTimeRequiredRPMPackages(package) |
|
108 |
+ def installDependentRunTimePackages(self, pkgUtils, package, instanceID, destLogPath, |
|
109 |
+ listInstalledPackages, listInstalledRPMs): |
|
110 |
+ listRunTimeDependentPackages = self.findRunTimeRequiredRPMPackages(package) |
|
107 | 111 |
if len(listRunTimeDependentPackages) != 0: |
108 | 112 |
for pkg in listRunTimeDependentPackages: |
109 |
- if self.mapPackageToCycles.has_key(pkg) and pkg not in self.listAvailableCyclicPackages: |
|
113 |
+ if pkg in self.mapPackageToCycles and pkg not in self.listAvailableCyclicPackages: |
|
110 | 114 |
continue |
111 |
- latestPkgRPM = os.path.basename(pkgUtils.findRPMFileForGivenPackage(pkg)).replace(".rpm", "") |
|
115 |
+ latestPkgRPM = os.path.basename( |
|
116 |
+ pkgUtils.findRPMFileForGivenPackage(pkg)).replace(".rpm", "") |
|
112 | 117 |
if pkg in listInstalledPackages and latestPkgRPM in listInstalledRPMs: |
113 | 118 |
continue |
114 |
- self.installPackage(pkgUtils, pkg, instanceID, destLogPath, listInstalledPackages, listInstalledRPMs) |
|
119 |
+ self.installPackage(pkgUtils, pkg, instanceID, destLogPath, |
|
120 |
+ listInstalledPackages, listInstalledRPMs) |
|
115 | 121 |
|
116 | 122 |
class PackageBuilderContainer(object): |
117 |
- def __init__(self, mapPackageToCycles, listAvailableCyclicPackages, listBuildOptionPackages, pkgBuildOptionFile, pkgBuildType): |
|
123 |
+ def __init__(self, mapPackageToCycles, listAvailableCyclicPackages, listBuildOptionPackages, |
|
124 |
+ pkgBuildOptionFile, pkgBuildType): |
|
118 | 125 |
self.buildContainerImage = "photon_build_container:latest" |
119 | 126 |
self.dockerClient = docker.from_env(version="auto") |
120 | 127 |
|
... | ... |
@@ -125,19 +134,20 @@ class PackageBuilderContainer(object): |
125 | 125 |
self.base.buildPackageThreadAPIPrepare(package, outputMap, threadName) |
126 | 126 |
try: |
127 | 127 |
self.buildPackage() |
128 |
- outputMap[threadName]=True |
|
128 |
+ outputMap[threadName] = True |
|
129 | 129 |
except Exception as e: |
130 | 130 |
# TODO: self.logger might be None |
131 | 131 |
self.base.logger.exception(e) |
132 |
- outputMap[threadName]=False |
|
132 |
+ outputMap[threadName] = False |
|
133 | 133 |
raise e |
134 | 134 |
|
135 |
- def prepareBuildContainer(self, containerTaskName, packageName, isToolChainPackage=False): |
|
135 |
+ def prepareBuildContainer(self, containerTaskName, packageName, |
|
136 |
+ isToolChainPackage=False): |
|
136 | 137 |
# Prepare an empty chroot environment to let docker use the BUILD folder. |
137 | 138 |
# This avoids docker using overlayFS which will cause make check failure. |
138 |
- chrootName="build-"+packageName |
|
139 |
+ chrootName = "build-" + packageName |
|
139 | 140 |
chrUtils = ChrootUtils(self.base.logName, self.base.logPath) |
140 |
- returnVal,chrootID = chrUtils.createChroot(chrootName) |
|
141 |
+ returnVal, chrootID = chrUtils.createChroot(chrootName) |
|
141 | 142 |
if not returnVal: |
142 | 143 |
raise Exception("Unable to prepare build root") |
143 | 144 |
cmdUtils = CommandUtils() |
... | ... |
@@ -146,15 +156,17 @@ class PackageBuilderContainer(object): |
146 | 146 |
|
147 | 147 |
containerID = None |
148 | 148 |
mountVols = { |
149 |
- constants.prevPublishRPMRepo: {'bind': '/publishrpms', 'mode': 'ro'}, |
|
150 |
- constants.prevPublishXRPMRepo: {'bind': '/publishxrpms', 'mode': 'ro'}, |
|
151 |
- constants.tmpDirPath: {'bind': '/tmp', 'mode': 'rw'}, |
|
152 |
- constants.rpmPath: {'bind': constants.topDirPath + "/RPMS", 'mode': 'rw'}, |
|
153 |
- constants.sourceRpmPath: {'bind': constants.topDirPath + "/SRPMS", 'mode': 'rw'}, |
|
154 |
- constants.logPath + "/" + self.base.logName: {'bind': constants.topDirPath + "/LOGS", 'mode': 'rw'}, |
|
155 |
- chrootID + constants.topDirPath + "/BUILD": {'bind': constants.topDirPath + "/BUILD", 'mode': 'rw'}, |
|
156 |
- constants.dockerUnixSocket: {'bind': constants.dockerUnixSocket, 'mode': 'rw'} |
|
157 |
- } |
|
149 |
+ constants.prevPublishRPMRepo: {'bind': '/publishrpms', 'mode': 'ro'}, |
|
150 |
+ constants.prevPublishXRPMRepo: {'bind': '/publishxrpms', 'mode': 'ro'}, |
|
151 |
+ constants.tmpDirPath: {'bind': '/tmp', 'mode': 'rw'}, |
|
152 |
+ constants.rpmPath: {'bind': constants.topDirPath + "/RPMS", 'mode': 'rw'}, |
|
153 |
+ constants.sourceRpmPath: {'bind': constants.topDirPath + "/SRPMS", 'mode': 'rw'}, |
|
154 |
+ constants.logPath + "/" + self.base.logName: {'bind': constants.topDirPath + "/LOGS", |
|
155 |
+ 'mode': 'rw'}, |
|
156 |
+ chrootID + constants.topDirPath + "/BUILD": {'bind': constants.topDirPath + "/BUILD", |
|
157 |
+ 'mode': 'rw'}, |
|
158 |
+ constants.dockerUnixSocket: {'bind': constants.dockerUnixSocket, 'mode': 'rw'} |
|
159 |
+ } |
|
158 | 160 |
|
159 | 161 |
containerName = containerTaskName |
160 | 162 |
containerName = containerName.replace("+", "p") |
... | ... |
@@ -163,10 +175,14 @@ class PackageBuilderContainer(object): |
163 | 163 |
if oldContainerID is not None: |
164 | 164 |
oldContainerID.remove(force=True) |
165 | 165 |
except docker.errors.NotFound: |
166 |
- sys.exc_clear() |
|
166 |
+ try: |
|
167 |
+ sys.exc_clear() |
|
168 |
+ except: |
|
169 |
+ pass |
|
167 | 170 |
|
168 | 171 |
try: |
169 |
- self.base.logger.info("BuildContainer-prepareBuildContainer: Starting build container: " + containerName) |
|
172 |
+ self.base.logger.info("BuildContainer-prepareBuildContainer: " + |
|
173 |
+ "Starting build container: " + containerName) |
|
170 | 174 |
#TODO: Is init=True equivalent of --sig-proxy? |
171 | 175 |
privilegedDocker = False |
172 | 176 |
cap_list = ['SYS_PTRACE'] |
... | ... |
@@ -174,20 +190,22 @@ class PackageBuilderContainer(object): |
174 | 174 |
privilegedDocker = True |
175 | 175 |
|
176 | 176 |
containerID = self.dockerClient.containers.run(self.buildContainerImage, |
177 |
- detach=True, |
|
178 |
- cap_add=cap_list, |
|
179 |
- privileged=privilegedDocker, |
|
180 |
- name=containerName, |
|
181 |
- network_mode="host", |
|
182 |
- volumes=mountVols, |
|
183 |
- command="/bin/bash -l -c /wait.sh") |
|
184 |
- |
|
185 |
- self.base.logger.debug("Started Photon build container for task " + containerTaskName |
|
186 |
- + " ID: " + containerID.short_id) |
|
177 |
+ detach=True, |
|
178 |
+ cap_add=cap_list, |
|
179 |
+ privileged=privilegedDocker, |
|
180 |
+ name=containerName, |
|
181 |
+ network_mode="host", |
|
182 |
+ volumes=mountVols, |
|
183 |
+ command="/bin/bash -l -c /wait.sh") |
|
184 |
+ |
|
185 |
+ self.base.logger.debug("Started Photon build container for task " + containerTaskName + |
|
186 |
+ " ID: " + containerID.short_id) |
|
187 | 187 |
if not containerID: |
188 |
- raise Exception("Unable to start Photon build container for task " + containerTaskName) |
|
188 |
+ raise Exception("Unable to start Photon build container for task " + |
|
189 |
+ containerTaskName) |
|
189 | 190 |
except Exception as e: |
190 |
- self.base.logger.debug("Unable to start Photon build container for task " + containerTaskName) |
|
191 |
+ self.base.logger.debug("Unable to start Photon build container for task " + |
|
192 |
+ containerTaskName) |
|
191 | 193 |
raise e |
192 | 194 |
return containerID, chrootID |
193 | 195 |
|
... | ... |
@@ -212,46 +230,55 @@ class PackageBuilderContainer(object): |
212 | 212 |
isToolChainPackage = True |
213 | 213 |
destLogPath = constants.logPath + "/build-" + self.base.package |
214 | 214 |
try: |
215 |
- containerID, chrootID = self.prepareBuildContainer(containerTaskName, self.base.package, isToolChainPackage) |
|
215 |
+ containerID, chrootID = self.prepareBuildContainer( |
|
216 |
+ containerTaskName, self.base.package, isToolChainPackage) |
|
216 | 217 |
|
217 | 218 |
tcUtils = ToolChainUtils(self.base.logName, self.base.logPath) |
218 | 219 |
if self.base.package in constants.perPackageToolChain: |
219 | 220 |
self.base.logger.debug(constants.perPackageToolChain[self.base.package]) |
220 |
- tcUtils.installCustomToolChainRPMSinContainer(containerID, |
|
221 |
- constants.perPackageToolChain[self.base.package], |
|
222 |
- self.base.package); |
|
221 |
+ tcUtils.installCustomToolChainRPMSinContainer( |
|
222 |
+ containerID, |
|
223 |
+ constants.perPackageToolChain[self.base.package], |
|
224 |
+ self.base.package) |
|
223 | 225 |
|
224 | 226 |
listInstalledPackages, listInstalledRPMs = self.base.findInstalledPackages(containerID) |
225 | 227 |
self.base.logger.info(listInstalledPackages) |
226 | 228 |
listDependentPackages = self.base.findBuildTimeRequiredPackages() |
227 | 229 |
if constants.rpmCheck and self.base.package in constants.testForceRPMS: |
228 | 230 |
listDependentPackages.extend(self.base.findBuildTimeCheckRequiredPackages()) |
229 |
- testPackages=set(constants.listMakeCheckRPMPkgtoInstall)-set(listInstalledPackages)-set([self.base.package]) |
|
231 |
+ testPackages = (set(constants.listMakeCheckRPMPkgtoInstall) - |
|
232 |
+ set(listInstalledPackages) - |
|
233 |
+ set([self.base.package])) |
|
230 | 234 |
listDependentPackages.extend(testPackages) |
231 |
- listDependentPackages=list(set(listDependentPackages)) |
|
235 |
+ listDependentPackages = list(set(listDependentPackages)) |
|
232 | 236 |
|
233 | 237 |
pkgUtils = PackageUtils(self.base.logName, self.base.logPath) |
234 | 238 |
if len(listDependentPackages) != 0: |
235 |
- self.base.logger.info("BuildContainer-buildPackage: Installing dependent packages..") |
|
239 |
+ self.base.logger.info("BuildContainer-buildPackage: " + |
|
240 |
+ "Installing dependent packages..") |
|
236 | 241 |
self.base.logger.info(listDependentPackages) |
237 | 242 |
for pkg in listDependentPackages: |
238 |
- self.base.installPackage(pkgUtils, pkg, containerID, destLogPath, listInstalledPackages, listInstalledRPMs) |
|
243 |
+ self.base.installPackage(pkgUtils, pkg, containerID, destLogPath, |
|
244 |
+ listInstalledPackages, listInstalledRPMs) |
|
239 | 245 |
pkgUtils.installRPMSInAOneShotInContainer(containerID, destLogPath) |
240 |
- self.base.logger.info("Finished installing the build time dependent packages......") |
|
246 |
+ self.base.logger.info("Finished installing the build time dependent packages....") |
|
241 | 247 |
|
242 |
- self.base.logger.info("BuildContainer-buildPackage: Start building the package: " + self.base.package) |
|
248 |
+ self.base.logger.info("BuildContainer-buildPackage: Start building the package: " + |
|
249 |
+ self.base.package) |
|
243 | 250 |
pkgUtils.adjustGCCSpecsInContainer(self.base.package, containerID, destLogPath) |
244 | 251 |
pkgUtils.buildRPMSForGivenPackageInContainer( |
245 |
- self.base.package, |
|
246 |
- containerID, |
|
247 |
- self.base.listBuildOptionPackages, |
|
248 |
- self.base.pkgBuildOptionFile, |
|
249 |
- destLogPath) |
|
250 |
- self.base.logger.info("BuildContainer-buildPackage: Successfully built the package: " + self.base.package) |
|
252 |
+ self.base.package, |
|
253 |
+ containerID, |
|
254 |
+ self.base.listBuildOptionPackages, |
|
255 |
+ self.base.pkgBuildOptionFile, |
|
256 |
+ destLogPath) |
|
257 |
+ self.base.logger.info("BuildContainer-buildPackage: Successfully built the package: " + |
|
258 |
+ self.base.package) |
|
251 | 259 |
except Exception as e: |
252 | 260 |
self.base.logger.error("Failed while building package:" + self.base.package) |
253 | 261 |
if containerID is not None: |
254 |
- self.base.logger.debug("Container " + containerID.short_id + " retained for debugging.") |
|
262 |
+ self.base.logger.debug("Container " + containerID.short_id + |
|
263 |
+ " retained for debugging.") |
|
255 | 264 |
logFileName = os.path.join(destLogPath, self.base.package + ".log") |
256 | 265 |
fileLog = os.popen('tail -n 20 ' + logFileName).read() |
257 | 266 |
self.base.logger.debug(fileLog) |
... | ... |
@@ -266,31 +293,32 @@ class PackageBuilderContainer(object): |
266 | 266 |
chrUtils.destroyChroot(chrootID) |
267 | 267 |
|
268 | 268 |
class PackageBuilderChroot(object): |
269 |
- def __init__(self, mapPackageToCycles, listAvailableCyclicPackages, listBuildOptionPackages, pkgBuildOptionFile, pkgBuildType): |
|
269 |
+ def __init__(self, mapPackageToCycles, listAvailableCyclicPackages, listBuildOptionPackages, |
|
270 |
+ pkgBuildOptionFile, pkgBuildType): |
|
270 | 271 |
self.base = PackageBuilderBase(mapPackageToCycles, listAvailableCyclicPackages, |
271 | 272 |
listBuildOptionPackages, pkgBuildOptionFile, pkgBuildType) |
272 | 273 |
|
273 | 274 |
def buildPackageThreadAPI(self, package, outputMap, threadName): |
274 |
- self.base.buildPackageThreadAPIPrepare(package,outputMap, threadName) |
|
275 |
+ self.base.buildPackageThreadAPIPrepare(package, outputMap, threadName) |
|
275 | 276 |
try: |
276 | 277 |
self.buildPackage() |
277 |
- outputMap[threadName]=True |
|
278 |
+ outputMap[threadName] = True |
|
278 | 279 |
except Exception as e: |
279 | 280 |
# TODO: self.logger might be None |
280 | 281 |
self.base.logger.exception(e) |
281 |
- outputMap[threadName]=False |
|
282 |
+ outputMap[threadName] = False |
|
282 | 283 |
raise e |
283 | 284 |
|
284 | 285 |
def prepareBuildRoot(self): |
285 |
- chrootID=None |
|
286 |
- chrootName="build-"+self.base.package |
|
286 |
+ chrootID = None |
|
287 |
+ chrootName = "build-" + self.base.package |
|
287 | 288 |
try: |
288 |
- chrUtils = ChrootUtils(self.base.logName,self.base.logPath) |
|
289 |
- returnVal,chrootID = chrUtils.createChroot(chrootName) |
|
289 |
+ chrUtils = ChrootUtils(self.base.logName, self.base.logPath) |
|
290 |
+ returnVal, chrootID = chrUtils.createChroot(chrootName) |
|
290 | 291 |
self.base.logger.debug("Created new chroot: " + chrootID) |
291 | 292 |
if not returnVal: |
292 | 293 |
raise Exception("Unable to prepare build root") |
293 |
- tUtils=ToolChainUtils(self.base.logName,self.base.logPath) |
|
294 |
+ tUtils = ToolChainUtils(self.base.logName, self.base.logPath) |
|
294 | 295 |
tUtils.installToolChainRPMS(chrootID, self.base.package, self.base.logPath) |
295 | 296 |
except Exception as e: |
296 | 297 |
if chrootID is not None: |
... | ... |
@@ -311,33 +339,39 @@ class PackageBuilderChroot(object): |
311 | 311 |
self.base.logger.info("Skipping testing the package:" + self.base.package) |
312 | 312 |
return |
313 | 313 |
|
314 |
- chrUtils = ChrootUtils(self.base.logName,self.base.logPath) |
|
315 |
- chrootID=None |
|
314 |
+ chrUtils = ChrootUtils(self.base.logName, self.base.logPath) |
|
315 |
+ chrootID = None |
|
316 | 316 |
try: |
317 | 317 |
chrootID = self.prepareBuildRoot() |
318 | 318 |
listInstalledPackages, listInstalledRPMs = self.base.findInstalledPackages(chrootID) |
319 |
- listDependentPackages=self.base.findBuildTimeRequiredPackages() |
|
319 |
+ listDependentPackages = self.base.findBuildTimeRequiredPackages() |
|
320 | 320 |
if constants.rpmCheck and self.base.package in constants.testForceRPMS: |
321 | 321 |
listDependentPackages.extend(self.base.findBuildTimeCheckRequiredPackages()) |
322 |
- testPackages=set(constants.listMakeCheckRPMPkgtoInstall)-set(listInstalledPackages)-set([self.base.package]) |
|
322 |
+ testPackages = (set(constants.listMakeCheckRPMPkgtoInstall) - |
|
323 |
+ set(listInstalledPackages) - |
|
324 |
+ set([self.base.package])) |
|
323 | 325 |
listDependentPackages.extend(testPackages) |
324 |
- listDependentPackages=list(set(listDependentPackages)) |
|
326 |
+ listDependentPackages = list(set(listDependentPackages)) |
|
325 | 327 |
|
326 |
- pkgUtils = PackageUtils(self.base.logName,self.base.logPath) |
|
328 |
+ pkgUtils = PackageUtils(self.base.logName, self.base.logPath) |
|
327 | 329 |
if len(listDependentPackages) != 0: |
328 | 330 |
self.base.logger.info("Installing the build time dependent packages......") |
329 | 331 |
for pkg in listDependentPackages: |
330 |
- self.base.installPackage(pkgUtils, pkg, chrootID, self.base.logPath, listInstalledPackages, listInstalledRPMs) |
|
332 |
+ self.base.installPackage(pkgUtils, pkg, chrootID, self.base.logPath, |
|
333 |
+ listInstalledPackages, listInstalledRPMs) |
|
331 | 334 |
pkgUtils.installRPMSInAOneShot(chrootID, self.base.logPath) |
332 |
- self.base.logger.info("Finished installing the build time dependent packages......") |
|
335 |
+ self.base.logger.info("Finished installing the build time dependent packages....") |
|
333 | 336 |
|
334 | 337 |
pkgUtils.adjustGCCSpecs(self.base.package, chrootID, self.base.logPath) |
335 |
- pkgUtils.buildRPMSForGivenPackage(self.base.package, chrootID,self.base.listBuildOptionPackages, |
|
336 |
- self.base.pkgBuildOptionFile, self.base.logPath) |
|
338 |
+ pkgUtils.buildRPMSForGivenPackage(self.base.package, chrootID, |
|
339 |
+ self.base.listBuildOptionPackages, |
|
340 |
+ self.base.pkgBuildOptionFile, |
|
341 |
+ self.base.logPath) |
|
337 | 342 |
self.base.logger.info("Successfully built the package:" + self.base.package) |
338 | 343 |
except Exception as e: |
339 | 344 |
self.base.logger.error("Failed while building package:" + self.base.package) |
340 |
- self.base.logger.debug("Chroot with ID: " + chrootID + " not deleted for debugging.") |
|
345 |
+ self.base.logger.debug("Chroot with ID: " + chrootID + |
|
346 |
+ " not deleted for debugging.") |
|
341 | 347 |
logFileName = os.path.join(self.base.logPath, self.base.package + ".log") |
342 | 348 |
fileLog = os.popen('tail -n 100 ' + logFileName).read() |
343 | 349 |
self.base.logger.debug(fileLog) |
... | ... |
@@ -1,7 +1,7 @@ |
1 | 1 |
import json |
2 |
+import os.path |
|
2 | 3 |
from Logger import Logger |
3 | 4 |
from constants import constants |
4 |
-import os.path |
|
5 | 5 |
from CommandUtils import CommandUtils |
6 | 6 |
from PackageUtils import PackageUtils |
7 | 7 |
from SpecData import SPECS |
... | ... |
@@ -13,13 +13,13 @@ class PackageInfo(object): |
13 | 13 |
logName = "PackageInfo" |
14 | 14 |
if logPath is None: |
15 | 15 |
logPath = constants.logPath |
16 |
- self.logName=logName |
|
17 |
- self.logPath=logPath |
|
18 |
- self.logger=Logger.getLogger(logName,logPath) |
|
16 |
+ self.logName = logName |
|
17 |
+ self.logPath = logPath |
|
18 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
19 | 19 |
self.pkgList = {} |
20 | 20 |
|
21 | 21 |
def loadPackagesData(self): |
22 |
- listPackages = SPECS.getData().getListPackages() |
|
22 |
+ listPackages = SPECS.getData().getListPackages() |
|
23 | 23 |
listPackages.sort() |
24 | 24 |
listRPMFiles = [] |
25 | 25 |
cmdUtils = CommandUtils() |
... | ... |
@@ -27,33 +27,34 @@ class PackageInfo(object): |
27 | 27 |
release = SPECS.getData().getRelease(package) |
28 | 28 |
version = SPECS.getData().getVersion(package) |
29 | 29 |
listRPMPackages = SPECS.getData().getRPMPackages(package) |
30 |
- srpmFileName = package+"-"+version+"-"+release+".src.rpm" |
|
30 |
+ srpmFileName = package + "-" + version + "-" + release + ".src.rpm" |
|
31 | 31 |
srpmFiles = cmdUtils.findFile(srpmFileName, constants.sourceRpmPath) |
32 | 32 |
srpmFile = None |
33 | 33 |
if len(srpmFiles) == 1: |
34 | 34 |
srpmFile = srpmFiles[0] |
35 |
- debugrpmFileName = package+"-debuginfo-"+version+"-"+release+"*" |
|
35 |
+ debugrpmFileName = package + "-debuginfo-" + version + "-" + release + "*" |
|
36 | 36 |
debugrpmFiles = cmdUtils.findFile(debugrpmFileName, constants.rpmPath) |
37 | 37 |
debugrpmFile = None |
38 | 38 |
if len(debugrpmFiles) == 1: |
39 | 39 |
debugrpmFile = debugrpmFiles[0] |
40 |
- pkgUtils = PackageUtils(self.logName,self.logPath) |
|
40 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
41 | 41 |
for rpmPkg in listRPMPackages: |
42 | 42 |
rpmFile = pkgUtils.findRPMFileForGivenPackage(rpmPkg) |
43 | 43 |
if rpmFile is not None: |
44 | 44 |
listRPMFiles.append(rpmFile) |
45 |
- listPkgAttributes = {"sourcerpm":srpmFile, "rpm":rpmFile, "debugrpm":debugrpmFile} |
|
45 |
+ listPkgAttributes = {"sourcerpm":srpmFile, "rpm":rpmFile, |
|
46 |
+ "debugrpm":debugrpmFile} |
|
46 | 47 |
self.pkgList[rpmPkg] = listPkgAttributes |
47 |
- self.logger.debug("Added "+rpmPkg +" rpm package to the list") |
|
48 |
+ self.logger.debug("Added " + rpmPkg + " rpm package to the list") |
|
48 | 49 |
else: |
49 |
- self.logger.error("Missing rpm file for package:"+rpmPkg) |
|
50 |
+ self.logger.error("Missing rpm file for package:" + rpmPkg) |
|
50 | 51 |
|
51 | 52 |
def writePkgListToFile(self, fileName): |
52 |
- self.logger.info("Writing package list to the json file") |
|
53 |
- cmdUtils=CommandUtils() |
|
54 |
- dirPath=os.path.basename(fileName) |
|
55 |
- if not os.path.isdir(dirPath): |
|
56 |
- cmdUtils.runCommandInShell("mkdir -p "+dirPath) |
|
57 |
- pkgInfoFile = open(fileName,'w+') |
|
58 |
- json.dump(self.pkgList, pkgInfoFile, indent=4) |
|
59 |
- pkgInfoFile.close() |
|
53 |
+ self.logger.info("Writing package list to the json file") |
|
54 |
+ cmdUtils = CommandUtils() |
|
55 |
+ dirPath = os.path.basename(fileName) |
|
56 |
+ if not os.path.isdir(dirPath): |
|
57 |
+ cmdUtils.runCommandInShell("mkdir -p " + dirPath) |
|
58 |
+ pkgInfoFile = open(fileName, 'w+') |
|
59 |
+ json.dump(self.pkgList, pkgInfoFile, indent=4) |
|
60 |
+ pkgInfoFile.close() |
... | ... |
@@ -1,9 +1,9 @@ |
1 |
+import os |
|
2 |
+import threading |
|
1 | 3 |
from PackageBuildDataGenerator import PackageBuildDataGenerator |
2 | 4 |
from Logger import Logger |
3 |
-import threading |
|
4 | 5 |
from constants import constants |
5 | 6 |
import docker |
6 |
-import os |
|
7 | 7 |
from ChrootUtils import ChrootUtils |
8 | 8 |
from CommandUtils import CommandUtils |
9 | 9 |
from PackageUtils import PackageUtils |
... | ... |
@@ -14,67 +14,70 @@ from SpecData import SPECS |
14 | 14 |
|
15 | 15 |
class PackageManager(object): |
16 | 16 |
|
17 |
- def __init__(self,logName=None,logPath=None,pkgBuildType="chroot"): |
|
17 |
+ def __init__(self, logName=None, logPath=None, pkgBuildType="chroot"): |
|
18 | 18 |
if logName is None: |
19 | 19 |
logName = "PackageManager" |
20 | 20 |
if logPath is None: |
21 | 21 |
logPath = constants.logPath |
22 |
- self.logName=logName |
|
23 |
- self.logPath=logPath |
|
24 |
- self.logger=Logger.getLogger(logName,logPath) |
|
25 |
- self.mapCyclesToPackageList={} |
|
26 |
- self.mapPackageToCycle={} |
|
27 |
- self.sortedPackageList=[] |
|
22 |
+ self.logName = logName |
|
23 |
+ self.logPath = logPath |
|
24 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
25 |
+ self.mapCyclesToPackageList = {} |
|
26 |
+ self.mapPackageToCycle = {} |
|
27 |
+ self.sortedPackageList = [] |
|
28 | 28 |
self.listOfPackagesAlreadyBuilt = [] |
29 |
- self.listThreads={} |
|
30 |
- self.mapOutputThread={} |
|
31 |
- self.mapThreadsLaunchTime={} |
|
32 |
- self.listAvailableCyclicPackages=[] |
|
33 |
- self.listBuildOptionPackages=[] |
|
34 |
- self.pkgBuildOptionFile="" |
|
35 |
- self.pkgBuildType=pkgBuildType |
|
29 |
+ self.listThreads = {} |
|
30 |
+ self.mapOutputThread = {} |
|
31 |
+ self.mapThreadsLaunchTime = {} |
|
32 |
+ self.listAvailableCyclicPackages = [] |
|
33 |
+ self.listBuildOptionPackages = [] |
|
34 |
+ self.pkgBuildOptionFile = "" |
|
35 |
+ self.pkgBuildType = pkgBuildType |
|
36 | 36 |
if self.pkgBuildType == "container": |
37 | 37 |
self.dockerClient = docker.from_env(version="auto") |
38 | 38 |
|
39 | 39 |
def readPackageBuildData(self, listPackages): |
40 | 40 |
try: |
41 |
- pkgBuildDataGen = PackageBuildDataGenerator(self.logName,self.logPath) |
|
42 |
- self.mapCyclesToPackageList,self.mapPackageToCycle,self.sortedPackageList = pkgBuildDataGen.getPackageBuildData(listPackages) |
|
43 |
- except: |
|
41 |
+ pkgBuildDataGen = PackageBuildDataGenerator(self.logName, self.logPath) |
|
42 |
+ self.mapCyclesToPackageList, self.mapPackageToCycle, self.sortedPackageList = ( |
|
43 |
+ pkgBuildDataGen.getPackageBuildData(listPackages)) |
|
44 |
+ |
|
45 |
+ except Exception as e: |
|
46 |
+ self.logger.exception(e) |
|
44 | 47 |
self.logger.error("unable to get sorted list") |
45 | 48 |
return False |
46 | 49 |
return True |
47 | 50 |
|
48 | 51 |
def readAlreadyAvailablePackages(self): |
49 |
- listAvailablePackages=[] |
|
50 |
- listFoundRPMPackages=[] |
|
51 |
- listRPMFiles=[] |
|
52 |
- listDirectorys=[] |
|
52 |
+ listAvailablePackages = [] |
|
53 |
+ listFoundRPMPackages = [] |
|
54 |
+ listRPMFiles = [] |
|
55 |
+ listDirectorys = [] |
|
53 | 56 |
listDirectorys.append(constants.rpmPath) |
54 | 57 |
if constants.inputRPMSPath is not None: |
55 | 58 |
listDirectorys.append(constants.inputRPMSPath) |
56 | 59 |
|
57 | 60 |
while len(listDirectorys) > 0: |
58 |
- dirPath=listDirectorys.pop() |
|
61 |
+ dirPath = listDirectorys.pop() |
|
59 | 62 |
for dirEntry in os.listdir(dirPath): |
60 | 63 |
dirEntryPath = os.path.join(dirPath, dirEntry) |
61 | 64 |
if os.path.isfile(dirEntryPath) and dirEntryPath.endswith(".rpm"): |
62 | 65 |
listRPMFiles.append(dirEntryPath) |
63 | 66 |
elif os.path.isdir(dirEntryPath): |
64 | 67 |
listDirectorys.append(dirEntryPath) |
65 |
- pkgUtils = PackageUtils(self.logName,self.logPath) |
|
68 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
66 | 69 |
for rpmfile in listRPMFiles: |
67 |
- package,version,release = pkgUtils.findPackageInfoFromRPMFile(rpmfile) |
|
70 |
+ package, version, release = pkgUtils.findPackageInfoFromRPMFile(rpmfile) |
|
68 | 71 |
if SPECS.getData().isRPMPackage(package): |
69 |
- specVersion=SPECS.getData().getVersion(package) |
|
70 |
- specRelease=SPECS.getData().getRelease(package) |
|
72 |
+ specVersion = SPECS.getData().getVersion(package) |
|
73 |
+ specRelease = SPECS.getData().getRelease(package) |
|
71 | 74 |
if version == specVersion and release == specRelease: |
72 | 75 |
listFoundRPMPackages.append(package) |
73 | 76 |
#Mark package available only if all sub packages are available |
74 | 77 |
for package in listFoundRPMPackages: |
75 | 78 |
basePkg = SPECS.getData().getSpecName(package) |
76 | 79 |
if basePkg in listAvailablePackages: |
77 |
- continue; |
|
80 |
+ continue |
|
78 | 81 |
listRPMPackages = SPECS.getData().getRPMPackages(basePkg) |
79 | 82 |
packageIsAlreadyBuilt = True |
80 | 83 |
for rpmpkg in listRPMPackages: |
... | ... |
@@ -86,14 +89,14 @@ class PackageManager(object): |
86 | 86 |
self.logger.info(listAvailablePackages) |
87 | 87 |
return listAvailablePackages |
88 | 88 |
|
89 |
- def calculateParams(self,listPackages): |
|
89 |
+ def calculateParams(self, listPackages): |
|
90 | 90 |
self.listThreads.clear() |
91 | 91 |
self.mapOutputThread.clear() |
92 | 92 |
self.mapThreadsLaunchTime.clear() |
93 |
- self.listAvailableCyclicPackages=[] |
|
93 |
+ self.listAvailableCyclicPackages = [] |
|
94 | 94 |
self.mapCyclesToPackageList.clear() |
95 | 95 |
self.mapPackageToCycle.clear() |
96 |
- self.sortedPackageList=[] |
|
96 |
+ self.sortedPackageList = [] |
|
97 | 97 |
|
98 | 98 |
listOfPackagesAlreadyBuilt = [] |
99 | 99 |
listOfPackagesAlreadyBuilt = self.readAlreadyAvailablePackages() |
... | ... |
@@ -113,9 +116,10 @@ class PackageManager(object): |
113 | 113 |
if needToRebuild: |
114 | 114 |
self.listOfPackagesAlreadyBuilt.remove(pkg) |
115 | 115 |
|
116 |
- listPackagesToBuild=listPackages[:] |
|
116 |
+ listPackagesToBuild = listPackages[:] |
|
117 | 117 |
for pkg in listPackages: |
118 |
- if pkg in self.listOfPackagesAlreadyBuilt and not constants.rpmCheck: |
|
118 |
+ if (pkg in self.listOfPackagesAlreadyBuilt and |
|
119 |
+ not constants.rpmCheck): |
|
119 | 120 |
listPackagesToBuild.remove(pkg) |
120 | 121 |
|
121 | 122 |
if not self.readPackageBuildData(listPackagesToBuild): |
... | ... |
@@ -125,8 +129,9 @@ class PackageManager(object): |
125 | 125 |
def buildToolChain(self): |
126 | 126 |
pkgCount = 0 |
127 | 127 |
try: |
128 |
- tUtils=ToolChainUtils() |
|
129 |
- pkgCount = tUtils.buildCoreToolChainPackages(self.listBuildOptionPackages, self.pkgBuildOptionFile) |
|
128 |
+ tUtils = ToolChainUtils() |
|
129 |
+ pkgCount = tUtils.buildCoreToolChainPackages(self.listBuildOptionPackages, |
|
130 |
+ self.pkgBuildOptionFile) |
|
130 | 131 |
except Exception as e: |
131 | 132 |
self.logger.error("Unable to build tool chain") |
132 | 133 |
self.logger.error(e) |
... | ... |
@@ -150,71 +155,73 @@ class PackageManager(object): |
150 | 150 |
self.buildToolChain() |
151 | 151 |
self.buildGivenPackages(constants.listMakeCheckRPMPkgtoInstall, buildThreads) |
152 | 152 |
|
153 |
- def buildPackages(self,listPackages, listBuildOptionPackages, pkgBuildOptionFile, buildThreads, pkgBuildType): |
|
153 |
+ def buildPackages(self, listPackages, listBuildOptionPackages, pkgBuildOptionFile, |
|
154 |
+ buildThreads, pkgBuildType): |
|
154 | 155 |
self.listBuildOptionPackages = listBuildOptionPackages |
155 | 156 |
self.pkgBuildOptionFile = pkgBuildOptionFile |
156 | 157 |
self.pkgBuildType = pkgBuildType |
157 | 158 |
if constants.rpmCheck: |
158 |
- constants.rpmCheck=False |
|
159 |
+ constants.rpmCheck = False |
|
159 | 160 |
self.buildToolChainPackages(listBuildOptionPackages, pkgBuildOptionFile, buildThreads) |
160 | 161 |
self.buildTestPackages(listBuildOptionPackages, pkgBuildOptionFile, buildThreads) |
161 |
- constants.rpmCheck=True |
|
162 |
+ constants.rpmCheck = True |
|
162 | 163 |
self.buildGivenPackages(listPackages, buildThreads) |
163 | 164 |
else: |
164 | 165 |
self.buildToolChainPackages(listBuildOptionPackages, pkgBuildOptionFile, buildThreads) |
165 | 166 |
self.buildGivenPackages(listPackages, buildThreads) |
166 | 167 |
|
167 |
- def initializeThreadPool(self,statusEvent): |
|
168 |
+ def initializeThreadPool(self, statusEvent): |
|
168 | 169 |
ThreadPool.clear() |
169 |
- ThreadPool.mapPackageToCycle=self.mapPackageToCycle |
|
170 |
- ThreadPool.listAvailableCyclicPackages=self.listAvailableCyclicPackages |
|
171 |
- ThreadPool.listBuildOptionPackages=self.listBuildOptionPackages |
|
172 |
- ThreadPool.pkgBuildOptionFile=self.pkgBuildOptionFile |
|
173 |
- ThreadPool.logger=self.logger |
|
174 |
- ThreadPool.statusEvent=statusEvent |
|
175 |
- ThreadPool.pkgBuildType=self.pkgBuildType |
|
170 |
+ ThreadPool.mapPackageToCycle = self.mapPackageToCycle |
|
171 |
+ ThreadPool.listAvailableCyclicPackages = self.listAvailableCyclicPackages |
|
172 |
+ ThreadPool.listBuildOptionPackages = self.listBuildOptionPackages |
|
173 |
+ ThreadPool.pkgBuildOptionFile = self.pkgBuildOptionFile |
|
174 |
+ ThreadPool.logger = self.logger |
|
175 |
+ ThreadPool.statusEvent = statusEvent |
|
176 |
+ ThreadPool.pkgBuildType = self.pkgBuildType |
|
176 | 177 |
|
177 |
- def initializeScheduler(self,statusEvent): |
|
178 |
+ def initializeScheduler(self, statusEvent): |
|
178 | 179 |
Scheduler.setLog(self.logName, self.logPath) |
179 | 180 |
Scheduler.setParams(self.sortedPackageList, self.listOfPackagesAlreadyBuilt) |
180 | 181 |
Scheduler.setEvent(statusEvent) |
181 |
- Scheduler.stopScheduling=False |
|
182 |
+ Scheduler.stopScheduling = False |
|
182 | 183 |
|
183 |
- def buildGivenPackages (self, listPackages, buildThreads): |
|
184 |
+ def buildGivenPackages(self, listPackages, buildThreads): |
|
184 | 185 |
if constants.rpmCheck: |
185 |
- alreadyBuiltRPMS=self.readAlreadyAvailablePackages() |
|
186 |
- listPackages=list(set(listPackages)|(set(constants.listMakeCheckRPMPkgtoInstall)-set(alreadyBuiltRPMS))) |
|
186 |
+ alreadyBuiltRPMS = self.readAlreadyAvailablePackages() |
|
187 |
+ listPackages = (list(set(listPackages)|(set(constants.listMakeCheckRPMPkgtoInstall)- |
|
188 |
+ set(alreadyBuiltRPMS)))) |
|
187 | 189 |
|
188 |
- returnVal=self.calculateParams(listPackages) |
|
190 |
+ returnVal = self.calculateParams(listPackages) |
|
189 | 191 |
if not returnVal: |
190 | 192 |
self.logger.error("Unable to set paramaters. Terminating the package manager.") |
191 | 193 |
raise Exception("Unable to set paramaters") |
192 | 194 |
|
193 |
- statusEvent=threading.Event() |
|
195 |
+ statusEvent = threading.Event() |
|
194 | 196 |
self.initializeScheduler(statusEvent) |
195 | 197 |
self.initializeThreadPool(statusEvent) |
196 | 198 |
|
197 |
- i=0 |
|
199 |
+ i = 0 |
|
198 | 200 |
while i < buildThreads: |
199 |
- workerName="WorkerThread"+str(i) |
|
201 |
+ workerName = "WorkerThread" + str(i) |
|
200 | 202 |
ThreadPool.addWorkerThread(workerName) |
201 | 203 |
ThreadPool.startWorkerThread(workerName) |
202 | 204 |
i = i + 1 |
203 | 205 |
|
204 | 206 |
statusEvent.wait() |
205 |
- Scheduler.stopScheduling=True |
|
207 |
+ Scheduler.stopScheduling = True |
|
206 | 208 |
self.logger.info("Waiting for all remaining worker threads") |
207 |
- listWorkerObjs=ThreadPool.getAllWorkerObjects() |
|
209 |
+ listWorkerObjs = ThreadPool.getAllWorkerObjects() |
|
208 | 210 |
for w in listWorkerObjs: |
209 | 211 |
w.join() |
210 | 212 |
|
211 |
- setFailFlag=False |
|
212 |
- allPackagesBuilt=False |
|
213 |
+ setFailFlag = False |
|
214 |
+ allPackagesBuilt = False |
|
213 | 215 |
if Scheduler.isAnyPackagesFailedToBuild(): |
214 |
- setFailFlag=True |
|
216 |
+ setFailFlag = True |
|
215 | 217 |
|
216 | 218 |
if Scheduler.isAllPackagesBuilt(): |
217 |
- allPackagesBuilt=True |
|
219 |
+ allPackagesBuilt = True |
|
218 | 220 |
|
219 | 221 |
if setFailFlag: |
220 | 222 |
self.logger.error("Some of the packages failed:") |
... | ... |
@@ -240,7 +247,7 @@ class PackageManager(object): |
240 | 240 |
self.logger.debug("Photon build container image not found.") |
241 | 241 |
|
242 | 242 |
# Create toolchain chroot and install toolchain RPMs |
243 |
- chrootID=None |
|
243 |
+ chrootID = None |
|
244 | 244 |
try: |
245 | 245 |
#TODO: constants.tcrootname |
246 | 246 |
chrUtils = ChrootUtils("toolchain-chroot", self.logPath) |
... | ... |
@@ -1,29 +1,29 @@ |
1 |
-from CommandUtils import CommandUtils |
|
2 |
-from Logger import Logger |
|
3 | 1 |
import os |
4 | 2 |
import platform |
5 | 3 |
import shutil |
6 |
-from constants import constants |
|
7 | 4 |
import re |
8 | 5 |
import random |
9 | 6 |
import string |
10 |
-from time import sleep |
|
11 |
-import PullSources |
|
12 |
-import json |
|
13 | 7 |
import collections |
8 |
+import json |
|
9 |
+from CommandUtils import CommandUtils |
|
10 |
+from Logger import Logger |
|
11 |
+from constants import constants |
|
12 |
+import PullSources |
|
14 | 13 |
from SpecData import SPECS |
15 | 14 |
|
16 | 15 |
class PackageUtils(object): |
17 | 16 |
|
18 |
- def __init__(self,logName=None,logPath=None): |
|
17 |
+ def __init__(self, logName=None, logPath=None): |
|
19 | 18 |
if logName is None: |
20 | 19 |
self.logName = "PackageUtils" |
21 | 20 |
if logPath is None: |
22 | 21 |
logPath = constants.logPath |
23 |
- self.logName=logName |
|
24 |
- self.logPath=logPath |
|
25 |
- self.logger=Logger.getLogger(logName,logPath) |
|
26 |
- self.runInChrootCommand="./run-in-chroot.sh " + constants.sourcePath + " " + constants.rpmPath; |
|
22 |
+ self.logName = logName |
|
23 |
+ self.logPath = logPath |
|
24 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
25 |
+ self.runInChrootCommand = ("./run-in-chroot.sh " + constants.sourcePath + |
|
26 |
+ " " + constants.rpmPath) |
|
27 | 27 |
self.rpmBinary = "rpm" |
28 | 28 |
self.installRPMPackageOptions = "-Uvh" |
29 | 29 |
self.nodepsRPMPackageOptions = "--nodeps" |
... | ... |
@@ -31,58 +31,60 @@ class PackageUtils(object): |
31 | 31 |
self.rpmbuildBinary = "rpmbuild" |
32 | 32 |
self.rpmbuildBuildallOption = "-ba --clean" |
33 | 33 |
self.rpmbuildNocheckOption = "--nocheck" |
34 |
- self.rpmbuildCheckOption ="-bi --clean" |
|
34 |
+ self.rpmbuildCheckOption = "-bi --clean" |
|
35 | 35 |
self.queryRpmPackageOptions = "-qa" |
36 | 36 |
self.forceRpmPackageOptions = "--force" |
37 | 37 |
self.replaceRpmPackageOptions = "--replacepkgs" |
38 |
- self.adjustGCCSpecScript="adjust-gcc-specs.sh" |
|
39 |
- self.rpmFilesToInstallInAOneShot="" |
|
40 |
- self.packagesToInstallInAOneShot="" |
|
41 |
- self.noDepsRPMFilesToInstallInAOneShot="" |
|
42 |
- self.noDepsPackagesToInstallInAOneShot="" |
|
43 |
- self.rpmFilesToReInstallInAOneShot="" |
|
44 |
- self.noDepsRPMFilesToReInstallInAOneShot="" |
|
45 |
- |
|
46 |
- def getRPMArch(self,rpmName): |
|
47 |
- arch="" |
|
38 |
+ self.adjustGCCSpecScript = "adjust-gcc-specs.sh" |
|
39 |
+ self.rpmFilesToInstallInAOneShot = "" |
|
40 |
+ self.packagesToInstallInAOneShot = "" |
|
41 |
+ self.noDepsRPMFilesToInstallInAOneShot = "" |
|
42 |
+ self.noDepsPackagesToInstallInAOneShot = "" |
|
43 |
+ self.rpmFilesToReInstallInAOneShot = "" |
|
44 |
+ self.noDepsRPMFilesToReInstallInAOneShot = "" |
|
45 |
+ |
|
46 |
+ def getRPMArch(self, rpmName): |
|
47 |
+ arch = "" |
|
48 | 48 |
if rpmName.find("x86_64") != -1: |
49 |
- arch="x86_64" |
|
49 |
+ arch = "x86_64" |
|
50 | 50 |
elif rpmName.find("aarch64") != -1: |
51 |
- arch="aarch64" |
|
51 |
+ arch = "aarch64" |
|
52 | 52 |
elif rpmName.find("noarch") != -1: |
53 |
- arch="noarch" |
|
53 |
+ arch = "noarch" |
|
54 | 54 |
return arch |
55 | 55 |
|
56 |
- def getRPMDestDir(self,rpmName,rpmDir): |
|
56 |
+ def getRPMDestDir(self, rpmName, rpmDir): |
|
57 | 57 |
arch = self.getRPMArch(rpmName) |
58 |
- rpmDestDir=rpmDir+"/"+arch |
|
58 |
+ rpmDestDir = rpmDir + "/" + arch |
|
59 | 59 |
return rpmDestDir |
60 | 60 |
|
61 |
- def copyRPM(self,rpmFile,destDir): |
|
61 |
+ def copyRPM(self, rpmFile, destDir): |
|
62 | 62 |
cmdUtils = CommandUtils() |
63 |
- rpmName=os.path.basename(rpmFile) |
|
64 |
- rpmDestDir=self.getRPMDestDir(rpmName,destDir) |
|
63 |
+ rpmName = os.path.basename(rpmFile) |
|
64 |
+ rpmDestDir = self.getRPMDestDir(rpmName, destDir) |
|
65 | 65 |
# shutil is not atomic. copy & move to ensure atomicity. |
66 |
- rpmDestPath=rpmDestDir+"/"+rpmName |
|
67 |
- rpmDestPathTemp = rpmDestDir + "/." + ''.join([random.choice(string.ascii_letters + string.digits) for n in xrange(10)]) |
|
68 |
- if os.geteuid()==0: |
|
66 |
+ rpmDestPath = rpmDestDir + "/" + rpmName |
|
67 |
+ rpmDestPathTemp = (rpmDestDir + "/." + |
|
68 |
+ ''.join([random.choice(string.ascii_letters + |
|
69 |
+ string.digits) for n in range(10)])) |
|
70 |
+ if os.geteuid() == 0: |
|
69 | 71 |
if not os.path.isdir(rpmDestDir): |
70 |
- cmdUtils.runCommandInShell("mkdir -p "+rpmDestDir) |
|
72 |
+ cmdUtils.runCommandInShell("mkdir -p " + rpmDestDir) |
|
71 | 73 |
shutil.copyfile(rpmFile, rpmDestPathTemp) |
72 | 74 |
shutil.move(rpmDestPathTemp, rpmDestPath) |
73 | 75 |
return rpmDestPath |
74 | 76 |
|
75 |
- def installRPM(self,package,chrootID,noDeps=False,destLogPath=None): |
|
77 |
+ def installRPM(self, package, chrootID, noDeps=False, destLogPath=None): |
|
76 | 78 |
# self.logger.info("Installing rpm for package:"+package) |
77 | 79 |
# self.logger.debug("No deps:"+str(noDeps)) |
78 | 80 |
|
79 |
- rpmfile=self.findRPMFileForGivenPackage(package) |
|
81 |
+ rpmfile = self.findRPMFileForGivenPackage(package) |
|
80 | 82 |
if rpmfile is None: |
81 |
- self.logger.error("No rpm file found for package:"+package) |
|
82 |
- raise Exception("Missing rpm file: "+package) |
|
83 |
+ self.logger.error("No rpm file found for package:" + package) |
|
84 |
+ raise Exception("Missing rpm file: " + package) |
|
83 | 85 |
|
84 |
- rpmDestFile = self.copyRPM(rpmfile, chrootID+constants.topDirPath+"/RPMS") |
|
85 |
- rpmFile=rpmDestFile.replace(chrootID,"") |
|
86 |
+ rpmDestFile = self.copyRPM(rpmfile, chrootID + constants.topDirPath + "/RPMS") |
|
87 |
+ rpmFile = rpmDestFile.replace(chrootID, "") |
|
86 | 88 |
if noDeps: |
87 | 89 |
self.noDepsRPMFilesToInstallInAOneShot += " " + rpmFile |
88 | 90 |
self.noDepsPackagesToInstallInAOneShot += " " + package |
... | ... |
@@ -90,14 +92,16 @@ class PackageUtils(object): |
90 | 90 |
self.rpmFilesToInstallInAOneShot += " " + rpmFile |
91 | 91 |
self.packagesToInstallInAOneShot += " " + package |
92 | 92 |
|
93 |
- def installRPMSInAOneShot(self,chrootID,destLogPath): |
|
94 |
- chrootCmd=self.runInChrootCommand+" "+chrootID |
|
95 |
- rpmInstallcmd=self.rpmBinary+" "+ self.installRPMPackageOptions |
|
93 |
+ def installRPMSInAOneShot(self, chrootID, destLogPath): |
|
94 |
+ chrootCmd = self.runInChrootCommand + " " + chrootID |
|
95 |
+ rpmInstallcmd = self.rpmBinary + " " + self.installRPMPackageOptions |
|
96 | 96 |
cmdUtils = CommandUtils() |
97 | 97 |
if self.noDepsRPMFilesToInstallInAOneShot != "": |
98 |
- self.logger.info("Installing nodeps rpms: " + self.noDepsPackagesToInstallInAOneShot) |
|
99 |
- logFile=destLogPath+"/install_rpms_nodeps.log" |
|
100 |
- cmd = rpmInstallcmd+" "+self.nodepsRPMPackageOptions + " " + self.noDepsRPMFilesToInstallInAOneShot |
|
98 |
+ self.logger.info("Installing nodeps rpms: " + |
|
99 |
+ self.noDepsPackagesToInstallInAOneShot) |
|
100 |
+ logFile = destLogPath + "/install_rpms_nodeps.log" |
|
101 |
+ cmd = (rpmInstallcmd+" "+self.nodepsRPMPackageOptions + " " + |
|
102 |
+ self.noDepsRPMFilesToInstallInAOneShot) |
|
101 | 103 |
returnVal = cmdUtils.runCommandInShell(cmd, logFile, chrootCmd) |
102 | 104 |
if not returnVal: |
103 | 105 |
self.logger.debug("Command Executed:" + cmd) |
... | ... |
@@ -105,8 +109,8 @@ class PackageUtils(object): |
105 | 105 |
raise Exception("RPM installation failed") |
106 | 106 |
if self.rpmFilesToInstallInAOneShot != "": |
107 | 107 |
self.logger.info("Installing rpms: " + self.packagesToInstallInAOneShot) |
108 |
- logFile=destLogPath+"/install_rpms.log" |
|
109 |
- cmd=rpmInstallcmd+" "+self.rpmFilesToInstallInAOneShot |
|
108 |
+ logFile = destLogPath+"/install_rpms.log" |
|
109 |
+ cmd = rpmInstallcmd+" "+self.rpmFilesToInstallInAOneShot |
|
110 | 110 |
returnVal = cmdUtils.runCommandInShell(cmd, logFile, chrootCmd) |
111 | 111 |
if not returnVal: |
112 | 112 |
self.logger.debug("Command Executed:" + cmd) |
... | ... |
@@ -118,34 +122,38 @@ class PackageUtils(object): |
118 | 118 |
# Fetch/verify sources if sha1 not None. |
119 | 119 |
sha1 = SPECS.getData().getSHA1(package, source) |
120 | 120 |
if sha1 is not None: |
121 |
- PullSources.get(source, sha1, constants.sourcePath, constants.pullsourcesConfig, self.logger) |
|
121 |
+ PullSources.get(source, sha1, constants.sourcePath, constants.pullsourcesConfig, |
|
122 |
+ self.logger) |
|
122 | 123 |
|
123 |
- sourcePath = cmdUtils.findFile(source,constants.sourcePath) |
|
124 |
+ sourcePath = cmdUtils.findFile(source, constants.sourcePath) |
|
124 | 125 |
if sourcePath is None or len(sourcePath) == 0: |
125 |
- sourcePath = cmdUtils.findFile(source,constants.specPath) |
|
126 |
+ sourcePath = cmdUtils.findFile(source, constants.specPath) |
|
126 | 127 |
if sourcePath is None or len(sourcePath) == 0: |
127 | 128 |
if sha1 is None: |
128 |
- self.logger.error("No sha1 found or missing source for "+source) |
|
129 |
- raise Exception("No sha1 found or missing source for "+source) |
|
129 |
+ self.logger.error("No sha1 found or missing source for " + source) |
|
130 |
+ raise Exception("No sha1 found or missing source for " + source) |
|
130 | 131 |
else: |
131 |
- self.logger.error("Missing source: "+source+". Cannot find sources for package: "+package) |
|
132 |
+ self.logger.error("Missing source: " + source + |
|
133 |
+ ". Cannot find sources for package: " + package) |
|
132 | 134 |
raise Exception("Missing source") |
133 | 135 |
else: |
134 | 136 |
if sha1 is None: |
135 | 137 |
self.logger.error("No sha1 found for "+source) |
136 | 138 |
raise Exception("No sha1 found") |
137 | 139 |
if len(sourcePath) > 1: |
138 |
- self.logger.error("Multiple sources found for source:"+source+"\n"+ ",".join(sourcePath) +"\nUnable to determine one.") |
|
140 |
+ self.logger.error("Multiple sources found for source:" + source + "\n" + |
|
141 |
+ ",".join(sourcePath) +"\nUnable to determine one.") |
|
139 | 142 |
raise Exception("Multiple sources found") |
140 | 143 |
return sourcePath |
141 | 144 |
|
142 |
- def copySourcesTobuildroot(self,listSourceFiles,package,destDir): |
|
145 |
+ def copySourcesTobuildroot(self, listSourceFiles, package, destDir): |
|
143 | 146 |
for source in listSourceFiles: |
144 | 147 |
sourcePath = self.verifyShaAndGetSourcePath(source, package) |
145 |
- self.logger.info("Copying... Source path :" + source + " Source filename: " + sourcePath[0]) |
|
148 |
+ self.logger.info("Copying... Source path :" + source + |
|
149 |
+ " Source filename: " + sourcePath[0]) |
|
146 | 150 |
shutil.copy2(sourcePath[0], destDir) |
147 | 151 |
|
148 |
- def copyAdditionalBuildFiles(self,listAdditionalFiles,chrootID): |
|
152 |
+ def copyAdditionalBuildFiles(self, listAdditionalFiles, chrootID): |
|
149 | 153 |
cmdUtils = CommandUtils() |
150 | 154 |
for additionalFile in listAdditionalFiles: |
151 | 155 |
source = additionalFile["src"].encode('utf-8') |
... | ... |
@@ -174,80 +182,90 @@ class PackageUtils(object): |
174 | 174 |
macros.append(str(macro.encode('utf-8'))) |
175 | 175 |
return listAdditionalFiles, macros |
176 | 176 |
|
177 |
- def buildRPMSForGivenPackage(self,package,chrootID,listBuildOptionPackages,pkgBuildOptionFile,destLogPath=None): |
|
178 |
- self.logger.info("Building rpm's for package:"+package) |
|
177 |
+ def buildRPMSForGivenPackage(self, package, chrootID, listBuildOptionPackages, |
|
178 |
+ pkgBuildOptionFile, destLogPath=None): |
|
179 |
+ self.logger.info("Building rpm's for package:" + package) |
|
179 | 180 |
|
180 | 181 |
listSourcesFiles = SPECS.getData().getSources(package) |
181 |
- listPatchFiles = SPECS.getData().getPatches(package) |
|
182 |
+ listPatchFiles = SPECS.getData().getPatches(package) |
|
182 | 183 |
specFile = SPECS.getData().getSpecFile(package) |
183 | 184 |
specName = SPECS.getData().getSpecName(package) + ".spec" |
184 | 185 |
|
185 |
- chrootSourcePath=chrootID+constants.topDirPath+"/SOURCES/" |
|
186 |
- chrootSpecPath=constants.topDirPath+"/SPECS/" |
|
187 |
- chrootLogsFilePath=chrootID+constants.topDirPath+"/LOGS/"+package+".log" |
|
188 |
- chrootCmd=self.runInChrootCommand+" "+chrootID |
|
189 |
- shutil.copyfile(specFile, chrootID+chrootSpecPath+specName ) |
|
186 |
+ chrootSourcePath = chrootID + constants.topDirPath + "/SOURCES/" |
|
187 |
+ chrootSpecPath = constants.topDirPath + "/SPECS/" |
|
188 |
+ chrootLogsFilePath = chrootID + constants.topDirPath + "/LOGS/" + package + ".log" |
|
189 |
+ chrootCmd = self.runInChrootCommand + " " + chrootID |
|
190 |
+ shutil.copyfile(specFile, chrootID + chrootSpecPath + specName) |
|
190 | 191 |
|
191 | 192 |
# FIXME: some sources are located in SPECS/.. how to mount? |
192 | 193 |
# if os.geteuid()==0: |
193 |
- self.copySourcesTobuildroot(listSourcesFiles,package,chrootSourcePath) |
|
194 |
- self.copySourcesTobuildroot(listPatchFiles,package,chrootSourcePath) |
|
194 |
+ self.copySourcesTobuildroot(listSourcesFiles, package, chrootSourcePath) |
|
195 |
+ self.copySourcesTobuildroot(listPatchFiles, package, chrootSourcePath) |
|
195 | 196 |
|
196 | 197 |
macros = [] |
197 | 198 |
if package in listBuildOptionPackages: |
198 | 199 |
listAdditionalFiles, macros = self.getAdditionalBuildFiles(package, pkgBuildOptionFile) |
199 |
- self.copyAdditionalBuildFiles(listAdditionalFiles,chrootID) |
|
200 |
+ self.copyAdditionalBuildFiles(listAdditionalFiles, chrootID) |
|
200 | 201 |
|
201 | 202 |
#Adding rpm macros |
202 | 203 |
listRPMMacros = constants.userDefinedMacros |
203 | 204 |
for macroName in listRPMMacros.keys(): |
204 |
- macros.append(macroName+" "+listRPMMacros[macroName]) |
|
205 |
+ macros.append(macroName + " " + listRPMMacros[macroName]) |
|
205 | 206 |
|
206 |
- listRPMFiles=[] |
|
207 |
- listSRPMFiles=[] |
|
207 |
+ listRPMFiles = [] |
|
208 |
+ listSRPMFiles = [] |
|
208 | 209 |
try: |
209 |
- listRPMFiles,listSRPMFiles = self.buildRPM(chrootSpecPath +specName,chrootLogsFilePath,chrootCmd,package,macros) |
|
210 |
- self.logger.info("Successfully built rpm:"+package) |
|
210 |
+ listRPMFiles, listSRPMFiles = self.buildRPM(chrootSpecPath + specName, |
|
211 |
+ chrootLogsFilePath, chrootCmd, |
|
212 |
+ package, macros) |
|
213 |
+ self.logger.info("Successfully built rpm:" + package) |
|
211 | 214 |
except Exception as e: |
212 |
- self.logger.error("Failed while building rpm:"+package) |
|
215 |
+ self.logger.error("Failed while building rpm:" + package) |
|
213 | 216 |
raise e |
214 | 217 |
finally: |
215 | 218 |
if destLogPath is not None: |
216 |
- if constants.rpmCheck and package in constants.testForceRPMS and SPECS.getData().isCheckAvailable(package): |
|
217 |
- cmd="sed -i '/^Executing(%check):/,/^Processing files:/{//!b};d' "+ chrootLogsFilePath |
|
218 |
- logFile = destLogPath+"/adjustTestFile.log" |
|
219 |
+ if (constants.rpmCheck and |
|
220 |
+ package in constants.testForceRPMS and |
|
221 |
+ SPECS.getData().isCheckAvailable(package)): |
|
222 |
+ cmd = ("sed -i '/^Executing(%check):/,/^Processing files:/{//!b};d' " + |
|
223 |
+ chrootLogsFilePath) |
|
224 |
+ logFile = destLogPath + "/adjustTestFile.log" |
|
219 | 225 |
returnVal = CommandUtils().runCommandInShell(cmd, logFile) |
220 |
- testLogFile = destLogPath+"/"+package+"-test.log" |
|
226 |
+ testLogFile = destLogPath + "/" + package + "-test.log" |
|
221 | 227 |
shutil.copyfile(chrootLogsFilePath, testLogFile) |
222 | 228 |
else: |
223 | 229 |
shutil.copy2(chrootLogsFilePath, destLogPath) |
224 | 230 |
self.logger.info("RPM build is successful") |
225 | 231 |
|
226 | 232 |
for rpmFile in listRPMFiles: |
227 |
- self.copyRPM(chrootID+"/"+rpmFile, constants.rpmPath) |
|
233 |
+ self.copyRPM(chrootID + "/" + rpmFile, constants.rpmPath) |
|
228 | 234 |
|
229 | 235 |
for srpmFile in listSRPMFiles: |
230 |
- srpmDestFile = self.copyRPM(chrootID+"/"+srpmFile, constants.sourceRpmPath) |
|
236 |
+ srpmDestFile = self.copyRPM(chrootID + "/" + srpmFile, constants.sourceRpmPath) |
|
231 | 237 |
|
232 |
- def buildRPM(self,specFile,logFile,chrootCmd,package,macros): |
|
238 |
+ def buildRPM(self, specFile, logFile, chrootCmd, package, macros): |
|
233 | 239 |
|
234 |
- rpmBuildcmd=self.rpmbuildBinary+" "+self.rpmbuildBuildallOption |
|
240 |
+ rpmBuildcmd = self.rpmbuildBinary + " " + self.rpmbuildBuildallOption |
|
235 | 241 |
|
236 | 242 |
if constants.rpmCheck and package in constants.testForceRPMS: |
237 |
- self.logger.info("#"*(68+2*len(package))) |
|
243 |
+ self.logger.info("#" * (68 + 2 * len(package))) |
|
238 | 244 |
if not SPECS.getData().isCheckAvailable(package): |
239 |
- self.logger.info("####### "+package+" MakeCheck is not available. Skipping MakeCheck TEST for "+package+ " #######") |
|
240 |
- rpmBuildcmd=self.rpmbuildBinary+" --clean" |
|
245 |
+ self.logger.info("####### " + package + |
|
246 |
+ " MakeCheck is not available. Skipping MakeCheck TEST for " + |
|
247 |
+ package + " #######") |
|
248 |
+ rpmBuildcmd = self.rpmbuildBinary + " --clean" |
|
241 | 249 |
else: |
242 |
- self.logger.info("####### "+package+" MakeCheck is available. Running MakeCheck TEST for "+package+ " #######") |
|
243 |
- rpmBuildcmd=self.rpmbuildBinary+" "+self.rpmbuildCheckOption |
|
244 |
- self.logger.info("#"*(68+2*len(package))) |
|
250 |
+ self.logger.info("####### " + package + |
|
251 |
+ " MakeCheck is available. Running MakeCheck TEST for " + |
|
252 |
+ package + " #######") |
|
253 |
+ rpmBuildcmd = self.rpmbuildBinary + " " + self.rpmbuildCheckOption |
|
254 |
+ self.logger.info("#" * (68 + 2 * len(package))) |
|
245 | 255 |
else: |
246 |
- rpmBuildcmd+=" "+self.rpmbuildNocheckOption |
|
256 |
+ rpmBuildcmd += " " + self.rpmbuildNocheckOption |
|
247 | 257 |
|
248 | 258 |
for macro in macros: |
249 |
- rpmBuildcmd+=' --define \\\"%s\\\"' % macro |
|
250 |
- rpmBuildcmd+=" "+specFile |
|
259 |
+ rpmBuildcmd += ' --define \\\"%s\\\"' % macro |
|
260 |
+ rpmBuildcmd += " " + specFile |
|
251 | 261 |
|
252 | 262 |
cmdUtils = CommandUtils() |
253 | 263 |
self.logger.info("Building rpm....") |
... | ... |
@@ -255,99 +273,113 @@ class PackageUtils(object): |
255 | 255 |
returnVal = cmdUtils.runCommandInShell(rpmBuildcmd, logFile, chrootCmd) |
256 | 256 |
if constants.rpmCheck and package in constants.testForceRPMS: |
257 | 257 |
if not SPECS.getData().isCheckAvailable(package): |
258 |
- constants.testLogger.info(package+" : N/A") |
|
258 |
+ constants.testLogger.info(package + " : N/A") |
|
259 | 259 |
elif returnVal: |
260 |
- constants.testLogger.info(package+" : PASS") |
|
260 |
+ constants.testLogger.info(package + " : PASS") |
|
261 | 261 |
else: |
262 |
- constants.testLogger.error(package+" : FAIL" ) |
|
262 |
+ constants.testLogger.error(package + " : FAIL") |
|
263 | 263 |
|
264 | 264 |
if constants.rpmCheck: |
265 | 265 |
if not returnVal and constants.rpmCheckStopOnError: |
266 |
- self.logger.error("Checking rpm is failed "+specFile) |
|
266 |
+ self.logger.error("Checking rpm is failed " + specFile) |
|
267 | 267 |
raise Exception("RPM check failed") |
268 | 268 |
else: |
269 | 269 |
if not returnVal: |
270 |
- self.logger.error("Building rpm is failed "+specFile) |
|
270 |
+ self.logger.error("Building rpm is failed " + specFile) |
|
271 | 271 |
raise Exception("RPM build failed") |
272 | 272 |
|
273 | 273 |
#Extracting rpms created from log file |
274 |
- logfile=open(logFile,'r') |
|
275 |
- fileContents=logfile.readlines() |
|
274 |
+ logfile = open(logFile, 'r') |
|
275 |
+ fileContents = logfile.readlines() |
|
276 | 276 |
logfile.close() |
277 |
- listRPMFiles=[] |
|
278 |
- listSRPMFiles=[] |
|
279 |
- for i in range(0,len(fileContents)): |
|
280 |
- if re.search("^Wrote:",fileContents[i]): |
|
281 |
- listcontents=fileContents[i].split() |
|
282 |
- if (len(listcontents) == 2) and listcontents[1].strip()[-4:] == ".rpm" and listcontents[1].find("/RPMS/") != -1: |
|
277 |
+ listRPMFiles = [] |
|
278 |
+ listSRPMFiles = [] |
|
279 |
+ for i in range(0, len(fileContents)): |
|
280 |
+ if re.search("^Wrote:", fileContents[i]): |
|
281 |
+ listcontents = fileContents[i].split() |
|
282 |
+ if ((len(listcontents) == 2) and |
|
283 |
+ listcontents[1].strip().endswith(".rpm") and |
|
284 |
+ "/RPMS/" in listcontents[1]): |
|
283 | 285 |
listRPMFiles.append(listcontents[1]) |
284 |
- if (len(listcontents) == 2) and listcontents[1].strip()[-8:] == ".src.rpm" and listcontents[1].find("/SRPMS/") != -1: |
|
286 |
+ if ((len(listcontents) == 2) and |
|
287 |
+ listcontents[1].strip().endswith(".src.rpm") and |
|
288 |
+ "/SRPMS/" in listcontents[1]): |
|
285 | 289 |
listSRPMFiles.append(listcontents[1]) |
286 |
- return listRPMFiles,listSRPMFiles |
|
290 |
+ return listRPMFiles, listSRPMFiles |
|
287 | 291 |
|
288 |
- def findRPMFileForGivenPackage(self,package): |
|
292 |
+ def findRPMFileForGivenPackage(self, package): |
|
289 | 293 |
cmdUtils = CommandUtils() |
290 | 294 |
version = SPECS.getData().getVersion(package) |
291 | 295 |
release = SPECS.getData().getRelease(package) |
292 |
- listFoundRPMFiles = sum([cmdUtils.findFile(package+"-"+version+"-"+release+"."+platform.machine()+".rpm",constants.rpmPath), |
|
293 |
- cmdUtils.findFile(package+"-"+version+"-"+release+".noarch.rpm",constants.rpmPath)], []) |
|
296 |
+ listFoundRPMFiles = sum([cmdUtils.findFile(package + "-" + version + "-" + release + "." + |
|
297 |
+ platform.machine()+".rpm", |
|
298 |
+ constants.rpmPath), |
|
299 |
+ cmdUtils.findFile(package + "-" + version + "-" + release + |
|
300 |
+ ".noarch.rpm", |
|
301 |
+ constants.rpmPath)], []) |
|
294 | 302 |
if constants.inputRPMSPath is not None: |
295 |
- listFoundRPMFiles = sum([cmdUtils.findFile(package+"-"+version+"-"+release+"."+platform.machine()+".rpm",constants.inputRPMSPath), |
|
296 |
- cmdUtils.findFile(package+"-"+version+"-"+release+".noarch.rpm",constants.inputRPMSPath)], listFoundRPMFiles) |
|
297 |
- if len(listFoundRPMFiles) == 1 : |
|
303 |
+ listFoundRPMFiles = sum([cmdUtils.findFile(package + "-" + version + "-" + release + |
|
304 |
+ "." + platform.machine()+".rpm", |
|
305 |
+ constants.inputRPMSPath), |
|
306 |
+ cmdUtils.findFile(package + "-" + version + "-" + release + |
|
307 |
+ ".noarch.rpm", constants.inputRPMSPath)], |
|
308 |
+ listFoundRPMFiles) |
|
309 |
+ if len(listFoundRPMFiles) == 1: |
|
298 | 310 |
return listFoundRPMFiles[0] |
299 |
- if len(listFoundRPMFiles) == 0 : |
|
311 |
+ if len(listFoundRPMFiles) == 0: |
|
300 | 312 |
return None |
301 |
- if len(listFoundRPMFiles) > 1 : |
|
302 |
- self.logger.error("Found multiple rpm files for given package in rpm directory.Unable to determine the rpm file for package:"+package) |
|
313 |
+ if len(listFoundRPMFiles) > 1: |
|
314 |
+ self.logger.error("Found multiple rpm files for given package in rpm directory." + |
|
315 |
+ "Unable to determine the rpm file for package:" + package) |
|
303 | 316 |
raise Exception("Multiple rpm files found") |
304 | 317 |
|
305 |
- def findPackageNameFromRPMFile(self,rpmfile): |
|
306 |
- rpmfile=os.path.basename(rpmfile) |
|
307 |
- releaseindex=rpmfile.rfind("-") |
|
318 |
+ def findPackageNameFromRPMFile(self, rpmfile): |
|
319 |
+ rpmfile = os.path.basename(rpmfile) |
|
320 |
+ releaseindex = rpmfile.rfind("-") |
|
308 | 321 |
if releaseindex == -1: |
309 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
322 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
310 | 323 |
raise Exception("Invalid RPM") |
311 |
- versionindex=rpmfile[0:releaseindex].rfind("-") |
|
324 |
+ versionindex = rpmfile[0:releaseindex].rfind("-") |
|
312 | 325 |
if versionindex == -1: |
313 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
326 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
314 | 327 |
raise Exception("Invalid RPM") |
315 |
- packageName=rpmfile[0:versionindex] |
|
328 |
+ packageName = rpmfile[0:versionindex] |
|
316 | 329 |
return packageName |
317 | 330 |
|
318 |
- def findPackageInfoFromRPMFile(self,rpmfile): |
|
319 |
- rpmfile=os.path.basename(rpmfile) |
|
320 |
- rpmfile=rpmfile.replace("."+platform.machine()+".rpm","") |
|
321 |
- rpmfile=rpmfile.replace(".noarch.rpm","") |
|
322 |
- releaseindex=rpmfile.rfind("-") |
|
331 |
+ def findPackageInfoFromRPMFile(self, rpmfile): |
|
332 |
+ rpmfile = os.path.basename(rpmfile) |
|
333 |
+ rpmfile = rpmfile.replace("." + platform.machine() + ".rpm", "") |
|
334 |
+ rpmfile = rpmfile.replace(".noarch.rpm", "") |
|
335 |
+ releaseindex = rpmfile.rfind("-") |
|
323 | 336 |
if releaseindex == -1: |
324 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
337 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
325 | 338 |
raise Exception("Invalid RPM") |
326 |
- versionindex=rpmfile[0:releaseindex].rfind("-") |
|
339 |
+ versionindex = rpmfile[0:releaseindex].rfind("-") |
|
327 | 340 |
if versionindex == -1: |
328 |
- self.logger.error("Invalid rpm file:"+rpmfile) |
|
341 |
+ self.logger.error("Invalid rpm file:" + rpmfile) |
|
329 | 342 |
raise Exception("Invalid RPM") |
330 |
- packageName=rpmfile[0:versionindex] |
|
331 |
- version=rpmfile[versionindex+1:releaseindex] |
|
332 |
- release=rpmfile[releaseindex+1:] |
|
333 |
- return packageName,version,release |
|
343 |
+ packageName = rpmfile[0:versionindex] |
|
344 |
+ version = rpmfile[versionindex + 1:releaseindex] |
|
345 |
+ release = rpmfile[releaseindex + 1:] |
|
346 |
+ return packageName, version, release |
|
334 | 347 |
|
335 | 348 |
def findInstalledRPMPackages(self, chrootID): |
336 |
- cmd = self.rpmBinary+" "+self.queryRpmPackageOptions |
|
337 |
- chrootCmd=self.runInChrootCommand+" "+chrootID |
|
338 |
- cmdUtils=CommandUtils() |
|
339 |
- result=cmdUtils.runCommandInShell2(cmd, chrootCmd) |
|
349 |
+ cmd = self.rpmBinary + " " + self.queryRpmPackageOptions |
|
350 |
+ chrootCmd = self.runInChrootCommand + " " + chrootID |
|
351 |
+ cmdUtils = CommandUtils() |
|
352 |
+ result = cmdUtils.runCommandInShell2(cmd, chrootCmd) |
|
340 | 353 |
if result is not None: |
341 |
- return result.split() |
|
354 |
+ return result.decode().split() |
|
342 | 355 |
return result |
343 | 356 |
|
344 | 357 |
def adjustGCCSpecs(self, package, chrootID, logPath): |
345 | 358 |
opt = " " + SPECS.getData().getSecurityHardeningOption(package) |
346 |
- cmdUtils=CommandUtils() |
|
347 |
- cpcmd="cp "+ self.adjustGCCSpecScript+" "+chrootID+"/tmp/"+self.adjustGCCSpecScript |
|
348 |
- cmd = "/tmp/"+self.adjustGCCSpecScript+opt |
|
349 |
- logFile = logPath+"/adjustGCCSpecScript.log" |
|
350 |
- chrootCmd=self.runInChrootCommand+" "+chrootID |
|
359 |
+ cmdUtils = CommandUtils() |
|
360 |
+ cpcmd = ("cp " + self.adjustGCCSpecScript + " " + chrootID + |
|
361 |
+ "/tmp/" + self.adjustGCCSpecScript) |
|
362 |
+ cmd = "/tmp/" + self.adjustGCCSpecScript + opt |
|
363 |
+ logFile = logPath + "/adjustGCCSpecScript.log" |
|
364 |
+ chrootCmd = self.runInChrootCommand + " " + chrootID |
|
351 | 365 |
returnVal = cmdUtils.runCommandInShell(cpcmd, logFile) |
352 | 366 |
if not returnVal: |
353 | 367 |
self.logger.error("Error during copying the file adjust gcc spec") |
... | ... |
@@ -356,8 +388,10 @@ class PackageUtils(object): |
356 | 356 |
if returnVal: |
357 | 357 |
return |
358 | 358 |
|
359 |
- self.logger.debug(cmdUtils.runCommandInShell2("ls -la " + chrootID + "/tmp/" + self.adjustGCCSpecScript)) |
|
360 |
- self.logger.debug(cmdUtils.runCommandInShell2("lsof " + chrootID + "/tmp/" + self.adjustGCCSpecScript)) |
|
359 |
+ self.logger.debug(cmdUtils.runCommandInShell2("ls -la " + chrootID + |
|
360 |
+ "/tmp/" + self.adjustGCCSpecScript)) |
|
361 |
+ self.logger.debug(cmdUtils.runCommandInShell2("lsof " + chrootID + "/tmp/" + |
|
362 |
+ self.adjustGCCSpecScript)) |
|
361 | 363 |
self.logger.debug(cmdUtils.runCommandInShell2("ps ax")) |
362 | 364 |
|
363 | 365 |
self.logger.error("Failed while adjusting gcc specs") |
... | ... |
@@ -373,7 +407,8 @@ class PackageUtils(object): |
373 | 373 |
|
374 | 374 |
def copyAdditionalBuildFilesToContainer(self, listAdditionalFiles, containerID): |
375 | 375 |
cmdUtils = CommandUtils() |
376 |
- #self.logger.debug("VDBG-PU-copyAdditionalBuildFilesToContainer id: " +containerID.short_id) |
|
376 |
+ #self.logger.debug("VDBG-PU-copyAdditionalBuildFilesToContainer id: " + |
|
377 |
+ # containerID.short_id) |
|
377 | 378 |
#self.logger.debug(listAdditionalFiles) |
378 | 379 |
for additionalFile in listAdditionalFiles: |
379 | 380 |
source = additionalFile["src"].encode('utf-8') |
... | ... |
@@ -388,7 +423,7 @@ class PackageUtils(object): |
388 | 388 |
copyCmd += " " + destPath |
389 | 389 |
else: |
390 | 390 |
self.logger.info("Copying addl source file tree: " + source) |
391 |
- copyCmd += "/. " + destPath |
|
391 |
+ copyCmd += "/. " + destPath |
|
392 | 392 |
#TODO: cmd error code |
393 | 393 |
cmdUtils.runCommandInShell(copyCmd) |
394 | 394 |
|
... | ... |
@@ -427,39 +462,44 @@ class PackageUtils(object): |
427 | 427 |
self.rmpFilesToReInstallInAOneShot += " " + rpmDestFile |
428 | 428 |
|
429 | 429 |
def installRPMSInAOneShotInContainer(self, containerID, destLogPath): |
430 |
- rpmInstallcmd = self.rpmBinary + " " + self.installRPMPackageOptions + " " + self.forceRpmPackageOptions |
|
430 |
+ rpmInstallcmd = (self.rpmBinary + " " + self.installRPMPackageOptions + " " + |
|
431 |
+ self.forceRpmPackageOptions) |
|
431 | 432 |
|
432 | 433 |
if self.noDepsRPMFilesToInstallInAOneShot != "": |
433 |
- self.logger.info("PackageUtils-installRPMSInAOneShotInContainer: Installing nodeps rpms: " + \ |
|
434 |
+ self.logger.info("PackageUtils-installRPMSInAOneShotInContainer: " + |
|
435 |
+ "Installing nodeps rpms: " + |
|
434 | 436 |
self.noDepsPackagesToInstallInAOneShot) |
435 | 437 |
logFile = destLogPath + "/install_rpms_nodeps.log" |
436 |
- cmd = rpmInstallcmd + " " + self.nodepsRPMPackageOptions + " " + self.noDepsRPMFilesToInstallInAOneShot |
|
438 |
+ cmd = (rpmInstallcmd + " " + self.nodepsRPMPackageOptions + " " + |
|
439 |
+ self.noDepsRPMFilesToInstallInAOneShot) |
|
437 | 440 |
cmd = "/bin/bash -l -c '" + cmd + "'" |
438 |
- #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: Install nodeps cmd: " + cmd) |
|
441 |
+ #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: " + |
|
442 |
+ # "Install nodeps cmd: " + cmd) |
|
439 | 443 |
#TODO: Error code from exec_run |
440 | 444 |
installLog = containerID.exec_run(cmd) |
441 | 445 |
if not installLog: |
442 | 446 |
self.logger.error("Unable to install nodeps rpms") |
443 | 447 |
raise Exception("nodeps RPM installation failed") |
444 |
- logfile = open(logFile, 'w') |
|
445 |
- logfile.write(installLog) |
|
446 |
- logfile.close() |
|
448 |
+ with open(logFile, 'w') as logfile: |
|
449 |
+ logfile.write(installLog.decode()) |
|
447 | 450 |
|
448 | 451 |
if self.noDepsRPMFilesToReInstallInAOneShot != "": |
449 |
- cmd = rpmInstallcmd + " " + self.nodepsRPMPackageOptions + " " + self.forceRpmPackageOptions + " " + self.noDepsRPMFilesToReInstallInAOneShot |
|
452 |
+ cmd = (rpmInstallcmd + " " + self.nodepsRPMPackageOptions + " " + |
|
453 |
+ self.forceRpmPackageOptions + " " + |
|
454 |
+ self.noDepsRPMFilesToReInstallInAOneShot) |
|
450 | 455 |
cmd = "/bin/bash -l -c '" + cmd + "'" |
451 |
- #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: ReInstall nodeps cmd: " + cmd) |
|
456 |
+ #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: " + |
|
457 |
+ # "ReInstall nodeps cmd: " + cmd) |
|
452 | 458 |
#TODO: Error code from exec_run |
453 | 459 |
installLog = containerID.exec_run(cmd) |
454 | 460 |
if not installLog: |
455 | 461 |
self.logger.error("Unable to re-install nodeps rpms") |
456 | 462 |
raise Exception("nodeps RPM re-installation failed") |
457 |
- logfile = open(logFile, 'a') |
|
458 |
- logfile.write(installLog) |
|
459 |
- logfile.close() |
|
463 |
+ with open(logFile, 'a') as logfile: |
|
464 |
+ logfile.write(installLog.decode()) |
|
460 | 465 |
|
461 | 466 |
if self.rpmFilesToInstallInAOneShot != "": |
462 |
- self.logger.info("PackageUtils-installRPMSInAOneShotInContainer: Installing rpms: " + \ |
|
467 |
+ self.logger.info("PackageUtils-installRPMSInAOneShotInContainer: Installing rpms: " + |
|
463 | 468 |
self.packagesToInstallInAOneShot) |
464 | 469 |
logFile = destLogPath + "/install_rpms.log" |
465 | 470 |
cmd = rpmInstallcmd + " " + self.rpmFilesToInstallInAOneShot |
... | ... |
@@ -470,22 +510,22 @@ class PackageUtils(object): |
470 | 470 |
if not installLog: |
471 | 471 |
self.logger.error("Unable to install rpms") |
472 | 472 |
raise Exception("RPM installation failed") |
473 |
- logfile = open(logFile, 'w') |
|
474 |
- logfile.write(installLog) |
|
475 |
- logfile.close() |
|
473 |
+ with open(logFile, 'w') as logfile: |
|
474 |
+ logfile.write(installLog.decode()) |
|
476 | 475 |
|
477 | 476 |
if self.rpmFilesToReInstallInAOneShot != "": |
478 |
- cmd = rpmInstallcmd + " " + self.forceRpmPackageOptions + " " + self.rpmFilesToReInstallInAOneShot |
|
477 |
+ cmd = (rpmInstallcmd + " " + self.forceRpmPackageOptions + " " + |
|
478 |
+ self.rpmFilesToReInstallInAOneShot) |
|
479 | 479 |
cmd = "/bin/bash -l -c '" + cmd + "'" |
480 |
- #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: ReInstall cmd: " + cmd) |
|
480 |
+ #self.logger.debug("VDBG-PU-installRPMSInAOneShotInContainer: " + |
|
481 |
+ # "ReInstall cmd: " + cmd) |
|
481 | 482 |
#TODO: Error code from exec_run |
482 | 483 |
installLog = containerID.exec_run(cmd) |
483 | 484 |
if not installLog: |
484 | 485 |
self.logger.error("Unable to re-install rpms") |
485 | 486 |
raise Exception("RPM re-installation failed") |
486 |
- logfile = open(logFile, 'a') |
|
487 |
- logfile.write(installLog) |
|
488 |
- logfile.close() |
|
487 |
+ with open(logFile, 'a') as logfile: |
|
488 |
+ logfile.write(installLog.decode()) |
|
489 | 489 |
|
490 | 490 |
def findInstalledRPMPackagesInContainer(self, containerID): |
491 | 491 |
cmd = self.rpmBinary + " " + self.queryRpmPackageOptions |
... | ... |
@@ -493,7 +533,7 @@ class PackageUtils(object): |
493 | 493 |
#TODO: Error code from exec_run |
494 | 494 |
result = containerID.exec_run(cmd) |
495 | 495 |
if result is not None: |
496 |
- return result.split() |
|
496 |
+ return result.decode().split() |
|
497 | 497 |
return result |
498 | 498 |
|
499 | 499 |
def adjustGCCSpecsInContainer(self, package, containerID, logPath): |
... | ... |
@@ -505,9 +545,8 @@ class PackageUtils(object): |
505 | 505 |
#TODO: Error code from exec_run |
506 | 506 |
scriptLog = containerID.exec_run(adjustCmd) |
507 | 507 |
if scriptLog: |
508 |
- logfile = open(logFile, 'w') |
|
509 |
- logfile.write(scriptLog) |
|
510 |
- logfile.close() |
|
508 |
+ with open(logFile, 'w') as logfile: |
|
509 |
+ logfile.write(scriptLog.decode()) |
|
511 | 510 |
return |
512 | 511 |
|
513 | 512 |
self.logger.debug(containerID.exec_run("ls -la /tmp/" + self.adjustGCCSpecScript)) |
... | ... |
@@ -518,7 +557,8 @@ class PackageUtils(object): |
518 | 518 |
|
519 | 519 |
def buildRPMSForGivenPackageInContainer(self, package, containerID, listBuildOptionPackages, |
520 | 520 |
pkgBuildOptionFile, destLogPath=None): |
521 |
- self.logger.info("Building rpm's for package " + package + " in container " + containerID.short_id) |
|
521 |
+ self.logger.info("Building rpm's for package " + package + " in container " + |
|
522 |
+ containerID.short_id) |
|
522 | 523 |
|
523 | 524 |
listSourcesFiles = SPECS.getData().getSources(package) |
524 | 525 |
listPatchFiles = SPECS.getData().getPatches(package) |
... | ... |
@@ -555,28 +595,30 @@ class PackageUtils(object): |
555 | 555 |
macros.append(macroName + " " + listRPMMacros[macroName]) |
556 | 556 |
|
557 | 557 |
# Build RPMs |
558 |
- listRPMFiles=[] |
|
559 |
- listSRPMFiles=[] |
|
558 |
+ listRPMFiles = [] |
|
559 |
+ listSRPMFiles = [] |
|
560 | 560 |
try: |
561 | 561 |
listRPMFiles, listSRPMFiles = self.buildRPMinContainer( |
562 |
- specPath + specName, |
|
563 |
- rpmLogFile, |
|
564 |
- destLogFile, |
|
565 |
- containerID, |
|
566 |
- package, |
|
567 |
- macros) |
|
568 |
- self.logger.info("Successfully built rpm:"+package) |
|
562 |
+ specPath + specName, |
|
563 |
+ rpmLogFile, |
|
564 |
+ destLogFile, |
|
565 |
+ containerID, |
|
566 |
+ package, |
|
567 |
+ macros) |
|
568 |
+ self.logger.info("Successfully built rpm:" + package) |
|
569 | 569 |
except Exception as e: |
570 | 570 |
self.logger.error("Failed while building rpm: " + package) |
571 | 571 |
raise e |
572 | 572 |
finally: |
573 | 573 |
if destLogPath is not None: |
574 | 574 |
rpmLog = destLogPath + "/" + package + ".log" |
575 |
- if constants.rpmCheck and package in constants.testForceRPMS and SPECS.getData().isCheckAvailable(package): |
|
576 |
- cmd="sed -i '/^Executing(%check):/,/^Processing files:/{//!b};d' "+ rpmLog |
|
577 |
- logFile = destLogPath+"/adjustTestFile.log" |
|
575 |
+ if (constants.rpmCheck and |
|
576 |
+ package in constants.testForceRPMS and |
|
577 |
+ SPECS.getData().isCheckAvailable(package)): |
|
578 |
+ cmd = "sed -i '/^Executing(%check):/,/^Processing files:/{//!b};d' " + rpmLog |
|
579 |
+ logFile = destLogPath + "/adjustTestFile.log" |
|
578 | 580 |
returnVal = CommandUtils().runCommandInShell(cmd, logFile) |
579 |
- testLogFile = destLogPath+"/"+package+"-test.log" |
|
581 |
+ testLogFile = destLogPath + "/" + package + "-test.log" |
|
580 | 582 |
shutil.copyfile(rpmLog, testLogFile) |
581 | 583 |
self.logger.info("RPM build is successful") |
582 | 584 |
|
... | ... |
@@ -602,16 +644,20 @@ class PackageUtils(object): |
602 | 602 |
rpmBuildCmd = self.rpmbuildBinary + " " + self.rpmbuildBuildallOption |
603 | 603 |
|
604 | 604 |
if constants.rpmCheck and package in constants.testForceRPMS: |
605 |
- self.logger.info("#"*(68+2*len(package))) |
|
605 |
+ self.logger.info("#" * (68 + 2 * len(package))) |
|
606 | 606 |
if not SPECS.getData().isCheckAvailable(package): |
607 |
- self.logger.info("####### "+package+" MakeCheck is not available. Skipping MakeCheck TEST for "+package+ " #######") |
|
608 |
- rpmBuildCmd=self.rpmbuildBinary+" --clean" |
|
607 |
+ self.logger.info("####### " + package + |
|
608 |
+ " MakeCheck is not available. Skipping MakeCheck TEST for " + |
|
609 |
+ package + " #######") |
|
610 |
+ rpmBuildCmd = self.rpmbuildBinary + " --clean" |
|
609 | 611 |
else: |
610 |
- self.logger.info("####### "+package+" MakeCheck is available. Running MakeCheck TEST for "+package+ " #######") |
|
611 |
- rpmBuildCmd=self.rpmbuildBinary+" "+self.rpmbuildCheckOption |
|
612 |
- self.logger.info("#"*(68+2*len(package))) |
|
612 |
+ self.logger.info("####### " + package + |
|
613 |
+ " MakeCheck is available. Running MakeCheck TEST for " + |
|
614 |
+ package + " #######") |
|
615 |
+ rpmBuildCmd = self.rpmbuildBinary + " " + self.rpmbuildCheckOption |
|
616 |
+ self.logger.info("#" * (68 + 2 * len(package))) |
|
613 | 617 |
else: |
614 |
- rpmBuildCmd+=" "+self.rpmbuildNocheckOption |
|
618 |
+ rpmBuildCmd += " "+self.rpmbuildNocheckOption |
|
615 | 619 |
|
616 | 620 |
for macro in macros: |
617 | 621 |
rpmBuildCmd += ' --define \"%s\"' % macro |
... | ... |
@@ -632,33 +678,37 @@ class PackageUtils(object): |
632 | 632 |
|
633 | 633 |
if constants.rpmCheck and package in constants.testForceRPMS: |
634 | 634 |
if not SPECS.getData().isCheckAvailable(package): |
635 |
- constants.testLogger.info(package+" : N/A") |
|
635 |
+ constants.testLogger.info(package + " : N/A") |
|
636 | 636 |
elif returnVal: |
637 |
- constants.testLogger.info(package+" : PASS") |
|
637 |
+ constants.testLogger.info(package + " : PASS") |
|
638 | 638 |
else: |
639 |
- constants.testLogger.error(package+" : FAIL" ) |
|
639 |
+ constants.testLogger.error(package + " : FAIL") |
|
640 | 640 |
|
641 | 641 |
if constants.rpmCheck: |
642 | 642 |
if not returnVal and constants.rpmCheckStopOnError: |
643 |
- self.logger.error("Checking rpm is failed "+specFile) |
|
643 |
+ self.logger.error("Checking rpm is failed " + specFile) |
|
644 | 644 |
raise Exception("RPM check failed") |
645 | 645 |
else: |
646 | 646 |
if not returnVal: |
647 |
- self.logger.error("Building rpm is failed "+specFile) |
|
647 |
+ self.logger.error("Building rpm is failed " + specFile) |
|
648 | 648 |
raise Exception("RPM build failed") |
649 | 649 |
|
650 | 650 |
#Extracting rpms created from log file |
651 |
- listRPMFiles=[] |
|
652 |
- listSRPMFiles=[] |
|
651 |
+ listRPMFiles = [] |
|
652 |
+ listSRPMFiles = [] |
|
653 | 653 |
logfile = open(destLogFile, 'r') |
654 | 654 |
rpmBuildLogLines = logfile.readlines() |
655 | 655 |
logfile.close() |
656 | 656 |
for i in range(0, len(rpmBuildLogLines)): |
657 | 657 |
if re.search("^Wrote:", rpmBuildLogLines[i]): |
658 | 658 |
listcontents = rpmBuildLogLines[i].split() |
659 |
- if (len(listcontents) == 2) and listcontents[1].strip()[-4:] == ".rpm" and listcontents[1].find("/RPMS/") != -1: |
|
659 |
+ if ((len(listcontents) == 2) and |
|
660 |
+ listcontents[1].strip().endswith(".rpm") and |
|
661 |
+ "/RPMS/" in listcontents[1]): |
|
660 | 662 |
listRPMFiles.append(listcontents[1]) |
661 |
- if (len(listcontents) == 2) and listcontents[1].strip()[-8:] == ".src.rpm" and listcontents[1].find("/SRPMS/") != -1: |
|
663 |
+ if ((len(listcontents) == 2) and |
|
664 |
+ listcontents[1].strip().endswith(".src.rpm") and |
|
665 |
+ "/SRPMS/" in listcontents[1]): |
|
662 | 666 |
listSRPMFiles.append(listcontents[1]) |
663 | 667 |
#if not listRPMFiles: |
664 | 668 |
# self.logger.error("Building rpm failed for " + specFile) |
... | ... |
@@ -1,7 +1,5 @@ |
1 |
-#! /usr/bin/python2 |
|
2 |
-# |
|
3 | 1 |
# Copyright (C) 2015-2017 VMware, Inc. All rights reserved. |
4 |
-# pullsources.py |
|
2 |
+# pullsources.py |
|
5 | 3 |
# Allows pulling packages'sources from a source repository. |
6 | 4 |
# |
7 | 5 |
# Author(s): Mahmoud Bassiouny (mbassiouny@vmware.com) |
... | ... |
@@ -11,7 +9,6 @@ |
11 | 11 |
import json |
12 | 12 |
import os |
13 | 13 |
import hashlib |
14 |
-import datetime |
|
15 | 14 |
import requests |
16 | 15 |
from requests.auth import HTTPBasicAuth |
17 | 16 |
from CommandUtils import CommandUtils |
... | ... |
@@ -30,37 +27,39 @@ def get(source, sha1, sourcesPath, configs, logger): |
30 | 30 |
sourcePath = cmdUtils.findFile(source, sourcesPath) |
31 | 31 |
if sourcePath is not None and len(sourcePath) > 0: |
32 | 32 |
if len(sourcePath) > 1: |
33 |
- raise Exception("Multiple sources found for source:"+source+"\n"+ ",".join(sourcePath) +"\nUnable to determine one.") |
|
33 |
+ raise Exception("Multiple sources found for source:" + source + "\n" + |
|
34 |
+ ",".join(sourcePath) +"\nUnable to determine one.") |
|
34 | 35 |
if sha1 == getFileHash(sourcePath[0]): |
35 | 36 |
# Use file from sourcesPath |
36 | 37 |
return |
37 | 38 |
else: |
38 |
- logger.info("sha1 of "+sourcePath[0]+" does not match. "+sha1+" vs "+getFileHash(sourcePath[0])) |
|
39 |
- configFiles=configs.split(":") |
|
39 |
+ logger.info("sha1 of " + sourcePath[0] + " does not match. " + sha1 + |
|
40 |
+ " vs " + getFileHash(sourcePath[0])) |
|
41 |
+ configFiles = configs.split(":") |
|
40 | 42 |
for config in configFiles: |
41 | 43 |
p = pullSources(config, logger) |
42 | 44 |
package_path = os.path.join(sourcesPath, source) |
43 |
- try: |
|
45 |
+ try: |
|
44 | 46 |
p.downloadFileHelper(source, package_path, sha1) |
45 | 47 |
return |
46 | 48 |
except Exception as e: |
47 |
- logger.error(e) |
|
48 |
- raise Exception("Missing source: "+source) |
|
49 |
+ logger.exception(e) |
|
50 |
+ raise Exception("Missing source: " + source) |
|
49 | 51 |
|
50 | 52 |
class pullSources: |
51 | 53 |
|
52 | 54 |
def __init__(self, conf_file, logger): |
53 | 55 |
self._config = {} |
54 |
- self.logger=logger |
|
56 |
+ self.logger = logger |
|
55 | 57 |
self.loadConfig(conf_file) |
56 | 58 |
|
57 | 59 |
# generate the auth |
58 | 60 |
self._auth = None |
59 | 61 |
if ('user' in self._config and len(self._config['user']) > 0 and |
60 |
- 'apikey' in self._config and len(self._config['apikey'])) > 0: |
|
62 |
+ 'apikey' in self._config and len(self._config['apikey'])) > 0: |
|
61 | 63 |
self._auth = HTTPBasicAuth(self._config['user'], self._config['apikey']) |
62 | 64 |
|
63 |
- def loadConfig(self,conf_file): |
|
65 |
+ def loadConfig(self, conf_file): |
|
64 | 66 |
with open(conf_file) as jsonFile: |
65 | 67 |
self._config = json.load(jsonFile) |
66 | 68 |
|
... | ... |
@@ -68,7 +67,7 @@ class pullSources: |
68 | 68 |
#form url: https://dl.bintray.com/vmware/photon_sources/1.0/<filename>. |
69 | 69 |
url = '%s/%s' % (self._config['baseurl'], filename) |
70 | 70 |
|
71 |
- self.logger.info("Downloading: "+url) |
|
71 |
+ self.logger.info("Downloading: " + url) |
|
72 | 72 |
|
73 | 73 |
with open(file_path, 'wb') as handle: |
74 | 74 |
response = requests.get(url, auth=self._auth, stream=True) |
... | ... |
@@ -85,8 +84,7 @@ class pullSources: |
85 | 85 |
response.close() |
86 | 86 |
return file_path |
87 | 87 |
|
88 |
- def downloadFileHelper(self, package_name, package_path, package_sha1 = None): |
|
88 |
+ def downloadFileHelper(self, package_name, package_path, package_sha1=None): |
|
89 | 89 |
self.downloadFile(package_name, package_path) |
90 | 90 |
if package_sha1 != getFileHash(package_path): |
91 | 91 |
raise Exception('Invalid sha1 for package %s' % package_name) |
92 |
- |
... | ... |
@@ -4,7 +4,9 @@ class RepoQueryDependency(object): |
4 | 4 |
def __init__(self, repoFile): |
5 | 5 |
self.repo_file = repoFile |
6 | 6 |
def getRequiresList(self,pkg): |
7 |
- cmd = "repoquery -c "+self.repo_file+" -R -q "+pkg+" | xargs repoquery -c "+self.repo_file+" --whatprovides -q | sed 's/-[0-9]/ /g' | cut -f 1 -d ' ' | sort | uniq " |
|
7 |
+ cmd = ("repoquery -c " + self.repo_file + " -R -q " + pkg + |
|
8 |
+ " | xargs repoquery -c " + self.repo_file + |
|
9 |
+ " --whatprovides -q | sed 's/-[0-9]/ /g' | cut -f 1 -d ' ' | sort | uniq ") |
|
8 | 10 |
status,output = commands.getstatusoutput(cmd) |
9 | 11 |
if status == 0: |
10 | 12 |
outList = output.split('\n') |
... | ... |
@@ -1,36 +1,36 @@ |
1 |
+import threading |
|
2 |
+from queue import PriorityQueue |
|
1 | 3 |
import json |
2 | 4 |
import ThreadPool |
3 | 5 |
from constants import constants |
4 | 6 |
from Logger import Logger |
5 |
-import threading |
|
6 |
-from Queue import PriorityQueue |
|
7 | 7 |
from SpecData import SPECS |
8 | 8 |
|
9 | 9 |
class Scheduler(object): |
10 |
- |
|
11 |
- lock=threading.Lock() |
|
12 |
- listOfAlreadyBuiltPackages=[] |
|
13 |
- listOfPackagesToBuild=[] |
|
14 |
- listOfPackagesCurrentlyBuilding=[] |
|
15 |
- sortedList=[] |
|
16 |
- listOfPackagesNextToBuild=PriorityQueue() |
|
17 |
- listOfFailedPackages=[] |
|
10 |
+ |
|
11 |
+ lock = threading.Lock() |
|
12 |
+ listOfAlreadyBuiltPackages = [] |
|
13 |
+ listOfPackagesToBuild = [] |
|
14 |
+ listOfPackagesCurrentlyBuilding = [] |
|
15 |
+ sortedList = [] |
|
16 |
+ listOfPackagesNextToBuild = PriorityQueue() |
|
17 |
+ listOfFailedPackages = [] |
|
18 | 18 |
alldependencyGraph = {} |
19 | 19 |
dependencyGraph = {} |
20 | 20 |
priorityMap = {} |
21 |
- pkgWeights={} |
|
22 |
- isPriorityScheduler=1 |
|
23 |
- logger=None |
|
24 |
- event=None |
|
25 |
- stopScheduling=False |
|
26 |
- |
|
21 |
+ pkgWeights = {} |
|
22 |
+ isPriorityScheduler = 1 |
|
23 |
+ logger = None |
|
24 |
+ event = None |
|
25 |
+ stopScheduling = False |
|
26 |
+ |
|
27 | 27 |
@staticmethod |
28 | 28 |
def setEvent(event): |
29 |
- Scheduler.event=event |
|
30 |
- |
|
29 |
+ Scheduler.event = event |
|
30 |
+ |
|
31 | 31 |
@staticmethod |
32 |
- def setLog(logName,logPath): |
|
33 |
- Scheduler.logger = Logger.getLogger(logName, logPath) |
|
32 |
+ def setLog(logName, logPath): |
|
33 |
+ Scheduler.logger = Logger.getLogger(logName, logPath) |
|
34 | 34 |
|
35 | 35 |
@staticmethod |
36 | 36 |
def getBuildRequiredPackages(package): |
... | ... |
@@ -50,10 +50,10 @@ class Scheduler(object): |
50 | 50 |
@staticmethod |
51 | 51 |
def getDependencies(package, parentPackage, k): |
52 | 52 |
|
53 |
- for node in Scheduler.alldependencyGraph[package].keys(): |
|
54 |
- Scheduler.getDependencies(node, package, k) |
|
53 |
+ for node in list(Scheduler.alldependencyGraph[package].keys()): |
|
54 |
+ Scheduler.getDependencies(node, package, k) |
|
55 | 55 |
|
56 |
- if parentPackage == None: |
|
56 |
+ if parentPackage is None: |
|
57 | 57 |
return |
58 | 58 |
else: |
59 | 59 |
for node in Scheduler.alldependencyGraph[package].keys(): |
... | ... |
@@ -69,9 +69,9 @@ class Scheduler(object): |
69 | 69 |
def makeGraph(): |
70 | 70 |
k = 3 |
71 | 71 |
for package in Scheduler.sortedList: |
72 |
- for child_pkg in Scheduler.dependencyGraph[package].keys(): |
|
72 |
+ for child_pkg in list(Scheduler.dependencyGraph[package].keys()): |
|
73 | 73 |
Scheduler.getDependencies(child_pkg, package, k) |
74 |
- for node in Scheduler.alldependencyGraph[child_pkg].keys(): |
|
74 |
+ for node in list(Scheduler.alldependencyGraph[child_pkg].keys()): |
|
75 | 75 |
try: |
76 | 76 |
Scheduler.dependencyGraph[package][node] = max( |
77 | 77 |
Scheduler.dependencyGraph[package][node], |
... | ... |
@@ -79,22 +79,22 @@ class Scheduler(object): |
79 | 79 |
except KeyError: |
80 | 80 |
Scheduler.dependencyGraph[package][node] = \ |
81 | 81 |
Scheduler.alldependencyGraph[child_pkg][node] * k |
82 |
- if constants.publishBuildDependencies: |
|
83 |
- dependencyLists = {} |
|
84 |
- for package in Scheduler.dependencyGraph.keys(): |
|
85 |
- dependencyLists[package] = [] |
|
86 |
- for dependency in Scheduler.dependencyGraph[package].keys(): |
|
87 |
- dependencyLists[package].append(dependency) |
|
88 |
- graphfile = open(str(constants.logPath) + "/BuildDependencies.json", 'w') |
|
89 |
- graphfile.write(json.dumps(dependencyLists, sort_keys=True, indent=4)) |
|
90 |
- graphfile.close() |
|
82 |
+ if constants.publishBuildDependencies: |
|
83 |
+ dependencyLists = {} |
|
84 |
+ for package in list(Scheduler.dependencyGraph.keys()): |
|
85 |
+ dependencyLists[package] = [] |
|
86 |
+ for dependency in list(Scheduler.dependencyGraph[package].keys()): |
|
87 |
+ dependencyLists[package].append(dependency) |
|
88 |
+ graphfile = open(str(constants.logPath) + "/BuildDependencies.json", 'w') |
|
89 |
+ graphfile.write(json.dumps(dependencyLists, sort_keys=True, indent=4)) |
|
90 |
+ graphfile.close() |
|
91 | 91 |
|
92 | 92 |
@staticmethod |
93 | 93 |
def parseWeights(): |
94 |
- Scheduler.pkgWeights.clear() |
|
95 |
- weightFile = open(constants.packageWeightsPath, 'r') |
|
96 |
- Scheduler.pkgWeights = json.load(weightFile) |
|
97 |
- weightFile.close() |
|
94 |
+ Scheduler.pkgWeights.clear() |
|
95 |
+ weightFile = open(constants.packageWeightsPath, 'r') |
|
96 |
+ Scheduler.pkgWeights = json.load(weightFile) |
|
97 |
+ weightFile.close() |
|
98 | 98 |
|
99 | 99 |
@staticmethod |
100 | 100 |
def getWeight(package): |
... | ... |
@@ -103,15 +103,13 @@ class Scheduler(object): |
103 | 103 |
except KeyError: |
104 | 104 |
return 0 |
105 | 105 |
|
106 |
- |
|
107 |
- |
|
108 | 106 |
@staticmethod |
109 | 107 |
def setPriorities(): |
110 |
- if constants.packageWeightsPath == None: |
|
108 |
+ if constants.packageWeightsPath is None: |
|
111 | 109 |
Scheduler.logger.info("Priority Scheduler disabled") |
112 | 110 |
Scheduler.isPriorityScheduler = 0 |
113 |
- else: |
|
114 |
- Scheduler.parseWeights() |
|
111 |
+ else: |
|
112 |
+ Scheduler.parseWeights() |
|
115 | 113 |
|
116 | 114 |
for package in Scheduler.sortedList: |
117 | 115 |
Scheduler.dependencyGraph[package] = {} |
... | ... |
@@ -135,51 +133,52 @@ class Scheduler(object): |
135 | 135 |
|
136 | 136 |
|
137 | 137 |
@staticmethod |
138 |
- def setParams(sortedList,listOfAlreadyBuiltPackages): |
|
139 |
- Scheduler.sortedList=sortedList |
|
140 |
- Scheduler.listOfAlreadyBuiltPackages=listOfAlreadyBuiltPackages |
|
138 |
+ def setParams(sortedList, listOfAlreadyBuiltPackages): |
|
139 |
+ Scheduler.sortedList = sortedList |
|
140 |
+ Scheduler.listOfAlreadyBuiltPackages = listOfAlreadyBuiltPackages |
|
141 | 141 |
for x in Scheduler.sortedList: |
142 | 142 |
if x not in Scheduler.listOfAlreadyBuiltPackages or x in constants.testForceRPMS: |
143 | 143 |
Scheduler.listOfPackagesToBuild.append(x) |
144 |
- Scheduler.listOfPackagesCurrentlyBuilding=[] |
|
145 |
- Scheduler.listOfPackagesNextToBuild=[] |
|
146 |
- Scheduler.listOfFailedPackages=[] |
|
144 |
+ Scheduler.listOfPackagesCurrentlyBuilding = [] |
|
145 |
+ Scheduler.listOfPackagesNextToBuild = [] |
|
146 |
+ Scheduler.listOfFailedPackages = [] |
|
147 | 147 |
Scheduler.setPriorities() |
148 |
- |
|
148 |
+ |
|
149 | 149 |
@staticmethod |
150 | 150 |
def getRequiredPackages(package): |
151 |
- listRequiredRPMPackages=[] |
|
151 |
+ listRequiredRPMPackages = [] |
|
152 | 152 |
listRequiredRPMPackages.extend(SPECS.getData().getBuildRequiresForPackage(package)) |
153 | 153 |
listRequiredRPMPackages.extend(SPECS.getData().getRequiresAllForPackage(package)) |
154 |
- |
|
155 |
- listRequiredPackages=[] |
|
154 |
+ |
|
155 |
+ listRequiredPackages = [] |
|
156 | 156 |
|
157 | 157 |
for pkg in listRequiredRPMPackages: |
158 |
- basePkg=SPECS.getData().getSpecName(pkg) |
|
158 |
+ basePkg = SPECS.getData().getSpecName(pkg) |
|
159 | 159 |
if basePkg not in listRequiredPackages: |
160 | 160 |
listRequiredPackages.append(basePkg) |
161 |
- |
|
161 |
+ |
|
162 | 162 |
return listRequiredPackages |
163 |
- |
|
163 |
+ |
|
164 | 164 |
@staticmethod |
165 | 165 |
def __getListNextPackagesReadyToBuild(): |
166 |
- listOfPackagesNextToBuild=PriorityQueue() |
|
166 |
+ listOfPackagesNextToBuild = PriorityQueue() |
|
167 | 167 |
Scheduler.logger.info("Checking for next possible packages to build") |
168 | 168 |
for pkg in Scheduler.listOfPackagesToBuild: |
169 | 169 |
if pkg in Scheduler.listOfPackagesCurrentlyBuilding: |
170 | 170 |
continue |
171 |
- listRequiredPackages=Scheduler.getRequiredPackages(pkg) |
|
172 |
- canBuild=True |
|
173 |
- Scheduler.logger.info("Required packages for "+ pkg + " are:") |
|
171 |
+ listRequiredPackages = Scheduler.getRequiredPackages(pkg) |
|
172 |
+ canBuild = True |
|
173 |
+ Scheduler.logger.info("Required packages for " + pkg + " are:") |
|
174 | 174 |
Scheduler.logger.info(listRequiredPackages) |
175 | 175 |
for reqPkg in listRequiredPackages: |
176 | 176 |
if reqPkg not in Scheduler.listOfAlreadyBuiltPackages: |
177 |
- canBuild=False |
|
178 |
- Scheduler.logger.info(reqPkg+" is not available. So we cannot build "+ pkg +" at this moment.") |
|
177 |
+ canBuild = False |
|
178 |
+ Scheduler.logger.info(reqPkg + " is not available. So we cannot build " + |
|
179 |
+ pkg + " at this moment.") |
|
179 | 180 |
break |
180 | 181 |
if canBuild: |
181 | 182 |
listOfPackagesNextToBuild.put((-Scheduler.priorityMap[pkg], pkg)) |
182 |
- Scheduler.logger.info("Adding "+ pkg +" to the schedule list") |
|
183 |
+ Scheduler.logger.info("Adding " + pkg + " to the schedule list") |
|
183 | 184 |
return listOfPackagesNextToBuild |
184 | 185 |
|
185 | 186 |
@staticmethod |
... | ... |
@@ -210,51 +209,50 @@ class Scheduler(object): |
210 | 210 |
Scheduler.lock.release() |
211 | 211 |
return None |
212 | 212 |
|
213 |
- packageTup=Scheduler.listOfPackagesNextToBuild.get() |
|
213 |
+ packageTup = Scheduler.listOfPackagesNextToBuild.get() |
|
214 | 214 |
|
215 | 215 |
if packageTup[0] == 0 and Scheduler.isPriorityScheduler == 1: |
216 | 216 |
listOfPackagesNextToBuild = Scheduler.__getListNextPackagesReadyToBuild() |
217 | 217 |
Scheduler.listOfPackagesNextToBuild = listOfPackagesNextToBuild |
218 |
- if Scheduler.listOfPackagesNextToBuild.qsize() == 0: |
|
219 |
- Scheduler.logger.info("Released scheduler lock") |
|
220 |
- Scheduler.lock.release() |
|
221 |
- return None |
|
218 |
+ if Scheduler.listOfPackagesNextToBuild.qsize() == 0: |
|
219 |
+ Scheduler.logger.info("Released scheduler lock") |
|
220 |
+ Scheduler.lock.release() |
|
221 |
+ return None |
|
222 | 222 |
packageTup = Scheduler.listOfPackagesNextToBuild.get() |
223 | 223 |
|
224 | 224 |
package = packageTup[1] |
225 | 225 |
Scheduler.logger.info("PackagesNextToBuild " + str(packageTup)) |
226 | 226 |
if Scheduler.listOfPackagesNextToBuild.qsize() > 0: |
227 |
- ThreadPool.ThreadPool.activateWorkerThreads(Scheduler.listOfPackagesNextToBuild.qsize()) |
|
227 |
+ ThreadPool.ThreadPool.activateWorkerThreads( |
|
228 |
+ Scheduler.listOfPackagesNextToBuild.qsize()) |
|
228 | 229 |
Scheduler.logger.info("Released scheduler lock") |
229 | 230 |
Scheduler.lock.release() |
230 | 231 |
Scheduler.listOfPackagesCurrentlyBuilding.append(package) |
231 | 232 |
Scheduler.listOfPackagesToBuild.remove(package) |
232 | 233 |
return package |
233 |
- |
|
234 |
+ |
|
234 | 235 |
#can be synchronized TODO |
235 | 236 |
@staticmethod |
236 | 237 |
def notifyPackageBuildCompleted(package): |
237 | 238 |
if package in Scheduler.listOfPackagesCurrentlyBuilding: |
238 | 239 |
Scheduler.listOfPackagesCurrentlyBuilding.remove(package) |
239 | 240 |
Scheduler.listOfAlreadyBuiltPackages.append(package) |
240 |
- |
|
241 |
- |
|
241 |
+ |
|
242 | 242 |
#can be synchronized TODO |
243 | 243 |
@staticmethod |
244 | 244 |
def notifyPackageBuildFailed(package): |
245 | 245 |
if package in Scheduler.listOfPackagesCurrentlyBuilding: |
246 | 246 |
Scheduler.listOfPackagesCurrentlyBuilding.remove(package) |
247 | 247 |
Scheduler.listOfFailedPackages.append(package) |
248 |
- |
|
248 |
+ |
|
249 | 249 |
@staticmethod |
250 | 250 |
def isAllPackagesBuilt(): |
251 |
- if len(Scheduler.listOfPackagesToBuild) == 0 : |
|
251 |
+ if len(Scheduler.listOfPackagesToBuild) == 0: |
|
252 | 252 |
return True |
253 | 253 |
return False |
254 |
- |
|
254 |
+ |
|
255 | 255 |
@staticmethod |
256 | 256 |
def isAnyPackagesFailedToBuild(): |
257 | 257 |
if len(Scheduler.listOfFailedPackages) != 0: |
258 | 258 |
return True |
259 | 259 |
return False |
260 |
- |
... | ... |
@@ -1,197 +1,201 @@ |
1 |
-from SpecUtils import Specutils |
|
2 | 1 |
import os |
3 | 2 |
import platform |
4 |
-from Logger import Logger |
|
5 |
-from distutils.version import StrictVersion |
|
6 |
-import Queue |
|
3 |
+import queue |
|
7 | 4 |
import json |
8 | 5 |
import operator |
6 |
+from distutils.version import StrictVersion |
|
7 |
+from SpecUtils import Specutils |
|
8 |
+from Logger import Logger |
|
9 | 9 |
from constants import constants |
10 | 10 |
|
11 | 11 |
class SerializableSpecObject(object): |
12 | 12 |
def __init__(self): |
13 |
- self.listPackages=[] |
|
14 |
- self.listRPMPackages=[] |
|
15 |
- self.name="" |
|
16 |
- self.version="" |
|
17 |
- self.release="" |
|
18 |
- self.buildRequirePackages=[] |
|
19 |
- self.checkBuildRequirePackages=[] |
|
20 |
- self.installRequiresAllPackages=[] |
|
21 |
- self.installRequiresPackages={} |
|
22 |
- self.specFile="" |
|
23 |
- self.listSources=[] |
|
24 |
- self.checksums={} |
|
25 |
- self.listPatches=[] |
|
26 |
- self.securityHardening="" |
|
27 |
- self.url="" |
|
28 |
- self.sourceurl="" |
|
29 |
- self.license="" |
|
30 |
- self.specDefs={} |
|
13 |
+ self.listPackages = [] |
|
14 |
+ self.listRPMPackages = [] |
|
15 |
+ self.name = "" |
|
16 |
+ self.version = "" |
|
17 |
+ self.release = "" |
|
18 |
+ self.buildRequirePackages = [] |
|
19 |
+ self.checkBuildRequirePackages = [] |
|
20 |
+ self.installRequiresAllPackages = [] |
|
21 |
+ self.installRequiresPackages = {} |
|
22 |
+ self.specFile = "" |
|
23 |
+ self.listSources = [] |
|
24 |
+ self.checksums = {} |
|
25 |
+ self.listPatches = [] |
|
26 |
+ self.securityHardening = "" |
|
27 |
+ self.url = "" |
|
28 |
+ self.sourceurl = "" |
|
29 |
+ self.license = "" |
|
30 |
+ self.specDefs = {} |
|
31 | 31 |
|
32 | 32 |
class SerializableSpecObjectsUtils(object): |
33 | 33 |
|
34 |
- def __init__(self,logPath): |
|
35 |
- self.mapSerializableSpecObjects={} |
|
36 |
- self.mapPackageToSpec={} |
|
37 |
- self.logger=Logger.getLogger("Serializable Spec objects", logPath ) |
|
34 |
+ def __init__(self, logPath): |
|
35 |
+ self.mapSerializableSpecObjects = {} |
|
36 |
+ self.mapPackageToSpec = {} |
|
37 |
+ self.logger = Logger.getLogger("Serializable Spec objects", logPath) |
|
38 | 38 |
|
39 |
- def readSpecsAndConvertToSerializableObjects(self,specFilesPath): |
|
40 |
- listSpecFiles=[] |
|
41 |
- self.getListSpecFiles(listSpecFiles,specFilesPath) |
|
39 |
+ def readSpecsAndConvertToSerializableObjects(self, specFilesPath): |
|
40 |
+ listSpecFiles = [] |
|
41 |
+ self.getListSpecFiles(listSpecFiles, specFilesPath) |
|
42 | 42 |
for specFile in listSpecFiles: |
43 | 43 |
skipUpdating = False |
44 |
- spec=Specutils(specFile) |
|
45 |
- specName=spec.getBasePackageName() |
|
46 |
- specObj=SerializableSpecObject() |
|
47 |
- specObj.name=specName |
|
48 |
- specObj.buildRequirePackages=spec.getBuildRequiresAllPackages() |
|
49 |
- specObj.installRequiresAllPackages=spec.getRequiresAllPackages() |
|
50 |
- specObj.checkBuildRequirePackages=spec.getCheckBuildRequiresAllPackages() |
|
51 |
- specObj.listPackages=spec.getPackageNames() |
|
52 |
- specObj.specFile=specFile |
|
53 |
- specObj.version=spec.getVersion() |
|
54 |
- specObj.release=spec.getRelease() |
|
55 |
- specObj.listSources=spec.getSourceNames() |
|
56 |
- specObj.checksums=spec.getChecksums() |
|
57 |
- specObj.specDefs=spec.getDefinitions() |
|
58 |
- specObj.listPatches=spec.getPatchNames() |
|
59 |
- specObj.securityHardening=spec.getSecurityHardeningOption() |
|
60 |
- specObj.isCheckAvailable=spec.isCheckAvailable() |
|
61 |
- specObj.license=spec.getLicense() |
|
62 |
- specObj.url=spec.getURL() |
|
63 |
- specObj.sourceurl=spec.getSourceURL() |
|
44 |
+ spec = Specutils(specFile) |
|
45 |
+ specName = spec.getBasePackageName() |
|
46 |
+ specObj = SerializableSpecObject() |
|
47 |
+ specObj.name = specName |
|
48 |
+ specObj.buildRequirePackages = spec.getBuildRequiresAllPackages() |
|
49 |
+ specObj.installRequiresAllPackages = spec.getRequiresAllPackages() |
|
50 |
+ specObj.checkBuildRequirePackages = spec.getCheckBuildRequiresAllPackages() |
|
51 |
+ specObj.listPackages = spec.getPackageNames() |
|
52 |
+ specObj.specFile = specFile |
|
53 |
+ specObj.version = spec.getVersion() |
|
54 |
+ specObj.release = spec.getRelease() |
|
55 |
+ specObj.listSources = spec.getSourceNames() |
|
56 |
+ specObj.checksums = spec.getChecksums() |
|
57 |
+ specObj.specDefs = spec.getDefinitions() |
|
58 |
+ specObj.listPatches = spec.getPatchNames() |
|
59 |
+ specObj.securityHardening = spec.getSecurityHardeningOption() |
|
60 |
+ specObj.isCheckAvailable = spec.isCheckAvailable() |
|
61 |
+ specObj.license = spec.getLicense() |
|
62 |
+ specObj.url = spec.getURL() |
|
63 |
+ specObj.sourceurl = spec.getSourceURL() |
|
64 | 64 |
for specPkg in specObj.listPackages: |
65 | 65 |
if specPkg in self.mapPackageToSpec: |
66 | 66 |
existingObj = self.mapSerializableSpecObjects[self.mapPackageToSpec[specPkg]] |
67 |
- if self.compareVersions(existingObj,specObj) == 1: |
|
67 |
+ if self.compareVersions(existingObj, specObj) == 1: |
|
68 | 68 |
skipUpdating = True |
69 |
- break; |
|
70 |
- specObj.installRequiresPackages[specPkg]=spec.getRequires(specPkg) |
|
71 |
- self.mapPackageToSpec[specPkg]=specName |
|
69 |
+ break |
|
70 |
+ specObj.installRequiresPackages[specPkg] = spec.getRequires(specPkg) |
|
71 |
+ self.mapPackageToSpec[specPkg] = specName |
|
72 | 72 |
if spec.getIsRPMPackage(specPkg): |
73 | 73 |
specObj.listRPMPackages.append(specPkg) |
74 | 74 |
if skipUpdating == False: |
75 |
- self.mapSerializableSpecObjects[specName]=specObj |
|
75 |
+ self.mapSerializableSpecObjects[specName] = specObj |
|
76 | 76 |
|
77 |
- def getListSpecFiles(self,listSpecFiles,path): |
|
77 |
+ def getListSpecFiles(self, listSpecFiles, path): |
|
78 | 78 |
for dirEntry in os.listdir(path): |
79 | 79 |
dirEntryPath = os.path.join(path, dirEntry) |
80 |
- if os.path.isfile(dirEntryPath) and dirEntryPath.endswith(".spec") and os.path.basename(dirEntryPath) not in constants.skipSpecsForArch.get(platform.machine(),[]): |
|
80 |
+ if (os.path.isfile(dirEntryPath) and |
|
81 |
+ dirEntryPath.endswith(".spec") and |
|
82 |
+ os.path.basename(dirEntryPath) not in |
|
83 |
+ constants.skipSpecsForArch.get(platform.machine(), [])): |
|
81 | 84 |
listSpecFiles.append(dirEntryPath) |
82 | 85 |
elif os.path.isdir(dirEntryPath): |
83 |
- self.getListSpecFiles(listSpecFiles,dirEntryPath) |
|
86 |
+ self.getListSpecFiles(listSpecFiles, dirEntryPath) |
|
84 | 87 |
|
85 | 88 |
def getBuildRequiresForPackage(self, package): |
86 |
- specName=self.getSpecName(package) |
|
89 |
+ specName = self.getSpecName(package) |
|
87 | 90 |
return self.mapSerializableSpecObjects[specName].buildRequirePackages |
88 | 91 |
|
89 | 92 |
def getRequiresAllForPackage(self, package): |
90 |
- specName=self.getSpecName(package) |
|
93 |
+ specName = self.getSpecName(package) |
|
91 | 94 |
return self.mapSerializableSpecObjects[specName].installRequiresAllPackages |
92 | 95 |
|
93 | 96 |
def getRequiresForPackage(self, package): |
94 |
- specName=self.getSpecName(package) |
|
95 |
- if self.mapSerializableSpecObjects[specName].installRequiresPackages.has_key(package): |
|
97 |
+ specName = self.getSpecName(package) |
|
98 |
+ if package in self.mapSerializableSpecObjects[specName].installRequiresPackages: |
|
96 | 99 |
return self.mapSerializableSpecObjects[specName].installRequiresPackages[package] |
97 | 100 |
return None |
98 | 101 |
|
99 | 102 |
def getCheckBuildRequiresForPackage(self, package): |
100 |
- specName=self.getSpecName(package) |
|
103 |
+ specName = self.getSpecName(package) |
|
101 | 104 |
return self.mapSerializableSpecObjects[specName].checkBuildRequirePackages |
102 | 105 |
|
103 | 106 |
def getRelease(self, package): |
104 |
- specName=self.getSpecName(package) |
|
107 |
+ specName = self.getSpecName(package) |
|
105 | 108 |
return self.mapSerializableSpecObjects[specName].release |
106 | 109 |
|
107 | 110 |
def getVersion(self, package): |
108 |
- specName=self.getSpecName(package) |
|
111 |
+ specName = self.getSpecName(package) |
|
109 | 112 |
return self.mapSerializableSpecObjects[specName].version |
110 | 113 |
|
111 | 114 |
def getSpecFile(self, package): |
112 |
- specName=self.getSpecName(package) |
|
115 |
+ specName = self.getSpecName(package) |
|
113 | 116 |
return self.mapSerializableSpecObjects[specName].specFile |
114 | 117 |
|
115 | 118 |
def getPatches(self, package): |
116 |
- specName=self.getSpecName(package) |
|
119 |
+ specName = self.getSpecName(package) |
|
117 | 120 |
return self.mapSerializableSpecObjects[specName].listPatches |
118 | 121 |
|
119 | 122 |
def getSources(self, package): |
120 |
- specName=self.getSpecName(package) |
|
123 |
+ specName = self.getSpecName(package) |
|
121 | 124 |
return self.mapSerializableSpecObjects[specName].listSources |
122 | 125 |
|
123 | 126 |
def getSHA1(self, package, source): |
124 |
- specName=self.getSpecName(package) |
|
127 |
+ specName = self.getSpecName(package) |
|
125 | 128 |
return self.mapSerializableSpecObjects[specName].checksums.get(source) |
126 | 129 |
|
127 | 130 |
def getPackages(self, package): |
128 |
- specName=self.getSpecName(package) |
|
131 |
+ specName = self.getSpecName(package) |
|
129 | 132 |
return self.mapSerializableSpecObjects[specName].listPackages |
130 | 133 |
|
131 | 134 |
def getRPMPackages(self, package): |
132 |
- specName=self.getSpecName(package) |
|
135 |
+ specName = self.getSpecName(package) |
|
133 | 136 |
return self.mapSerializableSpecObjects[specName].listRPMPackages |
134 | 137 |
|
135 | 138 |
def getReleaseNum(self, releaseVal): |
136 |
- id = releaseVal.find("%") |
|
137 |
- if (id != -1): |
|
138 |
- return releaseVal[0:id] |
|
139 |
- else: |
|
140 |
- return releaseVal |
|
139 |
+ id = releaseVal.find("%") |
|
140 |
+ if id != -1: |
|
141 |
+ return releaseVal[0:id] |
|
142 |
+ else: |
|
143 |
+ return releaseVal |
|
141 | 144 |
|
142 | 145 |
def compareVersions(self, existingObj, newObject): |
143 |
- if StrictVersion(existingObj.version) > StrictVersion(newObject.version): |
|
144 |
- return 1; |
|
145 |
- elif StrictVersion(existingObj.version) < StrictVersion(newObject.version): |
|
146 |
- return -1 |
|
147 |
- else: |
|
148 |
- if int(self.getReleaseNum(existingObj.release)) > int(self.getReleaseNum(newObject.release)): |
|
149 |
- return 1; |
|
150 |
- else: |
|
151 |
- return -1; |
|
152 |
- |
|
153 |
- def getSpecName(self,package): |
|
154 |
- if self.mapPackageToSpec.has_key(package): |
|
155 |
- specName=self.mapPackageToSpec[package] |
|
156 |
- if self.mapSerializableSpecObjects.has_key(specName): |
|
146 |
+ if StrictVersion(existingObj.version) > StrictVersion(newObject.version): |
|
147 |
+ return 1 |
|
148 |
+ elif StrictVersion(existingObj.version) < StrictVersion(newObject.version): |
|
149 |
+ return -1 |
|
150 |
+ else: |
|
151 |
+ if (int(self.getReleaseNum(existingObj.release)) > |
|
152 |
+ int(self.getReleaseNum(newObject.release))): |
|
153 |
+ return 1 |
|
154 |
+ else: |
|
155 |
+ return -1 |
|
156 |
+ |
|
157 |
+ def getSpecName(self, package): |
|
158 |
+ if package in self.mapPackageToSpec: |
|
159 |
+ specName = self.mapPackageToSpec[package] |
|
160 |
+ if specName in self.mapSerializableSpecObjects: |
|
157 | 161 |
return specName |
158 |
- self.logger.error("Could not able to find "+package+" package from specs") |
|
159 |
- raise Exception("Invalid package:"+package) |
|
162 |
+ self.logger.error("Could not able to find " + package + " package from specs") |
|
163 |
+ raise Exception("Invalid package:" + package) |
|
160 | 164 |
|
161 |
- def isRPMPackage(self,package): |
|
162 |
- if self.mapPackageToSpec.has_key(package): |
|
163 |
- specName=self.mapPackageToSpec[package] |
|
164 |
- if self.mapSerializableSpecObjects.has_key(specName): |
|
165 |
+ def isRPMPackage(self, package): |
|
166 |
+ if package in self.mapPackageToSpec: |
|
167 |
+ specName = self.mapPackageToSpec[package] |
|
168 |
+ if specName in self.mapSerializableSpecObjects: |
|
165 | 169 |
return True |
166 | 170 |
return False |
167 | 171 |
|
168 | 172 |
def getSecurityHardeningOption(self, package): |
169 |
- specName=self.getSpecName(package) |
|
173 |
+ specName = self.getSpecName(package) |
|
170 | 174 |
return self.mapSerializableSpecObjects[specName].securityHardening |
171 | 175 |
|
172 | 176 |
def isCheckAvailable(self, package): |
173 |
- specName=self.getSpecName(package) |
|
177 |
+ specName = self.getSpecName(package) |
|
174 | 178 |
return self.mapSerializableSpecObjects[specName].isCheckAvailable |
175 | 179 |
|
176 | 180 |
def getListPackages(self): |
177 |
- return self.mapSerializableSpecObjects.keys() |
|
181 |
+ return list(self.mapSerializableSpecObjects.keys()) |
|
178 | 182 |
|
179 | 183 |
def getURL(self, package): |
180 |
- specName=self.getSpecName(package) |
|
184 |
+ specName = self.getSpecName(package) |
|
181 | 185 |
return self.mapSerializableSpecObjects[specName].url |
182 | 186 |
|
183 | 187 |
def getSourceURL(self, package): |
184 |
- specName=self.getSpecName(package) |
|
188 |
+ specName = self.getSpecName(package) |
|
185 | 189 |
return self.mapSerializableSpecObjects[specName].sourceurl |
186 | 190 |
|
187 | 191 |
def getLicense(self, package): |
188 |
- specName=self.getSpecName(package) |
|
192 |
+ specName = self.getSpecName(package) |
|
189 | 193 |
return self.mapSerializableSpecObjects[specName].license |
190 | 194 |
|
191 | 195 |
def printAllObjects(self): |
192 |
- listSpecs=self.mapSerializableSpecObjects.keys() |
|
196 |
+ listSpecs = self.mapSerializableSpecObjects.keys() |
|
193 | 197 |
for spec in listSpecs: |
194 |
- specObj=self.mapSerializableSpecObjects[spec] |
|
198 |
+ specObj = self.mapSerializableSpecObjects[spec] |
|
195 | 199 |
self.logger.info("-----------Spec:"+specObj.name+"--------------") |
196 | 200 |
self.logger.info("Version:"+specObj.version) |
197 | 201 |
self.logger.info("Release:"+specObj.release) |
... | ... |
@@ -226,13 +230,13 @@ class SPECS(object): |
226 | 226 |
@staticmethod |
227 | 227 |
def getData(): |
228 | 228 |
""" Static access method. """ |
229 |
- if SPECS.__instance == None: |
|
229 |
+ if SPECS.__instance is None: |
|
230 | 230 |
SPECS() |
231 | 231 |
return SPECS.__instance.specData |
232 | 232 |
|
233 | 233 |
def __init__(self): |
234 | 234 |
""" Virtually private constructor. """ |
235 |
- if SPECS.__instance != None: |
|
235 |
+ if SPECS.__instance is not None: |
|
236 | 236 |
raise Exception("This class is a singleton!") |
237 | 237 |
else: |
238 | 238 |
SPECS.__instance = self |
... | ... |
@@ -241,28 +245,30 @@ class SPECS(object): |
241 | 241 |
def initialize(self): |
242 | 242 |
# Preparse some files |
243 | 243 |
#adding openjre8 version rpm macro |
244 |
- if (platform.machine() == "x86_64"): |
|
244 |
+ if platform.machine() == "x86_64": |
|
245 | 245 |
spec = Specutils(constants.specPath + "/openjdk8/openjdk8.spec") |
246 | 246 |
else: |
247 | 247 |
spec = Specutils(constants.specPath + "/openjdk8/openjdk8_aarch64.spec") |
248 | 248 |
java8version = spec.getVersion() |
249 |
- constants.addMacro("JAVA8_VERSION",java8version) |
|
249 |
+ constants.addMacro("JAVA8_VERSION", java8version) |
|
250 | 250 |
|
251 | 251 |
#adding kernelversion rpm macro |
252 | 252 |
spec = Specutils(constants.specPath + "/linux/linux.spec") |
253 | 253 |
kernelversion = spec.getVersion() |
254 |
- constants.addMacro("KERNEL_VERSION",kernelversion) |
|
254 |
+ constants.addMacro("KERNEL_VERSION", kernelversion) |
|
255 | 255 |
|
256 | 256 |
#adding kernelrelease rpm macro |
257 | 257 |
kernelrelease = spec.getRelease() |
258 |
- constants.addMacro("KERNEL_RELEASE",kernelrelease) |
|
258 |
+ constants.addMacro("KERNEL_RELEASE", kernelrelease) |
|
259 | 259 |
|
260 | 260 |
#adding kernelsubrelease rpm macro |
261 |
- a,b,c = kernelversion.split(".") |
|
262 |
- kernelsubrelease = '%02d%02d%03d%03d' % (int(a),int(b),int(c),int(kernelrelease.split('.')[0])) |
|
261 |
+ a, b, c = kernelversion.split(".") |
|
262 |
+ kernelsubrelease = ('%02d%02d%03d%03d' % (int(a), |
|
263 |
+ int(b), int(c), |
|
264 |
+ int(kernelrelease.split('.')[0]))) |
|
263 | 265 |
if kernelsubrelease: |
264 |
- kernelsubrelease = "."+kernelsubrelease |
|
265 |
- constants.addMacro("kernelsubrelease",kernelsubrelease) |
|
266 |
+ kernelsubrelease = "." + kernelsubrelease |
|
267 |
+ constants.addMacro("kernelsubrelease", kernelsubrelease) |
|
266 | 268 |
|
267 | 269 |
# Full parsing |
268 | 270 |
self.specData = SerializableSpecObjectsUtils(constants.logPath) |
... | ... |
@@ -273,8 +279,8 @@ class SPECS(object): |
273 | 273 |
class SerializedSpecObjects(object): |
274 | 274 |
|
275 | 275 |
def __init__(self, inputDataDir, stageDir): |
276 |
- self.mapSerializableSpecObjects={} |
|
277 |
- self.mapPackageToSpec={} |
|
276 |
+ self.mapSerializableSpecObjects = {} |
|
277 |
+ self.mapPackageToSpec = {} |
|
278 | 278 |
self.jsonFilesOutPath = stageDir + "/common/data/" |
279 | 279 |
self.inputDataDir = inputDataDir |
280 | 280 |
|
... | ... |
@@ -283,11 +289,11 @@ class SerializedSpecObjects(object): |
283 | 283 |
specPkg = depQue.get() |
284 | 284 |
specName = self.getSpecName(specPkg) |
285 | 285 |
if specName is None: |
286 |
- print specPkg + " is missing" |
|
286 |
+ print(specPkg + " is missing") |
|
287 | 287 |
specObj = self.mapSerializableSpecObjects[specName] |
288 | 288 |
for depPkg in specObj.installRequiresPackages[specPkg]: |
289 |
- if True == allDeps.has_key(depPkg): |
|
290 |
- if(allDeps[depPkg] < allDeps[specPkg] + 1): |
|
289 |
+ if depPkg in allDeps: |
|
290 |
+ if allDeps[depPkg] < allDeps[specPkg] + 1: |
|
291 | 291 |
allDeps[depPkg] = allDeps[specPkg] + 1 |
292 | 292 |
parent[depPkg] = specPkg |
293 | 293 |
self.updateLevels(allDeps, depPkg, parent, allDeps[depPkg]) |
... | ... |
@@ -300,23 +306,23 @@ class SerializedSpecObjects(object): |
300 | 300 |
while not depQue.empty(): |
301 | 301 |
specPkg = depQue.get() |
302 | 302 |
specName = self.getSpecName(specPkg) |
303 |
- spec=Specutils(self.getSpecFile(specPkg)) |
|
304 |
- RPMName=spec.getRPMName(specPkg) |
|
305 |
- debuginfoRPMName=spec.getDebuginfoRPMName(specPkg) |
|
303 |
+ spec = Specutils(self.getSpecFile(specPkg)) |
|
304 |
+ RPMName = spec.getRPMName(specPkg) |
|
305 |
+ debuginfoRPMName = spec.getDebuginfoRPMName(specPkg) |
|
306 | 306 |
whoBuildDepSet.add(RPMName) |
307 | 307 |
whoBuildDepSet.add(debuginfoRPMName) |
308 | 308 |
if specName is None: |
309 |
- print specPkg + " is missing" |
|
310 |
- if not whoBuildDeps.has_key(specPkg): |
|
309 |
+ print(specPkg + " is missing") |
|
310 |
+ if specPkg not in whoBuildDeps: |
|
311 | 311 |
continue |
312 | 312 |
for depPkg in whoBuildDeps[specPkg]: |
313 | 313 |
depQue.put(depPkg) |
314 | 314 |
|
315 |
- def printTree(self, allDeps, children, curParent , depth): |
|
316 |
- if (children.has_key(curParent)): |
|
315 |
+ def printTree(self, allDeps, children, curParent, depth): |
|
316 |
+ if curParent in children: |
|
317 | 317 |
for child in children[curParent]: |
318 |
- print "\t" * depth, child |
|
319 |
- self.printTree(allDeps, children, child, depth+1) |
|
318 |
+ print("\t" * depth + child) |
|
319 |
+ self.printTree(allDeps, children, child, depth + 1) |
|
320 | 320 |
|
321 | 321 |
def get_all_package_names(self, jsonFilePath): |
322 | 322 |
base_path = os.path.dirname(jsonFilePath) |
... | ... |
@@ -331,100 +337,106 @@ class SerializedSpecObjects(object): |
331 | 331 |
specObj = self.mapSerializableSpecObjects[specName] |
332 | 332 |
for depPkg in specObj.installRequiresPackages[inPkg]: |
333 | 333 |
# ignore circular deps within single spec file |
334 |
- if (specObj.installRequiresPackages.has_key(depPkg) and inPkg in specObj.installRequiresPackages[depPkg] and self.getSpecName(depPkg) == specName): |
|
334 |
+ if (depPkg in specObj.installRequiresPackages and |
|
335 |
+ inPkg in specObj.installRequiresPackages[depPkg] and |
|
336 |
+ self.getSpecName(depPkg) == specName): |
|
335 | 337 |
continue |
336 |
- if (allDeps.has_key(depPkg) and allDeps[depPkg] < level + 1): |
|
338 |
+ if depPkg in allDeps and allDeps[depPkg] < level + 1: |
|
337 | 339 |
allDeps[depPkg] = level + 1 |
338 | 340 |
parent[depPkg] = inPkg |
339 | 341 |
self.updateLevels(allDeps, depPkg, parent, allDeps[depPkg]) |
340 | 342 |
|
341 |
- def readSpecsAndConvertToSerializableObjects(self, specFilesPath, inputType, inputValue, displayOption): |
|
343 |
+ def readSpecsAndConvertToSerializableObjects(self, specFilesPath, inputType, |
|
344 |
+ inputValue, displayOption): |
|
342 | 345 |
children = {} |
343 |
- listSpecFiles=[] |
|
344 |
- whoNeedsList=[] |
|
345 |
- whoBuildDepSet= set() |
|
346 |
- independentRPMS=[] # list of all RPMS not built from photon and that must be blindly copied. |
|
346 |
+ listSpecFiles = [] |
|
347 |
+ whoNeedsList = [] |
|
348 |
+ whoBuildDepSet = set() |
|
349 |
+ # list of all RPMS not built from photon and that must be blindly copied. |
|
350 |
+ independentRPMS = [] |
|
347 | 351 |
whoBuildDeps = {} |
348 |
- allDeps={} |
|
349 |
- parent={} |
|
350 |
- depQue = Queue.Queue() |
|
352 |
+ allDeps = {} |
|
353 |
+ parent = {} |
|
354 |
+ depQue = queue.Queue() |
|
351 | 355 |
packageFound = False |
352 |
- self.getListSpecFiles(listSpecFiles,specFilesPath) |
|
356 |
+ self.getListSpecFiles(listSpecFiles, specFilesPath) |
|
353 | 357 |
for specFile in listSpecFiles: |
354 |
- spec=Specutils(specFile) |
|
355 |
- specName=spec.getBasePackageName() |
|
356 |
- specObj=SerializableSpecObject() |
|
357 |
- specObj.name=specName |
|
358 |
- specObj.buildRequirePackages=spec.getBuildRequiresAllPackages() |
|
359 |
- specObj.installRequiresAllPackages=spec.getRequiresAllPackages() |
|
360 |
- specObj.listPackages=spec.getPackageNames() |
|
361 |
- specObj.specFile=specFile |
|
362 |
- specObj.version=spec.getVersion() |
|
363 |
- specObj.release=spec.getRelease() |
|
364 |
- specObj.listSources=spec.getSourceNames() |
|
365 |
- specObj.listPatches=spec.getPatchNames() |
|
366 |
- specObj.securityHardening=spec.getSecurityHardeningOption() |
|
358 |
+ spec = Specutils(specFile) |
|
359 |
+ specName = spec.getBasePackageName() |
|
360 |
+ specObj = SerializableSpecObject() |
|
361 |
+ specObj.name = specName |
|
362 |
+ specObj.buildRequirePackages = spec.getBuildRequiresAllPackages() |
|
363 |
+ specObj.installRequiresAllPackages = spec.getRequiresAllPackages() |
|
364 |
+ specObj.listPackages = spec.getPackageNames() |
|
365 |
+ specObj.specFile = specFile |
|
366 |
+ specObj.version = spec.getVersion() |
|
367 |
+ specObj.release = spec.getRelease() |
|
368 |
+ specObj.listSources = spec.getSourceNames() |
|
369 |
+ specObj.listPatches = spec.getPatchNames() |
|
370 |
+ specObj.securityHardening = spec.getSecurityHardeningOption() |
|
367 | 371 |
for specPkg in specObj.listPackages: |
368 |
- specObj.installRequiresPackages[specPkg]=spec.getRequires(specPkg) |
|
369 |
- if (inputType == "pkg" and inputValue == specPkg): # all the first level dependencies to a dictionary and queue |
|
372 |
+ specObj.installRequiresPackages[specPkg] = spec.getRequires(specPkg) |
|
373 |
+ if inputType == "pkg" and inputValue == specPkg: |
|
374 |
+ # all the first level dependencies to a dictionary and queue |
|
370 | 375 |
packageFound = True |
371 | 376 |
for depPkg in specObj.installRequiresPackages[specPkg]: |
372 |
- if False == allDeps.has_key(depPkg): |
|
377 |
+ if depPkg not in allDeps: |
|
373 | 378 |
allDeps[depPkg] = 0 |
374 | 379 |
parent[depPkg] = "" |
375 | 380 |
depQue.put(depPkg) |
376 |
- elif (inputType == "who-needs" and (inputValue in specObj.installRequiresPackages[specPkg])): |
|
381 |
+ elif (inputType == "who-needs" and |
|
382 |
+ inputValue in specObj.installRequiresPackages[specPkg]): |
|
377 | 383 |
whoNeedsList.append(specPkg) |
378 |
- elif (inputType == "who-needs-build"): |
|
384 |
+ elif inputType == "who-needs-build": |
|
379 | 385 |
for bdrq in specObj.buildRequirePackages: |
380 |
- if (whoBuildDeps.has_key(bdrq)): |
|
386 |
+ if bdrq in whoBuildDeps: |
|
381 | 387 |
whoBuildDeps[bdrq].add(specPkg) |
382 | 388 |
else: |
383 | 389 |
whoBuildDeps[bdrq] = set() |
384 | 390 |
whoBuildDeps[bdrq].add(specPkg) |
385 |
- if(inputValue == specPkg): |
|
391 |
+ if inputValue == specPkg: |
|
386 | 392 |
packageFound = True |
387 | 393 |
for depPkg in specObj.listPackages: |
388 | 394 |
depQue.put(depPkg) |
389 | 395 |
|
390 |
- self.mapPackageToSpec[specPkg]=specName |
|
391 |
- self.mapSerializableSpecObjects[specName]=specObj |
|
396 |
+ self.mapPackageToSpec[specPkg] = specName |
|
397 |
+ self.mapSerializableSpecObjects[specName] = specObj |
|
392 | 398 |
|
393 | 399 |
# Generate dependencies for individual packages |
394 |
- if (inputType == "pkg"): |
|
395 |
- if (packageFound == True): |
|
400 |
+ if inputType == "pkg": |
|
401 |
+ if packageFound == True: |
|
396 | 402 |
self.findTotalRequires(allDeps, depQue, parent, displayOption) |
397 | 403 |
else: |
398 |
- print "No spec file builds a package named",inputValue |
|
404 |
+ print("No spec file builds a package named {}".format(inputValue)) |
|
399 | 405 |
return |
400 | 406 |
|
401 | 407 |
# Generate dependencies for all packages in the given JSON input file |
402 |
- elif (inputType == "json"): |
|
403 |
- filePath = self.inputDataDir +"/"+ inputValue |
|
408 |
+ elif inputType == "json": |
|
409 |
+ filePath = self.inputDataDir + "/" + inputValue |
|
404 | 410 |
data = self.get_all_package_names(filePath) |
405 | 411 |
for pkg in data: |
406 |
- if False == allDeps.has_key(pkg): |
|
412 |
+ if pkg not in allDeps: |
|
407 | 413 |
spName = self.getSpecName(pkg) |
408 |
- if(spName != None): |
|
414 |
+ if spName is not None: |
|
409 | 415 |
allDeps[pkg] = 0 |
410 | 416 |
parent[pkg] = "" |
411 | 417 |
depQue.put(pkg) |
412 | 418 |
self.findTotalRequires(allDeps, depQue, parent, displayOption) |
413 | 419 |
else: |
414 |
- independentRPMS.append(pkg); |
|
420 |
+ independentRPMS.append(pkg) |
|
415 | 421 |
|
416 | 422 |
#Generating the list of packages that requires the given input package at install time |
417 |
- elif (inputType == "who-needs"): |
|
418 |
- print whoNeedsList |
|
423 |
+ elif inputType == "who-needs": |
|
424 |
+ print(whoNeedsList) |
|
419 | 425 |
return |
420 | 426 |
|
421 | 427 |
#Generating the list of packages that the modified package will affect at build time |
422 |
- elif (inputType == "who-needs-build"): |
|
423 |
- if (packageFound == True): |
|
428 |
+ elif inputType == "who-needs-build": |
|
429 |
+ if packageFound == True: |
|
424 | 430 |
self.findTotalWhoNeedsToBuild(depQue, whoBuildDeps, whoBuildDepSet, displayOption) |
425 |
- print whoBuildDepSet |
|
431 |
+ print(whoBuildDepSet) |
|
426 | 432 |
else: |
427 |
- print "No spec file builds a package named", inputValue |
|
433 |
+ print("No spec file builds a package named {}".format(inputValue)) |
|
428 | 434 |
return |
429 | 435 |
|
430 | 436 |
# construct the sorted list of all packages (sorted by dependency) |
... | ... |
@@ -434,100 +446,105 @@ class SerializedSpecObjects(object): |
434 | 434 |
sortedList.extend(independentRPMS) |
435 | 435 |
|
436 | 436 |
# construct all children nodes |
437 |
- if (displayOption == "tree"): |
|
437 |
+ if displayOption == "tree": |
|
438 | 438 |
for k, v in parent.iteritems(): |
439 | 439 |
children.setdefault(v, []).append(k) |
440 |
- if(inputType == "json"): |
|
441 |
- print "Dependency Mappings for", inputValue, ":", "\n----------------------------------------------------",children |
|
442 |
- print "----------------------------------------------------" |
|
443 |
- if (children.has_key("")): |
|
440 |
+ if inputType == "json": |
|
441 |
+ print("Dependency Mappings for {}".format(inputValue) + " :") |
|
442 |
+ print("-" * 52 + " {}".format(children)) |
|
443 |
+ print("-" * 52) |
|
444 |
+ if "" in children: |
|
444 | 445 |
for child in children[""]: |
445 |
- print child |
|
446 |
+ print(child) |
|
446 | 447 |
self.printTree(allDeps, children, child, 1) |
447 | 448 |
for pkg in independentRPMS: |
448 |
- print pkg |
|
449 |
- print "******************",len(sortedList), "packages in total ******************" |
|
449 |
+ print(pkg) |
|
450 |
+ print("*" * 18 + " {} ".format(len(sortedList)) + |
|
451 |
+ "packages in total " + "*" * 18) |
|
450 | 452 |
else: |
451 |
- if (inputType == "pkg" and len(children) > 0): |
|
452 |
- print "cyclic dependency detected, mappings: \n",children |
|
453 |
+ if inputType == "pkg" and len(children) > 0: |
|
454 |
+ print("cyclic dependency detected, mappings:") |
|
455 |
+ print(children) |
|
453 | 456 |
|
454 | 457 |
# To display a flat list of all packages |
455 |
- elif(displayOption == "list"): |
|
456 |
- print sortedList |
|
458 |
+ elif displayOption == "list": |
|
459 |
+ print(sortedList) |
|
457 | 460 |
|
458 | 461 |
# To generate a new JSON file based on given input json file |
459 |
- elif(displayOption == "json" and inputType == "json"): |
|
462 |
+ elif displayOption == "json" and inputType == "json": |
|
460 | 463 |
d = {} |
461 | 464 |
d['packages'] = sortedList |
462 | 465 |
outFilePath = self.jsonFilesOutPath + inputValue |
463 |
- with open(outFilePath, 'wb') as outfile: |
|
466 |
+ with open(outFilePath, 'w') as outfile: |
|
464 | 467 |
json.dump(d, outfile) |
465 | 468 |
return sortedList |
466 | 469 |
|
467 |
- def getListSpecFiles(self,listSpecFiles,path): |
|
470 |
+ def getListSpecFiles(self, listSpecFiles, path): |
|
468 | 471 |
for dirEntry in os.listdir(path): |
469 | 472 |
dirEntryPath = os.path.join(path, dirEntry) |
470 |
- if os.path.isfile(dirEntryPath) and dirEntryPath.endswith(".spec") and os.path.basename(dirEntryPath) not in constants.skipSpecsForArch.get(platform.machine(),[]): |
|
473 |
+ if (os.path.isfile(dirEntryPath) and |
|
474 |
+ dirEntryPath.endswith(".spec") and |
|
475 |
+ os.path.basename(dirEntryPath) not in |
|
476 |
+ constants.skipSpecsForArch.get(platform.machine(), [])): |
|
471 | 477 |
listSpecFiles.append(dirEntryPath) |
472 | 478 |
elif os.path.isdir(dirEntryPath): |
473 |
- self.getListSpecFiles(listSpecFiles,dirEntryPath) |
|
479 |
+ self.getListSpecFiles(listSpecFiles, dirEntryPath) |
|
474 | 480 |
|
475 | 481 |
def getBuildRequiresForPackage(self, package): |
476 |
- specName=self.getSpecName(package) |
|
482 |
+ specName = self.getSpecName(package) |
|
477 | 483 |
return self.mapSerializableSpecObjects[specName].buildRequirePackages |
478 | 484 |
|
479 | 485 |
def getRequiresForPackage(self, package): |
480 |
- specName=self.getSpecName(package) |
|
481 |
- if self.mapSerializableSpecObjects[specName].installRequiresPackages.has_key(package): |
|
486 |
+ specName = self.getSpecName(package) |
|
487 |
+ if package in self.mapSerializableSpecObjects[specName].installRequiresPackages: |
|
482 | 488 |
return self.mapSerializableSpecObjects[specName].installRequiresPackages[package] |
483 | 489 |
return None |
484 | 490 |
|
485 | 491 |
def getRelease(self, package): |
486 |
- specName=self.getSpecName(package) |
|
492 |
+ specName = self.getSpecName(package) |
|
487 | 493 |
return self.mapSerializableSpecObjects[specName].release |
488 | 494 |
|
489 | 495 |
def getVersion(self, package): |
490 |
- specName=self.getSpecName(package) |
|
496 |
+ specName = self.getSpecName(package) |
|
491 | 497 |
return self.mapSerializableSpecObjects[specName].version |
492 | 498 |
|
493 | 499 |
def getSpecFile(self, package): |
494 |
- specName=self.getSpecName(package) |
|
500 |
+ specName = self.getSpecName(package) |
|
495 | 501 |
return self.mapSerializableSpecObjects[specName].specFile |
496 | 502 |
|
497 | 503 |
def getPatches(self, package): |
498 |
- specName=self.getSpecName(package) |
|
504 |
+ specName = self.getSpecName(package) |
|
499 | 505 |
return self.mapSerializableSpecObjects[specName].listPatches |
500 | 506 |
|
501 | 507 |
def getSources(self, package): |
502 |
- specName=self.getSpecName(package) |
|
508 |
+ specName = self.getSpecName(package) |
|
503 | 509 |
return self.mapSerializableSpecObjects[specName].listSources |
504 | 510 |
|
505 | 511 |
def getPackages(self, package): |
506 |
- specName=self.getSpecName(package) |
|
512 |
+ specName = self.getSpecName(package) |
|
507 | 513 |
return self.mapSerializableSpecObjects[specName].listPackages |
508 | 514 |
|
509 |
- def getSpecName(self,package): |
|
510 |
- if self.mapPackageToSpec.has_key(package): |
|
511 |
- specName=self.mapPackageToSpec[package] |
|
512 |
- if self.mapSerializableSpecObjects.has_key(specName): |
|
515 |
+ def getSpecName(self, package): |
|
516 |
+ if package in self.mapPackageToSpec: |
|
517 |
+ specName = self.mapPackageToSpec[package] |
|
518 |
+ if specName in self.mapSerializableSpecObjects: |
|
513 | 519 |
return specName |
514 | 520 |
else: |
515 |
- print "SpecDeps: Could not able to find " + package + " package from specs" |
|
521 |
+ print("SpecDeps: Could not able to find " + package + " package from specs") |
|
516 | 522 |
raise Exception("Invalid package:" + package) |
517 | 523 |
else: |
518 | 524 |
return None |
519 | 525 |
|
520 |
- def isRPMPackage(self,package): |
|
521 |
- if self.mapPackageToSpec.has_key(package): |
|
522 |
- specName=self.mapPackageToSpec[package] |
|
523 |
- if self.mapSerializableSpecObjects.has_key(specName): |
|
526 |
+ def isRPMPackage(self, package): |
|
527 |
+ if package in self.mapPackageToSpec: |
|
528 |
+ specName = self.mapPackageToSpec[package] |
|
529 |
+ if specName in self.mapSerializableSpecObjects: |
|
524 | 530 |
return True |
525 | 531 |
return False |
526 | 532 |
|
527 | 533 |
def getSecurityHardeningOption(self, package): |
528 |
- specName=self.getSpecName(package) |
|
534 |
+ specName = self.getSpecName(package) |
|
529 | 535 |
return self.mapSerializableSpecObjects[specName].securityHardening |
530 | 536 |
|
531 | 537 |
def getSpecDetails(self, name): |
532 |
- print self.mapSerializableSpecObjects[name].installRequiresAllPackages |
|
533 |
- |
|
538 |
+ print(self.mapSerializableSpecObjects[name].installRequiresAllPackages) |
... | ... |
@@ -1,14 +1,14 @@ |
1 |
-#! /usr/bin/python2 |
|
1 |
+#! /usr/bin/python3 |
|
2 | 2 |
# |
3 | 3 |
# Copyright (C) 2015 vmware inc. |
4 | 4 |
# |
5 | 5 |
# Author: Harish Udaiya Kumar <hudaiyakumar@vmware.com> |
6 |
-from SpecUtils import Specutils |
|
7 |
-from SpecData import SerializableSpecObject |
|
8 |
-from SpecData import SerializedSpecObjects |
|
9 | 6 |
import sys |
10 | 7 |
import os |
11 | 8 |
from optparse import OptionParser |
9 |
+from SpecUtils import Specutils |
|
10 |
+from SpecData import SerializableSpecObject |
|
11 |
+from SpecData import SerializedSpecObjects |
|
12 | 12 |
from jsonwrapper import JsonWrapper |
13 | 13 |
|
14 | 14 |
DEFAULT_INPUT_TYPE = "pkg" |
... | ... |
@@ -16,42 +16,56 @@ DEFAULT_DISPLAY_OPTION = "tree" |
16 | 16 |
SPEC_FILE_DIR = "../../SPECS" |
17 | 17 |
LOG_FILE_DIR = "../../stage/LOGS" |
18 | 18 |
|
19 |
- |
|
20 | 19 |
def main(): |
21 |
- usage = os.path.basename(__file__) + "--input-type=[json/pkg/who-needs/who-needs-build] --pkg=[pkg_name] --file=<JSON_FILE_NAME> --disp=[tree/list/json]" |
|
20 |
+ usage = (os.path.basename(__file__) + |
|
21 |
+ "--input-type=[json/pkg/who-needs/who-needs-build] " + |
|
22 |
+ "--pkg=[pkg_name] --file=<JSON_FILE_NAME> --disp=[tree/list/json]") |
|
22 | 23 |
parser = OptionParser(usage) |
23 | 24 |
parser.add_option("-i", "--input-type", dest="input_type", default=DEFAULT_INPUT_TYPE) |
24 | 25 |
parser.add_option("-p", "--pkg", dest="pkg") |
25 | 26 |
parser.add_option("-f", "--file", dest="json_file", default="packages_minimal.json") |
26 |
- parser.add_option("-d", "--disp", dest="display_option", default=DEFAULT_DISPLAY_OPTION) |
|
27 |
+ parser.add_option("-d", "--disp", dest="display_option", default=DEFAULT_DISPLAY_OPTION) |
|
27 | 28 |
parser.add_option("-s", "--spec-dir", dest="spec_dir", default=SPEC_FILE_DIR) |
28 | 29 |
parser.add_option("-t", "--stage-dir", dest="stage_dir", default="../../stage") |
29 |
- parser.add_option("-a", "--input-data-dir", dest="input_data_dir", default="../../common/data/") |
|
30 |
- (options, args) = parser.parse_args() |
|
30 |
+ parser.add_option("-a", "--input-data-dir", dest="input_data_dir", |
|
31 |
+ default="../../common/data/") |
|
32 |
+ (options, args) = parser.parse_args() |
|
31 | 33 |
|
32 |
- if(False == options.input_data_dir.endswith('/')): |
|
34 |
+ if not options.input_data_dir.endswith('/'): |
|
33 | 35 |
options.input_data_dir += '/' |
36 |
+ try: |
|
37 |
+ specDeps = SerializedSpecObjects(options.input_data_dir, options.stage_dir) |
|
38 |
+ displayOption = options.display_option |
|
39 |
+ abs_path = os.path.abspath(__file__) |
|
40 |
+ dir_name = os.path.dirname(abs_path) |
|
41 |
+ os.chdir(dir_name) |
|
34 | 42 |
|
35 |
- specDeps = SerializedSpecObjects(options.input_data_dir, options.stage_dir) |
|
36 |
- displayOption = options.display_option |
|
37 |
- abs_path = os.path.abspath(__file__) |
|
38 |
- dir_name = os.path.dirname(abs_path) |
|
39 |
- os.chdir(dir_name) |
|
40 |
- |
|
41 |
- # To display/print package dependencies on console |
|
42 |
- if(options.input_type == "pkg" or options.input_type == "who-needs" or options.input_type == "who-needs-build"): |
|
43 |
- targetName = options.pkg |
|
44 |
- specDeps.readSpecsAndConvertToSerializableObjects(options.spec_dir, options.input_type, targetName, displayOption) |
|
45 |
- elif(options.input_type == "json"):# Generate the expanded package dependencies json file based on package_list_file |
|
46 |
- json_wrapper_option_list = JsonWrapper(options.json_file) |
|
47 |
- option_list_json = json_wrapper_option_list.read() |
|
48 |
- options_sorted = option_list_json.items() |
|
49 |
- for install_option in options_sorted: |
|
50 |
- if displayOption == "tree" and install_option[1]["title"] == "ISO Packages": |
|
51 |
- continue |
|
52 |
- specDeps.readSpecsAndConvertToSerializableObjects(options.spec_dir, options.input_type, install_option[1]["file"], displayOption) |
|
43 |
+ # To display/print package dependencies on console |
|
44 |
+ if(options.input_type == "pkg" or |
|
45 |
+ options.input_type == "who-needs" or |
|
46 |
+ options.input_type == "who-needs-build"): |
|
47 |
+ targetName = options.pkg |
|
48 |
+ specDeps.readSpecsAndConvertToSerializableObjects(options.spec_dir, |
|
49 |
+ options.input_type, |
|
50 |
+ targetName, displayOption) |
|
51 |
+ elif(options.input_type == "json"): |
|
52 |
+ # Generate the expanded package dependencies json file based on package_list_file |
|
53 |
+ json_wrapper_option_list = JsonWrapper(options.json_file) |
|
54 |
+ option_list_json = json_wrapper_option_list.read() |
|
55 |
+ options_sorted = option_list_json.items() |
|
56 |
+ for install_option in options_sorted: |
|
57 |
+ if displayOption == "tree" and install_option[1]["title"] == "ISO Packages": |
|
58 |
+ continue |
|
59 |
+ specDeps.readSpecsAndConvertToSerializableObjects( |
|
60 |
+ options.spec_dir, |
|
61 |
+ options.input_type, install_option[1]["file"], |
|
62 |
+ displayOption) |
|
63 |
+ except Exception as error: |
|
64 |
+ sys.stderr.write("Failed to generate dependency lists from spec files\n") |
|
65 |
+ sys.exit(1) |
|
53 | 66 |
|
67 |
+ sys.stderr.write("Successfully generated dependency lists from spec files\n") |
|
54 | 68 |
sys.exit(0) |
55 | 69 |
|
56 |
-if __name__=="__main__": |
|
70 |
+if __name__ == "__main__": |
|
57 | 71 |
main() |
... | ... |
@@ -7,34 +7,34 @@ from constants import constants |
7 | 7 |
class SpecParser(object): |
8 | 8 |
|
9 | 9 |
def __init__(self): |
10 |
- self.cleanMacro=rpmMacro().setName("clean") |
|
11 |
- self.prepMacro=rpmMacro().setName("prep") |
|
12 |
- self.buildMacro=rpmMacro().setName("build") |
|
13 |
- self.installMacro=rpmMacro().setName("install") |
|
14 |
- self.changelogMacro=rpmMacro().setName("changelog") |
|
15 |
- self.checkMacro=rpmMacro().setName("check") |
|
16 |
- self.packages={} |
|
17 |
- self.specAdditionalContent="" |
|
18 |
- self.globalSecurityHardening="" |
|
19 |
- self.defs={} |
|
10 |
+ self.cleanMacro = rpmMacro().setName("clean") |
|
11 |
+ self.prepMacro = rpmMacro().setName("prep") |
|
12 |
+ self.buildMacro = rpmMacro().setName("build") |
|
13 |
+ self.installMacro = rpmMacro().setName("install") |
|
14 |
+ self.changelogMacro = rpmMacro().setName("changelog") |
|
15 |
+ self.checkMacro = rpmMacro().setName("check") |
|
16 |
+ self.packages = {} |
|
17 |
+ self.specAdditionalContent = "" |
|
18 |
+ self.globalSecurityHardening = "" |
|
19 |
+ self.defs = {} |
|
20 | 20 |
self.conditionalCheckMacroEnabled = False |
21 | 21 |
self.macro_pattern = re.compile(r'%{(\S+?)\}') |
22 | 22 |
|
23 | 23 |
|
24 |
- def readPkgNameFromPackageMacro(self,data,basePkgName=None): |
|
25 |
- data=" ".join(data.split()) |
|
26 |
- pkgHeaderName=data.split(" ") |
|
24 |
+ def readPkgNameFromPackageMacro(self, data, basePkgName=None): |
|
25 |
+ data = " ".join(data.split()) |
|
26 |
+ pkgHeaderName = data.split(" ") |
|
27 | 27 |
lenpkgHeaderName = len(pkgHeaderName) |
28 |
- i=1; |
|
28 |
+ i = 1 |
|
29 | 29 |
pkgName = None |
30 |
- while i<lenpkgHeaderName: |
|
30 |
+ while i < lenpkgHeaderName: |
|
31 | 31 |
if pkgHeaderName[i] == "-n" and i+1 < lenpkgHeaderName: |
32 | 32 |
pkgName = pkgHeaderName[i+1] |
33 | 33 |
break |
34 | 34 |
if pkgHeaderName[i].startswith('-'): |
35 | 35 |
i = i + 2 |
36 | 36 |
else: |
37 |
- pkgName = basePkgName+"-"+pkgHeaderName[i] |
|
37 |
+ pkgName = basePkgName + "-" + pkgHeaderName[i] |
|
38 | 38 |
break |
39 | 39 |
if pkgName is None: |
40 | 40 |
return True, basePkgName |
... | ... |
@@ -49,7 +49,7 @@ class SpecParser(object): |
49 | 49 |
|
50 | 50 |
""" |
51 | 51 |
def _is_conditional(macro): |
52 |
- return macro.startswith("?") or macro.startswith("!") |
|
52 |
+ return macro.startswith(("?", "!")) |
|
53 | 53 |
|
54 | 54 |
def _test_conditional(macro): |
55 | 55 |
if macro[0] == "?": |
... | ... |
@@ -94,68 +94,68 @@ class SpecParser(object): |
94 | 94 |
#User macros |
95 | 95 |
for macroName in constants.userDefinedMacros.keys(): |
96 | 96 |
value = constants.userDefinedMacros[macroName] |
97 |
- macro="%"+macroName |
|
97 |
+ macro = "%" + macroName |
|
98 | 98 |
if string.find(macro) != -1: |
99 |
- string = string.replace(macro,value) |
|
99 |
+ string = string.replace(macro, value) |
|
100 | 100 |
#Spec definitions |
101 | 101 |
for macroName in self.defs.keys(): |
102 | 102 |
value = self.defs[macroName] |
103 |
- macro="%"+macroName |
|
103 |
+ macro = "%" + macroName |
|
104 | 104 |
if string.find(macro) != -1: |
105 |
- string = string.replace(macro,value) |
|
105 |
+ string = string.replace(macro, value) |
|
106 | 106 |
return re.sub(self.macro_pattern, _macro_repl, string) |
107 | 107 |
|
108 |
- def parseSpecFile(self,specfile): |
|
108 |
+ def parseSpecFile(self, specfile): |
|
109 | 109 |
self.createDefaultPackage() |
110 |
- currentPkg="default" |
|
110 |
+ currentPkg = "default" |
|
111 | 111 |
specFile = open(specfile) |
112 | 112 |
lines = specFile.readlines() |
113 |
- totalLines=len(lines) |
|
114 |
- i=0 |
|
113 |
+ totalLines = len(lines) |
|
114 |
+ i = 0 |
|
115 | 115 |
while i < totalLines: |
116 | 116 |
line = lines[i].strip() |
117 | 117 |
if self.isConditionalArch(line): |
118 |
- if (platform.machine() != self.readConditionalArch(line)): |
|
118 |
+ if platform.machine() != self.readConditionalArch(line): |
|
119 | 119 |
# skip conditional body |
120 | 120 |
deep = 1 |
121 |
- while (i < totalLines and deep != 0): |
|
122 |
- i=i+1 |
|
121 |
+ while i < totalLines and deep != 0: |
|
122 |
+ i = i + 1 |
|
123 | 123 |
line = lines[i].strip() |
124 | 124 |
if self.isConditionalMacroStart(line): |
125 | 125 |
deep = deep + 1 |
126 | 126 |
elif self.isConditionalMacroEnd(line): |
127 | 127 |
deep = deep - 1 |
128 | 128 |
elif self.isIfCondition(line): |
129 |
- if (not self.isConditionTrue(line)): |
|
129 |
+ if not self.isConditionTrue(line): |
|
130 | 130 |
# skip conditional body |
131 | 131 |
deep = 1 |
132 |
- while (i < totalLines and deep != 0): |
|
133 |
- i=i+1 |
|
132 |
+ while i < totalLines and deep != 0: |
|
133 |
+ i = i + 1 |
|
134 | 134 |
line = lines[i].strip() |
135 | 135 |
if self.isConditionalMacroStart(line): |
136 | 136 |
deep = deep + 1 |
137 | 137 |
elif self.isConditionalMacroEnd(line): |
138 | 138 |
deep = deep - 1 |
139 | 139 |
elif self.isSpecMacro(line): |
140 |
- macro,i=self.readMacroFromFile(i, lines) |
|
140 |
+ macro, i = self.readMacroFromFile(i, lines) |
|
141 | 141 |
self.updateMacro(macro) |
142 | 142 |
elif self.isPackageMacro(line): |
143 | 143 |
defaultpkg = self.packages.get('default') |
144 |
- returnVal,packageName=self.readPkgNameFromPackageMacro(line, defaultpkg.name) |
|
145 |
- packageName=self.replaceMacros(packageName) |
|
144 |
+ returnVal, packageName = self.readPkgNameFromPackageMacro(line, defaultpkg.name) |
|
145 |
+ packageName = self.replaceMacros(packageName) |
|
146 | 146 |
if not returnVal: |
147 | 147 |
return False |
148 |
- if re.search('^'+'%package',line) : |
|
148 |
+ if line.startswith('%package'): |
|
149 | 149 |
pkg = Package(defaultpkg) |
150 |
- pkg.name=packageName |
|
151 |
- currentPkg=packageName |
|
152 |
- self.packages[pkg.name]=pkg |
|
150 |
+ pkg.name = packageName |
|
151 |
+ currentPkg = packageName |
|
152 |
+ self.packages[pkg.name] = pkg |
|
153 | 153 |
else: |
154 |
- if defaultpkg.name == packageName : |
|
154 |
+ if defaultpkg.name == packageName: |
|
155 | 155 |
packageName = 'default' |
156 |
- macro,i=self.readMacroFromFile(i, lines) |
|
157 |
- if not self.packages.has_key(packageName): |
|
158 |
- i=i+1 |
|
156 |
+ macro, i = self.readMacroFromFile(i, lines) |
|
157 |
+ if packageName not in self.packages: |
|
158 |
+ i = i + 1 |
|
159 | 159 |
continue |
160 | 160 |
self.packages[packageName].updatePackageMacro(macro) |
161 | 161 |
elif self.isPackageHeaders(line): |
... | ... |
@@ -171,262 +171,216 @@ class SpecParser(object): |
171 | 171 |
elif self.conditionalCheckMacroEnabled and self.isConditionalMacroEnd(line): |
172 | 172 |
self.conditionalCheckMacroEnabled = False |
173 | 173 |
else: |
174 |
- self.specAdditionalContent+=line+"\n" |
|
175 |
- i=i+1 |
|
174 |
+ self.specAdditionalContent += line + "\n" |
|
175 |
+ i = i + 1 |
|
176 | 176 |
specFile.close() |
177 | 177 |
|
178 | 178 |
def createDefaultPackage(self): |
179 | 179 |
pkg = Package() |
180 |
- self.packages["default"]=pkg |
|
180 |
+ self.packages["default"] = pkg |
|
181 | 181 |
|
182 |
- def readMacroFromFile(self,currentPos,lines): |
|
182 |
+ def readMacroFromFile(self, currentPos, lines): |
|
183 | 183 |
macro = rpmMacro() |
184 | 184 |
line = lines[currentPos] |
185 | 185 |
macro.position = currentPos |
186 |
- macro.endposition=currentPos |
|
187 |
- endPos=len(lines) |
|
186 |
+ macro.endposition = currentPos |
|
187 |
+ endPos = len(lines) |
|
188 | 188 |
line = " ".join(line.split()) |
189 | 189 |
flagindex = line.find(" ") |
190 | 190 |
if flagindex != -1: |
191 |
- macro.macroFlag=line[flagindex+1:] |
|
192 |
- macro.macroName=line[:flagindex] |
|
191 |
+ macro.macroFlag = line[flagindex+1:] |
|
192 |
+ macro.macroName = line[:flagindex] |
|
193 | 193 |
else: |
194 |
- macro.macroName=line |
|
194 |
+ macro.macroName = line |
|
195 | 195 |
|
196 |
- if currentPos+1 < len(lines) and self.isMacro(lines[currentPos+1]): |
|
197 |
- return macro,currentPos |
|
196 |
+ if currentPos + 1 < len(lines) and self.isMacro(lines[currentPos+1]): |
|
197 |
+ return macro, currentPos |
|
198 | 198 |
|
199 |
- for j in range(currentPos+1,endPos): |
|
199 |
+ for j in range(currentPos + 1, endPos): |
|
200 | 200 |
content = lines[j] |
201 | 201 |
if j+1 < endPos and self.isMacro(lines[j+1]): |
202 |
- return macro,j |
|
202 |
+ return macro, j |
|
203 | 203 |
macro.content += content +'\n' |
204 |
- macro.endposition=j |
|
205 |
- return macro,endPos |
|
204 |
+ macro.endposition = j |
|
205 |
+ return macro, endPos |
|
206 | 206 |
|
207 |
- |
|
208 |
- def updateMacro(self,macro): |
|
207 |
+ def updateMacro(self, macro): |
|
209 | 208 |
if macro.macroName == "%clean": |
210 |
- self.cleanMacro=macro |
|
209 |
+ self.cleanMacro = macro |
|
211 | 210 |
return True |
212 | 211 |
if macro.macroName == "%prep": |
213 |
- self.prepMacro=macro |
|
212 |
+ self.prepMacro = macro |
|
214 | 213 |
return True |
215 | 214 |
if macro.macroName == "%build": |
216 |
- self.buildMacro=macro |
|
215 |
+ self.buildMacro = macro |
|
217 | 216 |
return True |
218 | 217 |
if macro.macroName == "%install": |
219 |
- self.installMacro=macro |
|
218 |
+ self.installMacro = macro |
|
220 | 219 |
return True |
221 | 220 |
if macro.macroName == "%changelog": |
222 |
- self.changelogMacro=macro |
|
221 |
+ self.changelogMacro = macro |
|
223 | 222 |
return True |
224 | 223 |
if macro.macroName == "%check": |
225 |
- self.checkMacro=macro |
|
224 |
+ self.checkMacro = macro |
|
226 | 225 |
return True |
227 | 226 |
return False |
228 | 227 |
|
229 |
- def isMacro(self,line): |
|
230 |
- return self.isPackageMacro(line) or self.isSpecMacro(line) or self.isConditionalMacroStart(line) or self.isConditionalMacroEnd(line) |
|
228 |
+ def isMacro(self, line): |
|
229 |
+ return (self.isPackageMacro(line) or |
|
230 |
+ self.isSpecMacro(line) or |
|
231 |
+ self.isConditionalMacroStart(line) or |
|
232 |
+ self.isConditionalMacroEnd(line)) |
|
231 | 233 |
|
232 |
- def isConditionalArch(self,line): |
|
233 |
- if re.search('^'+'%ifarch',line) : |
|
234 |
+ def isConditionalArch(self, line): |
|
235 |
+ if re.search('^'+'%ifarch', line): |
|
234 | 236 |
return True |
235 | 237 |
return False |
236 | 238 |
|
237 |
- def isSpecMacro(self,line): |
|
238 |
- if re.search('^'+'%clean',line) : |
|
239 |
- return True |
|
240 |
- elif re.search('^'+'%prep',line) : |
|
241 |
- return True |
|
242 |
- elif re.search('^'+'%build',line) : |
|
243 |
- return True |
|
244 |
- elif re.search('^'+'%install',line) : |
|
245 |
- return True |
|
246 |
- elif re.search('^'+'%changelog',line) : |
|
247 |
- return True |
|
248 |
- elif re.search('^'+'%check',line) : |
|
239 |
+ def isSpecMacro(self, line): |
|
240 |
+ if line.startswith(('%clean', '%prep', '%build', '%install', '%changelog', '%check')): |
|
249 | 241 |
return True |
250 | 242 |
return False |
251 | 243 |
|
252 |
- def isPackageMacro(self,line): |
|
253 |
- line=line.strip() |
|
254 |
- |
|
255 |
- if re.search('^'+'%post',line) : |
|
256 |
- return True |
|
257 |
- elif re.search('^'+'%postun',line) : |
|
258 |
- return True |
|
259 |
- elif re.search('^'+'%files',line) : |
|
260 |
- return True |
|
261 |
- elif re.search('^'+'%description',line) : |
|
262 |
- return True |
|
263 |
- elif re.search('^'+'%package',line) : |
|
244 |
+ def isPackageMacro(self, line): |
|
245 |
+ line = line.strip() |
|
246 |
+ if line.startswith(('%post', '%postun', '%files', '%description', '%package')): |
|
264 | 247 |
return True |
265 | 248 |
return False |
266 | 249 |
|
267 |
- def isPackageHeaders(self,line): |
|
268 |
- if re.search('^'+'summary:',line,flags=re.IGNORECASE) : |
|
269 |
- return True |
|
270 |
- elif re.search('^'+'name:',line,flags=re.IGNORECASE) : |
|
271 |
- return True |
|
272 |
- elif re.search('^'+'group:',line,flags=re.IGNORECASE) : |
|
273 |
- return True |
|
274 |
- elif re.search('^'+'license:',line,flags=re.IGNORECASE) : |
|
275 |
- return True |
|
276 |
- elif re.search('^'+'version:',line,flags=re.IGNORECASE) : |
|
277 |
- return True |
|
278 |
- elif re.search('^'+'release:',line,flags=re.IGNORECASE) : |
|
279 |
- return True |
|
280 |
- elif re.search('^'+'distribution:',line,flags=re.IGNORECASE) : |
|
281 |
- return True |
|
282 |
- elif re.search('^'+'requires:',line,flags=re.IGNORECASE) : |
|
283 |
- return True |
|
284 |
- elif re.search('^'+'requires\((pre|post|preun|postun)\):',line,flags=re.IGNORECASE) : |
|
285 |
- return True |
|
286 |
- elif re.search('^'+'provides:',line,flags=re.IGNORECASE) : |
|
287 |
- return True |
|
288 |
- elif re.search('^'+'obsoletes:',line,flags=re.IGNORECASE) : |
|
289 |
- return True |
|
290 |
- elif re.search('^'+'conflicts:',line,flags=re.IGNORECASE) : |
|
291 |
- return True |
|
292 |
- elif re.search('^'+'url:',line,flags=re.IGNORECASE) : |
|
293 |
- return True |
|
294 |
- elif re.search('^'+'source[0-9]*:',line,flags=re.IGNORECASE) : |
|
295 |
- return True |
|
296 |
- elif re.search('^'+'patch[0-9]*:',line,flags=re.IGNORECASE) : |
|
297 |
- return True |
|
298 |
- elif re.search('^'+'buildrequires:',line,flags=re.IGNORECASE) : |
|
299 |
- return True |
|
300 |
- elif re.search('^'+'buildprovides:',line,flags=re.IGNORECASE) : |
|
301 |
- return True |
|
302 |
- elif re.search('^'+'buildarch:',line,flags=re.IGNORECASE) : |
|
250 |
+ def isPackageHeaders(self, line): |
|
251 |
+ headersPatterns = ['^summary:', '^name:', '^group:', |
|
252 |
+ '^license:', '^version:', '^release:', |
|
253 |
+ '^distribution:', '^requires:', |
|
254 |
+ '^requires\((pre|post|preun|postun)\):', |
|
255 |
+ '^provides:', '^obsoletes:', '^conflicts:', |
|
256 |
+ '^url:', '^source[0-9]*:', '^patch[0-9]*:', |
|
257 |
+ '^buildrequires:', '^buildprovides:', |
|
258 |
+ '^buildarch:'] |
|
259 |
+ if any([re.search(r, line, flags=re.IGNORECASE) for r in headersPatterns]): |
|
303 | 260 |
return True |
304 | 261 |
return False |
305 | 262 |
|
306 |
- def isGlobalSecurityHardening(self,line): |
|
307 |
- if re.search('^%global *security_hardening',line,flags=re.IGNORECASE) : |
|
263 |
+ def isGlobalSecurityHardening(self, line): |
|
264 |
+ if re.search('^%global *security_hardening', line, flags=re.IGNORECASE): |
|
308 | 265 |
return True |
309 | 266 |
return False |
310 | 267 |
|
311 |
- def isChecksum(self,line): |
|
312 |
- if re.search('^%define *sha1',line,flags=re.IGNORECASE) : |
|
268 |
+ def isChecksum(self, line): |
|
269 |
+ if re.search('^%define *sha1', line, flags=re.IGNORECASE): |
|
313 | 270 |
return True |
314 | 271 |
return False |
315 | 272 |
|
316 |
- def isDefinition(self,line): |
|
317 |
- if re.search('^'+'%define',line) : |
|
318 |
- return True |
|
319 |
- if re.search('^'+'%global',line) : |
|
273 |
+ def isDefinition(self, line): |
|
274 |
+ if line.startswith(('%define', '%global')): |
|
320 | 275 |
return True |
321 | 276 |
return False |
322 | 277 |
|
323 |
- def readConditionalArch(self,line): |
|
324 |
- w=line.split() |
|
278 |
+ def readConditionalArch(self, line): |
|
279 |
+ w = line.split() |
|
325 | 280 |
if len(w) == 2: |
326 |
- return w[1] |
|
281 |
+ return w[1] |
|
327 | 282 |
return None |
328 | 283 |
|
329 |
- def readDefinition(self,line): |
|
330 |
- listDefines=line.split() |
|
284 |
+ def readDefinition(self, line): |
|
285 |
+ listDefines = line.split() |
|
331 | 286 |
if len(listDefines) == 3: |
332 |
- self.defs[listDefines[1]] = self.replaceMacros(listDefines[2]) |
|
333 |
- return True |
|
287 |
+ self.defs[listDefines[1]] = self.replaceMacros(listDefines[2]) |
|
288 |
+ return True |
|
334 | 289 |
return False |
335 | 290 |
|
336 |
- def readHeader(self,line): |
|
337 |
- headerSplitIndex=line.find(":") |
|
338 |
- if(headerSplitIndex+1 == len(line) ): |
|
339 |
- print line |
|
340 |
- print "Error:Invalid header" |
|
341 |
- return False, None,None |
|
342 |
- headerName=line[0:headerSplitIndex].lower() |
|
343 |
- headerContent=line[headerSplitIndex+1:].strip() |
|
344 |
- return True,headerName,headerContent |
|
345 |
- |
|
346 |
- |
|
347 |
- def readDependentPackageData(self,line): |
|
291 |
+ def readHeader(self, line): |
|
292 |
+ headerSplitIndex = line.find(":") |
|
293 |
+ if headerSplitIndex + 1 == len(line): |
|
294 |
+ print(line) |
|
295 |
+ print("Error:Invalid header") |
|
296 |
+ return False, None, None |
|
297 |
+ headerName = line[0:headerSplitIndex].lower() |
|
298 |
+ headerContent = line[headerSplitIndex + 1:].strip() |
|
299 |
+ return True, headerName, headerContent |
|
300 |
+ |
|
301 |
+ def readDependentPackageData(self, line): |
|
348 | 302 |
strUtils = StringUtils() |
349 |
- listPackages=line.split(",") |
|
350 |
- listdependentpkgs=[] |
|
303 |
+ listPackages = line.split(",") |
|
304 |
+ listdependentpkgs = [] |
|
351 | 305 |
for line in listPackages: |
352 |
- line=strUtils.getStringInConditionalBrackets(line) |
|
353 |
- listContents=line.split() |
|
306 |
+ line = strUtils.getStringInConditionalBrackets(line) |
|
307 |
+ listContents = line.split() |
|
354 | 308 |
totalContents = len(listContents) |
355 |
- i=0 |
|
309 |
+ i = 0 |
|
356 | 310 |
while i < totalContents: |
357 | 311 |
dpkg = dependentPackageData() |
358 |
- compare=None |
|
359 |
- packageName=listContents[i] |
|
312 |
+ compare = None |
|
313 |
+ packageName = listContents[i] |
|
360 | 314 |
if listContents[i].startswith("/"): |
361 |
- provider=constants.providedBy.get(listContents[i], None) |
|
362 |
- i=i+1 |
|
315 |
+ provider = constants.providedBy.get(listContents[i], None) |
|
316 |
+ i += 1 |
|
363 | 317 |
if provider is not None: |
364 |
- packageName=provider |
|
318 |
+ packageName = provider |
|
365 | 319 |
else: |
366 | 320 |
continue |
367 | 321 |
if i+2 < len(listContents): |
368 | 322 |
if listContents[i+1] == ">=": |
369 |
- compare="gte" |
|
323 |
+ compare = "gte" |
|
370 | 324 |
elif listContents[i+1] == "<=": |
371 |
- compare="lte" |
|
325 |
+ compare = "lte" |
|
372 | 326 |
elif listContents[i+1] == "==": |
373 |
- compare="eq" |
|
327 |
+ compare = "eq" |
|
374 | 328 |
elif listContents[i+1] == "<": |
375 |
- compare="lt" |
|
329 |
+ compare = "lt" |
|
376 | 330 |
elif listContents[i+1] == ">": |
377 |
- compare="gt" |
|
331 |
+ compare = "gt" |
|
378 | 332 |
elif listContents[i+1] == "=": |
379 |
- compare="eq" |
|
333 |
+ compare = "eq" |
|
380 | 334 |
|
381 | 335 |
if compare is not None: |
382 |
- dpkg.package=packageName |
|
383 |
- dpkg.compare=compare |
|
384 |
- dpkg.version=listContents[i+2] |
|
385 |
- i=i+3 |
|
336 |
+ dpkg.package = packageName |
|
337 |
+ dpkg.compare = compare |
|
338 |
+ dpkg.version = listContents[i+2] |
|
339 |
+ i = i + 3 |
|
386 | 340 |
else: |
387 |
- dpkg.package=packageName |
|
388 |
- i=i+1 |
|
341 |
+ dpkg.package = packageName |
|
342 |
+ i = i + 1 |
|
389 | 343 |
listdependentpkgs.append(dpkg) |
390 | 344 |
return listdependentpkgs |
391 | 345 |
|
392 |
- def readPackageHeaders(self,line,pkg): |
|
393 |
- returnVal,headerName,headerContent=self.readHeader(line) |
|
346 |
+ def readPackageHeaders(self, line, pkg): |
|
347 |
+ returnVal, headerName, headerContent = self.readHeader(line) |
|
394 | 348 |
if not returnVal: |
395 | 349 |
return False |
396 | 350 |
|
397 |
- headerContent=self.replaceMacros(headerContent) |
|
351 |
+ headerContent = self.replaceMacros(headerContent) |
|
398 | 352 |
if headerName == 'summary': |
399 |
- pkg.summary=headerContent |
|
353 |
+ pkg.summary = headerContent |
|
400 | 354 |
return True |
401 | 355 |
if headerName == 'name': |
402 |
- pkg.name=headerContent |
|
403 |
- if (pkg == self.packages["default"]): |
|
356 |
+ pkg.name = headerContent |
|
357 |
+ if pkg == self.packages["default"]: |
|
404 | 358 |
self.defs["name"] = pkg.name |
405 | 359 |
return True |
406 | 360 |
if headerName == 'group': |
407 |
- pkg.group=headerContent |
|
361 |
+ pkg.group = headerContent |
|
408 | 362 |
return True |
409 | 363 |
if headerName == 'license': |
410 |
- pkg.license=headerContent |
|
364 |
+ pkg.license = headerContent |
|
411 | 365 |
return True |
412 | 366 |
if headerName == 'version': |
413 |
- pkg.version=headerContent |
|
414 |
- if (pkg == self.packages["default"]): |
|
367 |
+ pkg.version = headerContent |
|
368 |
+ if pkg == self.packages["default"]: |
|
415 | 369 |
self.defs["version"] = pkg.version |
416 | 370 |
return True |
417 | 371 |
if headerName == 'buildarch': |
418 |
- pkg.buildarch=headerContent |
|
372 |
+ pkg.buildarch = headerContent |
|
419 | 373 |
return True |
420 | 374 |
if headerName == 'release': |
421 |
- pkg.release=headerContent |
|
422 |
- if (pkg == self.packages["default"]): |
|
375 |
+ pkg.release = headerContent |
|
376 |
+ if pkg == self.packages["default"]: |
|
423 | 377 |
self.defs["release"] = pkg.release |
424 | 378 |
return True |
425 | 379 |
if headerName == 'distribution': |
426 |
- pkg.distribution=headerContent |
|
380 |
+ pkg.distribution = headerContent |
|
427 | 381 |
return True |
428 | 382 |
if headerName == 'url': |
429 |
- pkg.URL=headerContent |
|
383 |
+ pkg.URL = headerContent |
|
430 | 384 |
return True |
431 | 385 |
if headerName.find('source') != -1: |
432 | 386 |
pkg.sources.append(headerContent) |
... | ... |
@@ -434,8 +388,13 @@ class SpecParser(object): |
434 | 434 |
if headerName.find('patch') != -1: |
435 | 435 |
pkg.patches.append(headerContent) |
436 | 436 |
return True |
437 |
- if headerName.startswith('requires') or headerName == 'provides' or headerName == 'obsoletes' or headerName == 'conflicts' or headerName == 'buildrequires' or headerName == 'buildprovides': |
|
438 |
- dpkg=self.readDependentPackageData(headerContent) |
|
437 |
+ if (headerName.startswith('requires') or |
|
438 |
+ headerName == 'provides' or |
|
439 |
+ headerName == 'obsoletes' or |
|
440 |
+ headerName == 'conflicts' or |
|
441 |
+ headerName == 'buildrequires' or |
|
442 |
+ headerName == 'buildprovides'): |
|
443 |
+ dpkg = self.readDependentPackageData(headerContent) |
|
439 | 444 |
if dpkg is None: |
440 | 445 |
return False |
441 | 446 |
if headerName.startswith('requires'): |
... | ... |
@@ -457,47 +416,48 @@ class SpecParser(object): |
457 | 457 |
return True |
458 | 458 |
return False |
459 | 459 |
|
460 |
- def readSecurityHardening(self,line): |
|
461 |
- data = line.lower().strip(); |
|
462 |
- words=data.split(" ") |
|
460 |
+ def readSecurityHardening(self, line): |
|
461 |
+ data = line.lower().strip() |
|
462 |
+ words = data.split(" ") |
|
463 | 463 |
nrWords = len(words) |
464 |
- if (nrWords != 3): |
|
465 |
- print "Error: Unable to parse line: "+line |
|
464 |
+ if nrWords != 3: |
|
465 |
+ print("Error: Unable to parse line: " + line) |
|
466 | 466 |
return False |
467 |
- if (words[2] != "none" and words[2] != "nonow" and words[2] != "nopie") : |
|
468 |
- print "Error: Invalid security_hardening value: " + words[2] |
|
467 |
+ if words[2] != "none" and words[2] != "nonow" and words[2] != "nopie": |
|
468 |
+ print("Error: Invalid security_hardening value: " + words[2]) |
|
469 | 469 |
return False |
470 | 470 |
self.globalSecurityHardening = words[2] |
471 |
- return True; |
|
471 |
+ return True |
|
472 | 472 |
|
473 |
- def readChecksum(self,line,pkg): |
|
473 |
+ def readChecksum(self, line, pkg): |
|
474 | 474 |
strUtils = StringUtils() |
475 |
- line=self.replaceMacros(line) |
|
476 |
- data = line.strip(); |
|
477 |
- words=data.split() |
|
475 |
+ line = self.replaceMacros(line) |
|
476 |
+ data = line.strip() |
|
477 |
+ words = data.split() |
|
478 | 478 |
nrWords = len(words) |
479 |
- if (nrWords != 3): |
|
480 |
- print "Error: Unable to parse line: "+line |
|
479 |
+ if nrWords != 3: |
|
480 |
+ print("Error: Unable to parse line: " + line) |
|
481 | 481 |
return False |
482 |
- value=words[2].split("=") |
|
483 |
- if (len(value) != 2): |
|
484 |
- print "Error: Unable to parse line: "+line |
|
482 |
+ value = words[2].split("=") |
|
483 |
+ if len(value) != 2: |
|
484 |
+ print("Error: Unable to parse line: "+line) |
|
485 | 485 |
return False |
486 |
- matchedSources=[] |
|
486 |
+ matchedSources = [] |
|
487 | 487 |
for source in pkg.sources: |
488 |
- sourceName=strUtils.getFileNameFromURL(source) |
|
488 |
+ sourceName = strUtils.getFileNameFromURL(source) |
|
489 | 489 |
if (sourceName.startswith(value[0])): |
490 | 490 |
matchedSources.append(sourceName) |
491 | 491 |
if (len(matchedSources) == 0): |
492 |
- print "Error: Can not find match for sha1 "+value[0] |
|
492 |
+ print("Error: Can not find match for sha1 " + value[0]) |
|
493 | 493 |
return False |
494 | 494 |
if (len(matchedSources) > 1): |
495 |
- print "Error: Too many matched Sources:" + ' '.join(matchedSources) + " for sha1 "+value[0] |
|
495 |
+ print("Error: Too many matched Sources:" + |
|
496 |
+ ' '.join(matchedSources) + " for sha1 " + value[0]) |
|
496 | 497 |
return False |
497 | 498 |
pkg.checksums[sourceName] = value[1] |
498 | 499 |
return True; |
499 | 500 |
|
500 |
- def isConditionalCheckMacro(self,line): |
|
501 |
+ def isConditionalCheckMacro(self, line): |
|
501 | 502 |
data = line.strip() |
502 | 503 |
words = data.split() |
503 | 504 |
nrWords = len(words) |
... | ... |
@@ -522,8 +482,8 @@ class SpecParser(object): |
522 | 522 |
return False |
523 | 523 |
return True |
524 | 524 |
|
525 |
- def isConditionalMacroStart(self,line): |
|
525 |
+ def isConditionalMacroStart(self, line): |
|
526 | 526 |
return line.startswith("%if") |
527 | 527 |
|
528 |
- def isConditionalMacroEnd(self,line): |
|
528 |
+ def isConditionalMacroEnd(self, line): |
|
529 | 529 |
return (line.strip() == "%endif") |
... | ... |
@@ -3,76 +3,78 @@ import platform |
3 | 3 |
class rpmMacro(object): |
4 | 4 |
|
5 | 5 |
def __init__(self): |
6 |
- self.macroName="" |
|
7 |
- self.macroFlag="" |
|
8 |
- self.content="" |
|
9 |
- self.position=-1 |
|
10 |
- self.endposition=-1 |
|
6 |
+ self.macroName = "" |
|
7 |
+ self.macroFlag = "" |
|
8 |
+ self.content = "" |
|
9 |
+ self.position = -1 |
|
10 |
+ self.endposition = -1 |
|
11 | 11 |
|
12 |
- def setName(self,name): |
|
13 |
- self.macroName=name |
|
12 |
+ def setName(self, name): |
|
13 |
+ self.macroName = name |
|
14 | 14 |
|
15 | 15 |
def displayMacro(self): |
16 |
- print "Macro:\n", self.macroName, " ",self.macroFlag," ",self.position," ",self.endposition |
|
17 |
- print self.content |
|
16 |
+ print("Macro:") |
|
17 |
+ print(self.macroName + " {}".format(self.macroFlag) |
|
18 |
+ + " {}".format(self.position) |
|
19 |
+ + " {}".format(self.endposition)) |
|
20 |
+ print(self.content) |
|
18 | 21 |
|
19 | 22 |
class dependentPackageData(object): |
20 | 23 |
|
21 | 24 |
def __init__(self): |
22 |
- self.package="" |
|
23 |
- self.version="" |
|
24 |
- self.compare="" |
|
25 |
+ self.package = "" |
|
26 |
+ self.version = "" |
|
27 |
+ self.compare = "" |
|
25 | 28 |
|
26 | 29 |
class Package(object): |
27 | 30 |
def __init__(self, basePkg=None): |
28 |
- self.summary="" |
|
29 |
- self.name="" |
|
30 |
- self.group="" |
|
31 |
- self.license="" |
|
32 |
- self.version="" |
|
33 |
- self.release="" |
|
34 |
- self.buildarch=platform.machine() |
|
35 |
- self.distribution="Photon" |
|
36 |
- self.basePkgName="" |
|
37 |
- self.URL="" |
|
31 |
+ self.summary = "" |
|
32 |
+ self.name = "" |
|
33 |
+ self.group = "" |
|
34 |
+ self.license = "" |
|
35 |
+ self.version = "" |
|
36 |
+ self.release = "" |
|
37 |
+ self.buildarch = platform.machine() |
|
38 |
+ self.distribution = "Photon" |
|
39 |
+ self.basePkgName = "" |
|
40 |
+ self.URL = "" |
|
38 | 41 |
|
39 |
- self.sources=[] |
|
40 |
- self.checksums={} |
|
41 |
- self.patches=[] |
|
42 |
- self.buildrequires=[] |
|
43 |
- self.buildprovides=[] |
|
44 |
- self.checkbuildrequires=[] |
|
42 |
+ self.sources = [] |
|
43 |
+ self.checksums = {} |
|
44 |
+ self.patches = [] |
|
45 |
+ self.buildrequires = [] |
|
46 |
+ self.buildprovides = [] |
|
47 |
+ self.checkbuildrequires = [] |
|
45 | 48 |
|
49 |
+ self.requires = [] |
|
50 |
+ self.provides = [] |
|
51 |
+ self.obsoletes = [] |
|
52 |
+ self.conflicts = [] |
|
46 | 53 |
|
47 |
- self.requires=[] |
|
48 |
- self.provides=[] |
|
49 |
- self.obsoletes=[] |
|
50 |
- self.conflicts=[] |
|
51 |
- |
|
52 |
- self.descriptionMacro= None |
|
53 |
- self.postMacro=None |
|
54 |
- self.postunMacro=None |
|
55 |
- self.filesMacro=None |
|
56 |
- self.packageMacro=None |
|
54 |
+ self.descriptionMacro = None |
|
55 |
+ self.postMacro = None |
|
56 |
+ self.postunMacro = None |
|
57 |
+ self.filesMacro = None |
|
58 |
+ self.packageMacro = None |
|
57 | 59 |
|
58 | 60 |
if basePkg is not None: |
59 |
- self.basePkgName=basePkg.name |
|
60 |
- self.group=basePkg.group |
|
61 |
- self.license=basePkg.license |
|
62 |
- self.version=basePkg.version |
|
63 |
- self.buildarch=basePkg.buildarch |
|
64 |
- self.release=basePkg.release |
|
65 |
- self.distribution=basePkg.distribution |
|
61 |
+ self.basePkgName = basePkg.name |
|
62 |
+ self.group = basePkg.group |
|
63 |
+ self.license = basePkg.license |
|
64 |
+ self.version = basePkg.version |
|
65 |
+ self.buildarch = basePkg.buildarch |
|
66 |
+ self.release = basePkg.release |
|
67 |
+ self.distribution = basePkg.distribution |
|
66 | 68 |
|
67 |
- def updatePackageMacro(self,macro): |
|
69 |
+ def updatePackageMacro(self, macro): |
|
68 | 70 |
if macro.macroName == "%post": |
69 |
- self.postMacro=macro |
|
71 |
+ self.postMacro = macro |
|
70 | 72 |
return True |
71 | 73 |
if macro.macroName == "%postun": |
72 |
- self.postunMacro=macro |
|
74 |
+ self.postunMacro = macro |
|
73 | 75 |
return True |
74 | 76 |
if macro.macroName == "%files": |
75 |
- self.filesMacro=macro |
|
77 |
+ self.filesMacro = macro |
|
76 | 78 |
return True |
77 | 79 |
if macro.macroName == "%description": |
78 | 80 |
self.descriptionMacro = macro |
... | ... |
@@ -1,30 +1,30 @@ |
1 |
-from SpecParser import SpecParser |
|
2 |
-from StringUtils import StringUtils |
|
3 | 1 |
import platform |
4 | 2 |
import os |
3 |
+from SpecParser import SpecParser |
|
4 |
+from StringUtils import StringUtils |
|
5 | 5 |
|
6 | 6 |
class Specutils(object): |
7 | 7 |
|
8 |
- def __init__(self,specfile): |
|
9 |
- self.specfile="" |
|
8 |
+ def __init__(self, specfile): |
|
9 |
+ self.specfile = "" |
|
10 | 10 |
self.spec = SpecParser() |
11 | 11 |
if self.isSpecFile(specfile): |
12 |
- self.specfile=specfile |
|
12 |
+ self.specfile = specfile |
|
13 | 13 |
self.spec.parseSpecFile(self.specfile) |
14 | 14 |
|
15 |
- def isSpecFile(self,specfile): |
|
16 |
- if os.path.isfile(specfile) and specfile[-5:] == ".spec": |
|
15 |
+ def isSpecFile(self, specfile): |
|
16 |
+ if os.path.isfile(specfile) and specfile.endswith(".spec"): |
|
17 | 17 |
return True |
18 | 18 |
return False |
19 | 19 |
|
20 | 20 |
def getSourceNames(self): |
21 |
- sourceNames=[] |
|
21 |
+ sourceNames = [] |
|
22 | 22 |
strUtils = StringUtils() |
23 | 23 |
pkg = self.spec.packages.get('default') |
24 | 24 |
if pkg is None: |
25 | 25 |
return None |
26 | 26 |
for source in pkg.sources: |
27 |
- sourceName=strUtils.getFileNameFromURL(source) |
|
27 |
+ sourceName = strUtils.getFileNameFromURL(source) |
|
28 | 28 |
sourceNames.append(sourceName) |
29 | 29 |
return sourceNames |
30 | 30 |
|
... | ... |
@@ -32,12 +32,12 @@ class Specutils(object): |
32 | 32 |
pkg = self.spec.packages.get('default') |
33 | 33 |
return pkg.checksums |
34 | 34 |
|
35 |
- def getChecksumForSource(self,source): |
|
35 |
+ def getChecksumForSource(self, source): |
|
36 | 36 |
pkg = self.spec.packages.get('default') |
37 | 37 |
return pkg.checksums.get(source) |
38 | 38 |
|
39 | 39 |
def getSourceURLs(self): |
40 |
- sourceNames=[] |
|
40 |
+ sourceNames = [] |
|
41 | 41 |
strUtils = StringUtils() |
42 | 42 |
pkg = self.spec.packages.get('default') |
43 | 43 |
if pkg is None: |
... | ... |
@@ -47,25 +47,24 @@ class Specutils(object): |
47 | 47 |
return sourceNames |
48 | 48 |
|
49 | 49 |
def getPatchNames(self): |
50 |
- patchNames=[] |
|
50 |
+ patchNames = [] |
|
51 | 51 |
strUtils = StringUtils() |
52 | 52 |
pkg = self.spec.packages.get('default') |
53 | 53 |
if pkg is None: |
54 | 54 |
return None |
55 | 55 |
for patch in pkg.patches: |
56 |
- patchName=strUtils.getFileNameFromURL(patch) |
|
56 |
+ patchName = strUtils.getFileNameFromURL(patch) |
|
57 | 57 |
patchNames.append(patchName) |
58 | 58 |
return patchNames |
59 | 59 |
|
60 | 60 |
def getPackageNames(self): |
61 |
- packageNames=[] |
|
62 |
- for key in self.spec.packages.keys(): |
|
63 |
- pkg = self.spec.packages.get(key) |
|
61 |
+ packageNames = [] |
|
62 |
+ for pkg in self.spec.packages.values(): |
|
64 | 63 |
packageNames.append(pkg.name) |
65 | 64 |
return packageNames |
66 | 65 |
|
67 |
- def getIsRPMPackage(self,pkgName): |
|
68 |
- defaultPkgName=self.spec.packages['default'].name |
|
66 |
+ def getIsRPMPackage(self, pkgName): |
|
67 |
+ defaultPkgName = self.spec.packages['default'].name |
|
69 | 68 |
if pkgName == defaultPkgName: |
70 | 69 |
pkgName = "default" |
71 | 70 |
if pkgName in self.spec.packages.keys(): |
... | ... |
@@ -75,42 +74,38 @@ class Specutils(object): |
75 | 75 |
return False |
76 | 76 |
|
77 | 77 |
def getRPMNames(self): |
78 |
- rpmNames=[] |
|
79 |
- for key in self.spec.packages.keys(): |
|
80 |
- pkg = self.spec.packages.get(key) |
|
81 |
- rpmName=pkg.name+"-"+pkg.version+"-"+pkg.release |
|
78 |
+ rpmNames = [] |
|
79 |
+ for pkg in self.spec.packages.values(): |
|
80 |
+ rpmName = pkg.name + "-" + pkg.version + "-" + pkg.release |
|
82 | 81 |
rpmNames.append(rpmName) |
83 | 82 |
return rpmNames |
84 | 83 |
|
85 | 84 |
def getRPMName(self, pkgName): |
86 |
- rpmName=None |
|
87 |
- for key in self.spec.packages.keys(): |
|
88 |
- pkg = self.spec.packages.get(key) |
|
85 |
+ rpmName = None |
|
86 |
+ for pkg in self.spec.packages.values(): |
|
89 | 87 |
if pkg.name == pkgName: |
90 |
- rpmName=pkg.name+"-"+pkg.version+"-"+pkg.release |
|
88 |
+ rpmName = pkg.name + "-" + pkg.version + "-" + pkg.release |
|
91 | 89 |
break |
92 | 90 |
return rpmName |
93 | 91 |
|
94 | 92 |
def getRPMVersion(self, pkgName): |
95 |
- version=None |
|
96 |
- for key in self.spec.packages.keys(): |
|
97 |
- pkg = self.spec.packages.get(key) |
|
93 |
+ version = None |
|
94 |
+ for pkg in self.spec.packages.values(): |
|
98 | 95 |
if pkg.name == pkgName: |
99 |
- version=pkg.version |
|
96 |
+ version = pkg.version |
|
100 | 97 |
break |
101 | 98 |
return version |
102 | 99 |
|
103 | 100 |
def getRPMRelease(self, pkgName): |
104 |
- release=None |
|
105 |
- for key in self.spec.packages.keys(): |
|
106 |
- pkg = self.spec.packages.get(key) |
|
101 |
+ release = None |
|
102 |
+ for pkg in self.spec.packages.values(): |
|
107 | 103 |
if pkg.name == pkgName: |
108 |
- release=pkg.release |
|
104 |
+ release = pkg.release |
|
109 | 105 |
break |
110 | 106 |
return release |
111 | 107 |
|
112 | 108 |
def getLicense(self): |
113 |
- licenseInfo=None |
|
109 |
+ licenseInfo = None |
|
114 | 110 |
pkg = self.spec.packages.get('default') |
115 | 111 |
if pkg is None: |
116 | 112 |
return None |
... | ... |
@@ -134,80 +129,74 @@ class Specutils(object): |
134 | 134 |
return None |
135 | 135 |
|
136 | 136 |
def getBuildArch(self, pkgName): |
137 |
- buildArch=platform.machine() |
|
138 |
- for key in self.spec.packages.keys(): |
|
139 |
- pkg = self.spec.packages.get(key) |
|
137 |
+ buildArch = platform.machine() |
|
138 |
+ for pkg in self.spec.packages.values(): |
|
140 | 139 |
if pkg.name == pkgName: |
141 |
- buildArch=pkg.buildarch |
|
140 |
+ buildArch = pkg.buildarch |
|
142 | 141 |
break |
143 | 142 |
return buildArch |
144 | 143 |
|
145 | 144 |
def getRequiresAllPackages(self): |
146 |
- dependentPackages=[] |
|
147 |
- for key in self.spec.packages.keys(): |
|
148 |
- pkg = self.spec.packages.get(key) |
|
145 |
+ dependentPackages = [] |
|
146 |
+ for pkg in self.spec.packages.values(): |
|
149 | 147 |
for dpkg in pkg.requires: |
150 | 148 |
dependentPackages.append(dpkg.package) |
151 |
- dependentPackages=list(set(dependentPackages)) |
|
152 |
- packageNames=self.getPackageNames() |
|
149 |
+ dependentPackages = list(set(dependentPackages)) |
|
150 |
+ packageNames = self.getPackageNames() |
|
153 | 151 |
for pkgName in packageNames: |
154 | 152 |
if pkgName in dependentPackages: |
155 | 153 |
dependentPackages.remove(pkgName) |
156 | 154 |
return dependentPackages |
157 | 155 |
|
158 | 156 |
def getBuildRequiresAllPackages(self): |
159 |
- dependentPackages=[] |
|
160 |
- for key in self.spec.packages.keys(): |
|
161 |
- pkg = self.spec.packages.get(key) |
|
157 |
+ dependentPackages = [] |
|
158 |
+ for pkg in self.spec.packages.values(): |
|
162 | 159 |
for dpkg in pkg.buildrequires: |
163 | 160 |
dependentPackages.append(dpkg.package) |
164 |
- dependentPackages=list(set(dependentPackages)) |
|
165 |
- packageNames=self.getPackageNames() |
|
161 |
+ dependentPackages = list(set(dependentPackages)) |
|
162 |
+ packageNames = self.getPackageNames() |
|
166 | 163 |
for pkgName in packageNames: |
167 | 164 |
if pkgName in dependentPackages: |
168 | 165 |
dependentPackages.remove(pkgName) |
169 | 166 |
return dependentPackages |
170 | 167 |
|
171 | 168 |
def getCheckBuildRequiresAllPackages(self): |
172 |
- dependentPackages=[] |
|
173 |
- for key in self.spec.packages.keys(): |
|
174 |
- pkg = self.spec.packages.get(key) |
|
169 |
+ dependentPackages = [] |
|
170 |
+ for pkg in self.spec.packages.values(): |
|
175 | 171 |
for dpkg in pkg.checkbuildrequires: |
176 | 172 |
dependentPackages.append(dpkg.package) |
177 |
- dependentPackages=list(set(dependentPackages)) |
|
173 |
+ dependentPackages = list(set(dependentPackages)) |
|
178 | 174 |
return dependentPackages |
179 | 175 |
|
180 |
- def getRequires(self,pkgName): |
|
181 |
- dependentPackages=[] |
|
182 |
- for key in self.spec.packages.keys(): |
|
183 |
- pkg = self.spec.packages.get(key) |
|
176 |
+ def getRequires(self, pkgName): |
|
177 |
+ dependentPackages = [] |
|
178 |
+ for pkg in self.spec.packages.values(): |
|
184 | 179 |
if pkg.name == pkgName: |
185 | 180 |
for dpkg in pkg.requires: |
186 | 181 |
dependentPackages.append(dpkg.package) |
187 | 182 |
return dependentPackages |
188 | 183 |
|
189 |
- def getBuildRequires(self,pkgName): |
|
190 |
- dependentPackages=[] |
|
191 |
- for key in self.spec.packages.keys(): |
|
192 |
- pkg = self.spec.packages.get(key) |
|
184 |
+ def getBuildRequires(self, pkgName): |
|
185 |
+ dependentPackages = [] |
|
186 |
+ for pkg in self.spec.packages.values(): |
|
193 | 187 |
if pkg.name == pkgName: |
194 | 188 |
for dpkg in pkg.buildrequires: |
195 | 189 |
dependentPackages.append(dpkg.package) |
196 | 190 |
return dependentPackages |
197 | 191 |
|
198 |
- def getProvides(self,packageName): |
|
199 |
- dependentPackages=[] |
|
200 |
- defaultPkgName=self.spec.packages['default'].name |
|
192 |
+ def getProvides(self, packageName): |
|
193 |
+ dependentPackages = [] |
|
194 |
+ defaultPkgName = self.spec.packages['default'].name |
|
201 | 195 |
pkg = None |
202 |
- if self.spec.packages.has_key(packageName): |
|
196 |
+ if packageName in self.spec.packages: |
|
203 | 197 |
pkg = self.spec.packages.get(packageName) |
204 | 198 |
if defaultPkgName == packageName: |
205 |
- pkg=self.spec.packages['default'] |
|
199 |
+ pkg = self.spec.packages['default'] |
|
206 | 200 |
if pkg is not None: |
207 | 201 |
for dpkg in pkg.provides: |
208 | 202 |
dependentPackages.append(dpkg.package) |
209 | 203 |
else: |
210 |
- print "package not found" |
|
204 |
+ print("package not found") |
|
211 | 205 |
return dependentPackages |
212 | 206 |
|
213 | 207 |
def getVersion(self): |
... | ... |
@@ -226,9 +215,9 @@ class Specutils(object): |
226 | 226 |
return self.spec.globalSecurityHardening |
227 | 227 |
|
228 | 228 |
def isCheckAvailable(self): |
229 |
- check=False |
|
229 |
+ check = False |
|
230 | 230 |
if self.spec.checkMacro is not None: |
231 |
- check=True |
|
231 |
+ check = True |
|
232 | 232 |
return check |
233 | 233 |
|
234 | 234 |
def getDefinitions(self): |
... | ... |
@@ -236,17 +225,16 @@ class Specutils(object): |
236 | 236 |
|
237 | 237 |
def main(): |
238 | 238 |
spec = Specutils("/workspace1/myrepos/photon/SPECS/docker/docker.spec") |
239 |
- print "packages",spec.getPackageNames() |
|
240 |
- print "packages",spec.getRPMNames() |
|
241 |
- print "sources",spec.getSourceNames() |
|
242 |
- print "patches",spec.getPatchNames() |
|
243 |
- print "requires",spec.getRequires('libltdl-devel') |
|
244 |
- print "requires",spec.getRequires('libtool') |
|
239 |
+ print("packages {}".format(spec.getPackageNames())) |
|
240 |
+ print("packages {}".format(spec.getRPMNames())) |
|
241 |
+ print("sources {}".format(spec.getSourceNames())) |
|
242 |
+ print("patches {}".format(spec.getPatchNames())) |
|
243 |
+ print("requires {}".format(spec.getRequires('libltdl-devel'))) |
|
244 |
+ print("requires {}".format(spec.getRequires('libtool'))) |
|
245 | 245 |
|
246 |
- print "provides",spec.getProvides('libtool') |
|
247 |
- print "all-requires",spec.getRequiresAllPackages() |
|
248 |
- print "all-build-requires",spec.getBuildRequiresAllPackages() |
|
246 |
+ print("provides {}".format(spec.getProvides('libtool'))) |
|
247 |
+ print("all-requires {}".format(spec.getRequiresAllPackages())) |
|
248 |
+ print("all-build-requires {}".format(spec.getBuildRequiresAllPackages())) |
|
249 | 249 |
|
250 | 250 |
if __name__ == '__main__': |
251 | 251 |
main() |
252 |
- |
... | ... |
@@ -6,34 +6,36 @@ class StringUtils(object): |
6 | 6 |
# (aaa <= 3.1 or bbb) ccc (ddd or fff > 4.5.6) |
7 | 7 |
# into |
8 | 8 |
# aaa <= 3.1 ccc ddd |
9 |
- def getStringInConditionalBrackets(self,inputstring): |
|
10 |
- inputstring=inputstring.strip() |
|
11 |
- items = re.findall(r"([(][A-Za-z0-9 %{?}_\.\-<>=]+[)])", inputstring) |
|
9 |
+ def getStringInConditionalBrackets(self, inputstring): |
|
10 |
+ inputstring = inputstring.strip() |
|
11 |
+ items = re.findall(r"([(][A-Za-z0-9 %{?}_\.\-<>=]+[)])", inputstring) |
|
12 | 12 |
for m in items: |
13 | 13 |
out = m[m.find("(")+1 : m.find(" or ")].strip() |
14 |
- inputstring = inputstring.replace(m, out); |
|
14 |
+ inputstring = inputstring.replace(m, out) |
|
15 | 15 |
return inputstring |
16 | 16 |
|
17 |
- def getFileNameFromURL(self,inputstring): |
|
18 |
- index=inputstring.rfind("/") |
|
17 |
+ def getFileNameFromURL(self, inputstring): |
|
18 |
+ index = inputstring.rfind("/") |
|
19 | 19 |
return inputstring[index+1:] |
20 | 20 |
|
21 |
- def getPackageNameFromURL(self,inputstring): |
|
22 |
- filename=self.getFileNameFromURL(inputstring) |
|
23 |
- m = re.search(r"(zip|mozjs|.+-)([0-9_.]+)(\.source|\.tar|-src|\.zip|\+md|\.tgz).*", filename) |
|
21 |
+ def getPackageNameFromURL(self, inputstring): |
|
22 |
+ filename = self.getFileNameFromURL(inputstring) |
|
23 |
+ m = re.search(r"(zip|mozjs|.+-)([0-9_.]+)(\.source|\.tar|-src|\.zip|\+md|\.tgz).*", |
|
24 |
+ filename) |
|
24 | 25 |
if m is None: |
25 |
- print "Unable to parse "+filename |
|
26 |
+ print("Unable to parse " + filename) |
|
26 | 27 |
return inputstring |
27 | 28 |
name = m.group(1) |
28 | 29 |
if name.endswith("-"): |
29 | 30 |
name = name[:-1] |
30 | 31 |
return name |
31 | 32 |
|
32 |
- def getPackageVersionFromURL(self,inputstring): |
|
33 |
- filename=self.getFileNameFromURL(inputstring) |
|
34 |
- m = re.search(r"(zip|mozjs|.*-)([0-9_.]+)(\.source|\.tar|-src|\.zip|\+md|\.tgz).*", filename) |
|
33 |
+ def getPackageVersionFromURL(self, inputstring): |
|
34 |
+ filename = self.getFileNameFromURL(inputstring) |
|
35 |
+ m = re.search(r"(zip|mozjs|.*-)([0-9_.]+)(\.source|\.tar|-src|\.zip|\+md|\.tgz).*", |
|
36 |
+ filename) |
|
35 | 37 |
if m is None: |
36 |
- print "Unable to parse "+filename |
|
38 |
+ print("Unable to parse " + filename) |
|
37 | 39 |
return inputstring |
38 | 40 |
name = m.group(2) |
39 | 41 |
return name.replace("_", ".") |
... | ... |
@@ -1,73 +1,71 @@ |
1 |
- |
|
2 | 1 |
import WorkerThread |
2 |
+ |
|
3 | 3 |
class ThreadPool(object): |
4 |
- |
|
5 |
- mapWorkerThreads={} |
|
6 |
- activeWorkerThreads=[] |
|
7 |
- inactiveWorkerThreads=[] |
|
8 |
- mapPackageToCycle={} |
|
9 |
- listAvailableCyclicPackages=[] |
|
10 |
- listBuildOptionPackages=[] |
|
11 |
- pkgBuildOptionFile="" |
|
12 |
- pkgBuildType="chroot" |
|
13 |
- logger=None |
|
14 |
- statusEvent=None |
|
15 |
- |
|
4 |
+ |
|
5 |
+ mapWorkerThreads = {} |
|
6 |
+ activeWorkerThreads = [] |
|
7 |
+ inactiveWorkerThreads = [] |
|
8 |
+ mapPackageToCycle = {} |
|
9 |
+ listAvailableCyclicPackages = [] |
|
10 |
+ listBuildOptionPackages = [] |
|
11 |
+ pkgBuildOptionFile = "" |
|
12 |
+ pkgBuildType = "chroot" |
|
13 |
+ logger = None |
|
14 |
+ statusEvent = None |
|
15 |
+ |
|
16 | 16 |
@staticmethod |
17 | 17 |
def clear(): |
18 | 18 |
ThreadPool.mapWorkerThreads.clear() |
19 |
- ThreadPool.activeWorkerThreads=[] |
|
20 |
- ThreadPool.inactiveWorkerThreads=[] |
|
21 |
- |
|
19 |
+ ThreadPool.activeWorkerThreads = [] |
|
20 |
+ ThreadPool.inactiveWorkerThreads = [] |
|
21 |
+ |
|
22 | 22 |
@staticmethod |
23 | 23 |
def getAllWorkerObjects(): |
24 |
- listWorkerObjs=[] |
|
24 |
+ listWorkerObjs = [] |
|
25 | 25 |
listWorkerKeys = ThreadPool.mapWorkerThreads.keys() |
26 | 26 |
for x in listWorkerKeys: |
27 |
- xobj=ThreadPool.mapWorkerThreads[x] |
|
27 |
+ xobj = ThreadPool.mapWorkerThreads[x] |
|
28 | 28 |
listWorkerObjs.append(xobj) |
29 | 29 |
return listWorkerObjs |
30 |
- |
|
30 |
+ |
|
31 | 31 |
@staticmethod |
32 | 32 |
def addWorkerThread(workerThreadName): |
33 | 33 |
workerThread = WorkerThread.WorkerThread( |
34 |
- ThreadPool.statusEvent, |
|
35 |
- workerThreadName, |
|
36 |
- ThreadPool.mapPackageToCycle, |
|
37 |
- ThreadPool.listAvailableCyclicPackages, |
|
38 |
- ThreadPool.logger, |
|
39 |
- ThreadPool.listBuildOptionPackages, |
|
40 |
- ThreadPool.pkgBuildOptionFile, |
|
41 |
- ThreadPool.pkgBuildType) |
|
42 |
- ThreadPool.mapWorkerThreads[workerThreadName]=workerThread |
|
43 |
- |
|
34 |
+ ThreadPool.statusEvent, |
|
35 |
+ workerThreadName, |
|
36 |
+ ThreadPool.mapPackageToCycle, |
|
37 |
+ ThreadPool.listAvailableCyclicPackages, |
|
38 |
+ ThreadPool.logger, |
|
39 |
+ ThreadPool.listBuildOptionPackages, |
|
40 |
+ ThreadPool.pkgBuildOptionFile, |
|
41 |
+ ThreadPool.pkgBuildType) |
|
42 |
+ ThreadPool.mapWorkerThreads[workerThreadName] = workerThread |
|
43 |
+ |
|
44 | 44 |
@staticmethod |
45 | 45 |
def makeWorkerThreadActive(threadName): |
46 | 46 |
if threadName in ThreadPool.inactiveWorkerThreads: |
47 | 47 |
ThreadPool.inactiveWorkerThreads.remove(threadName) |
48 | 48 |
ThreadPool.activeWorkerThreads.append(threadName) |
49 |
- |
|
49 |
+ |
|
50 | 50 |
@staticmethod |
51 | 51 |
def makeWorkerThreadInActive(threadName): |
52 | 52 |
if threadName in ThreadPool.activeWorkerThreads: |
53 | 53 |
ThreadPool.activeWorkerThreads.remove(threadName) |
54 | 54 |
ThreadPool.inactiveWorkerThreads.append(threadName) |
55 |
- |
|
55 |
+ |
|
56 | 56 |
@staticmethod |
57 | 57 |
def startWorkerThread(threadName): |
58 | 58 |
ThreadPool.mapWorkerThreads[threadName].start() |
59 |
- |
|
59 |
+ |
|
60 | 60 |
@staticmethod |
61 | 61 |
def getListInactiveWorkerThreads(): |
62 | 62 |
return ThreadPool.inactiveWorkerThreads |
63 |
- |
|
63 |
+ |
|
64 | 64 |
@staticmethod |
65 | 65 |
def activateWorkerThreads(numOfThreadsToActivate): |
66 | 66 |
while len(ThreadPool.inactiveWorkerThreads) > 0 and numOfThreadsToActivate > 0: |
67 |
- threadName=ThreadPool.inactiveWorkerThreads.pop() |
|
67 |
+ threadName = ThreadPool.inactiveWorkerThreads.pop() |
|
68 | 68 |
ThreadPool.addWorkerThread(threadName) |
69 | 69 |
ThreadPool.startWorkerThread(threadName) |
70 | 70 |
ThreadPool.makeWorkerThreadActive(threadName) |
71 | 71 |
numOfThreadsToActivate = numOfThreadsToActivate -1 |
72 |
- |
|
73 |
- |
... | ... |
@@ -1,106 +1,107 @@ |
1 |
+import os.path |
|
2 |
+import platform |
|
3 |
+import traceback |
|
1 | 4 |
from CommandUtils import CommandUtils |
2 | 5 |
from ChrootUtils import ChrootUtils |
3 | 6 |
from Logger import Logger |
4 | 7 |
from PackageUtils import PackageUtils |
5 | 8 |
from constants import constants |
6 |
-import subprocess |
|
7 |
-import os.path |
|
8 |
-import platform |
|
9 |
-import traceback |
|
10 |
-import shutil |
|
11 | 9 |
|
12 | 10 |
class ToolChainUtils(object): |
13 | 11 |
|
14 |
- def __init__(self,logName=None,logPath=None): |
|
12 |
+ def __init__(self, logName=None, logPath=None): |
|
15 | 13 |
if logName is None: |
16 | 14 |
logName = "Toolchain Utils" |
17 | 15 |
if logPath is None: |
18 | 16 |
logPath = constants.logPath |
19 |
- self.logName=logName |
|
20 |
- self.logPath=logPath |
|
21 |
- self.logger=Logger.getLogger(logName,logPath) |
|
17 |
+ self.logName = logName |
|
18 |
+ self.logPath = logPath |
|
19 |
+ self.logger = Logger.getLogger(logName, logPath) |
|
22 | 20 |
self.adjustToolChainScript = "adjust-tool-chain.sh" |
23 | 21 |
self.localegenScript = "./locale-gen.sh" |
24 | 22 |
self.localegenConfig = "./locale-gen.conf" |
25 |
- self.prepareBuildRootCmd="./prepare-build-root.sh" |
|
23 |
+ self.prepareBuildRootCmd = "./prepare-build-root.sh" |
|
26 | 24 |
self.rpmbuildCommand = "rpmbuild" |
27 |
- if os.geteuid()==0: |
|
28 |
- self.rpmCommand="rpm" |
|
25 |
+ if os.geteuid() == 0: |
|
26 |
+ self.rpmCommand = "rpm" |
|
29 | 27 |
else: |
30 |
- self.rpmCommand="fakeroot-ng rpm" |
|
28 |
+ self.rpmCommand = "fakeroot-ng rpm" |
|
31 | 29 |
|
32 |
- def prepareBuildRoot(self,chrootID): |
|
30 |
+ def prepareBuildRoot(self, chrootID): |
|
33 | 31 |
self.logger.info("Preparing build environment") |
34 | 32 |
cmdUtils = CommandUtils() |
35 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/dev") |
|
36 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/etc") |
|
37 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/proc") |
|
38 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/run") |
|
39 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/sys") |
|
40 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+"/tmp") |
|
41 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath) |
|
42 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/RPMS/"+platform.machine()) |
|
43 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/RPMS/noarch") |
|
44 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/SOURCES") |
|
45 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/SPECS") |
|
46 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/LOGS") |
|
47 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/BUILD") |
|
48 |
- cmdUtils.runCommandInShell("mkdir -p "+chrootID+constants.topDirPath+"/BUILDROOT") |
|
33 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/dev") |
|
34 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/etc") |
|
35 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/proc") |
|
36 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/run") |
|
37 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/sys") |
|
38 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + "/tmp") |
|
39 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath) |
|
40 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + |
|
41 |
+ "/RPMS/" + platform.machine()) |
|
42 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/RPMS/noarch") |
|
43 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/SOURCES") |
|
44 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/SPECS") |
|
45 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/LOGS") |
|
46 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/BUILD") |
|
47 |
+ cmdUtils.runCommandInShell("mkdir -p " + chrootID + constants.topDirPath + "/BUILDROOT") |
|
49 | 48 |
|
50 |
- prepareChrootCmd=self.prepareBuildRootCmd+" "+chrootID |
|
51 |
- logFile=self.logPath+"/prepareBuildRoot.log" |
|
52 |
- returnVal=cmdUtils.runCommandInShell(prepareChrootCmd,logFile) |
|
49 |
+ prepareChrootCmd = self.prepareBuildRootCmd + " " + chrootID |
|
50 |
+ logFile = self.logPath + "/prepareBuildRoot.log" |
|
51 |
+ returnVal = cmdUtils.runCommandInShell(prepareChrootCmd, logFile) |
|
53 | 52 |
if not returnVal: |
54 | 53 |
self.logger.error("Prepare build root script failed.Unable to prepare chroot.") |
55 | 54 |
raise Exception("Prepare build root script failed") |
56 | 55 |
|
57 |
- self.logger.info("Successfully prepared chroot:"+chrootID) |
|
56 |
+ self.logger.info("Successfully prepared chroot:" + chrootID) |
|
58 | 57 |
|
59 |
- def findRPMFileInGivenLocation(self,package,rpmdirPath): |
|
58 |
+ def findRPMFileInGivenLocation(self, package, rpmdirPath): |
|
60 | 59 |
cmdUtils = CommandUtils() |
61 |
- listFoundRPMFiles = cmdUtils.findFile(package+"-*.rpm",rpmdirPath) |
|
62 |
- listFilterRPMFiles=[] |
|
60 |
+ listFoundRPMFiles = cmdUtils.findFile(package + "-*.rpm", rpmdirPath) |
|
61 |
+ listFilterRPMFiles = [] |
|
63 | 62 |
for f in listFoundRPMFiles: |
64 |
- rpmFileName=os.path.basename(f) |
|
65 |
- checkRPMName=rpmFileName.replace(package,"") |
|
63 |
+ rpmFileName = os.path.basename(f) |
|
64 |
+ checkRPMName = rpmFileName.replace(package, "") |
|
66 | 65 |
rpmNameSplit = checkRPMName.split("-") |
67 | 66 |
if len(rpmNameSplit) == 3: |
68 | 67 |
listFilterRPMFiles.append(f) |
69 |
- if len(listFilterRPMFiles) == 1 : |
|
68 |
+ if len(listFilterRPMFiles) == 1: |
|
70 | 69 |
return listFilterRPMFiles[0] |
71 |
- if len(listFilterRPMFiles) == 0 : |
|
70 |
+ if len(listFilterRPMFiles) == 0: |
|
72 | 71 |
return None |
73 |
- if len(listFilterRPMFiles) > 1 : |
|
74 |
- self.logger.error("Found multiple rpm files for given package in rpm directory.Unable to determine the rpm file for package:"+package) |
|
72 |
+ if len(listFilterRPMFiles) > 1: |
|
73 |
+ self.logger.error("Found multiple rpm files for given package in rpm directory." + |
|
74 |
+ "Unable to determine the rpm file for package:" + package) |
|
75 | 75 |
return None |
76 | 76 |
|
77 | 77 |
def buildCoreToolChainPackages(self, listBuildOptionPackages, pkgBuildOptionFile): |
78 | 78 |
self.logger.info("Building core toolchain packages.....") |
79 |
- chrootID=None |
|
79 |
+ chrootID = None |
|
80 | 80 |
pkgCount = 0 |
81 | 81 |
try: |
82 |
- pkgUtils=PackageUtils(self.logName,self.logPath) |
|
82 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
83 | 83 |
for package in constants.listCoreToolChainPackages: |
84 |
- rpmPkg=pkgUtils.findRPMFileForGivenPackage(package) |
|
84 |
+ rpmPkg = pkgUtils.findRPMFileForGivenPackage(package) |
|
85 | 85 |
if rpmPkg is not None: |
86 | 86 |
continue |
87 | 87 |
self.logger.info("Building core toolchain package: " + package) |
88 |
- chrUtils = ChrootUtils(self.logName,self.logPath) |
|
89 |
- chrootName="build-"+package |
|
90 |
- destLogPath=constants.logPath+"/build-"+package |
|
88 |
+ chrUtils = ChrootUtils(self.logName, self.logPath) |
|
89 |
+ chrootName = "build-"+package |
|
90 |
+ destLogPath = constants.logPath + "/build-" + package |
|
91 | 91 |
if not os.path.isdir(destLogPath): |
92 | 92 |
cmdUtils = CommandUtils() |
93 |
- cmdUtils.runCommandInShell("mkdir -p "+destLogPath) |
|
94 |
- returnVal,chrootID = chrUtils.createChroot(chrootName) |
|
93 |
+ cmdUtils.runCommandInShell("mkdir -p " + destLogPath) |
|
94 |
+ returnVal, chrootID = chrUtils.createChroot(chrootName) |
|
95 | 95 |
if not returnVal: |
96 | 96 |
self.logger.error("Creating chroot failed") |
97 | 97 |
raise Exception("creating chroot failed") |
98 | 98 |
self.installToolChainRPMS(chrootID, package, destLogPath) |
99 | 99 |
pkgUtils.adjustGCCSpecs(package, chrootID, destLogPath) |
100 |
- pkgUtils.buildRPMSForGivenPackage(package, chrootID, listBuildOptionPackages, pkgBuildOptionFile, destLogPath) |
|
100 |
+ pkgUtils.buildRPMSForGivenPackage(package, chrootID, listBuildOptionPackages, |
|
101 |
+ pkgBuildOptionFile, destLogPath) |
|
101 | 102 |
pkgCount += 1 |
102 | 103 |
chrUtils.destroyChroot(chrootID) |
103 |
- chrootID=None |
|
104 |
+ chrootID = None |
|
104 | 105 |
self.logger.info("Successfully built toolchain") |
105 | 106 |
if chrootID is not None: |
106 | 107 |
chrUtils.destroyChroot(chrootID) |
... | ... |
@@ -110,77 +111,87 @@ class ToolChainUtils(object): |
110 | 110 |
traceback.print_exc() |
111 | 111 |
raise e |
112 | 112 |
return pkgCount |
113 |
- |
|
114 |
- def installToolChainRPMS(self,chrootID, packageName, logPath=None): |
|
113 |
+ |
|
114 |
+ def installToolChainRPMS(self, chrootID, packageName, logPath=None): |
|
115 | 115 |
if logPath is None: |
116 |
- logPath=self.logPath |
|
116 |
+ logPath = self.logPath |
|
117 | 117 |
cmdUtils = CommandUtils() |
118 | 118 |
self.prepareBuildRoot(chrootID) |
119 | 119 |
self.logger.info("Installing Tool Chain RPMS.......") |
120 | 120 |
rpmFiles = "" |
121 | 121 |
packages = "" |
122 | 122 |
for package in constants.listToolChainRPMsToInstall: |
123 |
- pkgUtils=PackageUtils(self.logName,self.logPath) |
|
123 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
124 | 124 |
rpmFile = None |
125 | 125 |
if constants.rpmCheck: |
126 |
- rpmFile=pkgUtils.findRPMFileForGivenPackage(package) |
|
126 |
+ rpmFile = pkgUtils.findRPMFileForGivenPackage(package) |
|
127 | 127 |
else: |
128 | 128 |
if (packageName not in constants.listToolChainRPMsToInstall or |
129 |
- constants.listToolChainRPMsToInstall.index(packageName) > constants.listToolChainRPMsToInstall.index(package)): |
|
130 |
- rpmFile=pkgUtils.findRPMFileForGivenPackage(package) |
|
129 |
+ constants.listToolChainRPMsToInstall.index(packageName) > |
|
130 |
+ constants.listToolChainRPMsToInstall.index(package)): |
|
131 |
+ rpmFile = pkgUtils.findRPMFileForGivenPackage(package) |
|
131 | 132 |
if rpmFile is None: |
132 | 133 |
# sqlite-autoconf package was renamed, but it still published as sqlite-autoconf |
133 | 134 |
if (package == "sqlite") and (platform.machine() == "x86_64"): |
134 | 135 |
package = "sqlite-autoconf" |
135 |
- rpmFile=self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
136 |
+ rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
136 | 137 |
if rpmFile is None: |
137 | 138 |
if package in constants.listOfRPMsProvidedAfterBuild: |
138 |
- self.logger.info("No old version of "+package+" exists, skip until the new version is built") |
|
139 |
+ self.logger.info("No old version of " + package + |
|
140 |
+ " exists, skip until the new version is built") |
|
139 | 141 |
continue |
140 |
- self.logger.error("Unable to find rpm "+ package +" in current and previous versions") |
|
142 |
+ self.logger.error("Unable to find rpm " + package + |
|
143 |
+ " in current and previous versions") |
|
141 | 144 |
raise Exception("Input Error") |
142 | 145 |
rpmFiles += " " + rpmFile |
143 | 146 |
packages += " " + package |
144 | 147 |
|
145 |
- self.logger.debug("Installing toolchain rpms:"+packages) |
|
146 |
- cmd=self.rpmCommand + " -i -v --nodeps --noorder --force --root "+chrootID+" --define \'_dbpath /var/lib/rpm\' "+ rpmFiles |
|
147 |
- retVal = cmdUtils.runCommandInShell(cmd, logPath+"/install_toolchain_rpms.log") |
|
148 |
+ self.logger.debug("Installing toolchain rpms:" + packages) |
|
149 |
+ cmd = (self.rpmCommand + " -i -v --nodeps --noorder --force --root " + |
|
150 |
+ chrootID +" --define \'_dbpath /var/lib/rpm\' "+ rpmFiles) |
|
151 |
+ retVal = cmdUtils.runCommandInShell(cmd, logPath + "/install_toolchain_rpms.log") |
|
148 | 152 |
if not retVal: |
149 | 153 |
self.logger.debug("Command Executed:" + cmd) |
150 | 154 |
self.logger.error("Installing tool chain failed") |
151 | 155 |
raise Exception("RPM installation failed") |
152 |
- self.logger.info("Successfully installed default Tool Chain RPMS in Chroot:"+chrootID) |
|
153 |
- print "Building Package:"+ packageName |
|
154 |
- print constants.perPackageToolChain |
|
156 |
+ self.logger.info("Successfully installed default Tool Chain RPMS in Chroot:" + chrootID) |
|
157 |
+ print("Building Package: ".format(packageName)) |
|
158 |
+ print(constants.perPackageToolChain) |
|
155 | 159 |
if packageName in constants.perPackageToolChain: |
156 |
- print constants.perPackageToolChain[packageName] |
|
157 |
- self.installCustomToolChainRPMS(chrootID, constants.perPackageToolChain[packageName], packageName); |
|
158 |
- |
|
160 |
+ print(constants.perPackageToolChain[packageName]) |
|
161 |
+ self.installCustomToolChainRPMS(chrootID, constants.perPackageToolChain[packageName], |
|
162 |
+ packageName) |
|
163 |
+ |
|
159 | 164 |
def installCustomToolChainRPMS(self, chrootID, listOfToolChainPkgs, packageName): |
160 |
- self.logger.info("Installing package specific tool chain RPMs for " + packageName + ".......") |
|
165 |
+ self.logger.info("Installing package specific tool chain RPMs for " + packageName + |
|
166 |
+ ".......") |
|
161 | 167 |
rpmFiles = "" |
162 | 168 |
packages = "" |
163 | 169 |
cmdUtils = CommandUtils() |
164 | 170 |
for package in listOfToolChainPkgs: |
165 |
- pkgUtils=PackageUtils(self.logName,self.logPath) |
|
166 |
- print "DEBUG:" + package |
|
171 |
+ pkgUtils = PackageUtils(self.logName, self.logPath) |
|
172 |
+ print("DEBUG:" + package) |
|
167 | 173 |
if "openjre8" in packageName or "openjdk8" in packageName: |
168 | 174 |
# x86_64 has openjdk/jre as a published rpms but aarch64 has openjdk8/jre8 |
169 | 175 |
# Remove this condition after publishxrpms for x86_^4 got updated |
170 |
- if (package == "openjdk" or package == "openjre") and (platform.machine() == "aarch64"): |
|
176 |
+ if ((package == "openjdk" or package == "openjre") and |
|
177 |
+ platform.machine() == "aarch64"): |
|
171 | 178 |
package = package + "8" |
172 |
- rpmFile=self.findRPMFileInGivenLocation(package, constants.prevPublishXRPMRepo) |
|
179 |
+ rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishXRPMRepo) |
|
173 | 180 |
else: |
174 |
- rpmFile=self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
181 |
+ rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
175 | 182 |
if rpmFile is None: |
176 |
- self.logger.error("Unable to find rpm "+ package +" in current and previous versions") |
|
183 |
+ self.logger.error("Unable to find rpm "+ package + |
|
184 |
+ " in current and previous versions") |
|
177 | 185 |
raise Exception("Input Error") |
178 | 186 |
rpmFiles += " " + rpmFile |
179 | 187 |
packages += " " + package |
180 | 188 |
|
181 |
- self.logger.debug("Installing custom rpms:"+packages) |
|
182 |
- cmd=self.rpmCommand + " -i -v --nodeps --noorder --force --root "+chrootID+" --define \'_dbpath /var/lib/rpm\' "+ rpmFiles |
|
183 |
- retVal = cmdUtils.runCommandInShell(cmd, self.logPath+"/install_custom_toolchain_rpms.log") |
|
189 |
+ self.logger.debug("Installing custom rpms:" + packages) |
|
190 |
+ cmd = (self.rpmCommand + " -i -v --nodeps --noorder --force --root " + |
|
191 |
+ chrootID + " --define \'_dbpath /var/lib/rpm\' " + rpmFiles) |
|
192 |
+ retVal = cmdUtils.runCommandInShell(cmd, self.logPath + |
|
193 |
+ "/install_custom_toolchain_rpms.log") |
|
184 | 194 |
if not retVal: |
185 | 195 |
self.logger.debug("Command Executed:" + cmd) |
186 | 196 |
self.logger.error("Installing tool chain failed") |
... | ... |
@@ -201,9 +212,11 @@ class ToolChainUtils(object): |
201 | 201 |
rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
202 | 202 |
if rpmFile is None: |
203 | 203 |
if package in constants.listOfRPMsProvidedAfterBuild: |
204 |
- self.logger.info("No old version of " + package + " exists, skip until the new version is built") |
|
204 |
+ self.logger.info("No old version of " + package + |
|
205 |
+ " exists, skip until the new version is built") |
|
205 | 206 |
continue |
206 |
- self.logger.error("Unable to find rpm " + package + " in current and previous versions") |
|
207 |
+ self.logger.error("Unable to find rpm " + package + |
|
208 |
+ " in current and previous versions") |
|
207 | 209 |
raise Exception("Input Error") |
208 | 210 |
if rpmFile.find("stage/PUBLISHRPMS"): |
209 | 211 |
rpmFile = rpmFile.replace(constants.prevPublishRPMRepo, "/publishrpms") |
... | ... |
@@ -224,7 +237,8 @@ class ToolChainUtils(object): |
224 | 224 |
self.logger.error("Installing tool chain in container failed") |
225 | 225 |
raise Exception("RPM installation in container failed") |
226 | 226 |
self.logger.info(tcInstallLog) |
227 |
- self.logger.info("Successfully installed default tool-chain RPMS in container: " + containerID.short_id) |
|
227 |
+ self.logger.info("Successfully installed default tool-chain RPMS in container: " + |
|
228 |
+ containerID.short_id) |
|
228 | 229 |
|
229 | 230 |
def installCustomToolChainRPMSinContainer(self, containerID, listOfToolChainPkgs, packageName): |
230 | 231 |
self.logger.info("Installing package specific tool chain RPMs for " + packageName) |
... | ... |
@@ -232,11 +246,12 @@ class ToolChainUtils(object): |
232 | 232 |
packages = "" |
233 | 233 |
for package in listOfToolChainPkgs: |
234 | 234 |
if "openjre8" in packageName or "openjdk8" in packageName: |
235 |
- rpmFile=self.findRPMFileInGivenLocation(package, constants.prevPublishXRPMRepo) |
|
235 |
+ rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishXRPMRepo) |
|
236 | 236 |
else: |
237 |
- rpmFile=self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
237 |
+ rpmFile = self.findRPMFileInGivenLocation(package, constants.prevPublishRPMRepo) |
|
238 | 238 |
if rpmFile is None: |
239 |
- self.logger.error("Unable to find rpm " + package + " in current and previous versions") |
|
239 |
+ self.logger.error("Unable to find rpm " + package + |
|
240 |
+ " in current and previous versions") |
|
240 | 241 |
raise Exception("Input Error") |
241 | 242 |
if rpmFile.find("stage/PUBLISHRPMS"): |
242 | 243 |
rpmFile = rpmFile.replace(constants.prevPublishRPMRepo, "/publishrpms") |
... | ... |
@@ -249,11 +264,13 @@ class ToolChainUtils(object): |
249 | 249 |
|
250 | 250 |
self.logger.debug("Installing rpms: " + packages) |
251 | 251 |
cmd = "rpm -Uvh --nodeps --force " + rpmFiles |
252 |
- self.logger.debug("VDBG-TCU-installCustomToolChainRPMSinContainer: Installing rpms cmd: " + cmd) |
|
252 |
+ self.logger.debug("VDBG-TCU-installCustomToolChainRPMSinContainer: Installing rpms cmd: " + |
|
253 |
+ cmd) |
|
253 | 254 |
tcInstallLog = containerID.exec_run(cmd) |
254 | 255 |
# TODO: Find a way to collect exit status of the command that was run. |
255 | 256 |
if not tcInstallLog: |
256 | 257 |
self.logger.error("Installing tool chain in container failed") |
257 | 258 |
raise Exception("RPM installation in container failed") |
258 | 259 |
self.logger.info(tcInstallLog) |
259 |
- self.logger.info("Successfully installed all tool-chain XRPMS in container: " + containerID.short_id) |
|
260 |
+ self.logger.info("Successfully installed all tool-chain XRPMS in container: " + |
|
261 |
+ containerID.short_id) |
... | ... |
@@ -1,32 +1,33 @@ |
1 |
+import threading |
|
1 | 2 |
from PackageBuilder import PackageBuilderChroot |
2 | 3 |
from PackageBuilder import PackageBuilderContainer |
3 |
-import threading |
|
4 | 4 |
import Scheduler |
5 | 5 |
import ThreadPool |
6 | 6 |
|
7 | 7 |
class WorkerThread(threading.Thread): |
8 | 8 |
|
9 |
- def __init__(self,event,name,mapPackageToCycle,listAvailableCyclicPackages,logger,listBuildOptionPackages,pkgBuildOptionFile,pkgBuildType): |
|
9 |
+ def __init__(self, event, name, mapPackageToCycle, listAvailableCyclicPackages, logger, |
|
10 |
+ listBuildOptionPackages, pkgBuildOptionFile, pkgBuildType): |
|
10 | 11 |
threading.Thread.__init__(self) |
11 |
- self.statusEvent=event |
|
12 |
- self.name=name |
|
13 |
- self.mapPackageToCycle=mapPackageToCycle |
|
14 |
- self.listAvailableCyclicPackages=listAvailableCyclicPackages |
|
15 |
- self.logger=logger |
|
16 |
- self.listBuildOptionPackages=listBuildOptionPackages |
|
17 |
- self.pkgBuildOptionFile=pkgBuildOptionFile |
|
18 |
- self.pkgBuildType=pkgBuildType |
|
12 |
+ self.statusEvent = event |
|
13 |
+ self.name = name |
|
14 |
+ self.mapPackageToCycle = mapPackageToCycle |
|
15 |
+ self.listAvailableCyclicPackages = listAvailableCyclicPackages |
|
16 |
+ self.logger = logger |
|
17 |
+ self.listBuildOptionPackages = listBuildOptionPackages |
|
18 |
+ self.pkgBuildOptionFile = pkgBuildOptionFile |
|
19 |
+ self.pkgBuildType = pkgBuildType |
|
19 | 20 |
|
20 | 21 |
def run(self): |
21 |
- buildThreadFailed=False |
|
22 |
+ buildThreadFailed = False |
|
22 | 23 |
ThreadPool.ThreadPool.makeWorkerThreadActive(self.name) |
23 |
- self.logger.info("Thread "+self.name +" is starting now") |
|
24 |
+ self.logger.info("Thread " + self.name + " is starting now") |
|
24 | 25 |
while True: |
25 |
- outputMap={} |
|
26 |
+ outputMap = {} |
|
26 | 27 |
pkg = Scheduler.Scheduler.getNextPackageToBuild() |
27 | 28 |
if pkg is None: |
28 | 29 |
break |
29 |
- self.logger.info("Thread "+self.name+" is building package:"+ pkg) |
|
30 |
+ self.logger.info("Thread " + self.name + " is building package:" + pkg) |
|
30 | 31 |
if self.pkgBuildType == "chroot": |
31 | 32 |
pkgBuilder = PackageBuilderChroot(self.mapPackageToCycle, |
32 | 33 |
self.listAvailableCyclicPackages, |
... | ... |
@@ -39,15 +40,16 @@ class WorkerThread(threading.Thread): |
39 | 39 |
self.listBuildOptionPackages, |
40 | 40 |
self.pkgBuildOptionFile, |
41 | 41 |
self.pkgBuildType) |
42 |
- t = threading.Thread(target=pkgBuilder.buildPackageThreadAPI,args=(pkg,outputMap,pkg)) |
|
42 |
+ t = threading.Thread(target=pkgBuilder.buildPackageThreadAPI, |
|
43 |
+ args=(pkg, outputMap, pkg)) |
|
43 | 44 |
t.start() |
44 | 45 |
t.join() |
45 |
- if not outputMap.has_key(pkg) or outputMap[pkg] == False: |
|
46 |
+ if pkg not in outputMap or outputMap[pkg] == False: |
|
46 | 47 |
buildThreadFailed = True |
47 | 48 |
Scheduler.Scheduler.notifyPackageBuildFailed(pkg) |
48 | 49 |
self.logger.info("Thread "+self.name +" stopped building package:" + pkg) |
49 | 50 |
break |
50 |
- self.logger.info("Thread "+self.name+" finished building package:" + pkg) |
|
51 |
+ self.logger.info("Thread "+ self.name + " finished building package:" + pkg) |
|
51 | 52 |
Scheduler.Scheduler.notifyPackageBuildCompleted(pkg) |
52 | 53 |
|
53 | 54 |
if buildThreadFailed: |
... | ... |
@@ -1,85 +1,102 @@ |
1 |
-#!/usr/bin/env python |
|
1 |
+#!/usr/bin/env python3 |
|
2 | 2 |
|
3 | 3 |
from argparse import ArgumentParser |
4 | 4 |
import os.path |
5 | 5 |
import platform |
6 |
+import collections |
|
7 |
+import traceback |
|
8 |
+import sys |
|
9 |
+import json |
|
6 | 10 |
from CommandUtils import CommandUtils |
7 | 11 |
from Logger import Logger |
8 | 12 |
from constants import constants |
9 | 13 |
from PackageManager import PackageManager |
10 |
-import json |
|
11 |
-import sys |
|
12 | 14 |
from SpecData import SPECS |
13 |
-from SpecUtils import Specutils |
|
14 |
-from StringUtils import StringUtils |
|
15 |
-import collections |
|
16 |
-import traceback |
|
17 | 15 |
from PackageInfo import PackageInfo |
18 | 16 |
|
19 | 17 |
def main(): |
20 | 18 |
usage = "Usage: %prog [options] <package name>" |
21 | 19 |
parser = ArgumentParser(usage) |
22 |
- parser.add_argument("-s", "--spec-path", dest="specPath", default="../../SPECS") |
|
23 |
- parser.add_argument("-x", "--source-path", dest="sourcePath", default="../../stage/SOURCES") |
|
24 |
- parser.add_argument("-r", "--rpm-path", dest="rpmPath", default="../../stage/RPMS") |
|
25 |
- parser.add_argument("-i", "--install-package", dest="installPackage", default=False, action ="store_true") |
|
26 |
- parser.add_argument("-p", "--publish-RPMS-path", dest="publishRPMSPath", default="../../stage/PUBLISHRPMS") |
|
27 |
- parser.add_argument("-e", "--publish-XRPMS-path", dest="publishXRPMSPath", default="../../stage/PUBLISHXRPMS") |
|
28 |
- parser.add_argument("-l", "--log-path", dest="logPath", default="../../stage/LOGS") |
|
29 |
- parser.add_argument("-z", "--top-dir-path", dest="topDirPath", default="/usr/src/photon") |
|
30 |
- parser.add_argument("-b", "--build-root-path", dest="buildRootPath", default="/mnt") |
|
31 |
- parser.add_argument("-t", "--threads", dest="buildThreads", default=1, type=int, help="Number of working threads") |
|
32 |
- parser.add_argument("-m", "--tool-chain-stage", dest="toolChainStage", default="None") |
|
33 |
- parser.add_argument("-c", "--pullsources-config", dest="pullsourcesConfig", default="pullsources.conf") |
|
34 |
- parser.add_argument("-d", "--dist", dest="dist", default="") |
|
35 |
- parser.add_argument("-k", "--input-RPMS-path", dest="inputRPMSPath", default=None) |
|
36 |
- parser.add_argument("-n", "--build-number", dest="buildNumber", default="0000000") |
|
37 |
- parser.add_argument("-v", "--release-version", dest="releaseVersion", default="NNNnNNN") |
|
38 |
- parser.add_argument("-u", "--enable-rpmcheck", dest="rpmCheck", default=False, action ="store_true") |
|
39 |
- parser.add_argument("-a", "--source-rpm-path", dest="sourceRpmPath", default="../../stage/SRPMS") |
|
40 |
- parser.add_argument("-w", "--pkginfo-file", dest="pkgInfoFile", default="../../stage/pkg_info.json") |
|
41 |
- parser.add_argument("-g", "--pkg-build-option-file", dest="pkgBuildOptionFile", default="../../common/data/pkg_build_options.json") |
|
42 |
- parser.add_argument("-q", "--rpmcheck-stop-on-error", dest="rpmCheckStopOnError", default=False, action ="store_true") |
|
43 |
- parser.add_argument("-bd", "--publish-build-dependencies", dest="publishBuildDependencies", default=False) |
|
20 |
+ parser.add_argument("-s", "--spec-path", dest="specPath", default="../../SPECS") |
|
21 |
+ parser.add_argument("-x", "--source-path", dest="sourcePath", |
|
22 |
+ default="../../stage/SOURCES") |
|
23 |
+ parser.add_argument("-r", "--rpm-path", dest="rpmPath", |
|
24 |
+ default="../../stage/RPMS") |
|
25 |
+ parser.add_argument("-i", "--install-package", dest="installPackage", |
|
26 |
+ default=False, action="store_true") |
|
27 |
+ parser.add_argument("-p", "--publish-RPMS-path", dest="publishRPMSPath", |
|
28 |
+ default="../../stage/PUBLISHRPMS") |
|
29 |
+ parser.add_argument("-e", "--publish-XRPMS-path", dest="publishXRPMSPath", |
|
30 |
+ default="../../stage/PUBLISHXRPMS") |
|
31 |
+ parser.add_argument("-l", "--log-path", dest="logPath", default="../../stage/LOGS") |
|
32 |
+ parser.add_argument("-z", "--top-dir-path", dest="topDirPath", default="/usr/src/photon") |
|
33 |
+ parser.add_argument("-b", "--build-root-path", dest="buildRootPath", default="/mnt") |
|
34 |
+ parser.add_argument("-t", "--threads", dest="buildThreads", |
|
35 |
+ default=1, type=int, help="Number of working threads") |
|
36 |
+ parser.add_argument("-m", "--tool-chain-stage", dest="toolChainStage", default="None") |
|
37 |
+ parser.add_argument("-c", "--pullsources-config", dest="pullsourcesConfig", |
|
38 |
+ default="pullsources.conf") |
|
39 |
+ parser.add_argument("-d", "--dist", dest="dist", default="") |
|
40 |
+ parser.add_argument("-k", "--input-RPMS-path", dest="inputRPMSPath", default=None) |
|
41 |
+ parser.add_argument("-n", "--build-number", dest="buildNumber", default="0000000") |
|
42 |
+ parser.add_argument("-v", "--release-version", dest="releaseVersion", default="NNNnNNN") |
|
43 |
+ parser.add_argument("-u", "--enable-rpmcheck", dest="rpmCheck", |
|
44 |
+ default=False, action="store_true") |
|
45 |
+ parser.add_argument("-a", "--source-rpm-path", dest="sourceRpmPath", |
|
46 |
+ default="../../stage/SRPMS") |
|
47 |
+ parser.add_argument("-w", "--pkginfo-file", dest="pkgInfoFile", |
|
48 |
+ default="../../stage/pkg_info.json") |
|
49 |
+ parser.add_argument("-g", "--pkg-build-option-file", dest="pkgBuildOptionFile", |
|
50 |
+ default="../../common/data/pkg_build_options.json") |
|
51 |
+ parser.add_argument("-q", "--rpmcheck-stop-on-error", dest="rpmCheckStopOnError", |
|
52 |
+ default=False, action="store_true") |
|
53 |
+ parser.add_argument("-bd", "--publish-build-dependencies", dest="publishBuildDependencies", |
|
54 |
+ default=False) |
|
44 | 55 |
parser.add_argument("-pw", "--package-weights-path", dest="packageWeightsPath", default=None) |
45 |
- parser.add_argument("-y", "--generate-pkg-yaml-files", dest="generatePkgYamlFiles", default=False, action ="store_true") |
|
46 |
- parser.add_argument("-j", "--pkg-yaml-dir-path", dest="pkgYamlDirPath", default="../../stage/") |
|
47 |
- parser.add_argument("-f", "--pkg-blacklist-file", dest="pkgBlacklistFile", default=None) |
|
48 |
- parser.add_argument("-bt", "--build-type", dest="pkgBuildType", default="chroot") |
|
49 |
- parser.add_argument("-F", "--kat-build", dest="katBuild", default=None) |
|
56 |
+ parser.add_argument("-y", "--generate-pkg-yaml-files", dest="generatePkgYamlFiles", |
|
57 |
+ default=False, action="store_true") |
|
58 |
+ parser.add_argument("-j", "--pkg-yaml-dir-path", dest="pkgYamlDirPath", |
|
59 |
+ default="../../stage/") |
|
60 |
+ parser.add_argument("-f", "--pkg-blacklist-file", dest="pkgBlacklistFile", default=None) |
|
61 |
+ parser.add_argument("-bt", "--build-type", dest="pkgBuildType", default="chroot") |
|
62 |
+ parser.add_argument("-F", "--kat-build", dest="katBuild", default=None) |
|
50 | 63 |
parser.add_argument("PackageName", nargs='?') |
51 | 64 |
options = parser.parse_args() |
52 |
- cmdUtils=CommandUtils() |
|
65 |
+ cmdUtils = CommandUtils() |
|
53 | 66 |
if not os.path.isdir(options.logPath): |
54 |
- cmdUtils.runCommandInShell("mkdir -p "+options.logPath) |
|
67 |
+ cmdUtils.runCommandInShell("mkdir -p " + options.logPath) |
|
55 | 68 |
|
56 |
- logger=Logger.getLogger(options.logPath+"/Main") |
|
57 |
- errorFlag=False |
|
69 |
+ logger = Logger.getLogger(options.logPath + "/Main") |
|
70 |
+ errorFlag = False |
|
58 | 71 |
package = None |
59 | 72 |
pkgInfoJsonFile = options.pkgInfoFile |
60 | 73 |
if not os.path.isdir(options.sourcePath): |
61 |
- logger.error("Given Sources Path is not a directory:"+options.sourcePath) |
|
74 |
+ logger.error("Given Sources Path is not a directory:" + options.sourcePath) |
|
62 | 75 |
errorFlag = True |
63 | 76 |
if not os.path.isdir(options.specPath): |
64 |
- logger.error("Given Specs Path is not a directory:"+options.specPath) |
|
77 |
+ logger.error("Given Specs Path is not a directory:" + options.specPath) |
|
65 | 78 |
errorFlag = True |
66 | 79 |
if not os.path.isdir(options.publishRPMSPath): |
67 |
- logger.error("Given RPMS Path is not a directory:"+options.publishRPMSPath) |
|
80 |
+ logger.error("Given RPMS Path is not a directory:" + options.publishRPMSPath) |
|
68 | 81 |
errorFlag = True |
69 | 82 |
if not os.path.isdir(options.publishXRPMSPath): |
70 |
- logger.error("Given X RPMS Path is not a directory:"+options.publishXRPMSPath) |
|
83 |
+ logger.error("Given X RPMS Path is not a directory:" + options.publishXRPMSPath) |
|
71 | 84 |
errorFlag = True |
72 |
- if not os.path.isdir(options.publishRPMSPath+"/" + platform.machine()): |
|
73 |
- logger.error("Given RPMS Path is missing "+platform.machine()+" sub-directory:"+options.publishRPMSPath) |
|
85 |
+ if not os.path.isdir(options.publishRPMSPath + "/" + platform.machine()): |
|
86 |
+ logger.error("Given RPMS Path is missing " + platform.machine()+ |
|
87 |
+ " sub-directory:"+options.publishRPMSPath) |
|
74 | 88 |
errorFlag = True |
75 | 89 |
if not os.path.isdir(options.publishXRPMSPath+"/" + platform.machine()): |
76 |
- logger.error("Given X RPMS Path is missing "+platform.machine()+" sub-directory:"+options.publishXRPMSPath) |
|
90 |
+ logger.error("Given X RPMS Path is missing "+platform.machine()+ |
|
91 |
+ " sub-directory:"+options.publishXRPMSPath) |
|
77 | 92 |
errorFlag = True |
78 | 93 |
if not os.path.isdir(options.publishRPMSPath+"/noarch"): |
79 |
- logger.error("Given RPMS Path is missing noarch sub-directory:"+options.publishRPMSPath) |
|
94 |
+ logger.error("Given RPMS Path is missing noarch sub-directory:"+ |
|
95 |
+ options.publishRPMSPath) |
|
80 | 96 |
errorFlag = True |
81 | 97 |
if not os.path.isdir(options.publishXRPMSPath+"/noarch"): |
82 |
- logger.error("Given X RPMS Path is missing noarch sub-directory:"+options.publishXRPMSPath) |
|
98 |
+ logger.error("Given X RPMS Path is missing noarch sub-directory:"+ |
|
99 |
+ options.publishXRPMSPath) |
|
83 | 100 |
errorFlag = True |
84 | 101 |
if not os.path.isfile(options.pkgBuildOptionFile): |
85 | 102 |
logger.warning("Given JSON File is not a file:"+options.pkgBuildOptionFile) |
... | ... |
@@ -93,16 +110,19 @@ def main(): |
93 | 93 |
errorFlag = True |
94 | 94 |
|
95 | 95 |
if options.generatePkgYamlFiles: |
96 |
- if options.pkgBlacklistFile is not None and options.pkgBlacklistFile != "" and not os.path.isfile(options.pkgBlacklistFile): |
|
97 |
- logger.error("Given package blacklist file is not valid:"+options.pkgBlacklistFile) |
|
96 |
+ if (options.pkgBlacklistFile is not None and |
|
97 |
+ options.pkgBlacklistFile != "" and |
|
98 |
+ not os.path.isfile(options.pkgBlacklistFile)): |
|
99 |
+ logger.error("Given package blacklist file is not valid:"+ |
|
100 |
+ options.pkgBlacklistFile) |
|
98 | 101 |
errorFlag = True |
99 | 102 |
|
100 |
- if options.installPackage : |
|
103 |
+ if options.installPackage: |
|
101 | 104 |
if not options.PackageName: |
102 | 105 |
logger.error("Please provide package name") |
103 | 106 |
errorFlag = True |
104 | 107 |
else: |
105 |
- package=options.PackageName |
|
108 |
+ package = options.PackageName |
|
106 | 109 |
|
107 | 110 |
if errorFlag: |
108 | 111 |
logger.error("Found some errors. Please fix input options and re-run it.") |
... | ... |
@@ -139,7 +159,7 @@ def main(): |
139 | 139 |
try: |
140 | 140 |
constants.initialize(options) |
141 | 141 |
# parse SPECS folder |
142 |
- SPECS(); |
|
142 |
+ SPECS() |
|
143 | 143 |
if package == "packages_list": |
144 | 144 |
buildPackagesList(options.buildRootPath+"/../packages_list.csv") |
145 | 145 |
elif options.generatePkgYamlFiles: |
... | ... |
@@ -153,9 +173,12 @@ def main(): |
153 | 153 |
pkgManager = PackageManager() |
154 | 154 |
pkgManager.buildToolChainPackages(options.buildThreads) |
155 | 155 |
elif options.installPackage: |
156 |
- buildAPackage(package, listBuildOptionPackages, options.pkgBuildOptionFile, options.buildThreads, options.pkgBuildType) |
|
156 |
+ buildAPackage(package, listBuildOptionPackages, options.pkgBuildOptionFile, |
|
157 |
+ options.buildThreads, options.pkgBuildType) |
|
157 | 158 |
else: |
158 |
- buildPackagesForAllSpecs(listBuildOptionPackages, options.pkgBuildOptionFile, logger, options.buildThreads, pkgInfoJsonFile, options.pkgBuildType) |
|
159 |
+ buildPackagesForAllSpecs(listBuildOptionPackages, options.pkgBuildOptionFile, |
|
160 |
+ logger, options.buildThreads, pkgInfoJsonFile, |
|
161 |
+ options.pkgBuildType) |
|
159 | 162 |
except Exception as e: |
160 | 163 |
logger.error("Caught an exception") |
161 | 164 |
logger.error(str(e)) |
... | ... |
@@ -167,7 +190,7 @@ def main(): |
167 | 167 |
def buildPackagesList(csvFilename): |
168 | 168 |
csvFile = open(csvFilename, "w") |
169 | 169 |
csvFile.write("Package,Version,License,URL,Sources,Patches\n") |
170 |
- listPackages = SPECS.getData().getListPackages() |
|
170 |
+ listPackages = SPECS.getData().getListPackages() |
|
171 | 171 |
listPackages.sort() |
172 | 172 |
for package in listPackages: |
173 | 173 |
name = package |
... | ... |
@@ -201,7 +224,7 @@ def buildSourcesList(yamlDir, blackListPkgs, logger, singleFile=True): |
201 | 201 |
cmdUtils.runCommandInShell("mkdir -p "+yamlSourceDir) |
202 | 202 |
if singleFile: |
203 | 203 |
yamlFile = open(yamlSourceDir+"/sources_list.yaml", "w") |
204 |
- listPackages = SPECS.getData().getListPackages() |
|
204 |
+ listPackages = SPECS.getData().getListPackages() |
|
205 | 205 |
listPackages.sort() |
206 | 206 |
import PullSources |
207 | 207 |
for package in listPackages: |
... | ... |
@@ -211,7 +234,7 @@ def buildSourcesList(yamlDir, blackListPkgs, logger, singleFile=True): |
211 | 211 |
ossversion = SPECS.getData().getVersion(package) |
212 | 212 |
modified = False |
213 | 213 |
listPatches = SPECS.getData().getPatches(package) |
214 |
- if listPatches is not None and len(listPatches) > 0 : |
|
214 |
+ if listPatches is not None and len(listPatches) > 0: |
|
215 | 215 |
modified = True |
216 | 216 |
url = SPECS.getData().getSourceURL(package) |
217 | 217 |
if url is None: |
... | ... |
@@ -219,11 +242,12 @@ def buildSourcesList(yamlDir, blackListPkgs, logger, singleFile=True): |
219 | 219 |
|
220 | 220 |
sourceName = None |
221 | 221 |
listSourceNames = SPECS.getData().getSources(package) |
222 |
- if len(listSourceNames) >0: |
|
223 |
- sourceName=listSourceNames[0] |
|
222 |
+ if len(listSourceNames) > 0: |
|
223 |
+ sourceName = listSourceNames[0] |
|
224 | 224 |
sha1 = SPECS.getData().getSHA1(package, sourceName) |
225 | 225 |
if sha1 is not None: |
226 |
- PullSources.get(sourceName, sha1, yamlSourceDir, constants.pullsourcesConfig, logger) |
|
226 |
+ PullSources.get(sourceName, sha1, yamlSourceDir, |
|
227 |
+ constants.pullsourcesConfig, logger) |
|
227 | 228 |
|
228 | 229 |
if not singleFile: |
229 | 230 |
yamlFile = open(yamlSourceDir+"/"+ossname+"-"+ossversion+".yaml", "w") |
... | ... |
@@ -252,7 +276,7 @@ def buildSRPMList(srpmPath, yamlDir, blackListPkgs, logger, singleFile=True): |
252 | 252 |
cmdUtils.runCommandInShell("mkdir -p "+yamlSrpmDir) |
253 | 253 |
if singleFile: |
254 | 254 |
yamlFile = open(yamlSrpmDir+"/srpm_list.yaml", "w") |
255 |
- listPackages = SPECS.getData().getListPackages() |
|
255 |
+ listPackages = SPECS.getData().getListPackages() |
|
256 | 256 |
listPackages.sort() |
257 | 257 |
for package in listPackages: |
258 | 258 |
if package in blackListPkgs: |
... | ... |
@@ -261,17 +285,18 @@ def buildSRPMList(srpmPath, yamlDir, blackListPkgs, logger, singleFile=True): |
261 | 261 |
ossversion = SPECS.getData().getVersion(package) |
262 | 262 |
ossrelease = SPECS.getData().getRelease(package) |
263 | 263 |
|
264 |
- listFoundSRPMFiles = cmdUtils.findFile(ossname+"-"+ossversion+"-"+ossrelease+".src.rpm",srpmPath) |
|
264 |
+ listFoundSRPMFiles = cmdUtils.findFile(ossname+"-"+ossversion+"-"+ossrelease+".src.rpm", |
|
265 |
+ srpmPath) |
|
265 | 266 |
srpmName = None |
266 | 267 |
if len(listFoundSRPMFiles) == 1: |
267 |
- srpmFullPath = listFoundSRPMFiles[0]; |
|
268 |
+ srpmFullPath = listFoundSRPMFiles[0] |
|
268 | 269 |
srpmName = os.path.basename(srpmFullPath) |
269 | 270 |
cpcmd = "cp "+ srpmFullPath +" "+yamlSrpmDir+"/" |
270 | 271 |
returnVal = cmdUtils.runCommandInShell(cpcmd) |
271 | 272 |
if not returnVal: |
272 | 273 |
logger.error("Copy SRPM File is failed for package:"+ossname) |
273 | 274 |
else: |
274 |
- logger.error("SRPM file is not found:" +ossname) |
|
275 |
+ logger.error("SRPM file is not found:" +ossname) |
|
275 | 276 |
|
276 | 277 |
if not singleFile: |
277 | 278 |
yamlFile = open(yamlSrpmDir+"/"+ossname+"-"+ossversion+"-"+ossrelease+".yaml", "w") |
... | ... |
@@ -291,15 +316,18 @@ def buildSRPMList(srpmPath, yamlDir, blackListPkgs, logger, singleFile=True): |
291 | 291 |
yamlFile.close() |
292 | 292 |
logger.info("Generated srpm yaml files for all packages") |
293 | 293 |
|
294 |
-def buildAPackage(package, listBuildOptionPackages, pkgBuildOptionFile, buildThreads, pkgBuildType): |
|
295 |
- listPackages=[] |
|
294 |
+def buildAPackage(package, listBuildOptionPackages, pkgBuildOptionFile, |
|
295 |
+ buildThreads, pkgBuildType): |
|
296 |
+ listPackages = [] |
|
296 | 297 |
listPackages.append(package) |
297 | 298 |
pkgManager = PackageManager(pkgBuildType=pkgBuildType) |
298 | 299 |
if constants.rpmCheck: |
299 | 300 |
constants.setTestForceRPMS(listPackages[:]) |
300 |
- pkgManager.buildPackages(listPackages, listBuildOptionPackages, pkgBuildOptionFile, buildThreads, pkgBuildType) |
|
301 |
+ pkgManager.buildPackages(listPackages, listBuildOptionPackages, pkgBuildOptionFile, |
|
302 |
+ buildThreads, pkgBuildType) |
|
301 | 303 |
|
302 |
-def buildPackagesForAllSpecs(listBuildOptionPackages, pkgBuildOptionFile, logger, buildThreads, pkgInfoJsonFile, pkgBuildType): |
|
304 |
+def buildPackagesForAllSpecs(listBuildOptionPackages, pkgBuildOptionFile, logger, |
|
305 |
+ buildThreads, pkgInfoJsonFile, pkgBuildType): |
|
303 | 306 |
listPackages = SPECS.getData().getListPackages() |
304 | 307 |
|
305 | 308 |
logger.info("List of packages to build:") |
... | ... |
@@ -307,7 +335,8 @@ def buildPackagesForAllSpecs(listBuildOptionPackages, pkgBuildOptionFile, logger |
307 | 307 |
if constants.rpmCheck: |
308 | 308 |
constants.setTestForceRPMS(listPackages[:]) |
309 | 309 |
pkgManager = PackageManager(pkgBuildType=pkgBuildType) |
310 |
- pkgManager.buildPackages(listPackages, listBuildOptionPackages, pkgBuildOptionFile, buildThreads, pkgBuildType) |
|
310 |
+ pkgManager.buildPackages(listPackages, listBuildOptionPackages, pkgBuildOptionFile, |
|
311 |
+ buildThreads, pkgBuildType) |
|
311 | 312 |
|
312 | 313 |
#Generating package info file which is required by installer |
313 | 314 |
logger.info("Writing Package info to the file:"+pkgInfoJsonFile) |
... | ... |
@@ -323,7 +352,7 @@ def get_packages_with_build_options(pkg_build_options_file): |
323 | 323 |
jsonData.close() |
324 | 324 |
pkgs_sorted = pkg_build_option_json.items() |
325 | 325 |
for pkg in pkgs_sorted: |
326 |
- p = pkg[0].encode('utf-8') |
|
326 |
+ p = pkg[0].encode('utf-8') |
|
327 | 327 |
packages.append(str(p)) |
328 | 328 |
|
329 | 329 |
return packages |
... | ... |
@@ -338,12 +367,12 @@ def get_all_package_names(build_install_option): |
338 | 338 |
|
339 | 339 |
for install_option in options_sorted: |
340 | 340 |
filename = os.path.join(base_path, install_option[1]["file"]) |
341 |
- jsonData=open(filename) |
|
341 |
+ jsonData = open(filename) |
|
342 | 342 |
package_list_json = json.load(jsonData) |
343 | 343 |
jsonData.close() |
344 | 344 |
packages = packages + package_list_json["packages"] |
345 | 345 |
|
346 | 346 |
return packages |
347 | 347 |
|
348 |
-if __name__=="__main__": |
|
348 |
+if __name__ == "__main__": |
|
349 | 349 |
main() |
... | ... |
@@ -1,16 +1,16 @@ |
1 |
-#!/usr/bin/env python |
|
1 |
+#!/usr/bin/python3 |
|
2 | 2 |
import subprocess |
3 | 3 |
import sys |
4 | 4 |
|
5 | 5 |
def cleanUpChroot(chrootPath): |
6 |
- returnVal,listmountpoints=findmountpoints(chrootPath) |
|
6 |
+ returnVal, listmountpoints = findmountpoints(chrootPath) |
|
7 | 7 |
|
8 | 8 |
if not returnVal: |
9 | 9 |
return False |
10 | 10 |
|
11 |
- sortmountpoints(listmountpoints, chrootPath) |
|
11 |
+ sortmountpoints(listmountpoints) |
|
12 | 12 |
|
13 |
- print listmountpoints |
|
13 |
+ print(listmountpoints) |
|
14 | 14 |
|
15 | 15 |
if not unmountmountpoints(listmountpoints): |
16 | 16 |
return False |
... | ... |
@@ -21,62 +21,67 @@ def cleanUpChroot(chrootPath): |
21 | 21 |
return True |
22 | 22 |
|
23 | 23 |
def removeAllFilesFromChroot(chrootPath): |
24 |
- cmd="rm -rf "+chrootPath+"/*" |
|
25 |
- process = subprocess.Popen("%s" %cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) |
|
24 |
+ cmd = "rm -rf " + chrootPath + "/*" |
|
25 |
+ process = subprocess.Popen("%s" %cmd, shell=True, |
|
26 |
+ stdout=subprocess.PIPE, |
|
27 |
+ stderr=subprocess.PIPE) |
|
26 | 28 |
retval = process.wait() |
27 |
- if retval!=0: |
|
28 |
- print "Unable to remove files from chroot "+chrootPath |
|
29 |
+ if retval != 0: |
|
30 |
+ print("Unable to remove files from chroot " + chrootPath) |
|
29 | 31 |
return False |
30 | 32 |
return True |
31 | 33 |
|
32 | 34 |
def unmountmountpoints(listmountpoints): |
33 | 35 |
if listmountpoints is None: |
34 | 36 |
return True |
35 |
- result=True |
|
37 |
+ result = True |
|
36 | 38 |
for mountpoint in listmountpoints: |
37 |
- cmd="umount "+mountpoint |
|
38 |
- process = subprocess.Popen("%s" %cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) |
|
39 |
+ cmd = "umount " + mountpoint |
|
40 |
+ process = subprocess.Popen("%s" %cmd, shell=True, stdout=subprocess.PIPE, |
|
41 |
+ stderr=subprocess.PIPE) |
|
39 | 42 |
retval = process.wait() |
40 |
- if retval!=0: |
|
43 |
+ if retval != 0: |
|
41 | 44 |
result = False |
42 |
- print "Unable to unmount "+mountpoint |
|
45 |
+ print("Unable to unmount " + mountpoint) |
|
43 | 46 |
break |
44 | 47 |
if not result: |
45 |
- print "Unable to unmount all mounts. Unable to clean up the chroot" |
|
48 |
+ print("Unable to unmount all mounts. Unable to clean up the chroot") |
|
46 | 49 |
return False |
47 | 50 |
return True |
48 | 51 |
|
49 | 52 |
def findmountpoints(chrootPath): |
50 | 53 |
if not chrootPath.endswith("/"): |
51 | 54 |
chrootPath = chrootPath + "/" |
52 |
- cmd="mount | grep "+chrootPath+" | cut -d' ' -s -f3" |
|
53 |
- process = subprocess.Popen("%s" %cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) |
|
55 |
+ cmd = "mount | grep " + chrootPath + " | cut -d' ' -s -f3" |
|
56 |
+ process = subprocess.Popen("%s" %cmd, shell=True, |
|
57 |
+ stdout=subprocess.PIPE, |
|
58 |
+ stderr=subprocess.PIPE) |
|
54 | 59 |
retval = process.wait() |
55 |
- if retval!=0: |
|
56 |
- print "Unable to find mountpoints in chroot" |
|
57 |
- return False,None |
|
58 |
- mountpoints=process.communicate()[0] |
|
59 |
- mountpoints= mountpoints.replace("\n"," ").strip() |
|
60 |
+ if retval != 0: |
|
61 |
+ print("Unable to find mountpoints in chroot") |
|
62 |
+ return False, None |
|
63 |
+ mountpoints = process.communicate()[0].decode() |
|
64 |
+ mountpoints = mountpoints.replace("\n", " ").strip() |
|
60 | 65 |
if mountpoints == "": |
61 |
- print "No mount points found" |
|
62 |
- return True,None |
|
63 |
- listmountpoints=mountpoints.split(" ") |
|
64 |
- return True,listmountpoints |
|
66 |
+ print("No mount points found") |
|
67 |
+ return True, None |
|
68 |
+ listmountpoints = mountpoints.split(" ") |
|
69 |
+ return True, listmountpoints |
|
65 | 70 |
|
66 |
-def sortmountpoints(listmountpoints,chrootPath): |
|
71 |
+def sortmountpoints(listmountpoints): |
|
67 | 72 |
if listmountpoints is None: |
68 | 73 |
return True |
69 |
- sortedmountpoints=listmountpoints |
|
74 |
+ sortedmountpoints = listmountpoints |
|
70 | 75 |
sorted(sortedmountpoints) |
71 | 76 |
sortedmountpoints.reverse() |
72 | 77 |
|
73 | 78 |
def main(): |
74 | 79 |
if len(sys.argv) < 2: |
75 |
- print "Usage: ./clean-up-chroot.py <chrootpath>" |
|
80 |
+ print("Usage: ./clean-up-chroot.py <chrootpath>") |
|
76 | 81 |
sys.exit(1) |
77 | 82 |
if not cleanUpChroot(sys.argv[1]): |
78 | 83 |
sys.exit(1) |
79 | 84 |
sys.exit(0) |
80 | 85 |
|
81 |
-if __name__=="__main__": |
|
86 |
+if __name__ == "__main__": |
|
82 | 87 |
main() |
... | ... |
@@ -1,25 +1,25 @@ |
1 | 1 |
from Logger import Logger |
2 | 2 |
|
3 | 3 |
class constants(object): |
4 |
- specPath="" |
|
5 |
- sourcePath="" |
|
6 |
- rpmPath="" |
|
7 |
- logPath="" |
|
8 |
- topDirPath="" |
|
9 |
- buildRootPath="/mnt" |
|
10 |
- prevPublishRPMRepo="" |
|
11 |
- prevPublishXRPMRepo="" |
|
12 |
- pullsourcesConfig="" |
|
13 |
- buildPatch=False |
|
14 |
- inputRPMSPath="" |
|
15 |
- rpmCheck=False |
|
16 |
- sourceRpmPath="" |
|
17 |
- publishBuildDependencies=False |
|
18 |
- packageWeightsPath=None |
|
19 |
- dockerUnixSocket="/var/run/docker.sock" |
|
20 |
- userDefinedMacros={} |
|
4 |
+ specPath = "" |
|
5 |
+ sourcePath = "" |
|
6 |
+ rpmPath = "" |
|
7 |
+ logPath = "" |
|
8 |
+ topDirPath = "" |
|
9 |
+ buildRootPath = "/mnt" |
|
10 |
+ prevPublishRPMRepo = "" |
|
11 |
+ prevPublishXRPMRepo = "" |
|
12 |
+ pullsourcesConfig = "" |
|
13 |
+ buildPatch = False |
|
14 |
+ inputRPMSPath = "" |
|
15 |
+ rpmCheck = False |
|
16 |
+ sourceRpmPath = "" |
|
17 |
+ publishBuildDependencies = False |
|
18 |
+ packageWeightsPath = None |
|
19 |
+ dockerUnixSocket = "/var/run/docker.sock" |
|
20 |
+ userDefinedMacros = {} |
|
21 | 21 |
|
22 |
- noDepsPackageList=[ |
|
22 |
+ noDepsPackageList = [ |
|
23 | 23 |
"texinfo", |
24 | 24 |
"bzip2", |
25 | 25 |
"bzip2-libs", |
... | ... |
@@ -32,7 +32,7 @@ class constants(object): |
32 | 32 |
"sqlite-libs"] |
33 | 33 |
|
34 | 34 |
# These packages will be built in first order as build-core-toolchain stage |
35 |
- listCoreToolChainPackages=[ |
|
35 |
+ listCoreToolChainPackages = [ |
|
36 | 36 |
"filesystem", |
37 | 37 |
"linux-api-headers", |
38 | 38 |
"glibc", |
... | ... |
@@ -52,7 +52,7 @@ class constants(object): |
52 | 52 |
"bash"] |
53 | 53 |
|
54 | 54 |
# These packages will be built in a second stage to replace publish RPMS |
55 |
- listToolChainPackages=[ |
|
55 |
+ listToolChainPackages = [ |
|
56 | 56 |
"filesystem", |
57 | 57 |
"linux-api-headers", |
58 | 58 |
"glibc", |
... | ... |
@@ -114,7 +114,7 @@ class constants(object): |
114 | 114 |
# package. This list should be ordered by install order. On a stage1 |
115 | 115 |
# and stage2 published rpms will/might be used after stage2 only local |
116 | 116 |
# RPMS will be used |
117 |
- listToolChainRPMsToInstall=[ |
|
117 |
+ listToolChainRPMsToInstall = [ |
|
118 | 118 |
"filesystem", |
119 | 119 |
"linux-api-headers", |
120 | 120 |
"glibc", |
... | ... |
@@ -212,42 +212,48 @@ class constants(object): |
212 | 212 |
"cpio", |
213 | 213 |
"go"] |
214 | 214 |
|
215 |
- perPackageToolChain = dict.fromkeys(["openjdk8", "openjdk8-doc", "openjdk8-src", "openjdk8-sample", "openjre8" ], [ |
|
216 |
- "icu-devel", |
|
217 |
- "openjdk", |
|
218 |
- "openjre", |
|
219 |
- "icu", |
|
220 |
- "alsa-lib", |
|
221 |
- "alsa-lib-devel", |
|
222 |
- "xcb-proto", |
|
223 |
- "libXdmcp-devel", |
|
224 |
- "libXau-devel", |
|
225 |
- "util-macros", |
|
226 |
- "xtrans", |
|
227 |
- "libxcb-devel", |
|
228 |
- "proto", |
|
229 |
- "libXdmcp", |
|
230 |
- "libxcb", |
|
231 |
- "libXau", |
|
232 |
- "xtrans-devel", |
|
233 |
- "libX11", |
|
234 |
- "libX11-devel", |
|
235 |
- "libXext", |
|
236 |
- "libXext-devel", |
|
237 |
- "libICE-devel", |
|
238 |
- "libSM", |
|
239 |
- "libICE", |
|
240 |
- "libSM-devel", |
|
241 |
- "libXt", |
|
242 |
- "libXmu", |
|
243 |
- "libXt-devel", |
|
244 |
- "libXmu-devel", |
|
245 |
- "libXrender", |
|
246 |
- "libXrender-devel"]) |
|
215 |
+ perPackageToolChain = dict.fromkeys( |
|
216 |
+ ["openjdk8", |
|
217 |
+ "openjdk8-doc", |
|
218 |
+ "openjdk8-src", |
|
219 |
+ "openjdk8-sample", |
|
220 |
+ "openjre8"], |
|
221 |
+ [ |
|
222 |
+ "icu-devel", |
|
223 |
+ "openjdk", |
|
224 |
+ "openjre", |
|
225 |
+ "icu", |
|
226 |
+ "alsa-lib", |
|
227 |
+ "alsa-lib-devel", |
|
228 |
+ "xcb-proto", |
|
229 |
+ "libXdmcp-devel", |
|
230 |
+ "libXau-devel", |
|
231 |
+ "util-macros", |
|
232 |
+ "xtrans", |
|
233 |
+ "libxcb-devel", |
|
234 |
+ "proto", |
|
235 |
+ "libXdmcp", |
|
236 |
+ "libxcb", |
|
237 |
+ "libXau", |
|
238 |
+ "xtrans-devel", |
|
239 |
+ "libX11", |
|
240 |
+ "libX11-devel", |
|
241 |
+ "libXext", |
|
242 |
+ "libXext-devel", |
|
243 |
+ "libICE-devel", |
|
244 |
+ "libSM", |
|
245 |
+ "libICE", |
|
246 |
+ "libSM-devel", |
|
247 |
+ "libXt", |
|
248 |
+ "libXmu", |
|
249 |
+ "libXt-devel", |
|
250 |
+ "libXmu-devel", |
|
251 |
+ "libXrender", |
|
252 |
+ "libXrender-devel"]) |
|
247 | 253 |
perPackageToolChain["apache-maven"] = ["apache-maven"] |
248 | 254 |
# List of RPMs which are not published. They will be created during the |
249 | 255 |
# build process |
250 |
- listOfRPMsProvidedAfterBuild=[ |
|
256 |
+ listOfRPMsProvidedAfterBuild = [ |
|
251 | 257 |
"util-linux-devel", |
252 | 258 |
"flex-devel", |
253 | 259 |
"nspr-devel", |
... | ... |
@@ -266,7 +272,7 @@ class constants(object): |
266 | 266 |
|
267 | 267 |
# List of packages that will be installed in addition for each |
268 | 268 |
# package to make check |
269 |
- listMakeCheckRPMPkgtoInstall=[ |
|
269 |
+ listMakeCheckRPMPkgtoInstall = [ |
|
270 | 270 |
"python2", |
271 | 271 |
"python2-devel", |
272 | 272 |
"python2-libs", |
... | ... |
@@ -315,14 +321,14 @@ class constants(object): |
315 | 315 |
"unzip", |
316 | 316 |
"systemd-devel", |
317 | 317 |
"gnupg", |
318 |
- "ncurses-terminfo" ] |
|
318 |
+ "ncurses-terminfo"] |
|
319 | 319 |
|
320 |
- listReInstallPackages=[ |
|
320 |
+ listReInstallPackages = [ |
|
321 | 321 |
"go"] |
322 | 322 |
|
323 | 323 |
# List of packages that requires privileged docker |
324 | 324 |
# to run make check. |
325 |
- listReqPrivilegedDockerForTest=[ |
|
325 |
+ listReqPrivilegedDockerForTest = [ |
|
326 | 326 |
"elfutils", # SYS_PTRACE |
327 | 327 |
"gdb", |
328 | 328 |
"glibc", |
... | ... |
@@ -332,7 +338,7 @@ class constants(object): |
332 | 332 |
# Requires(post):/sbin/useradd |
333 | 333 |
# Build system should interpret it as |
334 | 334 |
# Requires: shadow |
335 |
- providedBy={ |
|
335 |
+ providedBy = { |
|
336 | 336 |
"/usr/sbin/useradd":"shadow", |
337 | 337 |
"/usr/sbin/groupadd":"shadow", |
338 | 338 |
"/usr/bin/which":"which", |
... | ... |
@@ -340,7 +346,7 @@ class constants(object): |
340 | 340 |
} |
341 | 341 |
|
342 | 342 |
# list of spec files to skip for parsing for given arch |
343 |
- skipSpecsForArch={ |
|
343 |
+ skipSpecsForArch = { |
|
344 | 344 |
"x86_64":[ |
345 | 345 |
"u-boot-rpi3.spec", |
346 | 346 |
"openjdk8_aarch64.spec" |
... | ... |
@@ -426,37 +432,37 @@ class constants(object): |
426 | 426 |
constants.logPath = options.logPath |
427 | 427 |
constants.prevPublishRPMRepo = options.publishRPMSPath |
428 | 428 |
constants.prevPublishXRPMRepo = options.publishXRPMSPath |
429 |
- constants.buildRootPath=options.buildRootPath |
|
429 |
+ constants.buildRootPath = options.buildRootPath |
|
430 | 430 |
constants.pullsourcesConfig = options.pullsourcesConfig |
431 |
- constants.inputRPMSPath=options.inputRPMSPath |
|
432 |
- constants.testForceRPMS=[] |
|
431 |
+ constants.inputRPMSPath = options.inputRPMSPath |
|
432 |
+ constants.testForceRPMS = [] |
|
433 | 433 |
constants.rpmCheck = options.rpmCheck |
434 | 434 |
constants.rpmCheckStopOnError = options.rpmCheckStopOnError |
435 |
- constants.publishBuildDependencies=options.publishBuildDependencies |
|
436 |
- constants.packageWeightsPath=options.packageWeightsPath |
|
435 |
+ constants.publishBuildDependencies = options.publishBuildDependencies |
|
436 |
+ constants.packageWeightsPath = options.packageWeightsPath |
|
437 | 437 |
constants.tmpDirPath = "/dev/shm" |
438 | 438 |
if constants.rpmCheck: |
439 |
- constants.testLogger=Logger.getLogger("MakeCheckTest",constants.logPath) |
|
440 |
- constants.addMacro("with_check","1") |
|
439 |
+ constants.testLogger = Logger.getLogger("MakeCheckTest", constants.logPath) |
|
440 |
+ constants.addMacro("with_check", "1") |
|
441 | 441 |
else: |
442 |
- constants.addMacro("with_check","0") |
|
442 |
+ constants.addMacro("with_check", "0") |
|
443 | 443 |
|
444 | 444 |
#adding distribution rpm macro |
445 |
- constants.addMacro("dist",options.dist) |
|
445 |
+ constants.addMacro("dist", options.dist) |
|
446 | 446 |
|
447 | 447 |
#adding buildnumber rpm macro |
448 |
- constants.addMacro("photon_build_number",options.buildNumber) |
|
448 |
+ constants.addMacro("photon_build_number", options.buildNumber) |
|
449 | 449 |
|
450 | 450 |
#adding releasenumber rpm macro |
451 |
- constants.addMacro("photon_release_version",options.releaseVersion) |
|
451 |
+ constants.addMacro("photon_release_version", options.releaseVersion) |
|
452 | 452 |
|
453 |
- if options.katBuild != None: |
|
453 |
+ if options.katBuild is not None: |
|
454 | 454 |
constants.addMacro("kat_build", options.katBuild) |
455 | 455 |
|
456 | 456 |
@staticmethod |
457 | 457 |
def setTestForceRPMS(listsPackages): |
458 |
- constants.testForceRPMS=listsPackages |
|
458 |
+ constants.testForceRPMS = listsPackages |
|
459 | 459 |
|
460 | 460 |
@staticmethod |
461 | 461 |
def addMacro(macroName, macroValue): |
462 |
- constants.userDefinedMacros[macroName]=macroValue |
|
462 |
+ constants.userDefinedMacros[macroName] = macroValue |
... | ... |
@@ -1,5 +1,3 @@ |
1 |
-#!/usr/bin/python2 |
|
2 |
-# |
|
3 | 1 |
# Copyright (C) 2015 vmware inc. |
4 | 2 |
# |
5 | 3 |
# Author: Sharath George <sharathg@vmware.com> |
... | ... |
@@ -10,16 +8,21 @@ import collections |
10 | 10 |
|
11 | 11 |
class JsonWrapper(object): |
12 | 12 |
|
13 |
- def __init__(self, filename): |
|
13 |
+ def __init__(self, filename): |
|
14 | 14 |
self.filename = filename |
15 |
- |
|
15 |
+ self.data = None |
|
16 | 16 |
def read(self): |
17 |
- json_data = open(self.filename) |
|
18 |
- self.data = json.load(json_data, object_pairs_hook=collections.OrderedDict) |
|
19 |
- json_data.close() |
|
17 |
+ try: |
|
18 |
+ with open(self.filename) as json_data: |
|
19 |
+ self.data = json.load(json_data, object_pairs_hook=collections.OrderedDict) |
|
20 |
+ except Exception as _: |
|
21 |
+ raise Exception("Unable to read {}".format(self.filename)) |
|
20 | 22 |
return self.data |
21 | 23 |
|
22 |
- def write(self, data): |
|
24 |
+ def write(self, data): |
|
23 | 25 |
self.data = data |
24 |
- outfile = open(self.filename, 'wb') |
|
25 |
- json.dump(data, outfile) |
|
26 |
+ try: |
|
27 |
+ with open(self.filename, 'w') as outfile: |
|
28 |
+ json.dump(data, outfile) |
|
29 |
+ except Exception as _: |
|
30 |
+ raise Exception("Unable to write {}".format(self.filename)) |