diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml new file mode 100644 index 0000000..a385877 --- /dev/null +++ b/.github/workflows/pylint.yml @@ -0,0 +1,23 @@ +name: Pylint + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint prettytable sqlalchemy + - name: Analysing the code with pylint + run: | + pylint --disable=all --enable=E $(git ls-files '*.py') \ No newline at end of file diff --git a/Tests/runTheGlobalTagTests.py b/Tests/runTheGlobalTagTests.py index 92811a5..31cd132 100644 --- a/Tests/runTheGlobalTagTests.py +++ b/Tests/runTheGlobalTagTests.py @@ -5,14 +5,14 @@ import copy import string, re import subprocess -import ConfigParser, json +import configparser, json from optparse import OptionParser from autoCond_TEMPL import autoCondTemplate as myAutoCond ####################--- Classes ---############################ -class BetterConfigParser(ConfigParser.ConfigParser): +class BetterConfigParser(configparser.ConfigParser): ############################################## def optionxform(self, optionstr): @@ -38,7 +38,7 @@ def __updateDict( self, dictionary, section ): if "local"+section.title() in self.sections(): for option in self.options( "local"+section.title() ): result[option] = self.get( "local"+section.title(),option ) - except ConfigParser.NoSectionError, section: + except configparser.NoSectionError as section: msg = ("%s in configuration files. This section is mandatory." 
%(str(section).replace(":", "", 1))) #raise AllInOneError(msg) @@ -50,7 +50,7 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): for option in demandPars: try: result[option] = self.get( section, option ) - except ConfigParser.NoOptionError, globalSectionError: + except configparser.NoOptionError as globalSectionError: globalSection = str( globalSectionError ).split( "'" )[-2] splittedSectionName = section.split( ":" ) if len( splittedSectionName ) > 1: @@ -61,7 +61,7 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): if self.has_section( localSection ): try: result[option] = self.get( localSection, option ) - except ConfigParser.NoOptionError, option: + except configparser.NoOptionError as option: msg = ("%s. This option is mandatory." %(str(option).replace(":", "", 1).replace( "section", @@ -72,7 +72,7 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): %(str(globalSectionError).replace(":", "", 1))) #raise AllInOneError(msg) result = self.__updateDict( result, section ) - #print result + #print(result) return result ##### method to parse the input file ################################ @@ -94,7 +94,7 @@ def ConfigSectionMap(config, section): def replaceByMap(target, map): result = target for id in map: - #print " "+id+": "+map[id] + #print(" "+id+": "+map[id]) lifeSaver = 10e3 iteration = 0 while ".oO[" in result and "]Oo." in result: @@ -103,11 +103,11 @@ def replaceByMap(target, map): iteration += 1 if iteration > lifeSaver: problematicLines = "" - print map.keys() + print(map.keys()) for line in result.splitlines(): if ".oO[" in result and "]Oo." 
in line: problematicLines += "%s\n"%line - raise StandardError, "Oh Dear, there seems to be an endless loop in replaceByMap!!\n%s\nrepMap"%problematicLines + raise RuntimeError("Oh Dear, there seems to be an endless loop in replaceByMap!!\n%s\nrepMap"%problematicLines) return result ############################################## @@ -115,11 +115,11 @@ def execme(command,dryrun=False): '''Wrapper for executing commands. ''' if dryrun: - print command + print(command) else: - print " * Executing: %s..."%command + print(" * Executing: %s..."%command) os.system(command) - print " * Executed!" + print(" * Executed!") ##################################################################### def getCommandOutput(command): @@ -131,7 +131,7 @@ def getCommandOutput(command): data = child.read() err = child.close() if err: - print '%s failed w/ exit code %d' % (command, err) + print('%s failed w/ exit code %d' % (command, err)) return data ############################################## @@ -149,9 +149,9 @@ def main(): repMap = {} if ConfigFile is not None: - print "********************************************************" - print "* Parsing from input file:", ConfigFile," " - print "********************************************************" + print("********************************************************") + print("* Parsing from input file:", ConfigFile," ") + print("********************************************************") config = BetterConfigParser() config.read(ConfigFile) @@ -209,7 +209,7 @@ def main(): for section in sections: - print "Preparing:",section + print("Preparing:",section) conditions = config.getResultingSection(section) if(conditions): fout1.write("## "+section.replace("_"," ")+"\n \n") @@ -223,15 +223,15 @@ def main(): fout2.write(" * =\'"+dict[key][0]+"\'= ("+dict[key][1]+") : [[https://cms-conddb.cern.ch/cmsDbBrowser/list/Prod/gts/"+params[0]+"]["+params[0]+"]],[[https://cms-conddb.cern.ch/cmsDbBrowser/diff/Prod/gts/"+params[0]+"/"+params[1]+"][diff with 
previous]]: \n") fout2.write(" *\n \n") - print "=====>",str(dict[key][0]).ljust(20),":",str(params[0]).ljust(20) - #print '{:10s} {:10s}'.format(str(dict[key][0]),str(params[0])) + print("=====>",str(dict[key][0]).ljust(20),":",str(params[0]).ljust(20)) + #print('{:10s} {:10s}'.format(str(dict[key][0]),str(params[0]))) repMap.update({dict[key][0].upper():params[0]}) ## replace the map of inputs theReplacedMap = replaceByMap(myAutoCond,repMap) - #print repMap - #print theReplacedMap + #print(repMap) + #print(theReplacedMap) filePath = os.path.join(".","autoCond.py") theFile = open( filePath, "w" ) @@ -239,9 +239,9 @@ def main(): theFile.close() theReleaseList = getCommandOutput("echo `scramv1 l -a | awk '$1==\"CMSSW\" && /CMSSW_"+opts.inputrelease+"/ { print $2 }' | sort`") - #print theReleaseList + #print(theReleaseList) theRelease = theReleaseList.split()[-1] - #print theRelease + #print(theRelease) commands = [] commands.append("#!/bin/tcsh") @@ -256,11 +256,11 @@ def main(): theExecutableFile = open("testing.csh", "w" ) - print "-------------------------------------------------------" - print "Will run the following commands" - print "-------------------------------------------------------" + print("-------------------------------------------------------") + print("Will run the following commands") + print("-------------------------------------------------------") for command in commands: - print command + print(command) theExecutableFile.write(command+"\n") theExecutableFile.close() diff --git a/ValidateHLTMenu/ALCAMenuChecker.py b/ValidateHLTMenu/ALCAMenuChecker.py index a2d0f66..060ba1c 100644 --- a/ValidateHLTMenu/ALCAMenuChecker.py +++ b/ValidateHLTMenu/ALCAMenuChecker.py @@ -21,8 +21,8 @@ L1MenuName="l1menu.xml" # Parse the HLT menu -### Usually, blindly executing an external file is a security hazard... -execfile(HLTMenuName) +### Usually, blindly executing an external file is a security hazard... 
+exec(open(HLTMenuName).read()) # Using exec() - Python 3 compatible # Parse the L1 menu. Notice that here we are parsing the version that is # usually available from the Twiki: @@ -34,7 +34,7 @@ for algo in root.findall('algorithm'): listOfAvailableSeeds.append(algo[0].text) -print process.process +print(process.process) pathnames = process.paths.viewkeys() pathsVsSeeds = dict() @@ -51,7 +51,7 @@ def splitL1seeds(fullSeed): # 0) Get the number of HLT columns numberOfHLTColumns = len(process.PrescaleService.lvl1Labels) HLTColumnIndexes = range(0,numberOfHLTColumns) -print "HLT menu has",numberOfHLTColumns,"columns" +print("HLT menu has",numberOfHLTColumns,"columns") # 1) Make the map of path vs list of seeds for path in process.paths: @@ -66,7 +66,7 @@ def splitL1seeds(fullSeed): pathsVsSeeds[thePath.label()] = list() # 2) Make the map of path vs prescales -print "-"*64 +print("-"*64) for i in process.PrescaleService.prescaleTable: # We don't want the Output paths that may be here if "Output" in i.pathName.value(): @@ -78,14 +78,14 @@ def splitL1seeds(fullSeed): if pathName not in pathPrescales.keys(): pathPrescales[pathName] = [1]*numberOfHLTColumns if not("Calibration" in pathName or "HLTriggerFirstPath" in pathName or "HLTriggerFinalPath" in pathName): - print RED+'WARNING:'+RESET,pathName,"has no defined HLT prescales" + print(RED+'WARNING:'+RESET,pathName,"has no defined HLT prescales") if len(pathsVsSeeds[pathName]) == 0: L1pathPrescales[pathName] = [1]*numberOfHLTColumns # NOW come the AlCa checks proper # 1) Do I have all the AlCa datasets? 
-print "-"*64 +print("-"*64) datasetNames = process.datasets._Parameterizable__parameterNames mandatoryDatasetsAndPaths = {"ExpressPhysics":["HLT_IsoMu20_v*", "HLT_IsoMu24_v*", @@ -122,20 +122,20 @@ def splitL1seeds(fullSeed): for mds in mandatoryDatasets: if mds in datasetNames: - print row_format.format(mds),GREEN+"PRESENT"+RESET + print(row_format.format(mds),GREEN+"PRESENT"+RESET) presentMandatoryDatasets.append(mds) else: - print row_format.format(mds),RED+"ABSENT"+RESET + print(row_format.format(mds),RED+"ABSENT"+RESET) presentMandatoryDatasets.sort() # 2) Do the datasets have all paths they should have? -print "-"*64 +print("-"*64) for mds in presentMandatoryDatasets: theDataset = getattr(process.datasets,mds) for requestedPath in mandatoryDatasetsAndPaths[mds]: pathIsPresent = False if len(fnmatch.filter(theDataset,requestedPath)) == 0: - print row_format.format(mds),row_format2.format(requestedPath),RED+"ABSENT"+RESET + print(row_format.format(mds),row_format2.format(requestedPath),RED+"ABSENT"+RESET) # Do the paths have at least one L1 seed available in the menu? 
for matchingPath in fnmatch.filter(theDataset,requestedPath): hasL1Seed = False @@ -146,17 +146,17 @@ def splitL1seeds(fullSeed): if seed in listOfAvailableSeeds: hasL1Seed = True if not hasL1Seed: - print row_format.format(mds),row_format2.format(matchingPath),GREEN+"PRESENT"+RESET,"but ",RED+"NO L1 SEED"+RESET + print(row_format.format(mds),row_format2.format(matchingPath),GREEN+"PRESENT"+RESET,"but ",RED+"NO L1 SEED"+RESET) elif hasL1Seed: - print row_format.format(mds),row_format2.format(matchingPath),GREEN+"PRESENT"+RESET,"and ",GREEN+"HAS L1 SEED"+RESET + print(row_format.format(mds),row_format2.format(matchingPath),GREEN+"PRESENT"+RESET,"and ",GREEN+"HAS L1 SEED"+RESET) # 3) Check the smart prescales of the Express Datasets: -print "-"*64 +print("-"*64) for mds in presentMandatoryDatasets: if "Express" in mds: normalizedDSName = mds.replace("Physics","") - print BOLD+normalizedDSName+RESET + print(BOLD+normalizedDSName+RESET) smartPrescales = getattr(process,"hltPre"+normalizedDSName+"OutputSmart") - print smartPrescales.triggerConditions - print "\n" + print(smartPrescales.triggerConditions) + print("\n") diff --git a/ValidateHLTMenu/ValidateHLTMenu.py b/ValidateHLTMenu/ValidateHLTMenu.py index 6b4f789..ae0281e 100644 --- a/ValidateHLTMenu/ValidateHLTMenu.py +++ b/ValidateHLTMenu/ValidateHLTMenu.py @@ -114,7 +114,7 @@ def find_hlt_path(PrimaryDataset, HLTpath, HLTMenu, output_file): return True, matching_triggers -def find_pd(primary_dataset): +def find_pd(primary_dataset, HLTMenu): """ """ pd_match = "" @@ -204,7 +204,7 @@ def analyze(AlCaRecoMatrix, AlCaRecoTriggerBits, configuration, HLTMenu, hlt_men "MatchingTriggers": matching_triggers }) else: - pd_match, pd_unmatch = find_pd(primary_dataset) + pd_match, pd_unmatch = find_pd(primary_dataset, HLTMenu) output[TriggerBitKey]["PDMatch"] = pd_match output[TriggerBitKey]["PDUnMatch"] = pd_unmatch if pd_match == "": diff --git a/ValidateHLTMenu/testAlCaRecoMatrix.py b/ValidateHLTMenu/testAlCaRecoMatrix.py 
index 246ce39..3edf336 100644 --- a/ValidateHLTMenu/testAlCaRecoMatrix.py +++ b/ValidateHLTMenu/testAlCaRecoMatrix.py @@ -102,7 +102,7 @@ # Parse the HLT menu ### Usually, blindly executing an external file is a security hazard... print("Parsing HLT menu...") -execfile(HLTMenuName) +exec(open(HLTMenuName).read()) # Using exec() - Python 3 compatible print("List of datasets:") print(sorted(process.datasets._Parameterizable__parameterNames)) diff --git a/scripts/CheckPR/CheckPullRequest.py b/scripts/CheckPR/CheckPullRequest.py index 8ef2e68..4d13225 100644 --- a/scripts/CheckPR/CheckPullRequest.py +++ b/scripts/CheckPR/CheckPullRequest.py @@ -25,7 +25,7 @@ if os.environ.get('SCRAM_ARCH') != 'slc6_amd64_gcc530': os.environ['SCRAM_ARCH']='slc6_amd64_gcc530' -print 'SCRAM_ARCH =', os.environ.get('SCRAM_ARCH') +print('SCRAM_ARCH =', os.environ.get('SCRAM_ARCH')) ### equivallent to step 0 -- we prepare the terrain if len(sys.argv) == 3: @@ -34,16 +34,16 @@ os.system("mkdir %s" % toCompare[0]) os.chdir("%s/%s" % (CWD, toCompare[0])) os.system("scramv1 project %s" % CMSSWREL) - print "*** Preparing %s ***" % toCompare[0] + print("*** Preparing %s ***" % toCompare[0]) os.chdir("%s/%s/%s/src" % (CWD, toCompare[0], CMSSWREL)) - print "Initializing git repository ..." + print("Initializing git repository ...") os.system("eval `scramv1 runtime -sh` && git cms-init > ../creation.log 2>&1") if toCompare[1] == 1: - print "Applying patch ..." + print("Applying patch ...") os.system("eval `scramv1 runtime -sh` && git cms-merge-topic %s > ../merge.log 2>&1" % sys.argv[2]) - print "Compiling ..." 
+ print("Compiling ...") os.system("eval `scramv1 runtime -sh` && scramv1 b -j16 > ../compile.log 2>&1") os.system("mkdir testDir") @@ -70,40 +70,40 @@ ### compare the edmEvents stuff elif sys.argv[1] == "2": for toCompare in compare: - print '========================================\n*** Creating edm reports for %s ***\n' % toCompare[0] + print('========================================\n*** Creating edm reports for %s ***\n' % toCompare[0]) CMSSWREL = os.listdir("%s/%s" % (CWD, toCompare[0]))[0] os.chdir("%s/%s/%s/src/testDir/FARM/outputs" % (CWD, toCompare[0], CMSSWREL)) for workflow in toCompare[2]: if os.path.isdir("results_%s" % workflow[0]): os.chdir("results_%s" % workflow[0]) else: - print "results_%s is missing! Could be due to disk quota or other problems." % workflow[0] + print("results_%s is missing! Could be due to disk quota or other problems." % workflow[0]) continue ListOfFilesToCheck = os.popen("find . -name \"step?_ALCA*.py\" | xargs -I% grep root % | grep -v step | grep -v Names | awk '{print $3}' | cut -c 23- | sed \"s/.root\'),//g\"").read().split() if len(ListOfFilesToCheck) == 0: - print workflow[0], "\thas no AlCaReco root files." 
+ print(workflow[0], "\thas no AlCaReco root files.") os.chdir("../") continue - print "Creating logs of", workflow[0], ":" + print("Creating logs of", workflow[0], ":") for toCheck in ListOfFilesToCheck: - print " edmEventSize -v -a %s.root > eventSize_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck) + print(" edmEventSize -v -a %s.root > eventSize_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck)) os.system("eval `scramv1 runtime -sh` && edmEventSize -v -a %s.root > eventSize_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck)) - print " edmDumpEventContent %s.root > eventContent_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck) + print(" edmDumpEventContent %s.root > eventContent_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck)) os.system("eval `scramv1 runtime -sh` && edmDumpEventContent %s.root > eventContent_%s_%s.log 2>&1" % (toCheck, workflow[0], toCheck)) - print "Merging eventSize logs ..." - print " cat eventSize_%s* > %s/eventSize_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0]) + print("Merging eventSize logs ...") + print(" cat eventSize_%s* > %s/eventSize_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0])) os.system("cat eventSize_%s* > %s/eventSize_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0])) - print " cat eventContent_%s* > %s/eventContent_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0]) + print(" cat eventContent_%s* > %s/eventContent_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0])) os.system("cat eventContent_%s* > %s/eventContent_%s_%s.summary" % (workflow[0], CWD, toCompare[0], workflow[0])) os.chdir("../") - print "\n" + print("\n") os.chdir(CWD) ### compare the plots using validate.C elif sys.argv[1] == "3": - print 'Not finished yet ...' 
+ print('Not finished yet ...') raise SystemExit CMSSWREL = os.listdir("%s/%s" % (CWD, compare[0][0])) for workflow in compare[0][2]: diff --git a/scripts/CheckPR/LaunchOnCondor.py b/scripts/CheckPR/LaunchOnCondor.py index 915a21e..fa81bef 100644 --- a/scripts/CheckPR/LaunchOnCondor.py +++ b/scripts/CheckPR/LaunchOnCondor.py @@ -49,175 +49,175 @@ def natural_sort(l): alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ] return sorted(l, key = alphanum_key) - - def usage() : - print 'LaunchOnCondor [options]' + print('LaunchOnCondor [options]') # print ' -j json file with lumi info' # print ' -o output file' - print 'is an interface to submit jobs to LSF/Condor/Crab3 batch in a high-level way' + print('is an interface to submit jobs to LSF/Condor/Crab3 batch in a high-level way') def CreateTheConfigFile(argv): - global Jobs_Name - global Jobs_Index - global Jobs_Count - global Jobs_Seed - global Jobs_Skip - global Jobs_NEvent - global Jobs_Inputs - global Jobs_InitCmds - global Jobs_FinalCmds - global Path_Cfg - global CopyRights - Path_Cfg = [] - configFileList = [] - if(isinstance(argv[1], list)): - configFileList.extend(argv[1]) - else: - configFileList.extend([argv[1]]) - - step = 0 - for localConfigFile in configFileList: - currentPathCfg = Farm_Directories[1]+Jobs_Index+Jobs_Name+'_step_%i_cfg.py' % step - Path_Cfg.append(currentPathCfg) - config_file=open(localConfigFile,'r') - config_txt = '\n\n' + CopyRights + '\n\n' - config_txt += config_file.read() - config_file.close() - i = 2 - while i < len(argv)-1: - config_txt = config_txt.replace(argv[i],argv[i+1]) - i+=2 - - #Default Replacements - config_txt = config_txt.replace("XXX_I_XXX" ,"%04i"%Jobs_Count) - config_txt = config_txt.replace("XXX_PATH_XXX" ,os.getcwd()) - config_txt = config_txt.replace("XXX_OUTPUT_XXX" ,Jobs_Name) - config_txt = config_txt.replace("XXX_NAME_XXX" ,Jobs_Index+Jobs_Name) - config_txt = config_txt.replace("XXX_SEED_XXX" ,str(Jobs_Seed+Jobs_Count)) - 
config_txt = config_txt.replace("XXX_NEVENTS_XXX" ,str(Jobs_NEvent)) - config_txt = config_txt.replace("XXX_SKIP_XXX" ,str(Jobs_Skip)) - if Jobs_Count < len(Jobs_Inputs): - config_txt = config_txt.replace("XXX_INPUT_XXX" ,Jobs_Inputs[Jobs_Count]) - - config_file=open(currentPathCfg,'w') - config_file.write(config_txt) - config_file.close() - step+=1 - -def CreateTheShellFile(argv): - global Path_Shell - global Path_Log - global Path_Cfg - global CopyRights - global Jobs_RunHere - global Jobs_InitCmds - global Jobs_FinalCmds - global absoluteShellPath - if(subTool=='crab'):return - - Path_Log = Farm_Directories[2]+Jobs_Index+Jobs_Name - Path_Shell = Farm_Directories[1]+Jobs_Index+Jobs_Name+'.sh' - function_argument='' - hostname = os.getenv("HOSTNAME", "") + global Jobs_Name + global Jobs_Index + global Jobs_Count + global Jobs_Seed + global Jobs_Skip + global Jobs_NEvent + global Jobs_Inputs + global Jobs_InitCmds + global Jobs_FinalCmds + global Path_Cfg + global CopyRights + Path_Cfg = [] + configFileList = [] + if(isinstance(argv[1], list)): + configFileList.extend(argv[1]) + else: + configFileList.extend([argv[1]]) - for i in range(2,len(argv)): - function_argument+="%s" % argv[i] - if i != len(argv)-1: - function_argument+=', ' - - shell_file=open(Path_Shell,'w') - shell_file.write('#! 
/bin/sh\n') - shell_file.write(CopyRights + '\n') - shell_file.write('pwd\n') - if 'cis.gov.pl' in hostname: - shell_file.write('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') + step = 0 + for localConfigFile in configFileList: + currentPathCfg = Farm_Directories[1]+Jobs_Index+Jobs_Name+'_step_%i_cfg.py' % step + Path_Cfg.append(currentPathCfg) + config_file=open(localConfigFile,'r') + config_txt = '\n\n' + CopyRights + '\n\n' + config_txt += config_file.read() + config_file.close() + i = 2 + while i < len(argv)-1: + config_txt = config_txt.replace(argv[i],argv[i+1]) + i+=2 + + #Default Replacements + config_txt = config_txt.replace("XXX_I_XXX" ,"%04i"%Jobs_Count) + config_txt = config_txt.replace("XXX_PATH_XXX" ,os.getcwd()) + config_txt = config_txt.replace("XXX_OUTPUT_XXX" ,Jobs_Name) + config_txt = config_txt.replace("XXX_NAME_XXX" ,Jobs_Index+Jobs_Name) + config_txt = config_txt.replace("XXX_SEED_XXX" ,str(Jobs_Seed+Jobs_Count)) + config_txt = config_txt.replace("XXX_NEVENTS_XXX" ,str(Jobs_NEvent)) + config_txt = config_txt.replace("XXX_SKIP_XXX" ,str(Jobs_Skip)) + if Jobs_Count < len(Jobs_Inputs): + config_txt = config_txt.replace("XXX_INPUT_XXX" ,Jobs_Inputs[Jobs_Count]) + + config_file=open(currentPathCfg,'w') + config_file.write(config_txt) + config_file.close() + step+=1 + +def CreateTheShellFile(argv): + global Path_Shell + global Path_Log + global Path_Cfg + global CopyRights + global Jobs_RunHere + global Jobs_InitCmds + global Jobs_FinalCmds + global absoluteShellPath + if(subTool=='crab'):return + + Path_Log = Farm_Directories[2]+Jobs_Index+Jobs_Name + Path_Shell = Farm_Directories[1]+Jobs_Index+Jobs_Name+'.sh' + function_argument='' + hostname = os.getenv("HOSTNAME", "") + + for i in range(2,len(argv)): + function_argument+="%s" % argv[i] + if i != len(argv)-1: + function_argument+=', ' + + shell_file=open(Path_Shell,'w') + shell_file.write('#! 
/bin/sh\n') + shell_file.write(CopyRights + '\n') + shell_file.write('pwd\n') + if 'cis.gov.pl' in hostname: + shell_file.write('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - if 'purdue.edu' in hostname: - shell_file.write('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') - shell_file.write('source /grp/cms/tools/glite/setup.sh\n') - - shell_file.write('export SCRAM_ARCH='+os.getenv("SCRAM_ARCH","slc5_amd64_gcc462")+'\n') - shell_file.write('export BUILD_ARCH='+os.getenv("BUILD_ARCH","slc5_amd64_gcc462")+'\n') - - if 'purdue.edu' in hostname: - shell_file.write('export VO_CMS_SW_DIR=/apps/osg/cmssoft/cms\n') - elif 'cis.gov.pl' in hostname: - shell_file.write('export VO_CMS_SW_DIR=/cvmfs/cms.cern.ch\n') - else: - shell_file.write('export VO_CMS_SW_DIR='+os.getenv("VO_CMS_SW_DIR","/nfs/soft/cms")+'\n') + if 'purdue.edu' in hostname: + shell_file.write('source /cvmfs/cms.cern.ch/cmsset_default.sh\n') + shell_file.write('source /grp/cms/tools/glite/setup.sh\n') + + shell_file.write('export SCRAM_ARCH='+os.getenv("SCRAM_ARCH","slc5_amd64_gcc462")+'\n') + shell_file.write('export BUILD_ARCH='+os.getenv("BUILD_ARCH","slc5_amd64_gcc462")+'\n') + + if 'purdue.edu' in hostname: + shell_file.write('export VO_CMS_SW_DIR=/apps/osg/cmssoft/cms\n') + elif 'cis.gov.pl' in hostname: + shell_file.write('export VO_CMS_SW_DIR=/cvmfs/cms.cern.ch\n') + else: + shell_file.write('export VO_CMS_SW_DIR='+os.getenv("VO_CMS_SW_DIR","/nfs/soft/cms")+'\n') #shell_file.write('source /nfs/soft/cms/cmsset_default.sh\n') - shell_file.write('cd ' + os.getcwd() + '\n') - shell_file.write('eval `scramv1 runtime -sh`\n') - - for i in range(len(Jobs_InitCmds)): - shell_file.write(Jobs_InitCmds[i]+'\n') - - if argv[0]=='BASH': - if Jobs_RunHere==0: - shell_file.write('cd -\n') - shell_file.write(argv[1] + " %s\n" % function_argument) - elif argv[0]=='ROOT': - function_argument='('+function_argument+')' - if Jobs_RunHere==0: - shell_file.write('cd -\n') - shell_file.write('root -l -b << EOF\n') - 
shell_file.write(' TString makeshared(gSystem->GetMakeSharedLib());\n') - shell_file.write(' makeshared.ReplaceAll("-W ", "-Wno-deprecated-declarations -Wno-deprecated -Wno-unused-local-typedefs -Wno-attributes ");\n') - shell_file.write(' makeshared.ReplaceAll("-Woverloaded-virtual ", " ");\n') - shell_file.write(' makeshared.ReplaceAll("-Wshadow ", " -std=c++0x -D__USE_XOPEN2K8 ");\n') - shell_file.write(' cout << "Compilling with the following arguments: " << makeshared << endl;\n') - shell_file.write(' gSystem->SetMakeSharedLib(makeshared);\n') - shell_file.write(' gSystem->SetIncludePath( "-I$ROOFITSYS/include" );\n') - shell_file.write(' .x %s+' % argv[1] + function_argument + '\n') - shell_file.write(' .q\n') - shell_file.write('EOF\n\n') - elif argv[0]=='FWLITE': - function_argument='('+function_argument+')' - if Jobs_RunHere==0: - shell_file.write('cd -\n') - shell_file.write('root -l -b << EOF\n') - shell_file.write(' TString makeshared(gSystem->GetMakeSharedLib());\n') - shell_file.write(' makeshared.ReplaceAll("-W ", "-Wno-deprecated-declarations -Wno-deprecated -Wno-unused-local-typedefs -Wno-attributes ");\n') - shell_file.write(' makeshared.ReplaceAll("-Woverloaded-virtual ", " ");\n') - shell_file.write(' makeshared.ReplaceAll("-Wshadow ", " -std=c++0x -D__USE_XOPEN2K8 ");\n') - shell_file.write(' cout << "Compilling with the following arguments: " << makeshared << endl;\n') - shell_file.write(' gSystem->SetMakeSharedLib(makeshared);\n') - shell_file.write(' gSystem->SetIncludePath("-I$ROOFITSYS/include");\n') - shell_file.write(' gSystem->Load("libFWCoreFWLite");\n') - shell_file.write(' AutoLibraryLoader::enable();\n') - shell_file.write(' gSystem->Load("libDataFormatsFWLite.so");\n') - shell_file.write(' gSystem->Load("libAnalysisDataFormatsSUSYBSMObjects.so");\n') - shell_file.write(' gSystem->Load("libDataFormatsVertexReco.so");\n') - shell_file.write(' gSystem->Load("libDataFormatsHepMCCandidate.so");\n') - shell_file.write(' 
gSystem->Load("libPhysicsToolsUtilities.so");\n') - shell_file.write(' gSystem->Load("libdcap.so");\n') - shell_file.write(' .x %s+' % argv[1] + function_argument + '\n') - shell_file.write(' .q\n') - shell_file.write('EOF\n\n') - elif argv[0]=='CMSSW' or argv[0]=='LIP': - CreateTheConfigFile(argv); - if Jobs_RunHere==0: - shell_file.write('cd -\n') - for config in Path_Cfg: - shell_file.write('cmsRun ' + os.getcwd() + '/'+config + '\n') - else: - print #Program to use is not specified... Guess it is bash command - shell_file.write('#Program to use is not specified... Guess it is bash command\n') - shell_file.write(argv[1] + " %s\n" % function_argument) - - for i in range(len(Jobs_FinalCmds)): - #shell_file.write('echo ' + Jobs_FinalCmds[i]+'\n') - shell_file.write(Jobs_FinalCmds[i]+'\n') - if Jobs_RunHere==0: - outDir = Farm_Directories[3] - if(not os.path.isabs(Path_Shell)): outDir = os.getcwd()+'/'+outDir; - shell_file.write('mv '+ Jobs_Name+'* '+outDir+'\n') - shell_file.close() - os.system("chmod 777 "+Path_Shell) + shell_file.write('cd ' + os.getcwd() + '\n') + shell_file.write('eval `scramv1 runtime -sh`\n') + for i in range(len(Jobs_InitCmds)): + shell_file.write(Jobs_InitCmds[i]+'\n') + + if argv[0]=='BASH': + if Jobs_RunHere==0: + shell_file.write('cd -\n') + shell_file.write(argv[1] + " %s\n" % function_argument) + elif argv[0]=='ROOT': + function_argument='('+function_argument+')' + if Jobs_RunHere==0: + shell_file.write('cd -\n') + shell_file.write('root -l -b << EOF\n') + shell_file.write(' TString makeshared(gSystem->GetMakeSharedLib());\n') + shell_file.write(' makeshared.ReplaceAll("-W ", "-Wno-deprecated-declarations -Wno-deprecated -Wno-unused-local-typedefs -Wno-attributes ");\n') + shell_file.write(' makeshared.ReplaceAll("-Woverloaded-virtual ", " ");\n') + shell_file.write(' makeshared.ReplaceAll("-Wshadow ", " -std=c++0x -D__USE_XOPEN2K8 ");\n') + shell_file.write(' cout << "Compilling with the following arguments: " << makeshared << 
endl;\n') + shell_file.write(' gSystem->SetMakeSharedLib(makeshared);\n') + shell_file.write(' gSystem->SetIncludePath( "-I$ROOFITSYS/include" );\n') + shell_file.write(' .x %s+' % argv[1] + function_argument + '\n') + shell_file.write(' .q\n') + shell_file.write('EOF\n\n') + elif argv[0]=='FWLITE': + function_argument='('+function_argument+')' + if Jobs_RunHere==0: + shell_file.write('cd -\n') + shell_file.write('root -l -b << EOF\n') + shell_file.write(' TString makeshared(gSystem->GetMakeSharedLib());\n') + shell_file.write(' makeshared.ReplaceAll("-W ", "-Wno-deprecated-declarations -Wno-deprecated -Wno-unused-local-typedefs -Wno-attributes ");\n') + shell_file.write(' makeshared.ReplaceAll("-Woverloaded-virtual ", " ");\n') + shell_file.write(' makeshared.ReplaceAll("-Wshadow ", " -std=c++0x -D__USE_XOPEN2K8 ");\n') + shell_file.write(' cout << "Compilling with the following arguments: " << makeshared << endl;\n') + shell_file.write(' gSystem->SetMakeSharedLib(makeshared);\n') + shell_file.write(' gSystem->SetIncludePath("-I$ROOFITSYS/include");\n') + shell_file.write(' gSystem->Load("libFWCoreFWLite");\n') + shell_file.write(' AutoLibraryLoader::enable();\n') + shell_file.write(' gSystem->Load("libDataFormatsFWLite.so");\n') + shell_file.write(' gSystem->Load("libAnalysisDataFormatsSUSYBSMObjects.so");\n') + shell_file.write(' gSystem->Load("libDataFormatsVertexReco.so");\n') + shell_file.write(' gSystem->Load("libDataFormatsHepMCCandidate.so");\n') + shell_file.write(' gSystem->Load("libPhysicsToolsUtilities.so");\n') + shell_file.write(' gSystem->Load("libdcap.so");\n') + shell_file.write(' .x %s+' % argv[1] + function_argument + '\n') + shell_file.write(' .q\n') + shell_file.write('EOF\n\n') + elif argv[0]=='CMSSW' or argv[0]=='LIP': + CreateTheConfigFile(argv) + if Jobs_RunHere==0: + shell_file.write('cd -\n') + for config in Path_Cfg: + shell_file.write('cmsRun ' + os.getcwd() + '/'+config + '\n') + else: + print("#Program to use is not specified... 
Guess it is bash command") + shell_file.write('#Program to use is not specified... Guess it is bash command\n') + shell_file.write(argv[1] + " %s\n" % function_argument) + + for i in range(len(Jobs_FinalCmds)): + #shell_file.write('echo ' + Jobs_FinalCmds[i]+'\n') + shell_file.write(Jobs_FinalCmds[i]+'\n') + if Jobs_RunHere==0: + outDir = Farm_Directories[3] + + if(not os.path.isabs(Path_Shell)): + outDir = os.getcwd()+'/'+outDir + shell_file.write('mv '+ Jobs_Name+'* '+outDir+'\n') + + shell_file.close() + os.system("chmod 777 "+Path_Shell) def CreateCrabConfig(crabWorkDir, crabConfigPath, exePath, cfgPath): global Jobs_CRABDataset @@ -284,186 +284,190 @@ def CreateCrabConfig(crabWorkDir, crabConfigPath, exePath, cfgPath): def CreateTheCmdFile(): - global subTool - global Path_Cmd - global CopyRights - Path_Cmd = Farm_Directories[1]+Jobs_Name+'.cmd' - cmd_file=open(Path_Cmd,'w') - if subTool=='condor': - cmd_file.write('Universe = vanilla\n') - cmd_file.write('Environment = CONDORJOBID=$(Process)\n') - cmd_file.write('notification = Error\n') - #site specific code - if (commands.getstatusoutput("hostname -f")[1].find("ucl.ac.be" )!=-1): cmd_file.write('requirements = (CMSFARM=?=True)&&(Memory > 200)\n') - elif(commands.getstatusoutput("uname -n" )[1].find("purdue.edu")!=-1): cmd_file.write('requirements = (request_memory > 200)\n') - else: cmd_file.write('requirements = (Memory > 200)\n') - cmd_file.write('should_transfer_files = YES\n') - cmd_file.write('when_to_transfer_output = ON_EXIT\n') - else: - cmd_file.write(CopyRights + '\n') - cmd_file.close() - -def AddJobToCmdFile(): - global subTool - global Path_Shell - global Path_Cmd - global Path_Out - global Path_Log - global absoluteShellPath - global Jobs_EmailReport - Path_Out = Farm_Directories[3] + Jobs_Index + Jobs_Name - cmd_file=open(Path_Cmd,'a') - if subTool=='bsub': - absoluteShellPath = Path_Shell; - if(not os.path.isabs(absoluteShellPath)): absoluteShellPath= os.getcwd() + "/"+absoluteShellPath - 
temp = "bsub -q " + Jobs_Queue + " -R " + Jobs_LSFRequirement + " -J " + Jobs_Name+Jobs_Index - if(not Jobs_EmailReport): - absoluteOutPath = Path_Out - if(not os.path.isabs(absoluteOutPath)): - absoluteOutPath = os.getcwd() + "/" + Path_Out - temp = temp + " -oo " + absoluteOutPath + ".cout" - temp = temp + " '" + absoluteShellPath + "'\n" - cmd_file.write(temp) - elif subTool=='qsub': - absoluteShellPath = Path_Shell; - if(not os.path.isabs(absoluteShellPath)): absoluteShellPath= os.getcwd() + "/" + absoluteShellPath - cmd_file.write("qsub " + absoluteShellPath + "\n") - elif subTool=='crab': - crabWorkDirPath = Farm_Directories[1] - crabConfigPath = Farm_Directories[1]+'crabConfig_'+Jobs_Index+Jobs_Name+'_cfg.py' - crabExePath = Farm_Directories[1]+'crabExe.sh' - crabParamPath = Farm_Directories[1]+'crabParam_'+Jobs_Index+Jobs_Name+'_cfg.py' - CreateCrabConfig(crabWorkDirPath, crabConfigPath, crabExePath, crabParamPath) - cmd_file.write("crab submit -c " + crabConfigPath + "\n") + global subTool + global Path_Cmd + global CopyRights + Path_Cmd = Farm_Directories[1]+Jobs_Name+'.cmd' + cmd_file=open(Path_Cmd,'w') + if subTool=='condor': + cmd_file.write('Universe = vanilla\n') + cmd_file.write('Environment = CONDORJOBID=$(Process)\n') + cmd_file.write('notification = Error\n') + #site specific code + if (commands.getstatusoutput("hostname -f")[1].find("ucl.ac.be" )!=-1): + cmd_file.write('requirements = (CMSFARM=?=True)&&(Memory > 200)\n') + elif(commands.getstatusoutput("uname -n" )[1].find("purdue.edu")!=-1): + cmd_file.write('requirements = (request_memory > 200)\n') else: - cmd_file.write('\n') - cmd_file.write('Executable = %s\n' % Path_Shell) - cmd_file.write('output = %s.out\n' % Path_Log) - cmd_file.write('error = %s.err\n' % Path_Log) - cmd_file.write('log = %s.log\n' % Path_Log) #/dev/null\n') - cmd_file.write('Queue 1\n') + cmd_file.write('requirements = (Memory > 200)\n') + cmd_file.write('should_transfer_files = YES\n') + 
cmd_file.write('when_to_transfer_output = ON_EXIT\n') + else: + cmd_file.write(CopyRights + '\n') cmd_file.close() +def AddJobToCmdFile(): + global subTool + global Path_Shell + global Path_Cmd + global Path_Out + global Path_Log + global absoluteShellPath + global Jobs_EmailReport + Path_Out = Farm_Directories[3] + Jobs_Index + Jobs_Name + cmd_file=open(Path_Cmd,'a') + if subTool=='bsub': + absoluteShellPath = Path_Shell; + if(not os.path.isabs(absoluteShellPath)): + absoluteShellPath= os.getcwd() + "/"+absoluteShellPath + temp = "bsub -q " + Jobs_Queue + " -R " + Jobs_LSFRequirement + " -J " + Jobs_Name+Jobs_Index + if(not Jobs_EmailReport): + absoluteOutPath = Path_Out + if(not os.path.isabs(absoluteOutPath)): + absoluteOutPath = os.getcwd() + "/" + Path_Out + temp = temp + " -oo " + absoluteOutPath + ".cout" + temp = temp + " '" + absoluteShellPath + "'\n" + cmd_file.write(temp) + elif subTool=='qsub': + absoluteShellPath = Path_Shell; + if(not os.path.isabs(absoluteShellPath)): absoluteShellPath= os.getcwd() + "/" + absoluteShellPath + cmd_file.write("qsub " + absoluteShellPath + "\n") + elif subTool=='crab': + crabWorkDirPath = Farm_Directories[1] + crabConfigPath = Farm_Directories[1]+'crabConfig_'+Jobs_Index+Jobs_Name+'_cfg.py' + crabExePath = Farm_Directories[1]+'crabExe.sh' + crabParamPath = Farm_Directories[1]+'crabParam_'+Jobs_Index+Jobs_Name+'_cfg.py' + CreateCrabConfig(crabWorkDirPath, crabConfigPath, crabExePath, crabParamPath) + cmd_file.write("crab submit -c " + crabConfigPath + "\n") + else: + cmd_file.write('\n') + cmd_file.write('Executable = %s\n' % Path_Shell) + cmd_file.write('output = %s.out\n' % Path_Log) + cmd_file.write('error = %s.err\n' % Path_Log) + cmd_file.write('log = %s.log\n' % Path_Log) #/dev/null\n') + cmd_file.write('Queue 1\n') + cmd_file.close() + def CreateDirectoryStructure(FarmDirectory): - global Jobs_Name - global Farm_Directories - Farm_Directories = [FarmDirectory+'/', FarmDirectory+'/inputs/', FarmDirectory+'/logs/', 
FarmDirectory+'/outputs/'] - for i in range(0,len(Farm_Directories)): - if os.path.isdir(Farm_Directories[i]) == False: - os.system('mkdir -p ' + Farm_Directories[i]) + global Jobs_Name + global Farm_Directories + Farm_Directories = [FarmDirectory+'/', FarmDirectory+'/inputs/', FarmDirectory+'/logs/', FarmDirectory+'/outputs/'] + for i in range(0,len(Farm_Directories)): + if os.path.isdir(Farm_Directories[i]) == False: + os.system('mkdir -p ' + Farm_Directories[i]) def SendCluster_LoadInputFiles(path, NJobs): - global Jobs_Inputs - input_file = open(path,'r') - input_lines = input_file.readlines() - input_file.close() - #input_lines.sort() + global Jobs_Inputs + input_file = open(path,'r') + input_lines = input_file.readlines() + input_file.close() + #input_lines.sort() - BlockSize = (len(input_lines)/NJobs) - LineIndex = 0 - JobIndex = 0 - BlockIndex = 0 - Jobs_Inputs = [""] - while LineIndex < len(input_lines): - Jobs_Inputs[JobIndex] += input_lines[LineIndex] - LineIndex +=1 - BlockIndex+=1 - if BlockIndex>BlockSize: - BlockIndex = 0 - JobIndex += 1 - Jobs_Inputs.append("") - return JobIndex+1 - + BlockSize = (len(input_lines)/NJobs) + LineIndex = 0 + JobIndex = 0 + BlockIndex = 0 + Jobs_Inputs = [""] + while LineIndex < len(input_lines): + Jobs_Inputs[JobIndex] += input_lines[LineIndex] + LineIndex +=1 + BlockIndex+=1 + if BlockIndex>BlockSize: + BlockIndex = 0 + JobIndex += 1 + Jobs_Inputs.append("") + return JobIndex+1 def SendCluster_Create(FarmDirectory, JobName): - global subTool - global Jobs_Name - global Jobs_Count - global Farm_Directories - - #determine what is the submission system available, or use condor - if(subTool==''): - if( not commands.getstatusoutput("which bjobs")[1].startswith("which:")): subTool = 'bsub' - elif( not commands.getstatusoutput("which qsub")[1].startswith("which:")): subTool = 'qsub' - else: subTool = 'condor' - if(Jobs_Queue.find('crab')>=0): subTool = 'crab' + global subTool + global Jobs_Name + global Jobs_Count + global 
Farm_Directories + + #determine what is the submission system available, or use condor + if(subTool==''): + if( not commands.getstatusoutput("which bjobs")[1].startswith("which:")): + subTool = 'bsub' + elif( not commands.getstatusoutput("which qsub")[1].startswith("which:")): + subTool = 'qsub' + else: + subTool = 'condor' + if(Jobs_Queue.find('crab')>=0): + subTool = 'crab' - Jobs_Name = JobName - Jobs_Count = 0 + Jobs_Name = JobName + Jobs_Count = 0 - CreateDirectoryStructure(FarmDirectory) - CreateTheCmdFile() + CreateDirectoryStructure(FarmDirectory) + CreateTheCmdFile() def SendCluster_Push(Argv): - global Farm_Directories - global Jobs_Count - global Jobs_Index - global Path_Shell - global Path_Log - - Jobs_Index = "%04i_" % Jobs_Count - if Jobs_Count==0 and (Argv[0]=="ROOT" or Argv[0]=="FWLITE"): - #First Need to Compile the macro --> Create a temporary shell path with no arguments - print "Compiling the Macro..." - CreateTheShellFile([Argv[0],Argv[1]]) - os.system('sh '+Path_Shell) - os.system('rm '+Path_Shell) - print "Getting the jobs..." 
- print Argv - CreateTheShellFile(Argv) - AddJobToCmdFile() - Jobs_Count = Jobs_Count+1 + global Farm_Directories + global Jobs_Count + global Jobs_Index + global Path_Shell + global Path_Log + + Jobs_Index = "%04i_" % Jobs_Count + if Jobs_Count==0 and (Argv[0]=="ROOT" or Argv[0]=="FWLITE"): + #First Need to Compile the macro --> Create a temporary shell path with no arguments + print("Compiling the Macro...") + CreateTheShellFile([Argv[0],Argv[1]]) + os.system('sh '+Path_Shell) + os.system('rm '+Path_Shell) + print("Getting the jobs...") + print(Argv) + CreateTheShellFile(Argv) + AddJobToCmdFile() + Jobs_Count = Jobs_Count+1 def SendCluster_Submit(): - global subTool - global CopyRights - global Jobs_Count - global Path_Cmd + global subTool + global CopyRights + global Jobs_Count + global Path_Cmd - if subTool=='bsub' or subTool=='qsub': os.system("sh " + Path_Cmd) - elif subTool=='crab': os.system("sh " + Path_Cmd) - else: os.system("condor_submit " + Path_Cmd) + if subTool=='bsub' or subTool=='qsub': os.system("sh " + Path_Cmd) + elif subTool=='crab': os.system("sh " + Path_Cmd) + else: os.system("condor_submit " + Path_Cmd) - print '\n'+CopyRights - print '%i Job(s) has/have been submitted on the Computing Cluster' % Jobs_Count + print('\n'+CopyRights) + print('%i Job(s) has/have been submitted on the Computing Cluster' % Jobs_Count) def SendSingleJob(FarmDirectory, JobName, Argv): - SendCluster_Create(FarmDirectory, JobName, Argv) - SendCluster_Push(FarmDirectory, JobName, Argv) - SendCluster_Submit(FarmDirectory, JobName,Argv) + SendCluster_Create(FarmDirectory, JobName, Argv) + SendCluster_Push(FarmDirectory, JobName, Argv) + SendCluster_Submit(FarmDirectory, JobName,Argv) def SendCMSJobs(FarmDirectory, JobName, ConfigFile, InputFiles, NJobs, Argv): - SendCluster_Create(FarmDirectory, JobName) - NJobs = SendCluster_LoadInputFiles(InputFiles, NJobs) - for i in range(NJobs): - SendCluster_Push (["CMSSW", ConfigFile] + Argv) - SendCluster_Submit() - - + 
SendCluster_Create(FarmDirectory, JobName) + NJobs = SendCluster_LoadInputFiles(InputFiles, NJobs) + for i in range(NJobs): + SendCluster_Push (["CMSSW", ConfigFile] + Argv) + SendCluster_Submit() def GetListOfFiles(Prefix, InputPattern, Suffix): - List = [] - - if(InputPattern.find('/store/cmst3')==0) : - index = InputPattern.rfind('/') - Listtmp = commands.getstatusoutput('cmsLs ' + InputPattern[0:index] + ' | awk \'{print $5}\'')[1].split('\n') - pattern = InputPattern[index+1:len(InputPattern)] - for file in Listtmp: + List = [] + + if(InputPattern.find('/store/cmst3')==0) : + index = InputPattern.rfind('/') + Listtmp = commands.getstatusoutput('cmsLs ' + InputPattern[0:index] + ' | awk \'{print $5}\'')[1].split('\n') + pattern = InputPattern[index+1:len(InputPattern)] + for file in Listtmp: if fnmatch.fnmatch(file, pattern): List.append(InputPattern[0:index]+'/'+file) - elif(InputPattern.find('/castor/')==0): - index = InputPattern.rfind('/') - Listtmp = commands.getstatusoutput('rfdir ' + InputPattern[0:index] + ' | awk \'{print $9}\'')[1].split('\n') - pattern = InputPattern[index+1:len(InputPattern)] - for file in Listtmp: + elif(InputPattern.find('/castor/')==0): + index = InputPattern.rfind('/') + Listtmp = commands.getstatusoutput('rfdir ' + InputPattern[0:index] + ' | awk \'{print $9}\'')[1].split('\n') + pattern = InputPattern[index+1:len(InputPattern)] + for file in Listtmp: if fnmatch.fnmatch(file, pattern): List.append(InputPattern[0:index]+'/'+file) - else : - List = glob.glob(InputPattern) - - List = sorted(List) - for i in range(len(List)): - List[i] = Prefix + List[i] + Suffix - return natural_sort(List) + else : + List = glob.glob(InputPattern) + List = sorted(List) + for i in range(len(List)): + List[i] = Prefix + List[i] + Suffix + return natural_sort(List) def ListToString(InputList): outString = "" @@ -484,55 +488,44 @@ def FileToList(path): input_lines.sort() return natural_sort(input_lines) - - - - - def SendCMSMergeJob(FarmDirectory, 
JobName, InputFiles, OutputFile, KeepStatement): - SendCluster_Create(FarmDirectory, JobName) - Temp_Cfg = Farm_Directories[1]+Jobs_Index+Jobs_Name+'_TEMP_cfg.py' - - if len(InputFiles)==0: - print 'Empty InputFile List for Job named "%s", Job will not be submitted' % JobName - return - - InputFilesString = '' - InputFiles = natural_sort(InputFiles) - for i in range(len(InputFiles)): - InputFilesString += "process.source.fileNames.extend([" + InputFiles[i].replace(',',' ') + '])\n' - - - cfg_file=open(Temp_Cfg,'w') - cfg_file.write('import FWCore.ParameterSet.Config as cms\n') - cfg_file.write('process = cms.Process("Merge")\n') - cfg_file.write('\n') - cfg_file.write('process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )\n') - cfg_file.write('process.load("FWCore.MessageService.MessageLogger_cfi")\n') - cfg_file.write('\n') - cfg_file.write('process.MessageLogger.cerr.FwkReport.reportEvery = 50000\n') - cfg_file.write('process.source = cms.Source("PoolSource",\n') - cfg_file.write(' skipBadFiles = cms.untracked.bool(True),\n') - cfg_file.write(' duplicateCheckMode = cms.untracked.string("noDuplicateCheck"),\n') - cfg_file.write(' fileNames = cms.untracked.vstring(\n') - cfg_file.write(' )\n') - cfg_file.write(')\n') - cfg_file.write('\n') -# cfg_file.write('process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(10) )\n') - cfg_file.write('%s\n' % InputFilesString) - cfg_file.write('process.OUT = cms.OutputModule("PoolOutputModule",\n') - cfg_file.write(' outputCommands = cms.untracked.vstring(%s),\n' % KeepStatement) - cfg_file.write(' eventAutoFlushCompressedSize=cms.untracked.int32(15*1024*1024),\n') - cfg_file.write(' fileName = cms.untracked.string(%s)\n' % OutputFile) - cfg_file.write(')\n') - cfg_file.write('\n') - cfg_file.write('process.endPath = cms.EndPath(process.OUT)\n') - cfg_file.close() - SendCluster_Push (["CMSSW", Temp_Cfg]) - SendCluster_Submit() - os.system('rm '+ Temp_Cfg) - - - - - + 
SendCluster_Create(FarmDirectory, JobName) + Temp_Cfg = Farm_Directories[1]+Jobs_Index+Jobs_Name+'_TEMP_cfg.py' + + if len(InputFiles)==0: + print('Empty InputFile List for Job named "%s", Job will not be submitted' % JobName) + return + + InputFilesString = '' + InputFiles = natural_sort(InputFiles) + for i in range(len(InputFiles)): + InputFilesString += "process.source.fileNames.extend([" + InputFiles[i].replace(',',' ') + '])\n' + + cfg_file=open(Temp_Cfg,'w') + cfg_file.write('import FWCore.ParameterSet.Config as cms\n') + cfg_file.write('process = cms.Process("Merge")\n') + cfg_file.write('\n') + cfg_file.write('process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )\n') + cfg_file.write('process.load("FWCore.MessageService.MessageLogger_cfi")\n') + cfg_file.write('\n') + cfg_file.write('process.MessageLogger.cerr.FwkReport.reportEvery = 50000\n') + cfg_file.write('process.source = cms.Source("PoolSource",\n') + cfg_file.write(' skipBadFiles = cms.untracked.bool(True),\n') + cfg_file.write(' duplicateCheckMode = cms.untracked.string("noDuplicateCheck"),\n') + cfg_file.write(' fileNames = cms.untracked.vstring(\n') + cfg_file.write(' )\n') + cfg_file.write(')\n') + cfg_file.write('\n') + # cfg_file.write('process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(10) )\n') + cfg_file.write('%s\n' % InputFilesString) + cfg_file.write('process.OUT = cms.OutputModule("PoolOutputModule",\n') + cfg_file.write(' outputCommands = cms.untracked.vstring(%s),\n' % KeepStatement) + cfg_file.write(' eventAutoFlushCompressedSize=cms.untracked.int32(15*1024*1024),\n') + cfg_file.write(' fileName = cms.untracked.string(%s)\n' % OutputFile) + cfg_file.write(')\n') + cfg_file.write('\n') + cfg_file.write('process.endPath = cms.EndPath(process.OUT)\n') + cfg_file.close() + SendCluster_Push (["CMSSW", Temp_Cfg]) + SendCluster_Submit() + os.system('rm '+ Temp_Cfg) diff --git a/scripts/DAS_wrapInfo.py b/scripts/DAS_wrapInfo.py index 18b94ba..b11c5b6 
100644 --- a/scripts/DAS_wrapInfo.py +++ b/scripts/DAS_wrapInfo.py @@ -9,7 +9,6 @@ import bisect import random import signal -import cPickle import difflib import argparse import functools @@ -28,7 +27,7 @@ def find_key(collection, key): for item in collection: if key in item: return item[key] - print collection + print(collection) raise KeyError(key) ################################################################################ @@ -42,7 +41,7 @@ def das_client(query, check_key = None): """ error = True - for i in xrange(5): # maximum of 5 tries + for i in range(5): # maximum of 5 tries das_data = cmssw_das_client.get_data(query, limit = 0) if das_data["status"] == "ok": @@ -62,9 +61,8 @@ def das_client(query, check_key = None): break if das_data["status"] == "error": - print_msg("DAS query '{}' failed 5 times. " - "The last time for the the following reason:".format(query)) - print das_data["reason"] + print_msg("DAS query '{}' failed 5 times. \nThe last time for the the following reason:".format(query)) + print(das_data["reason"]) sys.exit(1) return das_data["data"] @@ -139,10 +137,9 @@ def print_msg(text, line_break = True, log_file = None): msg = " >>> " + str(text) if line_break: - print msg + print(msg) else: - print msg, - sys.stdout.flush() + print(msg, end=""); sys.stdout.flush() if log_file: with open(log_file, "a") as f: f.write(msg+"\n") return msg @@ -163,11 +160,11 @@ def main(): print_msg("\t"+d) print_msg("This may take a while...") - result = pool.map_async(get_size_per_dataset,datasets).get(sys.maxint) + result = pool.map_async(get_size_per_dataset,datasets).get(sys.maxsize) for count, elem in enumerate(result): - print "==>",datasets[count],float(elem)/(1000*1000*1000),"GB" + print("==>",datasets[count],float(elem)/(1000*1000*1000),"GB") - print "total=",float(sum(result))/(1000*1000*1000),"GB" + print("total=",float(sum(result))/(1000*1000*1000),"GB") ################################################################################ diff --git
a/scripts/GetAutos/getAutoAlCa.py b/scripts/GetAutos/getAutoAlCa.py index 7f62380..b449518 100755 --- a/scripts/GetAutos/getAutoAlCa.py +++ b/scripts/GetAutos/getAutoAlCa.py @@ -13,8 +13,8 @@ ##################################################################### def getCMSSWRelease( ): CMSSW_VERSION='CMSSW_VERSION' - if not os.environ.has_key(CMSSW_VERSION): - print "\n CMSSW not properly set. Exiting" + if not CMSSW_VERSION in os.environ: + print("\n CMSSW not properly set. Exiting") sys.exit(1) release = os.getenv(CMSSW_VERSION) return release @@ -35,7 +35,7 @@ def getCMSSWRelease( ): (options, arguments) = parser.parse_args() theRelease = getCMSSWRelease() - print "theRelease:",theRelease + print("theRelease:",theRelease) if(theRelease): from Configuration.AlCa.autoAlca import AlCaRecoMatrix @@ -46,9 +46,9 @@ def getCMSSWRelease( ): listOfKeys.append(key) if(options.key!="def"): - print options.key,":",AlCaRecoMatrix[options.key] + print(options.key,":",AlCaRecoMatrix[options.key]) else: - print listOfKeys + print(listOfKeys) #pp = pprint.PrettyPrinter(depth=6) #pp.pprint(autoCond) diff --git a/scripts/GetAutos/getAutoCond.py b/scripts/GetAutos/getAutoCond.py index 12e00e8..2673070 100755 --- a/scripts/GetAutos/getAutoCond.py +++ b/scripts/GetAutos/getAutoCond.py @@ -13,8 +13,8 @@ ##################################################################### def getCMSSWRelease( ): CMSSW_VERSION='CMSSW_VERSION' - if not os.environ.has_key(CMSSW_VERSION): - print "\n CMSSW not properly set. Exiting" + if not CMSSW_VERSION in os.environ: + print("\n CMSSW not properly set. 
Exiting") sys.exit(1) release = os.getenv(CMSSW_VERSION) return release @@ -35,7 +35,7 @@ def getCMSSWRelease( ): (options, arguments) = parser.parse_args() theRelease = getCMSSWRelease() - print "theRelease:",theRelease + print("theRelease:",theRelease) if(theRelease): from Configuration.AlCa.autoCond import autoCond @@ -46,9 +46,9 @@ def getCMSSWRelease( ): listOfKeys.append(key) if(options.key!="def"): - print options.key,":",autoCond[options.key] + print(options.key,":",autoCond[options.key]) else: - #print listOfKeys + #print(listOfKeys) pp = pprint.PrettyPrinter(depth=6) pp.pprint(autoCond) diff --git a/scripts/SplitSqlite.py b/scripts/SplitSqlite.py index e4dd9af..70fce81 100755 --- a/scripts/SplitSqlite.py +++ b/scripts/SplitSqlite.py @@ -15,8 +15,8 @@ for line in subprocess.Popen("conddb --noLimit --db dump_one_shot_v1.5_447_232134_256483.db list EcalLaserAPDPNRatios_20151007_232134_256483",shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.readlines(): if "EcalLaserAPDPNRatios" in line: IOVs.append((line.split()[2].strip(')')).strip('(')) -print IOVs -print "There are %s IOVs!"%len(IOVs) +print(IOVs) +print("There are %s IOVs!"%len(IOVs)) #Prepare the conddb_import commands template: #CommandTemplate="conddb_import -f sqlite:SiStripNoise_GR10_v3_offline.db -c sqlite:SiStripNoise_GR10_v3_offline_%s_%s.db -i SiStripNoise_GR10_v4_offline -t SiStripNoise_GR10_v4_offline -b %s -e %s" @@ -27,20 +27,20 @@ RelevantIOVs.append((RelevantIOVs[-1][2],IOVs[-1],IOVs[-1])) -print RelevantIOVs +print(RelevantIOVs) for i,splitIOVs in enumerate(RelevantIOVs): begin=splitIOVs[0] end=splitIOVs[1] upperLimit=splitIOVs[1] - print i,begin,end,upperLimit + print(i,begin,end,upperLimit) command="conddb_import -f sqlite:dump_one_shot_v1.5_447_232134_256483.db -c sqlite:EcalLaserAPDPNRatios_"+begin+"_"+end+".db -i EcalLaserAPDPNRatios_20151007_232134_256483 -t EcalLaserAPDPNRatios -b "+begin\ +" -e "+end #+" -e "+upperLimit - print command + print(command) #Now if 
we want to execute it inside Python uncomment the following two lines: STDOUT=subprocess.Popen(command,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT).stdout.read() - print STDOUT + print(STDOUT) #for counter in range(0,len(IOVs),5): # if counter+10): listOfRecords.append(tag_list[0]) listOfRefTags.append(tag_list[2]) listOfTarTags.append(tag_list[3]) listOfLabels.append(tag_list[1]) - #print listOfRefTags - #print listOfTarTags - #print listOfRecords + #print(listOfRefTags) + #print(listOfTarTags) + #print(listOfRecords) - print "The two GTs differ by",len(listOfRefTags),"tags" + print("The two GTs differ by",len(listOfRefTags),"tags") fout=open("diff_"+opts.refGT+"_vs_"+opts.tarGT+"_atRun_"+opts.testRunNumber+".txt",'w+b') fout2=open("diff_"+opts.refGT+"_vs_"+opts.tarGT+"_atRun_"+opts.testRunNumber+".twiki",'w+b') @@ -101,18 +100,18 @@ def main(): else: e_run = int(filteredIOVs[1].replace(')','')) if (b_run < int(opts.testRunNumber) and e_run > int(opts.testRunNumber)): - print "===================================================================" + print("===================================================================") fout.write("=================================================================== \n") count = count+1 - #print rawIOVs[0] + #print(rawIOVs[0]) listOfTags=[] for element in rawIOVs[0].split(" "): if ("pro::" in element): - #print element + #print(element) listOfTags.append(element.replace("pro::","")) - #print rawIOVs[1] - #print b_run,"-",e_run,filteredIOVs[2],filteredIOVs[3] - print "N. "+str(count)+" |Record:",listOfRecords[i]," |label ("+listOfLabels[i]+") differs in IOV:",b_run,"-",e_run + #print(rawIOVs[1]) + #print(b_run,"-",e_run,filteredIOVs[2],filteredIOVs[3]) + print("N. "+str(count)+" |Record:",listOfRecords[i]," |label ("+listOfLabels[i]+") differs in IOV:",b_run,"-",e_run) fout.write("N. 
"+str(count)+" |Record: "+listOfRecords[i]+" |label ("+listOfLabels[i]+") differs in IOV: "+str(b_run)+"-"+str(e_run)+" \n") #out3 = getCommandOutput("conddb search "+filteredIOVs[2]+" --limit 1") @@ -130,7 +129,7 @@ def main(): p2 = subprocess.Popen(["conddb","search",filteredIOVs[3]], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out4, err4) = p2.communicate() - #print out3,out4 + #print(out3,out4) rawInfoRef = out3.split('\n') rawInfoRefSplit = filter(None,rawInfoRef[5].split(" ")) rawInfoTar = out4.split('\n') @@ -145,14 +144,14 @@ def main(): myDict[listOfRecords[i]] = (rawInfoRefSplit[0],rawInfoRefSplit[3],rawInfoRefSplit[4],listOfTags[0],rawInfoTarSplit[0],rawInfoTarSplit[3],rawInfoTarSplit[4],listOfTags[1]) if( refStripDateTime <= datetime.date(2015,1,6) ): - print FAIL + rawInfoRefSplit[0]," ",rawInfoRefSplit[3],rawInfoRefSplit[4]," ",listOfTags[0] + ENDC + print(FAIL + rawInfoRefSplit[0]," ",rawInfoRefSplit[3],rawInfoRefSplit[4]," ",listOfTags[0] + ENDC) else: - print rawInfoRefSplit[0]," ",rawInfoRefSplit[3],rawInfoRefSplit[4]," ",listOfTags[0] + print(rawInfoRefSplit[0]," ",rawInfoRefSplit[3],rawInfoRefSplit[4]," ",listOfTags[0]) if( tarStripDateTime < datetime.date(2015,1,6) ): - print FAIL + rawInfoTarSplit[0]," ",rawInfoTarSplit[3],rawInfoTarSplit[4]," ",listOfTags[1] + ENDC + print(FAIL + rawInfoTarSplit[0]," ",rawInfoTarSplit[3],rawInfoTarSplit[4]," ",listOfTags[1] + ENDC) else: - print rawInfoTarSplit[0]," ",rawInfoTarSplit[3],rawInfoTarSplit[4]," ",listOfTags[1] + print(rawInfoTarSplit[0]," ",rawInfoTarSplit[3],rawInfoTarSplit[4]," ",listOfTags[1]) fout2.write("| *Record* | *"+opts.refGT+"* | *"+opts.tarGT+"* |\n") for key in myDict: diff --git a/scripts/createGTDescription.py b/scripts/createGTDescription.py index b427158..8616975 100755 --- a/scripts/createGTDescription.py +++ b/scripts/createGTDescription.py @@ -17,7 +17,7 @@ import copy import string, re import subprocess -import ConfigParser, json +import configparser, json import os.path 
from optparse import OptionParser @@ -26,7 +26,7 @@ ####################--- Classes ---############################ -class BetterConfigParser(ConfigParser.ConfigParser): +class BetterConfigParser(configparser.ConfigParser): ############################################## def optionxform(self, optionstr): @@ -52,7 +52,7 @@ def __updateDict( self, dictionary, section ): if "local"+section.title() in self.sections(): for option in self.options( "local"+section.title() ): result[option] = self.get( "local"+section.title(),option ) - except ConfigParser.NoSectionError, section: + except configparser.NoSectionError as section: msg = ("%s in configuration files. This section is mandatory." %(str(section).replace(":", "", 1))) #raise AllInOneError(msg) @@ -64,7 +64,7 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): for option in demandPars: try: result[option] = self.get( section, option ) - except ConfigParser.NoOptionError, globalSectionError: + except configparser.NoOptionError as globalSectionError: globalSection = str( globalSectionError ).split( "'" )[-2] splittedSectionName = section.split( ":" ) if len( splittedSectionName ) > 1: @@ -75,7 +75,7 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): if self.has_section( localSection ): try: result[option] = self.get( localSection, option ) - except ConfigParser.NoOptionError, option: + except configparser.NoOptionError as option: msg = ("%s. This option is mandatory."
%(str(option).replace(":", "", 1).replace( "section", @@ -90,10 +90,10 @@ def getResultingSection( self, section, defaultDict = {}, demandPars = [] ): keylist = result.keys() resultSorted = collections.OrderedDict() #print keylist - print sorted(keylist) + print(sorted(keylist)) for key in sorted(keylist): resultSorted[key]=result[key] - #print resultSorted + #print(resultSorted) return resultSorted ##### method to parse the input file ################################ @@ -124,7 +124,7 @@ def getInput(default, prompt = ''): break text += "%s\n" % answer for line in text.splitlines(): - print "-",line + print("-",line) return text.strip() #answer = raw_input(prompt) @@ -145,14 +145,14 @@ def main(): ConfigFile = opts.inputconfig if (ConfigFile is not None and os.path.exists("./"+ConfigFile)) : - print "********************************************************" - print "* Parsing from input file:", ConfigFile," " - print "********************************************************" + print("********************************************************") + print("* Parsing from input file:", ConfigFile," ") + print("********************************************************") config = BetterConfigParser() config.read(ConfigFile) - #print config.sections() + #print(config.sections()) #config.getResultingSection(config.sections()[0]) dict = {'RunI_Ideal_scenario' : ('run1_design',"Run1 Ideal Simulation"), @@ -344,18 +344,18 @@ def main(): ######################### # Print output ######################### - print "Output will be found at:" - print " - GitHub_"+theRelease+"_"+thePR+".txt" - print " - Twiki_"+theRelease+"_"+thePR+".txt" + print("Output will be found at:") + print(" - GitHub_"+theRelease+"_"+thePR+".txt") + print(" - Twiki_"+theRelease+"_"+thePR+".txt") else: - print "\n" - print "ERROR in calling createDescription.py " - print " An input file has not been specified" - print " Please enter the command in the format: " - print " python createGTDescription.py -i 
GT_changes.ini" - print " =====> exiting..." - print "\n" + print("\n") + print("ERROR in calling createDescription.py ") + print(" An input file has not been specified") + print(" Please enter the command in the format: ") + print(" python createGTDescription.py -i GT_changes.ini") + print(" =====> exiting...") + print("\n") exit(1) if __name__ == "__main__": diff --git a/scripts/dumpBeamSpot.py b/scripts/dumpBeamSpot.py index 9ac0643..73f3357 100755 --- a/scripts/dumpBeamSpot.py +++ b/scripts/dumpBeamSpot.py @@ -23,9 +23,9 @@ options.parseArguments() -print "###################################################################" -print "# dumping: "+options.inputTag -print "###################################################################" +print("###################################################################") +print("# dumping: "+options.inputTag) +print("###################################################################") process.CondDB.connect = cms.string(options.inputDB) diff --git a/scripts/getIOVDiff.py b/scripts/getIOVDiff.py index 26376d8..80924c6 100755 --- a/scripts/getIOVDiff.py +++ b/scripts/getIOVDiff.py @@ -19,7 +19,7 @@ def getCommandOutput(command): data = child.read() err = child.close() if err: - print '%s failed w/ exit code %d' % (command, err) + print('%s failed w/ exit code %d' % (command, err)) return data ################# @@ -50,13 +50,13 @@ def main(): for line in lines[2:]: tags = line.split(" ") tag_list = filter(None, tags) # fastest - #print tag_list + #print(tag_list) if(len(tag_list)>0): listOfRefTags.append(tag_list[2]) listOfTarTags.append(tag_list[3]) - #print listOfRefTags - #print listOfTarTags + #print(listOfRefTags) + #print(listOfTarTags) for i in range(len(listOfRefTags)): out2 = getCommandOutput("conddb diff "+listOfRefTags[i]+" "+listOfTarTags[i]+" -s") @@ -72,9 +72,9 @@ def main(): else: e_run = int(filteredIOVs[1].replace(')','')) if (b_run < int(opts.testRunNumber) and e_run > int(opts.testRunNumber)): - print 
rawIOVs[0] - print rawIOVs[1] - print b_run,"-",e_run,filteredIOVs[2],filteredIOVs[3] + print(rawIOVs[0]) + print(rawIOVs[1]) + print(b_run,"-",e_run,filteredIOVs[2],filteredIOVs[3]) #out3 = getCommandOutput("conddb search "+filteredIOVs[2]+" --limit 1") #out4 = getCommandOutput("conddb search "+filteredIOVs[3]+" --limit 1") @@ -84,7 +84,7 @@ def main(): p2 = subprocess.Popen(["conddb","search",filteredIOVs[3]], stdout=subprocess.PIPE, stderr=subprocess.PIPE) (out4, err4) = p2.communicate() - #print out3,out4 + #print(out3,out4) rawInfoRef = out3.split('\n') rawInfoRefSplit = filter(None,rawInfoRef[5].split(" ")) rawInfoTar = out4.split('\n') @@ -94,14 +94,14 @@ def main(): tarStripDateTime = datetime.datetime.strptime(rawInfoTarSplit[3].replace('-',''), '%Y%m%d').date() if( refStripDateTime <= datetime.date(2015,1,6) ): - print FAIL + rawInfoRefSplit[0]," ",rawInfoRefSplit[1],rawInfoRefSplit[3],rawInfoRefSplit[4] + ENDC + print(FAIL + rawInfoRefSplit[0]," ",rawInfoRefSplit[1],rawInfoRefSplit[3],rawInfoRefSplit[4] + ENDC) else: - print rawInfoRefSplit[0]," ",rawInfoRefSplit[1],rawInfoRefSplit[3],rawInfoRefSplit[4] + print(rawInfoRefSplit[0]," ",rawInfoRefSplit[1],rawInfoRefSplit[3],rawInfoRefSplit[4]) if( tarStripDateTime < datetime.date(2015,1,6) ): - print FAIL + rawInfoTarSplit[0]," ",rawInfoTarSplit[1],rawInfoTarSplit[3],rawInfoTarSplit[4] + ENDC + print(FAIL + rawInfoTarSplit[0]," ",rawInfoTarSplit[1],rawInfoTarSplit[3],rawInfoTarSplit[4] + ENDC) else: - print rawInfoTarSplit[0]," ",rawInfoTarSplit[1],rawInfoTarSplit[3],rawInfoTarSplit[4] + print(rawInfoTarSplit[0]," ",rawInfoTarSplit[1],rawInfoTarSplit[3],rawInfoTarSplit[4]) if __name__ == "__main__": diff --git a/scripts/getParentsTags.py b/scripts/getParentsTags.py index db2fd92..1e41a18 100755 --- a/scripts/getParentsTags.py +++ b/scripts/getParentsTags.py @@ -27,7 +27,6 @@ import subprocess import CondCore.Utilities.conddblib as conddb - 
##################################################################### # we need this check to handle different versions of the CondDBFW ##################################################################### @@ -44,8 +43,8 @@ def isCMSSWBefore81X( theRelease ): ##################################################################### def getCMSSWRelease( ): CMSSW_VERSION='CMSSW_VERSION' - if not os.environ.has_key(CMSSW_VERSION): - print "\n CMSSW not properly set. Exiting" + if not CMSSW_VERSION in os.environ: + print("\n CMSSW not properly set. Exiting") sys.exit(1) release = os.getenv(CMSSW_VERSION) return release @@ -89,7 +88,7 @@ def get_parent_tags(db, theHash): #mapOfOccur[tag]['sinces'] = listOfIOVs #mapOfOccur[tag]['times'] = listOfTimes - #print tag,synchronization,listOfIOVs,listOfTimes + #print(tag,synchronization,listOfIOVs,listOfTimes) return listOfOccur @@ -109,7 +108,7 @@ def get_parent_tags(db, theHash): (options, arguments) = parser.parse_args() theRelease = getCMSSWRelease() - print "- Getting conddblib from release",theRelease + print("- Getting conddblib from release",theRelease) connectionString="frontier://FrontierProd/CMS_CONDITIONS" tags = get_parent_tags(connectionString,options.hash) @@ -117,15 +116,15 @@ def get_parent_tags(db, theHash): #pp = pprint.PrettyPrinter(indent=4) #pp.pprint(tags) - #print tags + #print(tags) #for tag in tags: - # print tag + # print(tag) - #print head + #print(head) t = PrettyTable(['hash', 'since','tag','synch','insertion time']) for element in tags: t.add_row([options.hash,element['since'],element['tag'],element['synchronization'],element['insertion_time']]) - print t + print(t) #eof diff --git a/scripts/multipleSubmissions.py b/scripts/multipleSubmissions.py index 5cbe2a2..7d1ee0c 100755 --- a/scripts/multipleSubmissions.py +++ b/scripts/multipleSubmissions.py @@ -9,7 +9,7 @@ runRanges.sort() for iIOV in range(len(runRanges)): - print iIOV, runRanges[iIOV] + print(iIOV, runRanges[iIOV]) Popen('cp 
SiStripNoise_GR10_v3_offline_1_100357.txt SiStripNoise_GR10_v3_offline_'+str(runRanges[iIOV])+'.txt',shell=True).wait() Popen('~/bin/upload.py SiStripNoise_GR10_v3_offline_'+str(runRanges[iIOV])+'.db',shell=True).wait() diff --git a/scripts/sqlite_inspect.py b/scripts/sqlite_inspect.py index b75cfda..711c374 100644 --- a/scripts/sqlite_inspect.py +++ b/scripts/sqlite_inspect.py @@ -37,7 +37,7 @@ (options, arguments) = parser.parse_args() -#print len(options) +#print(len(options)) #if len(options) < 2: # parser.print_help() # sys.exit("=======> Exiting: Not enough parameters") @@ -55,5 +55,5 @@ def unpackLumiId(since): my_dict = sqlite_con.tag(name=options.tag).iovs().as_dicts() for element in my_dict: run, lumi = unpackLumiId(element['since']) - print "packed",element['since']," ===> run:",run," ls:",lumi + print("packed",element['since']," ===> run:",run," ls:",lumi)