Python3 migration fix #35788

Merged · 1 commit merged on Oct 23, 2021
@@ -106,20 +106,20 @@ def plot(MillePedeUser, alignables, config):
for i in range(3):
# get first and last bin with content and chose the one which
# has a greater distance to the center
if (abs(numberOfBins / 2 - plot.histo[i].FindFirstBinAbove()) > abs(plot.histo[i].FindLastBinAbove() - numberOfBins / 2)):
if (abs(numberOfBins // 2 - plot.histo[i].FindFirstBinAbove()) > abs(plot.histo[i].FindLastBinAbove() - numberOfBins // 2)):
plot.maxBinShift[i] = abs(
numberOfBins / 2 - plot.histo[i].FindFirstBinAbove())
numberOfBins // 2 - plot.histo[i].FindFirstBinAbove())
# set the maxShift value
plot.maxShift[i] = plot.histo[i].GetBinCenter(
plot.histo[i].FindFirstBinAbove())
else:
plot.maxBinShift[i] = abs(
plot.histo[i].FindLastBinAbove() - numberOfBins / 2)
plot.histo[i].FindLastBinAbove() - numberOfBins // 2)
# set the maxShift value
plot.maxShift[i] = plot.histo[i].GetBinCenter(
plot.histo[i].FindLastBinAbove())
# skip empty histogram
if (abs(plot.maxBinShift[i]) == numberOfBins / 2 + 1):
if (abs(plot.maxBinShift[i]) == numberOfBins // 2 + 1):
plot.maxBinShift[i] = 0

# three types of ranges
@@ -179,10 +179,10 @@ def plot(MillePedeUser, alignables, config):
# count entries which are not shown anymore
for i in range(3):
# bin 1 to begin of histogram
for j in range(1, numberOfBins / 2 - plot.binShift[i]):
for j in range(1, numberOfBins // 2 - plot.binShift[i]):
plot.hiddenEntries[i] += plot.histo[i].GetBinContent(j)
# from the end of shown bins to the end of histogram
for j in range(numberOfBins / 2 + plot.binShift[i], plot.histo[i].GetNbinsX()):
for j in range(numberOfBins // 2 + plot.binShift[i], plot.histo[i].GetNbinsX()):
plot.hiddenEntries[i] += plot.histo[i].GetBinContent(j)

# apply new range
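
The changes in this first file swap true division for floor division in the bin arithmetic: under Python 3, numberOfBins / 2 yields a float, while the code uses the result as a bin index and bin-shift counter, which must stay integral. A minimal standalone sketch of the difference (not taken from the PR):

numberOfBins = 100
print(numberOfBins / 2)     # Python 3: 50.0 (float); Python 2 gave the int 50
print(numberOfBins // 2)    # 50 (int) in both versions, which is what the bin arithmetic needs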
@@ -23,7 +23,7 @@ def parse(path, config):
# save lines in list
try:
with gzip.open(path) as gzipFile:
dumpFile = gzipFile.readlines()
dumpFile = [ l.decode() for l in gzipFile.readlines() ]
except IOError:
logger.error("PedeDump: {0} does not exist".format(path))
return
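
gzip.open() opens the dump in binary mode by default, so under Python 3 its lines are bytes objects and the substring tests further down (e.g. "Sum(Chi^2)/Sum(Ndf) =" in line) would raise a TypeError; decoding each line restores the Python 2 behaviour. A minimal sketch of the same idea, with an illustrative file name and assuming an ASCII/UTF-8 encoded dump:

import gzip

with gzip.open("pede.dump.gz") as gzip_file:              # binary mode by default
    dump_file = [line.decode() for line in gzip_file]     # bytes -> str

# equivalent alternative: let gzip decode by opening in text mode
# with gzip.open("pede.dump.gz", "rt") as gzip_file:
#     dump_file = gzip_file.readlines()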
@@ -32,41 +32,41 @@ def parse(path, config):
# Sum(Chi^2)/Sum(Ndf)
if ("Sum(Chi^2)/Sum(Ndf) =" in line):
number = []
number.append(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i])))
number.append(map(int, re.findall(r"[-+]?\d+", dumpFile[i + 1])))
number.append(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 2])))
number.append(list(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i]))))
number.append(list(map(int, re.findall(r"[-+]?\d+", dumpFile[i + 1]))))
number.append(list(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 2]))))
pedeDump.sumSteps = "{0} / ( {1} - {2} )".format(
number[0][0], number[1][0], number[1][1])
pedeDump.sumValue = number[2][0]

# Sum(W*Chi^2)/Sum(Ndf)/<W>
if ("Sum(W*Chi^2)/Sum(Ndf)/<W> =" in line):
number = []
number.append(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i])))
number.append(map(int, re.findall(r"[-+]?\d+", dumpFile[i + 1])))
number.append(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 2])))
number.append(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 3])))
number.append(list(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i]))))
number.append(list(map(int, re.findall(r"[-+]?\d+", dumpFile[i + 1]))))
number.append(list(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 2]))))
number.append(list(map(float, re.findall(
r"[-+]?\d*\.\d+", dumpFile[i + 3]))))
pedeDump.sumSteps = "{0} / ( {1} - {2} ) / {3}".format(
number[0][0], number[1][0], number[1][1], number[2][0])
pedeDump.sumWValue = number[3][0]

if ("with correction for down-weighting" in line):
number = map(float, re.findall(r"[-+]?\d*\.\d+", dumpFile[i]))
number = list(map(float, re.findall(r"[-+]?\d*\.\d+", dumpFile[i])))
pedeDump.correction = number[0]

# Peak dynamic memory allocation
if ("Peak dynamic memory allocation:" in line):
number = map(float, re.findall(r"[-+]?\d*\.\d+", dumpFile[i]))
number = list(map(float, re.findall(r"[-+]?\d*\.\d+", dumpFile[i])))
pedeDump.memory = number[0]

# total time
if ("Iteration-end" in line):
number = map(int, re.findall(r"\d+", dumpFile[i + 1]))
number = list(map(int, re.findall(r"\d+", dumpFile[i + 1])))
pedeDump.time = number[:3]

# warings
@@ -79,17 +79,17 @@ def parse(path, config):

# nrec number of records
if (" = number of records" in line):
number = map(int, re.findall("\d+", dumpFile[i]))
number = list(map(int, re.findall("\d+", dumpFile[i])))
pedeDump.nrec = number[0]

# ntgb total number of parameters
if (" = total number of parameters" in line):
number = map(int, re.findall("\d+", dumpFile[i]))
number = list(map(int, re.findall("\d+", dumpFile[i])))
pedeDump.ntgb = number[0]

# nvgb number of variable parameters
if (" = number of variable parameters" in line):
number = map(int, re.findall("\d+", dumpFile[i]))
number = list(map(int, re.findall("\d+", dumpFile[i])))
pedeDump.nvgb = number[0]

return pedeDump
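
The bulk of this parser is the map() -> list(map()) change: in Python 3 map() returns a lazy iterator, and the code immediately indexes and slices the result (number[0][0], number[:3]), which an iterator does not support. A minimal sketch of the failure mode and the fix, using a made-up input line:

import re

line = "Sum(Chi^2)/Sum(Ndf) = 123.4"
numbers = map(float, re.findall(r"[-+]?\d*\.\d+", line))
# numbers[0]    # Python 3: TypeError, 'map' object is not subscriptable

numbers = list(map(float, re.findall(r"[-+]?\d*\.\d+", line)))
print(numbers[0])    # 123.4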
@@ -4,7 +4,10 @@

import logging
import os
import cPickle
try:
import cPickle as pickle
except:
import pickle

import ROOT
ROOT.PyConfig.IgnoreCommandLineOptions = True
@@ -24,7 +27,7 @@ def plot(config):

# retrieve the weights of the different datasets
with open(os.path.join(config.jobDataPath, ".weights.pkl"), "rb") as f:
weight_conf = cPickle.load(f)
weight_conf = pickle.load(f)

# loop over all millepedemonitor_X.root files
for filename in os.listdir("{0}".format(config.jobDataPath)):
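
cPickle does not exist in Python 3 (the accelerated implementation is used automatically by the plain pickle module), so the import is wrapped in a try/except that still picks the C module on Python 2. A minimal sketch of the pattern, with a hypothetical pickle file for illustration:

try:
    import cPickle as pickle    # Python 2: explicit C implementation
except ImportError:             # Python 3: cPickle is gone, pickle is already fast
    import pickle

with open(".weights.pkl", "rb") as f:    # binary mode, as in the script
    weight_conf = pickle.load(f)

The PR uses a bare except:, which also works; except ImportError: is the narrower spelling.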
6 changes: 3 additions & 3 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_check.py
@@ -24,7 +24,7 @@
# create a list of eos ls entries containing files on eos binary store
command = ["ls", "-l", os.path.join(lib.mssDir, "binaries")]
try:
eoslsoutput = subprocess.check_output(command, stderr=subprocess.STDOUT).split('\n')
eoslsoutput = subprocess.check_output(command, stderr=subprocess.STDOUT).decode().split('\n')
except subprocess.CalledProcessError:
eoslsoutput = ""

@@ -136,7 +136,7 @@
"RemoteSysCpu",
"JobStatus",
"RemoveReason"],
stderr = subprocess.STDOUT)
stderr = subprocess.STDOUT).decode()
condor_log = condor_log.split()

cputime = int(round(float(condor_log[0])))
@@ -242,7 +242,7 @@
for line in eoslsoutput:
if milleOut in line:
columns = line.split()
mOutSize = columns[4] # 5th column = size
mOutSize = int(columns[4]) # 5th column = size
if not (mOutSize>0):
emptyDatErr = 1

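
mps_check.py hits the other recurring Python 3 difference: subprocess.check_output() returns bytes, so the eos listing and the condor log are decoded before being split or searched as text, and the size column is cast to int so that mOutSize > 0 is a numeric comparison (comparing str with int raises a TypeError in Python 3). A minimal sketch, assuming an ls -l style listing where the fifth column is the size:

import subprocess

output = subprocess.check_output(["ls", "-l"], stderr=subprocess.STDOUT)
print(type(output))                         # <class 'bytes'> on Python 3
for line in output.decode().split('\n'):    # decode once, then treat as text
    columns = line.split()
    if len(columns) > 4:
        size = int(columns[4])              # str -> int before the numeric test
        if not size > 0:
            pass                            # would flag an empty file, as the script does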
28 changes: 14 additions & 14 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_fire.py
@@ -39,7 +39,7 @@ def forward_proxy(rundir):
print("Please create proxy via 'voms-proxy-init -voms cms -rfc'.")
sys.exit(1)

local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).strip()
local_proxy = subprocess.check_output(["voms-proxy-info", "--path"]).decode().strip()
shutil.copyfile(local_proxy, os.path.join(rundir,".user_proxy"))


@@ -107,7 +107,7 @@ def write_HTCondor_submit_file_pede(path, script, config, lib):
for directory in ("binaries", "monitors", "tree_files")
for item
in spco(cmd+
glob.glob(opj(lib.mssDir, directory, "*"))).splitlines()]
glob.glob(opj(lib.mssDir, directory, "*"))).decode().splitlines()]
disk_usage = sum(disk_usage)
disk_usage *= 1.1 # reserve 10% additional space

@@ -272,7 +272,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
resources = '-q'+resources+' -m g_cmscaf'
elif "htcondor" in resources:
fire_htcondor = True
schedinfo = subprocess.check_output(["myschedd","show"])
schedinfo = subprocess.check_output(["myschedd","show"]).decode()
if 'cafalca' in resources:
if not 'tzero' in schedinfo:
print("\nMPS fire: request to use CAF pool which has not been set up. Call `module load lxbatch/tzero` and try again")
@@ -310,10 +310,10 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
try:
result = subprocess.check_output(submission,
stderr=subprocess.STDOUT,
shell=True)
shell=True).decode()
except subprocess.CalledProcessError as e:
result = "" # -> check for successful job submission will fail
print(' '+result, end=' ')
print(result)
result = result.strip()

# check if job was submitted and updating jobdatabase
@@ -374,7 +374,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
try:
result = subprocess.check_output(submission,
stderr=subprocess.STDOUT,
shell=True)
shell=True).decode()
except subprocess.CalledProcessError as e:
result = "" # -> check for successful job submission will fail
print(' '+result, end=' ')
@@ -404,7 +404,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
resources = '-q cmscafalcamille'
elif "htcondor" in resources:
fire_htcondor = True
schedinfo = subprocess.check_output(["myschedd","show"])
schedinfo = subprocess.check_output(["myschedd","show"]).decode()
if 'bigmem' in resources:
if not 'share' in schedinfo:
print("\nMPS fire: CAF pool is set up, but request to use high-memory machines which live in the standard pool. Call `module load lxbatch/share` and try again")
@@ -453,7 +453,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):

# get the name of merge cfg file -> either the.py or alignment_merge.py
command = 'cat '+backupScriptPath+' | grep CONFIG_FILE | head -1 | awk -F"/" \'{print $NF}\''
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True).decode()
mergeCfg = mergeCfg.strip()

if fire_htcondor:
@@ -493,9 +493,9 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):

# get the name of merge cfg file
command = "cat "+scriptPath+" | grep '^\s*CONFIG_FILE' | awk -F'=' '{print $2}'"
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True).decode()
command = 'basename '+mergeCfg
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True).decode()
mergeCfg = mergeCfg.replace('\n','')

if fire_htcondor:
@@ -521,7 +521,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
submission = ["bsub", "-J", curJobName, resources, scriptPath]
for _ in range(5):
try:
result = subprocess.check_output(submission, stderr=subprocess.STDOUT)
result = subprocess.check_output(submission, stderr=subprocess.STDOUT).decode()
break
except subprocess.CalledProcessError as e:
result = e.output
@@ -577,9 +577,9 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):

# get the name of merge cfg file
command = "cat "+scriptPath+" | grep '^\s*CONFIG_FILE' | awk -F'=' '{print $2}'"
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True).decode()
command = 'basename '+mergeCfg
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True)
mergeCfg = subprocess.check_output(command, stderr=subprocess.STDOUT, shell=True).decode()
mergeCfg = mergeCfg.replace('\n','')

if fire_htcondor:
@@ -604,7 +604,7 @@ def write_HTCondor_submit_file_mille(path, script, lib, proxy_path=None):
submission = ["bsub", "-J", curJobName, resources, scriptPath]
for _ in range(5):
try:
result = subprocess.check_output(submission, stderr=subprocess.STDOUT)
result = subprocess.check_output(submission, stderr=subprocess.STDOUT).decode()
break
except subprocess.CalledProcessError as e:
result = e.output
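
mps_fire.py applies the same .decode() fix to every check_output call (voms-proxy-info, myschedd, the disk-usage query, and the bsub/condor submissions), since each result is then stripped, split or searched as text. Not what the PR does, but as a design note: the decoding can also be centralised, either with a tiny helper or by asking check_output for text directly — a sketch under that assumption:

import subprocess

def check_output_text(*args, **kwargs):
    # hypothetical helper, not part of the PR: run a command, return str
    return subprocess.check_output(*args, **kwargs).decode()

# built-in equivalent: universal_newlines=True makes check_output return str
schedinfo = subprocess.check_output(["myschedd", "show"],
                                    stderr=subprocess.STDOUT,
                                    universal_newlines=True)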
7 changes: 4 additions & 3 deletions Alignment/MillePedeAlignmentAlgorithm/scripts/mps_update.py
@@ -63,7 +63,7 @@ def fill_time_info(mps_index, status, cpu_time):
job_status = {}
condor_q = subprocess.check_output(["condor_q", "-af:j",
"JobStatus", "RemoteSysCpu"],
stderr = subprocess.STDOUT)
stderr = subprocess.STDOUT).decode()
for line in condor_q.splitlines():
job_id, status, cpu_time = line.split()
job_status[job_id] = {"status": htcondor_jobstatus[status],
@@ -90,7 +90,8 @@ def fill_time_info(mps_index, status, cpu_time):

################################################################################
# loop over remaining jobs to see whether they are done
for job_id, mps_index in submitted_jobs.items(): # IMPORTANT to copy here (no iterator!)
submitted_jobs_copy = { k:v for k,v in submitted_jobs.items() }
for job_id, mps_index in submitted_jobs_copy.items(): # IMPORTANT to copy here (no iterator!)
# check if current job is disabled. Print stuff.
disabled = "DISABLED" if "DISABLED" in lib.JOBSTATUS[mps_index] else ""
print(" DB job ", job_id, mps_index)
@@ -100,7 +101,7 @@ def fill_time_info(mps_index, status, cpu_time):
condor_h = subprocess.check_output(["condor_history", job_id, "-limit", "1",
"-userlog", userlog,
"-af:j", "JobStatus", "RemoteSysCpu"],
stderr = subprocess.STDOUT)
stderr = subprocess.STDOUT).decode()
if len(condor_h.strip()) > 0:
job_id, status, cpu_time = condor_h.split()
status = htcondor_jobstatus[status]
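
The one structural change in mps_update.py is the explicit copy of submitted_jobs before the loop: in Python 2, .items() returned a list, so entries could be removed from the dict while iterating; in Python 3, .items() is a live view and mutating the dict during iteration raises a RuntimeError. A minimal sketch, assuming (as the in-code comment "IMPORTANT to copy here" suggests) that the loop body removes finished jobs:

def job_finished(job_id):
    # hypothetical stand-in for the condor_history lookup done by mps_update.py
    return True

submitted_jobs = {"1001.0": 3, "1002.0": 7}    # made-up job id -> mps index mapping

# looping over the live view while popping entries would raise
# "RuntimeError: dictionary changed size during iteration" on Python 3
for job_id, mps_index in dict(submitted_jobs).items():    # iterate over a copy
    if job_finished(job_id):
        submitted_jobs.pop(job_id)

The PR spells the copy as a dict comprehension, {k: v for k, v in submitted_jobs.items()}, which is equivalent to dict(submitted_jobs) here.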
@@ -753,7 +753,7 @@ def datasetSnippet( self, jsonPath = None, begin = None, end = None,
if self.__predefined:
snippet = ("process.load(\"Alignment.OfflineValidation.%s_cff\")\n"
"process.maxEvents = cms.untracked.PSet(\n"
" input = cms.untracked.int32(.oO[nEvents]Oo. / .oO[parallelJobs]Oo.)\n"
" input = cms.untracked.int32(int(.oO[nEvents]Oo. / .oO[parallelJobs]Oo.))\n"
")\n"
"process.source.skipEvents=cms.untracked.uint32(int(.oO[nIndex]Oo.*.oO[nEvents]Oo./.oO[parallelJobs]Oo.))"
%(self.__name))
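
In the dataset snippet template, .oO[nEvents]Oo. / .oO[parallelJobs]Oo. becomes a float under Python 3 division, and cms.untracked.int32 expects an integer, so the expression is wrapped in int() just as the skipEvents line below it already was. Stripped of the template markers, the generated expression behaves like this sketch with made-up numbers:

nEvents, parallelJobs = 100000, 8
print(nEvents / parallelJobs)          # Python 3: 12500.0, a float
print(int(nEvents / parallelJobs))     # 12500, the integer the int32 parameter expects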
2 changes: 1 addition & 1 deletion Alignment/OfflineValidation/scripts/validateAlignments.py
@@ -646,7 +646,7 @@ def main(argv = None):
(options, args) = optParser.parse_args(argv)

if not options.dryRun:
schedinfo = subprocess.check_output(["myschedd","show"])
schedinfo = subprocess.check_output(["myschedd","show"]).decode()
if not 'tzero' in schedinfo:
print("\nAll-In-One Tool: you need to call `module load lxbatch/tzero` before trying to submit jobs. Please do so and try again")
exit(1)