Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update types #226

Merged
merged 6 commits into from
Aug 21, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 31 additions & 28 deletions Collections/plugins/OSUGenericTrackProducer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,7 @@ OSUGenericTrackProducer<T>::beginRun (const edm::Run &run, const edm::EventSetup
template<class T> void
OSUGenericTrackProducer<T>::produce (edm::Event &event, const edm::EventSetup &setup)
{

//caloGeometry_ = setup.getHandle(caloGeometryToken_);
//ecalStatus_ = setup.getHandle(ecalStatusToken_);

Expand Down Expand Up @@ -273,6 +274,7 @@ OSUGenericTrackProducer<T>::produce (edm::Event &event, const edm::EventSetup &s

#ifdef DISAPP_TRKS
T &track = pl_->back ();
//#endif //temp mcarrigan

// in the specific case of TYPE(tracks)==CandidateTracks (where DATA_FORMAT is xyz_CUSTOM)
// and running over CandidateTracks ntuples, then generalTracks and RecHits may be available
Expand All @@ -296,9 +298,10 @@ OSUGenericTrackProducer<T>::produce (edm::Event &event, const edm::EventSetup &s
track.set_caloNewEMDRp1 (caloE_0p1.eEM);
track.set_caloNewHadDRp1 (caloE_0p1.eHad);


// this could be removed; if CandidateTrackProducer sets these,
// then these values need not be recalculated -- and RecHits can all be dropped
/*if (EBRecHits.isValid () && EERecHits.isValid () && HBHERecHits.isValid ())
// then these values need not be recalculated -- and RecHits can all be dropped*/
if (EBRecHits.isValid () && EERecHits.isValid () && HBHERecHits.isValid ())
{
double eEM = 0;
double dR = 0.5;
Expand Down Expand Up @@ -328,28 +331,27 @@ OSUGenericTrackProducer<T>::produce (edm::Event &event, const edm::EventSetup &s
track.set_caloNewEMDRp5(eEM);
std::cout << "In OSUGenericTrackProducer... setting the calo energy: " << eEM << std::endl;
//track.set_caloNewHadDRp5(eHad);
}*/
}
#endif
#if DATA_FORMAT_IS_CUSTOM //&& !DATA_FORMAT_IS_2022
#if DATA_FORMAT_IS_CUSTOM || DATA_FORMAT_IS_2022

// this is called only for ntuples with generalTracks explicitly kept (really just signal),
// to re-calculate the track isolations calculated wrong when ntuples were produced (thus "old" vs not-old)
if (tracks.isValid ())
if (isolatedTracks.isValid ())
{
track.set_trackIsoDRp3 (getTrackIsolation (track, *tracks, false, false, 0.3));
track.set_trackIsoDRp5 (getTrackIsolation (track, *tracks, false, false, 0.5));
track.set_trackIsoNoPUDRp3 (getTrackIsolation (track, *tracks, true, false, 0.3));
track.set_trackIsoNoPUDRp5 (getTrackIsolation (track, *tracks, true, false, 0.5));
track.set_trackIsoNoFakesDRp3 (getTrackIsolation (track, *tracks, false, true, 0.3));
track.set_trackIsoNoFakesDRp5 (getTrackIsolation (track, *tracks, false, true, 0.5));
track.set_trackIsoNoPUNoFakesDRp3 (getTrackIsolation (track, *tracks, true, true, 0.3));
track.set_trackIsoNoPUNoFakesDRp5 (getTrackIsolation (track, *tracks, true, true, 0.5));

track.set_trackIsoOldNoPUDRp3 (getOldTrackIsolation (track, *tracks, true, 0.3));
track.set_trackIsoOldNoPUDRp5 (getOldTrackIsolation (track, *tracks, true, 0.5));
track.set_trackIsoDRp3 (getTrackIsolation (track, *isolatedTracks, false, false, 0.3));
track.set_trackIsoDRp5 (getTrackIsolation (track, *isolatedTracks, false, false, 0.5));
track.set_trackIsoNoPUDRp3 (getTrackIsolation (track, *isolatedTracks, true, false, 0.3));
track.set_trackIsoNoPUDRp5 (getTrackIsolation (track, *isolatedTracks, true, false, 0.5));
track.set_trackIsoNoFakesDRp3 (getTrackIsolation (track, *isolatedTracks, false, true, 0.3));
track.set_trackIsoNoFakesDRp5 (getTrackIsolation (track, *isolatedTracks, false, true, 0.5));
track.set_trackIsoNoPUNoFakesDRp3 (getTrackIsolation (track, *isolatedTracks, true, true, 0.3));
track.set_trackIsoNoPUNoFakesDRp5 (getTrackIsolation (track, *isolatedTracks, true, true, 0.5));

//track.set_trackIsoOldNoPUDRp3 (getOldTrackIsolation (track, *tracks, true, 0.3));
//track.set_trackIsoOldNoPUDRp5 (getOldTrackIsolation (track, *tracks, true, 0.5));
}
#endif // DATA_FORMAT_IS_CUSTOM || DATA_FORMAT_IS_2022

track.set_minDeltaRToElectrons(electrons, vertices, eleVIDVetoIdMap, eleVIDLooseIdMap, eleVIDMediumIdMap, eleVIDTightIdMap);
track.set_minDeltaRToMuons(muons, vertices);
track.set_minDeltaRToTaus(taus);
Expand All @@ -359,7 +361,7 @@ OSUGenericTrackProducer<T>::produce (edm::Event &event, const edm::EventSetup &s
#endif

#endif // DISAPP_TRKS
}
}

event.put (std::move (pl_), collection_.instance ());
pl_.reset ();
Expand Down Expand Up @@ -590,17 +592,16 @@ OSUGenericTrackProducer<T>::getChannelStatusMaps ()
}

template<class T> const double
OSUGenericTrackProducer<T>::getTrackIsolation (TYPE(tracks)& track, const vector<reco::Track> &tracks, const bool noPU, const bool noFakes, const double outerDeltaR, const double innerDeltaR) const
OSUGenericTrackProducer<T>::getTrackIsolation (TYPE(tracks)& track, const vector<TYPE(tracks)> &tracks, const bool noPU, const bool noFakes, const double outerDeltaR, const double innerDeltaR) const
{
double sumPt = 0.0;

for (const auto &t : tracks)
{

if (noFakes && (t.normalizedChi2() > 20.0 ||
t.hitPattern().pixelLayersWithMeasurement() < 2 ||
t.hitPattern().trackerLayersWithMeasurement() < 5 ||
fabs(t.d0() / t.d0Error()) > 5.0))
//if (noFakes && (t.normalizedChi2() > 20.0 ||
if (noFakes &&
(t.hitPattern().pixelLayersWithMeasurement() < 2 ||
t.hitPattern().trackerLayersWithMeasurement() < 5 ||
fabs(t.dxy() / t.dxyError()) > 5.0))
continue;
#if DATA_FORMAT_IS_2022
if (noPU && track.dz() > 3.0 * hypot(track.dzError(), t.dzError()))
Expand Down Expand Up @@ -652,7 +653,7 @@ OSUGenericTrackProducer<T>::getOldTrackIsolation (TYPE(tracks)& track, const vec

template<class T> const CaloEnergy
OSUGenericTrackProducer<T>::calculateCaloE (TYPE(tracks)& track, const EBRecHitCollection &EBRecHits, const EERecHitCollection &EERecHits, const HBHERecHitCollection &HBHERecHits, const double dR) const
{
{
double eEM = 0;
for (const auto &hit : EBRecHits) {
if (insideCone(track, hit.detid(), dR)) {
Expand All @@ -666,9 +667,11 @@ OSUGenericTrackProducer<T>::calculateCaloE (TYPE(tracks)& track, const EBRecHitC
}

double eHad = 0;
for (const auto &hit : HBHERecHits)
if (insideCone(track, hit.detid(), dR))
for (const auto &hit : HBHERecHits) {
if (insideCone(track, hit.detid(), dR)) {
eHad += hit.energy();
}
}

return {eEM, eHad};
}
Expand Down
2 changes: 1 addition & 1 deletion Collections/plugins/OSUGenericTrackProducer.h
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ template<class T>
map<DetId, vector<double> > EcalAllDeadChannelsValMap_;
map<DetId, vector<int> > EcalAllDeadChannelsBitMap_;

const double getTrackIsolation (TYPE(tracks)&, const vector<reco::Track> &, const bool, const bool, const double, const double = 1.0e-12) const;
const double getTrackIsolation (TYPE(tracks)&, const vector<TYPE(tracks)> &, const bool, const bool, const double, const double = 1.0e-12) const;
const double getOldTrackIsolation (TYPE(tracks)&, const vector<reco::Track> &, const bool, const double, const double = 1.0e-12) const;
const CaloEnergy calculateCaloE (TYPE(tracks)&, const EBRecHitCollection &, const EERecHitCollection &, const HBHERecHitCollection &, const double dR = 0.5) const;

Expand Down
8 changes: 5 additions & 3 deletions Configuration/python/CollectionProducer_cff.py
Original file line number Diff line number Diff line change
Expand Up @@ -261,7 +261,7 @@ class CollectionProducer:
fiducialMaps = cms.PSet (
electrons = cms.VPSet (
cms.PSet (
histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/electronFiducialMap_mc.root"),
#histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/electronFiducialMap_mc.root"),
beforeVetoHistName = cms.string ("beforeVeto"), # must be eta on x-axis, phi on y-axis
afterVetoHistName = cms.string ("afterVeto"), # must be eta on x-axis, phi on y-axis
thresholdForVeto = cms.double (0.0), # in sigma
Expand All @@ -270,7 +270,7 @@ class CollectionProducer:
),
muons = cms.VPSet (
cms.PSet (
histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/muonFiducialMap_mc.root"),
#histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/muonFiducialMap_mc.root"),
beforeVetoHistName = cms.string ("beforeVeto"), # must be eta on x-axis, phi on y-axis
afterVetoHistName = cms.string ("afterVeto"), # must be eta on x-axis, phi on y-axis
thresholdForVeto = cms.double (0.0), # in sigma
Expand Down Expand Up @@ -334,9 +334,11 @@ class CollectionProducer:
elif "Run2018" in osusub.dataset:
collectionProducer.tracks.fiducialMaps.electrons[0].histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/electronFiducialMap_2018_data.root")
collectionProducer.tracks.fiducialMaps.muons[0].histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/muonFiducialMap_2018_data.root")
else:
elif "Run2015" in osusub.dataset:
collectionProducer.tracks.fiducialMaps.electrons[0].histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/electronFiducialMap_2015_data.root")
collectionProducer.tracks.fiducialMaps.muons[0].histFile = cms.FileInPath ("OSUT3Analysis/Configuration/data/muonFiducialMap_2015_data.root")
else:
print("No fiducial map hist file")
# determine which era this dataset is in
if "_201" in osusub.datasetLabel:
collectionProducer.tracks.fiducialMaps.electrons[0].era = cms.string (osusub.datasetLabel[osusub.datasetLabel.find('_201'):])
Expand Down
34 changes: 17 additions & 17 deletions Configuration/python/configurationOptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -213,8 +213,8 @@
'MET_2018D',
'MET_2022A',
'MET_2022B',
'MET_2022C',
'MET_2022D',
'JetMET_2022C',
'JetMET_2022D',
'MET_2022E',
'MET_2022F',
'MET_2022G',
Expand Down Expand Up @@ -1308,8 +1308,8 @@
'MET_2022' : [
'MET_2022A',
'MET_2022B',
'MET_2022C',
'MET_2022D',
'JetMET_2022C',
'JetMET_2022D',
'MET_2022E',
'MET_2022F',
'MET_2022G',
Expand Down Expand Up @@ -1519,8 +1519,8 @@
'MET_2018D' : 1625,
'MET_2022A' : 10,
'MET_2022B' : 10,
'MET_2022C' : 170,
'MET_2022D' : 35,
'JetMET_2022C' : 170,
'JetMET_2022D' : 35,
'MET_2022E' : 147,
'MET_2022F' : 516,
'MET_2022G' : 85,
Expand Down Expand Up @@ -1717,10 +1717,10 @@

'EGamma_2022A' : 6,
'EGamma_2022B' : 112,
'EGamma_2022C' : 264,
'EGamma_2022C' : 200,
'EGamma_2022D' : 33,
'EGamma_2022E' : 152,
'EGamma_2022F' : 1,
'EGamma_2022E' : 800,
'EGamma_2022F' : 1000,
'EGamma_2022G' : 77,

# set number of jobs to correspond to roughly 100k events/job
Expand Down Expand Up @@ -2879,8 +2879,8 @@
'MET_2018D' : -1,
'MET_2022A' : -1,
'MET_2022B' : -1,
'MET_2022C' : -1,
'MET_2022D' : -1,
'JetMET_2022C' : -1,
'JetMET_2022D' : -1,
'MET_2022E' : -1,
'MET_2022F' : -1,
'MET_2022G' : -1,
Expand Down Expand Up @@ -4257,8 +4257,8 @@
'MET_2018D' : "data",
'MET_2022A' : "data",
'MET_2022B' : "data",
'MET_2022C' : "data",
'MET_2022D' : "data",
'JetMET_2022C' : "data",
'JetMET_2022D' : "data",
'MET_2022E' : "data",
'MET_2022F' : "data",
'MET_2022G' : "data",
Expand Down Expand Up @@ -5684,8 +5684,8 @@
'MET_2018D' : 1,
'MET_2022A' : 1,
'MET_2022B' : 1,
'MET_2022C' : 1,
'MET_2022D' : 1,
'JetMET_2022C' : 1,
'JetMET_2022D' : 1,
'MET_2022E' : 1,
'MET_2022F' : 1,
'MET_2022G' : 1,
Expand Down Expand Up @@ -7123,8 +7123,8 @@
'MET_2018D' : "MET_2018D data",
'MET_2022A' : "MET_2022A data",
'MET_2022B' : "MET_2022B data",
'MET_2022C' : "MET_2022C data",
'MET_2022D' : "MET_2022D data",
'JetMET_2022C' : "JetMET_2022C data",
'JetMET_2022D' : "JetMET_2022D data",
'MET_2022E' : "MET_2022E data",
'MET_2022F' : "MET_2022F data",
'MET_2022G' : "MET_2022G data",
Expand Down
30 changes: 17 additions & 13 deletions Configuration/python/mergeUtilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
import socket
import tempfile
import importlib.util
import json
from threading import Thread, Lock, Semaphore
from multiprocessing import cpu_count
from OSUT3Analysis.Configuration.configurationOptions import *
Expand Down Expand Up @@ -241,17 +242,19 @@ def GetSkimInputTags(File, xrootdDestination = ""):
if not parsing:
continue
splitLine = line.split ()
cppTypes.append (splitLine[0])
inputTags[splitLine[0]] = cms.InputTag (splitLine[1][1:-1].decode("utf-8"), splitLine[2][1:-1].decode("utf-8"), splitLine[3][1:-1].decode("utf-8"))

cppTypes.append (splitLine[0].decode("utf-8"))
#print("Event content: ", line)
inputTags[splitLine[0].decode("utf-8")] = cms.InputTag (splitLine[1][1:-1].decode("utf-8"), splitLine[2][1:-1].decode("utf-8"), splitLine[3][1:-1].decode("utf-8"))

collectionTypes = subprocess.check_output (["getCollectionType"] + cppTypes)
# Save only the collections for which there is a valid type, and only framework collections
# Future jobs on this skim will use the user's collectionMap collections, overwritten only for framework collections
for i in range (0, len (cppTypes)):
if cppTypes[i] not in inputTags:
if cppTypes[i] not in inputTags.keys():
continue
collectionType = collectionTypes.splitlines ()[i]
if collectionType == "INVALID_TYPE":
collectionType = collectionTypes.splitlines ()[i].decode('utf-8')
#if collectionType == "INVALID_TYPE": #mcarrigan 8/10/23
if "INVALID_TYPE" in str(collectionType):
inputTags.pop (cppTypes[i])
else:
thisTag = inputTags.pop (cppTypes[i])
Expand All @@ -260,21 +263,22 @@ def GetSkimInputTags(File, xrootdDestination = ""):

if xrootdDestination != "":
tmpDir = tempfile.mkdtemp()
outfile = os.path.join(tmpDir, 'SkimInputTags.pkl')
outfile = os.path.join(tmpDir, 'SkimInputTags.json')
fout = open (outfile, 'w')
dumpedString = pickle.dumps (inputTags).decode('latin-1')
dumpedString = json.dumps(inputTags).decode('latin-1') #mcarrigan 8/8/23
fout.write(dumpedString)
fout.close()
try:
subprocess.check_output(['xrdcp', '-f', outfile, xrootdDestination])
except subprocess.CalledProcessError as e:
print('Failed to copy SkimInputTags.pkl:', e)
print('Failed to copy SkimInputTags.json:', e)
shutil.rmtree(tmpDir)
else:
if os.path.exists("SkimInputTags.pkl"):
os.remove("SkimInputTags.pkl")
fout = open ("SkimInputTags.pkl", "w")
dumpedString = pickle.dumps (inputTags).decode('latin-1')
if os.path.exists("SkimInputTags.json"):
os.remove("SkimInputTags.json")
fout = open ("SkimInputTags.json", "w")
inputTags = dict((str(k), str(v)) for k,v in inputTags.items())
dumpedString = json.dumps(inputTags) #.decode('latin-1') #mcarrigan
fout.write(dumpedString)
fout.close ()

Expand Down
24 changes: 14 additions & 10 deletions Configuration/python/processingUtilities.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import pickle
import tempfile
import shutil
import json
import FWCore.ParameterSet.Modules
from optparse import OptionParser
import OSUT3Analysis.DBTools.osusub_cfg as osusub
Expand Down Expand Up @@ -191,30 +192,33 @@ def get_collections (cuts):
############################################################################

def set_skim_tags (inputFileName, collections):
# If we are running over a skim via XrootD, get SkimInputTags.pkl via XrootD
# If we are running over a skim via XrootD, get SkimInputTags.json via XrootD
if inputFileName.startswith ('root:'):
tmpDir = tempfile.mkdtemp ()
subprocess.call('xrdcp ' + os.path.dirname(inputFileName) + '/SkimInputTags.pkl ' + tmpDir + '/SkimInputTags.pkl', shell = True)
inputTagPickleName = tmpDir + '/SkimInputTags.pkl'
# Otherwise get SkimInputTags.pkl via the regular file system
subprocess.call('xrdcp ' + os.path.dirname(inputFileName) + '/SkimInputTags.json ' + tmpDir + '/SkimInputTags.json', shell = True)
inputTagPickleName = tmpDir + '/SkimInputTags.json'
# Otherwise get SkimInputTags.json via the regular file system
else:
if inputFileName.startswith ('file:'):
inputFileName = inputFileName[5:]
inputTagPickleName = os.path.dirname (os.path.realpath (inputFileName)) + '/SkimInputTags.pkl'
inputTagPickleName = os.path.dirname (os.path.realpath (inputFileName)) + '/SkimInputTags.json'
if not os.path.isfile (inputTagPickleName):
print("ERROR: The input file appears to be a skim file but no SkimInputTags.pkl file found in the skim directory.")
print("ERROR: The input file appears to be a skim file but no SkimInputTags.json file found in the skim directory.")
print("Input file is", inputFileName)
print("Be sure that you have run mergeOut.py.")
if inputFileName.startswith ('root:'):
shutil.rmtree (tmpDir)
sys.exit(1)
fin = open (inputTagPickleName)
inputTags = pickle.load (fin)
inputTags = json.load (fin)
fin.close ()
if inputFileName.startswith ('root:'):
shutil.rmtree (tmpDir)
print("Input tags read in")
print("Collections: ", collections)
for tag in inputTags:
setattr (collections, tag, inputTags[tag])
print("Tag {0}, {1}".format(tag, inputTags[tag]))
setattr (collections, tag, eval(inputTags[tag]))

#def add_channels (process, channels, histogramSets, weights, scalingfactorproducers, collections, variableProducers, skim = True, branchSets):
def add_channels (process,
Expand Down Expand Up @@ -290,7 +294,7 @@ def add_channels (process,
raise AttributeError(exceptionString)

############################################################################
# Check the directory of the first input file for SkimInputTags.pkl. If it
# Check the directory of the first input file for SkimInputTags.json. If it
# exists, update the input tags with those stored in the JSON file.
############################################################################
makeEmptySkim = False
Expand All @@ -303,7 +307,7 @@ def add_channels (process,

# If we are running over an empty skim, get the file name from the
# secondary files. The secondary files should be a full skim with
# SkimInputTags.pkl in the same directory.
# SkimInputTags.json in the same directory.
if rootFile.startswith ("emptySkim_"):
makeEmptySkim = True
primaryFileName = fileName
Expand Down
Loading