remove use of site.ad file Fix #8720 (#8721)
* remove use of site.ad file Fix #8720

* pylint
belforte authored Sep 30, 2024
1 parent 88f8351 commit f786a6f
Showing 3 changed files with 10 additions and 35 deletions.
scripts/AdjustSites.py (15 changes: 2 additions & 13 deletions)
@@ -150,8 +150,7 @@ def getGlob(ad, normal, automatic):
     """
     if ad.get('CRAB_SplitAlgo') == 'Automatic':
         return glob.glob(automatic)
-    else:
-        return [normal]
+    return [normal]
 
 
 def adjustMaxRetries(adjustJobIds, ad):
@@ -234,15 +233,14 @@ def makeWebDir(ad):
                    "RunJobs.dag", "RunJobs.dag.dagman.out", "RunJobs.dag.nodes.log",
                    "input_files.tar.gz", "run_and_lumis.tar.gz",
                    "input_dataset_lumis.json", "input_dataset_duplicate_lumis.json",
-                   "aso_status.json", "error_summary.json",
+                   "aso_status.json", "error_summary.json", "site.ad.json"
                    ]
     for source in sourceLinks:
         link = source
         os.symlink(os.path.abspath(os.path.join(".", source)), os.path.join(path, link))
     ## Symlinks with a different link name than source name. (I would prefer to keep the source names.)
     os.symlink(os.path.abspath(os.path.join(".", "job_log")), os.path.join(path, "jobs_log.txt"))
     os.symlink(os.path.abspath(os.path.join(".", "node_state")), os.path.join(path, "node_state.txt"))
-    os.symlink(os.path.abspath(os.path.join(".", "site.ad")), os.path.join(path, "site_ad.txt"))
     os.symlink(os.path.abspath(os.path.join(".", ".job.ad")), os.path.join(path, "job_ad.txt"))
     os.symlink(os.path.abspath(os.path.join(".", "task_process/status_cache.txt")), os.path.join(path, "status_cache"))
     os.symlink(os.path.abspath(os.path.join(".", "task_process/status_cache.pkl")), os.path.join(path, "status_cache.pkl"))
@@ -478,14 +476,6 @@ def main():
         ## in adjustedJobIds correspond only to failed jobs.
         adjustMaxRetries(adjustedJobIds, ad)
 
-        if 'CRAB_SiteAdUpdate' in ad:
-            newSiteAd = ad['CRAB_SiteAdUpdate']
-            with open("site.ad", 'r', encoding='utf-8') as fd:
-                siteAd = classad.parseOne(fd)
-            siteAd.update(newSiteAd)
-            with open("site.ad", "w", encoding='utf-8') as fd:
-                fd.write(str(siteAd))
-
         if resubmitJobIds and ad.get('CRAB_SplitAlgo') == 'Automatic':
             printLog("Releasing processing and tail DAGs")
             schedd.edit(tailconst, "HoldKillSig", 'SIGUSR1')
@@ -495,4 +485,3 @@
 
 if __name__ == '__main__':
     main()
-
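Note on the web-directory change above: since "site.ad.json" is now part of the generic sourceLinks loop in makeWebDir, the dedicated site_ad.txt symlink for the old ClassAd file can be dropped. A minimal sketch of that symlink pattern, with a hypothetical web-directory path and file list for illustration:

import os

def link_into_webdir(webdir, source_links):
    # Expose each task file in the web directory under its own name,
    # mirroring the sourceLinks loop in AdjustSites.makeWebDir.
    for source in source_links:
        os.symlink(os.path.abspath(os.path.join(".", source)),
                   os.path.join(webdir, source))

# Hypothetical usage: "site.ad.json" is just another generically linked file,
# so no separate site_ad.txt link is created anymore.
# link_into_webdir("/data/user/webdir", ["error_summary.json", "site.ad.json"])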
src/python/TaskWorker/Actions/DagmanCreator.py (15 changes: 3 additions & 12 deletions)
@@ -579,7 +579,7 @@ def getPreScriptDefer(self, task, jobid):
         return prescriptDeferString
 
 
-    def makeDagSpecs(self, task, sitead, siteinfo, jobgroup, block, availablesites, datasites, outfiles, startjobid, parent=None, stage='conventional'):
+    def makeDagSpecs(self, task, siteinfo, jobgroup, block, availablesites, datasites, outfiles, startjobid, parent=None, stage='conventional'):
         dagSpecs = []
         i = startjobid
         temp_dest, dest = makeLFNPrefixes(task)
@@ -622,7 +622,6 @@ def makeDagSpecs(self, task, sitead, siteinfo, jobgroup, block, availablesites,
                 count = str(i)
             else:
                 count = '{parent}-{i}'.format(parent=parent, i=i)
-            sitead['Job{0}'.format(count)] = list(availablesites)
             siteinfo[count] = groupid
             remoteOutputFiles = []
             localOutputFiles = []
@@ -809,11 +808,6 @@ def createSubdag(self, splitterResult, **kwargs):
                 siteinfo = json.load(fd)
         else:
             siteinfo = {'group_sites': {}, 'group_datasites': {}}
-        if os.path.exists("site.ad"):
-            with open("site.ad", encoding='utf-8') as fd:
-                sitead = classad.parseOne(fd)
-        else:
-            sitead = classad.ClassAd()
 
         blocksWithNoLocations = set()
         blocksWithBannedLocations = set()
@@ -949,7 +943,7 @@
                     msg += " This is expected to result in DESIRED_SITES = %s" % (list(available))
                     self.logger.debug(msg)
 
-            jobgroupDagSpecs, startjobid = self.makeDagSpecs(kwargs['task'], sitead, siteinfo,
+            jobgroupDagSpecs, startjobid = self.makeDagSpecs(kwargs['task'], siteinfo,
                                                              jobgroup, list(jgblocks)[0], availablesites,
                                                              datasites, outfiles, startjobid, parent=parent, stage=stage)
             dagSpecs += jobgroupDagSpecs
@@ -1083,9 +1077,6 @@ def getBlacklistMsg():
             name = "RunJobs{0}.subdag".format(parent)
 
         ## Cache site information
-        with open("site.ad", "w", encoding='utf-8') as fd:
-            fd.write(str(sitead))
-
         with open("site.ad.json", "w", encoding='utf-8') as fd:
             json.dump(siteinfo, fd)
 
@@ -1231,7 +1222,7 @@ def executeInternal(self, *args, **kw):
         params = {}
 
         inputFiles = ['gWMS-CMSRunAnalysis.sh', 'submit_env.sh', 'CMSRunAnalysis.sh', 'cmscp.py', 'cmscp.sh', 'RunJobs.dag', 'Job.submit', 'dag_bootstrap.sh',
-                      'AdjustSites.py', 'site.ad', 'site.ad.json', 'datadiscovery.pkl', 'taskinformation.pkl', 'taskworkerconfig.pkl',
+                      'AdjustSites.py', 'site.ad.json', 'datadiscovery.pkl', 'taskinformation.pkl', 'taskworkerconfig.pkl',
                       'run_and_lumis.tar.gz', 'input_files.tar.gz']
 
         self.extractMonitorFiles(inputFiles, **kw)
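After this change, site.ad.json is the only site cache that DagmanCreator writes. A hedged sketch of what that file plausibly contains, inferred from the structure the diff shows (group_sites/group_datasites dictionaries plus a per-job group id set in makeDagSpecs); the specific site names, group ids, and job ids below are made up:

import json

# Inferred layout: per-group site lists plus a job-id -> group mapping.
siteinfo = {
    'group_sites': {'0': ['T2_CH_CERN', 'T2_US_Nebraska']},   # hypothetical sites
    'group_datasites': {'0': ['T2_CH_CERN']},                 # hypothetical data sites
    '1': 0,   # job '1' belongs to group 0
    '2': 0,   # job '2' belongs to group 0
}

with open("site.ad.json", "w", encoding='utf-8') as fd:
    json.dump(siteinfo, fd)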
src/python/TaskWorker/Actions/PreJob.py (15 changes: 5 additions & 10 deletions)
@@ -412,16 +412,11 @@ def redo_sites(self, new_submit_text, crab_retry, use_resubmit_info):
         else:
             new_submit_text += '+CRAB_SiteWhitelist = {}\n'
         ## Get the list of available sites (the sites where this job could run).
-        if os.path.exists("site.ad.json"):
-            with open("site.ad.json", 'r', encoding='utf-8') as fd:
-                site_info = json.load(fd)
-            group = site_info[self.job_id]
-            available = set(site_info['group_sites'][str(group)])
-            datasites = set(site_info['group_datasites'][str(group)])
-        else:
-            with open("site.ad", 'r', encoding='utf-8') as fd:
-                site_ad = classad.parseOne(fd)
-            available = set(site_ad['Job%s' % (self.job_id)])
+        with open("site.ad.json", 'r', encoding='utf-8') as fd:
+            site_info = json.load(fd)
+        group = site_info[self.job_id]
+        available = set(site_info['group_sites'][str(group)])
+        datasites = set(site_info['group_datasites'][str(group)])
         ## Take the intersection between the available sites and the site whitelist.
         ## This is the new set of available sites.
         if sitewhitelist:
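With the site.ad fallback gone, PreJob has a single lookup path at retry time: map the job id to its site group in site.ad.json, then intersect with any whitelist. A self-contained sketch of that logic; the function name and inputs are illustrative, and the real redo_sites builds a submit-file string rather than returning sets:

import json

def resolve_available_sites(job_id, sitewhitelist=None):
    # Resolve the sites a job may run on from site.ad.json, following the
    # single code path kept in PreJob.redo_sites after this commit.
    with open("site.ad.json", 'r', encoding='utf-8') as fd:
        site_info = json.load(fd)
    group = site_info[job_id]
    available = set(site_info['group_sites'][str(group)])
    datasites = set(site_info['group_datasites'][str(group)])
    # Intersect with the (optional) site whitelist, as the surrounding code does.
    if sitewhitelist:
        available &= set(sitewhitelist)
    return available, datasites

# Example against the hypothetical site.ad.json sketched above:
# resolve_available_sites('1', sitewhitelist=['T2_CH_CERN'])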
