
Source Code for Module madgraph.interface.amcatnlo_run_interface

################################################################################
#
# Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors
#
# This file is a part of the MadGraph5_aMC@NLO project, an application which
# automatically generates Feynman diagrams and matrix elements for arbitrary
# high-energy processes in the Standard Model and beyond.
#
# It is subject to the MadGraph5_aMC@NLO license which should accompany this
# distribution.
#
# For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch
#
################################################################################
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
from __future__ import division

import atexit
import glob
import logging
import math
import optparse
import os
import pydoc
import random
import re
import shutil
import subprocess
import sys
import traceback
import time
import signal
import tarfile
import copy
import datetime
import StringIO
try:
    import cPickle as pickle
except ImportError:
    import pickle

try:
    import readline
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except:
    GNU_SPLITTING = True

root_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path, 'bin'))
# useful shortcut
pjoin = os.path.join
# Special logger for the Cmd Interface
logger = logging.getLogger('madgraph.stdout')        # -> stdout
logger_stderr = logging.getLogger('madgraph.stderr') # -> stderr
try:
    import madgraph
except ImportError:
    aMCatNLO = True
    import internal.extended_cmd as cmd
    import internal.common_run_interface as common_run
    import internal.banner as banner_mod
    import internal.misc as misc
    from internal import InvalidCmd, MadGraph5Error
    import internal.files as files
    import internal.cluster as cluster
    import internal.save_load_object as save_load_object
    import internal.gen_crossxhtml as gen_crossxhtml
    import internal.sum_html as sum_html
    import internal.shower_card as shower_card
    import internal.FO_analyse_card as analyse_card
    import internal.lhe_parser as lhe_parser
else:
    # import from madgraph directory
    aMCatNLO = False
    import madgraph.interface.extended_cmd as cmd
    import madgraph.interface.common_run_interface as common_run
    import madgraph.iolibs.files as files
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
    import madgraph.madevent.sum_html as sum_html
    import madgraph.various.banner as banner_mod
    import madgraph.various.cluster as cluster
    import madgraph.various.misc as misc
    import madgraph.various.shower_card as shower_card
    import madgraph.various.FO_analyse_card as analyse_card
    import madgraph.various.lhe_parser as lhe_parser
    from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error, MG5DIR

class aMCatNLOError(Exception):
    pass

def compile_dir(*arguments):
    """compile the directory p_dir
    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
    this function must not be a class method, so that the compilation
    can be run on multiple cores"""

    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        raise aMCatNLOError, 'wrong number of arguments'
    logger.info('  Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            if test == 'test_ME' or test == 'test_MC':
                test_exe = 'test_soft_col_limits'
            else:
                test_exe = test
            misc.compile([test_exe], cwd=this_dir, job_specs=False)
            input = pjoin(me_dir, '%s_input.txt' % test)
            # this can be improved/better written to handle the output
            misc.call(['./%s' % (test_exe)], cwd=this_dir,
                      stdin=open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
                      close_fds=True)
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            misc.call(['./gensym'], cwd=this_dir,
                      stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
                      close_fds=True)
        # compile madevent_mintMC/mintFO
        misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info('  %s done.' % p_dir)
        return 0
    except MadGraph5Error, msg:
        return msg

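# ------------------------------------------------------------------------------
# Illustrative sketch (not part of the original module). compile_dir accepts a
# single tuple argument precisely so that it can be dispatched through
# multiprocessing, which passes one picklable argument per job. A hypothetical
# driver (me_dir, options, run_mode and p_dirs are assumed to be defined):
#
#   import multiprocessing
#   pool = multiprocessing.Pool(processes=4)
#   jobs = [(me_dir, p_dir, 'aMC@NLO', options, ['test_ME'],
#            'madevent_mintMC', run_mode) for p_dir in p_dirs]
#   results = pool.map(compile_dir, jobs)   # each entry is 0 or an error message
#   pool.close(); pool.join()
#   failures = [r for r in results if r != 0]
# ------------------------------------------------------------------------------
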
def check_compiler(options, block=False):
    """check that the current fortran compiler is gfortran 4.6 or later.
    If block, stops the execution, otherwise just print a warning"""

    msg = 'In order to be able to run MadGraph5_aMC@NLO at NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n' + \
          'Note that you can still run all MadEvent runs without any problem!'
    # first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        if not ''.join(curr_version.split('.')) >= '46':
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))

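# Illustrative note (not part of the original module): the version test above
# compares strings, which works for the 4.x series ('46' <= '49') but would
# misclassify double-digit majors, e.g. ''.join('10.2.0'.split('.')) gives
# '1020', and '1020' >= '46' is False lexicographically. A numeric sketch,
# assuming dotted version strings (gfortran_is_recent is hypothetical):
#
#   def gfortran_is_recent(version, minimum=(4, 6)):
#       """True if a dotted version string is at least `minimum`, numerically."""
#       parts = tuple(int(x) for x in version.split('.')[:2])
#       return parts >= minimum
#
#   gfortran_is_recent('4.6.3')   # True
#   gfortran_is_recent('10.2.0')  # True, unlike the string comparison above
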
#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # suggested list of commands
    next_possibility = {
        'start': [],
    }

    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError
    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbidding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#*         VERSION %s %s %s         *\n" % \
                            (info['version'],
                             (30 - len_version - len_date) * ' ',
                             info['date'])
        else:
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#*         VERSION %s %s                *\n" % \
                        (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#*                    MadGraph5_aMC@NLO                     *\n' + \
        '#*                                                          *\n' + \
        "#*                *                       *                 *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                    * * * * 5 * * * *                     *\n" + \
        "#*                  *        * *        *                   *\n" + \
        "#*                *                       *                 *\n" + \
        "#*                                                          *\n" + \
        "#*                                                          *\n" + \
        info_line + \
        "#*                                                          *\n" + \
        "#*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "#*    https://server06.fynu.ucl.ac.be/projects/madgraph     *\n" + \
        "#*                            and                           *\n" + \
        "#*                 http://amcatnlo.cern.ch                  *\n" + \
        '#*                                                          *\n' + \
        '#************************************************************\n' + \
        '#*                                                          *\n' + \
        '#*               Command File for aMCatNLO                  *\n' + \
        '#*                                                          *\n' + \
        '#*     run as ./bin/aMCatNLO.py filename                    *\n' + \
        '#*                                                          *\n' + \
        '#************************************************************\n'

        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "*                                                          *\n" + \
        "*           W E L C O M E to M A D G R A P H 5             *\n" + \
        "*                   a M C @ N L O                          *\n" + \
        "*                                                          *\n" + \
        "*                 *                       *                *\n" + \
        "*                   *        * *        *                  *\n" + \
        "*                     * * * * 5 * * * *                    *\n" + \
        "*                   *        * *        *                  *\n" + \
        "*                 *                       *                *\n" + \
        "*                                                          *\n" + \
        info_line + \
        "*                                                          *\n" + \
        "*    The MadGraph5_aMC@NLO Development Team - Find us at   *\n" + \
        "*                 http://amcatnlo.cern.ch                  *\n" + \
        "*                                                          *\n" + \
        "*               Type 'help' for in-line help.              *\n" + \
        "*                                                          *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interrupt"""
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except:
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit', 'quit', 'EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            misc.sprint('self.update_status fails', log=logger)
            pass

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)


#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """ The Series of help routines for the aMCatNLOCmd"""
    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info("   One of the following arguments is required:")
        logger.info("   Path should be the path of a valid banner.")
        logger.info("   RUN should be the name of a run of the current directory")
        self.run_options_help([('-f', 'answer all questions by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command;
        just call help_launch"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()

    def help_open(self):
        logger.info("syntax: open FILE")
        logger.info("-- open a file with the appropriate editor.")
        logger.info('   If FILE is one of index.html, param_card.dat, run_card.dat')
        logger.info('   the path to the last created/used directory is used')

    def run_options_help(self, data):
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info('      %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info("      Note that those options will be kept for the current session")
        logger.info("      --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info("      --multicore : Run in multi-core configuration")
        logger.info("      --nb_core=X : limit the number of cores to use to X.")


#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routines for the aMCatNLOCmd"""
    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd, 'Directory %s does not exist' % \
                            pjoin(os.getcwd(), 'Events', args[0])

        self.set_run_name(args[0], level='shower')
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the arguments for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retrying to read the configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No MadAnalysis path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to the td directory correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently defined. Please add this information.')
            args.append('all')
            return

        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently defined. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown option %s' % arg)

    def check_pgs(self, arg):
        """Check the arguments for the pgs command
        syntax: pgs [NAME]
        Note that other options are already removed at this point
        """

        # If no pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retrying to read the configuration file to find the pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
            os.path.exists(pjoin(self.options['pythia-pgs_path'], 'src')):
            error_msg = 'No pythia-pgs path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available.
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock

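    # Illustrative example (not part of the original module) of the '--tag='
    # handling above, which pulls an optional tag out of the raw argument list:
    #
    #   arg = ['run_01', '--tag=special']
    #   tag = [a for a in arg if a.startswith('--tag=')]   # ['--tag=special']
    #   arg.remove(tag[0])
    #   tag = tag[0][6:]                                   # arg == ['run_01'], tag == 'special'
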
    def check_delphes(self, arg):
        """Check the arguments for the delphes command
        syntax: delphes [NAME]
        Note that other options are already removed at this point
        """

        # If no delphes path
        if not self.options['delphes_path']:
            logger.info('Retrying to read the configuration file to find the delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set. '
            error_msg += 'Please use the set command to define the path and retry. '
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently defined. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file pythia_events.hep currently available.
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s: %s ' \
                       % (self.run_name, prev_tag,
                          pjoin(self.me_dir, 'Events', self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

        return lock

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'

    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]

        if os.path.exists(args[0]):
            type = 'banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associated to this name and tag.')
        else:
            name = args[0]
            type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir, 'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associated to this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                # list the tags and propose them to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            try:
                self.exec_cmd('remove %s all banner -f' % run_name)
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                                                            (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    self.exec_cmd('remove %s all banner -f' % run_name)
                except Exception:
                    pass
                self.set_run_name(name)

    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'auto']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
                    ' are not compatible. Please choose one.'
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'

    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
            mode = args[0]

        # check for incompatible options/modes


#===============================================================================
# CompleteForCmd
#===============================================================================
class CompleteForCmd(CheckValidForCmd):
    """ The Series of completion routines for the aMCatNLOCmd"""
    def complete_launch(self, text, line, begidx, endidx):
        """auto-completion for launch command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO', 'aMC@NLO', 'aMC@LO'], line)
        elif len(args) == 2 and line[begidx-1] == '@':
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _launch_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
        "Complete the banner run command"
        try:
            args = self.split_arg(line[0:begidx], error=False)

            if args[-1].endswith(os.path.sep):
                return self.path_completion(text,
                                            os.path.join('.', *[a for a in args \
                                                          if a.endswith(os.path.sep)]))

            if len(args) > 1:
                # only options are possible
                tags = misc.glob('%s_*_banner.txt' % args[1], pjoin(self.me_dir, 'Events', args[1]))
                tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]

                if args[-1] != '--tag=':
                    tags = ['--tag=%s' % t for t in tags]
                else:
                    return self.list_completion(text, tags)
                return self.list_completion(text, tags + ['--name=', '-f'], line)

            # First argument
            possibilites = {}

            comp = self.path_completion(text, os.path.join('.', *[a for a in args \
                                                            if a.endswith(os.path.sep)]))
            if os.path.sep in line:
                return comp
            else:
                possibilites['Path from ./'] = comp

            run_list = misc.glob(pjoin('*', '*_banner.txt'), pjoin(self.me_dir, 'Events'))
            run_list = [n.rsplit('/', 2)[1] for n in run_list]
            possibilites['RUN Name'] = self.list_completion(text, run_list)

            return self.deal_multiple_categories(possibilites, formatting)

        except Exception, error:
            print error

    def complete_compile(self, text, line, begidx, endidx):
        """auto-completion for compile command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['FO', 'MC'], line)
        else:
            opts = []
            for opt in _compile_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_calculate_xsect(self, text, line, begidx, endidx):
        """auto-completion for calculate_xsect command"""

        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return mode
            return self.list_completion(text, ['LO', 'NLO'], line)
        else:
            opts = []
            for opt in _calculate_xsect_parser.option_list:
                opts += opt._long_opts + opt._short_opts
            return self.list_completion(text, opts, line)

    def complete_generate_events(self, text, line, begidx, endidx):
        """auto-completion for generate_events command;
        just calls the completion for launch"""
        return self.complete_launch(text, line, begidx, endidx)

    def complete_shower(self, text, line, begidx, endidx):
        args = self.split_arg(line[0:begidx])
        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events.lhe.gz'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

    def complete_plot(self, text, line, begidx, endidx):
        """ Complete the plot command """

        args = self.split_arg(line[0:begidx], error=False)

        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events.lhe*'), pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1

        if len(args) > 1:
            return self.list_completion(text, self._plot_mode)

    def complete_pgs(self, text, line, begidx, endidx):
        "Complete the pgs command"
        args = self.split_arg(line[0:begidx], error=False)
        if len(args) == 1:
            # return valid run_name
            data = misc.glob(pjoin('*', 'events_*.hep.gz'),
                             pjoin(self.me_dir, 'Events'))
            data = [n.rsplit('/', 2)[1] for n in data]
            tmp1 = self.list_completion(text, data)
            if not self.run_name:
                return tmp1
            else:
                tmp2 = self.list_completion(text, self._run_options + ['-f',
                                            '--tag=', '--no_default'], line)
                return tmp1 + tmp2
        else:
            return self.list_completion(text, self._run_options + ['-f',
                                        '--tag=', '--no_default'], line)

    complete_delphes = complete_pgs

class aMCatNLOAlreadyRunning(InvalidCmd):
    pass

class AskRunNLO(cmd.ControlSwitch):

    to_control = [('order', 'Type of perturbative computation'),
                  ('fixed_order', 'No MC@[N]LO matching / event generation'),
                  ('shower', 'Shower the generated events'),
                  ('madspin', 'Decay onshell particles'),
                  ('reweight', 'Add weights to events for new hypotheses'),
                  ('madanalysis', 'Run MadAnalysis5 on the events generated')]

    quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']
    def __init__(self, question, line_args=[], mode=None, force=False,
                 *args, **opt):

        self.check_available_module(opt['mother_interface'].options)
        self.me_dir = opt['mother_interface'].me_dir
        self.last_mode = opt['mother_interface'].last_mode
        self.proc_characteristics = opt['mother_interface'].proc_characteristics
        self.run_card = banner_mod.RunCard(pjoin(self.me_dir, 'Cards', 'run_card.dat'))
        super(AskRunNLO, self).__init__(self.to_control, opt['mother_interface'],
                                        *args, **opt)
    @property
    def answer(self):

        out = super(AskRunNLO, self).answer
        if out['shower'] == 'HERWIG7':
            out['shower'] = 'HERWIGPP'

        if out['shower'] not in self.get_allowed('shower') or out['shower'] == 'OFF':
            out['runshower'] = False
        else:
            out['runshower'] = True
        return out

    def check_available_module(self, options):

        self.available_module = set()
        if options['madanalysis5_path']:
            self.available_module.add('MA5')
        if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
            self.available_module.add('MadSpin')
            if misc.has_f2py() or options['f2py_compiler']:
                self.available_module.add('reweight')
        if options['pythia8_path']:
            self.available_module.add('PY8')
        if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
            self.available_module.add('HW7')

    #
    # shortcuts
    #
    def ans_lo(self, value):
        """function called if the user types lo=value, or lo (then value is None)"""

        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'ON'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: lo=%s' % value)

    def ans_nlo(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'ON'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: nlo=%s' % value)

    def ans_amc__at__nlo(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'ON')
        else:
            logger.warning('Invalid command: aMC@NLO=%s' % value)

    def ans_amc__at__lo(self, value):
        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'ON')
        else:
            logger.warning('Invalid command: aMC@LO=%s' % value)

    def ans_noshower(self, value):
        if value is None:
            self.switch['order'] = 'NLO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: noshower=%s' % value)

    def ans_onlyshower(self, value):
        if value is None:
            self.switch['mode'] = 'onlyshower'
            self.switch['madspin'] = 'OFF'
            self.switch['reweight'] = 'OFF'
        else:
            logger.warning('Invalid command: onlyshower=%s' % value)

    def ans_noshowerlo(self, value):
        if value is None:
            self.switch['order'] = 'LO'
            self.switch['fixed_order'] = 'OFF'
            self.set_switch('shower', 'OFF')
        else:
            logger.warning('Invalid command: noshowerlo=%s' % value)

    def ans_madanalysis5(self, value):
        """shortcut madanalysis5 -> madanalysis"""

        if value is None:
            return self.onecmd('madanalysis')
        else:
            self.set_switch('madanalysis', value)

    #
    # ORDER
    #
    def get_allowed_order(self):
        return ["LO", "NLO"]

    def set_default_order(self):

        if self.last_mode in ['LO', 'aMC@LO', 'noshowerLO']:
            self.switch['order'] = 'LO'
            return
        self.switch['order'] = 'NLO'

    def set_switch_off_order(self):
        return
    #
    # Fixed order
    #
    def get_allowed_fixed_order(self):
        """ """
        if self.proc_characteristics['ninitial'] == 1:
            return ['ON']
        else:
            return ['ON', 'OFF']

    def set_default_fixed_order(self):

        if self.last_mode in ['LO', 'NLO']:
            self.switch['fixed_order'] = 'ON'
            return
        self.switch['fixed_order'] = 'OFF'

    def color_for_fixed_order(self, switch_value):

        if switch_value in ['OFF']:
            return self.green % switch_value
        else:
            return self.red % switch_value

    def color_for_shower(self, switch_value):

        if switch_value in ['ON']:
            return self.green % switch_value
        elif switch_value in self.get_allowed('shower'):
            return self.green % switch_value
        else:
            return self.red % switch_value

    def consistency_fixed_order_shower(self, vfix, vshower):
        """ consistency_XX_YY(val_XX, val_YY)
            -> XX is the new key set by the user to a new value val_XX
            -> YY is another key set by the user.
            -> return value should be None or "replace_YY"
        """

        if vfix == 'ON' and vshower != 'OFF':
            return 'OFF'
        return None

    consistency_fixed_order_madspin = consistency_fixed_order_shower
    consistency_fixed_order_reweight = consistency_fixed_order_shower

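    # Illustrative note (not part of the original module): these hooks follow
    # the ControlSwitch protocol described in the docstring above. For example,
    # when the user sets fixed_order='ON' while shower is 'PYTHIA8',
    # consistency_fixed_order_shower('ON', 'PYTHIA8') returns 'OFF', i.e. the
    # replacement value to assign to the other key (here: shower); returning
    # None means the two settings are already compatible.
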
    def consistency_fixed_order_madanalysis(self, vfix, vma5):

        if vfix == 'ON' and vma5 == 'ON':
            return 'OFF'
        return None

    def consistency_shower_fixed_order(self, vshower, vfix):
        """ consistency_XX_YY(val_XX, val_YY)
            -> XX is the new key set by the user to a new value val_XX
            -> YY is another key set by the user.
            -> return value should be None or "replace_YY"
        """

        if vshower != 'OFF' and vfix == 'ON':
            return 'OFF'
        return None

    consistency_madspin_fixed_order = consistency_shower_fixed_order
    consistency_reweight_fixed_order = consistency_shower_fixed_order
    consistency_madanalysis_fixed_order = consistency_shower_fixed_order

    #
    # Shower
    #
    def get_allowed_shower(self):
        """ """

        if hasattr(self, 'allowed_shower'):
            return self.allowed_shower

        if self.proc_characteristics['ninitial'] == 1:
            self.allowed_shower = ['OFF']
            return ['OFF']
        else:
            allowed = ['HERWIG6', 'OFF', 'PYTHIA6Q', 'PYTHIA6PT']
            if 'PY8' in self.available_module:
                allowed.append('PYTHIA8')
            if 'HW7' in self.available_module:
                allowed.append('HERWIGPP')

            self.allowed_shower = allowed

        return allowed

    def check_value_shower(self, value):
        """ """

        if value.upper() in self.get_allowed_shower():
            return True
        if value.upper() in ['PYTHIA8', 'HERWIGPP']:
            return True
        if value.upper() == 'ON':
            return self.run_card['parton_shower']
        if value.upper() in ['P8', 'PY8', 'PYTHIA_8']:
            return 'PYTHIA8'
        if value.upper() in ['PY6', 'P6', 'PY6PT', 'PYTHIA_6', 'PYTHIA_6PT', 'PYTHIA6PT', 'PYTHIA6_PT']:
            return 'PYTHIA6PT'
        if value.upper() in ['PY6Q', 'PYTHIA_6Q', 'PYTHIA6Q', 'PYTHIA6_Q']:
            return 'PYTHIA6Q'
        if value.upper() in ['HW7', 'HERWIG7']:
            return 'HERWIG7'
        if value.upper() in ['HW++', 'HWPP', 'HERWIG++']:
            return 'HERWIGPP'
        if value.upper() in ['HW6', 'HERWIG_6']:
            return 'HERWIG6'

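    # Illustrative examples (not part of the original module) of the alias
    # normalisation above, assuming a run_card whose parton_shower is HERWIG6:
    #
    #   check_value_shower('py8')        # -> 'PYTHIA8'
    #   check_value_shower('HERWIG++')   # -> 'HERWIGPP'
    #   check_value_shower('on')         # -> 'HERWIG6' (taken from the run_card)
    #   check_value_shower('bogus')      # -> None (falls through: invalid value)
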
    def set_default_shower(self):

        if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']:
            self.switch['shower'] = 'OFF'
            return

        if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
            self.switch['shower'] = self.run_card['parton_shower']
            #self.switch['shower'] = 'ON'
            self.switch['fixed_order'] = "OFF"
        else:
            self.switch['shower'] = 'OFF'

    def consistency_shower_madanalysis(self, vshower, vma5):
        """MA5 is only possible with (N)LO+PS, i.e. if a shower is run"""

        if vshower == 'OFF' and vma5 == 'ON':
            return 'OFF'
        return None

    def consistency_madanalysis_shower(self, vma5, vshower):

        if vma5 == 'ON' and vshower == 'OFF':
            return 'ON'
        return None

    def get_cardcmd_for_shower(self, value):
        """adapt the run_card according to this setup. return the list of cmd to run"""

        if value != 'OFF':
            return ['set parton_shower %s' % self.switch['shower']]
        return []

    #
    # madspin
    #
    def get_allowed_madspin(self):
        """ """

        if hasattr(self, 'allowed_madspin'):
            return self.allowed_madspin

        self.allowed_madspin = []

        if 'MadSpin' not in self.available_module:
            return self.allowed_madspin
        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('MadSpin')
            self.allowed_madspin = ['OFF']
            return self.allowed_madspin
        else:
            self.allowed_madspin = ['OFF', 'ON', 'onshell']
            return self.allowed_madspin

    def check_value_madspin(self, value):
        """handle aliases and valid options not present in get_allowed_madspin;
        remember that this mode should always be OFF for 1>N ('ON' is not an allowed value)"""

        if value.upper() in self.get_allowed_madspin():
            if value == value.upper():
                return True
            else:
                return value.upper()
        elif value.lower() in self.get_allowed_madspin():
            if value == value.lower():
                return True
            else:
                return value.lower()

        if 'MadSpin' not in self.available_module or \
           'ON' not in self.get_allowed_madspin():
            return False

        if value.lower() in ['madspin', 'full']:
            return 'full'
        elif value.lower() in ['none']:
            return 'none'

    def set_default_madspin(self):

        if 'MadSpin' in self.available_module:
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
                self.switch['madspin'] = 'ON'
            else:
                self.switch['madspin'] = 'OFF'
        else:
            self.switch['madspin'] = 'Not Avail.'

    def get_cardcmd_for_madspin(self, value):
        """set some commands to run before allowing the user to modify the cards."""

        if value == 'onshell':
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"]
        elif value in ['full', 'madspin']:
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"]
        elif value == 'none':
            return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"]
        else:
            return []

    #
    # reweight
    #
    def get_allowed_reweight(self):
        """set the valid (visible) options for reweight"""

        if hasattr(self, 'allowed_reweight'):
            return getattr(self, 'allowed_reweight')

        self.allowed_reweight = []
        if 'reweight' not in self.available_module:
            return self.allowed_reweight
        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('reweight')
            self.allowed_reweight.append('OFF')
            return self.allowed_reweight
        else:
            self.allowed_reweight = ['OFF', 'ON', 'NLO', 'NLO_TREE', 'LO']
            return self.allowed_reweight

    def set_default_reweight(self):
        """initialise the switch for reweight"""

        if 'reweight' in self.available_module:
            if os.path.exists(pjoin(self.me_dir, 'Cards', 'reweight_card.dat')):
                self.switch['reweight'] = 'ON'
            else:
                self.switch['reweight'] = 'OFF'
        else:
            self.switch['reweight'] = 'Not Avail.'

    def get_cardcmd_for_reweight(self, value):
        """adapt the run_card according to this setup. return the list of cmd to run"""

        if value == 'LO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"]
        elif value == 'NLO':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO",
                    "set store_rwgt_info T"]
        elif value == 'NLO_TREE':
            return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree",
                    "set store_rwgt_info T"]
        return []

    #
    # MadAnalysis5
    #
    def get_allowed_madanalysis(self):

        if hasattr(self, 'allowed_madanalysis'):
            return self.allowed_madanalysis

        self.allowed_madanalysis = []

        if 'MA5' not in self.available_module:
            return self.allowed_madanalysis

        if self.proc_characteristics['ninitial'] == 1:
            self.available_module.remove('MA5')
            self.allowed_madanalysis = ['OFF']
            return self.allowed_madanalysis
        else:
            self.allowed_madanalysis = ['OFF', 'ON']
            return self.allowed_madanalysis

    def set_default_madanalysis(self):
        """initialise the switch for madanalysis"""

        if 'MA5' not in self.available_module:
            self.switch['madanalysis'] = 'Not Avail.'
        elif os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')):
            self.switch['madanalysis'] = 'ON'
        else:
            self.switch['madanalysis'] = 'OFF'

    def check_value_madanalysis(self, value):
        """check that an entry is valid. return the valid entry in case of shortcut"""

        if value.upper() in self.get_allowed('madanalysis'):
            return True
        value = value.lower()
        if value == 'hadron':
            return 'ON' if 'ON' in self.get_allowed_madanalysis() else False
        else:
            return False


#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T', '.true.', True, 'true']
    # Options and formats available
    _run_options = ['--cluster', '--multicore', '--nb_core=', '--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level', 'fortran_compiler', 'cpp_compiler', 'timeout']
    _plot_mode = ['all', 'parton', 'shower', 'pgs', 'delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower': ['generate_events [OPTIONS]']
    }

    ############################################################################
    def __init__(self, me_dir=None, options={}, *completekey, **stdin):
        """add information to the cmd"""

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>" % os.path.basename(pjoin(self.me_dir))

        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that the compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)

    ############################################################################
    def do_shower(self, line):
        """run the shower on a given parton level file"""
        argss = self.split_arg(line)
        (options, argss) = _launch_parser.parse_args(argss)
        # check argument validity and normalise argument
        options = options.__dict__
        options['reweightonly'] = False
        self.check_shower(argss, options)
        evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
        self.ask_run_configuration('onlyshower', options)
        self.run_mcatnlo(evt_file, options)

        self.update_status('', level='all', update_results=True)

    ################################################################################
    def do_plot(self, line):
        """Create the plots for a given run"""

        # Since in principle, all plots are already done automatically
        args = self.split_arg(line)
        # Check argument's validity
        self.check_plot(args)
        logger.info('plot for run %s' % self.run_name)

        if not self.force:
            self.ask_edit_cards([], args, plot=True)

        if any([arg in ['parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                logger.info('Found events.lhe file for run %s' % self.run_name)
                shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
                self.create_plot('parton')
                shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
                misc.gzip(filename)

        if any([arg in ['all', 'parton'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
            if os.path.exists(filename):
                logger.info('Found MADatNLO.top file for run %s' % \
                            self.run_name)
                output = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton.html')
                plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')

                if not os.path.isdir(plot_dir):
                    os.makedirs(plot_dir)
                top_file = pjoin(plot_dir, 'plots.top')
                files.cp(filename, top_file)
                madir = self.options['madanalysis_path']
                tag = self.run_card['run_tag']
                td = self.options['td_path']
                misc.call(['%s/plot' % self.dirbin, madir, td],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=plot_dir)

                misc.call(['%s/plot_page-pl' % self.dirbin,
                           os.path.basename(plot_dir),
                           'parton'],
                          stdout=open(pjoin(plot_dir, 'plot.log'), 'a'),
                          stderr=subprocess.STDOUT,
                          cwd=pjoin(self.me_dir, 'HTML', self.run_name))
                shutil.move(pjoin(self.me_dir, 'HTML', self.run_name, 'plots.html'),
                            output)

                os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))

        if any([arg in ['all', 'shower'] for arg in args]):
            filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
            if len(filenames) != 1:
                filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
                if len(filenames) != 1:
                    logger.info('No shower level file found for run %s' % \
                                self.run_name)
                    return
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

                if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
                    if aMCatNLO and not self.options['mg5_path']:
                        raise aMCatNLOError("plotting NLO HEP file needs MG5 utilities")

                    files.cp(pjoin(self.options['mg5_path'], 'Template', 'LO', 'Cards', 'pythia_card_default.dat'),
                             pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
                self.run_hep2lhe()
            else:
                filename = filenames[0]
                misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

            self.create_plot('shower')
            lhe_file_name = filename.replace('.hep.gz', '.lhe')
            shutil.move(pjoin(self.me_dir, 'Events', 'pythia_events.lhe'),
                        lhe_file_name)
            misc.gzip(lhe_file_name)

        if any([arg in ['all', 'pgs'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_pgs_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                self.create_plot('PGS')
                misc.gzip(filename)
            else:
                logger.info('No valid files for pgs plot')

        if any([arg in ['all', 'delphes'] for arg in args]):
            filename = pjoin(self.me_dir, 'Events', self.run_name,
                             '%s_delphes_events.lhco' % self.run_tag)
            if os.path.exists(filename + '.gz'):
                misc.gunzip(filename)
            if os.path.exists(filename):
                #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco'))
                self.create_plot('Delphes')
                #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename)
                misc.gzip(filename)
            else:
                logger.info('No valid files for delphes plot')

    ############################################################################
    def do_calculate_xsect(self, line):
        """Main commands: calculates the LO/NLO cross-section, using madevent_mintFO;
        this function wraps the do_launch one"""

        self.start_time = time.time()
        argss = self.split_arg(line)
        # check argument validity and normalise argument
        (options, argss) = _calculate_xsect_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['parton'] = True
        self.check_calculate_xsect(argss, options)
        self.do_launch(line, options, argss)

    ############################################################################
    def do_banner_run(self, line):
        """Make a run from the banner file"""

        args = self.split_arg(line)
        # check the validity of the arguments
        self.check_banner_run(args)

        # Remove previous cards
        for name in ['shower_card.dat', 'madspin_card.dat']:
            try:
                os.remove(pjoin(self.me_dir, 'Cards', name))
            except Exception:
                pass

        banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

        # Check if we want to modify the run
        if not self.force:
            ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y', 'n'])
            if ans == 'n':
                self.force = True

        # Compute run mode:
        if self.force:
            mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin': False, 'shower': True}
            banner = banner_mod.Banner(args[0])
            for line in banner['run_settings']:
                if '=' in line:
                    mode, value = [t.strip() for t in line.split('=')]
                    mode_status[mode] = value
        else:
            mode_status = {}

        # Call Generate events
        self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                       switch=mode_status)

    ############################################################################
    def do_generate_events(self, line):
        """Main commands: generate events;
        this function just wraps the do_launch one"""
        self.do_launch(line)

    ############################################################################
    def do_treatcards(self, line, amcatnlo=True, mode=''):
        """Advanced commands: this is for creating the correct run_card.inc from the NLO format"""
        # check that no 'Auto' is present in the file
        self.check_param_card(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        # propagate the FO_card entry FO_LHE_weight_ratio to the run_card.
        # this variable is system-only in the run_card.
        # cannot be done in EditCard since this parameter is not written in the
        # run_card directly.
        if mode in ['LO', 'NLO']:
            name = 'fo_lhe_weight_ratio'
            FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat'))
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)
            name = 'fo_lhe_postprocessing'
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)

        return super(aMCatNLOCmd, self).do_treatcards(line, amcatnlo)

    ############################################################################
    def set_configuration(self, amcatnlo=True, **opt):
        """assign all configuration variables from file;
        loop over the different config files if config_file is not defined"""
        return super(aMCatNLOCmd, self).set_configuration(amcatnlo=amcatnlo, **opt)

    ############################################################################
1611 - def do_launch(self, line, options={}, argss=[], switch={}):
1612 """Main commands: launch the full chain 1613 options and args are relevant if the function is called from other 1614 functions, such as generate_events or calculate_xsect 1615 mode gives the list of switch needed for the computation (usefull for banner_run) 1616 """ 1617 1618 if not argss and not options: 1619 self.start_time = time.time() 1620 argss = self.split_arg(line) 1621 # check argument validity and normalise argument 1622 (options, argss) = _launch_parser.parse_args(argss) 1623 options = options.__dict__ 1624 self.check_launch(argss, options) 1625 1626 1627 if 'run_name' in options.keys() and options['run_name']: 1628 self.run_name = options['run_name'] 1629 # if a dir with the given run_name already exists 1630 # remove it and warn the user 1631 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)): 1632 logger.warning('Removing old run information in \n'+ 1633 pjoin(self.me_dir, 'Events', self.run_name)) 1634 files.rm(pjoin(self.me_dir, 'Events', self.run_name)) 1635 self.results.delete_run(self.run_name) 1636 else: 1637 self.run_name = '' # will be set later 1638 1639 if options['multicore']: 1640 self.cluster_mode = 2 1641 elif options['cluster']: 1642 self.cluster_mode = 1 1643 1644 if not switch: 1645 mode = argss[0] 1646 1647 if mode in ['LO', 'NLO']: 1648 options['parton'] = True 1649 mode = self.ask_run_configuration(mode, options) 1650 else: 1651 mode = self.ask_run_configuration('auto', options, switch) 1652 1653 self.results.add_detail('run_mode', mode) 1654 1655 self.update_status('Starting run', level=None, update_results=True) 1656 1657 if self.options['automatic_html_opening']: 1658 misc.open_file(os.path.join(self.me_dir, 'crossx.html')) 1659 self.options['automatic_html_opening'] = False 1660 1661 if '+' in mode: 1662 mode = mode.split('+')[0] 1663 self.compile(mode, options) 1664 evt_file = self.run(mode, options) 1665 1666 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']: 1667 logger.info('No event file generated: grids have been set-up with a '\ 1668 'relative precision of %s' % self.run_card['req_acc']) 1669 return 1670 1671 if not mode in ['LO', 'NLO']: 1672 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')) 1673 1674 if self.run_card['systematics_program'] == 'systematics': 1675 self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments']))) 1676 1677 self.exec_cmd('reweight -from_cards', postcmd=False) 1678 self.exec_cmd('decay_events -from_cards', postcmd=False) 1679 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe') 1680 1681 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \ 1682 and not options['parton']: 1683 self.run_mcatnlo(evt_file, options) 1684 self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False) 1685 1686 elif mode == 'noshower': 1687 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 1688 Please, shower the Les Houches events before using them for physics analyses.""") 1689 1690 1691 self.update_status('', level='all', update_results=True) 1692 if self.run_card['ickkw'] == 3 and \ 1693 (mode in ['noshower'] or \ 1694 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))): 1695 logger.warning("""You are running with FxFx merging enabled. 
1696 To be able to merge samples of various multiplicities without double counting, 1697 you have to remove some events after showering 'by hand'. 1698 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 1699 1700 self.store_result() 1701 #check if the param_card defines a scan. 1702 if self.param_card_iterator: 1703 cpath = pjoin(self.me_dir,'Cards','param_card.dat') 1704 param_card_iterator = self.param_card_iterator 1705 self.param_card_iterator = [] #avoid to next generate go trough here 1706 param_card_iterator.store_entry(self.run_name, self.results.current['cross'], 1707 error=self.results.current['error'], 1708 param_card_path=cpath) 1709 orig_name = self.run_name 1710 #go trough the scal 1711 with misc.TMP_variable(self, 'allow_notification_center', False): 1712 for i,card in enumerate(param_card_iterator): 1713 card.write(cpath) 1714 self.check_param_card(cpath, dependent=True) 1715 if not options['force']: 1716 options['force'] = True 1717 if options['run_name']: 1718 options['run_name'] = '%s_%s' % (orig_name, i+1) 1719 if not argss: 1720 argss = [mode, "-f"] 1721 elif argss[0] == "auto": 1722 argss[0] = mode 1723 self.do_launch("", options=options, argss=argss, switch=switch) 1724 #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False) 1725 param_card_iterator.store_entry(self.run_name, self.results.current['cross'], 1726 error=self.results.current['error'], 1727 param_card_path=cpath) 1728 #restore original param_card 1729 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1730 name = misc.get_scan_name(orig_name, self.run_name) 1731 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name) 1732 logger.info("write all cross-section results in %s" % path, '$MG:BOLD') 1733 param_card_iterator.write_summary(path) 1734 1735 if self.allow_notification_center: 1736 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir), 1737 '%s: %s +- %s ' % (self.results.current['run_name'], 1738 self.results.current['cross'], 1739 self.results.current['error']))
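
    # Illustration (not part of the original interface): a minimal sketch of
    # the run-name sequencing used by the param_card scan loop above. The
    # base name 'scan_run' and the card stand-ins are made up for the example.
    @staticmethod
    def _example_scan_run_names():
        orig_name = 'scan_run'
        cards = ['card_a', 'card_b', 'card_c']   # stand-ins for the param_cards
        # the first pass keeps the original name; each relaunch appends a
        # suffix, mirroring options['run_name'] = '%s_%s' % (orig_name, i+1)
        return [orig_name] + ['%s_%s' % (orig_name, i + 1)
                              for i, _ in enumerate(cards)]
        # -> ['scan_run', 'scan_run_1', 'scan_run_2', 'scan_run_3']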

    ############################################################################
    def do_compile(self, line):
        """Advanced commands: just compile the executables"""
        argss = self.split_arg(line)
        # check argument validity and normalise arguments
        (options, argss) = _compile_parser.parse_args(argss)
        options = options.__dict__
        options['reweightonly'] = False
        options['nocompile'] = False
        self.check_compile(argss, options)

        mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
        self.ask_run_configuration(mode, options)
        self.compile(mode, options)

        self.update_status('', level='all', update_results=True)

    def update_random_seed(self):
        """Update the random number seed with the value from the run_card.
        If this is 0, update the seed to a fresh one."""
        iseed = self.run_card['iseed']
        if iseed == 0:
            randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
            iseed = int(randinit.read()[2:]) + 1
            randinit.close()
        randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
        randinit.write('r=%d' % iseed)
        randinit.close()

    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created"""
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for the second step in applgrid mode, do only the event generation step
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        # find and keep track of all the jobs
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        p_dirs = [d for d in
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # clean previous results
        self.clean_previous_results(options, p_dirs, folder_names[mode])

        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            event_norm = self.run_card['event_norm']
            nevents = self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        if mode in ['LO', 'NLO']:
            # this is for fixed-order runs
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # Re-distribute the grids for the 2nd step of the applgrid running
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options, mode_dict[mode], p_dirs)

            # create a list of dictionaries "jobs_to_run" with all the
            # jobs that need to be run
            integration_step = -1
            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(options, p_dirs,
                    req_acc, mode_dict[mode], integration_step, mode, fixed_order=True)
            self.prepare_directories(jobs_to_run, mode)

            # loop over the integration steps. After every step, check
            # if we have the required accuracy. If this is the case,
            # stop running, else do another step.
            while True:
                integration_step = integration_step + 1
                self.run_all_jobs(jobs_to_run, integration_step)
                self.collect_log_files(jobs_to_run, integration_step)
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run,
                        jobs_to_collect, integration_step, mode, mode_dict[mode])
                if not jobs_to_run:
                    # there are no more jobs to run (jobs_to_run is empty)
                    break
            # We are done.
            self.finalise_run_FO(folder_names[mode], jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            if nevents == 0 and req_acc < 0:
                raise aMCatNLOError('Cannot determine the required accuracy from the number '
                                    'of events, because 0 events are requested. Please set '
                                    'the "req_acc" parameter in the run_card to a value '
                                    'between 0 and 1')
            elif req_acc > 1 or req_acc == 0:
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '
                                    'be larger than 0 and smaller than 1, '
                                    'or set to -1 for automatic determination. Current '
                                    'value is %f' % req_acc)
            # For more than 1M events, set req_acc to 0.001 (except when it was
            # explicitly set in the run_card)
            elif req_acc < 0 and nevents > 1000000:
                req_acc = 0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '
                                    'Please use one of the following: %s'
                                    % (shower, ', '.join(shower_list)))

            # check that PYTHIA6PT is not used for processes with FSR
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower', 'noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run, jobs_to_collect, integration_step = self.create_jobs_to_run(options, p_dirs,
                    req_acc, mode_dict[mode], 1, mode, fixed_order=False)
            # Make sure to update all the jobs to be ready for the event generation step
            if options['only_generation']:
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run,
                        jobs_to_collect, 1, mode, mode_dict[mode], fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run, mode, fixed_order=False)

            # Main loop over the three MINT generation steps:
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run, mint_step, fixed_order=False)
                self.collect_log_files(jobs_to_run, mint_step)
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc, jobs_to_run,
                        jobs_to_collect, mint_step, mode, mode_dict[mode], fixed_order=False)
                if mint_step + 1 == 2 and nevents == 0:
                    self.print_summary(options, 2, mode)
                    return

            # Sanity check on the event files. On error, the jobs are resubmitted
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # for a cluster run, wait 10 sec so that the event files are
                # transferred back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm = self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

    def create_jobs_to_run(self, options, p_dirs, req_acc, run_mode,
                           integration_step, mode, fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run"""
        jobs_to_run = []
        if not options['only_generation']:
            # Fresh new run. Check all the P*/channels.txt files (created by
            # the 'gensym' executable) to set up all the jobs using the
            # default inputs.
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir, 'SubProcesses', p_dir, 'channels.txt')) as chan_file:
                        channels = chan_file.readline().split()
                except IOError:
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                if fixed_order:
                    lch = len(channels)
                    maxchannels = 20   # combine up to 20 channels in a single job
                    if self.run_card['iappl'] != 0: maxchannels = 1
                    njobs = (int(lch/maxchannels) + 1 if lch % maxchannels != 0
                             else int(lch/maxchannels))
                    for nj in range(1, njobs + 1):
                        job = {}
                        job['p_dir'] = p_dir
                        job['channel'] = str(nj)
                        job['nchans'] = (int(lch/njobs) + 1 if nj <= lch % njobs else int(lch/njobs))
                        job['configs'] = ' '.join(channels[:job['nchans']])
                        del channels[:job['nchans']]
                        job['split'] = 0
                        if req_acc == -1:
                            job['accuracy'] = 0
                            job['niters'] = niters
                            job['npoints'] = npoints
                        elif req_acc > 0:
                            job['accuracy'] = 0.05
                            job['niters'] = 6
                            job['npoints'] = -1
                        else:
                            raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '
                                                'between 0 and 1 or set it equal to -1.')
                        job['mint_mode'] = 0
                        job['run_mode'] = run_mode
                        job['wgt_frac'] = 1.0
                        job['wgt_mult'] = 1.0
                        jobs_to_run.append(job)
                    if channels:
                        raise aMCatNLOError('channels is not empty %s' % channels)
                else:
                    for channel in channels:
                        job = {}
                        job['p_dir'] = p_dir
                        job['channel'] = channel
                        job['split'] = 0
                        job['accuracy'] = 0.03
                        job['niters'] = 12
                        job['npoints'] = -1
                        job['mint_mode'] = 0
                        job['run_mode'] = run_mode
                        job['wgt_frac'] = 1.0
                        jobs_to_run.append(job)
            jobs_to_collect = copy.copy(jobs_to_run)   # these are all the jobs
        else:
            # if options['only_generation'] is True, just read the current jobs from file
            try:
                with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'rb') as f:
                    jobs_to_collect = pickle.load(f)
                    for job in jobs_to_collect:
                        job['dirname'] = pjoin(self.me_dir, 'SubProcesses',
                                               job['dirname'].rsplit('/SubProcesses/', 1)[1])
                jobs_to_run = copy.copy(jobs_to_collect)
            except:
                raise aMCatNLOError('Cannot reconstruct saved job status in %s' %
                                    pjoin(self.me_dir, 'SubProcesses', 'job_status.pkl'))
            # Update cross sections and determine which jobs to run next
            if fixed_order:
                jobs_to_run, jobs_to_collect = self.collect_the_results(options, req_acc,
                        jobs_to_run, jobs_to_collect, integration_step, mode, run_mode)
                # Update the integration_step to make sure that nothing will be overwritten
                integration_step = 1
                for job in jobs_to_run:
                    while os.path.exists(pjoin(job['dirname'], 'res_%s.dat' % integration_step)):
                        integration_step = integration_step + 1
                integration_step = integration_step - 1
            else:
                self.append_the_results(jobs_to_collect, integration_step)
        return jobs_to_run, jobs_to_collect, integration_step

    def prepare_directories(self, jobs_to_run, mode, fixed_order=True):
        """Set up the G* directories for running"""
        name_suffix = {'born': 'B', 'all': 'F'}
        for job in jobs_to_run:
            if job['split'] == 0:
                if fixed_order:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    job['run_mode'] + '_G' + job['channel'])
                else:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    'G' + name_suffix[job['run_mode']] + job['channel'])
            else:
                if fixed_order:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    job['run_mode'] + '_G' + job['channel'] + '_' + str(job['split']))
                else:
                    dirname = pjoin(self.me_dir, 'SubProcesses', job['p_dir'],
                                    'G' + name_suffix[job['run_mode']] + job['channel'] + '_' + str(job['split']))
            job['dirname'] = dirname
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            self.write_input_file(job, fixed_order)
            # link or copy the grids from the base directory to the split directory:
            if not fixed_order:
                if job['split'] != 0:
                    for f in ['grid.MC_integer', 'mint_grids', 'res_1']:
                        if not os.path.isfile(pjoin(job['dirname'], f)):
                            files.ln(pjoin(job['dirname'].rsplit("_", 1)[0], f), job['dirname'])
            else:
                if job['split'] != 0:
                    for f in ['grid.MC_integer', 'mint_grids']:
                        files.cp(pjoin(job['dirname'].rsplit("_", 1)[0], f), job['dirname'])

    def write_input_file(self, job, fixed_order):
        """write the input file for the madevent_mint* executable in the appropriate directory"""
        if fixed_order:
            content = """NPOINTS = %(npoints)s
NITERATIONS = %(niters)s
ACCURACY = %(accuracy)s
ADAPT_GRID = 2
MULTICHANNEL = 1
SUM_HELICITY = 1
NCHANS = %(nchans)s
CHANNEL = %(configs)s
SPLIT = %(split)s
WGT_MULT= %(wgt_mult)s
RUN_MODE = %(run_mode)s
RESTART = %(mint_mode)s
""" % job
        else:
            content = """-1 12      ! points, iterations
%(accuracy)s       ! desired fractional accuracy
1 -0.1     ! alpha, beta for Gsoft
1 -0.1     ! alpha, beta for Gazi
1          ! Suppress amplitude (0 no, 1 yes)?
1          ! Exact helicity sum (0 yes, n = number/event)?
%(channel)s          ! Enter Configuration Number:
%(mint_mode)s          ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
1 1 1      ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
%(run_mode)s        ! all, born, real, virt
""" % job
        with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file:
            input_file.write(content)

    def run_all_jobs(self, jobs_to_run, integration_step, fixed_order=True):
        """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
        if fixed_order:
            if integration_step == 0:
                self.update_status('Setting up grids', level=None)
            else:
                self.update_status('Refining results, step %i' % integration_step, level=None)
        self.ijob = 0
        name_suffix = {'born': 'B', 'all': 'F'}
        if fixed_order:
            run_type = "Fixed order integration step %s" % integration_step
        else:
            run_type = "MINT step %s" % integration_step
        self.njobs = len(jobs_to_run)
        for job in jobs_to_run:
            executable = 'ajob1'
            if fixed_order:
                arguments = [job['channel'], job['run_mode'],
                             str(job['split']), str(integration_step)]
            else:
                arguments = [job['channel'], name_suffix[job['run_mode']],
                             str(job['split']), str(integration_step)]
            self.run_exe(executable, arguments, run_type,
                         cwd=pjoin(self.me_dir, 'SubProcesses', job['p_dir']))

        if self.cluster_mode == 2:
            time.sleep(1)   # security to allow all jobs to be launched
        self.wait_for_complete(run_type)

    def collect_the_results(self, options, req_acc, jobs_to_run, jobs_to_collect,
                            integration_step, mode, run_mode, fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # Get the results of the current integration/MINT step
        self.append_the_results(jobs_to_run, integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect, integration_step)
        # Update the HTML pages
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix = {'born': 'B', 'all': 'F'}
            cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Combine the grids from split fixed-order jobs
        if fixed_order:
            jobs_to_run = self.combine_split_order_run(jobs_to_run)
        # Set up the jobs for the next iteration/MINT step
        jobs_to_run_new = self.update_jobs_to_run(req_acc, integration_step, jobs_to_run, fixed_order)
        # If there are no more jobs, we are done!
        if fixed_order:
            # Write the jobs_to_collect list to file so that we can restart
            # them later (with the only_generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
        # Print summary
        if (not jobs_to_run_new) and fixed_order:
            # print the final summary of the results (for fixed order)
            scale_pdf_info = self.collect_scale_pdf_info(options, jobs_to_collect)
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=True)
            return jobs_to_run_new, jobs_to_collect
        elif jobs_to_run_new:
            # print an intermediate summary of the results
            scale_pdf_info = []
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=False)
        else:
            # When we are done for (N)LO+PS runs, do not print
            # anything yet. This will be done after the reweighting
            # and collection of the events
            scale_pdf_info = []
        # Prepare for the next integration/MINT step
        if (not fixed_order) and integration_step + 1 == 2:
            # Write the jobs_to_collect list to file so that we can restart
            # them later (with the only_generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
            # the next step is event generation (mint_step 2)
            jobs_to_run_new, jobs_to_collect_new = \
                self.check_the_need_to_split(jobs_to_run_new, jobs_to_collect)
            self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new, jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            if fixed_order and self.run_card['iappl'] == 0 \
               and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new, jobs_to_collect = \
                    self.split_jobs_fixed_order(jobs_to_run_new, jobs_to_collect)
            self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            jobs_to_collect_new = jobs_to_collect
        return jobs_to_run_new, jobs_to_collect_new

    def write_nevents_unweighted_file(self, jobs, jobs0events):
        """writes the nevents_unweighted file in the SubProcesses directory.
        We also need to write the jobs that will generate 0 events,
        because that makes sure that the cross section from those channels
        is taken into account in the event weights (by collect_events.f).
        """
        content = []
        for job in jobs:
            path = pjoin(job['dirname'].split('/')[-2], job['dirname'].split('/')[-1])
            lhefile = pjoin(path, 'events.lhe')
            content.append(' %s %d %9e %9e' %
                           (lhefile.ljust(40), job['nevents'],
                            job['resultABS'] * job['wgt_frac'], job['wgt_frac']))
        for job in jobs0events:
            if job['nevents'] == 0:
                path = pjoin(job['dirname'].split('/')[-2], job['dirname'].split('/')[-1])
                lhefile = pjoin(path, 'events.lhe')
                content.append(' %s %d %9e %9e' %
                               (lhefile.ljust(40), job['nevents'], job['resultABS'], 1.))
        with open(pjoin(self.me_dir, 'SubProcesses', "nevents_unweighted"), 'w') as f:
            f.write('\n'.join(content) + '\n')

    def write_nevts_files(self, jobs):
        """write the nevts files in the SubProcesses/P*/G*/ directories"""
        for job in jobs:
            with open(pjoin(job['dirname'], 'nevts'), 'w') as f:
                if self.run_card['event_norm'].lower() == 'bias':
                    f.write('%i %f\n' % (job['nevents'], self.cross_sect_dict['xseca']))
                else:
                    f.write('%i\n' % job['nevents'])

    def combine_split_order_run(self, jobs_to_run):
        """Combines jobs and grids from split jobs that have been run"""
        # combine the jobs that need to be combined in job groups:
        # simply combine the ones that have the same p_dir and the same channel.
        jobgroups_to_combine = []
        jobs_to_run_new = []
        for job in jobs_to_run:
            if job['split'] == 0:
                job['combined'] = 1
                jobs_to_run_new.append(job)   # this job wasn't split
            elif job['split'] == 1:
                jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and
                                                   j['channel'] == job['channel'], jobs_to_run))
            else:
                continue
        for job_group in jobgroups_to_combine:
            # Combine the grids (mint-grids & MC-integer grids) first
            self.combine_split_order_grids(job_group)
            jobs_to_run_new.append(self.combine_split_order_jobs(job_group))
        return jobs_to_run_new

    def combine_split_order_jobs(self, job_group):
        """combine the jobs in job_group and return a single summed job"""
        # first copy one of the jobs in 'jobs'
        sum_job = copy.copy(job_group[0])
        # update the information to have a 'non-split' job:
        sum_job['dirname'] = pjoin(sum_job['dirname'].rsplit('_', 1)[0])
        sum_job['split'] = 0
        sum_job['wgt_mult'] = 1.0
        sum_job['combined'] = len(job_group)
        # information to be summed:
        keys = ['niters_done', 'npoints_done', 'niters', 'npoints',
                'result', 'resultABS', 'time_spend']
        # information to be summed in quadrature:
        keys2 = ['error', 'errorABS']
        for key in keys2:
            sum_job[key] = math.pow(sum_job[key], 2)
        # Loop over the jobs and sum the information
        for i, job in enumerate(job_group):
            if i == 0: continue   # skip the first
            for key in keys:
                sum_job[key] += job[key]
            for key in keys2:
                sum_job[key] += math.pow(job[key], 2)
        for key in keys2:
            sum_job[key] = math.sqrt(sum_job[key])
        sum_job['err_percABS'] = sum_job['errorABS'] / sum_job['resultABS'] * 100.
        sum_job['err_perc'] = sum_job['error'] / sum_job['result'] * 100.
        sum_job['niters'] = int(sum_job['niters_done'] / len(job_group))
        sum_job['niters_done'] = int(sum_job['niters_done'] / len(job_group))
        return sum_job

    def combine_split_order_grids(self, job_group):
        """Combines the mint_grids and MC-integer grids from the split order
        jobs (fixed order only).
        """
        files_mint_grids = []
        files_MC_integer = []
        location = None
        for job in job_group:
            files_mint_grids.append(pjoin(job['dirname'], 'mint_grids'))
            files_MC_integer.append(pjoin(job['dirname'], 'grid.MC_integer'))
            if not location:
                location = pjoin(job['dirname'].rsplit('_', 1)[0])
            else:
                if location != pjoin(job['dirname'].rsplit('_', 1)[0]):
                    raise aMCatNLOError('Not all jobs have the same location. '
                                        'Cannot combine them.')
        # We need to average the grids (both xgrids, ave_virt and
        # MC_integer grids), but sum the cross-section info. The
        # latter is the only line that contains integers.
        for j, fs in enumerate([files_mint_grids, files_MC_integer]):
            linesoffiles = []
            for f in fs:
                with open(f, 'r+') as fi:
                    linesoffiles.append(fi.readlines())
            to_write = []
            for rowgrp in zip(*linesoffiles):
                try:
                    # check that the last element on the line is an
                    # integer (this raises ValueError if it is not the
                    # case). If it is an integer, this is the line that
                    # contains the information that needs to be
                    # summed. All other lines can be averaged.
                    is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    special = []
                    for i, floatgrp in enumerate(floatgrps):
                        if i == 0:     # sum the X-sec
                            special.append(sum(floatgrp))
                        elif i == 1:   # sum the uncertainties in quadrature
                            special.append(math.sqrt(sum([err**2 for err in floatgrp])))
                        elif i == 2:   # average the number of PS points per iteration
                            special.append(int(sum(floatgrp) / len(floatgrp)))
                        elif i == 3:   # sum the number of iterations
                            special.append(int(sum(floatgrp)))
                        elif i == 4:   # average the nhits_in_grids
                            special.append(int(sum(floatgrp) / len(floatgrp)))
                        else:
                            raise aMCatNLOError('"mint_grids" files not in the correct format. '
                                                'Cannot combine them.')
                    to_write.append(" ".join(str(s) for s in special) + "\n")
                except ValueError:
                    # just average all the entries
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    averages = [sum(floatgrp) / len(floatgrp) for floatgrp in floatgrps]
                    to_write.append(" ".join(str(a) for a in averages) + "\n")
            # write the data over the master location
            if j == 0:
                with open(pjoin(location, 'mint_grids'), 'w') as f:
                    f.writelines(to_write)
            elif j == 1:
                with open(pjoin(location, 'grid.MC_integer'), 'w') as f:
                    f.writelines(to_write)

    def split_jobs_fixed_order(self, jobs_to_run, jobs_to_collect):
        """Looks in jobs_to_run to see if there is a need to split the
        jobs, depending on the expected time they take. Updates
        jobs_to_run and jobs_to_collect to replace a split job by
        its splits.
        """
        # determine the number of jobs we should have (this is per p_dir)
        if self.options['run_mode'] == 2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] == 1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit = 1
        # total expected aggregated running time
        time_expected = 0
        for job in jobs_to_run:
            time_expected += job['time_spend'] * (job['niters'] * job['npoints']) / \
                             (job['niters_done'] * job['npoints_done'])
        # this means that, in ideal conditions, we should expect the
        # following time per job
        time_per_job = time_expected / (nb_submit * (1 + len(jobs_to_run) / 2))
        jobs_to_run_new = []
        jobs_to_collect_new = copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # remove the current job from jobs_to_collect. Make sure
            # to remove all the split ones in case the original
            # job had been a split one (before it was re-combined)
            for j in filter(lambda j: j['p_dir'] == job['p_dir'] and
                            j['channel'] == job['channel'], jobs_to_collect_new):
                jobs_to_collect_new.remove(j)
            time_expected = job['time_spend'] * (job['niters'] * job['npoints']) / \
                            (job['niters_done'] * job['npoints_done'])
            # if the time expected for this job is (much) larger than
            # the time spent in the previous iteration, and larger
            # than the expected time per job, split it
            if time_expected > max(2 * job['time_spend'] / job['combined'], time_per_job):
                # determine the number of splits needed
                nsplit = min(max(int(time_expected / max(2 * job['time_spend'] / job['combined'],
                                                         time_per_job)), 2), nb_submit)
                for i in range(1, nsplit + 1):
                    job_new = copy.copy(job)
                    job_new['split'] = i
                    job_new['wgt_mult'] = 1. / float(nsplit)
                    job_new['dirname'] = job['dirname'] + '_%i' % job_new['split']
                    job_new['accuracy'] = min(job['accuracy'] * math.sqrt(float(nsplit)), 0.1)
                    if nsplit >= job['niters']:
                        job_new['npoints'] = int(job['npoints'] * job['niters'] / nsplit)
                        job_new['niters'] = 1
                    else:
                        job_new['npoints'] = int(job['npoints'] / nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new, jobs_to_collect_new

    def check_the_need_to_split(self, jobs_to_run, jobs_to_collect):
        """Looks in jobs_to_run to see if there is a need to split the
        event generation step. Updates jobs_to_run and
        jobs_to_collect to replace a split job by its
        splits. Also removes jobs that do not need any events.
        """
        nevt_job = self.run_card['nevt_job']
        if nevt_job > 0:
            jobs_to_collect_new = copy.copy(jobs_to_collect)
            for job in jobs_to_run:
                nevents = job['nevents']
                if nevents == 0:
                    jobs_to_collect_new.remove(job)
                elif nevents > nevt_job:
                    jobs_to_collect_new.remove(job)
                    if nevents % nevt_job != 0:
                        nsplit = int(nevents / nevt_job) + 1
                    else:
                        nsplit = int(nevents / nevt_job)
                    for i in range(1, nsplit + 1):
                        job_new = copy.copy(job)
                        left_over = nevents % nsplit
                        if i <= left_over:
                            job_new['nevents'] = int(nevents / nsplit) + 1
                        else:
                            job_new['nevents'] = int(nevents / nsplit)
                        job_new['wgt_frac'] = float(job_new['nevents']) / float(nevents)
                        job_new['split'] = i
                        job_new['dirname'] = job['dirname'] + '_%i' % job_new['split']
                        jobs_to_collect_new.append(job_new)
            jobs_to_run_new = copy.copy(jobs_to_collect_new)
        else:
            jobs_to_run_new = copy.copy(jobs_to_collect)
            for job in jobs_to_collect:
                if job['nevents'] == 0:
                    jobs_to_run_new.remove(job)
            jobs_to_collect_new = copy.copy(jobs_to_run_new)

        return jobs_to_run_new, jobs_to_collect_new

    def update_jobs_to_run(self, req_acc, step, jobs, fixed_order=True):
        """
        For (N)LO+PS runs: determines the number of events and/or the
        required accuracy per job.
        For fixed-order runs: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        err = self.cross_sect_dict['errt']
        tot = self.cross_sect_dict['xsect']
        errABS = self.cross_sect_dict['erra']
        totABS = self.cross_sect_dict['xseca']
        jobs_new = []
        if fixed_order:
            if req_acc == -1:
                if step + 1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode'] = -1
                        job['niters'] = niters
                        job['npoints'] = npoints
                        jobs_new.append(job)
                elif step + 1 == 2:
                    pass
                elif step + 1 > 2:
                    raise aMCatNLOError('Cannot determine the number of iterations and PS points '
                                        'for integration step %i' % step)
            elif (req_acc > 0 and err / abs(tot) > req_acc * 1.2) or step <= 0:
                req_accABS = req_acc * abs(tot) / totABS   # overall relative required accuracy on the ABS Xsec
                for job in jobs:
                    job['mint_mode'] = -1
                    # Determine the relative required accuracy on the ABS for this job
                    job['accuracy'] = req_accABS * math.sqrt(totABS / job['resultABS'])
                    # If already accurate enough, skip the job (except when doing the first
                    # step of an iappl=2 run: we need to fill all the applgrid grids!)
                    if (job['accuracy'] > job['errorABS'] / job['resultABS'] and step != 0) \
                       and not (step == -1 and self.run_card['iappl'] == 2):
                        continue
                    # Update the number of PS points based on errorABS, ncall and accuracy
                    itmax_fl = job['niters_done'] * math.pow(job['errorABS'] /
                                                             (job['accuracy'] * job['resultABS']), 2)
                    if itmax_fl <= 4.0:
                        job['niters'] = max(int(round(itmax_fl)), 2)
                        job['npoints'] = job['npoints_done'] * 2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0:
                        job['niters'] = 4
                        job['npoints'] = int(round(job['npoints_done'] * itmax_fl / 4.0)) * 2
                    else:
                        if itmax_fl > 100.0: itmax_fl = 50.0
                        job['niters'] = int(round(math.sqrt(itmax_fl)))
                        job['npoints'] = int(round(job['npoints_done'] * itmax_fl /
                                                   round(math.sqrt(itmax_fl)))) * 2
                    # Add the job to the list of jobs that need to be run
                    jobs_new.append(job)
            return jobs_new
        elif step + 1 <= 2:
            nevents = self.run_card['nevents']
            # Total required accuracy for the upper bounding envelope
            if req_acc < 0:
                req_acc2_inv = nevents
            else:
                req_acc2_inv = 1 / (req_acc * req_acc)
            if step + 1 == 1 or step + 1 == 2:
                # determine the required accuracy for each of the jobs for MINT step 1
                for job in jobs:
                    accuracy = min(math.sqrt(totABS / (req_acc2_inv * job['resultABS'])), 0.2)
                    job['accuracy'] = accuracy
            if step + 1 == 2:
                # Randomly (based on the relative ABS Xsec of the job) determine the
                # number of events each job needs to generate for MINT step 2.
                r = self.get_randinit_seed()
                random.seed(r)
                totevts = nevents
                for job in jobs:
                    job['nevents'] = 0
                while totevts:
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i < len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode'] = step + 1   # next step
            return jobs
        else:
            return []

    def get_randinit_seed(self):
        """Get the random number seed from the randinit file"""
        with open(pjoin(self.me_dir, "SubProcesses", "randinit")) as randinit:
            # the format of the file is "r=%d"
            iseed = int(randinit.read()[2:])
        return iseed

    def append_the_results(self, jobs, integration_step):
        """Appends the results for each of the jobs in the job list"""
        error_found = False
        for job in jobs:
            try:
                if integration_step >= 0:
                    with open(pjoin(job['dirname'], 'res_%s.dat' % integration_step)) as res_file:
                        results = res_file.readline().split()
                else:
                    # we should only get here when doing fixed order with the
                    # 'only_generation' option set to True. Take the results
                    # from the final run that was done.
                    with open(pjoin(job['dirname'], 'res.dat')) as res_file:
                        results = res_file.readline().split()
            except IOError:
                if not error_found:
                    error_found = True
                    error_log = []
                error_log.append(pjoin(job['dirname'], 'log.txt'))
                continue
            job['resultABS'] = float(results[0])
            job['errorABS'] = float(results[1])
            job['result'] = float(results[2])
            job['error'] = float(results[3])
            job['niters_done'] = int(results[4])
            job['npoints_done'] = int(results[5])
            job['time_spend'] = float(results[6])
            job['err_percABS'] = job['errorABS'] / job['resultABS'] * 100.
            job['err_perc'] = job['error'] / job['result'] * 100.
        if error_found:
            raise aMCatNLOError('An error occurred during the collection of results.\n' +
                                'Please check the .log files inside the directories which failed:\n' +
                                '\n'.join(error_log) + '\n')

    def write_res_txt_file(self, jobs, integration_step):
        """writes the res.txt files in the SubProcesses dir"""
        jobs.sort(key=lambda job: -job['errorABS'])
        content = []
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        totABS = 0
        errABS = 0
        tot = 0
        err = 0
        for job in jobs:
            totABS += job['resultABS'] * job['wgt_frac']
            errABS += math.pow(job['errorABS'], 2) * job['wgt_frac']
            tot += job['result'] * job['wgt_frac']
            err += math.pow(job['error'], 2) * job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %
                           (totABS, math.sqrt(errABS), math.sqrt(errABS) / totABS * 100.,
                            tot, math.sqrt(err), math.sqrt(err) / tot * 100.))
        with open(pjoin(self.me_dir, 'SubProcesses', 'res_%s.txt' % integration_step), 'w') as res_file:
            res_file.write('\n'.join(content))
        randinit = self.get_randinit_seed()
        return {'xsect': tot, 'xseca': totABS, 'errt': math.sqrt(err),
                'erra': math.sqrt(errABS), 'randinit': randinit}

    def collect_scale_pdf_info(self, options, jobs):
        """reads the scale_pdf_dependence.dat files and collects their results"""
        scale_pdf_info = []
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            evt_files = []
            evt_wghts = []
            for job in jobs:
                evt_files.append(pjoin(job['dirname'], 'scale_pdf_dependence.dat'))
                evt_wghts.append(job['wgt_frac'])
            scale_pdf_info = self.pdf_scale_from_reweighting(evt_files, evt_wghts)
        return scale_pdf_info

    def combine_plots_FO(self, folder_name, jobs):
        """combines the plots and puts them in the Events/run* directory"""
        devnull = open(os.devnull, 'w')

        if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
            misc.call(['./combine_plots_FO.sh'] + folder_name,
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
            out = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO')
            self.combine_plots_HwU(jobs, out)
            try:
                misc.call(['gnuplot', 'MADatNLO.gnuplot'],
                          stdout=devnull, stderr=devnull,
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'root':
            rootfiles = []
            for job in jobs:
                if job['dirname'].endswith('.root'):
                    rootfiles.append(job['dirname'])
                else:
                    rootfiles.append(pjoin(job['dirname'], 'MADatNLO.root'))
            misc.call(['./combine_root.sh'] + folder_name + rootfiles,
                      stdout=devnull,
                      cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
            self.combine_FO_lhe(jobs)
            logger.info('The results of this run and the LHE file (to be used for plotting only)'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            logger.info('The results of this run'
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))

    def combine_FO_lhe(self, jobs):
        """combine the various lhe files generated in each directory.
        There are three steps:
        1) write the banner
        2) reweight each sample by the factor written at the end of each file
        3) concatenate each of the new files (and gzip them).
        """
        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)

        # 1. write the banner
        text = open(pjoin(jobs[0]['dirname'], 'header.txt'), 'r').read()
        i1, i2 = text.find('<initrwgt>'), text.find('</initrwgt>')
        self.banner['initrwgt'] = text[10 + i1:i2]
        # the <init> block looks like:
        # <init>
        # 2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1
        # 8.430000e+02 2.132160e+00 8.430000e+02 1
        # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator>
        # </init>
        cross = sum(j['result'] for j in jobs)
        error = math.sqrt(sum(j['error'] ** 2 for j in jobs))
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])

        fsock = lhe_parser.EventFile(output, 'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True

        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            for job in jobs:
                dirname = job['dirname']
                # read the last line
                lastline = misc.BackRead(pjoin(dirname, 'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # get the normalisation ratio
                ratio = cross / sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname, 'events.lhe'))
                lhe.eventgroup = True   # read the events by event group
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i, event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        if i > 0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                           and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname, 'events.lhe'))
        else:
            misc.sprint('need to combine %s event files' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname, 'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                globallhe.add(pjoin(dirname, 'events.lhe'), cross, 0, cross,
                              nb_event=int(nb_event), scale=cross / sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i, event in enumerate(eventsgroup):
                    event.rescale_weights(event.sample_scale)
                    if i > 0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                       and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            for job in jobs:
                os.remove(pjoin(job['dirname'], 'events.lhe'))
        fsock.write('</LesHouchesEvents>\n')
        fsock.close()
        misc.sprint('combining lhe files done in', time.time() - start)

    def combine_plots_HwU(self, jobs, out, normalisation=None):
        """Sums all the plots in the HwU format."""
        logger.debug('Combining HwU plots.')

        command = []
        command.append(pjoin(self.me_dir, 'bin', 'internal', 'histograms.py'))
        for job in jobs:
            if job['dirname'].endswith('.HwU'):
                command.append(job['dirname'])
            else:
                command.append(pjoin(job['dirname'], 'MADatNLO.HwU'))
        command.append("--out=" + out)
        command.append("--gnuplot")
        command.append("--band=[]")
        command.append("--lhapdf-config=" + self.options['lhapdf'])
        if normalisation:
            command.append("--multiply=" + (','.join([str(n) for n in normalisation])))
        command.append("--sum")
        command.append("--keep_all_weights")
        command.append("--no_open")

        p = misc.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                       cwd=self.me_dir)

        while p.poll() is None:
            line = p.stdout.readline()
            if any(t in line for t in ['INFO:', 'WARNING:', 'CRITICAL:', 'ERROR:', 'KEEP:']):
                print line[:-1]
            elif __debug__ and line:
                logger.debug(line[:-1])

    def applgrid_combine(self, cross, error, jobs):
        """Combines the APPLgrids in all the SubProcesses/P*/all_G*/ directories"""
        logger.debug('Combining APPLgrids \n')
        applcomb = pjoin(self.options['applgrid'].rstrip('applgrid-config'),
                         'applgrid-combine')
        all_jobs = []
        for job in jobs:
            all_jobs.append(job['dirname'])
        ngrids = len(all_jobs)
        nobs = len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
        for obs in range(0, nobs):
            gdir = [pjoin(job, "grid_obs_" + str(obs) + "_out.root") for job in all_jobs]
            # combine the APPLgrids from the different channels for observable 'obs'
            if self.run_card["iappl"] == 1:
                misc.call([applcomb, '-o', pjoin(self.me_dir, "Events", self.run_name,
                           "aMCfast_obs_" + str(obs) + "_starting_grid.root"), '--optimise'] + gdir)
            elif self.run_card["iappl"] == 2:
                unc2_inv = pow(cross / error, 2)
                unc2_inv_ngrids = pow(cross / error, 2) * ngrids
                misc.call([applcomb, '-o', pjoin(self.me_dir, "Events",
                           self.run_name, "aMCfast_obs_" + str(obs) + ".root"), '-s',
                           str(unc2_inv), '--weight', str(unc2_inv)] + gdir)
                for job in all_jobs:
                    os.remove(pjoin(job, "grid_obs_" + str(obs) + "_in.root"))
            else:
                raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
            # after combining, delete the original grids
            for ggdir in gdir:
                os.remove(ggdir)

    def applgrid_distribute(self, options, mode, p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code"""
        # if no appl_start_grid argument is given, guess it from the time stamps
        # of the starting grid files
        if not ('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir, 'Events'))
            time_stamps = {}
            for root_file in gfiles:
                time_stamps[root_file] = os.path.getmtime(root_file)
            options['appl_start_grid'] = \
                max(time_stamps.iterkeys(), key=(lambda key:
                    time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '
                        'Guessing that the start grid from run "%s" should be used.'
                        % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir = pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and that at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' %
                                      pjoin(start_grid_dir, 'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids = [pjoin(start_grid_dir, name) for name in os.listdir(
                             start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs = len(all_grids)
                gstring = " ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined. '
                                  'Please provide this information.')
        # copy the grids to all the relevant directories
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses", pdir)) if file.startswith(mode + '_G') and
                      os.path.isdir(pjoin(self.me_dir, "SubProcesses", pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    obs = grid.split('_')[-3]
                    files.cp(grid, pjoin(self.me_dir, "SubProcesses", pdir, g_dir,
                                         'grid_obs_' + obs + '_in.root'))

    def collect_log_files(self, jobs, integration_step):
        """collect the log files and put them in a single, html-friendly file
        inside the Events/run_.../ directory"""
        log_file = pjoin(self.me_dir, 'Events', self.run_name,
                         'alllogs_%d.html' % integration_step)
        outfile = open(log_file, 'w')

        content = ''
        content += '<HTML><BODY>\n<font face="courier" size=2>'
        for job in jobs:
            # put an anchor
            log = pjoin(job['dirname'], 'log_MINT%s.txt' % integration_step)
            content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
                pjoin(self.me_dir, 'SubProcesses'), ''))
            # and put a nice header
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                       (os.path.dirname(log).replace(pjoin(self.me_dir,
                        'SubProcesses'), ''),
                        integration_step)
            content += '</font>\n'
            # then just flush the content of the small log inside the big log;
            # the PRE tag prints everything verbatim
            with open(log) as l:
                content += '<PRE>\n' + l.read() + '\n</PRE>'
            content += '<br>\n'
            outfile.write(content)
            content = ''

        outfile.write('</font>\n</BODY></HTML>\n')
        outfile.close()

    def finalise_run_FO(self, folder_name, jobs):
        """Combine the plots and put the res*.txt files in the Events/run* folder."""
        # Copy the res_*.txt files to the Events/run* folder
        res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file, pjoin(self.me_dir, 'Events', self.run_name))
        # Collect the plots and put them in the Events/run* folder
        self.combine_plots_FO(folder_name, jobs)
        # If doing the applgrid stuff, also combine those grids
        # and put them in the Events/run* folder
        if self.run_card['iappl'] != 0:
            cross = self.cross_sect_dict['xsect']
            error = self.cross_sect_dict['errt']
            self.applgrid_combine(cross, error, jobs)

    def setup_cluster_or_multicore(self):
        """setup the number of cores for multicore, and the cluster-type for cluster runs"""
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            try:
                self.cluster = cluster.from_name[cluster_name](**self.options)
            except KeyError:
                # Check if a plugin defines this type of cluster
                # check for the PLUGIN format
                cluster_class = misc.from_plugin_import(self.plugin_path,
                        'new_cluster', cluster_name,
                        info='cluster handling will be done with PLUGIN: %{plug}s')
                if cluster_class:
                    self.cluster = cluster_class(**self.options)

        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using one.\n'
                               'Use "set nb_core X" in order to set this number and be able to '
                               'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)

    def clean_previous_results(self, options, p_dirs, folder_name):
        """Clean previous results.
        o. If doing only the reweighting step, do not delete anything and return directly.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweightonly."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            # find the old folders to be removed
            for obj in folder_name:
                # list all the G* (or all_G* or born_G*) directories
                to_rm = [file for file in
                         os.listdir(pjoin(self.me_dir, 'SubProcesses', dir))
                         if file.startswith(obj[:-1]) and
                         (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or
                          os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # list all the G*_* directories (from split event generation)
                to_always_rm = [file for file in
                                os.listdir(pjoin(self.me_dir, 'SubProcesses', dir))
                                if file.startswith(obj[:-1]) and
                                '_' in file and not '_G' in file and
                                (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or
                                 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, 'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir, 'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return

    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step; if =2 (i.e. after event generation)
        some additional information is printed"""
        # find the process name
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process + (line.replace('generate ', '')).replace('add process ', '') + ' ; '
        lpp = {0: 'l', 1: 'p', -1: 'pbar'}
        if self.ninitial == 1:
            proc_info = '\n      Process %s' % process[:-3]
        else:
            proc_info = '\n      Process %s\n      Run at %s-%s collider (%s + %s GeV)' % \
                        (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                         self.run_card['ebeam1'], self.run_card['ebeam2'])

        if self.ninitial == 1:
            self.cross_sect_dict['unit'] = 'GeV'
            self.cross_sect_dict['xsec_string'] = '(Partial) decay width'
            self.cross_sect_dict['axsec_string'] = '(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit'] = 'pb'
            self.cross_sect_dict['xsec_string'] = 'Total cross section'
            self.cross_sect_dict['axsec_string'] = 'Total abs(cross section)'
        if self.run_card['event_norm'].lower() == 'bias':
            self.cross_sect_dict['xsec_string'] += ', incl. bias (DO NOT USE)'

        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed = '(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:', 'Current results:',
                      'Final results and run summary:']
            computed = '(computed from histogram information)'

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            message = status[step] + '\n\n      Intermediate results:' + \
                      ('\n      Random seed: %(randinit)d'
                       '\n      %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s'
                       '\n      %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                      % self.cross_sect_dict
        elif mode in ['NLO', 'LO'] and not done:
            if step == 0:
                message = '\n      ' + status[0] + \
                          '\n      %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                          self.cross_sect_dict
            else:
                message = '\n      ' + status[1] + \
                          '\n      %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                          self.cross_sect_dict
        else:
            message = '\n   --------------------------------------------------------------'
            message = message + \
                      '\n   ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                          '\n      Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                      '\n      %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                      self.cross_sect_dict
            message = message + \
                      '\n   --------------------------------------------------------------'
            if scale_pdf_info and (self.run_card['nevents'] >= 10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale uncertainties
                    message = message + '\n      Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                          ('\n          Dynamical_scale_choice %(label)i (envelope of %(size)s values): '
                                           '\n              %(cen)8.3e pb  +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                message = message + \
                                          ('\n          Soft and hard scale dependence (added in quadrature): '
                                           '\n              %(cen)8.3e pb  +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
                        else:
                            message = message + \
                                      ('\n          Dynamical_scale_choice %(label)i: '
                                       '\n              %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    # pdf uncertainties
                    message = message + '\n      PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc'] == 'none':
                            message = message + \
                                      ('\n          %(name)s (central value only): '
                                       '\n              %(cen)8.3e pb') % p
                        elif p['unc'] == 'unknown':
                            message = message + \
                                      ('\n          %(name)s (%(size)s members; combination method unknown): '
                                       '\n              %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                      ('\n          %(name)s (%(size)s members; using %(unc)s method): '
                                       '\n              %(cen)8.3e pb  +%(max)0.1f%% -%(min)0.1f%%') % p
                message = message + \
                          '\n   --------------------------------------------------------------'

        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step != 2):
            logger.info(message + '\n')
            return

        # Some advanced general statistics, extracted from the log files, are
        # shown in the debug message at the end of the run. Make sure this
        # never stops a run.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*', 'G*', 'log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*', 'all_G*', 'log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'LO':
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*', 'born_G*', 'log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            raise aMCatNLOError('Running mode %s not supported.' % mode)

        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            debug_msg = 'Advanced statistics collection failed with error "%s"\n' % str(e)
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s' \
                         % err_string.getvalue()

        logger.debug(debug_msg + '\n')
        logger.info(message + '\n')

        # Now copy the relevant information to the Events/Run_<xxx> directory
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'), 'w').write(message + '\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message + '\n\n' + debug_msg + '\n')

        self.archive_files(evt_path, mode)
3133
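    # Illustrative sketch (not from the original source): print_summary expects
    # self.cross_sect_dict to carry at least the keys used in the message
    # templates above. A minimal example, with made-up numbers, would be:
    #
    #   self.cross_sect_dict = {'randinit': 33, 'xsect': 1.234e+02, 'errt': 5.0e-01,
    #                           'xseca': 1.300e+02, 'erra': 5.0e-01}
    #
    # The 'unit', 'xsec_string' and 'axsec_string' entries are then filled in by
    # the method itself before the '%' formatting is applied.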
    def archive_files(self, evt_path, mode):
        """Copies into the Events/Run_<xxx> directory the relevant files
        characterizing the run."""

        files_to_arxiv = [pjoin('Cards','param_card.dat'),
                          pjoin('Cards','MadLoopParams.dat'),
                          pjoin('Cards','FKS_params.dat'),
                          pjoin('Cards','run_card.dat'),
                          pjoin('Subprocesses','setscales.f'),
                          pjoin('Subprocesses','cuts.f')]

        if mode in ['NLO', 'LO']:
            files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))

        if not os.path.exists(pjoin(evt_path,'RunMaterial')):
            os.mkdir(pjoin(evt_path,'RunMaterial'))

        for path in files_to_arxiv:
            if os.path.isfile(pjoin(self.me_dir,path)):
                files.cp(pjoin(self.me_dir,path), pjoin(evt_path,'RunMaterial'))
        misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], cwd=evt_path)
        shutil.rmtree(pjoin(evt_path,'RunMaterial'))

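    # Hypothetical usage sketch (not part of the original source): the archive
    # written above can be inspected with the standard tarfile module, e.g.
    #
    #   import tarfile
    #   with tarfile.open(pjoin(evt_path, 'RunMaterial.tar.gz')) as tar:
    #       print tar.getnames()  # e.g. ['RunMaterial/param_card.dat', ...]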
    def compile_advanced_stats(self, log_GV_files, all_log_files, message):
        """This function goes through the log files given in argument and
        compiles statistics about MadLoop stability, virtual integration
        optimization and detection of potential error messages into a nice
        debug message to be printed at the end of the run."""

        def safe_float(str_float):
            try:
                return float(str_float)
            except ValueError:
                logger.debug('Could not convert the following float during'+
                             ' advanced statistics printout: %s' % str(str_float))
                return -1.0

        # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]}
        # > Errors is a list of tuples with this format (log_file,nErrors)
        stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
        mint_search = re.compile(r"MINT(?P<ID>\d*).txt")

        # ==================================
        # == MadLoop stability statistics ==
        # ==================================

        # Recover the fraction of unstable PS points found in the runs for
        # the virtuals.
        # Note: 'Satistics' below matches the spelling actually written in
        # the MadLoop log files.
        UPS_stat_finder = re.compile(
                 r"Satistics from MadLoop:.*"+\
                 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
                 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
                 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
                 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
                 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
                 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
                 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
                 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
                 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
                 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*", re.DOTALL)

        unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
                              1 : 'CutTools (double precision)',
                              2 : 'PJFry++',
                              3 : 'IREGI',
                              4 : 'Golem95',
                              5 : 'Samurai',
                              6 : 'Ninja (double precision)',
                              7 : 'COLLIER',
                              8 : 'Ninja (quadruple precision)',
                              9 : 'CutTools (quadruple precision)'}
        RetUnit_finder = re.compile(
                 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")

        for gv_log in log_GV_files:
            channel_name = '/'.join(gv_log.split('/')[-5:-1])
            log = open(gv_log, 'r').read()
            UPS_stats = re.search(UPS_stat_finder, log)
            for retunit_stats in re.finditer(RetUnit_finder, log):
                if channel_name not in stats['UPS'].keys():
                    stats['UPS'][channel_name] = [0]*10+[[0]*10]
                stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
                    += int(retunit_stats.group('n_occurences'))
            if not UPS_stats is None:
                try:
                    stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
                    stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
                    stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
                    stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
                    stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
                    stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
                    stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
                    stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
                    stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
                    stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
                except KeyError:
                    stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
                        int(UPS_stats.group('nsun')), int(UPS_stats.group('nsps')),
                        int(UPS_stats.group('nups')), int(UPS_stats.group('neps')),
                        int(UPS_stats.group('nddp')), int(UPS_stats.group('nqdp')),
                        int(UPS_stats.group('nini')), int(UPS_stats.group('n100')),
                        int(UPS_stats.group('n10')), [0]*10]
        debug_msg = ""
        if len(stats['UPS'].keys()) > 0:
            nTotPS  = sum([chan[0] for chan in stats['UPS'].values()], 0)
            nTotsun = sum([chan[1] for chan in stats['UPS'].values()], 0)
            nTotsps = sum([chan[2] for chan in stats['UPS'].values()], 0)
            nTotups = sum([chan[3] for chan in stats['UPS'].values()], 0)
            nToteps = sum([chan[4] for chan in stats['UPS'].values()], 0)
            nTotddp = sum([chan[5] for chan in stats['UPS'].values()], 0)
            nTotqdp = sum([chan[6] for chan in stats['UPS'].values()], 0)
            nTotini = sum([chan[7] for chan in stats['UPS'].values()], 0)
            nTot100 = sum([chan[8] for chan in stats['UPS'].values()], 0)
            nTot10  = sum([chan[9] for chan in stats['UPS'].values()], 0)
            nTot1   = [sum([chan[10][i] for chan in stats['UPS'].values()], 0) \
                                                             for i in range(10)]
            UPSfracs = [(chan[0], 0.0 if chan[1][0]==0 else \
                 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
            maxUPS = max(UPSfracs, key=lambda w: w[1])

            tmpStr = ""
            tmpStr += '\n  Number of loop ME evaluations (by MadLoop): %d' % nTotPS
            tmpStr += '\n    Stability unknown:                   %d' % nTotsun
            tmpStr += '\n    Stable PS point:                     %d' % nTotsps
            tmpStr += '\n    Unstable PS point (and rescued):     %d' % nTotups
            tmpStr += '\n    Unstable PS point (and not rescued): %d' % nToteps
            tmpStr += '\n    Only double precision used:          %d' % nTotddp
            tmpStr += '\n    Quadruple precision used:            %d' % nTotqdp
            tmpStr += '\n    Initialization phase-space points:   %d' % nTotini
            tmpStr += '\n  Reduction methods used:'
            red_methods = [(unit_code_meaning[i], nTot1[i]) for i in \
                           unit_code_meaning.keys() if nTot1[i] > 0]
            for method, n in sorted(red_methods, key=lambda l: l[1], reverse=True):
                tmpStr += '\n    > %s%s%s' % (method, ' '*(33-len(method)), n)
            if nTot100 != 0:
                debug_msg += '\n  Unknown return code (100): %d' % nTot100
            if nTot10 != 0:
                debug_msg += '\n  Unknown return code (10):  %d' % nTot10
            nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
                               not in unit_code_meaning.keys())
            if nUnknownUnit != 0:
                debug_msg += '\n  Unknown return code (1):   %d' % nUnknownUnit

            if maxUPS[1] > 0.001:
                message += tmpStr
                message += '\n  Total number of unstable PS point detected:'+\
                           ' %d (%4.2f%%)' % (nToteps, safe_float(100*nToteps)/nTotPS)
                message += '\n    Maximum fraction of UPS points in '+\
                           'channel %s (%4.2f%%)' % maxUPS
                message += '\n    Please report this to the authors while '+\
                           'providing the file'
                message += '\n    %s' % str(pjoin(os.path.dirname(self.me_dir),
                                                  maxUPS[0], 'UPS.log'))
            else:
                debug_msg += tmpStr

        # ====================================================
        # == aMC@NLO virtual integration optimization stats ==
        # ====================================================

        virt_tricks_finder = re.compile(
            r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
            r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
            r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
            r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")

        virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
            r"-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")

        channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")

        channel_contr_list = {}
        for gv_log in log_GV_files:
            logfile = open(gv_log, 'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])
            vf_stats = None
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                try:
                    if v_frac < stats['virt_stats']['v_frac_min'][0]:
                        stats['virt_stats']['v_frac_min'] = (v_frac, channel_name)
                    if v_frac > stats['virt_stats']['v_frac_max'][0]:
                        stats['virt_stats']['v_frac_max'] = (v_frac, channel_name)
                    stats['virt_stats']['v_frac_avg'][0] += v_frac
                    stats['virt_stats']['v_frac_avg'][1] += 1
                except KeyError:
                    stats['virt_stats']['v_frac_min'] = [v_frac, channel_name]
                    stats['virt_stats']['v_frac_max'] = [v_frac, channel_name]
                    stats['virt_stats']['v_frac_avg'] = [v_frac, 1]

            ccontr_stats = None
            for ccontr_stats in re.finditer(channel_contr_finder, log):
                pass
            if not ccontr_stats is None:
                contrib = safe_float(ccontr_stats.group('v_contr'))
                try:
                    if contrib > channel_contr_list[channel_name]:
                        channel_contr_list[channel_name] = contrib
                except KeyError:
                    channel_contr_list[channel_name] = contrib

        # Now build the list of relevant virt log files to look for the maxima
        # of virt fractions and such.
        average_contrib = 0.0
        for value in channel_contr_list.values():
            average_contrib += value
        if len(channel_contr_list.values()) != 0:
            average_contrib = average_contrib / len(channel_contr_list.values())

        relevant_log_GV_files = []
        excluded_channels = set([])
        all_channels = set([])
        for log_file in log_GV_files:
            channel_name = '/'.join(log_file.split('/')[-3:-1])
            all_channels.add(channel_name)
            try:
                if channel_contr_list[channel_name] > (0.1*average_contrib):
                    relevant_log_GV_files.append(log_file)
                else:
                    excluded_channels.add(channel_name)
            except KeyError:
                relevant_log_GV_files.append(log_file)

        # Now we want to use the latest occurrence of the accumulated results
        # in each log file
        for gv_log in relevant_log_GV_files:
            logfile = open(gv_log, 'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])

            vt_stats = None
            for vt_stats in re.finditer(virt_tricks_finder, log):
                pass
            if not vt_stats is None:
                vt_stats_group = vt_stats.groupdict()
                v_ratio = safe_float(vt_stats.group('v_ratio'))
                v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
                v_contr = safe_float(vt_stats.group('v_abs_contr'))
                v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
                try:
                    if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
                        stats['virt_stats']['v_ratio_min'] = (v_ratio, channel_name)
                    if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
                        stats['virt_stats']['v_ratio_max'] = (v_ratio, channel_name)
                    # compare the error on the ratio (not the ratio itself)
                    # with the stored extrema
                    if v_ratio_err < stats['virt_stats']['v_ratio_err_min'][0]:
                        stats['virt_stats']['v_ratio_err_min'] = (v_ratio_err, channel_name)
                    if v_ratio_err > stats['virt_stats']['v_ratio_err_max'][0]:
                        stats['virt_stats']['v_ratio_err_max'] = (v_ratio_err, channel_name)
                    if v_contr < stats['virt_stats']['v_contr_min'][0]:
                        stats['virt_stats']['v_contr_min'] = (v_contr, channel_name)
                    if v_contr > stats['virt_stats']['v_contr_max'][0]:
                        stats['virt_stats']['v_contr_max'] = (v_contr, channel_name)
                    if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
                        stats['virt_stats']['v_contr_err_min'] = (v_contr_err, channel_name)
                    if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
                        stats['virt_stats']['v_contr_err_max'] = (v_contr_err, channel_name)
                except KeyError:
                    stats['virt_stats']['v_ratio_min'] = [v_ratio, channel_name]
                    stats['virt_stats']['v_ratio_max'] = [v_ratio, channel_name]
                    stats['virt_stats']['v_ratio_err_min'] = [v_ratio_err, channel_name]
                    stats['virt_stats']['v_ratio_err_max'] = [v_ratio_err, channel_name]
                    stats['virt_stats']['v_contr_min'] = [v_contr, channel_name]
                    stats['virt_stats']['v_contr_max'] = [v_contr, channel_name]
                    stats['virt_stats']['v_contr_err_min'] = [v_contr_err, channel_name]
                    stats['virt_stats']['v_contr_err_max'] = [v_contr_err, channel_name]

            vf_stats = None
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                try:
                    if v_average < stats['virt_stats']['v_average_min'][0]:
                        stats['virt_stats']['v_average_min'] = (v_average, channel_name)
                    if v_average > stats['virt_stats']['v_average_max'][0]:
                        stats['virt_stats']['v_average_max'] = (v_average, channel_name)
                    stats['virt_stats']['v_average_avg'][0] += v_average
                    stats['virt_stats']['v_average_avg'][1] += 1
                except KeyError:
                    stats['virt_stats']['v_average_min'] = [v_average, channel_name]
                    stats['virt_stats']['v_average_max'] = [v_average, channel_name]
                    stats['virt_stats']['v_average_avg'] = [v_average, 1]

        try:
            debug_msg += '\n\n  Statistics on virtual integration optimization : '
            debug_msg += '\n    Maximum virt fraction computed         %.3f (%s)' \
                         % tuple(stats['virt_stats']['v_frac_max'])
            debug_msg += '\n    Minimum virt fraction computed         %.3f (%s)' \
                         % tuple(stats['virt_stats']['v_frac_min'])
            debug_msg += '\n    Average virt fraction computed         %.3f' \
                         % safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
            debug_msg += '\n  Stats below exclude negligible channels (%d excluded out of %d)' % \
                         (len(excluded_channels), len(all_channels))
            debug_msg += '\n    Maximum virt ratio used                %.2f (%s)' \
                         % tuple(stats['virt_stats']['v_average_max'])
            debug_msg += '\n    Maximum virt ratio found from grids    %.2f (%s)' \
                         % tuple(stats['virt_stats']['v_ratio_max'])
            tmpStr = '\n    Max. MC err. on virt ratio from grids  %.1f %% (%s)' \
                     % tuple(stats['virt_stats']['v_ratio_err_max'])
            debug_msg += tmpStr
            # After all it was decided that it is better not to alarm the user
            # unnecessarily with such a printout of the statistics.
            # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \
            #    stats['virt_stats']['v_ratio_err_max'][0]>100.0:
            #     message += "\n  Suspiciously large MC error in :"
            # if stats['virt_stats']['v_ratio_err_max'][0]>100.0:
            #     message += tmpStr

            tmpStr = '\n    Maximum MC error on abs virt           %.1f %% (%s)' \
                     % tuple(stats['virt_stats']['v_contr_err_max'])
            debug_msg += tmpStr
            # if stats['virt_stats']['v_contr_err_max'][0]>100.0:
            #     message += tmpStr

        except KeyError:
            debug_msg += '\n  Could not find statistics on the integration optimization. '

        # =======================================
        # == aMC@NLO timing profile statistics ==
        # =======================================

        timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
            r"(?P<time>[\d\+-Eed\.]*)\s*")

        for logf in log_GV_files:
            logfile = open(logf, 'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(logf.split('/')[-3:-1])
            mint = re.search(mint_search, logf)
            if not mint is None:
                channel_name = channel_name + ' [step %s]' % mint.group('ID')

            for time_stats in re.finditer(timing_stat_finder, log):
                try:
                    stats['timings'][time_stats.group('name')][channel_name] += \
                        safe_float(time_stats.group('time'))
                except KeyError:
                    if time_stats.group('name') not in stats['timings'].keys():
                        stats['timings'][time_stats.group('name')] = {}
                    stats['timings'][time_stats.group('name')][channel_name] = \
                        safe_float(time_stats.group('time'))

        # useful inline function
        Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
        try:
            totTimeList = [(time, chan) for chan, time in \
                           stats['timings']['Total'].items()]
        except KeyError:
            totTimeList = []

        totTimeList.sort()
        if len(totTimeList) > 0:
            debug_msg += '\n\n  Inclusive timing profile :'
            debug_msg += '\n    Overall slowest channel          %s (%s)' % \
                         (Tstr(totTimeList[-1][0]), totTimeList[-1][1])
            debug_msg += '\n    Average channel running time     %s' % \
                         Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
            debug_msg += '\n    Aggregated total running time    %s' % \
                         Tstr(sum([el[0] for el in totTimeList]))
        else:
            debug_msg += '\n\n  Inclusive timing profile not available.'

        sorted_keys = sorted(stats['timings'].keys(), key=lambda stat: \
                             sum(stats['timings'][stat].values()), reverse=True)
        for name in sorted_keys:
            if name == 'Total':
                continue
            if sum(stats['timings'][name].values()) <= 0.0:
                debug_msg += '\n  Zero time record for %s.' % name
                continue
            try:
                TimeList = [((100.0*time/stats['timings']['Total'][chan]),
                             chan) for chan, time in stats['timings'][name].items()]
            except (KeyError, ZeroDivisionError):
                debug_msg += '\n\n  Timing profile for %s unavailable.' % name
                continue
            TimeList.sort()
            debug_msg += '\n  Timing profile for <%s> :' % name
            try:
                debug_msg += '\n    Overall fraction of time         %.3f %%' % \
                             safe_float((100.0*(sum(stats['timings'][name].values())/
                                                sum(stats['timings']['Total'].values()))))
            except (KeyError, ZeroDivisionError):
                debug_msg += '\n    Overall fraction of time unavailable.'
            debug_msg += '\n    Largest fraction of time         %.3f %% (%s)' % \
                         (TimeList[-1][0], TimeList[-1][1])
            debug_msg += '\n    Smallest fraction of time        %.3f %% (%s)' % \
                         (TimeList[0][0], TimeList[0][1])

        # ==============================
        # == log file error detection ==
        # ==============================

        # Find the number of potential errors found in all log files.
        # This re is a simple case-insensitive match on 'error', with a veto
        # that excludes the sentence
        # "See Section 6 of paper for error calculation."
        # which appears in the LHAPDF header in the logs.
        err_finder = re.compile(
            r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)", re.IGNORECASE)
        for log in all_log_files:
            logfile = open(log, 'r')
            nErrors = len(re.findall(err_finder, logfile.read()))
            logfile.close()
            if nErrors != 0:
                stats['Errors'].append((str(log), nErrors))

        nErrors = sum([err[1] for err in stats['Errors']], 0)
        if nErrors != 0:
            debug_msg += '\n  WARNING:: A total of %d error%s ha%s been ' \
                         % (nErrors, 's' if nErrors > 1 else '', 've' if nErrors > 1 else 's') + \
                         'found in the following log file%s:' % ('s' if \
                         len(stats['Errors']) > 1 else '')
            for error in stats['Errors'][:3]:
                log_name = '/'.join(error[0].split('/')[-5:])
                debug_msg += '\n   > %d error%s in %s' % \
                             (error[1], 's' if error[1] > 1 else '', log_name)
            if len(stats['Errors']) > 3:
                nRemainingErrors = sum([err[1] for err in stats['Errors']][3:], 0)
                nRemainingLogs = len(stats['Errors']) - 3
                debug_msg += '\n   And another %d error%s in %d other log file%s' % \
                             (nRemainingErrors, 's' if nRemainingErrors > 1 else '',
                              nRemainingLogs, 's' if nRemainingLogs > 1 else '')

        return message, debug_msg

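    # Sketch of the accumulated data layout (derived from the code above, not
    # taken from the original source): each entry of stats['UPS'] is a list
    #
    #   stats['UPS'][channel] = [ntot, nsun, nsps, nups, neps,
    #                            nddp, nqdp, nini, n100, n10,
    #                            [per-unit counts for return codes 0-9]]
    #
    # and RetUnit_finder can be exercised on a synthetic line:
    #
    #   m = RetUnit_finder.search("#Unit 6 = 42")
    #   assert (int(m.group('unit')), int(m.group('n_occurences'))) == (6, 42)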
    def reweight_and_collect_events(self, options, mode, nevents, event_norm):
        """this function calls the reweighting routines and creates the event
        file in the Events dir. Returns the name of the event file created.
        """
        scale_pdf_info = []
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            scale_pdf_info = self.run_reweight(options['reweightonly'])
        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                     cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                       stdin=subprocess.PIPE,
                       stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        if event_norm.lower() == 'sum':
            p.communicate(input='1\n')
        elif event_norm.lower() == 'unity':
            p.communicate(input='3\n')
        elif event_norm.lower() == 'bias':
            p.communicate(input='0\n')
        else:
            p.communicate(input='2\n')

        # get the filename from collect_events
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                                'The event file has not been created. Check collect_events.log')
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            self.print_summary(options, 2, mode, scale_pdf_info)
            res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
            for res_file in res_files:
                files.mv(res_file, pjoin(self.me_dir, 'Events', self.run_name))

        logger.info('The %s file has been generated.\n' % (evt_file))
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        return evt_file[:-3]

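    # For reference (restating the branches above, not original code): the menu
    # choice piped to ./collect_events as a function of event_norm is
    #
    #   {'sum': '1', 'unity': '3', 'bias': '0'}
    #
    # with '2' being the fallback for any other setting (in practice 'average').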
    def run_mcatnlo(self, evt_file, options):
        """runs mcatnlo on the generated event file, to produce showered events
        """
        logger.info('Preparing MCatNLO run')
        try:
            misc.gunzip(evt_file)
        except Exception:
            pass

        self.banner = banner_mod.Banner(evt_file)
        shower = self.banner.get_detail('run_card', 'parton_shower').upper()

        # check that the number of split event files divides the number of
        # events, otherwise set it to 1
        if int(self.banner.get_detail('run_card', 'nevents') / \
               self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
               != self.banner.get_detail('run_card', 'nevents'):
            logger.warning(\
                'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
                'Setting it to 1.')
            self.shower_card['nsplit_jobs'] = 1

        # don't split jobs if the user asks to shower only a part of the events
        if self.shower_card['nevents'] > 0 and \
           self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
           self.shower_card['nsplit_jobs'] != 1:
            logger.warning(\
                'Only a part of the events will be showered.\n' + \
                'Setting nsplit_jobs in the shower_card to 1.')
            self.shower_card['nsplit_jobs'] = 1

        self.banner_to_mcatnlo(evt_file)

        # if fastjet has to be linked (in extralibs) then
        # add lib/include dirs for fastjet if fastjet-config is present on the
        # system, otherwise add fjcore to the files to combine
        if 'fastjet' in self.shower_card['extralibs']:
            # first, check that stdc++ is also linked
            if not 'stdc++' in self.shower_card['extralibs']:
                logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
                self.shower_card['extralibs'] += ' stdc++'
            # then check if options['fastjet'] corresponds to a valid fj installation
            try:
                # this is for a complete fj installation
                p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
                                     stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                output, error = p.communicate()
                # remove the line break from output (last character)
                output = output[:-1]
                # add lib/include paths
                if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
                    logger.warning('Linking FastJet: updating EXTRAPATHS')
                    self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
                if not pjoin(output, 'include') in self.shower_card['includepaths']:
                    logger.warning('Linking FastJet: updating INCLUDEPATHS')
                    self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
                # to be changed in the fortran wrapper
                include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
                namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
            except Exception:
                logger.warning('Linking FastJet: using fjcore')
                # this is for FJcore, so no FJ library has to be linked
                self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
                if not 'fjcore.o' in self.shower_card['analyse']:
                    self.shower_card['analyse'] += ' fjcore.o'
                # to be changed in the fortran wrapper
                include_line = '#include "fjcore.hh"//INCLUDE_FJ'
                namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
            # change the fortran wrapper with the correct namespaces/includes
            fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
            for line in fjwrapper_lines:
                if '//INCLUDE_FJ' in line:
                    fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
                if '//NAMESPACE_FJ' in line:
                    fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
            with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
                fsock.write('\n'.join(fjwrapper_lines) + '\n')

        extrapaths = self.shower_card['extrapaths'].split()

        # check that the paths needed by HW++ and PY8 are set if one uses these showers
        if shower in ['HERWIGPP', 'PYTHIA8']:
            path_dict = {'HERWIGPP': ['hepmc_path',
                                      'thepeg_path',
                                      'hwpp_path'],
                         'PYTHIA8': ['pythia8_path']}

            if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
                raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
                                    ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))

        if shower == 'HERWIGPP':
            extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
            self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')

        # add the HepMC path of the pythia8 installation
        if shower == 'PYTHIA8':
            hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'],
                                     stdout=subprocess.PIPE).stdout.read().strip()
            # this gives all the flags, i.e.
            # -I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC
            # we just need the path to the HepMC libraries
            extrapaths.append(hepmc.split()[1].replace('-L', ''))

        if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
            extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))

        # set the PATH for the dynamic libraries
        if sys.platform == 'darwin':
            ld_library_path = 'DYLD_LIBRARY_PATH'
        else:
            ld_library_path = 'LD_LIBRARY_PATH'
        if ld_library_path in os.environ.keys():
            paths = os.environ[ld_library_path]
        else:
            paths = ''
        paths += ':' + ':'.join(extrapaths)
        os.putenv(ld_library_path, paths)

        shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
        self.shower_card.write_card(shower, shower_card_path)

        # overwrite if shower_card_set.dat exists in MCatNLO
        if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
                     pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))

        mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
        self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')

        # libdl may be needed for pythia 82xx
        #if shower == 'PYTHIA8' and not \
        #    os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
        #    'dl' not in self.shower_card['extralibs'].split():
        #    # 'dl' has to be linked with the extralibs
        #    self.shower_card['extralibs'] += ' dl'
        #    logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
        #                   "It is needed for the correct running of PY8.2xx.\n" + \
        #                   "If this library cannot be found on your system, a crash will occur.")

        misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
                  stderr=open(mcatnlo_log, 'w'),
                  cwd=pjoin(self.me_dir, 'MCatNLO'),
                  close_fds=True)

        exe = 'MCATNLO_%s_EXE' % shower
        if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
           not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
            print open(mcatnlo_log).read()
            raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
        logger.info('                     ... done')

        # create an empty dir where to run
        count = 1
        while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                                  (shower, count))):
            count += 1
        rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                       (shower, count))
        os.mkdir(rundir)
        files.cp(shower_card_path, rundir)

        # look for the event files (don't re-split if one asks for the
        # same number of event files as in the previous run)
        event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
        if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
            logger.info('Cleaning old files and splitting the event file...')
            # clean the old files
            files.rm([f for f in event_files if 'events.lhe' not in f])
            if self.shower_card['nsplit_jobs'] > 1:
                misc.compile(['split_events'], cwd=pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
                p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
                               stdin=subprocess.PIPE,
                               stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
                               cwd=pjoin(self.me_dir, 'Events', self.run_name))
                p.communicate(input='events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
                logger.info('Splitting done.')
            event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))

        event_files.sort()

        self.update_status('Showering events...', level='shower')
        logger.info('(Running in %s)' % rundir)
        if shower != 'PYTHIA8':
            files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
        else:
            # special treatment for pythia8
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
            files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
            if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                # this is PY8.1xxx
                files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
                files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
            else:
                # this is PY8.2xxx
                files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
        # link the hwpp exe in the rundir
        if shower == 'HERWIGPP':
            try:
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                    files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
                if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                    files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
            except Exception:
                raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')

            if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
                files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)

        files.ln(evt_file, rundir, 'events.lhe')
        for i, f in enumerate(event_files):
            files.ln(f, rundir, 'events_%d.lhe' % (i + 1))

        if not self.shower_card['analyse']:
            # a hep/hepmc file as output
            out_id = 'HEP'
        else:
            # one or more .top file(s) as output
            if "HwU" in self.shower_card['analyse']:
                out_id = 'HWU'
            else:
                out_id = 'TOP'

        # write the executable
        with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
            # set the PATH for the dynamic libraries
            if sys.platform == 'darwin':
                ld_library_path = 'DYLD_LIBRARY_PATH'
            else:
                ld_library_path = 'LD_LIBRARY_PATH'
            fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
                        % {'ld_library_path': ld_library_path,
                           'extralibs': ':'.join(extrapaths)})
        subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])

        if event_files:
            arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
                        for i in range(len(event_files))]
        else:
            arg_list = [[shower, out_id, self.run_name]]

        self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
        self.njobs = 1
        self.wait_for_complete('shower')

        # now collect the results
        message = ''
        warning = ''
        to_gzip = [evt_file]
        if out_id == 'HEP':
            # copy the showered stdhep/hepmc file back into Events
            if shower in ['PYTHIA8', 'HERWIGPP']:
                hep_format = 'HEPMC'
                ext = 'hepmc'
            else:
                hep_format = 'StdHEP'
                ext = 'hep'

            hep_file = '%s_%s_0.%s.gz' % \
                       (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
            count = 0

            # find the first available name for the output:
            # check existing results with or without event splitting
            while os.path.exists(hep_file) or \
                  os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)):
                count += 1
                hep_file = '%s_%s_%d.%s.gz' % \
                           (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)

            try:
                if self.shower_card['nsplit_jobs'] == 1:
                    files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
                    message = ('The file %s has been generated.\nIt contains showered' + \
                               ' and hadronized events in the %s format obtained by' + \
                               ' showering the parton-level event file %s.gz with %s') % \
                              (hep_file, hep_format, evt_file, shower)
                else:
                    hep_list = []
                    for i in range(self.shower_card['nsplit_jobs']):
                        hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
                        files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
                    message = ('The following files have been generated:\n  %s\nThey contain showered' + \
                               ' and hadronized events in the %s format obtained by' + \
                               ' showering the (split) parton-level event file %s.gz with %s') % \
                              ('\n  '.join(hep_list), hep_format, evt_file, shower)

            except (OSError, IOError):
                raise aMCatNLOError('No file has been generated, an error occurred.' + \
                                    ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))

            # run the plot creation in a secure way
            if hep_format == 'StdHEP':
                try:
                    self.do_plot('%s -f' % self.run_name)
                except Exception, error:
                    logger.info("Failed to make the plots. Continuing...")
                    pass

        elif out_id == 'TOP' or out_id == 'HWU':
            # copy the topdrawer or HwU file(s) back into Events
            if out_id == 'TOP':
                ext = 'top'
            elif out_id == 'HWU':
                ext = 'HwU'
            topfiles = []
            top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
            for top_tar in top_tars:
                topfiles.extend(top_tar.getnames())

            # safety check
            if len(top_tars) != self.shower_card['nsplit_jobs']:
                raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
                                    (self.shower_card['nsplit_jobs'], len(top_tars)))

            # find the first available name for the output:
            # check existing results with or without event splitting
            filename = 'plot_%s_%d_' % (shower, 1)
            count = 1
            while os.path.exists(pjoin(self.me_dir, 'Events',
                                       self.run_name, '%s0.%s' % (filename, ext))) or \
                  os.path.exists(pjoin(self.me_dir, 'Events',
                                       self.run_name, '%s0__1.%s' % (filename, ext))):
                count += 1
                filename = 'plot_%s_%d_' % (shower, count)

            if out_id == 'TOP':
                hist_format = 'TopDrawer format'
            elif out_id == 'HWU':
                hist_format = 'HwU and GnuPlot formats'

            if not topfiles:
                # if no topfiles are found just warn the user
                warning = 'No .top file has been generated. For the results of your ' + \
                          'run, please check inside %s' % rundir
            elif self.shower_card['nsplit_jobs'] == 1:
                # only one job for the shower
                top_tars[0].extractall(path=rundir)
                plotfiles = []
                for i, file in enumerate(topfiles):
                    if out_id == 'TOP':
                        plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                         '%s%d.top' % (filename, i))
                        files.mv(pjoin(rundir, file), plotfile)
                    elif out_id == 'HWU':
                        out = pjoin(self.me_dir, 'Events',
                                    self.run_name, '%s%d' % (filename, i))
                        histos = [{'dirname': pjoin(rundir, file)}]
                        self.combine_plots_HwU(histos, out)
                        try:
                            misc.call(['gnuplot', '%s%d.gnuplot' % (filename, i)], \
                                      stdout=os.open(os.devnull, os.O_RDWR), \
                                      stderr=os.open(os.devnull, os.O_RDWR), \
                                      cwd=pjoin(self.me_dir, 'Events', self.run_name))
                        except Exception:
                            pass
                        plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                         '%s%d.HwU' % (filename, i))
                    plotfiles.append(plotfile)

                ffiles = 'files'
                have = 'have'
                if len(plotfiles) == 1:
                    ffiles = 'file'
                    have = 'has'

                message = ('The %s %s %s been generated, with histograms in the' + \
                           ' %s, obtained by showering the parton-level' + \
                           ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
                                                      hist_format, evt_file, shower)
            else:
                # many jobs for the shower have been run
                topfiles_set = set(topfiles)
                plotfiles = []
                for j, top_tar in enumerate(top_tars):
                    top_tar.extractall(path=rundir)
                    for i, file in enumerate(topfiles_set):
                        plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                         '%s%d__%d.%s' % (filename, i, j + 1, ext))
                        files.mv(pjoin(rundir, file), plotfile)
                        plotfiles.append(plotfile)

                # check if the user asked to combine the .top into a single file
                if self.shower_card['combine_td']:
                    misc.compile(['sum_plots'], cwd=pjoin(self.me_dir, 'Utilities'))

                    if self.banner.get('run_card', 'event_norm').lower() == 'sum':
                        norm = 1.
                    else:
                        norm = 1. / float(self.shower_card['nsplit_jobs'])

                    plotfiles2 = []
                    for i, file in enumerate(topfiles_set):
                        filelist = ['%s%d__%d.%s' % (filename, i, j + 1, ext) \
                                    for j in range(self.shower_card['nsplit_jobs'])]
                        if out_id == 'TOP':
                            infile = "%d\n%s\n%s\n" % \
                                     (self.shower_card['nsplit_jobs'],
                                      '\n'.join(filelist),
                                      '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
                            p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
                                           stdin=subprocess.PIPE,
                                           stdout=os.open(os.devnull, os.O_RDWR),
                                           cwd=pjoin(self.me_dir, 'Events', self.run_name))
                            p.communicate(input=infile)
                            files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
                                     pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
                        elif out_id == 'HWU':
                            out = pjoin(self.me_dir, 'Events',
                                        self.run_name, '%s%d' % (filename, i))
                            histos = []
                            norms = []
                            for plotfile in plotfiles:
                                histos.append({'dirname': plotfile})
                                norms.append(norm)
                            self.combine_plots_HwU(histos, out, normalisation=norms)
                            try:
                                misc.call(['gnuplot', '%s%d.gnuplot' % (filename, i)], \
                                          stdout=os.open(os.devnull, os.O_RDWR), \
                                          stderr=os.open(os.devnull, os.O_RDWR), \
                                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
                            except Exception:
                                pass

                        plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i, ext)))
                        tar = tarfile.open(
                            pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
                        for f in filelist:
                            tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
                        files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
                        tar.close()

                    ffiles = 'files'
                    have = 'have'
                    if len(plotfiles2) == 1:
                        ffiles = 'file'
                        have = 'has'

                    message = ('The %s %s %s been generated, with histograms in the' + \
                               ' %s, obtained by showering the parton-level' + \
                               ' file %s.gz with %s.\n' + \
                               'The files from the different shower ' + \
                               'jobs (before combining them) can be found inside %s.') % \
                              (ffiles, ', '.join(plotfiles2), have, hist_format, \
                               evt_file, shower,
                               ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))

                else:
                    message = ('The following files have been generated:\n  %s\n' + \
                               'They contain histograms in the' + \
                               ' %s, obtained by showering the parton-level' + \
                               ' file %s.gz with %s.') % ('\n  '.join(plotfiles), \
                                                          hist_format, evt_file, shower)

        # Now archive the shower card used if RunMaterial is present
        run_dir_path = pjoin(rundir, self.run_name)
        if os.path.exists(pjoin(run_dir_path, 'RunMaterial.tar.gz')):
            misc.call(['tar', '-xzpf', 'RunMaterial.tar.gz'], cwd=run_dir_path)
            files.cp(pjoin(self.me_dir, 'Cards', 'shower_card.dat'),
                     pjoin(run_dir_path, 'RunMaterial', 'shower_card_for_%s_%d.dat' \
                           % (shower, count)))
            misc.call(['tar', '-czpf', 'RunMaterial.tar.gz', 'RunMaterial'],
                      cwd=run_dir_path)
            shutil.rmtree(pjoin(run_dir_path, 'RunMaterial'))
        # end of the run: gzip files and print out the message/warning
        for f in to_gzip:
            misc.gzip(f)
        if message:
            logger.info(message)
        if warning:
            logger.warning(warning)

        self.update_status('Run complete', level='shower', update_results=True)
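    # Environment sketch (assumption: a Linux host, so LD_LIBRARY_PATH is used):
    # with extrapaths == ['/opt/hepmc/lib', '/opt/fastjet/lib'], the shower jobs
    # run with
    #
    #   LD_LIBRARY_PATH=<previous value>:/opt/hepmc/lib:/opt/fastjet/lib
    #
    # On Darwin the same paths are appended to DYLD_LIBRARY_PATH instead.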

    ############################################################################
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """define the run name, the run_tag, the banner and the results."""

        # when are we forced to change the tag: new_run:previous run requiring changes
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return # Nothing to do anymore

        # save/clean previous run
        if self.run_name:
            self.store_result()
        # store new name
        self.run_name = name

        # Read run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # First call for this run -> set the banner
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level == 'parton':
            pass # No results yet, so the current tag is fine
        elif not self.run_name in self.results:
            # This is only for the case when you want to trick the interface
            logger.warning('Trying to run data on an unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already defined in the last tag -> need to switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # We can add the results to the current run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag # ensure that run_tag is correct

        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous run having the data required for this
        # tag/run to work well.
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            for i in range(-1, -len(self.results[self.run_name])-1, -1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']

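    # Behaviour sketch (restating the upgrade_tag logic above, not original code):
    # requesting level='shower' on a run whose latest tag already contains shower,
    # delphes or madanalysis5_hadron output forces a fresh tag, e.g.
    #
    #   self.set_run_name('run_01', level='shower')
    #   # -> self.run_tag becomes self.get_available_tag() if 'run_01' already
    #   #    has showered results under its current tag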
    def store_result(self):
        """tar the pythia results. This is done when we are quite sure that
        the pythia output will not be used anymore."""

        if not self.run_name:
            return

        self.results.save()

        if not self.to_store:
            return

        if 'event' in self.to_store:
            if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
                if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
                    self.update_status('gzipping output file: events.lhe', level='parton', error=True)
                    misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
                else:
                    os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
            if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
                os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))

        tag = self.run_card['run_tag']

        self.to_store = []
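    # Usage sketch (hypothetical): callers queue work via self.to_store and the
    # next call to store_result performs it once, e.g.
    #
    #   self.to_store = ['event']
    #   self.store_result()   # gzips Events/<run>/events.lhe and resets to_store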

    ############################################################################
    def get_Gdir(self, Pdir=None):
        """get the list of G directories if not yet saved."""

        if hasattr(self, "Gdirs"):
            if self.me_dir in self.Gdirs:
                if Pdir is None:
                    # start the sum from an empty list, since the values are lists
                    return sum(self.Gdirs.values(), [])
                else:
                    return self.Gdirs[Pdir]

        Pdirs = self.get_Pdir()
        Gdirs = {self.me_dir: []}
        for P in Pdirs:
            Gdirs[P] = [pjoin(P, G) for G in os.listdir(P) if G.startswith('G') and
                        os.path.isdir(pjoin(P, G))]

        self.Gdirs = Gdirs
        return self.get_Gdir(Pdir)

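    # Usage sketch (hypothetical paths): after the cache is built,
    #
    #   self.get_Gdir()           # all G* dirs, e.g. ['.../P0_udx_epve/G1', ...]
    #   self.get_Gdir(Pdirs[0])   # only the G* dirs of that P* directory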
    def get_init_dict(self, evt_file):
        """reads the info in the init block and returns it in a dictionary"""
        ev_file = open(evt_file)
        init = ""
        found = False
        while True:
            line = ev_file.readline()
            if "<init>" in line:
                found = True
            elif found and not line.startswith('#'):
                init += line
            if "</init>" in line or "<event>" in line:
                break
        ev_file.close()

        # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2),
        # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP
        # these are not included (so far) in the init_dict:
        # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1)

        init_dict = {}
        init_dict['idbmup1'] = int(init.split()[0])
        init_dict['idbmup2'] = int(init.split()[1])
        init_dict['ebmup1'] = float(init.split()[2])
        init_dict['ebmup2'] = float(init.split()[3])
        init_dict['pdfgup1'] = int(init.split()[4])
        init_dict['pdfgup2'] = int(init.split()[5])
        init_dict['pdfsup1'] = int(init.split()[6])
        init_dict['pdfsup2'] = int(init.split()[7])
        init_dict['idwtup'] = int(init.split()[8])
        init_dict['nprup'] = int(init.split()[9])

        return init_dict

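    # Sketch (LHE init blocks follow the Les Houches accord; the numbers here
    # are purely illustrative): for an event file containing
    #
    #   <init>
    #    2212 2212 6500.0 6500.0 0 0 247000 247000 -4 1
    #    ...
    #   </init>
    #
    # this returns {'idbmup1': 2212, 'idbmup2': 2212, 'ebmup1': 6500.0, ...,
    # 'idwtup': -4, 'nprup': 1}.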
    def banner_to_mcatnlo(self, evt_file):
        """creates the mcatnlo input script using the values set in the header of the event file.
        It also checks whether the lhapdf library has to be used"""

        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        itry = 0
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        if nevents < 0 or \
           nevents > self.banner.get_detail('run_card', 'nevents'):
            nevents = self.banner.get_detail('run_card', 'nevents')

        nevents = nevents / self.shower_card['nsplit_jobs']

        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                         self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
        content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
        #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid')
        #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # this is for backward compatibility
            mcmass_lines = [l for l in \
                            open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                                 ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if we need to link lhapdf
        if int(self.shower_card['pdfcode']) > 1 or \
           (pdlabel == 'lhapdf' and int(self.shower_card['pdfcode']) == 1) or \
           shower == 'HERWIGPP':
            # Use LHAPDF (which should be correctly installed, because
            # either events were already generated with it, or the
            # user explicitly gives an LHAPDF number in the
            # shower_card).
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                          stdout=subprocess.PIPE).stdout.read().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode'] == 0:
                lhaid_list = ''
            elif self.shower_card['pdfcode'] == 1:
                lhaid_list = [max([init_dict['pdfsup1'], init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'], init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode']) == 1 or \
             int(self.shower_card['pdfcode']) == -1:
            # Try to use LHAPDF because the user wants to use the same PDF
            # as was used for the event generation. However, for the
            # event generation LHAPDF was not used, so it is non-trivial to
            # see if LHAPDF is available with the corresponding PDF
            # set. If not found, give a warning and use the built-in PDF
            # set instead.
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                              stdout=subprocess.PIPE).stdout.read().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'], init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'], init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                               ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                               ' needed PDF set. Will use the default internal PDF for the shower instead. To use the'+\
                               ' same set as was used in the event generation install LHAPDF and set the path using'+\
                               ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # add the pythia8/hwpp path(s)
        if self.options['pythia8_path']:
            content += 'PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content += 'HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
            content += 'THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
            content += 'HEPMCPATH=%s\n' % self.options['hepmc_path']

        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
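    # Output sketch (illustrative values, not from the original source): the
    # banner.dat written above is a simple KEY=value list consumed by the
    # MCatNLO shell scripts, e.g.
    #
    #   EVPREFIX=events.lhe
    #   NEVENTS=10000
    #   MCMODE=HERWIG6
    #   PDLABEL=nn23nlo
    #   ...
    #   LHAPDFPATH=
    #   PDFCODE=0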
    def run_reweight(self, only):
        """Runs the reweight_xsec_events executable on each sub-event file
        generated, to compute on the fly the scale and/or PDF uncertainties."""
        logger.info(' Doing reweight')

        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if only doing the reweight, copy back the nevents_unweighted file
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        # read the nevents_unweighted file to get the list of event files
        fsock = open(nev_unw)
        lines = fsock.read().split('\n')
        fsock.close()
        # make a copy of the original nevents_unweighted file
        files.cp(nev_unw, nev_unw + '.orig')
        # loop over the lines (all but the last one, which is empty) and check
        # that the number of events is not 0
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
        if self.run_card['event_norm'].lower() == 'bias' and self.run_card['nevents'] != 0:
            evt_wghts[:] = [1. / float(self.run_card['nevents']) for wgt in evt_wghts]
        # prepare the job_dict
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe),
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        # check that the new event files are complete
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' %
                                          pjoin(self.me_dir, 'SubProcesses', evt_file)],
                                         stdout=subprocess.PIPE).stdout.read().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the ' +
                        '\'reweight_xsec_events.output\' files inside the ' +
                        '\'SubProcesses/P*/G*/\' directories for details')

        # update the file names in nevents_unweighted
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()

        return self.pdf_scale_from_reweighting(evt_files, evt_wghts)

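    # A minimal sketch (illustrative, not part of the original code) of the
    # SubProcesses/nevents_unweighted format parsed above: one whitespace-
    # separated record per event file, with the file path in column 1, the
    # number of events in column 2 and the weight used above in column 4, e.g.
    #
    #   P0_gg_ttx/GF1/events.lhe    5000   0.12E+02   0.24E-02
    #   P0_qq_ttx/GF2/events.lhe       0   0.00E+00   0.00E+00
    #
    # Records with zero events in column 2 are skipped by run_reweight.
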
    def pdf_scale_from_reweighting(self, evt_files, evt_wghts):
        """This function takes the files with the scale and PDF values written
        by the reweight_xsec_events.f code (P*/G*/scale_pdf_dependence.dat)
        and computes the overall scale and PDF uncertainty (the latter is
        computed using the Hessian method (if lhaid<90000) or the Gaussian
        method (if lhaid>90000)) and returns them in per cent. The expected
        format of the file is: n_scales xsec_scale_central xsec_scale1 ...
        n_pdf xsec_pdf0 xsec_pdf1 ..."""

        scales = []
        pdfs = []
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'), 'r') as f:
                data_line = f.readline()
                if "scale variations:" in data_line:
                    for j, scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        scales_this = [float(val) * evt_wghts[i] for val in \
                                       f.readline().replace("D", "E").split()]
                        try:
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            scales += [scales_this]
                    data_line = f.readline()
                if "pdf variations:" in data_line:
                    for j, pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val) * evt_wghts[i] for val in \
                                     f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs += [pdfs_this]

        # get the scale uncertainty in per cent
        scale_info = []
        for j, scale in enumerate(scales):
            s_cen = scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max = (max(scale) / s_cen - 1) * 100
                s_min = (1 - min(scale) / s_cen) * 100
                # ren and fac scale dependence added in quadrature
                ren_var = []
                fac_var = []
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i] - s_cen)  # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i * len(self.run_card['rw_rscale'])] - s_cen)  # central ren scale
                s_max_q = ((s_cen + math.sqrt(math.pow(max(ren_var), 2) +
                            math.pow(max(fac_var), 2))) / s_cen - 1) * 100
                s_min_q = (1 - (s_cen - math.sqrt(math.pow(min(ren_var), 2) +
                            math.pow(min(fac_var), 2))) / s_cen) * 100
                s_size = len(scale)
            else:
                s_max = 0.0
                s_min = 0.0
                s_max_q = 0.0
                s_min_q = 0.0
                s_size = len(scale)
            scale_info.append({'cen': s_cen, 'min': s_min, 'max': s_max,
                               'min_q': s_min_q, 'max_q': s_max_q, 'size': s_size,
                               'label': self.run_card['dynamical_scale_choice'][j],
                               'unc': self.run_card['reweight_scale'][j]})

        # check whether we can use LHAPDF to compute the PDF uncertainty
        if any(self.run_card['reweight_pdf']):
            use_lhapdf = False
            lhapdf_libdir = subprocess.Popen([self.options['lhapdf'], '--libdir'],
                                             stdout=subprocess.PIPE).stdout.read().strip()

            try:
                candidates = [dirname for dirname in os.listdir(lhapdf_libdir)
                              if os.path.isdir(pjoin(lhapdf_libdir, dirname))]
            except OSError:
                candidates = []
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir, candidate, 'site-packages', 'lhapdf.so')):
                    sys.path.insert(0, pjoin(lhapdf_libdir, candidate, 'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf = True
                        break
                    except ImportError:
                        sys.path.pop(0)
                        continue

            if not use_lhapdf:
                try:
                    candidates = [dirname for dirname in os.listdir(lhapdf_libdir + '64')
                                  if os.path.isdir(pjoin(lhapdf_libdir + '64', dirname))]
                except OSError:
                    candidates = []
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir + '64', candidate, 'site-packages', 'lhapdf.so')):
                        sys.path.insert(0, pjoin(lhapdf_libdir + '64', candidate, 'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf = True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue

            if not use_lhapdf:
                try:
                    import lhapdf
                    use_lhapdf = True
                except ImportError:
                    logger.warning("Failed to access the python version of LHAPDF: "
                            "cannot compute the PDF uncertainty from the "
                            "weights in the events. The weights in the LHE "
                            "event files will still cover all PDF set members, "
                            "but no PDF uncertainty will be printed in the run summary. \n"
                            "If the python interface to LHAPDF is available on your system, try "
                            "adding its location to the PYTHONPATH environment variable and the "
                            "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf = False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf:
            lhapdf.setVerbosity(0)

        pdf_info = []
        for j, pdfset in enumerate(pdfs):
            p_cen = pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname = self.run_card['lhapdfsetname'][j]
                    try:
                        p = lhapdf.getPDFSet(pdfsetname)
                        ep = p.uncertainty(pdfset, -1)
                        p_cen = ep.central
                        p_min = abs(ep.errminus / p_cen) * 100
                        p_max = abs(ep.errplus / p_cen) * 100
                        p_type = p.errorType
                        p_size = p.size
                        p_conf = p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min = 0.0
                        p_max = 0.0
                        p_type = 'unknown'
                        p_conf = 'unknown'
                        p_size = len(pdfset)
                else:
                    p_min = 0.0
                    p_max = 0.0
                    p_type = 'unknown'
                    p_conf = 'unknown'
                    p_size = len(pdfset)
                    pdfsetname = self.run_card['lhaid'][j]
            else:
                p_min = 0.0
                p_max = 0.0
                p_type = 'none'
                p_conf = 'unknown'
                p_size = len(pdfset)
                pdfsetname = self.run_card['lhaid'][j]
            pdf_info.append({'cen': p_cen, 'min': p_min, 'max': p_max,
                             'unc': p_type, 'name': pdfsetname, 'size': p_size,
                             'label': self.run_card['lhaid'][j], 'conf': p_conf})

        scale_pdf_info = [scale_info, pdf_info]
        return scale_pdf_info

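    # A minimal standalone sketch of the LHAPDF python call used above
    # (assumptions: LHAPDF 6 with its python bindings importable; the set
    # name and the xsec_per_member list are illustrative only):
    #
    #   import lhapdf
    #   pdfset = lhapdf.getPDFSet('NNPDF23_nlo_as_0118')
    #   unc = pdfset.uncertainty(xsec_per_member, -1)  # -1: keep the set's own CL
    #   print unc.central, unc.errplus, unc.errminus, pdfset.errorType
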
    def wait_for_complete(self, run_type):
        """This function waits for the jobs submitted to the cluster to
        complete their run."""
        starttime = time.time()
        #logger.info('     Waiting for submitted jobs to complete')
        update_status = lambda i, r, f: self.update_status((i, r, f, run_type),
                              starttime=starttime, level='parton', update_results=True)
        try:
            self.cluster.wait(self.me_dir, update_status)
        except:
            self.cluster.remove()
            raise

    def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs=False):
        """Runs the jobs in job_dict (organised as folder: [job_list]) with
        the arguments in arg_list."""
        self.ijob = 0
        if run_type != 'shower':
            self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
            for args in arg_list:
                for Pdir, jobs in job_dict.items():
                    for job in jobs:
                        self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir))
                        if self.cluster_mode == 2:
                            time.sleep(1)  # security to allow all jobs to be launched
        else:
            self.njobs = len(arg_list)
            for args in arg_list:
                [(cwd, exe)] = job_dict.items()
                self.run_exe(exe, args, run_type, cwd)

        self.wait_for_complete(run_type)

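    # Usage sketch (hypothetical folder/script names and status label):
    # job_dict maps each SubProcesses folder to the list of scripts to run
    # there, and every script is launched once per argument list in arg_list:
    #
    #   job_dict = {'P0_gg_ttx': ['ajob1'], 'P0_uux_ttx': ['ajob1']}
    #   self.run_all(job_dict, [['0', 'born', '0', '0']], 'Setting up grids')
    #
    # For run_type='shower', job_dict must instead hold a single {cwd: exe}
    # pair, which is unpacked by the [(cwd, exe)] assignment above.
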
    def check_event_files(self, jobs):
        """Checks the integrity of the event files after splitting, and
        resubmits those jobs whose event files are not correctly terminated."""
        jobs_to_resubmit = []
        for job in jobs:
            last_line = ''
            try:
                last_line = subprocess.Popen(
                        ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')],
                        stdout=subprocess.PIPE).stdout.read().strip()
            except IOError:
                pass
            if last_line != "</LesHouchesEvents>":
                jobs_to_resubmit.append(job)
        self.njobs = 0
        if jobs_to_resubmit:
            run_type = 'Resubmitting broken jobs'
            logger.info('Some event files are broken, the corresponding jobs will be resubmitted.')
            for job in jobs_to_resubmit:
                logger.debug('Resubmitting ' + job['dirname'] + '\n')
            self.run_all_jobs(jobs_to_resubmit, 2, fixed_order=False)

    def find_jobs_to_split(self, pdir, job, arg):
        """Looks into the nevents_unweighted_splitted file to check how many
        split jobs are needed for this (pdir, job). arg is F, B or V."""
        # find the number of the integration channel
        splittings = []
        ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
        pattern = re.compile(r'for i in (\d+) ; do')
        match = re.search(pattern, ajob)
        channel = match.groups()[0]
        # then open the nevents_unweighted_splitted file and look for the
        # number of splittings to be done
        nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
        # This skips the channels with zero events, because they are
        # not of the form GFXX_YY, but simply GFXX
        pattern = re.compile(r"%s_(\d+)/events.lhe" % \
                             pjoin(pdir, 'G%s%s' % (arg, channel)))
        matches = re.findall(pattern, nevents_file)
        for m in matches:
            splittings.append(m)
        return splittings

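    # Worked example (illustrative): if P0_gg_ttx/ajob1 contains the line
    # 'for i in 5 ; do' and arg='F', the second pattern collects the YY
    # suffixes of entries such as 'P0_gg_ttx/GF5_1/events.lhe' and
    # 'P0_gg_ttx/GF5_2/events.lhe' in nevents_unweighted_splitted, so this
    # method returns ['1', '2'].
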
    def run_exe(self, exe, args, run_type, cwd=None):
        """This basic function launches exe, locally or on the cluster, with
        args as its arguments."""
        # first test that exe exists:
        execpath = None
        if cwd and os.path.exists(pjoin(cwd, exe)):
            execpath = pjoin(cwd, exe)
        elif not cwd and os.path.exists(exe):
            execpath = exe
        else:
            raise aMCatNLOError('Cannot find executable %s in %s' \
                                % (exe, os.getcwd()))
        # check that the executable has exec permissions
        if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
            subprocess.call(['chmod', '+x', exe], cwd=cwd)
        # finally run it
        if self.cluster_mode == 0:
            # this is for the serial run
            misc.call(['./' + exe] + args, cwd=cwd)
            self.ijob += 1
            self.update_status((max([self.njobs - self.ijob - 1, 0]),
                                min([1, self.njobs - self.ijob]),
                                self.ijob, run_type), level='parton')

        # this is for the cluster/multicore run
        elif 'reweight' in exe:
            # a reweight run
            # Find the correct PDF input file
            input_files, output_files = [], []
            pdfinput = self.get_pdf_input_filename()
            if os.path.exists(pdfinput):
                input_files.append(pdfinput)
            input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
            input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
            input_files.append(args[0])
            output_files.append('%s.rwgt' % os.path.basename(args[0]))
            output_files.append('reweight_xsec_events.output')
            output_files.append('scale_pdf_dependence.dat')

            return self.cluster.submit2(exe, args, cwd=cwd,
                    input_files=input_files, output_files=output_files,
                    required_output=output_files)

        elif 'ajob' in exe:
            # the 'standard' aMC@NLO job
            # check whether args is a list of strings
            if type(args[0]) == str:
                input_files, output_files, required_output, args = self.getIO_ajob(exe, cwd, args)
                # submitting
                self.cluster.submit2(exe, args, cwd=cwd,
                        input_files=input_files, output_files=output_files,
                        required_output=required_output)

#            # keep track of folders and arguments for splitted evt gen
#            subfolder = output_files[-1].split('/')[0]
#            if len(args) == 4 and '_' in subfolder:
#                self.split_folders[pjoin(cwd, subfolder)] = [exe] + args

        elif 'shower' in exe:
            # a shower job
            # args are [shower, output(HEP or TOP), run_name]
            # cwd is the shower rundir, where the executables are found
            input_files, output_files = [], []
            shower = args[0]
            # the input files
            if shower == 'PYTHIA8':
                input_files.append(pjoin(cwd, 'Pythia8.exe'))
                input_files.append(pjoin(cwd, 'Pythia8.cmd'))
                if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                    input_files.append(pjoin(cwd, 'config.sh'))
                    input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
                else:
                    input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
            else:
                input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
                input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
                if shower == 'HERWIGPP':
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                        input_files.append(pjoin(cwd, 'Herwig++'))
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                        input_files.append(pjoin(cwd, 'Herwig'))
                    input_files.append(pjoin(cwd, 'HepMCFortran.so'))
            if len(args) == 3:
                if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
                elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
                else:
                    raise aMCatNLOError('Event file not present in %s' % \
                                        pjoin(self.me_dir, 'Events', self.run_name))
            else:
                input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
            # the output files
            if len(args) == 3:
                output_files.append('mcatnlo_run.log')
            else:
                output_files.append('mcatnlo_run_%s.log' % args[3])
            if args[1] == 'HEP':
                if len(args) == 3:
                    fname = 'events'
                else:
                    fname = 'events_%s' % args[3]
                if shower in ['PYTHIA8', 'HERWIGPP']:
                    output_files.append(fname + '.hepmc.gz')
                else:
                    output_files.append(fname + '.hep.gz')
            elif args[1] == 'TOP' or args[1] == 'HWU':
                if len(args) == 3:
                    fname = 'histfile'
                else:
                    fname = 'histfile_%s' % args[3]
                output_files.append(fname + '.tar')
            else:
                raise aMCatNLOError('Not a valid output argument for a shower job: %s' % args[1])
            # submitting
            self.cluster.submit2(exe, args, cwd=cwd,
                    input_files=input_files, output_files=output_files)

        else:
            return self.cluster.submit(exe, args, cwd=cwd)

    def getIO_ajob(self, exe, cwd, args):
        # use the local disk if possible => need to know which files are
        # needed as input/output

        output_files = []
        required_output = []
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        # For the GoSam interface, we must copy the SLHA card as well
        if os.path.exists(pjoin(self.me_dir, 'OLP_virtuals', 'gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd, 'nevents.tar')):
            input_files.append(pjoin(cwd, 'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir, 'SubProcesses', 'OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # File for the loop (might not be present if MadLoop is not used)
        if os.path.exists(pjoin(cwd, 'MadLoop5_resources.tar.gz')) and \
           cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd, 'MadLoop5_resources')) and \
           cluster.need_transfer(self.options):
            tf = tarfile.open(pjoin(cwd, 'MadLoop5_resources.tar.gz'), 'w:gz',
                              dereference=True)
            tf.add(pjoin(cwd, 'MadLoop5_resources'), arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # MADEVENT MINT FO MODE
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            if args[2] == '0':
                current = '%s_G%s' % (args[1], args[0])
            else:
                current = '%s_G%s_%s' % (args[1], args[0], args[2])
            if os.path.exists(pjoin(cwd, current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current, args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current, args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # MINTMC MODE
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1], args[0])
            else:
                current = 'G%s%s_%s' % (args[1], args[0], args[2])
            if os.path.exists(pjoin(cwd, current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # this is for the split event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1], args[0], args[2], args[3]))
            else:
                required_output.append('%s/log_MINT%s.txt' % (current, args[3]))
                if args[3] in ['0', '1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError('Invalid arguments: %s' % (', '.join(args)))

        # Find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args

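    # Worked example (follows the branches above): for args =
    # ['5', 'F', '0', '1'] (integration channel 5, mode 'F', no split,
    # MINT step 1), current is 'GF5', so 'madevent_mintMC' is added to
    # input_files, 'GF5' to output_files, and 'GF5/log_MINT1.txt' plus
    # 'GF5/results.dat' end up in required_output.
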
    def compile(self, mode, options):
        """Compiles aMC@NLO to compute either NLO or NLO matched to shower,
        as specified in mode."""

        os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))

        self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
                                '%s_%s_banner.txt' % (self.run_name, self.run_tag)))

        self.get_characteristics(pjoin(self.me_dir,
                                       'SubProcesses', 'proc_characteristics'))

        # define a bunch of log files
        amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
        madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
        reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
        test_log = pjoin(self.me_dir, 'test.log')

        # environment variables to be included in make_opts
        self.make_opts_var = {}
        if self.proc_characteristics['has_loops'] and \
           not os.path.exists(pjoin(self.me_dir, 'OLP_virtuals')):
            self.make_opts_var['madloop'] = 'true'

        self.update_status('Compiling the code', level=None, update_results=True)

        libdir = pjoin(self.me_dir, 'lib')
        sourcedir = pjoin(self.me_dir, 'Source')

        # clean files
        files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
        # define which executable/tests to compile
        if '+' in mode:
            mode = mode.split('+')[0]
        if mode in ['NLO', 'LO']:
            exe = 'madevent_mintFO'
            tests = ['test_ME']
            self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
        elif mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            exe = 'madevent_mintMC'
            tests = ['test_ME', 'test_MC']
            # write an analyse_opts with a dummy analysis so that compilation goes through
            with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'), 'w') as fsock:
                fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')

        # directories where to compile exe
        p_dirs = [d for d in \
                  open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # create param_card.inc and run_card.inc
        self.do_treatcards('', amcatnlo=True, mode=mode)
        # if the --nocompile option is specified, check here that all the
        # executables exist. If they do, return.
        if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
                for p_dir in p_dirs]) and options['nocompile']:
            return

        # rm links to lhapdf lib/PDFsets if they exist
        if os.path.exists(pjoin(libdir, 'PDFsets')):
            files.rm(pjoin(libdir, 'PDFsets'))

        # read the run_card to find whether lhapdf is used or not
        if self.run_card['pdlabel'] == 'lhapdf' and \
           (self.banner.get_detail('run_card', 'lpp1') != 0 or \
            self.banner.get_detail('run_card', 'lpp2') != 0):

            self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            lhaid_list = self.run_card['lhaid']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)

        else:
            if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
                logger.info('Using built-in libraries for PDFs')

            self.make_opts_var['lhapdf'] = ""

        # read the run_card to find whether applgrid is used or not
        if self.run_card['iappl'] != 0:
            self.make_opts_var['applgrid'] = 'True'
            # check the versions of applgrid and amcfast
            for code in ['applgrid', 'amcfast']:
                try:
                    p = subprocess.Popen([self.options[code], '--version'], \
                                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                except OSError:
                    raise aMCatNLOError(('No valid %s installation found. \n' + \
                          'Please set the path to %s-config by using \n' + \
                          'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code, code, code, code))
                else:
                    output, _ = p.communicate()
                    if code == 'applgrid' and output < '1.4.63':
                        raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.63 or later.' \
                                            + ' You are using %s' % output)
                    if code == 'amcfast' and output < '1.1.1':
                        raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.' \
                                            + ' You are using %s' % output)

            # set up the Source/make_opts with the correct applgrid-config file
            appllibs = " APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
                       % (self.options['amcfast'], self.options['applgrid'])
            text = open(pjoin(self.me_dir, 'Source', 'make_opts'), 'r').readlines()
            text_out = []
            for line in text:
                if line.strip().startswith('APPLLIBS=$'):
                    line = appllibs
                text_out.append(line)
            with open(pjoin(self.me_dir, 'Source', 'make_opts'), 'w') as fsock:
                fsock.writelines(text_out)
        else:
            self.make_opts_var['applgrid'] = ""

        if 'fastjet' in self.options.keys() and self.options['fastjet']:
            self.make_opts_var['fastjet_config'] = self.options['fastjet']

        # add the make_opts_var to make_opts
        self.update_make_opts()

        # make Source
        self.update_status('Compiling source...', level=None)
        misc.compile(['clean4pdf'], cwd=sourcedir)
        misc.compile(cwd=sourcedir)
        if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
           and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
           and os.path.exists(pjoin(libdir, 'libmodel.a')) \
           and os.path.exists(pjoin(libdir, 'libpdf.a')):
            logger.info(' ...done, continuing with P* directories')
        else:
            raise aMCatNLOError('Compilation failed')

        # make StdHEP (only necessary with the MG option output_dependencies='internal')
        MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
        if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
            if os.path.exists(pjoin(sourcedir, 'StdHEP')):
                logger.info('Compiling StdHEP (can take a couple of minutes) ...')
                misc.compile(['StdHEP'], cwd=sourcedir)
                logger.info(' ...done.')
            else:
                raise aMCatNLOError('Could not compile StdHEP because its' + \
                        ' source directory could not be found in the SOURCE folder.\n' + \
                        " Check the MG5_aMC option 'output_dependencies'.")

        # make CutTools (only necessary with the MG option output_dependencies='internal')
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            if os.path.exists(pjoin(sourcedir, 'CutTools')):
                logger.info('Compiling CutTools (can take a couple of minutes) ...')
                misc.compile(['CutTools', '-j1'], cwd=sourcedir, nb_core=1)
                logger.info(' ...done.')
            else:
                raise aMCatNLOError('Could not compile CutTools because its' + \
                        ' source directory could not be found in the SOURCE folder.\n' + \
                        " Check the MG5_aMC option 'output_dependencies'.")
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
           not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
            raise aMCatNLOError('CutTools compilation failed.')

        # Verify the compatibility between the current compiler and the one
        # which was used when last compiling CutTools (if specified).
        compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                  libdir, 'libcts.a')))), 'compiler_version.log')
        if os.path.exists(compiler_log_path):
            compiler_version_used = open(compiler_log_path, 'r').read()
            if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                       pjoin(sourcedir, 'make_opts')))) in compiler_version_used:
                if os.path.exists(pjoin(sourcedir, 'CutTools')):
                    logger.info('CutTools was compiled with a different fortran' + \
                                ' compiler. Re-compiling it now...')
                    misc.compile(['cleanCT'], cwd=sourcedir)
                    misc.compile(['CutTools', '-j1'], cwd=sourcedir, nb_core=1)
                    logger.info(' ...done.')
                else:
                    raise aMCatNLOError("CutTools installation in %s" \
                            % os.path.realpath(pjoin(libdir, 'libcts.a')) + \
                            " seems to have been compiled with a different compiler than" + \
                            " the one specified in MG5_aMC. Please recompile CutTools.")

        # make IREGI (only necessary with the MG option output_dependencies='internal')
        if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
           and os.path.exists(pjoin(sourcedir, 'IREGI')):
            logger.info('Compiling IREGI (can take a couple of minutes) ...')
            misc.compile(['IREGI'], cwd=sourcedir)
            logger.info(' ...done.')

        if os.path.exists(pjoin(libdir, 'libiregi.a')):
            # Verify the compatibility between the current compiler and the one
            # which was used when last compiling IREGI (if specified).
            compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
                                      libdir, 'libiregi.a')))), 'compiler_version.log')
            if os.path.exists(compiler_log_path):
                compiler_version_used = open(compiler_log_path, 'r').read()
                if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
                           pjoin(sourcedir, 'make_opts')))) in compiler_version_used:
                    if os.path.exists(pjoin(sourcedir, 'IREGI')):
                        logger.info('IREGI was compiled with a different fortran' + \
                                    ' compiler. Re-compiling it now...')
                        misc.compile(['cleanIR'], cwd=sourcedir)
                        misc.compile(['IREGI'], cwd=sourcedir)
                        logger.info(' ...done.')
                    else:
                        raise aMCatNLOError("IREGI installation in %s" \
                                % os.path.realpath(pjoin(libdir, 'libiregi.a')) + \
                                " seems to have been compiled with a different compiler than" + \
                                " the one specified in MG5_aMC. Please recompile IREGI.")

        # check whether MadLoop virtuals have been generated
        if self.proc_characteristics['has_loops'] and \
           not os.path.exists(pjoin(self.me_dir, 'OLP_virtuals')):
            if mode in ['NLO', 'aMC@NLO', 'noshower']:
                tests.append('check_poles')

        # make and run tests (if asked for), gensym and make madevent in each dir
        self.update_status('Compiling directories...', level=None)

        for test in tests:
            self.write_test_input(test)

        try:
            import multiprocessing
            if not self.nb_core:
                try:
                    self.nb_core = int(self.options['nb_core'])
                except TypeError:
                    self.nb_core = multiprocessing.cpu_count()
        except ImportError:
            self.nb_core = 1

        compile_options = copy.copy(self.options)
        compile_options['nb_core'] = self.nb_core
        compile_cluster = cluster.MultiCore(**compile_options)
        logger.info('Compiling on %d cores' % self.nb_core)

        update_status = lambda i, r, f: self.donothing(i, r, f)
        for p_dir in p_dirs:
            compile_cluster.submit(prog=compile_dir,
                                   argument=[self.me_dir, p_dir, mode, options,
                                             tests, exe, self.options['run_mode']])
        try:
            compile_cluster.wait(self.me_dir, update_status)
        except Exception, error:
            logger.warning("Failed to compile the SubProcesses")
            if __debug__:
                raise
            compile_cluster.remove()
            self.do_quit('')

        logger.info('Checking test output:')
        for p_dir in p_dirs:
            logger.info(p_dir)
            for test in tests:
                logger.info(' Result for %s:' % test)

                this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
                # check that none of the tests failed
                self.check_tests(test, this_dir)

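    # Sketch (illustrative values, under the make_opts mechanism used above)
    # of what self.make_opts_var may hold just before update_make_opts()
    # merges it into Source/make_opts:
    #
    #   self.make_opts_var == {'madloop': 'true',
    #                          'lhapdf': '',
    #                          'applgrid': '',
    #                          'fastjet_config': '/usr/local/bin/fastjet-config'}
    #
    # The makefiles are then expected to test these assignments to select
    # the libraries to build and link.
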
    def donothing(*args):
        pass

    def check_tests(self, test, dir):
        """just call the correct parser for the test log.
        Skip check_poles for LOonly folders"""
        if test in ['test_ME', 'test_MC']:
            return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
        elif test == 'check_poles' and not os.path.exists(pjoin(dir, 'parton_lum_0.f')):
            return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))

    def parse_test_mx_log(self, log):
        """Reads and parses the test_ME/MC.log file."""
        content = open(log).read()
        if 'FAILED' in content:
            logger.info('Output of the failing test:\n' + content[:-1], '$MG:BOLD')
            raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
                'Please check that the widths of final-state particles (e.g. the top) have been' + \
                ' set to 0 in the param_card.dat.')
        else:
            lines = [l for l in content.split('\n') if 'PASSED' in l]
            logger.info('   Passed.')
            logger.debug('\n' + '\n'.join(lines))

    def parse_check_poles_log(self, log):
        """Reads and parses the check_poles.log file."""
        content = open(log).read()
        npass = 0
        nfail = 0
        for line in content.split('\n'):
            if 'PASSED' in line:
                npass += 1
                tolerance = float(line.split()[1])
            if 'FAILED' in line:
                nfail += 1
                tolerance = float(line.split()[1])

        if nfail + npass == 0:
            logger.warning('0 points have been tried')
            return

        if float(nfail)/float(nfail+npass) > 0.1:
            raise aMCatNLOError('Poles do not cancel, run cannot continue')
        else:
            logger.info('   Poles successfully cancel for %d points out of %d (tolerance=%2.1e)' \
                        % (npass, nfail+npass, tolerance))

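    # Worked example of the 10% threshold above: with npass=18 and nfail=2,
    # the failure fraction is 2/20 = 0.1, which is not > 0.1, so the run
    # continues and reports 'Poles successfully cancel for 18 points out
    # of 20'.
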
    def write_test_input(self, test):
        """write the input files to run test_ME/MC or check_poles"""
        if test in ['test_ME', 'test_MC']:
            content = "-2 -2\n"  # generate randomly energy/angle
            content += "100 100\n"  # run 100 points for soft and collinear tests
            content += "0\n"  # all FKS configs
            content += '\n'.join(["-1"] * 50)  # random diagram (=first diagram)
        elif test == 'check_poles':
            content = '20 \n -1\n'

        fsock = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w')
        if test == 'test_MC':
            shower = self.run_card['parton_shower']
            header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower
            fsock.write(header + content)
        elif test == 'test_ME':
            header = "2 \n"
            fsock.write(header + content)
        else:
            fsock.write(content)
        fsock.close()

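    # Example of the resulting test_ME_input.txt (follows directly from the
    # strings above): a '2' header line, then
    #
    #   -2 -2
    #   100 100
    #   0
    #   -1
    #   -1
    #   ... (50 lines of '-1' in total)
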
    action_switcher = AskRunNLO

    ############################################################################
    def ask_run_configuration(self, mode, options, switch={}):
        """Ask the question when launching generate_events/multi_run"""

        if 'parton' not in options:
            options['parton'] = False
        if 'reweightonly' not in options:
            options['reweightonly'] = False

        if mode == 'auto':
            mode = None
        if not mode and (options['parton'] or options['reweightonly']):
            mode = 'noshower'

        passing_cmd = []
        for key, value in switch.items():
            passing_cmd.append('%s=%s' % (key, value))

        if 'do_reweight' in options and options['do_reweight']:
            passing_cmd.append('reweight=ON')
        if 'do_madspin' in options and options['do_madspin']:
            passing_cmd.append('madspin=ON')

        force = self.force
        if mode == 'onlyshower':
            passing_cmd.append('onlyshower')
            force = True
        elif mode:
            passing_cmd.append(mode)

        switch, cmd_switch = self.ask('', '0', [], ask_class=self.action_switcher,
                                      mode=mode, force=force,
                                      first_cmd=passing_cmd,
                                      return_instance=True)

        if 'mode' in switch:
            mode = switch['mode']

        # assign the mode depending on the switch
        if not mode or mode == 'auto':
            if switch['order'] == 'LO':
                if switch['runshower']:
                    mode = 'aMC@LO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'LO'
                else:
                    mode = 'noshowerLO'
            elif switch['order'] == 'NLO':
                if switch['runshower']:
                    mode = 'aMC@NLO'
                elif switch['fixed_order'] == 'ON':
                    mode = 'NLO'
                else:
                    mode = 'noshower'
        logger.info('will run in mode: %s' % mode)

        if mode == 'noshower':
            if switch['shower'] == 'OFF':
                logger.warning("""You have chosen not to run a parton shower.
    NLO events without showering are NOT physical.
    Please, shower the Les Houches events before using them for physics analyses.
    You have to choose NOW which parton shower you WILL use and specify it in the run_card.""")
            else:
                logger.info("""Your parton-shower choice is not available for running.
    The events will be generated for the associated parton shower.
    Remember that NLO events without showering are NOT physical.""", '$MG:BOLD')

        # specify the cards which are needed for this run.
        cards = ['param_card.dat', 'run_card.dat']
        ignore = []
        if mode in ['LO', 'NLO']:
            options['parton'] = True
            ignore = ['shower_card.dat', 'madspin_card.dat']
            cards.append('FO_analyse_card.dat')
        else:
            if switch['madspin'] != 'OFF':
                cards.append('madspin_card.dat')
            if switch['reweight'] != 'OFF':
                cards.append('reweight_card.dat')
            if switch['madanalysis'] == 'HADRON':
                cards.append('madanalysis5_hadron_card.dat')
        if 'aMC@' in mode:
            cards.append('shower_card.dat')
        if mode == 'onlyshower':
            cards = ['shower_card.dat']
        if options['reweightonly']:
            cards = ['run_card.dat']

        self.keep_cards(cards, ignore)

        if mode == 'onlyshower':
            cards = ['shower_card.dat']

        # automatically switch to the keep_wgt option
        first_cmd = cmd_switch.get_cardcmd()

        if not options['force'] and not self.force:
            self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)

        self.banner = banner_mod.Banner()

        # store the cards in the banner
        for card in cards:
            self.banner.add(pjoin(self.me_dir, 'Cards', card))
        # and the run settings
        run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
        self.banner.add_text('run_settings', run_settings)

        if not mode == 'onlyshower':
            self.run_card = self.banner.charge_card('run_card')
            self.run_tag = self.run_card['run_tag']
            # this is if the user did not provide a name for the current run
            if not hasattr(self, 'run_name') or not self.run_name:
                self.run_name = self.find_available_run_name(self.me_dir)
                # add a tag to the run_name to distinguish the run type
                if self.run_name.startswith('run_'):
                    if mode in ['LO', 'aMC@LO', 'noshowerLO']:
                        self.run_name += '_LO'
            self.set_run_name(self.run_name, self.run_tag, 'parton')
            if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
                raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
            elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
                logger.warning("""You are running with FxFx merging enabled. To be able to merge
    samples of various multiplicities without double counting, you
    have to remove some events after showering 'by hand'. Please
    read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
                if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
                    raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
                elif self.run_card['parton_shower'].upper() != 'HERWIG6' and \
                     self.run_card['parton_shower'].upper() != 'PYTHIA8':
                    question = "FxFx merging not tested for %s shower. Do you want to continue?\n" \
                               % self.run_card['parton_shower'] + \
                               "Type 'n' to stop or 'y' to continue"
                    answers = ['n', 'y']
                    answer = self.ask(question, 'n', answers)
                    if answer == 'n':
                        error = '''Stop operation'''
                        self.ask_run_configuration(mode, options)
#                        raise aMCatNLOError(error)
            elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
                # NNLL+NLO jet veto only possible for LO event generation or fNLO runs.
                raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
        if 'aMC@' in mode or mode == 'onlyshower':
            self.shower_card = self.banner.charge_card('shower_card')

        elif mode in ['LO', 'NLO']:
            analyse_card_path = pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat')
            self.analyse_card = self.banner.charge_card('FO_analyse_card')

        return mode

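    # Example of the switch -> mode mapping implemented above:
    #   order=LO,  runshower=True                    -> 'aMC@LO'
    #   order=LO,  runshower=False, fixed_order=ON   -> 'LO'
    #   order=NLO, runshower=False, fixed_order=OFF  -> 'noshower'
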
#===============================================================================
# aMCatNLOCmdShell
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The shell command line processor of MadGraph5_aMC@NLO"""

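# Usage sketch (hypothetical run name; assumes the usual bin/aMCatNLO wrapper
# of a generated process directory): the option parsers defined below serve
# command lines such as
#
#   ./bin/aMCatNLO launch aMC@NLO -f -n my_run
#   ./bin/aMCatNLO calculate_xsect NLO --multicore
#   ./bin/aMCatNLO shower my_run -f
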
_compile_usage = "compile [MODE] [options]\n" + \
                 "-- compiles aMC@NLO \n" + \
                 "   MODE can be either FO, for fixed-order computations, \n" + \
                 "   or MC for matching with parton-shower Monte Carlos. \n" + \
                 "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                           help="Use the cards present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the Monte Carlo specified \n" + \
                "     in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton-level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name for the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                          help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                          help="Run the MadSpin package")


_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the Monte Carlo specified \n" + \
                "     in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                          help="Skip integration and event generation, just run reweight on the" + \
                               " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                          help="Stop the run after the parton-level file generation (you need " + \
                               "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name for the run")


_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate the cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the cards present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                          help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                          help="Submit the jobs in multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                          help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                          help="Provide a name for the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                          help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                          help="Skip grid set up, just generate events starting from " + \
                               "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on the parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, Monte Carlo, ...)\n' + \
                '   is directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                          help="Use the shower_card present in the directory for the launch, without editing it")

if '__main__' == __name__:
    # Launch the interface without any check that another instance is already
    # running. This can ONLY run a single command !!
    import sys
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n' + \
                 'Please upgrade your version of python.')

    import os
    import optparse
    # Get the directory of the script's real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    sys.path.insert(0, root_path)

    class MyOptParser(optparse.OptionParser):
        class InvalidOption(Exception):
            pass
        def error(self, msg=''):
            raise MyOptParser.InvalidOption(msg)

    # Write out a nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("", "--web", action="store_true", default=False, dest='web',
                      help='force the code to be in secure mode')
    parser.add_option("", "--debug", action="store_true", default=False, dest='debug',
                      help='force to launch debug mode')
    parser_error = ''
    done = False

    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # raise the correct error:
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set the logging level according to the level given by the options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = getattr(logging, options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        raise

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path), force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path), force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s is not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'