Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1   ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41  try: 
  42      import cpickle as pickle 
  43  except: 
  44      import pickle 
  45   
  46  try: 
  47      import readline 
  48      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  49  except: 
  50      GNU_SPLITTING = True 
  51   
  52  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  53  root_path = os.path.split(root_path)[0] 
  54  sys.path.insert(0, os.path.join(root_path,'bin')) 
  55   
  56  # usefull shortcut 
  57  pjoin = os.path.join 
  58  # Special logger for the Cmd Interface 
  59  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  60  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  61    
# The interface can run in two setups: from inside a generated process
# directory (where the top-level 'madgraph' package is not importable and
# local copies live under 'internal'), or from the main MadGraph5_aMC@NLO
# installation.  The aMCatNLO flag records which setup is active.
try:
    import madgraph
except ImportError:
    # standalone mode: running from within a process directory
    aMCatNLO = True
    import internal.extended_cmd as cmd
    import internal.common_run_interface as common_run
    import internal.banner as banner_mod
    import internal.misc as misc
    from internal import InvalidCmd, MadGraph5Error
    import internal.files as files
    import internal.cluster as cluster
    import internal.save_load_object as save_load_object
    import internal.gen_crossxhtml as gen_crossxhtml
    import internal.sum_html as sum_html
    import internal.shower_card as shower_card
    import internal.FO_analyse_card as analyse_card
    import internal.lhe_parser as lhe_parser
else:
    # import from madgraph directory
    aMCatNLO = False
    import madgraph.interface.extended_cmd as cmd
    import madgraph.interface.common_run_interface as common_run
    import madgraph.iolibs.files as files
    import madgraph.iolibs.save_load_object as save_load_object
    import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
    import madgraph.madevent.sum_html as sum_html
    import madgraph.various.banner as banner_mod
    import madgraph.various.cluster as cluster
    import madgraph.various.misc as misc
    import madgraph.various.shower_card as shower_card
    import madgraph.various.FO_analyse_card as analyse_card
    import madgraph.various.lhe_parser as lhe_parser
    # NOTE: the aMCatNLOError imported here is immediately shadowed by the
    # class of the same name defined just below.
    from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR
class aMCatNLOError(Exception):
    """Generic error of the aMC@NLO run interface.

    NOTE: when running from the madgraph package, this definition shadows
    the aMCatNLOError imported from madgraph just above.
    """
    pass
98
def compile_dir(*arguments):
    """Compile the directory p_dir.

    arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode),
    passed either packed as one tuple or as 7 positional arguments.
    This function needs not to be a class method in order to do
    the compilation on multicore.

    Returns 0 on success, or the MadGraph5Error message on failure.
    """
    if len(arguments) == 1:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
    elif len(arguments) == 7:
        (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
    else:
        # use the call form of raise: valid on both Python 2.6+ and 3
        raise aMCatNLOError('not correct number of argument')
    logger.info(' Compiling %s...' % p_dir)

    this_dir = pjoin(me_dir, 'SubProcesses', p_dir)

    try:
        # compile and run tests
        for test in tests:
            # skip check_poles for LOonly dirs
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
                continue
            if test in ('test_ME', 'test_MC'):
                test_exe = 'test_soft_col_limits'
            else:
                test_exe = test
            misc.compile([test_exe], cwd=this_dir, job_specs=False)
            input_file = pjoin(me_dir, '%s_input.txt' % test)
            # this can be improved/better written to handle the output;
            # context managers close the handles even when misc.call fails
            # (they were previously leaked)
            with open(input_file) as test_in:
                with open(pjoin(this_dir, '%s.log' % test), 'w') as test_log:
                    misc.call(['./%s' % (test_exe)], cwd=this_dir,
                              stdin=test_in, stdout=test_log,
                              close_fds=True)
            if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'MadLoop5_resources')):
                # archive the MadLoop resources; close the tarball even on error
                tf = tarfile.open(pjoin(this_dir, 'MadLoop5_resources.tar.gz'), 'w:gz',
                                  dereference=True)
                try:
                    tf.add(pjoin(this_dir, 'MadLoop5_resources'), arcname='MadLoop5_resources')
                finally:
                    tf.close()

        if not options['reweightonly']:
            misc.compile(['gensym'], cwd=this_dir, job_specs=False)
            with open(pjoin(this_dir, 'gensym.log'), 'w') as gensym_log:
                misc.call(['./gensym'], cwd=this_dir,
                          stdout=gensym_log,
                          close_fds=True)
            # compile madevent_mintMC/mintFO
            misc.compile([exe], cwd=this_dir, job_specs=False)
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs=False)

        logger.info(' %s done.' % p_dir)
        return 0
    except MadGraph5Error as msg:
        # error is returned (not raised) so multicore workers can report it
        return msg
def check_compiler(options, block=False):
    """Check that the current fortran compiler is gfortran 4.6 or later.

    If block is True, stop the execution (raise aMCatNLOError),
    otherwise just print a warning.
    """
    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n'+\
          'Note that You can still run all MadEvent run without any problem!'
    #first check that gfortran is installed
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        # Compare numerically, not lexicographically: the previous check,
        # ''.join(version.split('.')) >= '46', wrongly rejected e.g.
        # gfortran 4.10 ("410" < "46" as strings) and misread 4.6.x ("46x").
        try:
            version_tuple = tuple(int(v) for v in curr_version.split('.')[:2])
        except ValueError:
            # unparsable version string: treat as too old, warn/raise below
            version_tuple = ()
        if version_tuple < (4, 6):
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))
182
#===============================================================================
# CmdExtended
#===============================================================================
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    #suggested list of command
    next_possibility = {
        'start': [],
    }

    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    keyboard_stop_msg = """stopping all operation
            in order to quit MadGraph5_aMC@NLO please enter exit"""

    # Define the Error
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError

    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # Tag allowing/forbiding question
        self.force = False

        # If possible, build an info line with current version number
        # and date, from the VERSION text file
        info = misc.get_pkg_info()
        info_line = ""
        # 'in' instead of the Python2-only dict.has_key()
        if info and 'version' in info and 'date' in info:
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            version = open(pjoin(root_path, 'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                        (version, (24 - len(version)) * ' ')

        # Create a header for the history file.
        # Remember to fill in time at writeout time!
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        if info_line:
            info_line = info_line[1:]

        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)

    def get_history_header(self):
        """return the history header"""
        return self.history_header % misc.get_time_info()

    def stop_on_keyboard_stop(self):
        """action to perform to close nicely on a keyboard interupt"""
        # best-effort cleanup: never let a secondary failure mask the Ctrl-C
        try:
            if hasattr(self, 'cluster'):
                logger.info('rm jobs on queue')
                self.cluster.remove()
            if hasattr(self, 'results'):
                self.update_status('Stop by the user', level=None, makehtml=True, error=True)
                self.add_error_log_in_html(KeyboardInterrupt)
        except Exception:
            # narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # are not swallowed a second time
            pass

    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # relaxing the tag forbidding question
        self.force = False

        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit', 'quit', 'EOF']:
            return stop

        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            # status update is cosmetic; log and keep the interactive loop alive
            misc.sprint('self.update_status fails', log=logger)

    def nice_user_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_user_error(self, error, line)

    def nice_config_error(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_config_error(self, error, line)

    def nice_error_handling(self, error, line):
        """If a ME run is currently running add a link in the html output"""

        self.add_error_log_in_html()
        cmd.Cmd.nice_error_handling(self, error, line)
#===============================================================================
# HelpToCmd
#===============================================================================
class HelpToCmd(object):
    """ The Series of help routine for the aMCatNLOCmd"""

    def help_launch(self):
        """help for launch command"""
        _launch_parser.print_help()

    def help_banner_run(self):
        """help for the banner_run command"""
        logger.info("syntax: banner_run Path|RUN [--run_options]")
        logger.info("-- Reproduce a run following a given banner")
        logger.info(" One of the following argument is require:")
        logger.info(" Path should be the path of a valid banner.")
        logger.info(" RUN should be the name of a run of the current directory")
        self.run_options_help([('-f','answer all question by default'),
                               ('--name=X', 'Define the name associated with the new run')])

    def help_compile(self):
        """help for compile command"""
        _compile_parser.print_help()

    def help_generate_events(self):
        """help for generate_events command
        just call help_launch"""
        _generate_events_parser.print_help()

    def help_calculate_xsect(self):
        """help for calculate_xsect command"""
        _calculate_xsect_parser.print_help()

    def help_shower(self):
        """help for shower command"""
        _shower_parser.print_help()

    def help_open(self):
        """help for the open command"""
        logger.info("syntax: open FILE ")
        logger.info("-- open a file with the appropriate editor.")
        logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
        logger.info(' the path to the last created/used directory is used')

    def run_options_help(self, data):
        """print the command-specific options in 'data' followed by the
        run options shared by every command of the interface"""
        if data:
            logger.info('-- local options:')
            for name, info in data:
                logger.info(' %s : %s' % (name, info))

        logger.info("-- session options:")
        logger.info(" Note that those options will be kept for the current session")
        logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
        logger.info(" --multicore : Run in multi-core configuration")
        logger.info(" --nb_core=X : limit the number of core to use to X.")
403
#===============================================================================
# CheckValidForCmd
#===============================================================================
class CheckValidForCmd(object):
    """ The Series of check routine for the aMCatNLOCmd"""

    def check_shower(self, args, options):
        """Check the validity of the line. args[0] is the run_directory"""

        if options['force']:
            self.force = True

        if len(args) == 0:
            self.help_shower()
            raise self.InvalidCmd('Invalid syntax, please specify the run name')
        if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
            raise self.InvalidCmd('Directory %s does not exists' % \
                            pjoin(os.getcwd(), 'Events', args[0]))

        self.set_run_name(args[0], level='shower')
        # replace the run name by the full path to the run directory
        args[0] = pjoin(self.me_dir, 'Events', args[0])

    def check_plot(self, args):
        """Check the argument for the plot command
        plot run_name modes"""

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir or not td:
            logger.info('Retry to read configuration file to find madanalysis/td')
            self.set_configuration()

        madir = self.options['madanalysis_path']
        td = self.options['td_path']

        if not madir:
            error_msg = 'No Madanalysis path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)
        if not td:
            error_msg = 'No path to td directory correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        if len(args) == 0:
            if not hasattr(self, 'run_name') or not self.run_name:
                self.help_plot()
                raise self.InvalidCmd('No run name currently define. Please add this information.')
            args.append('all')
            return

        # first argument may be a run name instead of a plot mode
        if args[0] not in self._plot_mode:
            self.set_run_name(args[0], level='plot')
            del args[0]
            if len(args) == 0:
                args.append('all')
        elif not self.run_name:
            self.help_plot()
            raise self.InvalidCmd('No run name currently define. Please add this information.')

        for arg in args:
            if arg not in self._plot_mode and arg != self.run_name:
                self.help_plot()
                raise self.InvalidCmd('unknown options %s' % arg)

    def check_pgs(self, arg):
        """Check the argument for pgs command
        syntax: pgs [NAME]
        Note that other option are already remove at this point
        """

        # If not pythia-pgs path
        if not self.options['pythia-pgs_path']:
            logger.info('Retry to read configuration file to find pythia-pgs path')
            self.set_configuration()

        if not self.options['pythia-pgs_path'] or not \
           os.path.exists(pjoin(self.options['pythia-pgs_path'], 'src')):
            error_msg = 'No pythia-pgs path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'pgs')
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s. ' % (self.run_name, prev_tag))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                # decompress asynchronously; caller can wait on the lock
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'pgs')

        return lock

    def check_delphes(self, arg):
        """Check the argument for delphes command
        syntax: delphes [NAME]
        Note that other option are already remove at this point
        """

        # If no delphes path
        if not self.options['delphes_path']:
            logger.info('Retry to read configuration file to find delphes path')
            self.set_configuration()

        if not self.options['delphes_path']:
            error_msg = 'No delphes path correctly set.'
            error_msg += 'Please use the set command to define the path and retry.'
            error_msg += 'You can also define it in the configuration file.'
            raise self.InvalidCmd(error_msg)

        tag = [a for a in arg if a.startswith('--tag=')]
        if tag:
            arg.remove(tag[0])
            tag = tag[0][6:]

        if len(arg) == 0 and not self.run_name:
            if self.results.lastrun:
                arg.insert(0, self.results.lastrun)
            else:
                raise self.InvalidCmd('No run name currently define. Please add this information.')

        if len(arg) == 1 and self.run_name == arg[0]:
            arg.pop(0)

        if not len(arg) and \
           not os.path.exists(pjoin(self.me_dir, 'Events', 'pythia_events.hep')):
            # NOTE(review): calls help_pgs, not a delphes-specific help --
            # looks like a copy from check_pgs; confirm against HelpToCmd
            self.help_pgs()
            raise self.InvalidCmd('''No file file pythia_events.hep currently available
            Please specify a valid run_name''')

        lock = None
        if len(arg) == 1:
            prev_tag = self.set_run_name(arg[0], tag, 'delphes')
            # NOTE(review): unlike check_pgs this searches pjoin(me_dir,'Events')
            # without the run_name subdirectory -- confirm intended
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))

            if not filenames:
                raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
                    % (self.run_name, prev_tag,
                       pjoin(self.me_dir, 'Events', self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
            else:
                input_file = filenames[0]
                output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
                lock = cluster.asyncrone_launch('gunzip', stdout=open(output_file, 'w'),
                                                argument=['-c', input_file],
                                                close_fds=True)
        else:
            if tag:
                self.run_card['run_tag'] = tag
            self.set_run_name(self.run_name, tag, 'delphes')

        # fix: the gunzip lock was created but never returned (check_pgs does
        # return it so the caller can wait for the decompression to finish)
        return lock

    def check_calculate_xsect(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_calculate_xsect()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # fix: report args[0]; the original formatted args[1],
                # which raised IndexError for a single-argument line
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_generate_events(self, args, options):
        """check the validity of the line. args is ORDER,
        ORDER being LO or NLO. If no mode is passed, NLO is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if not args:
            args.append('NLO')
            return

        if len(args) > 1:
            self.help_generate_events()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['NLO', 'LO']:
                # fix: report args[0]; the original formatted args[1],
                # which raised IndexError for a single-argument line
                raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')

    def check_banner_run(self, args):
        """check the validity of line"""

        if len(args) == 0:
            self.help_banner_run()
            raise self.InvalidCmd('banner_run requires at least one argument.')

        tag = [a[6:] for a in args if a.startswith('--tag=')]

        if os.path.exists(args[0]):
            type = 'banner'
            format = self.detect_card_type(args[0])
            if format != 'banner':
                raise self.InvalidCmd('The file is not a valid banner.')
        elif tag:
            # fix: use the tag value (tag[0]); the original formatted the
            # list itself into the file name
            args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                (args[0], tag[0]))
            if not os.path.exists(args[0]):
                raise self.InvalidCmd('No banner associates to this name and tag.')
            # fix: 'type' was left undefined on this branch, causing a
            # NameError in the elif chain below when no --name is given
            type = 'banner'
        else:
            name = args[0]
            type = 'run'
            banners = misc.glob('*_banner.txt', pjoin(self.me_dir, 'Events', args[0]))
            if not banners:
                raise self.InvalidCmd('No banner associates to this name.')
            elif len(banners) == 1:
                args[0] = banners[0]
            else:
                #list the tag and propose those to the user
                tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
                tag = self.ask('which tag do you want to use?', tags[0], tags)
                args[0] = pjoin(self.me_dir, 'Events', args[0], '%s_%s_banner.txt' % \
                                    (args[0], tag))

        run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
        if run_name:
            # fix: use the extracted name, not the list, in the command line
            run_name = run_name[0]
            try:
                self.exec_cmd('remove %s all banner -f' % run_name)
            except Exception:
                pass
            self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
        elif type == 'banner':
            self.set_run_name(self.find_available_run_name(self.me_dir))
        elif type == 'run':
            if not self.results[name].is_empty():
                run_name = self.find_available_run_name(self.me_dir)
                logger.info('Run %s is not empty so will use run_name: %s' % \
                            (name, run_name))
                self.set_run_name(run_name)
            else:
                try:
                    # fix: this branch used the (here empty) --name list;
                    # the run to clean is 'name'
                    self.exec_cmd('remove %s all banner -f' % name)
                except Exception:
                    pass
                self.set_run_name(name)

    def check_launch(self, args, options):
        """check the validity of the line. args is MODE
        MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('auto')
            return

        if len(args) > 1:
            self.help_launch()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'auto']:
                raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
            mode = args[0]

        # check for incompatible options/modes
        if options['multicore'] and options['cluster']:
            raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
                                  ' are not compatible. Please choose one.')
        if mode == 'NLO' and options['reweightonly']:
            raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')

    def check_compile(self, args, options):
        """check the validity of the line. args is MODE
        MODE being FO or MC. If no mode is passed, MC is used"""
        # modify args in order to be DIR
        # mode being either standalone or madevent

        if options['force']:
            self.force = True

        if not args:
            args.append('MC')
            return

        if len(args) > 1:
            self.help_compile()
            raise self.InvalidCmd('Invalid Syntax: Too many argument')

        elif len(args) == 1:
            if not args[0] in ['MC', 'FO']:
                raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
            mode = args[0]
759 # check for incompatible options/modes 760 761 762 #=============================================================================== 763 # CompleteForCmd 764 #=============================================================================== 765 -class CompleteForCmd(CheckValidForCmd):
766 """ The Series of help routine for the MadGraphCmd""" 767
768 - def complete_launch(self, text, line, begidx, endidx):
769 """auto-completion for launch command""" 770 771 args = self.split_arg(line[0:begidx]) 772 if len(args) == 1: 773 #return mode 774 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line) 775 elif len(args) == 2 and line[begidx-1] == '@': 776 return self.list_completion(text,['LO','NLO'],line) 777 else: 778 opts = [] 779 for opt in _launch_parser.option_list: 780 opts += opt._long_opts + opt._short_opts 781 return self.list_completion(text, opts, line)
782
783 - def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
784 "Complete the banner run command" 785 try: 786 787 788 args = self.split_arg(line[0:begidx], error=False) 789 790 if args[-1].endswith(os.path.sep): 791 return self.path_completion(text, 792 os.path.join('.',*[a for a in args \ 793 if a.endswith(os.path.sep)])) 794 795 796 if len(args) > 1: 797 # only options are possible 798 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1])) 799 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags] 800 801 if args[-1] != '--tag=': 802 tags = ['--tag=%s' % t for t in tags] 803 else: 804 return self.list_completion(text, tags) 805 return self.list_completion(text, tags +['--name=','-f'], line) 806 807 # First argument 808 possibilites = {} 809 810 comp = self.path_completion(text, os.path.join('.',*[a for a in args \ 811 if a.endswith(os.path.sep)])) 812 if os.path.sep in line: 813 return comp 814 else: 815 possibilites['Path from ./'] = comp 816 817 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events')) 818 run_list = [n.rsplit('/',2)[1] for n in run_list] 819 possibilites['RUN Name'] = self.list_completion(text, run_list) 820 821 return self.deal_multiple_categories(possibilites, formatting) 822 823 824 except Exception, error: 825 print error
826 827
828 - def complete_compile(self, text, line, begidx, endidx):
829 """auto-completion for launch command""" 830 831 args = self.split_arg(line[0:begidx]) 832 if len(args) == 1: 833 #return mode 834 return self.list_completion(text,['FO','MC'],line) 835 else: 836 opts = [] 837 for opt in _compile_parser.option_list: 838 opts += opt._long_opts + opt._short_opts 839 return self.list_completion(text, opts, line)
840
841 - def complete_calculate_xsect(self, text, line, begidx, endidx):
842 """auto-completion for launch command""" 843 844 args = self.split_arg(line[0:begidx]) 845 if len(args) == 1: 846 #return mode 847 return self.list_completion(text,['LO','NLO'],line) 848 else: 849 opts = [] 850 for opt in _calculate_xsect_parser.option_list: 851 opts += opt._long_opts + opt._short_opts 852 return self.list_completion(text, opts, line)
853
854 - def complete_generate_events(self, text, line, begidx, endidx):
855 """auto-completion for generate_events command 856 call the compeltion for launch""" 857 self.complete_launch(text, line, begidx, endidx)
858 859
860 - def complete_shower(self, text, line, begidx, endidx):
861 args = self.split_arg(line[0:begidx]) 862 if len(args) == 1: 863 #return valid run_name 864 data = misc.glob(pjoin('*','events.lhe.gz', pjoin(self.me_dir, 'Events'))) 865 data = [n.rsplit('/',2)[1] for n in data] 866 tmp1 = self.list_completion(text, data) 867 if not self.run_name: 868 return tmp1
869
870 - def complete_plot(self, text, line, begidx, endidx):
871 """ Complete the plot command """ 872 873 args = self.split_arg(line[0:begidx], error=False) 874 875 if len(args) == 1: 876 #return valid run_name 877 data = misc.glob(pjoin('*','events.lhe*', pjoin(self.me_dir, 'Events'))) 878 data = [n.rsplit('/',2)[1] for n in data] 879 tmp1 = self.list_completion(text, data) 880 if not self.run_name: 881 return tmp1 882 883 if len(args) > 1: 884 return self.list_completion(text, self._plot_mode)
885
886 - def complete_pgs(self,text, line, begidx, endidx):
887 "Complete the pgs command" 888 args = self.split_arg(line[0:begidx], error=False) 889 if len(args) == 1: 890 #return valid run_name 891 data = misc.glob(pjoin('*', 'events_*.hep.gz'), 892 pjoin(self.me_dir, 'Events')) 893 data = [n.rsplit('/',2)[1] for n in data] 894 tmp1 = self.list_completion(text, data) 895 if not self.run_name: 896 return tmp1 897 else: 898 tmp2 = self.list_completion(text, self._run_options + ['-f', 899 '--tag=' ,'--no_default'], line) 900 return tmp1 + tmp2 901 else: 902 return self.list_completion(text, self._run_options + ['-f', 903 '--tag=','--no_default'], line)
904 905 complete_delphes = complete_pgs
906
class aMCatNLOAlreadyRunning(InvalidCmd):
    """Raised when another aMC@NLO run is already in progress in this
    process directory."""
    pass
909
class AskRunNLO(cmd.ControlSwitch):
    """Interactive switch panel shown before a run: the user selects the
    perturbative order and toggles fixed-order/shower/madspin/reweight/
    madanalysis."""

    # (switch name, description displayed to the user)
    to_control = [('order', 'Type of perturbative computation'),
                  ('fixed_order', 'No MC@[N]LO matching / event generation'),
                  ('shower', 'Shower the generated events'),
                  ('madspin', 'Decay onshell particles'),
                  ('reweight', 'Add weights to events for new hypp.'),
                  ('madanalysis','Run MadAnalysis5 on the events generated')]

    # answers that terminate the question loop
    quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']
921 - def __init__(self, question, line_args=[], mode=None, force=False, 922 *args, **opt):
923 924 self.check_available_module(opt['mother_interface'].options) 925 self.me_dir = opt['mother_interface'].me_dir 926 self.last_mode = opt['mother_interface'].last_mode 927 self.proc_characteristics = opt['mother_interface'].proc_characteristics 928 self.run_card = banner_mod.RunCard(pjoin(self.me_dir,'Cards', 'run_card.dat')) 929 super(AskRunNLO,self).__init__(self.to_control, opt['mother_interface'], 930 *args, **opt)
@property
def answer(self):
    """Final switch dictionary: normalise HERWIG7 to HERWIGPP and add the
    derived boolean 'runshower'."""
    result = super(AskRunNLO, self).answer
    if result['shower'] == 'HERWIG7':
        result['shower'] = 'HERWIGPP'
    # shower actually runs only for a valid, non-OFF choice
    result['runshower'] = (result['shower'] in self.get_allowed('shower')
                           and result['shower'] != 'OFF')
    return result
944 945
def check_available_module(self, options):
    """Fill self.available_module with the optional tools usable for this run,
    based on the configured paths in 'options'."""
    available = set()
    if options['madanalysis5_path']:
        available.add('MA5')
    # MadSpin is offered when running from a full MG5 install, or when a
    # standalone directory knows where MG5 lives
    if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
        available.add('MadSpin')
    if misc.has_f2py() or options['f2py_compiler']:
        available.add('reweight')
    if options['pythia8_path']:
        available.add('PY8')
    if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
        available.add('HW7')
    self.available_module = available
#
# shortcut answers: each handler interprets "<name>" (value is None)
# or rejects "<name>=value"
#
def ans_lo(self, value):
    """ function called if the user type lo=value. or lo (then value is None)"""
    if value is not None:
        logger.warning('Invalid command: lo=%s' % value)
        return
    self.switch['order'] = 'LO'
    self.switch['fixed_order'] = 'ON'
    self.set_switch('shower', 'OFF')

def ans_nlo(self, value):
    """shortcut: fixed-order NLO run, no shower."""
    if value is not None:
        logger.warning('Invalid command: nlo=%s' % value)
        return
    self.switch['order'] = 'NLO'
    self.switch['fixed_order'] = 'ON'
    self.set_switch('shower', 'OFF')

def ans_amc__at__nlo(self, value):
    """shortcut: NLO matched to a parton shower (aMC@NLO)."""
    if value is not None:
        logger.warning('Invalid command: aMC@NLO=%s' % value)
        return
    self.switch['order'] = 'NLO'
    self.switch['fixed_order'] = 'OFF'
    self.set_switch('shower', 'ON')

def ans_amc__at__lo(self, value):
    """shortcut: LO matched to a parton shower (aMC@LO)."""
    if value is not None:
        logger.warning('Invalid command: aMC@LO=%s' % value)
        return
    self.switch['order'] = 'LO'
    self.switch['fixed_order'] = 'OFF'
    self.set_switch('shower', 'ON')

def ans_noshower(self, value):
    """shortcut: NLO event generation without running the shower."""
    if value is not None:
        logger.warning('Invalid command: noshower=%s' % value)
        return
    self.switch['order'] = 'NLO'
    self.switch['fixed_order'] = 'OFF'
    self.set_switch('shower', 'OFF')

def ans_onlyshower(self, value):
    """shortcut: only shower previously generated events."""
    if value is not None:
        logger.warning('Invalid command: onlyshower=%s' % value)
        return
    self.switch['mode'] = 'onlyshower'
    self.switch['madspin'] = 'OFF'
    self.switch['reweight'] = 'OFF'

def ans_noshowerlo(self, value):
    """shortcut: LO event generation without running the shower."""
    if value is not None:
        logger.warning('Invalid command: noshowerlo=%s' % value)
        return
    self.switch['order'] = 'LO'
    self.switch['fixed_order'] = 'OFF'
    self.set_switch('shower', 'OFF')

def ans_madanalysis5(self, value):
    """ shortcut madanalysis5 -> madanalysis """
    if value is not None:
        self.set_switch('madanalysis', value)
        return
    return self.onecmd('madanalysis')
1028 # 1029 # ORDER 1030 #
def get_allowed_order(self):
    """Valid values for the 'order' switch."""
    return ["LO", "NLO"]

def set_default_order(self):
    """Set the default perturbative order from the previous run mode.

    Bugfix: the original unconditionally overwrote the switch with 'NLO'
    afterwards (dead store), and matched the misspelled mode 'aMC@L0'
    (digit zero) instead of 'aMC@LO'.
    """
    if self.last_mode in ['LO', 'aMC@LO', 'noshowerLO']:
        self.switch['order'] = 'LO'
        return
    self.switch['order'] = 'NLO'

def set_switch_off_order(self):
    """'order' has no OFF state: nothing to do."""
    return
1042 # 1043 # Fix order 1044 #
def get_allowed_fixed_order(self):
    """Valid values for 'fixed_order': decay (1 -> N) processes can only be
    run at fixed order."""
    if self.proc_characteristics['ninitial'] == 1:
        return ['ON']
    else:
        return ['ON', 'OFF']

def set_default_fixed_order(self):
    """Default for 'fixed_order' from the previous run mode.

    Bugfix: the original fell through after handling a previous fixed-order
    run, so its assignment was always overwritten (dead store); return early
    instead.
    """
    if self.last_mode in ['LO', 'NLO']:
        self.switch['fixed_order'] = 'ON'
        return
    if self.proc_characteristics['ninitial'] == 1:
        self.switch['fixed_order'] = 'ON'
    else:
        self.switch['fixed_order'] = 'OFF'

def color_for_fixed_order(self, switch_value):
    """Render 'OFF' (i.e. matching/event generation) in green, others in red."""
    if switch_value in ['OFF']:
        return self.green % switch_value
    else:
        return self.red % switch_value
1067
def color_for_shower(self, switch_value):
    """Render the shower switch: green when runnable ('ON' or an allowed
    shower name), red otherwise."""
    runnable = (switch_value == 'ON'
                or switch_value in self.get_allowed('shower'))
    return (self.green if runnable else self.red) % switch_value

def consistency_fixed_order_shower(self, vfix, vshower):
    """ consistency_XX_YY(val_XX, val_YY)
    -> XX is the new key set by the user to a new value val_XX
    -> YY is another key set by the user.
    -> return value should be None or "replace_YY"
    """
    if vfix != 'ON' or vshower == 'OFF':
        return None
    return 'OFF'

consistency_fixed_order_madspin = consistency_fixed_order_shower
consistency_fixed_order_reweight = consistency_fixed_order_shower

def consistency_fixed_order_madanalysis(self, vfix, vma5):
    """switching fixed_order ON forces madanalysis OFF."""
    return 'OFF' if vfix == 'ON' and vma5 == 'ON' else None

def consistency_shower_fixed_order(self, vshower, vfix):
    """ consistency_XX_YY(val_XX, val_YY)
    -> XX is the new key set by the user to a new value val_XX
    -> YY is another key set by the user.
    -> return value should be None or "replace_YY"
    """
    if vshower == 'OFF' or vfix != 'ON':
        return None
    return 'OFF'

consistency_madspin_fixed_order = consistency_shower_fixed_order
consistency_reweight_fixed_order = consistency_shower_fixed_order
consistency_madanalysis_fixed_order = consistency_shower_fixed_order

#
# Shower
#
def get_allowed_shower(self):
    """Valid values for the 'shower' switch (cached after the first call)."""
    if hasattr(self, 'allowed_shower'):
        return self.allowed_shower
    if self.proc_characteristics['ninitial'] == 1:
        # decay processes are fixed-order only
        self.allowed_shower = ['OFF']
        return ['OFF']
    choices = ['HERWIG6', 'OFF', 'PYTHIA6Q', 'PYTHIA6PT', ]
    if 'PY8' in self.available_module:
        choices.append('PYTHIA8')
    if 'HW7' in self.available_module:
        choices.append('HERWIGPP')
    self.allowed_shower = choices
    return choices

def check_value_shower(self, value):
    """Validate a shower value; map the many aliases to canonical names."""
    key = value.upper()
    if key in self.get_allowed_shower() or key in ['PYTHIA8', 'HERWIGPP']:
        return True
    if key == 'ON':
        # 'ON' means: use whatever the run_card asks for
        return self.run_card['parton_shower']
    alias_map = {'PYTHIA8': ['P8', 'PY8', 'PYTHIA_8'],
                 'PYTHIA6PT': ['PY6', 'P6', 'PY6PT', 'PYTHIA_6',
                               'PYTHIA_6PT', 'PYTHIA6PT', 'PYTHIA6_PT'],
                 'PYTHIA6Q': ['PY6Q', 'PYTHIA_6Q', 'PYTHIA6Q', 'PYTHIA6_Q'],
                 'HERWIG7': ['HW7', 'HERWIG7'],
                 'HERWIGPP': ['HW++', 'HWPP', 'HERWIG++'],
                 'HERWIG6': ['HW6', 'HERWIG_6']}
    for canonical, aliases in alias_map.items():
        if key in aliases:
            return canonical
    return None

def set_default_shower(self):
    """Default for 'shower' from the previous mode and the shower_card."""
    if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] or \
            self.proc_characteristics['ninitial'] == 1:
        self.switch['shower'] = 'OFF'
        return
    if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
        # a shower card exists: default to the run_card shower and
        # switch to event-generation (non fixed-order) mode
        self.switch['shower'] = self.run_card['parton_shower']
        self.switch['fixed_order'] = "OFF"
    else:
        self.switch['shower'] = 'OFF'

def consistency_shower_madanalysis(self, vshower, vma5):
    """ MA5 only possible with (N)LO+PS if shower is run"""
    if vshower != 'OFF' or vma5 != 'ON':
        return None
    return 'OFF'

def consistency_madanalysis_shower(self, vma5, vshower):
    """turning MA5 on forces the shower back on."""
    return 'ON' if vma5 == 'ON' and vshower == 'OFF' else None

def get_cardcmd_for_shower(self, value):
    """Adapt the run_card to this setup; return the list of commands to run."""
    if value == 'OFF':
        return []
    return ['set parton_shower %s' % self.switch['shower']]
1196 1197 # 1198 # madspin 1199 #
def get_allowed_madspin(self):
    """Valid values for the 'madspin' switch (cached after the first call)."""
    if hasattr(self, 'allowed_madspin'):
        return self.allowed_madspin
    self.allowed_madspin = []
    if 'MadSpin' not in self.available_module:
        return self.allowed_madspin
    if self.proc_characteristics['ninitial'] == 1:
        # decay processes cannot be decayed again
        self.available_module.remove('MadSpin')
        self.allowed_madspin = ['OFF']
    else:
        self.allowed_madspin = ['OFF', 'ON', 'onshell']
    return self.allowed_madspin

def check_value_madspin(self, value):
    """handle alias and valid option not present in get_allowed_madspin
    remember that this mode should always be OFF for 1>N. (ON not in allowed value)"""
    # exact (case-normalised) match against the visible options first
    for canonical in (value.upper(), value.lower()):
        if canonical in self.get_allowed_madspin():
            return True if value == canonical else canonical
    # extra spinmode aliases, only meaningful when MadSpin can run
    if 'MadSpin' not in self.available_module or \
            'ON' not in self.get_allowed_madspin():
        return False
    if value.lower() in ['madspin', 'full']:
        return 'full'
    if value.lower() == 'none':
        return 'none'
    return None

def set_default_madspin(self):
    """initialise the switch for madspin"""
    if 'MadSpin' not in self.available_module:
        self.switch['madspin'] = 'Not Avail.'
    elif os.path.exists(pjoin(self.me_dir, 'Cards', 'madspin_card.dat')):
        self.switch['madspin'] = 'ON'
    else:
        self.switch['madspin'] = 'OFF'

def get_cardcmd_for_madspin(self, value):
    """set some command to run before allowing the user to modify the cards."""
    spinmode = {'onshell': 'onshell',
                'full': 'madspin',
                'madspin': 'madspin',
                'none': 'none'}.get(value)
    if spinmode is None:
        return []
    return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode %s" % spinmode]
1264 1265 # 1266 # reweight 1267 #
def get_allowed_reweight(self):
    """set the valid (visible) options for reweight (cached)."""
    if hasattr(self, 'allowed_reweight'):
        return getattr(self, 'allowed_reweight')
    self.allowed_reweight = []
    if 'reweight' not in self.available_module:
        return self.allowed_reweight
    if self.proc_characteristics['ninitial'] == 1:
        # no reweighting for decay processes
        self.available_module.remove('reweight')
        self.allowed_reweight.append('OFF')
    else:
        self.allowed_reweight = ['OFF', 'ON', 'NLO', 'NLO_TREE', 'LO']
    return self.allowed_reweight

def set_default_reweight(self):
    """initialise the switch for reweight"""
    if 'reweight' not in self.available_module:
        self.switch['reweight'] = 'Not Avail.'
    elif os.path.exists(pjoin(self.me_dir, 'Cards', 'reweight_card.dat')):
        self.switch['reweight'] = 'ON'
    else:
        self.switch['reweight'] = 'OFF'

def get_cardcmd_for_reweight(self, value):
    """Adapt the reweight/run cards to this setup; return commands to run."""
    template = "edit reweight_card --replace_line='change mode' --before_line='launch' change mode %s"
    if value == 'LO':
        return [template % 'LO']
    if value == 'NLO':
        # NLO reweighting needs the extra weight information in the events
        return [template % 'NLO', "set store_rwgt_info T"]
    if value == 'NLO_TREE':
        return [template % 'NLO_tree', "set store_rwgt_info T"]
    return []
1308 1309 # 1310 # MadAnalysis5 1311 #
def get_allowed_madanalysis(self):
    """Valid values for the 'madanalysis' switch (cached after first call)."""
    if hasattr(self, 'allowed_madanalysis'):
        return self.allowed_madanalysis
    self.allowed_madanalysis = []
    if 'MA5' not in self.available_module:
        return self.allowed_madanalysis
    if self.proc_characteristics['ninitial'] == 1:
        # no hadron-level analysis for decay processes
        self.available_module.remove('MA5')
        self.allowed_madanalysis = ['OFF']
    else:
        self.allowed_madanalysis = ['OFF', 'ON']
    return self.allowed_madanalysis

def set_default_madanalysis(self):
    """initialise the switch for madanalysis"""
    if 'MA5' not in self.available_module:
        default = 'Not Avail.'
    elif os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')):
        default = 'ON'
    else:
        default = 'OFF'
    self.switch['madanalysis'] = default
1340
def check_value_madanalysis(self, value):
    """check an entry is valid. return the valid entry in case of shortcut

    Bugfix: the 'hadron' shortcut referenced the non-existent attribute
    'get_allowed_madanalysis5' (guaranteed AttributeError at runtime);
    use the get_allowed accessor, as the line above already does.
    """
    if value.upper() in self.get_allowed('madanalysis'):
        return True
    value = value.lower()
    if value == 'hadron':
        # alias for switching the hadron-level analysis on, only valid
        # when 'ON' is actually available
        return 'ON' if 'ON' in self.get_allowed('madanalysis') else False
    return False
1351
#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # Truth values
    true = ['T','.true.',True,'true']
    # Options and formats available
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']
    # survey options, dict from name to type, default value, and help text
    # Variables to store object information
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None
    make_opts_var = {}

    # follow-up commands suggested to the user after each command
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }

    ############################################################################
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd """
        # NOTE(review): mutable default 'options={}' -- harmless only if it is
        # never mutated; confirm against CmdExtended.__init__.
        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
        #common_run.CommonRunCmd.__init__(self, me_dir, options)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))

        # restore the stored cross-section results for this process directory
        self.load_results_db()
        self.results.def_web_mode(self.web)
        # check that compiler is gfortran 4.6 or later if virtuals have been exported
        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()

        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
1406 1407 1408 ############################################################################
def do_shower(self, line):
    """ run the shower on a given parton level file """
    args = self.split_arg(line)
    (options, args) = _launch_parser.parse_args(args)
    # normalise the parsed options to a plain dict; showering never
    # runs in reweight-only mode
    options = options.__dict__
    options['reweightonly'] = False
    self.check_shower(args, options)
    # first argument: directory holding the parton-level event file
    event_file = pjoin(os.getcwd(), args[0], 'events.lhe')
    self.ask_run_configuration('onlyshower', options)
    self.run_mcatnlo(event_file, options)

    self.update_status('', level='all', update_results=True)
1422 1423 ################################################################################
def do_plot(self, line):
    """Create the plot for a given run.

    Bugfix: the shower branch used to ``raise`` a plain string, which is not
    a valid exception object; raise aMCatNLOError instead. Also close the
    plot.log handles via context managers instead of leaking them.
    """

    # Since in principle, all plots are already done automatically
    args = self.split_arg(line)
    # Check argument's validity
    self.check_plot(args)
    logger.info('plot for run %s' % self.run_name)

    if not self.force:
        self.ask_edit_cards([], args, plot=True)

    if any([arg in ['parton'] for arg in args]):
        filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
        if os.path.exists(filename + '.gz'):
            misc.gunzip(filename)
        if os.path.exists(filename):
            logger.info('Found events.lhe file for run %s' % self.run_name)
            # create_plot('parton') reads Events/unweighted_events.lhe
            shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
            self.create_plot('parton')
            shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
            misc.gzip(filename)

    if any([arg in ['all', 'parton'] for arg in args]):
        filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
        if os.path.exists(filename):
            logger.info('Found MADatNLO.top file for run %s' % \
                         self.run_name)
            output = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton.html')
            plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')

            if not os.path.isdir(plot_dir):
                os.makedirs(plot_dir)
            top_file = pjoin(plot_dir, 'plots.top')
            files.cp(filename, top_file)
            madir = self.options['madanalysis_path']
            tag = self.run_card['run_tag']
            td = self.options['td_path']
            with open(pjoin(plot_dir, 'plot.log'), 'a') as plot_log:
                misc.call(['%s/plot' % self.dirbin, madir, td],
                          stdout=plot_log,
                          stderr=subprocess.STDOUT,
                          cwd=plot_dir)
            with open(pjoin(plot_dir, 'plot.log'), 'a') as plot_log:
                misc.call(['%s/plot_page-pl' % self.dirbin,
                           os.path.basename(plot_dir),
                           'parton'],
                          stdout=plot_log,
                          stderr=subprocess.STDOUT,
                          cwd=pjoin(self.me_dir, 'HTML', self.run_name))
            shutil.move(pjoin(self.me_dir, 'HTML', self.run_name, 'plots.html'),
                        output)
            # NOTE(review): the copy above goes to the HTML plot dir, yet this
            # removes Events/plots.top -- confirm this path is intended.
            os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))

    if any([arg in ['all', 'shower'] for arg in args]):
        filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
        if len(filenames) != 1:
            filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
            if len(filenames) != 1:
                logger.info('No shower level file found for run %s' % \
                            self.run_name)
                return
            filename = filenames[0]
            misc.gunzip(filename, keep=True,
                        stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

            if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
                if aMCatNLO and not self.options['mg5_path']:
                    # was: raise "..." (a bare string, not an exception)
                    raise aMCatNLOError("plotting NLO HEP file needs MG5 utilities")

                files.cp(pjoin(self.options['mg5_path'], 'Template', 'LO', 'Cards', 'pythia_card_default.dat'),
                         pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
            self.run_hep2lhe()
        else:
            filename = filenames[0]
            misc.gunzip(filename, keep=True,
                        stdout=pjoin(self.me_dir, 'Events', 'pythia_events.hep'))

        self.create_plot('shower')
        lhe_file_name = filename.replace('.hep.gz', '.lhe')
        shutil.move(pjoin(self.me_dir, 'Events', 'pythia_events.lhe'),
                    lhe_file_name)
        misc.gzip(lhe_file_name)

    if any([arg in ['all', 'pgs'] for arg in args]):
        filename = pjoin(self.me_dir, 'Events', self.run_name,
                         '%s_pgs_events.lhco' % self.run_tag)
        if os.path.exists(filename + '.gz'):
            misc.gunzip(filename)
        if os.path.exists(filename):
            self.create_plot('PGS')
            misc.gzip(filename)
        else:
            logger.info('No valid files for pgs plot')

    if any([arg in ['all', 'delphes'] for arg in args]):
        filename = pjoin(self.me_dir, 'Events', self.run_name,
                         '%s_delphes_events.lhco' % self.run_tag)
        if os.path.exists(filename + '.gz'):
            misc.gunzip(filename)
        if os.path.exists(filename):
            self.create_plot('Delphes')
            misc.gzip(filename)
        else:
            logger.info('No valid files for delphes plot')
1529 1530 1531 ############################################################################
def do_calculate_xsect(self, line):
    """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
    this function wraps the do_launch one"""

    self.start_time = time.time()
    args = self.split_arg(line)
    (options, args) = _calculate_xsect_parser.parse_args(args)
    # fixed-order cross-section: stop at parton level, no reweight-only mode
    options = options.__dict__
    options['reweightonly'] = False
    options['parton'] = True
    self.check_calculate_xsect(args, options)
    self.do_launch(line, options, args)
1545 1546 ############################################################################
def do_banner_run(self, line):
    """Make a run from the banner file"""

    args = self.split_arg(line)
    #check the validity of the arguments
    self.check_banner_run(args)

    # Remove previous cards; the banner will re-create the relevant ones
    for name in ['shower_card.dat', 'madspin_card.dat']:
        try:
            os.remove(pjoin(self.me_dir, 'Cards', name))
        except Exception:
            # card absent: nothing to clean
            pass

    # split the banner (args[0]) back into its individual cards
    banner_mod.split_banner(args[0], self.me_dir, proc_card=False)

    # Check if we want to modify the run
    if not self.force:
        ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
        if ans == 'n':
            self.force = True

    # Compute run mode:
    if self.force:
        mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin': False, 'shower': True}
        banner = banner_mod.Banner(args[0])
        # NOTE(review): 'line' here shadows the method argument, and the
        # unpacking assumes exactly one '=' per setting line -- confirm.
        for line in banner['run_settings']:
            if '=' in line:
                mode, value = [t.strip() for t in line.split('=')]
                mode_status[mode] = value
    else:
        mode_status = {}

    # Call Generate events
    self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
                   switch=mode_status)
1583 1584 ############################################################################
def do_generate_events(self, line):
    """Main commands: generate events
    this function just wraps the do_launch one"""
    # the full chain (cards, compile, run, shower, ...) lives in do_launch
    self.do_launch(line)
1589 1590 1591 ############################################################################
def do_treatcards(self, line, amcatnlo=True, mode=''):
    """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
    # refuse 'Auto' entries in the param_card
    self.check_param_card(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

    # propagate the FO_card entries below to the run_card: these variables
    # are system-only in the run_card, so this cannot be done in EditCard
    # (they are not written in the run_card directly).
    if mode in ['LO', 'NLO']:
        FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir, 'Cards', 'FO_analyse_card.dat'))
        for name in ('fo_lhe_weight_ratio', 'fo_lhe_postprocessing'):
            if name in FO_card:
                self.run_card.set(name, FO_card[name], user=False)

    return super(aMCatNLOCmd, self).do_treatcards(line, amcatnlo)
1611 1612 ############################################################################
def set_configuration(self, amcatnlo=True, **opt):
    """assign all configuration variable from file
       loop over the different config file if config_file not define """
    # simply force amcatnlo=True when delegating to the common implementation
    return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1617 1618 ############################################################################
def do_launch(self, line, options={}, argss=[], switch={}):
    """Main commands: launch the full chain
    options and args are relevant if the function is called from other
    functions, such as generate_events or calculate_xsect
    mode gives the list of switch needed for the computation (usefull for banner_run)
    """
    # NOTE(review): mutable defaults (options={}, argss=[], switch={}) --
    # the scan loop below mutates 'options' and 'argss'; confirm callers
    # always pass fresh objects.

    if not argss and not options:
        self.start_time = time.time()
        argss = self.split_arg(line)
        # check argument validity and normalise argument
        (options, argss) = _launch_parser.parse_args(argss)
        options = options.__dict__
        self.check_launch(argss, options)

    if 'run_name' in options.keys() and options['run_name']:
        self.run_name = options['run_name']
        # if a dir with the given run_name already exists
        # remove it and warn the user
        if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
            logger.warning('Removing old run information in \n'+
                           pjoin(self.me_dir, 'Events', self.run_name))
            files.rm(pjoin(self.me_dir, 'Events', self.run_name))
            self.results.delete_run(self.run_name)
    else:
        self.run_name = ''  # will be set later

    # cluster_mode: 0 = serial, 1 = cluster, 2 = multicore
    if options['multicore']:
        self.cluster_mode = 2
    elif options['cluster']:
        self.cluster_mode = 1

    # determine the run mode, either from the command line (argss[0]) or
    # from an explicit switch dict (banner_run)
    if not switch:
        mode = argss[0]

        if mode in ['LO', 'NLO']:
            options['parton'] = True
        mode = self.ask_run_configuration(mode, options)
    else:
        mode = self.ask_run_configuration('auto', options, switch)

    self.results.add_detail('run_mode', mode)

    self.update_status('Starting run', level=None, update_results=True)

    if self.options['automatic_html_opening']:
        misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
        self.options['automatic_html_opening'] = False

    # strip a possible '+<shower>' suffix before compiling/running
    if '+' in mode:
        mode = mode.split('+')[0]
    self.compile(mode, options)
    evt_file = self.run(mode, options)

    if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
        logger.info('No event file generated: grids have been set-up with a '\
                    'relative precision of %s' % self.run_card['req_acc'])
        return

    if not mode in ['LO', 'NLO']:
        # NOTE(review): the assertion message mentions 'events.lhe.gz' while
        # the compared path is 'events.lhe' -- confirm which is intended.
        assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))

        if self.run_card['systematics_program'] == 'systematics':
            self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))

        # post-process the events according to the cards (no-ops when the
        # corresponding cards are absent)
        self.exec_cmd('reweight -from_cards', postcmd=False)
        self.exec_cmd('decay_events -from_cards', postcmd=False)
        evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')

    if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
       and not options['parton']:
        # matched modes: run the parton shower and the hadron-level analysis
        self.run_mcatnlo(evt_file, options)
        self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)

    elif mode == 'noshower':
        logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
Please, shower the Les Houches events before using them for physics analyses.""")

    self.update_status('', level='all', update_results=True)
    # ickkw == 3 means FxFx merging is enabled
    if self.run_card['ickkw'] == 3 and \
       (mode in ['noshower'] or \
        (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
        logger.warning("""You are running with FxFx merging enabled.
To be able to merge samples of various multiplicities without double counting,
you have to remove some events after showering 'by hand'.
Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")

    self.store_result()
    #check if the param_card defines a scan.
    if self.param_card_iterator:
        cpath = pjoin(self.me_dir,'Cards','param_card.dat')
        param_card_iterator = self.param_card_iterator
        self.param_card_iterator = [] #avoid to next generate go trough here
        param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                        error=self.results.current['error'],
                                        param_card_path=cpath)
        orig_name = self.run_name
        #go trough the scan
        with misc.TMP_variable(self, 'allow_notification_center', False):
            for i, card in enumerate(param_card_iterator):
                # write the next scan point and re-launch with the same mode
                card.write(cpath)
                self.check_param_card(cpath, dependent=True)
                if not options['force']:
                    options['force'] = True
                if options['run_name']:
                    options['run_name'] = '%s_%s' % (orig_name, i+1)
                if not argss:
                    argss = [mode, "-f"]
                elif argss[0] == "auto":
                    argss[0] = mode
                self.do_launch("", options=options, argss=argss, switch=switch)
                #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False)
                param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
                                                error=self.results.current['error'],
                                                param_card_path=cpath)
        #restore original param_card
        param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
        name = misc.get_scan_name(orig_name, self.run_name)
        path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
        logger.info("write all cross-section results in %s" % path, '$MG:BOLD')
        param_card_iterator.write_summary(path)

    if self.allow_notification_center:
        misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
                          '%s: %s +- %s ' % (self.results.current['run_name'],
                                             self.results.current['cross'],
                                             self.results.current['error']))
def do_compile(self, line):
    """Advanced commands: just compile the executables """
    args = self.split_arg(line)
    (options, args) = _compile_parser.parse_args(args)
    # a pure compilation pass: never reweight-only, never skip compiling
    options = options.__dict__
    options['reweightonly'] = False
    options['nocompile'] = False
    self.check_compile(args, options)

    # 'FO' -> fixed order (NLO); 'MC' -> matched run (aMC@NLO)
    mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[args[0]]
    self.ask_run_configuration(mode, options)
    self.compile(mode, options)

    self.update_status('', level='all', update_results=True)
1767 1768
def update_random_seed(self):
    """Update random number seed with the value from the run_card.
    If this is 0, update the number according to a fresh one.

    Idiom fix: use context managers so the randinit file handle is closed
    even if reading or parsing fails.
    """
    iseed = self.run_card['iseed']
    randinit_path = pjoin(self.me_dir, 'SubProcesses', 'randinit')
    if iseed == 0:
        # the file content has the form 'r=NNN'; skip the 'r=' prefix
        with open(randinit_path) as randinit:
            iseed = int(randinit.read()[2:]) + 1
    with open(randinit_path, 'w') as randinit:
        randinit.write('r=%d' % iseed)
1780 1781
1782 - def run(self, mode, options):
1783 """runs aMC@NLO. Returns the name of the event file created""" 1784 logger.info('Starting run') 1785 1786 if not 'only_generation' in options.keys(): 1787 options['only_generation'] = False 1788 1789 # for second step in applgrid mode, do only the event generation step 1790 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']: 1791 options['only_generation'] = True 1792 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics')) 1793 self.setup_cluster_or_multicore() 1794 self.update_random_seed() 1795 #find and keep track of all the jobs 1796 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'], 1797 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']} 1798 folder_names['noshower'] = folder_names['aMC@NLO'] 1799 folder_names['noshowerLO'] = folder_names['aMC@LO'] 1800 p_dirs = [d for d in \ 1801 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 1802 #Clean previous results 1803 self.clean_previous_results(options,p_dirs,folder_names[mode]) 1804 1805 mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events'] 1806 1807 1808 if options['reweightonly']: 1809 event_norm=self.run_card['event_norm'] 1810 nevents=self.run_card['nevents'] 1811 return self.reweight_and_collect_events(options, mode, nevents, event_norm) 1812 1813 if mode in ['LO', 'NLO']: 1814 # this is for fixed order runs 1815 mode_dict = {'NLO': 'all', 'LO': 'born'} 1816 logger.info('Doing fixed order %s' % mode) 1817 req_acc = self.run_card['req_acc_FO'] 1818 1819 # Re-distribute the grids for the 2nd step of the applgrid 1820 # running 1821 if self.run_card['iappl'] == 2: 1822 self.applgrid_distribute(options,mode_dict[mode],p_dirs) 1823 1824 # create a list of dictionaries "jobs_to_run" with all the 1825 # jobs that need to be run 1826 integration_step=-1 1827 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1828 
req_acc,mode_dict[mode],integration_step,mode,fixed_order=True) 1829 self.prepare_directories(jobs_to_run,mode) 1830 1831 # loop over the integration steps. After every step, check 1832 # if we have the required accuracy. If this is the case, 1833 # stop running, else do another step. 1834 while True: 1835 integration_step=integration_step+1 1836 self.run_all_jobs(jobs_to_run,integration_step) 1837 self.collect_log_files(jobs_to_run,integration_step) 1838 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1839 jobs_to_collect,integration_step,mode,mode_dict[mode]) 1840 if not jobs_to_run: 1841 # there are no more jobs to run (jobs_to_run is empty) 1842 break 1843 # We are done. 1844 self.finalise_run_FO(folder_names[mode],jobs_to_collect) 1845 self.update_status('Run complete', level='parton', update_results=True) 1846 return 1847 1848 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']: 1849 if self.ninitial == 1: 1850 raise aMCatNLOError('Decay processes can only be run at fixed order.') 1851 mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\ 1852 'noshower': 'all', 'noshowerLO': 'born'} 1853 shower = self.run_card['parton_shower'].upper() 1854 nevents = self.run_card['nevents'] 1855 req_acc = self.run_card['req_acc'] 1856 if nevents == 0 and req_acc < 0 : 1857 raise aMCatNLOError('Cannot determine the required accuracy from the number '\ 1858 'of events, because 0 events requested. Please set '\ 1859 'the "req_acc" parameter in the run_card to a value '\ 1860 'between 0 and 1') 1861 elif req_acc >1 or req_acc == 0 : 1862 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\ 1863 'be between larger than 0 and smaller than 1, '\ 1864 'or set to -1 for automatic determination. 
Current '\ 1865 'value is %f' % req_acc) 1866 # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card) 1867 elif req_acc < 0 and nevents > 1000000 : 1868 req_acc=0.001 1869 1870 shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8'] 1871 1872 if not shower in shower_list: 1873 raise aMCatNLOError('%s is not a valid parton shower. '\ 1874 'Please use one of the following: %s' \ 1875 % (shower, ', '.join(shower_list))) 1876 1877 # check that PYTHIA6PT is not used for processes with FSR 1878 if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']: 1879 raise aMCatNLOError('PYTHIA6PT does not support processes with FSR') 1880 1881 if mode in ['aMC@NLO', 'aMC@LO']: 1882 logger.info('Doing %s matched to parton shower' % mode[4:]) 1883 elif mode in ['noshower','noshowerLO']: 1884 logger.info('Generating events without running the shower.') 1885 elif options['only_generation']: 1886 logger.info('Generating events starting from existing results') 1887 1888 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1889 req_acc,mode_dict[mode],1,mode,fixed_order=False) 1890 # Make sure to update all the jobs to be ready for the event generation step 1891 if options['only_generation']: 1892 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1893 jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False) 1894 else: 1895 self.prepare_directories(jobs_to_run,mode,fixed_order=False) 1896 1897 1898 # Main loop over the three MINT generation steps: 1899 for mint_step, status in enumerate(mcatnlo_status): 1900 if options['only_generation'] and mint_step < 2: 1901 continue 1902 self.update_status(status, level='parton') 1903 self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False) 1904 self.collect_log_files(jobs_to_run,mint_step) 1905 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1906 
jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False) 1907 if mint_step+1==2 and nevents==0: 1908 self.print_summary(options,2,mode) 1909 return 1910 1911 # Sanity check on the event files. If error the jobs are resubmitted 1912 self.check_event_files(jobs_to_collect) 1913 1914 if self.cluster_mode == 1: 1915 #if cluster run, wait 10 sec so that event files are transferred back 1916 self.update_status( 1917 'Waiting while files are transferred back from the cluster nodes', 1918 level='parton') 1919 time.sleep(10) 1920 1921 event_norm=self.run_card['event_norm'] 1922 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1923
1924 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\ 1925 integration_step,mode,fixed_order=True):
1926 """Creates a list of dictionaries with all the jobs to be run""" 1927 jobs_to_run=[] 1928 if not options['only_generation']: 1929 # Fresh, new run. Check all the P*/channels.txt files 1930 # (created by the 'gensym' executable) to set-up all the 1931 # jobs using the default inputs. 1932 npoints = self.run_card['npoints_FO_grid'] 1933 niters = self.run_card['niters_FO_grid'] 1934 for p_dir in p_dirs: 1935 try: 1936 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file: 1937 channels=chan_file.readline().split() 1938 except IOError: 1939 logger.warning('No integration channels found for contribution %s' % p_dir) 1940 continue 1941 if fixed_order: 1942 lch=len(channels) 1943 maxchannels=20 # combine up to 20 channels in a single job 1944 if self.run_card['iappl'] != 0: maxchannels=1 1945 njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \ 1946 else int(lch/maxchannels)) 1947 for nj in range(1,njobs+1): 1948 job={} 1949 job['p_dir']=p_dir 1950 job['channel']=str(nj) 1951 job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs)) 1952 job['configs']=' '.join(channels[:job['nchans']]) 1953 del channels[:job['nchans']] 1954 job['split']=0 1955 if req_acc == -1: 1956 job['accuracy']=0 1957 job['niters']=niters 1958 job['npoints']=npoints 1959 elif req_acc > 0: 1960 job['accuracy']=0.05 1961 job['niters']=6 1962 job['npoints']=-1 1963 else: 1964 raise aMCatNLOError('No consistent "req_acc_FO" set. 
Use a value '+ 1965 'between 0 and 1 or set it equal to -1.') 1966 job['mint_mode']=0 1967 job['run_mode']=run_mode 1968 job['wgt_frac']=1.0 1969 job['wgt_mult']=1.0 1970 jobs_to_run.append(job) 1971 if channels: 1972 raise aMCatNLOError('channels is not empty %s' % channels) 1973 else: 1974 for channel in channels: 1975 job={} 1976 job['p_dir']=p_dir 1977 job['channel']=channel 1978 job['split']=0 1979 job['accuracy']=0.03 1980 job['niters']=12 1981 job['npoints']=-1 1982 job['mint_mode']=0 1983 job['run_mode']=run_mode 1984 job['wgt_frac']=1.0 1985 jobs_to_run.append(job) 1986 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1987 else: 1988 # if options['only_generation'] is true, just read the current jobs from file 1989 try: 1990 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f: 1991 jobs_to_collect=pickle.load(f) 1992 for job in jobs_to_collect: 1993 job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1]) 1994 jobs_to_run=copy.copy(jobs_to_collect) 1995 except: 1996 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \ 1997 pjoin(self.me_dir,'SubProcesses','job_status.pkl')) 1998 # Update cross sections and determine which jobs to run next 1999 if fixed_order: 2000 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, 2001 jobs_to_collect,integration_step,mode,run_mode) 2002 # Update the integration_step to make sure that nothing will be overwritten 2003 integration_step=1 2004 for job in jobs_to_run: 2005 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)): 2006 integration_step=integration_step+1 2007 integration_step=integration_step-1 2008 else: 2009 self.append_the_results(jobs_to_collect,integration_step) 2010 return jobs_to_run,jobs_to_collect,integration_step
2011
2012 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
2013 """Set-up the G* directories for running""" 2014 name_suffix={'born' :'B' , 'all':'F'} 2015 for job in jobs_to_run: 2016 if job['split'] == 0: 2017 if fixed_order : 2018 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2019 job['run_mode']+'_G'+job['channel']) 2020 else: 2021 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2022 'G'+name_suffix[job['run_mode']]+job['channel']) 2023 else: 2024 if fixed_order : 2025 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2026 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 2027 else: 2028 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 2029 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 2030 job['dirname']=dirname 2031 if not os.path.isdir(dirname): 2032 os.makedirs(dirname) 2033 self.write_input_file(job,fixed_order) 2034 # link or copy the grids from the base directory to the split directory: 2035 if not fixed_order: 2036 if job['split'] != 0: 2037 for f in ['grid.MC_integer','mint_grids','res_1']: 2038 if not os.path.isfile(pjoin(job['dirname'],f)): 2039 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname']) 2040 else: 2041 if job['split'] != 0: 2042 for f in ['grid.MC_integer','mint_grids']: 2043 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2044 2045
2046 - def write_input_file(self,job,fixed_order):
2047 """write the input file for the madevent_mint* executable in the appropriate directory""" 2048 if fixed_order: 2049 content= \ 2050 """NPOINTS = %(npoints)s 2051 NITERATIONS = %(niters)s 2052 ACCURACY = %(accuracy)s 2053 ADAPT_GRID = 2 2054 MULTICHANNEL = 1 2055 SUM_HELICITY = 1 2056 NCHANS = %(nchans)s 2057 CHANNEL = %(configs)s 2058 SPLIT = %(split)s 2059 WGT_MULT= %(wgt_mult)s 2060 RUN_MODE = %(run_mode)s 2061 RESTART = %(mint_mode)s 2062 """ \ 2063 % job 2064 else: 2065 content = \ 2066 """-1 12 ! points, iterations 2067 %(accuracy)s ! desired fractional accuracy 2068 1 -0.1 ! alpha, beta for Gsoft 2069 -1 -0.1 ! alpha, beta for Gazi 2070 1 ! Suppress amplitude (0 no, 1 yes)? 2071 1 ! Exact helicity sum (0 yes, n = number/event)? 2072 %(channel)s ! Enter Configuration Number: 2073 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 2074 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 2075 %(run_mode)s ! all, born, real, virt 2076 """ \ 2077 % job 2078 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 2079 input_file.write(content)
2080 2081
2082 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
2083 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 2084 if fixed_order: 2085 if integration_step == 0: 2086 self.update_status('Setting up grids', level=None) 2087 else: 2088 self.update_status('Refining results, step %i' % integration_step, level=None) 2089 self.ijob = 0 2090 name_suffix={'born' :'B', 'all':'F'} 2091 if fixed_order: 2092 run_type="Fixed order integration step %s" % integration_step 2093 else: 2094 run_type="MINT step %s" % integration_step 2095 self.njobs=len(jobs_to_run) 2096 for job in jobs_to_run: 2097 executable='ajob1' 2098 if fixed_order: 2099 arguments=[job['channel'],job['run_mode'], \ 2100 str(job['split']),str(integration_step)] 2101 else: 2102 arguments=[job['channel'],name_suffix[job['run_mode']], \ 2103 str(job['split']),str(integration_step)] 2104 self.run_exe(executable,arguments,run_type, 2105 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 2106 2107 if self.cluster_mode == 2: 2108 time.sleep(1) # security to allow all jobs to be launched 2109 self.wait_for_complete(run_type)
2110 2111
    def collect_the_results(self, options, req_acc, jobs_to_run, jobs_to_collect, \
                            integration_step, mode, run_mode, fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # Get the results of the current integration/MINT step
        self.append_the_results(jobs_to_run, integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect, integration_step)
        # Update HTML pages
        if fixed_order:
            cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
                                                           jobs=jobs_to_collect)
        else:
            name_suffix = {'born': 'B', 'all': 'F'}
            cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # Combine grids from split fixed order jobs
        if fixed_order:
            jobs_to_run = self.combine_split_order_run(jobs_to_run)
        # Set-up jobs for the next iteration/MINT step
        jobs_to_run_new = self.update_jobs_to_run(req_acc, integration_step, jobs_to_run, fixed_order)
        # IF THERE ARE NO MORE JOBS, WE ARE DONE!!!
        if fixed_order:
            # Write the jobs_to_collect directory to file so that we
            # can restart them later (with only-generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
        # Print summary
        if (not jobs_to_run_new) and fixed_order:
            # print final summary of results (for fixed order)
            scale_pdf_info = self.collect_scale_pdf_info(options, jobs_to_collect)
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=True)
            return jobs_to_run_new, jobs_to_collect
        elif jobs_to_run_new:
            # print intermediate summary of results
            scale_pdf_info = []
            self.print_summary(options, integration_step, mode, scale_pdf_info, done=False)
        else:
            # When we are done for (N)LO+PS runs, do not print
            # anything yet. This will be done after the reweighting
            # and collection of the events
            scale_pdf_info = []
        # Prepare for the next integration/MINT step
        if (not fixed_order) and integration_step+1 == 2:
            # Write the jobs_to_collect directory to file so that we
            # can restart them later (with only-generation option)
            with open(pjoin(self.me_dir, "SubProcesses", "job_status.pkl"), 'wb') as f:
                pickle.dump(jobs_to_collect, f)
            # next step is event generation (mint_step 2): split jobs by
            # number of events, write the bookkeeping files the fortran
            # collect_events code needs
            jobs_to_run_new, jobs_to_collect_new = \
                self.check_the_need_to_split(jobs_to_run_new, jobs_to_collect)
            self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new, jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            # fixed-order adaptive running (not applgrid): split long jobs
            # over more cores/nodes before the next iteration
            if fixed_order and self.run_card['iappl'] == 0 \
               and self.run_card['req_acc_FO'] > 0:
                jobs_to_run_new, jobs_to_collect = \
                    self.split_jobs_fixed_order(jobs_to_run_new, jobs_to_collect)
                self.prepare_directories(jobs_to_run_new, mode, fixed_order)
            jobs_to_collect_new = jobs_to_collect
        return jobs_to_run_new, jobs_to_collect_new
2178 2179
2180 - def write_nevents_unweighted_file(self,jobs,jobs0events):
2181 """writes the nevents_unweighted file in the SubProcesses directory. 2182 We also need to write the jobs that will generate 0 events, 2183 because that makes sure that the cross section from those channels 2184 is taken into account in the event weights (by collect_events.f). 2185 """ 2186 content=[] 2187 for job in jobs: 2188 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2189 lhefile=pjoin(path,'events.lhe') 2190 content.append(' %s %d %9e %9e' % \ 2191 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 2192 for job in jobs0events: 2193 if job['nevents']==0: 2194 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 2195 lhefile=pjoin(path,'events.lhe') 2196 content.append(' %s %d %9e %9e' % \ 2197 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 2198 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 2199 f.write('\n'.join(content)+'\n')
2200
2201 - def write_nevts_files(self,jobs):
2202 """write the nevts files in the SubProcesses/P*/G*/ directories""" 2203 for job in jobs: 2204 with open(pjoin(job['dirname'],'nevts'),'w') as f: 2205 if self.run_card['event_norm'].lower()=='bias': 2206 f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca'])) 2207 else: 2208 f.write('%i\n' % job['nevents'])
2209
2210 - def combine_split_order_run(self,jobs_to_run):
2211 """Combines jobs and grids from split jobs that have been run""" 2212 # combine the jobs that need to be combined in job 2213 # groups. Simply combine the ones that have the same p_dir and 2214 # same channel. 2215 jobgroups_to_combine=[] 2216 jobs_to_run_new=[] 2217 for job in jobs_to_run: 2218 if job['split'] == 0: 2219 job['combined']=1 2220 jobs_to_run_new.append(job) # this jobs wasn't split 2221 elif job['split'] == 1: 2222 jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and \ 2223 j['channel'] == job['channel'], jobs_to_run)) 2224 else: 2225 continue 2226 for job_group in jobgroups_to_combine: 2227 # Combine the grids (mint-grids & MC-integer grids) first 2228 self.combine_split_order_grids(job_group) 2229 jobs_to_run_new.append(self.combine_split_order_jobs(job_group)) 2230 return jobs_to_run_new
2231
2232 - def combine_split_order_jobs(self,job_group):
2233 """combine the jobs in job_group and return a single summed job""" 2234 # first copy one of the jobs in 'jobs' 2235 sum_job=copy.copy(job_group[0]) 2236 # update the information to have a 'non-split' job: 2237 sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0]) 2238 sum_job['split']=0 2239 sum_job['wgt_mult']=1.0 2240 sum_job['combined']=len(job_group) 2241 # information to be summed: 2242 keys=['niters_done','npoints_done','niters','npoints',\ 2243 'result','resultABS','time_spend'] 2244 keys2=['error','errorABS'] 2245 # information to be summed in quadrature: 2246 for key in keys2: 2247 sum_job[key]=math.pow(sum_job[key],2) 2248 # Loop over the jobs and sum the information 2249 for i,job in enumerate(job_group): 2250 if i==0 : continue # skip the first 2251 for key in keys: 2252 sum_job[key]+=job[key] 2253 for key in keys2: 2254 sum_job[key]+=math.pow(job[key],2) 2255 for key in keys2: 2256 sum_job[key]=math.sqrt(sum_job[key]) 2257 sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100. 2258 sum_job['err_perc'] = sum_job['error']/sum_job['result']*100. 2259 sum_job['niters']=int(sum_job['niters_done']/len(job_group)) 2260 sum_job['niters_done']=int(sum_job['niters_done']/len(job_group)) 2261 return sum_job
2262 2263
    def combine_split_order_grids(self, job_group):
        """Combines the mint_grids and MC-integer grids from the split order
        jobs (fixed order only). The combined grids overwrite the files in
        the parent (non-split) directory.
        """
        files_mint_grids = []
        files_MC_integer = []
        location = None
        for job in job_group:
            files_mint_grids.append(pjoin(job['dirname'], 'mint_grids'))
            files_MC_integer.append(pjoin(job['dirname'], 'grid.MC_integer'))
            # the parent directory is the split dirname without its '_<i>' suffix
            if not location:
                location = pjoin(job['dirname'].rsplit('_', 1)[0])
            else:
                if location != pjoin(job['dirname'].rsplit('_', 1)[0]):
                    raise aMCatNLOError('Not all jobs have the same location. '\
                                        + 'Cannot combine them.')
        # Needed to average the grids (both xgrids, ave_virt and
        # MC_integer grids), but sum the cross section info. The
        # latter is only the only line that contains integers.
        for j, fs in enumerate([files_mint_grids, files_MC_integer]):
            linesoffiles = []
            for f in fs:
                with open(f, 'r+') as fi:
                    linesoffiles.append(fi.readlines())
            to_write = []
            # walk the files line-by-line in lockstep: each rowgrp holds
            # the same line from every split file
            for rowgrp in zip(*linesoffiles):
                try:
                    # check that last element on the line is an
                    # integer (will raise ValueError if not the
                    # case). If integer, this is the line that
                    # contains information that needs to be
                    # summed. All other lines can be averaged.
                    is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    special = []
                    # column layout of the cross-section line:
                    # 0: Xsec, 1: uncertainty, 2: PS points/iteration,
                    # 3: number of iterations, 4: nhits_in_grids
                    for i, floatgrp in enumerate(floatgrps):
                        if i == 0:  # sum X-sec
                            special.append(sum(floatgrp))
                        elif i == 1:  # sum unc in quadrature
                            special.append(math.sqrt(sum([err**2 for err in floatgrp])))
                        elif i == 2:  # average number of PS per iteration
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        elif i == 3:  # sum the number of iterations
                            special.append(int(sum(floatgrp)))
                        elif i == 4:  # average the nhits_in_grids
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        else:
                            raise aMCatNLOError('"mint_grids" files not in correct format. '+\
                                                'Cannot combine them.')
                    to_write.append(" ".join(str(s) for s in special) + "\n")
                except ValueError:
                    # not the integer line: just average all columns
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = zip(*floatsbyfile)
                    averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps]
                    to_write.append(" ".join(str(a) for a in averages) + "\n")
            # write the data over the master location
            if j == 0:
                with open(pjoin(location, 'mint_grids'), 'w') as f:
                    f.writelines(to_write)
            elif j == 1:
                with open(pjoin(location, 'grid.MC_integer'), 'w') as f:
                    f.writelines(to_write)
2328 2329
    def split_jobs_fixed_order(self, jobs_to_run, jobs_to_collect):
        """Looks in the jobs_to_run to see if there is the need to split the
        jobs, depending on the expected time they take. Updates
        jobs_to_run and jobs_to_collect to replace the split-job by
        its splits.
        """
        # determine the number jobs we should have (this is per p_dir)
        if self.options['run_mode'] == 2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] == 1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit = 1
        # total expected aggregated running time
        time_expected = 0
        for job in jobs_to_run:
            # extrapolate from the time spent in the previous iteration
            time_expected += job['time_spend']*(job['niters']*job['npoints'])/ \
                             (job['niters_done']*job['npoints_done'])
        # this means that we must expect the following per job (in
        # ideal conditions)
        time_per_job = time_expected/(nb_submit*(1+len(jobs_to_run)/2))
        jobs_to_run_new = []
        jobs_to_collect_new = copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # remove current job from jobs_to_collect. Make sure
            # to remove all the split ones in case the original
            # job had been a split one (before it was re-combined)
            # NOTE(review): relies on Python-2 filter() returning a list
            # (a snapshot), since jobs_to_collect_new is mutated while
            # iterating; under Python 3 this would need list(filter(...)).
            for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \
                            j['channel'] == job['channel'], jobs_to_collect_new):
                jobs_to_collect_new.remove(j)
            time_expected = job['time_spend']*(job['niters']*job['npoints'])/ \
                            (job['niters_done']*job['npoints_done'])
            # if the time expected for this job is (much) larger than
            # the time spend in the previous iteration, and larger
            # than the expected time per job, split it
            if time_expected > max(2*job['time_spend']/job['combined'], time_per_job):
                # determine the number of splits needed
                nsplit = min(max(int(time_expected/max(2*job['time_spend']/job['combined'], time_per_job)), 2), nb_submit)
                for i in range(1, nsplit+1):
                    job_new = copy.copy(job)
                    job_new['split'] = i
                    # each split carries 1/nsplit of the weight
                    job_new['wgt_mult'] = 1./float(nsplit)
                    job_new['dirname'] = job['dirname']+'_%i' % job_new['split']
                    # splits may run with a looser accuracy (capped at 0.1)
                    job_new['accuracy'] = min(job['accuracy']*math.sqrt(float(nsplit)), 0.1)
                    if nsplit >= job['niters']:
                        job_new['npoints'] = int(job['npoints']*job['niters']/nsplit)
                        job_new['niters'] = 1
                    else:
                        job_new['npoints'] = int(job['npoints']/nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new, jobs_to_collect_new
2385 2386
2387 - def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
2388 """Looks in the jobs_to_run to see if there is the need to split the 2389 event generation step. Updates jobs_to_run and 2390 jobs_to_collect to replace the split-job by its 2391 splits. Also removes jobs that do not need any events. 2392 """ 2393 nevt_job=self.run_card['nevt_job'] 2394 if nevt_job > 0: 2395 jobs_to_collect_new=copy.copy(jobs_to_collect) 2396 for job in jobs_to_run: 2397 nevents=job['nevents'] 2398 if nevents == 0: 2399 jobs_to_collect_new.remove(job) 2400 elif nevents > nevt_job: 2401 jobs_to_collect_new.remove(job) 2402 if nevents % nevt_job != 0 : 2403 nsplit=int(nevents/nevt_job)+1 2404 else: 2405 nsplit=int(nevents/nevt_job) 2406 for i in range(1,nsplit+1): 2407 job_new=copy.copy(job) 2408 left_over=nevents % nsplit 2409 if i <= left_over: 2410 job_new['nevents']=int(nevents/nsplit)+1 2411 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2412 else: 2413 job_new['nevents']=int(nevents/nsplit) 2414 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 2415 job_new['split']=i 2416 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 2417 jobs_to_collect_new.append(job_new) 2418 jobs_to_run_new=copy.copy(jobs_to_collect_new) 2419 else: 2420 jobs_to_run_new=copy.copy(jobs_to_collect) 2421 for job in jobs_to_collect: 2422 if job['nevents'] == 0: 2423 jobs_to_run_new.remove(job) 2424 jobs_to_collect_new=copy.copy(jobs_to_run_new) 2425 2426 return jobs_to_run_new,jobs_to_collect_new
2427 2428
    def update_jobs_to_run(self, req_acc, step, jobs, fixed_order=True):
        """
        For (N)LO+PS: determines the number of events and/or the required
        accuracy per job.
        For fixed order: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        # cross sections from the step just completed (set by
        # write_res_txt_file): total, ABS total and their errors
        err = self.cross_sect_dict['errt']
        tot = self.cross_sect_dict['xsect']
        errABS = self.cross_sect_dict['erra']
        totABS = self.cross_sect_dict['xseca']
        jobs_new = []
        if fixed_order:
            if req_acc == -1:
                # no required accuracy: run a single refinement with the
                # npoints/niters requested in the run_card, then stop
                if step+1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode'] = -1
                        job['niters'] = niters
                        job['npoints'] = npoints
                        jobs_new.append(job)
                elif step+1 == 2:
                    pass
                elif step+1 > 2:
                    raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                                        'for integration step %i' % step)
            elif (req_acc > 0 and err/abs(tot) > req_acc*1.2) or step <= 0:
                req_accABS = req_acc*abs(tot)/totABS  # overal relative required accuracy on ABS Xsec.
                for job in jobs:
                    job['mint_mode'] = -1
                    # Determine relative required accuracy on the ABS for this job
                    job['accuracy'] = req_accABS*math.sqrt(totABS/job['resultABS'])
                    # If already accurate enough, skip the job (except when doing the first
                    # step for the iappl=2 run: we need to fill all the applgrid grids!)
                    if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                       and not (step == -1 and self.run_card['iappl'] == 2):
                        continue
                    # Update the number of PS points based on errorABS, ncall and accuracy
                    itmax_fl = job['niters_done']*math.pow(job['errorABS']/
                                                           (job['accuracy']*job['resultABS']), 2)
                    # map the estimated iteration count onto a reasonable
                    # (niters, npoints) pair; always at least double npoints
                    if itmax_fl <= 4.0:
                        job['niters'] = max(int(round(itmax_fl)), 2)
                        job['npoints'] = job['npoints_done']*2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0:
                        job['niters'] = 4
                        job['npoints'] = int(round(job['npoints_done']*itmax_fl/4.0))*2
                    else:
                        if itmax_fl > 100.0: itmax_fl = 50.0
                        job['niters'] = int(round(math.sqrt(itmax_fl)))
                        job['npoints'] = int(round(job['npoints_done']*itmax_fl/
                                                   round(math.sqrt(itmax_fl))))*2
                    # Add the job to the list of jobs that need to be run
                    jobs_new.append(job)
            return jobs_new
        elif step+1 <= 2:
            nevents = self.run_card['nevents']
            # Total required accuracy for the upper bounding envelope
            if req_acc < 0:
                req_acc2_inv = nevents
            else:
                req_acc2_inv = 1/(req_acc*req_acc)
            if step+1 == 1 or step+1 == 2:
                # determine the req. accuracy for each of the jobs for Mint-step = 1
                for job in jobs:
                    accuracy = min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])), 0.2)
                    job['accuracy'] = accuracy
            if step+1 == 2:
                # Randomly (based on the relative ABS Xsec of the job) determine the
                # number of events each job needs to generate for MINT-step = 2.
                r = self.get_randinit_seed()
                random.seed(r)
                totevts = nevents
                for job in jobs:
                    job['nevents'] = 0
                # assign the events one at a time: pick a channel with
                # probability proportional to its ABS cross section
                while totevts:
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i < len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode'] = step+1  # next step
            return jobs
        else:
            # event generation already done: nothing left to run
            return []
2520 2521
2522 - def get_randinit_seed(self):
2523 """ Get the random number seed from the randinit file """ 2524 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 2525 # format of the file is "r=%d". 2526 iseed = int(randinit.read()[2:]) 2527 return iseed
2528 2529
2530 - def append_the_results(self,jobs,integration_step):
2531 """Appends the results for each of the jobs in the job list""" 2532 error_found=False 2533 for job in jobs: 2534 try: 2535 if integration_step >= 0 : 2536 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 2537 results=res_file.readline().split() 2538 else: 2539 # should only be here when doing fixed order with the 'only_generation' 2540 # option equal to True. Take the results from the final run done. 2541 with open(pjoin(job['dirname'],'res.dat')) as res_file: 2542 results=res_file.readline().split() 2543 except IOError: 2544 if not error_found: 2545 error_found=True 2546 error_log=[] 2547 error_log.append(pjoin(job['dirname'],'log.txt')) 2548 continue 2549 job['resultABS']=float(results[0]) 2550 job['errorABS']=float(results[1]) 2551 job['result']=float(results[2]) 2552 job['error']=float(results[3]) 2553 job['niters_done']=int(results[4]) 2554 job['npoints_done']=int(results[5]) 2555 job['time_spend']=float(results[6]) 2556 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 2557 job['err_perc'] = job['error']/job['result']*100. 2558 if error_found: 2559 raise aMCatNLOError('An error occurred during the collection of results.\n' + 2560 'Please check the .log files inside the directories which failed:\n' + 2561 '\n'.join(error_log)+'\n')
2562 2563 2564
    def write_res_txt_file(self, jobs, integration_step):
        """writes the res.txt files in the SubProcess dir.

        Also returns the summary dictionary with the total cross
        section ('xsect'), the total ABS cross section ('xseca'), their
        errors ('errt'/'erra') and the current random seed ('randinit').
        Note: sorts the caller's 'jobs' list in place (largest ABS error
        first).
        """
        jobs.sort(key = lambda job: -job['errorABS'])
        content = []
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        totABS = 0
        errABS = 0
        tot = 0
        err = 0
        for job in jobs:
            # accumulate totals; errors are summed in quadrature
            # NOTE(review): the squared errors are weighted by wgt_frac,
            # not wgt_frac**2 — presumably intentional for split event
            # jobs, but worth confirming against the split logic.
            totABS += job['resultABS']*job['wgt_frac']
            errABS += math.pow(job['errorABS'], 2)*job['wgt_frac']
            tot += job['result']*job['wgt_frac']
            err += math.pow(job['error'], 2)*job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
                    (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
                     tot, math.sqrt(err), math.sqrt(err)/tot *100.))
        with open(pjoin(self.me_dir, 'SubProcesses', 'res_%s.txt' % integration_step), 'w') as res_file:
            res_file.write('\n'.join(content))
        randinit = self.get_randinit_seed()
        return {'xsect': tot, 'xseca': totABS, 'errt': math.sqrt(err),\
                'erra': math.sqrt(errABS), 'randinit': randinit}
2593 2594
2595 - def collect_scale_pdf_info(self,options,jobs):
2596 """read the scale_pdf_dependence.dat files and collects there results""" 2597 scale_pdf_info=[] 2598 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2599 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2600 evt_files=[] 2601 evt_wghts=[] 2602 for job in jobs: 2603 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 2604 evt_wghts.append(job['wgt_frac']) 2605 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts) 2606 return scale_pdf_info
2607 2608
2609 - def combine_plots_FO(self,folder_name,jobs):
2610 """combines the plots and puts then in the Events/run* directory""" 2611 devnull = open(os.devnull, 'w') 2612 2613 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 2614 topfiles = [] 2615 for job in jobs: 2616 if job['dirname'].endswith('.top'): 2617 topfiles.append(job['dirname']) 2618 else: 2619 topfiles.append(pjoin(job['dirname'],'MADatNLO.top')) 2620 misc.call(['./combine_plots_FO.sh'] + topfiles, \ 2621 stdout=devnull, 2622 cwd=pjoin(self.me_dir, 'SubProcesses')) 2623 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 2624 pjoin(self.me_dir, 'Events', self.run_name)) 2625 logger.info('The results of this run and the TopDrawer file with the plots' + \ 2626 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2627 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 2628 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO') 2629 self.combine_plots_HwU(jobs,out) 2630 try: 2631 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 2632 stdout=devnull,stderr=devnull,\ 2633 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2634 except Exception: 2635 pass 2636 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 2637 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2638 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 2639 rootfiles = [] 2640 for job in jobs: 2641 if job['dirname'].endswith('.root'): 2642 rootfiles.append(job['dirname']) 2643 else: 2644 rootfiles.append(pjoin(job['dirname'],'MADatNLO.root')) 2645 misc.call(['./combine_root.sh'] + folder_name + rootfiles, \ 2646 stdout=devnull, 2647 cwd=pjoin(self.me_dir, 'SubProcesses')) 2648 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 2649 pjoin(self.me_dir, 'Events', self.run_name)) 2650 logger.info('The results of this run and the ROOT file with the plots' + \ 2651 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2652 elif 
self.analyse_card['fo_analysis_format'].lower() == 'lhe': 2653 self.combine_FO_lhe(jobs) 2654 logger.info('The results of this run and the LHE File (to be used for plotting only)' + \ 2655 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2656 else: 2657 logger.info('The results of this run' + \ 2658 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2659
    def combine_FO_lhe(self, jobs):
        """combine the various lhe file generated in each directory.
        They are two steps:
        1) banner
        2) reweight each sample by the factor written at the end of each file
        3) concatenate each of the new files (gzip those).
        """

        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        # normalise the post-processing flags once, so the membership tests
        # below ('norandom', 'nogrouping', 'noidentification') match reliably
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)

        # 1. write the banner
        text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read()
        i1, i2 = text.find('<initrwgt>'), text.find('</initrwgt>')
        # keep only the payload between the tags (10 == len('<initrwgt>'))
        self.banner['initrwgt'] = text[10+i1:i2]
        #
        # <init>
        #   2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1
        #   8.430000e+02 2.132160e+00 8.430000e+02 1
        # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator>
        # </init>

        cross = sum(j['result'] for j in jobs)
        # NOTE(review): errors are summed linearly under the sqrt here while
        # write_res_txt_file combines them in quadrature -- confirm intended.
        error = math.sqrt(sum(j['error'] for j in jobs))
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])

        # append the re-normalised events behind the gzipped banner
        fsock = lhe_parser.EventFile(output,'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True

        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            # keep channel order: process each directory's file sequentially
            for job in jobs:
                dirname = job['dirname']
                #read last line
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # get normalisation ratio
                ratio = cross/sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe'))
                lhe.eventgroup = True # read the events by eventgroup
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i,event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        # merge identical consecutive events (unless disabled):
                        # their weights and reweight entries are accumulated
                        if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                           and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname,'events.lhe'))
        else:
            # randomised combination: interleave all channel files via a
            # MultiEventFile, each sample carrying its own normalisation scale
            lhe = []
            lenlhe = []
            misc.sprint('need to combine %s event file' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross,
                                        nb_event=int(nb_event), scale=cross/sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i,event in enumerate(eventsgroup):
                    event.rescale_weights(event.sample_scale)
                    # same duplicate-merging logic as the 'norandom' branch
                    if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                       and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            # NOTE(review): the closing tag and fsock.close() happen only in
            # this branch; the 'norandom' path presumably relies on EventFile
            # cleanup -- confirm.
            fsock.write('</LesHouchesEvents>\n')
            fsock.close()
            misc.sprint('combining lhe file done in ', time.time()-start)
            for job in jobs:
                dirname = job['dirname']
                os.remove(pjoin(dirname,'events.lhe'))

        misc.sprint('combining lhe file done in ', time.time()-start)
2762 2763 2764 2765 2766 2767
2768 - def combine_plots_HwU(self,jobs,out,normalisation=None):
2769 """Sums all the plots in the HwU format.""" 2770 logger.debug('Combining HwU plots.') 2771 2772 command = [] 2773 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 2774 for job in jobs: 2775 if job['dirname'].endswith('.HwU'): 2776 command.append(job['dirname']) 2777 else: 2778 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 2779 command.append("--out="+out) 2780 command.append("--gnuplot") 2781 command.append("--band=[]") 2782 command.append("--lhapdf-config="+self.options['lhapdf']) 2783 if normalisation: 2784 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 2785 command.append("--sum") 2786 command.append("--keep_all_weights") 2787 command.append("--no_open") 2788 2789 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 2790 2791 while p.poll() is None: 2792 line = p.stdout.readline() 2793 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 2794 print line[:-1] 2795 elif __debug__ and line: 2796 logger.debug(line[:-1])
2797 2798
2799 - def applgrid_combine(self,cross,error,jobs):
2800 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 2801 logger.debug('Combining APPLgrids \n') 2802 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 2803 'applgrid-combine') 2804 all_jobs=[] 2805 for job in jobs: 2806 all_jobs.append(job['dirname']) 2807 ngrids=len(all_jobs) 2808 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2809 for obs in range(0,nobs): 2810 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2811 # combine APPLgrids from different channels for observable 'obs' 2812 if self.run_card["iappl"] == 1: 2813 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2814 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2815 elif self.run_card["iappl"] == 2: 2816 unc2_inv=pow(cross/error,2) 2817 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2818 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2819 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2820 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2821 for job in all_jobs: 2822 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2823 else: 2824 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2825 # after combining, delete the original grids 2826 for ggdir in gdir: 2827 os.remove(ggdir)
2828 2829
    def applgrid_distribute(self,options,mode,p_dirs):
        """Distributes the APPLgrids ready to be filled by a second run of the code

        mode selects which channel directories receive the grids
        (directories named <mode>_G*); p_dirs are the SubProcesses/P* dirs."""
        # if no appl_start_grid argument given, guess it from the time stamps
        # of the starting grid files
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))

            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # pick the run directory ([-2] path component) holding the most
            # recently modified starting grid
            # (iterkeys is Python 2 only)
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key:
                                                 time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                        'Guessing that start grid from run "%s" should be used.' \
                        % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that this dir exists and at least one grid file is there
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                                      pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                           start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        #copy the grid to all relevant directories
        for pdir in p_dirs:
            # every <mode>_G* channel directory under this P* dir gets a copy
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                      os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # observable index is encoded in the grid file name
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2874 2875 2876 2877
2878 - def collect_log_files(self, jobs, integration_step):
2879 """collect the log files and put them in a single, html-friendly file 2880 inside the Events/run_.../ directory""" 2881 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2882 'alllogs_%d.html' % integration_step) 2883 outfile = open(log_file, 'w') 2884 2885 content = '' 2886 content += '<HTML><BODY>\n<font face="courier" size=2>' 2887 for job in jobs: 2888 # put an anchor 2889 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2890 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2891 pjoin(self.me_dir,'SubProcesses'),'')) 2892 # and put some nice header 2893 content += '<font color="red">\n' 2894 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2895 (os.path.dirname(log).replace(pjoin(self.me_dir, 2896 'SubProcesses'), ''), 2897 integration_step) 2898 content += '</font>\n' 2899 #then just flush the content of the small log inside the big log 2900 #the PRE tag prints everything verbatim 2901 with open(log) as l: 2902 content += '<PRE>\n' + l.read() + '\n</PRE>' 2903 content +='<br>\n' 2904 outfile.write(content) 2905 content='' 2906 2907 outfile.write('</font>\n</BODY></HTML>\n') 2908 outfile.close()
2909 2910
2911 - def finalise_run_FO(self,folder_name,jobs):
2912 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2913 # Copy the res_*.txt files to the Events/run* folder 2914 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')) 2915 for res_file in res_files: 2916 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2917 # Collect the plots and put them in the Events/run* folder 2918 self.combine_plots_FO(folder_name,jobs) 2919 # If doing the applgrid-stuff, also combine those grids 2920 # and put those in the Events/run* folder 2921 if self.run_card['iappl'] != 0: 2922 cross=self.cross_sect_dict['xsect'] 2923 error=self.cross_sect_dict['errt'] 2924 self.applgrid_combine(cross,error,jobs)
2925 2926
2927 - def setup_cluster_or_multicore(self):
2928 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2929 if self.cluster_mode == 1: 2930 cluster_name = self.options['cluster_type'] 2931 try: 2932 self.cluster = cluster.from_name[cluster_name](**self.options) 2933 except KeyError: 2934 # Check if a plugin define this type of cluster 2935 # check for PLUGIN format 2936 cluster_class = misc.from_plugin_import(self.plugin_path, 2937 'new_cluster', cluster_name, 2938 info = 'cluster handling will be done with PLUGIN: %{plug}s' ) 2939 if cluster_class: 2940 self.cluster = cluster_class(**self.options) 2941 2942 if self.cluster_mode == 2: 2943 try: 2944 import multiprocessing 2945 if not self.nb_core: 2946 try: 2947 self.nb_core = int(self.options['nb_core']) 2948 except TypeError: 2949 self.nb_core = multiprocessing.cpu_count() 2950 logger.info('Using %d cores' % self.nb_core) 2951 except ImportError: 2952 self.nb_core = 1 2953 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2954 'Use set nb_core X in order to set this number and be able to'+ 2955 'run in multicore.') 2956 2957 self.cluster = cluster.MultiCore(**self.options)
2958 2959
    def clean_previous_results(self,options,p_dirs,folder_name):
        """Clean previous results.
        o. If doing only the reweighting step, do not delete anything and return directlty.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            #find old folders to be removed
            for obj in folder_name:
                # obj is a glob-like pattern ('G*', 'all_G*', ...); obj[:-1]
                # drops the trailing '*' to get the prefix to match on
                # list all the G* (or all_G* or born_G*) directories
                to_rm = [file for file in \
                         os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                         if file.startswith(obj[:-1]) and \
                         (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                          os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # list all the G*_* directories (from split event generation)
                to_always_rm = [file for file in \
                                os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                                if file.startswith(obj[:-1]) and
                                '_' in file and not '_G' in file and \
                                (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                                 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                # on a full (non-incremental) run also wipe the main channel
                # directories and the cached MadLoop resources tarball
                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                # note: the tarball entry is already an absolute path, so the
                # pjoin below leaves it unchanged (os.path.join semantics)
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return
2992 2993
    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step, if =2 (i.e. after event generation)
        some additional infos are printed

        NOTE(review): the mutable default scale_pdf_info=[] is only read, never
        mutated, so the shared-default pitfall does not bite here."""
        # find process name: collect all 'generate'/'add process' lines of the
        # proc_card into a ';'-separated string
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
        lpp = {0:'l', 1:'p', -1:'pbar'}
        # ninitial == 1 means a decay process (no collider line needed)
        if self.ninitial == 1:
            proc_info = '\n Process %s' % process[:-3]
        else:
            proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                 self.run_card['ebeam1'], self.run_card['ebeam2'])

        # choose units / labels depending on decay vs. scattering
        if self.ninitial == 1:
            self.cross_sect_dict['unit']='GeV'
            self.cross_sect_dict['xsec_string']='(Partial) decay width'
            self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit']='pb'
            self.cross_sect_dict['xsec_string']='Total cross section'
            self.cross_sect_dict['axsec_string']='Total abs(cross section)'
        if self.run_card['event_norm'].lower()=='bias':
            self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'

        # status lines indexed by the mintMC step
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed='(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:','Current results:',
                      'Final results and run summary:']
            computed='(computed from histogram information)'

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            # intermediate event-generation report
            message = status[step] + '\n\n Intermediate results:' + \
                ('\n Random seed: %(randinit)d' + \
                 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
                 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                 % self.cross_sect_dict
        elif mode in ['NLO','LO'] and not done:
            # intermediate fixed-order report
            if step == 0:
                message = '\n ' + status[0] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict
            else:
                message = '\n ' + status[1] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict

        else:
            # final summary, optionally with scale/PDF uncertainties
            message = '\n --------------------------------------------------------------'
            message = message + \
                '\n ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                    '\n Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict
            message = message + \
                '\n --------------------------------------------------------------'
            if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale uncertainties
                    message = message + '\n Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                    ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
                                     '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                message = message + \
                                    ('\n Soft and hard scale dependence (added in quadrature): '\
                                     '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s

                        else:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i: '\
                                 '\n %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    message = message + '\n PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc']=='none':
                            message = message + \
                                ('\n %(name)s (central value only): '\
                                 '\n %(cen)8.3e pb') % p

                        elif p['unc']=='unknown':
                            message = message + \
                                ('\n %(name)s (%(size)s members; combination method unknown): '\
                                 '\n %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                ('\n %(name)s (%(size)s members; using %(unc)s method): '\
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
                # pdf uncertainties
                message = message + \
                    '\n --------------------------------------------------------------'


        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
            # intermediate report only: print and stop here
            logger.info(message+'\n')
            return

        # Some advanced general statistics are shown in the debug message at the
        # end of the run
        # Make sure it never stops a run
        # Gather some basic statistics for the run and extracted from the log files.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files

        elif mode == 'LO':
            # no virtuals at LO: only the born channel logs exist
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            # Python 2 raise-statement syntax
            raise aMCatNLOError, 'Running mode %s not supported.'%mode

        # statistics collection must never kill the run: wrap and report
        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
                %err_string.getvalue()

        logger.debug(debug_msg+'\n')
        logger.info(message+'\n')

        # Now copy relevant information in the Events/Run_<xxx> directory
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message+'\n\n'+debug_msg+'\n')

        self.archive_files(evt_path,mode)
3147
3148 - def archive_files(self, evt_path, mode):
3149 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 3150 the run.""" 3151 3152 files_to_arxiv = [pjoin('Cards','param_card.dat'), 3153 pjoin('Cards','MadLoopParams.dat'), 3154 pjoin('Cards','FKS_params.dat'), 3155 pjoin('Cards','run_card.dat'), 3156 pjoin('Subprocesses','setscales.f'), 3157 pjoin('Subprocesses','cuts.f')] 3158 3159 if mode in ['NLO', 'LO']: 3160 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 3161 3162 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 3163 os.mkdir(pjoin(evt_path,'RunMaterial')) 3164 3165 for path in files_to_arxiv: 3166 if os.path.isfile(pjoin(self.me_dir,path)): 3167 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 3168 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 3169 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3170
3171 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
3172 """ This functions goes through the log files given in arguments and 3173 compiles statistics about MadLoop stability, virtual integration 3174 optimization and detection of potential error messages into a nice 3175 debug message to printed at the end of the run """ 3176 3177 def safe_float(str_float): 3178 try: 3179 return float(str_float) 3180 except ValueError: 3181 logger.debug('Could not convert the following float during'+ 3182 ' advanced statistics printout: %s'%str(str_float)) 3183 return -1.0
3184 3185 3186 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 3187 # > Errors is a list of tuples with this format (log_file,nErrors) 3188 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 3189 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 3190 3191 # ================================== 3192 # == MadLoop stability statistics == 3193 # ================================== 3194 3195 # Recuperate the fraction of unstable PS points found in the runs for 3196 # the virtuals 3197 UPS_stat_finder = re.compile( 3198 r"Satistics from MadLoop:.*"+\ 3199 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 3200 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 3201 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 3202 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 3203 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 3204 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 3205 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 3206 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 3207 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 3208 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 3209 3210 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 3211 1 : 'CutTools (double precision)', 3212 2 : 'PJFry++', 3213 3 : 'IREGI', 3214 4 : 'Golem95', 3215 5 : 'Samurai', 3216 6 : 'Ninja (double precision)', 3217 7 : 'COLLIER', 3218 8 : 'Ninja (quadruple precision)', 3219 9 : 'CutTools (quadruple precision)'} 3220 RetUnit_finder =re.compile( 3221 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 3222 #Unit 3223 3224 for gv_log in log_GV_files: 3225 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 3226 log=open(gv_log,'r').read() 3227 UPS_stats = re.search(UPS_stat_finder,log) 3228 for retunit_stats in re.finditer(RetUnit_finder, log): 3229 if channel_name not in stats['UPS'].keys(): 3230 stats['UPS'][channel_name] = [0]*10+[[0]*10] 3231 
stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 3232 += int(retunit_stats.group('n_occurences')) 3233 if not UPS_stats is None: 3234 try: 3235 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 3236 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 3237 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 3238 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 3239 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 3240 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 3241 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 3242 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 3243 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 3244 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 3245 except KeyError: 3246 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 3247 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 3248 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 3249 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 3250 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 3251 int(UPS_stats.group('n10')),[0]*10] 3252 debug_msg = "" 3253 if len(stats['UPS'].keys())>0: 3254 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 3255 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 3256 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 3257 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 3258 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 3259 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 3260 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 3261 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 3262 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 3263 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 3264 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 3265 
for i in range(10)] 3266 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 3267 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 3268 maxUPS = max(UPSfracs, key = lambda w: w[1]) 3269 3270 tmpStr = "" 3271 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 3272 tmpStr += '\n Stability unknown: %d'%nTotsun 3273 tmpStr += '\n Stable PS point: %d'%nTotsps 3274 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 3275 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 3276 tmpStr += '\n Only double precision used: %d'%nTotddp 3277 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 3278 tmpStr += '\n Initialization phase-space points: %d'%nTotini 3279 tmpStr += '\n Reduction methods used:' 3280 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 3281 unit_code_meaning.keys() if nTot1[i]>0] 3282 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 3283 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 3284 if nTot100 != 0: 3285 debug_msg += '\n Unknown return code (100): %d'%nTot100 3286 if nTot10 != 0: 3287 debug_msg += '\n Unknown return code (10): %d'%nTot10 3288 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 3289 not in unit_code_meaning.keys()) 3290 if nUnknownUnit != 0: 3291 debug_msg += '\n Unknown return code (1): %d'\ 3292 %nUnknownUnit 3293 3294 if maxUPS[1]>0.001: 3295 message += tmpStr 3296 message += '\n Total number of unstable PS point detected:'+\ 3297 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 3298 message += '\n Maximum fraction of UPS points in '+\ 3299 'channel %s (%4.2f%%)'%maxUPS 3300 message += '\n Please report this to the authors while '+\ 3301 'providing the file' 3302 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 3303 maxUPS[0],'UPS.log')) 3304 else: 3305 debug_msg += tmpStr 3306 3307 3308 # ==================================================== 3309 # == aMC@NLO virtual integration optimization stats == 3310 # 
==================================================== 3311 3312 virt_tricks_finder = re.compile( 3313 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 3314 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 3315 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 3316 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 3317 3318 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 3319 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 3320 3321 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 3322 3323 channel_contr_list = {} 3324 for gv_log in log_GV_files: 3325 logfile=open(gv_log,'r') 3326 log = logfile.read() 3327 logfile.close() 3328 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3329 vf_stats = None 3330 for vf_stats in re.finditer(virt_frac_finder, log): 3331 pass 3332 if not vf_stats is None: 3333 v_frac = safe_float(vf_stats.group('v_frac')) 3334 v_average = safe_float(vf_stats.group('v_average')) 3335 try: 3336 if v_frac < stats['virt_stats']['v_frac_min'][0]: 3337 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 3338 if v_frac > stats['virt_stats']['v_frac_max'][0]: 3339 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 3340 stats['virt_stats']['v_frac_avg'][0] += v_frac 3341 stats['virt_stats']['v_frac_avg'][1] += 1 3342 except KeyError: 3343 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 3344 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 3345 stats['virt_stats']['v_frac_avg']=[v_frac,1] 3346 3347 3348 ccontr_stats = None 3349 for ccontr_stats in re.finditer(channel_contr_finder, log): 3350 pass 3351 if not ccontr_stats is None: 3352 contrib = safe_float(ccontr_stats.group('v_contr')) 3353 try: 3354 if contrib>channel_contr_list[channel_name]: 3355 channel_contr_list[channel_name]=contrib 3356 except KeyError: 3357 
channel_contr_list[channel_name]=contrib 3358 3359 3360 # Now build the list of relevant virt log files to look for the maxima 3361 # of virt fractions and such. 3362 average_contrib = 0.0 3363 for value in channel_contr_list.values(): 3364 average_contrib += value 3365 if len(channel_contr_list.values()) !=0: 3366 average_contrib = average_contrib / len(channel_contr_list.values()) 3367 3368 relevant_log_GV_files = [] 3369 excluded_channels = set([]) 3370 all_channels = set([]) 3371 for log_file in log_GV_files: 3372 channel_name = '/'.join(log_file.split('/')[-3:-1]) 3373 all_channels.add(channel_name) 3374 try: 3375 if channel_contr_list[channel_name] > (0.1*average_contrib): 3376 relevant_log_GV_files.append(log_file) 3377 else: 3378 excluded_channels.add(channel_name) 3379 except KeyError: 3380 relevant_log_GV_files.append(log_file) 3381 3382 # Now we want to use the latest occurence of accumulated result in the log file 3383 for gv_log in relevant_log_GV_files: 3384 logfile=open(gv_log,'r') 3385 log = logfile.read() 3386 logfile.close() 3387 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 3388 3389 vt_stats = None 3390 for vt_stats in re.finditer(virt_tricks_finder, log): 3391 pass 3392 if not vt_stats is None: 3393 vt_stats_group = vt_stats.groupdict() 3394 v_ratio = safe_float(vt_stats.group('v_ratio')) 3395 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 3396 v_contr = safe_float(vt_stats.group('v_abs_contr')) 3397 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 3398 try: 3399 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 3400 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 3401 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 3402 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 3403 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 3404 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 3405 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 3406 
stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 3407 if v_contr < stats['virt_stats']['v_contr_min'][0]: 3408 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 3409 if v_contr > stats['virt_stats']['v_contr_max'][0]: 3410 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 3411 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 3412 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 3413 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 3414 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 3415 except KeyError: 3416 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 3417 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 3418 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 3419 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 3420 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 3421 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 3422 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 3423 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 3424 3425 vf_stats = None 3426 for vf_stats in re.finditer(virt_frac_finder, log): 3427 pass 3428 if not vf_stats is None: 3429 v_frac = safe_float(vf_stats.group('v_frac')) 3430 v_average = safe_float(vf_stats.group('v_average')) 3431 try: 3432 if v_average < stats['virt_stats']['v_average_min'][0]: 3433 stats['virt_stats']['v_average_min']=(v_average,channel_name) 3434 if v_average > stats['virt_stats']['v_average_max'][0]: 3435 stats['virt_stats']['v_average_max']=(v_average,channel_name) 3436 stats['virt_stats']['v_average_avg'][0] += v_average 3437 stats['virt_stats']['v_average_avg'][1] += 1 3438 except KeyError: 3439 stats['virt_stats']['v_average_min']=[v_average,channel_name] 3440 stats['virt_stats']['v_average_max']=[v_average,channel_name] 3441 stats['virt_stats']['v_average_avg']=[v_average,1] 3442 3443 try: 3444 debug_msg += '\n\n 
Statistics on virtual integration optimization : ' 3445 3446 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3447 %tuple(stats['virt_stats']['v_frac_max']) 3448 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3449 %tuple(stats['virt_stats']['v_frac_min']) 3450 debug_msg += '\n Average virt fraction computed %.3f'\ 3451 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3452 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3453 (len(excluded_channels),len(all_channels)) 3454 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3455 %tuple(stats['virt_stats']['v_average_max']) 3456 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3457 %tuple(stats['virt_stats']['v_ratio_max']) 3458 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3459 %tuple(stats['virt_stats']['v_ratio_err_max']) 3460 debug_msg += tmpStr 3461 # After all it was decided that it is better not to alarm the user unecessarily 3462 # with such printout of the statistics. 3463 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3464 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3465 # message += "\n Suspiciously large MC error in :" 3466 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3467 # message += tmpStr 3468 3469 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3470 %tuple(stats['virt_stats']['v_contr_err_max']) 3471 debug_msg += tmpStr 3472 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3473 # message += tmpStr 3474 3475 3476 except KeyError: 3477 debug_msg += '\n Could not find statistics on the integration optimization. 
' 3478 3479 # ======================================= 3480 # == aMC@NLO timing profile statistics == 3481 # ======================================= 3482 3483 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3484 "(?P<time>[\d\+-Eed\.]*)\s*") 3485 3486 for logf in log_GV_files: 3487 logfile=open(logf,'r') 3488 log = logfile.read() 3489 logfile.close() 3490 channel_name = '/'.join(logf.split('/')[-3:-1]) 3491 mint = re.search(mint_search,logf) 3492 if not mint is None: 3493 channel_name = channel_name+' [step %s]'%mint.group('ID') 3494 3495 for time_stats in re.finditer(timing_stat_finder, log): 3496 try: 3497 stats['timings'][time_stats.group('name')][channel_name]+=\ 3498 safe_float(time_stats.group('time')) 3499 except KeyError: 3500 if time_stats.group('name') not in stats['timings'].keys(): 3501 stats['timings'][time_stats.group('name')] = {} 3502 stats['timings'][time_stats.group('name')][channel_name]=\ 3503 safe_float(time_stats.group('time')) 3504 3505 # useful inline function 3506 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3507 try: 3508 totTimeList = [(time, chan) for chan, time in \ 3509 stats['timings']['Total'].items()] 3510 except KeyError: 3511 totTimeList = [] 3512 3513 totTimeList.sort() 3514 if len(totTimeList)>0: 3515 debug_msg += '\n\n Inclusive timing profile :' 3516 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3517 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3518 debug_msg += '\n Average channel running time %s'%\ 3519 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3520 debug_msg += '\n Aggregated total running time %s'%\ 3521 Tstr(sum([el[0] for el in totTimeList])) 3522 else: 3523 debug_msg += '\n\n Inclusive timing profile non available.' 
3524 3525 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 3526 sum(stats['timings'][stat].values()), reverse=True) 3527 for name in sorted_keys: 3528 if name=='Total': 3529 continue 3530 if sum(stats['timings'][name].values())<=0.0: 3531 debug_msg += '\n Zero time record for %s.'%name 3532 continue 3533 try: 3534 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3535 chan) for chan, time in stats['timings'][name].items()] 3536 except KeyError, ZeroDivisionError: 3537 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3538 continue 3539 TimeList.sort() 3540 debug_msg += '\n Timing profile for <%s> :'%name 3541 try: 3542 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3543 safe_float((100.0*(sum(stats['timings'][name].values())/ 3544 sum(stats['timings']['Total'].values())))) 3545 except KeyError, ZeroDivisionError: 3546 debug_msg += '\n Overall fraction of time unavailable.' 3547 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3548 (TimeList[-1][0],TimeList[-1][1]) 3549 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3550 (TimeList[0][0],TimeList[0][1]) 3551 3552 # ============================= 3553 # == log file eror detection == 3554 # ============================= 3555 3556 # Find the number of potential errors found in all log files 3557 # This re is a simple match on a case-insensitve 'error' but there is 3558 # also some veto added for excluding the sentence 3559 # "See Section 6 of paper for error calculation." 3560 # which appear in the header of lhapdf in the logs. 
3561 err_finder = re.compile(\ 3562 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3563 for log in all_log_files: 3564 logfile=open(log,'r') 3565 nErrors = len(re.findall(err_finder, logfile.read())) 3566 logfile.close() 3567 if nErrors != 0: 3568 stats['Errors'].append((str(log),nErrors)) 3569 3570 nErrors = sum([err[1] for err in stats['Errors']],0) 3571 if nErrors != 0: 3572 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3573 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3574 'found in the following log file%s:'%('s' if \ 3575 len(stats['Errors'])>1 else '') 3576 for error in stats['Errors'][:3]: 3577 log_name = '/'.join(error[0].split('/')[-5:]) 3578 debug_msg += '\n > %d error%s in %s'%\ 3579 (error[1],'s' if error[1]>1 else '',log_name) 3580 if len(stats['Errors'])>3: 3581 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3582 nRemainingLogs = len(stats['Errors'])-3 3583 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3584 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3585 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3586 3587 return message, debug_msg 3588 3589
def reweight_and_collect_events(self, options, mode, nevents, event_norm):
    """this function calls the reweighting routines and creates the event file in the
    Event dir. Return the name of the event file created
    """
    # reweighting is only needed when scale/PDF variations or several
    # dynamical scales / lhapdf sets have been requested in the run_card
    scale_pdf_info = []
    need_reweight = any(self.run_card['reweight_scale']) or \
                    any(self.run_card['reweight_PDF']) or \
                    len(self.run_card['dynamical_scale_choice']) > 1 or \
                    len(self.run_card['lhaid']) > 1
    if need_reweight:
        scale_pdf_info = self.run_reweight(options['reweightonly'])

    self.update_status('Collecting events', level='parton', update_results=True)
    misc.compile(['collect_events'],
                 cwd=pjoin(self.me_dir, 'SubProcesses'),
                 nocompile=options['nocompile'])
    proc = misc.Popen(['./collect_events'],
                      cwd=pjoin(self.me_dir, 'SubProcesses'),
                      stdin=subprocess.PIPE,
                      stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
    # answer the interactive menu of collect_events: choice depends on
    # the requested event normalisation ('2' is the default/average mode)
    norm_choice = {'sum': '1\n', 'unity': '3\n', 'bias': '0\n'}
    proc.communicate(input=norm_choice.get(event_norm.lower(), '2\n'))

    # collect_events writes the name of the produced file as the last
    # word of its log
    filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

    if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
        raise aMCatNLOError('An error occurred during event generation. ' + \
                'The event file has not been created. Check collect_events.log')
    evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
    misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
    if not options['reweightonly']:
        self.print_summary(options, 2, mode, scale_pdf_info)
        for res_file in misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')):
            files.mv(res_file, pjoin(self.me_dir, 'Events', self.run_name))

    logger.info('The %s file has been generated.\n' % (evt_file))
    self.results.add_detail('nb_event', nevents)
    self.update_status('Events generated', level='parton', update_results=True)
    # strip the '.gz' suffix: callers expect the plain .lhe name
    return evt_file[:-3]
def run_mcatnlo(self, evt_file, options):
    """runs mcatnlo on the generated event file, to produce showered-events

    evt_file: path to the (possibly gzipped) parton-level LHE file.
    options: dict of run options (only 'nocompile' is read here).
    Compiles and runs the MCatNLO package for the shower selected in the
    run_card, possibly splitting the event file over several jobs, then
    collects the showered events (HEP/HEPMC) or histograms (top/HwU).
    """
    logger.info('Preparing MCatNLO run')
    try:
        misc.gunzip(evt_file)
    except Exception:
        # the file may already be gunzipped; best-effort only
        pass

    self.banner = banner_mod.Banner(evt_file)
    shower = self.banner.get_detail('run_card', 'parton_shower').upper()

    #check that the number of split event files divides the number of
    # events, otherwise set it to 1
    if int(self.banner.get_detail('run_card', 'nevents') / \
            self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
            != self.banner.get_detail('run_card', 'nevents'):
        logger.warning(\
            'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
            'Setting it to 1.')
        self.shower_card['nsplit_jobs'] = 1

    # don't split jobs if the user asks to shower only a part of the events
    if self.shower_card['nevents'] > 0 and \
       self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
       self.shower_card['nsplit_jobs'] != 1:
        logger.warning(\
            'Only a part of the events will be showered.\n' + \
            'Setting nsplit_jobs in the shower_card to 1.')
        self.shower_card['nsplit_jobs'] = 1

    self.banner_to_mcatnlo(evt_file)

    # if fastjet has to be linked (in extralibs) then
    # add lib /include dirs for fastjet if fastjet-config is present on the
    # system, otherwise add fjcore to the files to combine
    if 'fastjet' in self.shower_card['extralibs']:
        #first, check that stdc++ is also linked
        if not 'stdc++' in self.shower_card['extralibs']:
            logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
            self.shower_card['extralibs'] += ' stdc++'
        # then check if options[fastjet] corresponds to a valid fj installation
        try:
            #this is for a complete fj installation
            p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = p.communicate()
            #remove the line break from output (last character)
            output = output[:-1]
            # add lib/include paths
            if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
                logger.warning('Linking FastJet: updating EXTRAPATHS')
                self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
            if not pjoin(output, 'include') in self.shower_card['includepaths']:
                logger.warning('Linking FastJet: updating INCLUDEPATHS')
                self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
            # to be changed in the fortran wrapper
            include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
            namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
        except Exception:
            logger.warning('Linking FastJet: using fjcore')
            # this is for FJcore, so no FJ library has to be linked
            self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
            if not 'fjcore.o' in self.shower_card['analyse']:
                self.shower_card['analyse'] += ' fjcore.o'
            # to be changed in the fortran wrapper
            include_line = '#include "fjcore.hh"//INCLUDE_FJ'
            namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
        # change the fortran wrapper with the correct namespaces/include
        fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
        for line in fjwrapper_lines:
            if '//INCLUDE_FJ' in line:
                fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
            if '//NAMESPACE_FJ' in line:
                fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
        with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
            fsock.write('\n'.join(fjwrapper_lines) + '\n')

    extrapaths = self.shower_card['extrapaths'].split()

    # check that the path needed by HW++ and PY8 are set if one uses these shower
    if shower in ['HERWIGPP', 'PYTHIA8']:
        path_dict = {'HERWIGPP': ['hepmc_path',
                                  'thepeg_path',
                                  'hwpp_path'],
                     'PYTHIA8': ['pythia8_path']}

        if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
            raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
                ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))

    if shower == 'HERWIGPP':
        extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
        self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')

    # add the HEPMC path of the pythia8 installation
    if shower == 'PYTHIA8':
        hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'],
                                 stdout=subprocess.PIPE).stdout.read().strip()
        #this gives all the flags, i.e.
        #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC
        # we just need the path to the HepMC libraries
        extrapaths.append(hepmc.split()[1].replace('-L', ''))

    if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
        extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))

    # set the PATH for the dynamic libraries
    if sys.platform == 'darwin':
        ld_library_path = 'DYLD_LIBRARY_PATH'
    else:
        ld_library_path = 'LD_LIBRARY_PATH'
    if ld_library_path in os.environ:
        paths = os.environ[ld_library_path]
    else:
        paths = ''
    paths += ':' + ':'.join(extrapaths)
    os.putenv(ld_library_path, paths)

    shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
    self.shower_card.write_card(shower, shower_card_path)

    # overwrite if shower_card_set.dat exists in MCatNLO
    if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
        files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
                 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))

    mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
    self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')

    # libdl may be needded for pythia 82xx
    #if shower == 'PYTHIA8' and not \
    #    os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
    #    'dl' not in self.shower_card['extralibs'].split():
    #    # 'dl' has to be linked with the extralibs
    #    self.shower_card['extralibs'] += ' dl'
    #    logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
    #                   "It is needed for the correct running of PY8.2xx.\n" + \
    #                   "If this library cannot be found on your system, a crash will occur.")

    misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
              stderr=open(mcatnlo_log, 'w'),
              cwd=pjoin(self.me_dir, 'MCatNLO'),
              close_fds=True)

    exe = 'MCATNLO_%s_EXE' % shower
    if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
       not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
        print(open(mcatnlo_log).read())
        raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
    logger.info('    ... done')

    # create an empty dir where to run
    count = 1
    while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                              (shower, count))):
        count += 1
    rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
                   (shower, count))
    os.mkdir(rundir)
    files.cp(shower_card_path, rundir)

    #look for the event files (don't resplit if one asks for the
    # same number of event files as in the previous run)
    event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
    if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
        logger.info('Cleaning old files and splitting the event file...')
        #clean the old files
        files.rm([f for f in event_files if 'events.lhe' not in f])
        if self.shower_card['nsplit_jobs'] > 1:
            misc.compile(['split_events'], cwd=pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
            p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
                           stdin=subprocess.PIPE,
                           stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
                           cwd=pjoin(self.me_dir, 'Events', self.run_name))
            p.communicate(input='events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
            logger.info('Splitting done.')
        event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))

    event_files.sort()

    self.update_status('Showering events...', level='shower')
    logger.info('(Running in %s)' % rundir)
    if shower != 'PYTHIA8':
        files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
        files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
    else:
        # special treatment for pythia8
        files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
        files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
        if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):  # this is PY8.1xxx
            files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
            files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
        else:  # this is PY8.2xxx
            files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
    #link the hwpp exe in the rundir
    if shower == 'HERWIGPP':
        try:
            if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
            if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
        except Exception:
            raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')

        if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
            files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)

    files.ln(evt_file, rundir, 'events.lhe')
    for i, f in enumerate(event_files):
        files.ln(f, rundir, 'events_%d.lhe' % (i + 1))

    if not self.shower_card['analyse']:
        # an hep/hepmc file as output
        out_id = 'HEP'
    else:
        # one or more .top file(s) as output
        if "HwU" in self.shower_card['analyse']:
            out_id = 'HWU'
        else:
            out_id = 'TOP'

    # write the executable
    with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
        # set the PATH for the dynamic libraries
        if sys.platform == 'darwin':
            ld_library_path = 'DYLD_LIBRARY_PATH'
        else:
            ld_library_path = 'LD_LIBRARY_PATH'
        fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
                    % {'ld_library_path': ld_library_path,
                       'extralibs': ':'.join(extrapaths)})
    subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])

    if event_files:
        arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
                    for i in range(len(event_files))]
    else:
        arg_list = [[shower, out_id, self.run_name]]

    self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
    self.njobs = 1
    self.wait_for_complete('shower')

    # now collect the results
    message = ''
    warning = ''
    to_gzip = [evt_file]
    if out_id == 'HEP':
        #copy the showered stdhep/hepmc file back in events
        if shower in ['PYTHIA8', 'HERWIGPP']:
            hep_format = 'HEPMC'
            ext = 'hepmc'
        else:
            hep_format = 'StdHEP'
            ext = 'hep'

        hep_file = '%s_%s_0.%s.gz' % \
            (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
        count = 0

        # find the first available name for the output:
        # check existing results with or without event splitting
        while os.path.exists(hep_file) or \
              os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)):
            count += 1
            hep_file = '%s_%s_%d.%s.gz' % \
                (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)

        try:
            if self.shower_card['nsplit_jobs'] == 1:
                files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
                message = ('The file %s has been generated.\nIt contains showered' + \
                           ' and hadronized events in the %s format obtained' + \
                           ' showering the parton-level event file %s.gz with %s') % \
                           (hep_file, hep_format, evt_file, shower)
            else:
                hep_list = []
                for i in range(self.shower_card['nsplit_jobs']):
                    hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
                    files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
                message = ('The following files have been generated:\n %s\nThey contain showered' + \
                           ' and hadronized events in the %s format obtained' + \
                           ' showering the (split) parton-level event file %s.gz with %s') % \
                           ('\n '.join(hep_list), hep_format, evt_file, shower)

        # BUGFIX: was `except OSError, IOError:` which (Python 2) means
        # `except OSError as IOError` -- it caught only OSError and shadowed
        # the builtin IOError. A tuple catches both as intended.
        except (OSError, IOError):
            raise aMCatNLOError('No file has been generated, an error occurred.' + \
                ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))

        # run the plot creation in a secure way
        if hep_format == 'StdHEP':
            try:
                self.do_plot('%s -f' % self.run_name)
            except Exception as error:
                logger.info("Fail to make the plot. Continue...")
                pass

    elif out_id == 'TOP' or out_id == 'HWU':
        #copy the topdrawer or HwU file(s) back in events
        if out_id == 'TOP':
            ext = 'top'
        elif out_id == 'HWU':
            ext = 'HwU'
        topfiles = []
        top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
        for top_tar in top_tars:
            topfiles.extend(top_tar.getnames())

        # safety check
        if len(top_tars) != self.shower_card['nsplit_jobs']:
            raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
                                (self.shower_card['nsplit_jobs'], len(top_tars)))

        # find the first available name for the output:
        # check existing results with or without event splitting
        filename = 'plot_%s_%d_' % (shower, 1)
        count = 1
        while os.path.exists(pjoin(self.me_dir, 'Events',
                                   self.run_name, '%s0.%s' % (filename, ext))) or \
              os.path.exists(pjoin(self.me_dir, 'Events',
                                   self.run_name, '%s0__1.%s' % (filename, ext))):
            count += 1
            filename = 'plot_%s_%d_' % (shower, count)

        if out_id == 'TOP':
            hist_format = 'TopDrawer format'
        elif out_id == 'HWU':
            hist_format = 'HwU and GnuPlot formats'

        if not topfiles:
            # if no topfiles are found just warn the user
            warning = 'No .top file has been generated. For the results of your ' + \
                      'run, please check inside %s' % rundir
        elif self.shower_card['nsplit_jobs'] == 1:
            # only one job for the shower
            top_tars[0].extractall(path=rundir)
            plotfiles = []
            for i, file in enumerate(topfiles):
                if out_id == 'TOP':
                    plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                     '%s%d.top' % (filename, i))
                    files.mv(pjoin(rundir, file), plotfile)
                elif out_id == 'HWU':
                    out = pjoin(self.me_dir, 'Events',
                                self.run_name, '%s%d' % (filename, i))
                    histos = [{'dirname': pjoin(rundir, file)}]
                    self.combine_plots_HwU(histos, out)
                    try:
                        misc.call(['gnuplot', '%s%d.gnuplot' % (filename, i)],
                                  stdout=os.open(os.devnull, os.O_RDWR),
                                  stderr=os.open(os.devnull, os.O_RDWR),
                                  cwd=pjoin(self.me_dir, 'Events', self.run_name))
                    except Exception:
                        pass
                    plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                     '%s%d.HwU' % (filename, i))
                plotfiles.append(plotfile)

            ffiles = 'files'
            have = 'have'
            if len(plotfiles) == 1:
                ffiles = 'file'
                have = 'has'

            message = ('The %s %s %s been generated, with histograms in the' + \
                       ' %s, obtained by showering the parton-level' + \
                       ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
                       hist_format, evt_file, shower)
        else:
            # many jobs for the shower have been run
            topfiles_set = set(topfiles)
            plotfiles = []
            for j, top_tar in enumerate(top_tars):
                top_tar.extractall(path=rundir)
                for i, file in enumerate(topfiles_set):
                    plotfile = pjoin(self.me_dir, 'Events', self.run_name,
                                     '%s%d__%d.%s' % (filename, i, j + 1, ext))
                    files.mv(pjoin(rundir, file), plotfile)
                    plotfiles.append(plotfile)

            # check if the user asked to combine the .top into a single file
            if self.shower_card['combine_td']:
                misc.compile(['sum_plots'], cwd=pjoin(self.me_dir, 'Utilities'))

                if self.banner.get('run_card', 'event_norm').lower() == 'sum':
                    norm = 1.
                else:
                    norm = 1. / float(self.shower_card['nsplit_jobs'])

                plotfiles2 = []
                for i, file in enumerate(topfiles_set):
                    filelist = ['%s%d__%d.%s' % (filename, i, j + 1, ext) \
                                for j in range(self.shower_card['nsplit_jobs'])]
                    if out_id == 'TOP':
                        infile = "%d\n%s\n%s\n" % \
                            (self.shower_card['nsplit_jobs'],
                             '\n'.join(filelist),
                             '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
                        p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
                                       stdin=subprocess.PIPE,
                                       stdout=os.open(os.devnull, os.O_RDWR),
                                       cwd=pjoin(self.me_dir, 'Events', self.run_name))
                        p.communicate(input=infile)
                        files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
                                 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
                    elif out_id == 'HWU':
                        out = pjoin(self.me_dir, 'Events',
                                    self.run_name, '%s%d' % (filename, i))
                        histos = []
                        norms = []
                        for plotfile in plotfiles:
                            histos.append({'dirname': plotfile})
                            norms.append(norm)
                        self.combine_plots_HwU(histos, out, normalisation=norms)
                        try:
                            misc.call(['gnuplot', '%s%d.gnuplot' % (filename, i)],
                                      stdout=os.open(os.devnull, os.O_RDWR),
                                      stderr=os.open(os.devnull, os.O_RDWR),
                                      cwd=pjoin(self.me_dir, 'Events', self.run_name))
                        except Exception:
                            pass

                    plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i, ext)))
                    tar = tarfile.open(
                        pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
                    for f in filelist:
                        tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
                    files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])

                    tar.close()

                ffiles = 'files'
                have = 'have'
                if len(plotfiles2) == 1:
                    ffiles = 'file'
                    have = 'has'

                message = ('The %s %s %s been generated, with histograms in the' + \
                           ' %s, obtained by showering the parton-level' + \
                           ' file %s.gz with %s.\n' + \
                           'The files from the different shower ' + \
                           'jobs (before combining them) can be found inside %s.') % \
                           (ffiles, ', '.join(plotfiles2), have, hist_format, \
                            evt_file, shower,
                            ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))

            else:
                message = ('The following files have been generated:\n %s\n' + \
                           'They contain histograms in the' + \
                           ' %s, obtained by showering the parton-level' + \
                           ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
                           hist_format, evt_file, shower)

    # Now arxiv the shower card used if RunMaterial is present
    run_dir_path = pjoin(rundir, self.run_name)
    if os.path.exists(pjoin(run_dir_path, 'RunMaterial.tar.gz')):
        misc.call(['tar', '-xzpf', 'RunMaterial.tar.gz'], cwd=run_dir_path)
        files.cp(pjoin(self.me_dir, 'Cards', 'shower_card.dat'),
                 pjoin(run_dir_path, 'RunMaterial', 'shower_card_for_%s_%d.dat' \
                       % (shower, count)))
        misc.call(['tar', '-czpf', 'RunMaterial.tar.gz', 'RunMaterial'],
                  cwd=run_dir_path)
        shutil.rmtree(pjoin(run_dir_path, 'RunMaterial'))
    # end of the run, gzip files and print out the message/warning
    for f in to_gzip:
        misc.gzip(f)
    if message:
        logger.info(message)
    if warning:
        logger.warning(warning)

    self.update_status('Run complete', level='shower', update_results=True)
def set_run_name(self, name, tag=None, level='parton', reload_card=False):
    """define the run name, the run_tag, the banner and the results.

    name: the new run name; if it equals the current one, only the tag
        may be refreshed.
    tag: explicit run_tag to use; when None a new tag is picked only if
        the requested `level` already has results in the last tag.
    level: which stage is about to run ('parton', 'shower', 'delphes',
        'madanalysis5_hadron', 'plot', or 'pythia' for the return value).
    reload_card: when True and the name is unchanged, re-read the
        run_card from disk.
    Returns None for parton level; otherwise the tag of a previous run
    holding the data this level needs (may fall through to None when no
    such tag exists).
    """

    # when are we force to change the tag new_run:previous run requiring changes
    # (maps the level about to run -> attributes of the last tag that,
    # if already filled, force picking a fresh tag)
    upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                   'shower': ['shower','delphes','madanalysis5_hadron'],
                   'delphes':['delphes'],
                   'madanalysis5_hadron':['madanalysis5_hadron'],
                   'plot':[]}

    if name == self.run_name:
        if reload_card:
            run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
            self.run_card = banner_mod.RunCardNLO(run_card)

        #check if we need to change the tag
        if tag:
            self.run_card['run_tag'] = tag
            self.run_tag = tag
            self.results.add_run(self.run_name, self.run_card)
        else:
            # no explicit tag: switch to a fresh one only if this level
            # already has data in the last tag (NB: `tag` is rebound here)
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    self.run_tag = tag
                    self.results.add_run(self.run_name, self.run_card)
                    break
        return # Nothing to do anymore

    # save/clean previous run
    if self.run_name:
        self.store_result()
    # store new name
    self.run_name = name

    # Read run_card
    run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
    self.run_card = banner_mod.RunCardNLO(run_card)

    new_tag = False
    # First call for this run -> set the banner
    self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
    if 'mgruncard' in self.banner:
        # prefer the run_card stored in the recovered banner
        self.run_card = self.banner.charge_card('run_card')
    if tag:
        self.run_card['run_tag'] = tag
        new_tag = True
    elif not self.run_name in self.results and level == 'parton':
        pass # No results yet, so current tag is fine
    elif not self.run_name in self.results:
        #This is only for case when you want to trick the interface
        logger.warning('Trying to run data on unknown run.')
        self.results.add_run(name, self.run_card)
        self.results.update('add run %s' % name, 'all', makehtml=True)
    else:
        for tag in upgrade_tag[level]:

            if getattr(self.results[self.run_name][-1], tag):
                # LEVEL is already define in the last tag -> need to switch tag
                tag = self.get_available_tag()
                self.run_card['run_tag'] = tag
                new_tag = True
                break
        if not new_tag:
            # We can add the results to the current run
            tag = self.results[self.run_name][-1]['tag']
            self.run_card['run_tag'] = tag # ensure that run_tag is correct


    if name in self.results and not new_tag:
        self.results.def_current(self.run_name)
    else:
        self.results.add_run(self.run_name, self.run_card)

    self.run_tag = self.run_card['run_tag']

    # Return the tag of the previous run having the required data for this
    # tag/run to work well.
    if level == 'parton':
        return
    elif level == 'pythia':
        return self.results[self.run_name][0]['tag']
    else:
        # scan tags from newest to oldest for one with pythia output
        for i in range(-1, -len(self.results[self.run_name])-1, -1):
            tagRun = self.results[self.run_name][i]
            if tagRun.pythia:
                return tagRun['tag']
4199 - def store_result(self):
4200 """ tar the pythia results. This is done when we are quite sure that 4201 the pythia output will not be use anymore """ 4202 4203 if not self.run_name: 4204 return 4205 4206 self.results.save() 4207 4208 if not self.to_store: 4209 return 4210 4211 if 'event' in self.to_store: 4212 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 4213 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 4214 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 4215 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4216 else: 4217 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 4218 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 4219 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 4220 4221 4222 tag = self.run_card['run_tag'] 4223 4224 self.to_store = []
4225 4226 4227 ############################################################################
4228 - def get_Gdir(self, Pdir=None):
4229 """get the list of Gdirectory if not yet saved.""" 4230 4231 if hasattr(self, "Gdirs"): 4232 if self.me_dir in self.Gdirs: 4233 if Pdir is None: 4234 return sum(self.Gdirs.values()) 4235 else: 4236 return self.Gdirs[Pdir] 4237 4238 Pdirs = self.get_Pdir() 4239 Gdirs = {self.me_dir:[]} 4240 for P in Pdirs: 4241 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and 4242 os.path.isdir(pjoin(P,G))] 4243 4244 self.Gdirs = Gdirs 4245 return self.getGdir(Pdir)
4246 4247
4248 - def get_init_dict(self, evt_file):
4249 """reads the info in the init block and returns them in a dictionary""" 4250 ev_file = open(evt_file) 4251 init = "" 4252 found = False 4253 while True: 4254 line = ev_file.readline() 4255 if "<init>" in line: 4256 found = True 4257 elif found and not line.startswith('#'): 4258 init += line 4259 if "</init>" in line or "<event>" in line: 4260 break 4261 ev_file.close() 4262 4263 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 4264 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 4265 # these are not included (so far) in the init_dict 4266 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 4267 4268 init_dict = {} 4269 init_dict['idbmup1'] = int(init.split()[0]) 4270 init_dict['idbmup2'] = int(init.split()[1]) 4271 init_dict['ebmup1'] = float(init.split()[2]) 4272 init_dict['ebmup2'] = float(init.split()[3]) 4273 init_dict['pdfgup1'] = int(init.split()[4]) 4274 init_dict['pdfgup2'] = int(init.split()[5]) 4275 init_dict['pdfsup1'] = int(init.split()[6]) 4276 init_dict['pdfsup2'] = int(init.split()[7]) 4277 init_dict['idwtup'] = int(init.split()[8]) 4278 init_dict['nprup'] = int(init.split()[9]) 4279 4280 return init_dict
4281 4282
    def banner_to_mcatnlo(self, evt_file):
        """creates the mcatnlo input script using the values set in the header of the event_file.
        It also checks if the lhapdf library is used.

        Writes the collected settings to MCatNLO/banner.dat and returns the
        shower name (upper-cased parton_shower from the run_card).
        """

        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        itry = 0  # NOTE(review): never used in this method
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        # clamp the requested number of shower events to what was generated
        if nevents < 0 or \
           nevents > self.banner.get_detail('run_card', 'nevents'):
            nevents = self.banner.get_detail('run_card', 'nevents')

        # per-split-job share (true division; %d below truncates — presumably
        # intended, TODO confirm)
        nevents = nevents / self.shower_card['nsplit_jobs']

        # Monte Carlo masses by PDG id, from the banner's montecarlomasses block
        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                             self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
        # electroweak/mass/width parameters read from the param_card
        content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
        #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid')
        #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            # no Higgs entry in the param_card: fall back to fixed defaults
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # this is for backward compatibility: read lepton masses from the
            # shower-specific include file (Fortran 'd' exponents -> 'e')
            mcmass_lines = [l for l in \
                    open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                            ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if need to link lhapdf
        if int(self.shower_card['pdfcode']) > 1 or \
            (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \
            shower=='HERWIGPP' :
            # Use LHAPDF (should be correctly installed, because
            # either events were already generated with them, or the
            # user explicitly gives an LHAPDF number in the
            # shower_card).
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                          stdout = subprocess.PIPE).stdout.read().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode']==0:
                lhaid_list = ''
                content += ''
            elif self.shower_card['pdfcode']==1:
                # reuse the PDF set recorded in the event file's init block
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode'])==1 or \
            int(self.shower_card['pdfcode'])==-1 and True:
            # Try to use LHAPDF because user wants to use the same PDF
            # as was used for the event generation. However, for the
            # event generation, LHAPDF was not used, so non-trivial to
            # see if LHAPDF is available with the corresponding PDF
            # set. If not found, give a warning and use build-in PDF
            # set instead.
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                              stdout = subprocess.PIPE).stdout.read().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                                   ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                                   ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                                   ' same set as was used in the event generation install LHAPDF and set the path using'+\
                                   ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            # internal (built-in) PDF sets: no LHAPDF path needed
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # add the pythia8/hwpp path(s)
        if self.options['pythia8_path']:
            content+='PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content+='HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
            content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
            content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
4424 - def run_reweight(self, only):
4425 """runs the reweight_xsec_events executables on each sub-event file generated 4426 to compute on the fly scale and/or PDF uncertainities""" 4427 logger.info(' Doing reweight') 4428 4429 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 4430 # if only doing reweight, copy back the nevents_unweighted file 4431 if only: 4432 if os.path.exists(nev_unw + '.orig'): 4433 files.cp(nev_unw + '.orig', nev_unw) 4434 else: 4435 raise aMCatNLOError('Cannot find event file information') 4436 4437 #read the nevents_unweighted file to get the list of event files 4438 file = open(nev_unw) 4439 lines = file.read().split('\n') 4440 file.close() 4441 # make copy of the original nevent_unweighted file 4442 files.cp(nev_unw, nev_unw + '.orig') 4443 # loop over lines (all but the last one whith is empty) and check that the 4444 # number of events is not 0 4445 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 4446 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0'] 4447 if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0: 4448 evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts] 4449 #prepare the job_dict 4450 job_dict = {} 4451 exe = 'reweight_xsec_events.local' 4452 for i, evt_file in enumerate(evt_files): 4453 path, evt = os.path.split(evt_file) 4454 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 4455 pjoin(self.me_dir, 'SubProcesses', path)) 4456 job_dict[path] = [exe] 4457 4458 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 4459 4460 #check that the new event files are complete 4461 for evt_file in evt_files: 4462 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 4463 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 4464 stdout = subprocess.PIPE).stdout.read().strip() 4465 if last_line != "</LesHouchesEvents>": 4466 raise aMCatNLOError('An error occurred during reweight. 
Check the' + \ 4467 '\'reweight_xsec_events.output\' files inside the ' + \ 4468 '\'SubProcesses/P*/G*/ directories for details') 4469 4470 #update file name in nevents_unweighted 4471 newfile = open(nev_unw, 'w') 4472 for line in lines: 4473 if line: 4474 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 4475 newfile.close() 4476 4477 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4478
    def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/pdf_scale_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents.  The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ...  n_pdf
        xsec_pdf0 xsec_pdf1 ....

        Returns [scale_info, pdf_info]: two lists of dictionaries, one entry
        per dynamical scale choice / per PDF set.
        """

        scales=[]
        pdfs=[]
        # accumulate the weighted cross sections over all event files;
        # index j runs over scale choices / PDF sets, the inner lists over
        # the individual variation weights
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        # Fortran 'D' exponents -> python 'E'
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            # add this file's contribution to the running sums
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            # first file: initialize the sums
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # get the scale uncertainty in percent
        scale_info=[]
        for j,scale in enumerate(scales):
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen) # central fac scale
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                # central value is zero or reweighting disabled: no uncertainty
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # check if we can use LHAPDF to compute the PDF uncertainty
        if any(self.run_card['reweight_pdf']):
            use_lhapdf=False
            lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                           stdout=subprocess.PIPE).stdout.read().strip()

            # look for a python*/site-packages/lhapdf.so under the LHAPDF
            # library directory and make it importable
            try:
                candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                            if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
            except OSError:
                candidates=[]
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
                    sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf=True
                        break
                    except ImportError:
                        sys.path.pop(0)
                        continue

            if not use_lhapdf:
                # retry with the '<libdir>64' variant used on some systems
                try:
                    candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                                if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
                except OSError:
                    candidates=[]
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                        sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf=True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue

            if not use_lhapdf:
                # last resort: maybe lhapdf is already on the default path
                try:
                    import lhapdf
                    use_lhapdf=True
                except ImportError:
                    # NOTE(review): '...variable and the' + 'LHAPDF library'
                    # concatenates without a space in the emitted warning
                    logger.warning("Failed to access python version of LHAPDF: "\
                                   "cannot compute PDF uncertainty from the "\
                                   "weights in the events. The weights in the LHE " \
                                   "event files will still cover all PDF set members, "\
                                   "but there will be no PDF uncertainty printed in the run summary. \n "\
                                   "If the python interface to LHAPDF is available on your system, try "\
                                   "adding its location to the PYTHONPATH environment variable and the"\
                                   "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf=False

        # turn off lhapdf printing any messages
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    # let LHAPDF combine the member weights into an
                    # uncertainty for this set
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                # central value zero or reweighting disabled for this set
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
4643 - def wait_for_complete(self, run_type):
4644 """this function waits for jobs on cluster to complete their run.""" 4645 starttime = time.time() 4646 #logger.info(' Waiting for submitted jobs to complete') 4647 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 4648 starttime=starttime, level='parton', update_results=True) 4649 try: 4650 self.cluster.wait(self.me_dir, update_status) 4651 except: 4652 self.cluster.remove() 4653 raise
4654
4655 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4656 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 4657 self.ijob = 0 4658 if run_type != 'shower': 4659 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 4660 for args in arg_list: 4661 for Pdir, jobs in job_dict.items(): 4662 for job in jobs: 4663 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 4664 if self.cluster_mode == 2: 4665 time.sleep(1) # security to allow all jobs to be launched 4666 else: 4667 self.njobs = len(arg_list) 4668 for args in arg_list: 4669 [(cwd, exe)] = job_dict.items() 4670 self.run_exe(exe, args, run_type, cwd) 4671 4672 self.wait_for_complete(run_type)
4673 4674 4675
4676 - def check_event_files(self,jobs):
4677 """check the integrity of the event files after splitting, and resubmit 4678 those which are not nicely terminated""" 4679 jobs_to_resubmit = [] 4680 for job in jobs: 4681 last_line = '' 4682 try: 4683 last_line = subprocess.Popen( 4684 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 4685 stdout = subprocess.PIPE).stdout.read().strip() 4686 except IOError: 4687 pass 4688 if last_line != "</LesHouchesEvents>": 4689 jobs_to_resubmit.append(job) 4690 self.njobs = 0 4691 if jobs_to_resubmit: 4692 run_type = 'Resubmitting broken jobs' 4693 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 4694 for job in jobs_to_resubmit: 4695 logger.debug('Resubmitting ' + job['dirname'] + '\n') 4696 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4697 4698
4699 - def find_jobs_to_split(self, pdir, job, arg):
4700 """looks into the nevents_unweighed_splitted file to check how many 4701 split jobs are needed for this (pdir, job). arg is F, B or V""" 4702 # find the number of the integration channel 4703 splittings = [] 4704 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 4705 pattern = re.compile('for i in (\d+) ; do') 4706 match = re.search(pattern, ajob) 4707 channel = match.groups()[0] 4708 # then open the nevents_unweighted_splitted file and look for the 4709 # number of splittings to be done 4710 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 4711 # This skips the channels with zero events, because they are 4712 # not of the form GFXX_YY, but simply GFXX 4713 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 4714 pjoin(pdir, 'G%s%s' % (arg,channel))) 4715 matches = re.findall(pattern, nevents_file) 4716 for m in matches: 4717 splittings.append(m) 4718 return splittings
4719 4720
    def run_exe(self, exe, args, run_type, cwd=None):
        """this basic function launch locally/on cluster exe with args as argument.

        Serial mode (cluster_mode == 0) runs the executable directly and
        updates the status counters; otherwise the job is submitted to the
        cluster, with input/output file lists depending on the job flavour
        ('reweight', 'ajob', 'shower' or generic).
        """
        # first test that exe exists:
        execpath = None
        if cwd and os.path.exists(pjoin(cwd, exe)):
            execpath = pjoin(cwd, exe)
        elif not cwd and os.path.exists(exe):
            execpath = exe
        else:
            raise aMCatNLOError('Cannot find executable %s in %s' \
                % (exe, os.getcwd()))
        # check that the executable has exec permissions
        if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
            subprocess.call(['chmod', '+x', exe], cwd=cwd)
        # finally run it
        if self.cluster_mode == 0:
            #this is for the serial run
            misc.call(['./'+exe] + args, cwd=cwd)
            self.ijob += 1
            self.update_status((max([self.njobs - self.ijob - 1, 0]),
                                min([1, self.njobs - self.ijob]),
                                self.ijob, run_type), level='parton')

        #this is for the cluster/multicore run
        elif 'reweight' in exe:
            # a reweight run
            # Find the correct PDF input file
            input_files, output_files = [], []
            pdfinput = self.get_pdf_input_filename()
            if os.path.exists(pdfinput):
                input_files.append(pdfinput)
            input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
            input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
            input_files.append(args[0])
            output_files.append('%s.rwgt' % os.path.basename(args[0]))
            output_files.append('reweight_xsec_events.output')
            output_files.append('scale_pdf_dependence.dat')

            return self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=output_files)

        elif 'ajob' in exe:
            # the 'standard' amcatnlo job
            # check if args is a list of string
            if type(args[0]) == str:
                input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
                #submitting
                self.cluster.submit2(exe, args, cwd=cwd,
                             input_files=input_files, output_files=output_files,
                             required_output=required_output)

#                # keep track of folders and arguments for splitted evt gen
#                subfolder=output_files[-1].split('/')[0]
#                if len(args) == 4 and '_' in subfolder:
#                    self.split_folders[pjoin(cwd,subfolder)] = [exe] + args

        elif 'shower' in exe:
            # a shower job
            # args are [shower, output(HEP or TOP), run_name]
            # cwd is the shower rundir, where the executable are found
            input_files, output_files = [], []
            shower = args[0]
            # the input files
            if shower == 'PYTHIA8':
                input_files.append(pjoin(cwd, 'Pythia8.exe'))
                input_files.append(pjoin(cwd, 'Pythia8.cmd'))
                # the xmldoc location differs between Pythia8 layouts
                if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
                    input_files.append(pjoin(cwd, 'config.sh'))
                    input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
                else:
                    input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
            else:
                input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
                input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
                if shower == 'HERWIGPP':
                    # ship whichever Herwig executable name is installed
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
                        input_files.append(pjoin(cwd, 'Herwig++'))
                    if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
                        input_files.append(pjoin(cwd, 'Herwig'))
                    input_files.append(pjoin(cwd, 'HepMCFortran.so'))
            # len(args) == 3 means a non-split run; split runs carry the
            # split index as args[3]
            if len(args) == 3:
                if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
                elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
                    input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
                else:
                    raise aMCatNLOError, 'Event file not present in %s' % \
                            pjoin(self.me_dir, 'Events', self.run_name)
            else:
                input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
            # the output files
            if len(args) == 3:
                output_files.append('mcatnlo_run.log')
            else:
                output_files.append('mcatnlo_run_%s.log' % args[3])
            if args[1] == 'HEP':
                if len(args) == 3:
                    fname = 'events'
                else:
                    fname = 'events_%s' % args[3]
                if shower in ['PYTHIA8', 'HERWIGPP']:
                    output_files.append(fname + '.hepmc.gz')
                else:
                    output_files.append(fname + '.hep.gz')
            elif args[1] == 'TOP' or args[1] == 'HWU':
                if len(args) == 3:
                    fname = 'histfile'
                else:
                    fname = 'histfile_%s' % args[3]
                output_files.append(fname + '.tar')
            else:
                # NOTE(review): '%d' with a string args[1] would itself raise
                # a TypeError here — confirm intended format specifier
                raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
            #submitting
            self.cluster.submit2(exe, args, cwd=cwd,
                    input_files=input_files, output_files=output_files)

        else:
            # generic job: plain submission without file bookkeeping
            return self.cluster.submit(exe, args, cwd=cwd)
    def getIO_ajob(self,exe,cwd, args):
        """Build the input/output/required-output file lists for a standard
        'ajob' submission, so the cluster can use local disk if possible.

        args[1] selects the mode ('born'/'all' for fixed-order MINT,
        'F'/'B' for MINTMC event generation); args[0] is the channel,
        args[2] the split index ('0' = unsplit) and args[3] the MINT step.
        Returns (input_files, output_files, required_output, args).
        """
        # use local disk if possible => need to stands what are the
        # input/output files

        output_files = []
        required_output = []
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        # For GoSam interface, we must copy the SLHA card as well
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # File for the loop (might not be present if MadLoop is not used)
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            # pack the resources directory on the fly for transfer
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                            dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # MADEVENT MINT FO MODE
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # MINTMC MODE
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            # NOTE(review): string comparison — relies on split indices being
            # single-digit-style strings; confirm
            if args[2] > '0':
                # this is for the split event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))

        #Find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args
4930 - def compile(self, mode, options):
4931 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4932 specified in mode""" 4933 4934 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4935 4936 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4937 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4938 4939 self.get_characteristics(pjoin(self.me_dir, 4940 'SubProcesses', 'proc_characteristics')) 4941 4942 #define a bunch of log files 4943 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4944 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4945 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4946 test_log = pjoin(self.me_dir, 'test.log') 4947 4948 # environmental variables to be included in make_opts 4949 self.make_opts_var = {} 4950 if self.proc_characteristics['has_loops'] and \ 4951 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4952 self.make_opts_var['madloop'] = 'true' 4953 4954 self.update_status('Compiling the code', level=None, update_results=True) 4955 4956 libdir = pjoin(self.me_dir, 'lib') 4957 sourcedir = pjoin(self.me_dir, 'Source') 4958 4959 #clean files 4960 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4961 #define which executable/tests to compile 4962 if '+' in mode: 4963 mode = mode.split('+')[0] 4964 if mode in ['NLO', 'LO']: 4965 exe = 'madevent_mintFO' 4966 tests = ['test_ME'] 4967 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4968 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4969 exe = 'madevent_mintMC' 4970 tests = ['test_ME', 'test_MC'] 4971 # write an analyse_opts with a dummy analysis so that compilation goes through 4972 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 4973 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4974 4975 #directory where to compile exe 4976 p_dirs = [d for d in \ 4977 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 
4978 # create param_card.inc and run_card.inc 4979 self.do_treatcards('', amcatnlo=True, mode=mode) 4980 # if --nocompile option is specified, check here that all exes exists. 4981 # If they exists, return 4982 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 4983 for p_dir in p_dirs]) and options['nocompile']: 4984 return 4985 4986 # rm links to lhapdflib/ PDFsets if exist 4987 if os.path.exists(pjoin(libdir, 'PDFsets')): 4988 files.rm(pjoin(libdir, 'PDFsets')) 4989 4990 # read the run_card to find if lhapdf is used or not 4991 if self.run_card['pdlabel'] == 'lhapdf' and \ 4992 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 4993 self.banner.get_detail('run_card', 'lpp2') != 0): 4994 4995 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 4996 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4997 lhaid_list = self.run_card['lhaid'] 4998 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4999 5000 else: 5001 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 5002 logger.info('Using built-in libraries for PDFs') 5003 5004 self.make_opts_var['lhapdf'] = "" 5005 5006 # read the run_card to find if applgrid is used or not 5007 if self.run_card['iappl'] != 0: 5008 self.make_opts_var['applgrid'] = 'True' 5009 # check versions of applgrid and amcfast 5010 for code in ['applgrid','amcfast']: 5011 try: 5012 p = subprocess.Popen([self.options[code], '--version'], \ 5013 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 5014 except OSError: 5015 raise aMCatNLOError(('No valid %s installation found. \n' + \ 5016 'Please set the path to %s-config by using \n' + \ 5017 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 5018 else: 5019 output, _ = p.communicate() 5020 if code is 'applgrid' and output < '1.4.63': 5021 raise aMCatNLOError('Version of APPLgrid is too old. 
Use 1.4.69 or later.'\ 5022 +' You are using %s',output) 5023 if code is 'amcfast' and output < '1.1.1': 5024 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 5025 +' You are using %s',output) 5026 5027 # set-up the Source/make_opts with the correct applgrid-config file 5028 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 5029 % (self.options['amcfast'],self.options['applgrid']) 5030 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 5031 text_out=[] 5032 for line in text: 5033 if line.strip().startswith('APPLLIBS=$'): 5034 line=appllibs 5035 text_out.append(line) 5036 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 5037 fsock.writelines(text_out) 5038 else: 5039 self.make_opts_var['applgrid'] = "" 5040 5041 if 'fastjet' in self.options.keys() and self.options['fastjet']: 5042 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 5043 5044 # add the make_opts_var to make_opts 5045 self.update_make_opts() 5046 5047 # make Source 5048 self.update_status('Compiling source...', level=None) 5049 misc.compile(['clean4pdf'], cwd = sourcedir) 5050 misc.compile(cwd = sourcedir) 5051 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 5052 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 5053 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 5054 and os.path.exists(pjoin(libdir, 'libpdf.a')): 5055 logger.info(' ...done, continuing with P* directories') 5056 else: 5057 raise aMCatNLOError('Compilation failed') 5058 5059 # make StdHep (only necessary with MG option output_dependencies='internal') 5060 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 5061 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 5062 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 5063 if os.path.exists(pjoin(sourcedir,'StdHEP')): 5064 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 5065 misc.compile(['StdHEP'], cwd = sourcedir) 
5066 logger.info(' ...done.') 5067 else: 5068 raise aMCatNLOError('Could not compile StdHEP because its'+\ 5069 ' source directory could not be found in the SOURCE folder.\n'+\ 5070 " Check the MG5_aMC option 'output_dependencies.'") 5071 5072 # make CutTools (only necessary with MG option output_dependencies='internal') 5073 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5074 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5075 if os.path.exists(pjoin(sourcedir,'CutTools')): 5076 logger.info('Compiling CutTools (can take a couple of minutes) ...') 5077 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5078 logger.info(' ...done.') 5079 else: 5080 raise aMCatNLOError('Could not compile CutTools because its'+\ 5081 ' source directory could not be found in the SOURCE folder.\n'+\ 5082 " Check the MG5_aMC option 'output_dependencies.'") 5083 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 5084 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 5085 raise aMCatNLOError('CutTools compilation failed.') 5086 5087 # Verify compatibility between current compiler and the one which was 5088 # used when last compiling CutTools (if specified). 5089 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5090 libdir, 'libcts.a')))),'compiler_version.log') 5091 if os.path.exists(compiler_log_path): 5092 compiler_version_used = open(compiler_log_path,'r').read() 5093 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5094 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5095 if os.path.exists(pjoin(sourcedir,'CutTools')): 5096 logger.info('CutTools was compiled with a different fortran'+\ 5097 ' compiler. 
Re-compiling it now...') 5098 misc.compile(['cleanCT'], cwd = sourcedir) 5099 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 5100 logger.info(' ...done.') 5101 else: 5102 raise aMCatNLOError("CutTools installation in %s"\ 5103 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 5104 " seems to have been compiled with a different compiler than"+\ 5105 " the one specified in MG5_aMC. Please recompile CutTools.") 5106 5107 # make IREGI (only necessary with MG option output_dependencies='internal') 5108 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 5109 and os.path.exists(pjoin(sourcedir,'IREGI')): 5110 logger.info('Compiling IREGI (can take a couple of minutes) ...') 5111 misc.compile(['IREGI'], cwd = sourcedir) 5112 logger.info(' ...done.') 5113 5114 if os.path.exists(pjoin(libdir, 'libiregi.a')): 5115 # Verify compatibility between current compiler and the one which was 5116 # used when last compiling IREGI (if specified). 5117 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 5118 libdir, 'libiregi.a')))),'compiler_version.log') 5119 if os.path.exists(compiler_log_path): 5120 compiler_version_used = open(compiler_log_path,'r').read() 5121 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 5122 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 5123 if os.path.exists(pjoin(sourcedir,'IREGI')): 5124 logger.info('IREGI was compiled with a different fortran'+\ 5125 ' compiler. Re-compiling it now...') 5126 misc.compile(['cleanIR'], cwd = sourcedir) 5127 misc.compile(['IREGI'], cwd = sourcedir) 5128 logger.info(' ...done.') 5129 else: 5130 raise aMCatNLOError("IREGI installation in %s"\ 5131 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 5132 " seems to have been compiled with a different compiler than"+\ 5133 " the one specified in MG5_aMC. 
Please recompile IREGI.") 5134 5135 # check if MadLoop virtuals have been generated 5136 if self.proc_characteristics['has_loops'] and \ 5137 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 5138 if mode in ['NLO', 'aMC@NLO', 'noshower']: 5139 tests.append('check_poles') 5140 5141 # make and run tests (if asked for), gensym and make madevent in each dir 5142 self.update_status('Compiling directories...', level=None) 5143 5144 for test in tests: 5145 self.write_test_input(test) 5146 5147 try: 5148 import multiprocessing 5149 if not self.nb_core: 5150 try: 5151 self.nb_core = int(self.options['nb_core']) 5152 except TypeError: 5153 self.nb_core = multiprocessing.cpu_count() 5154 except ImportError: 5155 self.nb_core = 1 5156 5157 compile_options = copy.copy(self.options) 5158 compile_options['nb_core'] = self.nb_core 5159 compile_cluster = cluster.MultiCore(**compile_options) 5160 logger.info('Compiling on %d cores' % self.nb_core) 5161 5162 update_status = lambda i, r, f: self.donothing(i,r,f) 5163 for p_dir in p_dirs: 5164 compile_cluster.submit(prog = compile_dir, 5165 argument = [self.me_dir, p_dir, mode, options, 5166 tests, exe, self.options['run_mode']]) 5167 try: 5168 compile_cluster.wait(self.me_dir, update_status) 5169 except Exception, error: 5170 logger.warning("Fail to compile the Subprocesses") 5171 if __debug__: 5172 raise 5173 compile_cluster.remove() 5174 self.do_quit('') 5175 5176 logger.info('Checking test output:') 5177 for p_dir in p_dirs: 5178 logger.info(p_dir) 5179 for test in tests: 5180 logger.info(' Result for %s:' % test) 5181 5182 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 5183 #check that none of the tests failed 5184 self.check_tests(test, this_dir)
5185 5186
5187 - def donothing(*args):
5188 pass
5189 5190
5191 - def check_tests(self, test, dir):
5192 """just call the correct parser for the test log. 5193 Skip check_poles for LOonly folders""" 5194 if test in ['test_ME', 'test_MC']: 5195 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 5196 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 5197 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5198 5199
5200 - def parse_test_mx_log(self, log):
5201 """read and parse the test_ME/MC.log file""" 5202 content = open(log).read() 5203 if 'FAILED' in content: 5204 logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD') 5205 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 5206 'Please check that widths of final state particles (e.g. top) have been' + \ 5207 ' set to 0 in the param_card.dat.') 5208 else: 5209 lines = [l for l in content.split('\n') if 'PASSED' in l] 5210 logger.info(' Passed.') 5211 logger.debug('\n'+'\n'.join(lines))
5212 5213
5214 - def parse_check_poles_log(self, log):
5215 """reads and parse the check_poles.log file""" 5216 content = open(log).read() 5217 npass = 0 5218 nfail = 0 5219 for line in content.split('\n'): 5220 if 'PASSED' in line: 5221 npass +=1 5222 tolerance = float(line.split()[1]) 5223 if 'FAILED' in line: 5224 nfail +=1 5225 tolerance = float(line.split()[1]) 5226 5227 if nfail + npass == 0: 5228 logger.warning('0 points have been tried') 5229 return 5230 5231 if float(nfail)/float(nfail+npass) > 0.1: 5232 raise aMCatNLOError('Poles do not cancel, run cannot continue') 5233 else: 5234 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 5235 %(npass, nfail+npass, tolerance))
5236 5237
5238 - def write_test_input(self, test):
5239 """write the input files to run test_ME/MC or check_poles""" 5240 if test in ['test_ME', 'test_MC']: 5241 content = "-2 -2\n" #generate randomly energy/angle 5242 content+= "100 100\n" #run 100 points for soft and collinear tests 5243 content+= "0\n" #all FKS configs 5244 content+= '\n'.join(["-1"] * 50) #random diagram (=first diagram) 5245 elif test == 'check_poles': 5246 content = '20 \n -1\n' 5247 5248 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 5249 if test == 'test_MC': 5250 shower = self.run_card['parton_shower'] 5251 header = "1 \n %s\n 1 -0.1\n-1 -0.1\n" % shower 5252 file.write(header + content) 5253 elif test == 'test_ME': 5254 header = "2 \n" 5255 file.write(header + content) 5256 else: 5257 file.write(content) 5258 file.close()


    # Dialog class used by ask_run_configuration below to present the
    # run-mode switch question (order/shower/madspin/reweight) to the user.
    action_switcher = AskRunNLO
    ############################################################################
5263 - def ask_run_configuration(self, mode, options, switch={}):
5264 """Ask the question when launching generate_events/multi_run""" 5265 5266 if 'parton' not in options: 5267 options['parton'] = False 5268 if 'reweightonly' not in options: 5269 options['reweightonly'] = False 5270 5271 if mode == 'auto': 5272 mode = None 5273 if not mode and (options['parton'] or options['reweightonly']): 5274 mode = 'noshower' 5275 5276 passing_cmd = [] 5277 for key,value in switch.keys(): 5278 passing_cmd.append('%s=%s' % (key,value)) 5279 5280 if 'do_reweight' in options and options['do_reweight']: 5281 passing_cmd.append('reweight=ON') 5282 if 'do_madspin' in options and options['do_madspin']: 5283 passing_cmd.append('madspin=ON') 5284 5285 force = self.force 5286 if mode == 'onlyshower': 5287 passing_cmd.append('onlyshower') 5288 force = True 5289 elif mode: 5290 passing_cmd.append(mode) 5291 5292 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher, 5293 mode=mode, force=force, 5294 first_cmd=passing_cmd, 5295 return_instance=True) 5296 5297 if 'mode' in switch: 5298 mode = switch['mode'] 5299 5300 #assign the mode depending of the switch 5301 if not mode or mode == 'auto': 5302 if switch['order'] == 'LO': 5303 if switch['runshower']: 5304 mode = 'aMC@LO' 5305 elif switch['fixed_order'] == 'ON': 5306 mode = 'LO' 5307 else: 5308 mode = 'noshowerLO' 5309 elif switch['order'] == 'NLO': 5310 if switch['runshower']: 5311 mode = 'aMC@NLO' 5312 elif switch['fixed_order'] == 'ON': 5313 mode = 'NLO' 5314 else: 5315 mode = 'noshower' 5316 logger.info('will run in mode: %s' % mode) 5317 5318 if mode == 'noshower': 5319 if switch['shower'] == 'OFF': 5320 logger.warning("""You have chosen not to run a parton shower. 5321 NLO events without showering are NOT physical. 5322 Please, shower the LesHouches events before using them for physics analyses. 5323 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""") 5324 else: 5325 logger.info("""Your Parton-shower choice is not available for running. 
5326 The events will be generated for the associated Parton-Shower. 5327 Remember that NLO events without showering are NOT physical.""", '$MG:BOLD') 5328 5329 5330 # specify the cards which are needed for this run. 5331 cards = ['param_card.dat', 'run_card.dat'] 5332 ignore = [] 5333 if mode in ['LO', 'NLO']: 5334 options['parton'] = True 5335 ignore = ['shower_card.dat', 'madspin_card.dat'] 5336 cards.append('FO_analyse_card.dat') 5337 else: 5338 if switch['madspin'] != 'OFF': 5339 cards.append('madspin_card.dat') 5340 if switch['reweight'] != 'OFF': 5341 cards.append('reweight_card.dat') 5342 if switch['madanalysis'] in ['HADRON', 'ON']: 5343 cards.append('madanalysis5_hadron_card.dat') 5344 if 'aMC@' in mode: 5345 cards.append('shower_card.dat') 5346 if mode == 'onlyshower': 5347 cards = ['shower_card.dat'] 5348 if options['reweightonly']: 5349 cards = ['run_card.dat'] 5350 5351 self.keep_cards(cards, ignore) 5352 5353 if mode =='onlyshower': 5354 cards = ['shower_card.dat'] 5355 5356 5357 # automatically switch to keep_wgt option 5358 first_cmd = cmd_switch.get_cardcmd() 5359 5360 if not options['force'] and not self.force: 5361 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 5362 5363 self.banner = banner_mod.Banner() 5364 5365 # store the cards in the banner 5366 for card in cards: 5367 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 5368 # and the run settings 5369 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 5370 self.banner.add_text('run_settings', run_settings) 5371 5372 if not mode =='onlyshower': 5373 self.run_card = self.banner.charge_card('run_card') 5374 self.run_tag = self.run_card['run_tag'] 5375 #this is if the user did not provide a name for the current run 5376 if not hasattr(self, 'run_name') or not self.run_name: 5377 self.run_name = self.find_available_run_name(self.me_dir) 5378 #add a tag in the run_name for distinguish run_type 5379 if self.run_name.startswith('run_'): 5380 if mode in 
['LO','aMC@LO','noshowerLO']: 5381 self.run_name += '_LO' 5382 self.set_run_name(self.run_name, self.run_tag, 'parton') 5383 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 5384 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 5385 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 5386 logger.warning("""You are running with FxFx merging enabled. To be able to merge 5387 samples of various multiplicities without double counting, you 5388 have to remove some events after showering 'by hand'. Please 5389 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 5390 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 5391 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 5392 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 5393 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 5394 "Type \'n\' to stop or \'y\' to continue" 5395 answers = ['n','y'] 5396 answer = self.ask(question, 'n', answers, alias=alias) 5397 if answer == 'n': 5398 error = '''Stop opertation''' 5399 self.ask_run_configuration(mode, options) 5400 # raise aMCatNLOError(error) 5401 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 5402 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 5403 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 5404 if 'aMC@' in mode or mode == 'onlyshower': 5405 self.shower_card = self.banner.charge_card('shower_card') 5406 5407 elif mode in ['LO', 'NLO']: 5408 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 5409 self.analyse_card = self.banner.charge_card('FO_analyse_card') 5410 5411 return mode
5412


#===============================================================================
# aMCatNLOCmd
#===============================================================================
class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
    """The command line processor of MadGraph: the interactive-shell flavour
    of aMCatNLOCmd (behaviour comes entirely from the two base classes)."""

# ------------------------------------------------------------------------
# Module-level usage strings and optparse parsers for the user commands
# (compile, launch, generate_events, calculate_xsect, shower) handled by
# aMCatNLOCmd.
# ------------------------------------------------------------------------
_compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                "   MODE can be either FO, for fixed-order computations, \n" + \
                "   or MC for matching with parton-shower monte-carlos. \n" + \
                "   (if omitted, it is set to MC)\n"
_compile_parser = misc.OptionParser(usage=_compile_usage)
_compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

_launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

_launch_parser = misc.OptionParser(usage=_launch_usage)
_launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
_launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")



_generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

_generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
_generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
_generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
_generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
_generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
_generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")



_calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

_calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
_calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
_calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
_calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
_calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
_calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
_calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
_calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

_shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, MonteCarlo, ...\n' + \
                '   are directly read from the header of the event file\n'
_shower_parser = misc.OptionParser(usage=_shower_usage)
_shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Launch the interface without any check if one code is already running.
    # This can ONLY run a single command !!
    import sys
    # This module is Python 2 only (print statements, 'except X, e' syntax).
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
                 'Please upgrate your version of python.')

    import os
    import optparse
    # Get the directory of the script real path (bin)
    # and add it to the current PYTHONPATH
    root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
    sys.path.insert(0, root_path)

    class MyOptParser(optparse.OptionParser):
        """OptionParser that raises InvalidOption instead of exiting,
        so unknown flags can be forwarded to the command itself."""
        class InvalidOption(Exception): pass
        def error(self, msg=''):
            raise MyOptParser.InvalidOption(msg)
    # Write out nice usage message if called with -h or --help
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                      help='force toce to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                      help='force to launch debug mode')
    parser_error = ''
    done = False

    # Progressively drop trailing arguments until the remaining prefix
    # parses cleanly; the dropped tail is re-appended as command arguments.
    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption, error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # raise correct error:
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption, error:
            print error
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Set logging level according to the logging level given by options
    #logging.basicConfig(level=vars(logging)[options.logging])
    import internal.coloring_logging
    try:
        # under python -O __debug__ is False, so production runs keep INFO
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            # maps e.g. 'WARNING' to logging.WARNING
            level = eval('logging.' + options.logging)
        print os.path.join(root_path, 'internal', 'me5_logging.conf')
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        # NOTE(review): the bare 'raise' makes the following 'pass'
        # unreachable - any logging-setup failure aborts the program.
        raise
        pass

    # Call the cmd interface main loop
    try:
        if args:
            # a single command is provided
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                # web mode: plain (non-shell) command processor
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True)

            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print parser_error
                    print 'and %s can not be interpreted as a valid command.' % args[0]
                else:
                    print 'ERROR: %s not a valid command. Please retry' % args[0]
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print 'quit on KeyboardInterrupt'
        pass