Package madgraph :: Package interface :: Module amcatnlo_run_interface
[hide private]
[frames] | no frames]

Source Code for Module madgraph.interface.amcatnlo_run_interface

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2011 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """A user friendly command line interface to access MadGraph5_aMC@NLO features. 
  16     Uses the cmd package for command interpretation and tab completion. 
  17  """ 
  18  from __future__ import division 
  19   
  20  import atexit 
  21  import glob 
  22  import logging 
  23  import math 
  24  import optparse 
  25  import os 
  26  import pydoc 
  27  import random 
  28  import re 
  29  import shutil 
  30  import subprocess 
  31  import sys 
  32  import traceback 
  33  import time 
  34  import signal 
  35  import tarfile 
  36  import copy 
  37  import datetime 
  38  import tarfile 
  39  import traceback 
  40  import StringIO 
  41  try: 
  42      import cpickle as pickle 
  43  except: 
  44      import pickle 
  45   
  46  try: 
  47      import readline 
  48      GNU_SPLITTING = ('GNU' in readline.__doc__) 
  49  except: 
  50      GNU_SPLITTING = True 
  51   
  52  root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0] 
  53  root_path = os.path.split(root_path)[0] 
  54  sys.path.insert(0, os.path.join(root_path,'bin')) 
  55   
  56  # usefull shortcut 
  57  pjoin = os.path.join 
  58  # Special logger for the Cmd Interface 
  59  logger = logging.getLogger('madgraph.stdout') # -> stdout 
  60  logger_stderr = logging.getLogger('madgraph.stderr') # ->stderr 
  61    
  62  try: 
  63      import madgraph 
  64  except ImportError:  
  65      aMCatNLO = True  
  66      import internal.extended_cmd as cmd 
  67      import internal.common_run_interface as common_run 
  68      import internal.banner as banner_mod 
  69      import internal.misc as misc     
  70      from internal import InvalidCmd, MadGraph5Error 
  71      import internal.files as files 
  72      import internal.cluster as cluster 
  73      import internal.save_load_object as save_load_object 
  74      import internal.gen_crossxhtml as gen_crossxhtml 
  75      import internal.sum_html as sum_html 
  76      import internal.shower_card as shower_card 
  77      import internal.FO_analyse_card as analyse_card  
  78      import internal.lhe_parser as lhe_parser 
  79  else: 
  80      # import from madgraph directory 
  81      aMCatNLO = False 
  82      import madgraph.interface.extended_cmd as cmd 
  83      import madgraph.interface.common_run_interface as common_run 
  84      import madgraph.iolibs.files as files 
  85      import madgraph.iolibs.save_load_object as save_load_object 
  86      import madgraph.madevent.gen_crossxhtml as gen_crossxhtml 
  87      import madgraph.madevent.sum_html as sum_html 
  88      import madgraph.various.banner as banner_mod 
  89      import madgraph.various.cluster as cluster 
  90      import madgraph.various.misc as misc 
  91      import madgraph.various.shower_card as shower_card 
  92      import madgraph.various.FO_analyse_card as analyse_card 
  93      import madgraph.various.lhe_parser as lhe_parser 
  94      from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR 
  95   
96 -class aMCatNLOError(Exception):
97 pass
98 99
100 -def compile_dir(*arguments):
101 """compile the direcory p_dir 102 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode) 103 this function needs not to be a class method in order to do 104 the compilation on multicore""" 105 106 if len(arguments) == 1: 107 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0] 108 elif len(arguments)==7: 109 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments 110 else: 111 raise aMCatNLOError, 'not correct number of argument' 112 logger.info(' Compiling %s...' % p_dir) 113 114 this_dir = pjoin(me_dir, 'SubProcesses', p_dir) 115 116 try: 117 #compile everything 118 # compile and run tests 119 for test in tests: 120 # skip check_poles for LOonly dirs 121 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')): 122 continue 123 misc.compile([test], cwd = this_dir, job_specs = False) 124 input = pjoin(me_dir, '%s_input.txt' % test) 125 #this can be improved/better written to handle the output 126 misc.call(['./%s' % (test)], cwd=this_dir, 127 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'), 128 close_fds=True) 129 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) : 130 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz', 131 dereference=True) 132 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources') 133 tf.close() 134 135 if not options['reweightonly']: 136 misc.compile(['gensym'], cwd=this_dir, job_specs = False) 137 open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode) 138 misc.call(['./gensym'],cwd= this_dir, 139 stdin=open(pjoin(this_dir, 'gensym_input.txt')), 140 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'), 141 close_fds=True) 142 #compile madevent_mintMC/mintFO 143 misc.compile([exe], cwd=this_dir, job_specs = False) 144 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 145 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False) 146 147 logger.info(' %s done.' % p_dir) 148 return 0 149 except MadGraph5Error, msg: 150 return msg
151 152
153 -def check_compiler(options, block=False):
154 """check that the current fortran compiler is gfortran 4.6 or later. 155 If block, stops the execution, otherwise just print a warning""" 156 157 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \ 158 'gfortran 4.6 or later installed.\n%s has been detected\n'+\ 159 'Note that You can still run all MadEvent run without any problem!' 160 #first check that gfortran is installed 161 if options['fortran_compiler']: 162 compiler = options['fortran_compiler'] 163 elif misc.which('gfortran'): 164 compiler = 'gfortran' 165 else: 166 compiler = '' 167 168 if 'gfortran' not in compiler: 169 if block: 170 raise aMCatNLOError(msg % compiler) 171 else: 172 logger.warning(msg % compiler) 173 else: 174 curr_version = misc.get_gfortran_version(compiler) 175 if not ''.join(curr_version.split('.')) >= '46': 176 if block: 177 raise aMCatNLOError(msg % (compiler + ' ' + curr_version)) 178 else: 179 logger.warning(msg % (compiler + ' ' + curr_version))
180 181 182 183 #=============================================================================== 184 # CmdExtended 185 #===============================================================================
186 -class CmdExtended(common_run.CommonRunCmd):
187 """Particularisation of the cmd command for aMCatNLO""" 188 189 #suggested list of command 190 next_possibility = { 191 'start': [], 192 } 193 194 debug_output = 'ME5_debug' 195 error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n' 196 error_debug += 'More information is found in \'%(debug)s\'.\n' 197 error_debug += 'Please attach this file to your report.' 198 199 config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n' 200 201 202 keyboard_stop_msg = """stopping all operation 203 in order to quit MadGraph5_aMC@NLO please enter exit""" 204 205 # Define the Error 206 InvalidCmd = InvalidCmd 207 ConfigurationError = aMCatNLOError 208
209 - def __init__(self, me_dir, options, *arg, **opt):
210 """Init history and line continuation""" 211 212 # Tag allowing/forbiding question 213 self.force = False 214 215 # If possible, build an info line with current version number 216 # and date, from the VERSION text file 217 info = misc.get_pkg_info() 218 info_line = "" 219 if info and info.has_key('version') and info.has_key('date'): 220 len_version = len(info['version']) 221 len_date = len(info['date']) 222 if len_version + len_date < 30: 223 info_line = "#* VERSION %s %s %s *\n" % \ 224 (info['version'], 225 (30 - len_version - len_date) * ' ', 226 info['date']) 227 else: 228 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip() 229 info_line = "#* VERSION %s %s *\n" % \ 230 (version, (24 - len(version)) * ' ') 231 232 # Create a header for the history file. 233 # Remember to fill in time at writeout time! 234 self.history_header = \ 235 '#************************************************************\n' + \ 236 '#* MadGraph5_aMC@NLO *\n' + \ 237 '#* *\n' + \ 238 "#* * * *\n" + \ 239 "#* * * * * *\n" + \ 240 "#* * * * * 5 * * * * *\n" + \ 241 "#* * * * * *\n" + \ 242 "#* * * *\n" + \ 243 "#* *\n" + \ 244 "#* *\n" + \ 245 info_line + \ 246 "#* *\n" + \ 247 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \ 248 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \ 249 "#* and *\n" + \ 250 "#* http://amcatnlo.cern.ch *\n" + \ 251 '#* *\n' + \ 252 '#************************************************************\n' + \ 253 '#* *\n' + \ 254 '#* Command File for aMCatNLO *\n' + \ 255 '#* *\n' + \ 256 '#* run as ./bin/aMCatNLO.py filename *\n' + \ 257 '#* *\n' + \ 258 '#************************************************************\n' 259 260 if info_line: 261 info_line = info_line[1:] 262 263 logger.info(\ 264 "************************************************************\n" + \ 265 "* *\n" + \ 266 "* W E L C O M E to M A D G R A P H 5 *\n" + \ 267 "* a M C @ N L O *\n" + \ 268 "* *\n" + \ 269 "* * * *\n" + \ 270 "* * * * * *\n" + \ 271 "* * * * * 5 * * * * *\n" + \ 272 "* * * * * *\n" + \ 273 "* * * *\n" + \ 274 "* *\n" + \ 275 info_line + \ 276 "* *\n" + \ 277 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \ 278 "* http://amcatnlo.cern.ch *\n" + \ 279 "* *\n" + \ 280 "* Type 'help' for in-line help. *\n" + \ 281 "* *\n" + \ 282 "************************************************************") 283 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
284 285
286 - def get_history_header(self):
287 """return the history header""" 288 return self.history_header % misc.get_time_info()
289
290 - def stop_on_keyboard_stop(self):
291 """action to perform to close nicely on a keyboard interupt""" 292 try: 293 if hasattr(self, 'cluster'): 294 logger.info('rm jobs on queue') 295 self.cluster.remove() 296 if hasattr(self, 'results'): 297 self.update_status('Stop by the user', level=None, makehtml=True, error=True) 298 self.add_error_log_in_html(KeyboardInterrupt) 299 except: 300 pass
301
302 - def postcmd(self, stop, line):
303 """ Update the status of the run for finishing interactive command """ 304 305 # relaxing the tag forbidding question 306 self.force = False 307 308 if not self.use_rawinput: 309 return stop 310 311 312 arg = line.split() 313 if len(arg) == 0: 314 return stop 315 elif str(arg[0]) in ['exit','quit','EOF']: 316 return stop 317 318 try: 319 self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0], 320 level=None, error=True) 321 except Exception: 322 misc.sprint('self.update_status fails', log=logger) 323 pass
324
325 - def nice_user_error(self, error, line):
326 """If a ME run is currently running add a link in the html output""" 327 328 self.add_error_log_in_html() 329 cmd.Cmd.nice_user_error(self, error, line)
330
331 - def nice_config_error(self, error, line):
332 """If a ME run is currently running add a link in the html output""" 333 334 self.add_error_log_in_html() 335 cmd.Cmd.nice_config_error(self, error, line)
336
337 - def nice_error_handling(self, error, line):
338 """If a ME run is currently running add a link in the html output""" 339 340 self.add_error_log_in_html() 341 cmd.Cmd.nice_error_handling(self, error, line)
342 343 344 345 #=============================================================================== 346 # HelpToCmd 347 #===============================================================================
348 -class HelpToCmd(object):
349 """ The Series of help routine for the aMCatNLOCmd""" 350
351 - def help_launch(self):
352 """help for launch command""" 353 _launch_parser.print_help()
354
355 - def help_banner_run(self):
356 logger.info("syntax: banner_run Path|RUN [--run_options]") 357 logger.info("-- Reproduce a run following a given banner") 358 logger.info(" One of the following argument is require:") 359 logger.info(" Path should be the path of a valid banner.") 360 logger.info(" RUN should be the name of a run of the current directory") 361 self.run_options_help([('-f','answer all question by default'), 362 ('--name=X', 'Define the name associated with the new run')])
363 364
365 - def help_compile(self):
366 """help for compile command""" 367 _compile_parser.print_help()
368
369 - def help_generate_events(self):
370 """help for generate_events commandi 371 just call help_launch""" 372 _generate_events_parser.print_help()
373 374
375 - def help_calculate_xsect(self):
376 """help for generate_events command""" 377 _calculate_xsect_parser.print_help()
378
379 - def help_shower(self):
380 """help for shower command""" 381 _shower_parser.print_help()
382 383
384 - def help_open(self):
385 logger.info("syntax: open FILE ") 386 logger.info("-- open a file with the appropriate editor.") 387 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat') 388 logger.info(' the path to the last created/used directory is used')
389
390 - def run_options_help(self, data):
391 if data: 392 logger.info('-- local options:') 393 for name, info in data: 394 logger.info(' %s : %s' % (name, info)) 395 396 logger.info("-- session options:") 397 logger.info(" Note that those options will be kept for the current session") 398 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type']) 399 logger.info(" --multicore : Run in multi-core configuration") 400 logger.info(" --nb_core=X : limit the number of core to use to X.")
401 402 403 404 405 #=============================================================================== 406 # CheckValidForCmd 407 #===============================================================================
408 -class CheckValidForCmd(object):
409 """ The Series of check routine for the aMCatNLOCmd""" 410
411 - def check_shower(self, args, options):
412 """Check the validity of the line. args[0] is the run_directory""" 413 414 if options['force']: 415 self.force = True 416 417 if len(args) == 0: 418 self.help_shower() 419 raise self.InvalidCmd, 'Invalid syntax, please specify the run name' 420 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])): 421 raise self.InvalidCmd, 'Directory %s does not exists' % \ 422 pjoin(os.getcwd(), 'Events', args[0]) 423 424 self.set_run_name(args[0], level= 'shower') 425 args[0] = pjoin(self.me_dir, 'Events', args[0])
426
427 - def check_plot(self, args):
428 """Check the argument for the plot command 429 plot run_name modes""" 430 431 432 madir = self.options['madanalysis_path'] 433 td = self.options['td_path'] 434 435 if not madir or not td: 436 logger.info('Retry to read configuration file to find madanalysis/td') 437 self.set_configuration() 438 439 madir = self.options['madanalysis_path'] 440 td = self.options['td_path'] 441 442 if not madir: 443 error_msg = 'No Madanalysis path correctly set.' 444 error_msg += 'Please use the set command to define the path and retry.' 445 error_msg += 'You can also define it in the configuration file.' 446 raise self.InvalidCmd(error_msg) 447 if not td: 448 error_msg = 'No path to td directory correctly set.' 449 error_msg += 'Please use the set command to define the path and retry.' 450 error_msg += 'You can also define it in the configuration file.' 451 raise self.InvalidCmd(error_msg) 452 453 if len(args) == 0: 454 if not hasattr(self, 'run_name') or not self.run_name: 455 self.help_plot() 456 raise self.InvalidCmd('No run name currently define. Please add this information.') 457 args.append('all') 458 return 459 460 461 if args[0] not in self._plot_mode: 462 self.set_run_name(args[0], level='plot') 463 del args[0] 464 if len(args) == 0: 465 args.append('all') 466 elif not self.run_name: 467 self.help_plot() 468 raise self.InvalidCmd('No run name currently define. Please add this information.') 469 470 for arg in args: 471 if arg not in self._plot_mode and arg != self.run_name: 472 self.help_plot() 473 raise self.InvalidCmd('unknown options %s' % arg)
474
475 - def check_pgs(self, arg):
476 """Check the argument for pythia command 477 syntax: pgs [NAME] 478 Note that other option are already remove at this point 479 """ 480 481 # If not pythia-pgs path 482 if not self.options['pythia-pgs_path']: 483 logger.info('Retry to read configuration file to find pythia-pgs path') 484 self.set_configuration() 485 486 if not self.options['pythia-pgs_path'] or not \ 487 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')): 488 error_msg = 'No pythia-pgs path correctly set.' 489 error_msg += 'Please use the set command to define the path and retry.' 490 error_msg += 'You can also define it in the configuration file.' 491 raise self.InvalidCmd(error_msg) 492 493 tag = [a for a in arg if a.startswith('--tag=')] 494 if tag: 495 arg.remove(tag[0]) 496 tag = tag[0][6:] 497 498 499 if len(arg) == 0 and not self.run_name: 500 if self.results.lastrun: 501 arg.insert(0, self.results.lastrun) 502 else: 503 raise self.InvalidCmd('No run name currently define. Please add this information.') 504 505 if len(arg) == 1 and self.run_name == arg[0]: 506 arg.pop(0) 507 508 if not len(arg) and \ 509 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 510 self.help_pgs() 511 raise self.InvalidCmd('''No file file pythia_events.hep currently available 512 Please specify a valid run_name''') 513 514 lock = None 515 if len(arg) == 1: 516 prev_tag = self.set_run_name(arg[0], tag, 'pgs') 517 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 518 519 if not filenames: 520 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag)) 521 else: 522 input_file = filenames[0] 523 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 524 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'), 525 argument=['-c', input_file], 526 close_fds=True) 527 else: 528 if tag: 529 self.run_card['run_tag'] = tag 530 self.set_run_name(self.run_name, tag, 'pgs') 531 532 return lock
533 534
535 - def check_delphes(self, arg):
536 """Check the argument for pythia command 537 syntax: delphes [NAME] 538 Note that other option are already remove at this point 539 """ 540 541 # If not pythia-pgs path 542 if not self.options['delphes_path']: 543 logger.info('Retry to read configuration file to find delphes path') 544 self.set_configuration() 545 546 if not self.options['delphes_path']: 547 error_msg = 'No delphes path correctly set.' 548 error_msg += 'Please use the set command to define the path and retry.' 549 error_msg += 'You can also define it in the configuration file.' 550 raise self.InvalidCmd(error_msg) 551 552 tag = [a for a in arg if a.startswith('--tag=')] 553 if tag: 554 arg.remove(tag[0]) 555 tag = tag[0][6:] 556 557 558 if len(arg) == 0 and not self.run_name: 559 if self.results.lastrun: 560 arg.insert(0, self.results.lastrun) 561 else: 562 raise self.InvalidCmd('No run name currently define. Please add this information.') 563 564 if len(arg) == 1 and self.run_name == arg[0]: 565 arg.pop(0) 566 567 if not len(arg) and \ 568 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')): 569 self.help_pgs() 570 raise self.InvalidCmd('''No file file pythia_events.hep currently available 571 Please specify a valid run_name''') 572 573 if len(arg) == 1: 574 prev_tag = self.set_run_name(arg[0], tag, 'delphes') 575 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events')) 576 577 578 if not filenames: 579 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\ 580 % (self.run_name, prev_tag, 581 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag))) 582 else: 583 input_file = filenames[0] 584 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep') 585 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'), 586 argument=['-c', input_file], 587 close_fds=True) 588 else: 589 if tag: 590 self.run_card['run_tag'] = tag 591 self.set_run_name(self.run_name, tag, 'delphes')
592
593 - def check_calculate_xsect(self, args, options):
594 """check the validity of the line. args is ORDER, 595 ORDER being LO or NLO. If no mode is passed, NLO is used""" 596 # modify args in order to be DIR 597 # mode being either standalone or madevent 598 599 if options['force']: 600 self.force = True 601 602 if not args: 603 args.append('NLO') 604 return 605 606 if len(args) > 1: 607 self.help_calculate_xsect() 608 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 609 610 elif len(args) == 1: 611 if not args[0] in ['NLO', 'LO']: 612 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 613 mode = args[0] 614 615 # check for incompatible options/modes 616 if options['multicore'] and options['cluster']: 617 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 618 ' are not compatible. Please choose one.'
619 620
621 - def check_generate_events(self, args, options):
622 """check the validity of the line. args is ORDER, 623 ORDER being LO or NLO. If no mode is passed, NLO is used""" 624 # modify args in order to be DIR 625 # mode being either standalone or madevent 626 627 if not args: 628 args.append('NLO') 629 return 630 631 if len(args) > 1: 632 self.help_generate_events() 633 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 634 635 elif len(args) == 1: 636 if not args[0] in ['NLO', 'LO']: 637 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1] 638 mode = args[0] 639 640 # check for incompatible options/modes 641 if options['multicore'] and options['cluster']: 642 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 643 ' are not compatible. Please choose one.'
644
645 - def check_banner_run(self, args):
646 """check the validity of line""" 647 648 if len(args) == 0: 649 self.help_banner_run() 650 raise self.InvalidCmd('banner_run requires at least one argument.') 651 652 tag = [a[6:] for a in args if a.startswith('--tag=')] 653 654 655 if os.path.exists(args[0]): 656 type ='banner' 657 format = self.detect_card_type(args[0]) 658 if format != 'banner': 659 raise self.InvalidCmd('The file is not a valid banner.') 660 elif tag: 661 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 662 (args[0], tag)) 663 if not os.path.exists(args[0]): 664 raise self.InvalidCmd('No banner associates to this name and tag.') 665 else: 666 name = args[0] 667 type = 'run' 668 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0])) 669 if not banners: 670 raise self.InvalidCmd('No banner associates to this name.') 671 elif len(banners) == 1: 672 args[0] = banners[0] 673 else: 674 #list the tag and propose those to the user 675 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners] 676 tag = self.ask('which tag do you want to use?', tags[0], tags) 677 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \ 678 (args[0], tag)) 679 680 run_name = [arg[7:] for arg in args if arg.startswith('--name=')] 681 if run_name: 682 try: 683 self.exec_cmd('remove %s all banner -f' % run_name) 684 except Exception: 685 pass 686 self.set_run_name(args[0], tag=None, level='parton', reload_card=True) 687 elif type == 'banner': 688 self.set_run_name(self.find_available_run_name(self.me_dir)) 689 elif type == 'run': 690 if not self.results[name].is_empty(): 691 run_name = self.find_available_run_name(self.me_dir) 692 logger.info('Run %s is not empty so will use run_name: %s' % \ 693 (name, run_name)) 694 self.set_run_name(run_name) 695 else: 696 try: 697 self.exec_cmd('remove %s all banner -f' % run_name) 698 except Exception: 699 pass 700 self.set_run_name(name)
701 702 703
704 - def check_launch(self, args, options):
705 """check the validity of the line. args is MODE 706 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used""" 707 # modify args in order to be DIR 708 # mode being either standalone or madevent 709 710 if options['force']: 711 self.force = True 712 713 714 if not args: 715 args.append('auto') 716 return 717 718 if len(args) > 1: 719 self.help_launch() 720 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 721 722 elif len(args) == 1: 723 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']: 724 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0] 725 mode = args[0] 726 727 # check for incompatible options/modes 728 if options['multicore'] and options['cluster']: 729 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \ 730 ' are not compatible. Please choose one.' 731 if mode == 'NLO' and options['reweightonly']: 732 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
733 734
735 - def check_compile(self, args, options):
736 """check the validity of the line. args is MODE 737 MODE being FO or MC. If no mode is passed, MC is used""" 738 # modify args in order to be DIR 739 # mode being either standalone or madevent 740 741 if options['force']: 742 self.force = True 743 744 if not args: 745 args.append('MC') 746 return 747 748 if len(args) > 1: 749 self.help_compile() 750 raise self.InvalidCmd, 'Invalid Syntax: Too many argument' 751 752 elif len(args) == 1: 753 if not args[0] in ['MC', 'FO']: 754 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0] 755 mode = args[0]
756 757 # check for incompatible options/modes 758 759 760 #=============================================================================== 761 # CompleteForCmd 762 #===============================================================================
763 -class CompleteForCmd(CheckValidForCmd):
764 """ The Series of help routine for the MadGraphCmd""" 765
766 - def complete_launch(self, text, line, begidx, endidx):
767 """auto-completion for launch command""" 768 769 args = self.split_arg(line[0:begidx]) 770 if len(args) == 1: 771 #return mode 772 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line) 773 elif len(args) == 2 and line[begidx-1] == '@': 774 return self.list_completion(text,['LO','NLO'],line) 775 else: 776 opts = [] 777 for opt in _launch_parser.option_list: 778 opts += opt._long_opts + opt._short_opts 779 return self.list_completion(text, opts, line)
780
781 - def complete_banner_run(self, text, line, begidx, endidx, formatting=True):
782 "Complete the banner run command" 783 try: 784 785 786 args = self.split_arg(line[0:begidx], error=False) 787 788 if args[-1].endswith(os.path.sep): 789 return self.path_completion(text, 790 os.path.join('.',*[a for a in args \ 791 if a.endswith(os.path.sep)])) 792 793 794 if len(args) > 1: 795 # only options are possible 796 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1])) 797 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags] 798 799 if args[-1] != '--tag=': 800 tags = ['--tag=%s' % t for t in tags] 801 else: 802 return self.list_completion(text, tags) 803 return self.list_completion(text, tags +['--name=','-f'], line) 804 805 # First argument 806 possibilites = {} 807 808 comp = self.path_completion(text, os.path.join('.',*[a for a in args \ 809 if a.endswith(os.path.sep)])) 810 if os.path.sep in line: 811 return comp 812 else: 813 possibilites['Path from ./'] = comp 814 815 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events')) 816 run_list = [n.rsplit('/',2)[1] for n in run_list] 817 possibilites['RUN Name'] = self.list_completion(text, run_list) 818 819 return self.deal_multiple_categories(possibilites, formatting) 820 821 822 except Exception, error: 823 print error
824 825
826 - def complete_compile(self, text, line, begidx, endidx):
827 """auto-completion for launch command""" 828 829 args = self.split_arg(line[0:begidx]) 830 if len(args) == 1: 831 #return mode 832 return self.list_completion(text,['FO','MC'],line) 833 else: 834 opts = [] 835 for opt in _compile_parser.option_list: 836 opts += opt._long_opts + opt._short_opts 837 return self.list_completion(text, opts, line)
838
839 - def complete_calculate_xsect(self, text, line, begidx, endidx):
840 """auto-completion for launch command""" 841 842 args = self.split_arg(line[0:begidx]) 843 if len(args) == 1: 844 #return mode 845 return self.list_completion(text,['LO','NLO'],line) 846 else: 847 opts = [] 848 for opt in _calculate_xsect_parser.option_list: 849 opts += opt._long_opts + opt._short_opts 850 return self.list_completion(text, opts, line)
851
852 - def complete_generate_events(self, text, line, begidx, endidx):
853 """auto-completion for generate_events command 854 call the compeltion for launch""" 855 self.complete_launch(text, line, begidx, endidx)
856 857
858 - def complete_shower(self, text, line, begidx, endidx):
859 args = self.split_arg(line[0:begidx]) 860 if len(args) == 1: 861 #return valid run_name 862 data = misc.glob(pjoin('*','events.lhe.gz', pjoin(self.me_dir, 'Events'))) 863 data = [n.rsplit('/',2)[1] for n in data] 864 tmp1 = self.list_completion(text, data) 865 if not self.run_name: 866 return tmp1
867
868 - def complete_plot(self, text, line, begidx, endidx):
869 """ Complete the plot command """ 870 871 args = self.split_arg(line[0:begidx], error=False) 872 873 if len(args) == 1: 874 #return valid run_name 875 data = misc.glob(pjoin('*','events.lhe*', pjoin(self.me_dir, 'Events'))) 876 data = [n.rsplit('/',2)[1] for n in data] 877 tmp1 = self.list_completion(text, data) 878 if not self.run_name: 879 return tmp1 880 881 if len(args) > 1: 882 return self.list_completion(text, self._plot_mode)
883
884 - def complete_pgs(self,text, line, begidx, endidx):
885 "Complete the pgs command" 886 args = self.split_arg(line[0:begidx], error=False) 887 if len(args) == 1: 888 #return valid run_name 889 data = misc.glob(pjoin('*', 'events_*.hep.gz'), 890 pjoin(self.me_dir, 'Events')) 891 data = [n.rsplit('/',2)[1] for n in data] 892 tmp1 = self.list_completion(text, data) 893 if not self.run_name: 894 return tmp1 895 else: 896 tmp2 = self.list_completion(text, self._run_options + ['-f', 897 '--tag=' ,'--no_default'], line) 898 return tmp1 + tmp2 899 else: 900 return self.list_completion(text, self._run_options + ['-f', 901 '--tag=','--no_default'], line)
902 903 complete_delphes = complete_pgs
904
905 -class aMCatNLOAlreadyRunning(InvalidCmd):
906 pass
907 908 #=============================================================================== 909 # aMCatNLOCmd 910 #===============================================================================
911 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
912 """The command line processor of MadGraph""" 913 914 # Truth values 915 true = ['T','.true.',True,'true'] 916 # Options and formats available 917 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m'] 918 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes'] 919 _calculate_decay_options = ['-f', '--accuracy=0.'] 920 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout'] 921 _plot_mode = ['all', 'parton','shower','pgs','delphes'] 922 _clean_mode = _plot_mode + ['channel', 'banner'] 923 _display_opts = ['run_name', 'options', 'variable'] 924 # survey options, dict from name to type, default value, and help text 925 # Variables to store object information 926 web = False 927 cluster_mode = 0 928 queue = 'madgraph' 929 nb_core = None 930 make_opts_var = {} 931 932 next_possibility = { 933 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]', 934 'help generate_events'], 935 'generate_events': ['generate_events [OPTIONS]', 'shower'], 936 'launch': ['launch [OPTIONS]', 'shower'], 937 'shower' : ['generate_events [OPTIONS]'] 938 } 939 940 941 ############################################################################
942 - def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
943 """ add information to the cmd """ 944 945 self.start_time = 0 946 CmdExtended.__init__(self, me_dir, options, *completekey, **stdin) 947 #common_run.CommonRunCmd.__init__(self, me_dir, options) 948 949 self.mode = 'aMCatNLO' 950 self.nb_core = 0 951 self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir)) 952 953 954 self.load_results_db() 955 self.results.def_web_mode(self.web) 956 # check that compiler is gfortran 4.6 or later if virtuals have been exported 957 proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read() 958 959 if not '[real=QCD]' in proc_card: 960 check_compiler(self.options, block=True)
961 962 963 ############################################################################
964 - def do_shower(self, line):
965 """ run the shower on a given parton level file """ 966 argss = self.split_arg(line) 967 (options, argss) = _launch_parser.parse_args(argss) 968 # check argument validity and normalise argument 969 options = options.__dict__ 970 options['reweightonly'] = False 971 self.check_shower(argss, options) 972 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe') 973 self.ask_run_configuration('onlyshower', options) 974 self.run_mcatnlo(evt_file, options) 975 976 self.update_status('', level='all', update_results=True)
977 978 ################################################################################
979 - def do_plot(self, line):
980 """Create the plot for a given run""" 981 982 # Since in principle, all plot are already done automaticaly 983 args = self.split_arg(line) 984 # Check argument's validity 985 self.check_plot(args) 986 logger.info('plot for run %s' % self.run_name) 987 988 if not self.force: 989 self.ask_edit_cards([], args, plot=True) 990 991 if any([arg in ['parton'] for arg in args]): 992 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe') 993 if os.path.exists(filename+'.gz'): 994 misc.gunzip(filename) 995 if os.path.exists(filename): 996 logger.info('Found events.lhe file for run %s' % self.run_name) 997 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe')) 998 self.create_plot('parton') 999 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename) 1000 misc.gzip(filename) 1001 1002 if any([arg in ['all','parton'] for arg in args]): 1003 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top') 1004 if os.path.exists(filename): 1005 logger.info('Found MADatNLO.top file for run %s' % \ 1006 self.run_name) 1007 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html') 1008 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton') 1009 1010 if not os.path.isdir(plot_dir): 1011 os.makedirs(plot_dir) 1012 top_file = pjoin(plot_dir, 'plots.top') 1013 files.cp(filename, top_file) 1014 madir = self.options['madanalysis_path'] 1015 tag = self.run_card['run_tag'] 1016 td = self.options['td_path'] 1017 misc.call(['%s/plot' % self.dirbin, madir, td], 1018 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1019 stderr = subprocess.STDOUT, 1020 cwd=plot_dir) 1021 1022 misc.call(['%s/plot_page-pl' % self.dirbin, 1023 os.path.basename(plot_dir), 1024 'parton'], 1025 stdout = open(pjoin(plot_dir, 'plot.log'),'a'), 1026 stderr = subprocess.STDOUT, 1027 cwd=pjoin(self.me_dir, 'HTML', self.run_name)) 1028 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'), 1029 output) 1030 1031 os.remove(pjoin(self.me_dir, 'Events', 'plots.top')) 1032 1033 if any([arg in ['all','shower'] for arg in args]): 1034 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1035 if len(filenames) != 1: 1036 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name)) 1037 if len(filenames) != 1: 1038 logger.info('No shower level file found for run %s' % \ 1039 self.run_name) 1040 return 1041 filename = filenames[0] 1042 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1043 1044 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')): 1045 if aMCatNLO and not self.options['mg5_path']: 1046 raise "plotting NLO HEP file needs MG5 utilities" 1047 1048 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'), 1049 pjoin(self.me_dir, 'Cards', 'pythia_card.dat')) 1050 self.run_hep2lhe() 1051 else: 1052 filename = filenames[0] 1053 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep')) 1054 1055 self.create_plot('shower') 1056 lhe_file_name = filename.replace('.hep.gz', '.lhe') 1057 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'), 1058 lhe_file_name) 1059 misc.gzip(lhe_file_name) 1060 1061 if any([arg in ['all','pgs'] for arg in args]): 1062 filename = pjoin(self.me_dir, 'Events', self.run_name, 1063 '%s_pgs_events.lhco' % self.run_tag) 1064 if os.path.exists(filename+'.gz'): 1065 misc.gunzip(filename) 1066 if os.path.exists(filename): 1067 self.create_plot('PGS') 1068 misc.gzip(filename) 1069 else: 1070 logger.info('No valid files for pgs plot') 1071 1072 if any([arg in ['all','delphes'] for arg in args]): 1073 filename = pjoin(self.me_dir, 'Events', self.run_name, 1074 '%s_delphes_events.lhco' % self.run_tag) 1075 if os.path.exists(filename+'.gz'): 1076 misc.gunzip(filename) 1077 if os.path.exists(filename): 1078 #shutil.move(filename, pjoin(self.me_dir, 'Events','delphes_events.lhco')) 1079 self.create_plot('Delphes') 1080 #shutil.move(pjoin(self.me_dir, 'Events','delphes_events.lhco'), filename) 1081 misc.gzip(filename) 1082 else: 1083 logger.info('No valid files for delphes plot')
1084 1085 1086 ############################################################################
1087 - def do_calculate_xsect(self, line):
1088 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO 1089 this function wraps the do_launch one""" 1090 1091 self.start_time = time.time() 1092 argss = self.split_arg(line) 1093 # check argument validity and normalise argument 1094 (options, argss) = _calculate_xsect_parser.parse_args(argss) 1095 options = options.__dict__ 1096 options['reweightonly'] = False 1097 options['parton'] = True 1098 self.check_calculate_xsect(argss, options) 1099 self.do_launch(line, options, argss)
1100 1101 ############################################################################
1102 - def do_banner_run(self, line):
1103 """Make a run from the banner file""" 1104 1105 args = self.split_arg(line) 1106 #check the validity of the arguments 1107 self.check_banner_run(args) 1108 1109 # Remove previous cards 1110 for name in ['shower_card.dat', 'madspin_card.dat']: 1111 try: 1112 os.remove(pjoin(self.me_dir, 'Cards', name)) 1113 except Exception: 1114 pass 1115 1116 banner_mod.split_banner(args[0], self.me_dir, proc_card=False) 1117 1118 # Check if we want to modify the run 1119 if not self.force: 1120 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n']) 1121 if ans == 'n': 1122 self.force = True 1123 1124 # Compute run mode: 1125 if self.force: 1126 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True} 1127 banner = banner_mod.Banner(args[0]) 1128 for line in banner['run_settings']: 1129 if '=' in line: 1130 mode, value = [t.strip() for t in line.split('=')] 1131 mode_status[mode] = value 1132 else: 1133 mode_status = {} 1134 1135 # Call Generate events 1136 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''), 1137 switch=mode_status)
1138 1139 ############################################################################
1140 - def do_generate_events(self, line):
1141 """Main commands: generate events 1142 this function just wraps the do_launch one""" 1143 self.do_launch(line)
1144 1145 1146 ############################################################################
1147 - def do_treatcards(self, line, amcatnlo=True,mode=''):
1148 """Advanced commands: this is for creating the correct run_card.inc from the nlo format""" 1149 #check if no 'Auto' are present in the file 1150 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat')) 1151 1152 # propagate the FO_card entry FO_LHE_weight_ratio to the run_card. 1153 # this variable is system only in the run_card 1154 # can not be done in EditCard since this parameter is not written in the 1155 # run_card directly. 1156 if mode in ['LO', 'NLO']: 1157 name = 'fo_lhe_weight_ratio' 1158 FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat')) 1159 if name in FO_card: 1160 self.run_card.set(name, FO_card[name], user=False) 1161 name = 'fo_lhe_postprocessing' 1162 if name in FO_card: 1163 self.run_card.set(name, FO_card[name], user=False) 1164 1165 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1166 1167 ############################################################################
1168 - def set_configuration(self, amcatnlo=True, **opt):
1169 """assign all configuration variable from file 1170 loop over the different config file if config_file not define """ 1171 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1172 1173 ############################################################################
1174 - def do_launch(self, line, options={}, argss=[], switch={}):
1175 """Main commands: launch the full chain 1176 options and args are relevant if the function is called from other 1177 functions, such as generate_events or calculate_xsect 1178 mode gives the list of switch needed for the computation (usefull for banner_run) 1179 """ 1180 1181 if not argss and not options: 1182 self.start_time = time.time() 1183 argss = self.split_arg(line) 1184 # check argument validity and normalise argument 1185 (options, argss) = _launch_parser.parse_args(argss) 1186 options = options.__dict__ 1187 self.check_launch(argss, options) 1188 1189 1190 if 'run_name' in options.keys() and options['run_name']: 1191 self.run_name = options['run_name'] 1192 # if a dir with the given run_name already exists 1193 # remove it and warn the user 1194 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)): 1195 logger.warning('Removing old run information in \n'+ 1196 pjoin(self.me_dir, 'Events', self.run_name)) 1197 files.rm(pjoin(self.me_dir, 'Events', self.run_name)) 1198 self.results.delete_run(self.run_name) 1199 else: 1200 self.run_name = '' # will be set later 1201 1202 if options['multicore']: 1203 self.cluster_mode = 2 1204 elif options['cluster']: 1205 self.cluster_mode = 1 1206 1207 if not switch: 1208 mode = argss[0] 1209 1210 if mode in ['LO', 'NLO']: 1211 options['parton'] = True 1212 mode = self.ask_run_configuration(mode, options) 1213 else: 1214 mode = self.ask_run_configuration('auto', options, switch) 1215 1216 self.results.add_detail('run_mode', mode) 1217 1218 self.update_status('Starting run', level=None, update_results=True) 1219 1220 if self.options['automatic_html_opening']: 1221 misc.open_file(os.path.join(self.me_dir, 'crossx.html')) 1222 self.options['automatic_html_opening'] = False 1223 1224 if '+' in mode: 1225 mode = mode.split('+')[0] 1226 self.compile(mode, options) 1227 evt_file = self.run(mode, options) 1228 1229 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']: 1230 logger.info('No event file generated: grids have been set-up with a '\ 1231 'relative precision of %s' % self.run_card['req_acc']) 1232 return 1233 1234 if not mode in ['LO', 'NLO']: 1235 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')) 1236 1237 if self.run_card['systematics_program'] == 'systematics': 1238 self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments']))) 1239 1240 self.exec_cmd('reweight -from_cards', postcmd=False) 1241 self.exec_cmd('decay_events -from_cards', postcmd=False) 1242 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe') 1243 1244 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \ 1245 and not options['parton']: 1246 self.run_mcatnlo(evt_file, options) 1247 self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False) 1248 1249 elif mode == 'noshower': 1250 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 1251 Please, shower the Les Houches events before using them for physics analyses.""") 1252 1253 1254 self.update_status('', level='all', update_results=True) 1255 if self.run_card['ickkw'] == 3 and \ 1256 (mode in ['noshower'] or \ 1257 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))): 1258 logger.warning("""You are running with FxFx merging enabled. 1259 To be able to merge samples of various multiplicities without double counting, 1260 you have to remove some events after showering 'by hand'. 1261 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 1262 1263 self.store_result() 1264 #check if the param_card defines a scan. 1265 if self.param_card_iterator: 1266 param_card_iterator = self.param_card_iterator 1267 self.param_card_iterator = [] #avoid to next generate go trough here 1268 param_card_iterator.store_entry(self.run_name, self.results.current['cross']) 1269 orig_name = self.run_name 1270 #go trough the scal 1271 with misc.TMP_variable(self, 'allow_notification_center', False): 1272 for i,card in enumerate(param_card_iterator): 1273 card.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1274 self.check_param_card(pjoin(self.me_dir,'Cards','param_card.dat'), dependent=True) 1275 if not options['force']: 1276 options['force'] = True 1277 if options['run_name']: 1278 options['run_name'] = '%s_%s' % (orig_name, i+1) 1279 if not argss: 1280 argss = [mode, "-f"] 1281 elif argss[0] == "auto": 1282 argss[0] = mode 1283 self.do_launch("", options=options, argss=argss, switch=switch) 1284 #self.exec_cmd("launch -f ",precmd=True, postcmd=True,errorhandling=False) 1285 param_card_iterator.store_entry(self.run_name, self.results.current['cross']) 1286 #restore original param_card 1287 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat')) 1288 name = misc.get_scan_name(orig_name, self.run_name) 1289 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name) 1290 logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK') 1291 param_card_iterator.write_summary(path) 1292 1293 if self.allow_notification_center: 1294 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir), 1295 '%s: %s +- %s ' % (self.results.current['run_name'], 1296 self.results.current['cross'], 1297 self.results.current['error']))
1298 1299 1300 ############################################################################
1301 - def do_compile(self, line):
1302 """Advanced commands: just compile the executables """ 1303 argss = self.split_arg(line) 1304 # check argument validity and normalise argument 1305 (options, argss) = _compile_parser.parse_args(argss) 1306 options = options.__dict__ 1307 options['reweightonly'] = False 1308 options['nocompile'] = False 1309 self.check_compile(argss, options) 1310 1311 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]] 1312 self.ask_run_configuration(mode, options) 1313 self.compile(mode, options) 1314 1315 1316 self.update_status('', level='all', update_results=True)
1317 1318
1319 - def update_random_seed(self):
1320 """Update random number seed with the value from the run_card. 1321 If this is 0, update the number according to a fresh one""" 1322 iseed = self.run_card['iseed'] 1323 if iseed == 0: 1324 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit')) 1325 iseed = int(randinit.read()[2:]) + 1 1326 randinit.close() 1327 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w') 1328 randinit.write('r=%d' % iseed) 1329 randinit.close()
1330 1331
1332 - def run(self, mode, options):
1333 """runs aMC@NLO. Returns the name of the event file created""" 1334 logger.info('Starting run') 1335 1336 if not 'only_generation' in options.keys(): 1337 options['only_generation'] = False 1338 1339 # for second step in applgrid mode, do only the event generation step 1340 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']: 1341 options['only_generation'] = True 1342 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics')) 1343 self.setup_cluster_or_multicore() 1344 self.update_random_seed() 1345 #find and keep track of all the jobs 1346 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'], 1347 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']} 1348 folder_names['noshower'] = folder_names['aMC@NLO'] 1349 folder_names['noshowerLO'] = folder_names['aMC@LO'] 1350 p_dirs = [d for d in \ 1351 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 1352 #Clean previous results 1353 self.clean_previous_results(options,p_dirs,folder_names[mode]) 1354 1355 mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events'] 1356 1357 1358 if options['reweightonly']: 1359 event_norm=self.run_card['event_norm'] 1360 nevents=self.run_card['nevents'] 1361 return self.reweight_and_collect_events(options, mode, nevents, event_norm) 1362 1363 if mode in ['LO', 'NLO']: 1364 # this is for fixed order runs 1365 mode_dict = {'NLO': 'all', 'LO': 'born'} 1366 logger.info('Doing fixed order %s' % mode) 1367 req_acc = self.run_card['req_acc_FO'] 1368 1369 # Re-distribute the grids for the 2nd step of the applgrid 1370 # running 1371 if self.run_card['iappl'] == 2: 1372 self.applgrid_distribute(options,mode_dict[mode],p_dirs) 1373 1374 # create a list of dictionaries "jobs_to_run" with all the 1375 # jobs that need to be run 1376 integration_step=-1 1377 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1378 req_acc,mode_dict[mode],integration_step,mode,fixed_order=True) 1379 self.prepare_directories(jobs_to_run,mode) 1380 1381 # loop over the integration steps. After every step, check 1382 # if we have the required accuracy. If this is the case, 1383 # stop running, else do another step. 1384 while True: 1385 integration_step=integration_step+1 1386 self.run_all_jobs(jobs_to_run,integration_step) 1387 self.collect_log_files(jobs_to_run,integration_step) 1388 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1389 jobs_to_collect,integration_step,mode,mode_dict[mode]) 1390 if not jobs_to_run: 1391 # there are no more jobs to run (jobs_to_run is empty) 1392 break 1393 # We are done. 1394 self.finalise_run_FO(folder_names[mode],jobs_to_collect) 1395 self.update_status('Run complete', level='parton', update_results=True) 1396 return 1397 1398 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']: 1399 if self.ninitial == 1: 1400 raise aMCatNLOError('Decay processes can only be run at fixed order.') 1401 mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\ 1402 'noshower': 'all', 'noshowerLO': 'born'} 1403 shower = self.run_card['parton_shower'].upper() 1404 nevents = self.run_card['nevents'] 1405 req_acc = self.run_card['req_acc'] 1406 if nevents == 0 and req_acc < 0 : 1407 raise aMCatNLOError('Cannot determine the required accuracy from the number '\ 1408 'of events, because 0 events requested. Please set '\ 1409 'the "req_acc" parameter in the run_card to a value '\ 1410 'between 0 and 1') 1411 elif req_acc >1 or req_acc == 0 : 1412 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\ 1413 'be between larger than 0 and smaller than 1, '\ 1414 'or set to -1 for automatic determination. Current '\ 1415 'value is %f' % req_acc) 1416 # For more than 1M events, set req_acc to 0.001 (except when it was explicitly set in the run_card) 1417 elif req_acc < 0 and nevents > 1000000 : 1418 req_acc=0.001 1419 1420 shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8'] 1421 1422 if not shower in shower_list: 1423 raise aMCatNLOError('%s is not a valid parton shower. '\ 1424 'Please use one of the following: %s' \ 1425 % (shower, ', '.join(shower_list))) 1426 1427 # check that PYTHIA6PT is not used for processes with FSR 1428 if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']: 1429 raise aMCatNLOError('PYTHIA6PT does not support processes with FSR') 1430 1431 if mode in ['aMC@NLO', 'aMC@LO']: 1432 logger.info('Doing %s matched to parton shower' % mode[4:]) 1433 elif mode in ['noshower','noshowerLO']: 1434 logger.info('Generating events without running the shower.') 1435 elif options['only_generation']: 1436 logger.info('Generating events starting from existing results') 1437 1438 jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \ 1439 req_acc,mode_dict[mode],1,mode,fixed_order=False) 1440 1441 # Make sure to update all the jobs to be ready for the event generation step 1442 if options['only_generation']: 1443 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1444 jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False) 1445 else: 1446 self.prepare_directories(jobs_to_run,mode,fixed_order=False) 1447 1448 1449 # Main loop over the three MINT generation steps: 1450 for mint_step, status in enumerate(mcatnlo_status): 1451 if options['only_generation'] and mint_step < 2: 1452 continue 1453 self.update_status(status, level='parton') 1454 self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False) 1455 self.collect_log_files(jobs_to_run,mint_step) 1456 if mint_step+1==2 and nevents==0: 1457 self.print_summary(options,2,mode) 1458 return 1459 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \ 1460 jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False) 1461 # Sanity check on the event files. If error the jobs are resubmitted 1462 self.check_event_files(jobs_to_collect) 1463 1464 if self.cluster_mode == 1: 1465 #if cluster run, wait 10 sec so that event files are transferred back 1466 self.update_status( 1467 'Waiting while files are transferred back from the cluster nodes', 1468 level='parton') 1469 time.sleep(10) 1470 1471 event_norm=self.run_card['event_norm'] 1472 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1473
1474 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\ 1475 integration_step,mode,fixed_order=True):
1476 """Creates a list of dictionaries with all the jobs to be run""" 1477 jobs_to_run=[] 1478 if not options['only_generation']: 1479 # Fresh, new run. Check all the P*/channels.txt files 1480 # (created by the 'gensym' executable) to set-up all the 1481 # jobs using the default inputs. 1482 npoints = self.run_card['npoints_FO_grid'] 1483 niters = self.run_card['niters_FO_grid'] 1484 for p_dir in p_dirs: 1485 try: 1486 with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file: 1487 channels=chan_file.readline().split() 1488 except IOError: 1489 logger.warning('No integration channels found for contribution %s' % p_dir) 1490 continue 1491 if fixed_order: 1492 lch=len(channels) 1493 maxchannels=20 # combine up to 20 channels in a single job 1494 if self.run_card['iappl'] != 0: maxchannels=1 1495 njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \ 1496 else int(lch/maxchannels)) 1497 for nj in range(1,njobs+1): 1498 job={} 1499 job['p_dir']=p_dir 1500 job['channel']=str(nj) 1501 job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs)) 1502 job['configs']=' '.join(channels[:job['nchans']]) 1503 del channels[:job['nchans']] 1504 job['split']=0 1505 if req_acc == -1: 1506 job['accuracy']=0 1507 job['niters']=niters 1508 job['npoints']=npoints 1509 elif req_acc > 0: 1510 job['accuracy']=0.05 1511 job['niters']=6 1512 job['npoints']=-1 1513 else: 1514 raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+ 1515 'between 0 and 1 or set it equal to -1.') 1516 job['mint_mode']=0 1517 job['run_mode']=run_mode 1518 job['wgt_frac']=1.0 1519 job['wgt_mult']=1.0 1520 jobs_to_run.append(job) 1521 if channels: 1522 raise aMCatNLOError('channels is not empty %s' % channels) 1523 else: 1524 for channel in channels: 1525 job={} 1526 job['p_dir']=p_dir 1527 job['channel']=channel 1528 job['split']=0 1529 job['accuracy']=0.03 1530 job['niters']=12 1531 job['npoints']=-1 1532 job['mint_mode']=0 1533 job['run_mode']=run_mode 1534 job['wgt_frac']=1.0 1535 jobs_to_run.append(job) 1536 jobs_to_collect=copy.copy(jobs_to_run) # These are all jobs 1537 else: 1538 # if options['only_generation'] is true, just read the current jobs from file 1539 try: 1540 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f: 1541 jobs_to_collect=pickle.load(f) 1542 jobs_to_run=copy.copy(jobs_to_collect) 1543 except: 1544 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \ 1545 pjoin(self.me_dir,'SubProcesses','job_status.pkl')) 1546 # Update cross sections and determine which jobs to run next 1547 if fixed_order: 1548 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, 1549 jobs_to_collect,integration_step,mode,run_mode) 1550 # Update the integration_step to make sure that nothing will be overwritten 1551 integration_step=1 1552 for job in jobs_to_run: 1553 while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)): 1554 integration_step=integration_step+1 1555 integration_step=integration_step-1 1556 else: 1557 self.append_the_results(jobs_to_collect,integration_step) 1558 return jobs_to_run,jobs_to_collect,integration_step
1559
1560 - def prepare_directories(self,jobs_to_run,mode,fixed_order=True):
1561 """Set-up the G* directories for running""" 1562 name_suffix={'born' :'B' , 'all':'F'} 1563 for job in jobs_to_run: 1564 if job['split'] == 0: 1565 if fixed_order : 1566 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1567 job['run_mode']+'_G'+job['channel']) 1568 else: 1569 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1570 'G'+name_suffix[job['run_mode']]+job['channel']) 1571 else: 1572 if fixed_order : 1573 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1574 job['run_mode']+'_G'+job['channel']+'_'+str(job['split'])) 1575 else: 1576 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'], 1577 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split'])) 1578 job['dirname']=dirname 1579 if not os.path.isdir(dirname): 1580 os.makedirs(dirname) 1581 self.write_input_file(job,fixed_order) 1582 # link or copy the grids from the base directory to the split directory: 1583 if not fixed_order: 1584 if job['split'] != 0: 1585 for f in ['grid.MC_integer','mint_grids','res_1']: 1586 if not os.path.isfile(pjoin(job['dirname'],f)): 1587 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname']) 1588 else: 1589 if job['split'] != 0: 1590 for f in ['grid.MC_integer','mint_grids']: 1591 files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1592 1593
1594 - def write_input_file(self,job,fixed_order):
1595 """write the input file for the madevent_mint* executable in the appropriate directory""" 1596 if fixed_order: 1597 content= \ 1598 """NPOINTS = %(npoints)s 1599 NITERATIONS = %(niters)s 1600 ACCURACY = %(accuracy)s 1601 ADAPT_GRID = 2 1602 MULTICHANNEL = 1 1603 SUM_HELICITY = 1 1604 NCHANS = %(nchans)s 1605 CHANNEL = %(configs)s 1606 SPLIT = %(split)s 1607 WGT_MULT= %(wgt_mult)s 1608 RUN_MODE = %(run_mode)s 1609 RESTART = %(mint_mode)s 1610 """ \ 1611 % job 1612 else: 1613 content = \ 1614 """-1 12 ! points, iterations 1615 %(accuracy)s ! desired fractional accuracy 1616 1 -0.1 ! alpha, beta for Gsoft 1617 1 -0.1 ! alpha, beta for Gazi 1618 1 ! Suppress amplitude (0 no, 1 yes)? 1619 1 ! Exact helicity sum (0 yes, n = number/event)? 1620 %(channel)s ! Enter Configuration Number: 1621 %(mint_mode)s ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events 1622 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij 1623 %(run_mode)s ! all, born, real, virt 1624 """ \ 1625 % job 1626 with open(pjoin(job['dirname'], 'input_app.txt'), 'w') as input_file: 1627 input_file.write(content)
1628 1629
1630 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1631 """Loops over the jobs_to_run and executes them using the function 'run_exe'""" 1632 if fixed_order: 1633 if integration_step == 0: 1634 self.update_status('Setting up grids', level=None) 1635 else: 1636 self.update_status('Refining results, step %i' % integration_step, level=None) 1637 self.ijob = 0 1638 name_suffix={'born' :'B', 'all':'F'} 1639 if fixed_order: 1640 run_type="Fixed order integration step %s" % integration_step 1641 else: 1642 run_type="MINT step %s" % integration_step 1643 self.njobs=len(jobs_to_run) 1644 for job in jobs_to_run: 1645 executable='ajob1' 1646 if fixed_order: 1647 arguments=[job['channel'],job['run_mode'], \ 1648 str(job['split']),str(integration_step)] 1649 else: 1650 arguments=[job['channel'],name_suffix[job['run_mode']], \ 1651 str(job['split']),str(integration_step)] 1652 self.run_exe(executable,arguments,run_type, 1653 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir'])) 1654 1655 if self.cluster_mode == 2: 1656 time.sleep(1) # security to allow all jobs to be launched 1657 self.wait_for_complete(run_type)
1658 1659
1660 - def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\ 1661 integration_step,mode,run_mode,fixed_order=True):
1662 """Collect the results, make HTML pages, print the summary and 1663 determine if there are more jobs to run. Returns the list 1664 of the jobs that still need to be run, as well as the 1665 complete list of jobs that need to be collected to get the 1666 final answer. 1667 """ 1668 # Get the results of the current integration/MINT step 1669 self.append_the_results(jobs_to_run,integration_step) 1670 self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step) 1671 # Update HTML pages 1672 if fixed_order: 1673 cross, error = sum_html.make_all_html_results(self, jobs=jobs_to_collect) 1674 else: 1675 name_suffix={'born' :'B' , 'all':'F'} 1676 cross, error = sum_html.make_all_html_results(self, folder_names=['G%s*' % name_suffix[run_mode]]) 1677 self.results.add_detail('cross', cross) 1678 self.results.add_detail('error', error) 1679 # Combine grids from split fixed order jobs 1680 if fixed_order: 1681 jobs_to_run=self.combine_split_order_run(jobs_to_run) 1682 # Set-up jobs for the next iteration/MINT step 1683 jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order) 1684 # IF THERE ARE NO MORE JOBS, WE ARE DONE!!! 1685 if fixed_order: 1686 # Write the jobs_to_collect directory to file so that we 1687 # can restart them later (with only-generation option) 1688 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f: 1689 pickle.dump(jobs_to_collect,f) 1690 # Print summary 1691 if (not jobs_to_run_new) and fixed_order: 1692 # print final summary of results (for fixed order) 1693 scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect) 1694 self.print_summary(options,integration_step,mode,scale_pdf_info,done=True) 1695 return jobs_to_run_new,jobs_to_collect 1696 elif jobs_to_run_new: 1697 # print intermediate summary of results 1698 scale_pdf_info=[] 1699 self.print_summary(options,integration_step,mode,scale_pdf_info,done=False) 1700 else: 1701 # When we are done for (N)LO+PS runs, do not print 1702 # anything yet. This will be done after the reweighting 1703 # and collection of the events 1704 scale_pdf_info=[] 1705 # Prepare for the next integration/MINT step 1706 if (not fixed_order) and integration_step+1 == 2 : 1707 # Write the jobs_to_collect directory to file so that we 1708 # can restart them later (with only-generation option) 1709 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f: 1710 pickle.dump(jobs_to_collect,f) 1711 # next step is event generation (mint_step 2) 1712 jobs_to_run_new,jobs_to_collect_new= \ 1713 self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect) 1714 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1715 self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect) 1716 self.write_nevts_files(jobs_to_run_new) 1717 else: 1718 if fixed_order and self.run_card['iappl'] == 0 \ 1719 and self.run_card['req_acc_FO'] > 0: 1720 jobs_to_run_new,jobs_to_collect= \ 1721 self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect) 1722 self.prepare_directories(jobs_to_run_new,mode,fixed_order) 1723 jobs_to_collect_new=jobs_to_collect 1724 return jobs_to_run_new,jobs_to_collect_new
1725 1726
1727 - def write_nevents_unweighted_file(self,jobs,jobs0events):
1728 """writes the nevents_unweighted file in the SubProcesses directory. 1729 We also need to write the jobs that will generate 0 events, 1730 because that makes sure that the cross section from those channels 1731 is taken into account in the event weights (by collect_events.f). 1732 """ 1733 content=[] 1734 for job in jobs: 1735 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1736 lhefile=pjoin(path,'events.lhe') 1737 content.append(' %s %d %9e %9e' % \ 1738 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac'])) 1739 for job in jobs0events: 1740 if job['nevents']==0: 1741 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1]) 1742 lhefile=pjoin(path,'events.lhe') 1743 content.append(' %s %d %9e %9e' % \ 1744 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.)) 1745 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f: 1746 f.write('\n'.join(content)+'\n')
1747
1748 - def write_nevts_files(self,jobs):
1749 """write the nevts files in the SubProcesses/P*/G*/ directories""" 1750 for job in jobs: 1751 with open(pjoin(job['dirname'],'nevts'),'w') as f: 1752 f.write('%i\n' % job['nevents'])
1753
1754 - def combine_split_order_run(self,jobs_to_run):
1755 """Combines jobs and grids from split jobs that have been run""" 1756 # combine the jobs that need to be combined in job 1757 # groups. Simply combine the ones that have the same p_dir and 1758 # same channel. 1759 jobgroups_to_combine=[] 1760 jobs_to_run_new=[] 1761 for job in jobs_to_run: 1762 if job['split'] == 0: 1763 job['combined']=1 1764 jobs_to_run_new.append(job) # this jobs wasn't split 1765 elif job['split'] == 1: 1766 jobgroups_to_combine.append(filter(lambda j: j['p_dir'] == job['p_dir'] and \ 1767 j['channel'] == job['channel'], jobs_to_run)) 1768 else: 1769 continue 1770 for job_group in jobgroups_to_combine: 1771 # Combine the grids (mint-grids & MC-integer grids) first 1772 self.combine_split_order_grids(job_group) 1773 jobs_to_run_new.append(self.combine_split_order_jobs(job_group)) 1774 return jobs_to_run_new
1775
1776 - def combine_split_order_jobs(self,job_group):
1777 """combine the jobs in job_group and return a single summed job""" 1778 # first copy one of the jobs in 'jobs' 1779 sum_job=copy.copy(job_group[0]) 1780 # update the information to have a 'non-split' job: 1781 sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0]) 1782 sum_job['split']=0 1783 sum_job['wgt_mult']=1.0 1784 sum_job['combined']=len(job_group) 1785 # information to be summed: 1786 keys=['niters_done','npoints_done','niters','npoints',\ 1787 'result','resultABS','time_spend'] 1788 keys2=['error','errorABS'] 1789 # information to be summed in quadrature: 1790 for key in keys2: 1791 sum_job[key]=math.pow(sum_job[key],2) 1792 # Loop over the jobs and sum the information 1793 for i,job in enumerate(job_group): 1794 if i==0 : continue # skip the first 1795 for key in keys: 1796 sum_job[key]+=job[key] 1797 for key in keys2: 1798 sum_job[key]+=math.pow(job[key],2) 1799 for key in keys2: 1800 sum_job[key]=math.sqrt(sum_job[key]) 1801 sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100. 1802 sum_job['err_perc'] = sum_job['error']/sum_job['result']*100. 1803 sum_job['niters']=int(sum_job['niters_done']/len(job_group)) 1804 sum_job['niters_done']=int(sum_job['niters_done']/len(job_group)) 1805 return sum_job
1806 1807
1808 - def combine_split_order_grids(self,job_group):
1809 """Combines the mint_grids and MC-integer grids from the split order 1810 jobs (fixed order only). 1811 """ 1812 files_mint_grids=[] 1813 files_MC_integer=[] 1814 location=None 1815 for job in job_group: 1816 files_mint_grids.append(open(pjoin(job['dirname'],'mint_grids'),'r+')) 1817 files_MC_integer.append(open(pjoin(job['dirname'],'grid.MC_integer'),'r+')) 1818 if not location: 1819 location=pjoin(job['dirname'].rsplit('_',1)[0]) 1820 else: 1821 if location != pjoin(job['dirname'].rsplit('_',1)[0]) : 1822 raise aMCatNLOError('Not all jobs have the same location. '\ 1823 +'Cannot combine them.') 1824 # Needed to average the grids (both xgrids, ave_virt and 1825 # MC_integer grids), but sum the cross section info. The 1826 # latter is only the only line that contains integers. 1827 for j,fs in enumerate([files_mint_grids,files_MC_integer]): 1828 linesoffiles=[f.readlines() for f in fs] 1829 to_write=[] 1830 for rowgrp in zip(*linesoffiles): 1831 try: 1832 # check that last element on the line is an 1833 # integer (will raise ValueError if not the 1834 # case). If integer, this is the line that 1835 # contains information that needs to be 1836 # summed. All other lines can be averaged. 1837 is_integer = [[int(row.strip().split()[-1])] for row in rowgrp] 1838 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 1839 floatgrps = zip(*floatsbyfile) 1840 special=[] 1841 for i,floatgrp in enumerate(floatgrps): 1842 if i==0: # sum X-sec 1843 special.append(sum(floatgrp)) 1844 elif i==1: # sum unc in quadrature 1845 special.append(math.sqrt(sum([err**2 for err in floatgrp]))) 1846 elif i==2: # average number of PS per iteration 1847 special.append(int(sum(floatgrp)/len(floatgrp))) 1848 elif i==3: # sum the number of iterations 1849 special.append(int(sum(floatgrp))) 1850 elif i==4: # average the nhits_in_grids 1851 special.append(int(sum(floatgrp)/len(floatgrp))) 1852 else: 1853 raise aMCatNLOError('"mint_grids" files not in correct format. '+\ 1854 'Cannot combine them.') 1855 to_write.append(" ".join(str(s) for s in special) + "\n") 1856 except ValueError: 1857 # just average all 1858 floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp] 1859 floatgrps = zip(*floatsbyfile) 1860 averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps] 1861 to_write.append(" ".join(str(a) for a in averages) + "\n") 1862 # close the files 1863 for f in fs: 1864 f.close 1865 # write the data over the master location 1866 if j==0: 1867 with open(pjoin(location,'mint_grids'),'w') as f: 1868 f.writelines(to_write) 1869 elif j==1: 1870 with open(pjoin(location,'grid.MC_integer'),'w') as f: 1871 f.writelines(to_write)
1872 1873
1874 - def split_jobs_fixed_order(self,jobs_to_run,jobs_to_collect):
1875 """Looks in the jobs_to_run to see if there is the need to split the 1876 jobs, depending on the expected time they take. Updates 1877 jobs_to_run and jobs_to_collect to replace the split-job by 1878 its splits. 1879 """ 1880 # determine the number jobs we should have (this is per p_dir) 1881 if self.options['run_mode'] ==2: 1882 nb_submit = int(self.options['nb_core']) 1883 elif self.options['run_mode'] ==1: 1884 nb_submit = int(self.options['cluster_size']) 1885 else: 1886 nb_submit =1 1887 # total expected aggregated running time 1888 time_expected=0 1889 for job in jobs_to_run: 1890 time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \ 1891 (job['niters_done']*job['npoints_done']) 1892 # this means that we must expect the following per job (in 1893 # ideal conditions) 1894 time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2)) 1895 jobs_to_run_new=[] 1896 jobs_to_collect_new=copy.copy(jobs_to_collect) 1897 for job in jobs_to_run: 1898 # remove current job from jobs_to_collect. Make sure 1899 # to remove all the split ones in case the original 1900 # job had been a split one (before it was re-combined) 1901 for j in filter(lambda j: j['p_dir'] == job['p_dir'] and \ 1902 j['channel'] == job['channel'], jobs_to_collect_new): 1903 jobs_to_collect_new.remove(j) 1904 time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \ 1905 (job['niters_done']*job['npoints_done']) 1906 # if the time expected for this job is (much) larger than 1907 # the time spend in the previous iteration, and larger 1908 # than the expected time per job, split it 1909 if time_expected > max(2*job['time_spend']/job['combined'],time_per_job): 1910 # determine the number of splits needed 1911 nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit) 1912 for i in range(1,nsplit+1): 1913 job_new=copy.copy(job) 1914 job_new['split']=i 1915 job_new['wgt_mult']=1./float(nsplit) 1916 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 1917 job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1) 1918 if nsplit >= job['niters']: 1919 job_new['npoints']=int(job['npoints']*job['niters']/nsplit) 1920 job_new['niters']=1 1921 else: 1922 job_new['npoints']=int(job['npoints']/nsplit) 1923 jobs_to_collect_new.append(job_new) 1924 jobs_to_run_new.append(job_new) 1925 else: 1926 jobs_to_collect_new.append(job) 1927 jobs_to_run_new.append(job) 1928 return jobs_to_run_new,jobs_to_collect_new
1929 1930
1931 - def check_the_need_to_split(self,jobs_to_run,jobs_to_collect):
1932 """Looks in the jobs_to_run to see if there is the need to split the 1933 event generation step. Updates jobs_to_run and 1934 jobs_to_collect to replace the split-job by its 1935 splits. Also removes jobs that do not need any events. 1936 """ 1937 nevt_job=self.run_card['nevt_job'] 1938 if nevt_job > 0: 1939 jobs_to_collect_new=copy.copy(jobs_to_collect) 1940 for job in jobs_to_run: 1941 nevents=job['nevents'] 1942 if nevents == 0: 1943 jobs_to_collect_new.remove(job) 1944 elif nevents > nevt_job: 1945 jobs_to_collect_new.remove(job) 1946 if nevents % nevt_job != 0 : 1947 nsplit=int(nevents/nevt_job)+1 1948 else: 1949 nsplit=int(nevents/nevt_job) 1950 for i in range(1,nsplit+1): 1951 job_new=copy.copy(job) 1952 left_over=nevents % nsplit 1953 if i <= left_over: 1954 job_new['nevents']=int(nevents/nsplit)+1 1955 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1956 else: 1957 job_new['nevents']=int(nevents/nsplit) 1958 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents) 1959 job_new['split']=i 1960 job_new['dirname']=job['dirname']+'_%i' % job_new['split'] 1961 jobs_to_collect_new.append(job_new) 1962 jobs_to_run_new=copy.copy(jobs_to_collect_new) 1963 else: 1964 jobs_to_run_new=copy.copy(jobs_to_collect) 1965 for job in jobs_to_collect: 1966 if job['nevents'] == 0: 1967 jobs_to_run_new.remove(job) 1968 jobs_to_collect_new=copy.copy(jobs_to_run_new) 1969 1970 return jobs_to_run_new,jobs_to_collect_new
1971 1972
1973 - def update_jobs_to_run(self,req_acc,step,jobs,fixed_order=True):
1974 """ 1975 For (N)LO+PS: determines the number of events and/or the required 1976 accuracy per job. 1977 For fixed order: determines which jobs need higher precision and 1978 returns those with the newly requested precision. 1979 """ 1980 err=self.cross_sect_dict['errt'] 1981 tot=self.cross_sect_dict['xsect'] 1982 errABS=self.cross_sect_dict['erra'] 1983 totABS=self.cross_sect_dict['xseca'] 1984 jobs_new=[] 1985 if fixed_order: 1986 if req_acc == -1: 1987 if step+1 == 1: 1988 npoints = self.run_card['npoints_FO'] 1989 niters = self.run_card['niters_FO'] 1990 for job in jobs: 1991 job['mint_mode']=-1 1992 job['niters']=niters 1993 job['npoints']=npoints 1994 jobs_new.append(job) 1995 elif step+1 == 2: 1996 pass 1997 elif step+1 > 2: 1998 raise aMCatNLOError('Cannot determine number of iterations and PS points '+ 1999 'for integration step %i' % step ) 2000 elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0: 2001 req_accABS=req_acc*abs(tot)/totABS # overal relative required accuracy on ABS Xsec. 2002 for job in jobs: 2003 job['mint_mode']=-1 2004 # Determine relative required accuracy on the ABS for this job 2005 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS']) 2006 # If already accurate enough, skip the job (except when doing the first 2007 # step for the iappl=2 run: we need to fill all the applgrid grids!) 2008 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \ 2009 and not (step==-1 and self.run_card['iappl'] == 2): 2010 continue 2011 # Update the number of PS points based on errorABS, ncall and accuracy 2012 itmax_fl=job['niters_done']*math.pow(job['errorABS']/ 2013 (job['accuracy']*job['resultABS']),2) 2014 if itmax_fl <= 4.0 : 2015 job['niters']=max(int(round(itmax_fl)),2) 2016 job['npoints']=job['npoints_done']*2 2017 elif itmax_fl > 4.0 and itmax_fl <= 16.0 : 2018 job['niters']=4 2019 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2 2020 else: 2021 if itmax_fl > 100.0 : itmax_fl=50.0 2022 job['niters']=int(round(math.sqrt(itmax_fl))) 2023 job['npoints']=int(round(job['npoints_done']*itmax_fl/ 2024 round(math.sqrt(itmax_fl))))*2 2025 # Add the job to the list of jobs that need to be run 2026 jobs_new.append(job) 2027 return jobs_new 2028 elif step+1 <= 2: 2029 nevents=self.run_card['nevents'] 2030 # Total required accuracy for the upper bounding envelope 2031 if req_acc<0: 2032 req_acc2_inv=nevents 2033 else: 2034 req_acc2_inv=1/(req_acc*req_acc) 2035 if step+1 == 1 or step+1 == 2 : 2036 # determine the req. accuracy for each of the jobs for Mint-step = 1 2037 for job in jobs: 2038 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2) 2039 job['accuracy']=accuracy 2040 if step+1 == 2: 2041 # Randomly (based on the relative ABS Xsec of the job) determine the 2042 # number of events each job needs to generate for MINT-step = 2. 2043 r=self.get_randinit_seed() 2044 random.seed(r) 2045 totevts=nevents 2046 for job in jobs: 2047 job['nevents'] = 0 2048 while totevts : 2049 target = random.random() * totABS 2050 crosssum = 0. 2051 i = 0 2052 while i<len(jobs) and crosssum < target: 2053 job = jobs[i] 2054 crosssum += job['resultABS'] 2055 i += 1 2056 totevts -= 1 2057 i -= 1 2058 jobs[i]['nevents'] += 1 2059 for job in jobs: 2060 job['mint_mode']=step+1 # next step 2061 return jobs 2062 else: 2063 return []
2064 2065
2066 - def get_randinit_seed(self):
2067 """ Get the random number seed from the randinit file """ 2068 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit: 2069 # format of the file is "r=%d". 2070 iseed = int(randinit.read()[2:]) 2071 return iseed
2072 2073
2074 - def append_the_results(self,jobs,integration_step):
2075 """Appends the results for each of the jobs in the job list""" 2076 error_found=False 2077 for job in jobs: 2078 try: 2079 if integration_step >= 0 : 2080 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file: 2081 results=res_file.readline().split() 2082 else: 2083 # should only be here when doing fixed order with the 'only_generation' 2084 # option equal to True. Take the results from the final run done. 2085 with open(pjoin(job['dirname'],'res.dat')) as res_file: 2086 results=res_file.readline().split() 2087 except IOError: 2088 if not error_found: 2089 error_found=True 2090 error_log=[] 2091 error_log.append(pjoin(job['dirname'],'log.txt')) 2092 continue 2093 job['resultABS']=float(results[0]) 2094 job['errorABS']=float(results[1]) 2095 job['result']=float(results[2]) 2096 job['error']=float(results[3]) 2097 job['niters_done']=int(results[4]) 2098 job['npoints_done']=int(results[5]) 2099 job['time_spend']=float(results[6]) 2100 job['err_percABS'] = job['errorABS']/job['resultABS']*100. 2101 job['err_perc'] = job['error']/job['result']*100. 2102 if error_found: 2103 raise aMCatNLOError('An error occurred during the collection of results.\n' + 2104 'Please check the .log files inside the directories which failed:\n' + 2105 '\n'.join(error_log)+'\n')
2106 2107 2108
2109 - def write_res_txt_file(self,jobs,integration_step):
2110 """writes the res.txt files in the SubProcess dir""" 2111 jobs.sort(key = lambda job: -job['errorABS']) 2112 content=[] 2113 content.append('\n\nCross section per integration channel:') 2114 for job in jobs: 2115 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job) 2116 content.append('\n\nABS cross section per integration channel:') 2117 for job in jobs: 2118 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job) 2119 totABS=0 2120 errABS=0 2121 tot=0 2122 err=0 2123 for job in jobs: 2124 totABS+= job['resultABS']*job['wgt_frac'] 2125 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac'] 2126 tot+= job['result']*job['wgt_frac'] 2127 err+= math.pow(job['error'],2)*job['wgt_frac'] 2128 if jobs: 2129 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\ 2130 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\ 2131 tot, math.sqrt(err), math.sqrt(err)/tot *100.)) 2132 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file: 2133 res_file.write('\n'.join(content)) 2134 randinit=self.get_randinit_seed() 2135 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\ 2136 'erra':math.sqrt(errABS),'randinit':randinit}
2137 2138
2139 - def collect_scale_pdf_info(self,options,jobs):
2140 """read the scale_pdf_dependence.dat files and collects there results""" 2141 scale_pdf_info=[] 2142 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 2143 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 2144 evt_files=[] 2145 evt_wghts=[] 2146 for job in jobs: 2147 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat')) 2148 evt_wghts.append(job['wgt_frac']) 2149 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts) 2150 return scale_pdf_info
2151 2152
2153 - def combine_plots_FO(self,folder_name,jobs):
2154 """combines the plots and puts then in the Events/run* directory""" 2155 devnull = open(os.devnull, 'w') 2156 2157 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer': 2158 misc.call(['./combine_plots_FO.sh'] + folder_name, \ 2159 stdout=devnull, 2160 cwd=pjoin(self.me_dir, 'SubProcesses')) 2161 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'), 2162 pjoin(self.me_dir, 'Events', self.run_name)) 2163 logger.info('The results of this run and the TopDrawer file with the plots' + \ 2164 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2165 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu': 2166 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO') 2167 self.combine_plots_HwU(jobs,out) 2168 try: 2169 misc.call(['gnuplot','MADatNLO.gnuplot'],\ 2170 stdout=devnull,stderr=devnull,\ 2171 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 2172 except Exception: 2173 pass 2174 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \ 2175 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2176 elif self.analyse_card['fo_analysis_format'].lower() == 'root': 2177 misc.call(['./combine_root.sh'] + folder_name, \ 2178 stdout=devnull, 2179 cwd=pjoin(self.me_dir, 'SubProcesses')) 2180 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'), 2181 pjoin(self.me_dir, 'Events', self.run_name)) 2182 logger.info('The results of this run and the ROOT file with the plots' + \ 2183 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2184 elif self.analyse_card['fo_analysis_format'].lower() == 'lhe': 2185 self.combine_FO_lhe(jobs) 2186 logger.info('The results of this run and the LHE File (to be used for plotting only)' + \ 2187 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name)) 2188 else: 2189 logger.info('The results of this run' + \ 2190 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2191
2192 - def combine_FO_lhe(self,jobs):
2193 """combine the various lhe file generated in each directory. 2194 They are two steps: 2195 1) banner 2196 2) reweight each sample by the factor written at the end of each file 2197 3) concatenate each of the new files (gzip those). 2198 """ 2199 2200 logger.info('Combining lhe events for plotting analysis') 2201 start = time.time() 2202 self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']] 2203 output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 2204 if os.path.exists(output): 2205 os.remove(output) 2206 2207 2208 2209 2210 # 1. write the banner 2211 text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read() 2212 i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>') 2213 self.banner['initrwgt'] = text[10+i1:i2] 2214 # 2215 # <init> 2216 # 2212 2212 6.500000e+03 6.500000e+03 0 0 247000 247000 -4 1 2217 # 8.430000e+02 2.132160e+00 8.430000e+02 1 2218 # <generator name='MadGraph5_aMC@NLO' version='2.5.2'>please cite 1405.0301 </generator> 2219 # </init> 2220 2221 cross = sum(j['result'] for j in jobs) 2222 error = math.sqrt(sum(j['error'] for j in jobs)) 2223 self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross) 2224 self.banner.write(output[:-3], close_tag=False) 2225 misc.gzip(output[:-3]) 2226 2227 2228 2229 fsock = lhe_parser.EventFile(output,'a') 2230 if 'nogrouping' in self.run_card['fo_lhe_postprocessing']: 2231 fsock.eventgroup = False 2232 else: 2233 fsock.eventgroup = True 2234 2235 if 'norandom' in self.run_card['fo_lhe_postprocessing']: 2236 for job in jobs: 2237 dirname = job['dirname'] 2238 #read last line 2239 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2240 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2241 # get normalisation ratio 2242 ratio = cross/sumwgt 2243 lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe')) 2244 lhe.eventgroup = True # read the events by eventgroup 2245 for eventsgroup in lhe: 2246 neweventsgroup = [] 2247 for i,event in enumerate(eventsgroup): 2248 event.rescale_weights(ratio) 2249 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2250 and event == neweventsgroup[-1]: 2251 neweventsgroup[-1].wgt += event.wgt 2252 for key in event.reweight_data: 2253 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2254 else: 2255 neweventsgroup.append(event) 2256 fsock.write_events(neweventsgroup) 2257 lhe.close() 2258 os.remove(pjoin(dirname,'events.lhe')) 2259 else: 2260 lhe = [] 2261 lenlhe = [] 2262 misc.sprint('need to combine %s event file' % len(jobs)) 2263 globallhe = lhe_parser.MultiEventFile() 2264 globallhe.eventgroup = True 2265 for job in jobs: 2266 dirname = job['dirname'] 2267 lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline() 2268 nb_event, sumwgt, cross = [float(i) for i in lastline.split()] 2269 lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross, 2270 nb_event=int(nb_event), scale=cross/sumwgt) 2271 for eventsgroup in globallhe: 2272 neweventsgroup = [] 2273 for i,event in enumerate(eventsgroup): 2274 event.rescale_weights(event.sample_scale) 2275 if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \ 2276 and event == neweventsgroup[-1]: 2277 neweventsgroup[-1].wgt += event.wgt 2278 for key in event.reweight_data: 2279 neweventsgroup[-1].reweight_data[key] += event.reweight_data[key] 2280 else: 2281 neweventsgroup.append(event) 2282 fsock.write_events(neweventsgroup) 2283 globallhe.close() 2284 fsock.write('</LesHouchesEvents>\n') 2285 fsock.close() 2286 misc.sprint('combining lhe file done in ', time.time()-start) 2287 for job in jobs: 2288 dirname = job['dirname'] 2289 os.remove(pjoin(dirname,'events.lhe')) 2290 2291 2292 2293 misc.sprint('combining lhe file done in ', time.time()-start)
2294 2295 2296 2297 2298 2299
2300 - def combine_plots_HwU(self,jobs,out,normalisation=None):
2301 """Sums all the plots in the HwU format.""" 2302 logger.debug('Combining HwU plots.') 2303 2304 command = [] 2305 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py')) 2306 for job in jobs: 2307 if job['dirname'].endswith('.HwU'): 2308 command.append(job['dirname']) 2309 else: 2310 command.append(pjoin(job['dirname'],'MADatNLO.HwU')) 2311 command.append("--out="+out) 2312 command.append("--gnuplot") 2313 command.append("--band=[]") 2314 command.append("--lhapdf-config="+self.options['lhapdf']) 2315 if normalisation: 2316 command.append("--multiply="+(','.join([str(n) for n in normalisation]))) 2317 command.append("--sum") 2318 command.append("--keep_all_weights") 2319 command.append("--no_open") 2320 2321 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir) 2322 2323 while p.poll() is None: 2324 line = p.stdout.readline() 2325 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']): 2326 print line[:-1] 2327 elif __debug__ and line: 2328 logger.debug(line[:-1])
2329 2330
2331 - def applgrid_combine(self,cross,error,jobs):
2332 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories""" 2333 logger.debug('Combining APPLgrids \n') 2334 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'), 2335 'applgrid-combine') 2336 all_jobs=[] 2337 for job in jobs: 2338 all_jobs.append(job['dirname']) 2339 ngrids=len(all_jobs) 2340 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")]) 2341 for obs in range(0,nobs): 2342 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs] 2343 # combine APPLgrids from different channels for observable 'obs' 2344 if self.run_card["iappl"] == 1: 2345 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name, 2346 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir) 2347 elif self.run_card["iappl"] == 2: 2348 unc2_inv=pow(cross/error,2) 2349 unc2_inv_ngrids=pow(cross/error,2)*ngrids 2350 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events", 2351 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s', 2352 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir) 2353 for job in all_jobs: 2354 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root")) 2355 else: 2356 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2') 2357 # after combining, delete the original grids 2358 for ggdir in gdir: 2359 os.remove(ggdir)
2360 2361
2362 - def applgrid_distribute(self,options,mode,p_dirs):
2363 """Distributes the APPLgrids ready to be filled by a second run of the code""" 2364 # if no appl_start_grid argument given, guess it from the time stamps 2365 # of the starting grid files 2366 if not('appl_start_grid' in options.keys() and options['appl_start_grid']): 2367 gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'), 2368 pjoin(self.me_dir,'Events')) 2369 2370 time_stamps={} 2371 for root_file in gfiles: 2372 time_stamps[root_file]=os.path.getmtime(root_file) 2373 options['appl_start_grid']= \ 2374 max(time_stamps.iterkeys(), key=(lambda key: 2375 time_stamps[key])).split('/')[-2] 2376 logger.info('No --appl_start_grid option given. '+\ 2377 'Guessing that start grid from run "%s" should be used.' \ 2378 % options['appl_start_grid']) 2379 2380 if 'appl_start_grid' in options.keys() and options['appl_start_grid']: 2381 self.appl_start_grid = options['appl_start_grid'] 2382 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid) 2383 # check that this dir exists and at least one grid file is there 2384 if not os.path.exists(pjoin(start_grid_dir, 2385 'aMCfast_obs_0_starting_grid.root')): 2386 raise self.InvalidCmd('APPLgrid file not found: %s' % \ 2387 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')) 2388 else: 2389 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \ 2390 start_grid_dir) if name.endswith("_starting_grid.root")] 2391 nobs =len(all_grids) 2392 gstring=" ".join(all_grids) 2393 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid: 2394 raise self.InvalidCmd('No APPLgrid name currently defined.'+ 2395 'Please provide this information.') 2396 #copy the grid to all relevant directories 2397 for pdir in p_dirs: 2398 g_dirs = [file for file in os.listdir(pjoin(self.me_dir, 2399 "SubProcesses",pdir)) if file.startswith(mode+'_G') and 2400 os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))] 2401 for g_dir in g_dirs: 2402 for grid in all_grids: 2403 obs=grid.split('_')[-3] 2404 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir, 2405 'grid_obs_'+obs+'_in.root'))
2406 2407 2408 2409
2410 - def collect_log_files(self, jobs, integration_step):
2411 """collect the log files and put them in a single, html-friendly file 2412 inside the Events/run_.../ directory""" 2413 log_file = pjoin(self.me_dir, 'Events', self.run_name, 2414 'alllogs_%d.html' % integration_step) 2415 outfile = open(log_file, 'w') 2416 2417 content = '' 2418 content += '<HTML><BODY>\n<font face="courier" size=2>' 2419 for job in jobs: 2420 # put an anchor 2421 log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step) 2422 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace( 2423 pjoin(self.me_dir,'SubProcesses'),'')) 2424 # and put some nice header 2425 content += '<font color="red">\n' 2426 content += '<br>LOG file for integration channel %s, %s <br>' % \ 2427 (os.path.dirname(log).replace(pjoin(self.me_dir, 2428 'SubProcesses'), ''), 2429 integration_step) 2430 content += '</font>\n' 2431 #then just flush the content of the small log inside the big log 2432 #the PRE tag prints everything verbatim 2433 with open(log) as l: 2434 content += '<PRE>\n' + l.read() + '\n</PRE>' 2435 content +='<br>\n' 2436 outfile.write(content) 2437 content='' 2438 2439 outfile.write('</font>\n</BODY></HTML>\n') 2440 outfile.close()
2441 2442
2443 - def finalise_run_FO(self,folder_name,jobs):
2444 """Combine the plots and put the res*.txt files in the Events/run.../ folder.""" 2445 # Copy the res_*.txt files to the Events/run* folder 2446 res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses')) 2447 for res_file in res_files: 2448 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 2449 # Collect the plots and put them in the Events/run* folder 2450 self.combine_plots_FO(folder_name,jobs) 2451 # If doing the applgrid-stuff, also combine those grids 2452 # and put those in the Events/run* folder 2453 if self.run_card['iappl'] != 0: 2454 cross=self.cross_sect_dict['xsect'] 2455 error=self.cross_sect_dict['errt'] 2456 self.applgrid_combine(cross,error,jobs)
2457 2458
2459 - def setup_cluster_or_multicore(self):
2460 """setup the number of cores for multicore, and the cluster-type for cluster runs""" 2461 if self.cluster_mode == 1: 2462 cluster_name = self.options['cluster_type'] 2463 try: 2464 self.cluster = cluster.from_name[cluster_name](**self.options) 2465 except KeyError: 2466 if aMCatNLO and ('mg5_path' not in self.options or not self.options['mg5_path']): 2467 if not self.plugin_path: 2468 raise self.InvalidCmd('%s not native cluster type and no plugin directory available.' % cluster_name) 2469 elif aMCatNLO: 2470 mg5dir = self.options['mg5_path'] 2471 if mg5dir not in sys.path: 2472 sys.path.append(mg5dir) 2473 if pjoin(mg5dir, 'PLUGIN') not in self.plugin_path: 2474 self.plugin_path.append(pjoin(mg5dir)) 2475 else: 2476 mg5dir = MG5DIR 2477 # Check if a plugin define this type of cluster 2478 # check for PLUGIN format 2479 for plugpath in self.plugin_path: 2480 plugindirname = os.path.basename(plugpath) 2481 for plug in os.listdir(plugpath): 2482 if os.path.exists(pjoin(plugpath, plug, '__init__.py')): 2483 try: 2484 __import__('%s.%s' % (plugindirname, plug)) 2485 except Exception, error: 2486 logger.critical('plugin directory %s/%s fail to be loaded. Please check it',plugindirname, plug) 2487 continue 2488 plugin = sys.modules['%s.%s' % (plugindirname,plug)] 2489 if not hasattr(plugin, 'new_cluster'): 2490 continue 2491 if not misc.is_plugin_supported(plugin): 2492 continue 2493 if cluster_name in plugin.new_cluster: 2494 logger.info("cluster handling will be done with PLUGIN: %s" % plug,'$MG:color:BLACK') 2495 self.cluster = plugin.new_cluster[cluster_name](**self.options) 2496 break 2497 2498 if self.cluster_mode == 2: 2499 try: 2500 import multiprocessing 2501 if not self.nb_core: 2502 try: 2503 self.nb_core = int(self.options['nb_core']) 2504 except TypeError: 2505 self.nb_core = multiprocessing.cpu_count() 2506 logger.info('Using %d cores' % self.nb_core) 2507 except ImportError: 2508 self.nb_core = 1 2509 logger.warning('Impossible to detect the number of cores => Using One.\n'+ 2510 'Use set nb_core X in order to set this number and be able to'+ 2511 'run in multicore.') 2512 2513 self.cluster = cluster.MultiCore(**self.options)
2514 2515
2516 - def clean_previous_results(self,options,p_dirs,folder_name):
2517 """Clean previous results. 2518 o. If doing only the reweighting step, do not delete anything and return directlty. 2519 o. Always remove all the G*_* files (from split event generation). 2520 o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only.""" 2521 if options['reweightonly']: 2522 return 2523 if not options['only_generation']: 2524 self.update_status('Cleaning previous results', level=None) 2525 for dir in p_dirs: 2526 #find old folders to be removed 2527 for obj in folder_name: 2528 # list all the G* (or all_G* or born_G*) directories 2529 to_rm = [file for file in \ 2530 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2531 if file.startswith(obj[:-1]) and \ 2532 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2533 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2534 # list all the G*_* directories (from split event generation) 2535 to_always_rm = [file for file in \ 2536 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \ 2537 if file.startswith(obj[:-1]) and 2538 '_' in file and not '_G' in file and \ 2539 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \ 2540 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))] 2541 2542 if not options['only_generation']: 2543 to_always_rm.extend(to_rm) 2544 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')): 2545 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')) 2546 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm]) 2547 return
2548 2549
2550 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
2551 """print a summary of the results contained in self.cross_sect_dict. 2552 step corresponds to the mintMC step, if =2 (i.e. after event generation) 2553 some additional infos are printed""" 2554 # find process name 2555 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n') 2556 process = '' 2557 for line in proc_card_lines: 2558 if line.startswith('generate') or line.startswith('add process'): 2559 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; ' 2560 lpp = {0:'l', 1:'p', -1:'pbar'} 2561 if self.ninitial == 1: 2562 proc_info = '\n Process %s' % process[:-3] 2563 else: 2564 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \ 2565 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']], 2566 self.run_card['ebeam1'], self.run_card['ebeam2']) 2567 2568 if self.ninitial == 1: 2569 self.cross_sect_dict['unit']='GeV' 2570 self.cross_sect_dict['xsec_string']='(Partial) decay width' 2571 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)' 2572 else: 2573 self.cross_sect_dict['unit']='pb' 2574 self.cross_sect_dict['xsec_string']='Total cross section' 2575 self.cross_sect_dict['axsec_string']='Total abs(cross section)' 2576 2577 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2578 status = ['Determining the number of unweighted events per channel', 2579 'Updating the number of unweighted events per channel', 2580 'Summary:'] 2581 computed='(computed from LHE events)' 2582 elif mode in ['NLO', 'LO']: 2583 status = ['Results after grid setup:','Current results:', 2584 'Final results and run summary:'] 2585 computed='(computed from histogram information)' 2586 2587 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2588 message = status[step] + '\n\n Intermediate results:' + \ 2589 ('\n Random seed: %(randinit)d' + \ 2590 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \ 2591 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \ 2592 % self.cross_sect_dict 2593 elif mode in ['NLO','LO'] and not done: 2594 if step == 0: 2595 message = '\n ' + status[0] + \ 2596 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2597 self.cross_sect_dict 2598 else: 2599 message = '\n ' + status[1] + \ 2600 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2601 self.cross_sect_dict 2602 2603 else: 2604 message = '\n --------------------------------------------------------------' 2605 message = message + \ 2606 '\n ' + status[2] + proc_info 2607 if mode not in ['LO', 'NLO']: 2608 message = message + \ 2609 '\n Number of events generated: %s' % self.run_card['nevents'] 2610 message = message + \ 2611 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \ 2612 self.cross_sect_dict 2613 message = message + \ 2614 '\n --------------------------------------------------------------' 2615 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']): 2616 if scale_pdf_info[0]: 2617 # scale uncertainties 2618 message = message + '\n Scale variation %s:' % computed 2619 for s in scale_pdf_info[0]: 2620 if s['unc']: 2621 if self.run_card['ickkw'] != -1: 2622 message = message + \ 2623 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\ 2624 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s 2625 else: 2626 message = message + \ 2627 ('\n Soft and hard scale dependence (added in quadrature): '\ 2628 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s 2629 2630 else: 2631 message = message + \ 2632 ('\n Dynamical_scale_choice %(label)i: '\ 2633 '\n %(cen)8.3e pb') % s 2634 2635 if scale_pdf_info[1]: 2636 message = message + '\n PDF variation %s:' % computed 2637 for p in scale_pdf_info[1]: 2638 if p['unc']=='none': 2639 message = message + \ 2640 ('\n %(name)s (central value only): '\ 2641 '\n %(cen)8.3e pb') % p 2642 2643 elif p['unc']=='unknown': 2644 message = message + \ 2645 ('\n %(name)s (%(size)s members; combination method unknown): '\ 2646 '\n %(cen)8.3e pb') % p 2647 else: 2648 message = message + \ 2649 ('\n %(name)s (%(size)s members; using %(unc)s method): '\ 2650 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p 2651 # pdf uncertainties 2652 message = message + \ 2653 '\n --------------------------------------------------------------' 2654 2655 2656 if (mode in ['NLO', 'LO'] and not done) or \ 2657 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2): 2658 logger.info(message+'\n') 2659 return 2660 2661 # Some advanced general statistics are shown in the debug message at the 2662 # end of the run 2663 # Make sure it never stops a run 2664 # Gather some basic statistics for the run and extracted from the log files. 2665 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']: 2666 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'), 2667 pjoin(self.me_dir, 'SubProcesses')) 2668 all_log_files = log_GV_files 2669 elif mode == 'NLO': 2670 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'), 2671 pjoin(self.me_dir, 'SubProcesses')) 2672 all_log_files = log_GV_files 2673 2674 elif mode == 'LO': 2675 log_GV_files = '' 2676 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'), 2677 pjoin(self.me_dir, 'SubProcesses')) 2678 else: 2679 raise aMCatNLOError, 'Running mode %s not supported.'%mode 2680 2681 try: 2682 message, debug_msg = \ 2683 self.compile_advanced_stats(log_GV_files, all_log_files, message) 2684 except Exception as e: 2685 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e) 2686 err_string = StringIO.StringIO() 2687 traceback.print_exc(limit=4, file=err_string) 2688 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\ 2689 %err_string.getvalue() 2690 2691 logger.debug(debug_msg+'\n') 2692 logger.info(message+'\n') 2693 2694 # Now copy relevant information in the Events/Run_<xxx> directory 2695 evt_path = pjoin(self.me_dir, 'Events', self.run_name) 2696 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n') 2697 open(pjoin(evt_path, '.full_summary.txt'), 2698 'w').write(message+'\n\n'+debug_msg+'\n') 2699 2700 self.archive_files(evt_path,mode)
2701
2702 - def archive_files(self, evt_path, mode):
2703 """ Copies in the Events/Run_<xxx> directory relevant files characterizing 2704 the run.""" 2705 2706 files_to_arxiv = [pjoin('Cards','param_card.dat'), 2707 pjoin('Cards','MadLoopParams.dat'), 2708 pjoin('Cards','FKS_params.dat'), 2709 pjoin('Cards','run_card.dat'), 2710 pjoin('Subprocesses','setscales.f'), 2711 pjoin('Subprocesses','cuts.f')] 2712 2713 if mode in ['NLO', 'LO']: 2714 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat')) 2715 2716 if not os.path.exists(pjoin(evt_path,'RunMaterial')): 2717 os.mkdir(pjoin(evt_path,'RunMaterial')) 2718 2719 for path in files_to_arxiv: 2720 if os.path.isfile(pjoin(self.me_dir,path)): 2721 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial')) 2722 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path) 2723 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2724
2725 - def compile_advanced_stats(self,log_GV_files,all_log_files,message):
2726 """ This functions goes through the log files given in arguments and 2727 compiles statistics about MadLoop stability, virtual integration 2728 optimization and detection of potential error messages into a nice 2729 debug message to printed at the end of the run """ 2730 2731 def safe_float(str_float): 2732 try: 2733 return float(str_float) 2734 except ValueError: 2735 logger.debug('Could not convert the following float during'+ 2736 ' advanced statistics printout: %s'%str(str_float)) 2737 return -1.0
2738 2739 2740 # > UPS is a dictionary of tuples with this format {channel:[nPS,nUPS]} 2741 # > Errors is a list of tuples with this format (log_file,nErrors) 2742 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}} 2743 mint_search = re.compile(r"MINT(?P<ID>\d*).txt") 2744 2745 # ================================== 2746 # == MadLoop stability statistics == 2747 # ================================== 2748 2749 # Recuperate the fraction of unstable PS points found in the runs for 2750 # the virtuals 2751 UPS_stat_finder = re.compile( 2752 r"Satistics from MadLoop:.*"+\ 2753 r"Total points tried\:\s+(?P<ntot>\d+).*"+\ 2754 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\ 2755 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\ 2756 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\ 2757 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\ 2758 r"Double precision used\:\s+(?P<nddp>\d+).*"+\ 2759 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\ 2760 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\ 2761 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\ 2762 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL) 2763 2764 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)', 2765 1 : 'CutTools (double precision)', 2766 2 : 'PJFry++', 2767 3 : 'IREGI', 2768 4 : 'Golem95', 2769 5 : 'Samurai', 2770 6 : 'Ninja (double precision)', 2771 7 : 'COLLIER', 2772 8 : 'Ninja (quadruple precision)', 2773 9 : 'CutTools (quadruple precision)'} 2774 RetUnit_finder =re.compile( 2775 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)") 2776 #Unit 2777 2778 for gv_log in log_GV_files: 2779 channel_name = '/'.join(gv_log.split('/')[-5:-1]) 2780 log=open(gv_log,'r').read() 2781 UPS_stats = re.search(UPS_stat_finder,log) 2782 for retunit_stats in re.finditer(RetUnit_finder, log): 2783 if channel_name not in stats['UPS'].keys(): 2784 stats['UPS'][channel_name] = [0]*10+[[0]*10] 2785 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \ 2786 += int(retunit_stats.group('n_occurences')) 2787 if not UPS_stats is None: 2788 try: 2789 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot')) 2790 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun')) 2791 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps')) 2792 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups')) 2793 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps')) 2794 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp')) 2795 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp')) 2796 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini')) 2797 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100')) 2798 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10')) 2799 except KeyError: 2800 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')), 2801 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')), 2802 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')), 2803 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')), 2804 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')), 2805 int(UPS_stats.group('n10')),[0]*10] 2806 debug_msg = "" 2807 if len(stats['UPS'].keys())>0: 2808 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0) 2809 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0) 2810 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0) 2811 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0) 2812 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0) 2813 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0) 2814 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0) 2815 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0) 2816 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0) 2817 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0) 2818 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \ 2819 for i in range(10)] 2820 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \ 2821 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()] 2822 maxUPS = max(UPSfracs, key = lambda w: w[1]) 2823 2824 tmpStr = "" 2825 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS 2826 tmpStr += '\n Stability unknown: %d'%nTotsun 2827 tmpStr += '\n Stable PS point: %d'%nTotsps 2828 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups 2829 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps 2830 tmpStr += '\n Only double precision used: %d'%nTotddp 2831 tmpStr += '\n Quadruple precision used: %d'%nTotqdp 2832 tmpStr += '\n Initialization phase-space points: %d'%nTotini 2833 tmpStr += '\n Reduction methods used:' 2834 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \ 2835 unit_code_meaning.keys() if nTot1[i]>0] 2836 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True): 2837 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n) 2838 if nTot100 != 0: 2839 debug_msg += '\n Unknown return code (100): %d'%nTot100 2840 if nTot10 != 0: 2841 debug_msg += '\n Unknown return code (10): %d'%nTot10 2842 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \ 2843 not in unit_code_meaning.keys()) 2844 if nUnknownUnit != 0: 2845 debug_msg += '\n Unknown return code (1): %d'\ 2846 %nUnknownUnit 2847 2848 if maxUPS[1]>0.001: 2849 message += tmpStr 2850 message += '\n Total number of unstable PS point detected:'+\ 2851 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS) 2852 message += '\n Maximum fraction of UPS points in '+\ 2853 'channel %s (%4.2f%%)'%maxUPS 2854 message += '\n Please report this to the authors while '+\ 2855 'providing the file' 2856 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir), 2857 maxUPS[0],'UPS.log')) 2858 else: 2859 debug_msg += tmpStr 2860 2861 2862 # ==================================================== 2863 # == aMC@NLO virtual integration optimization stats == 2864 # ==================================================== 2865 2866 virt_tricks_finder = re.compile( 2867 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\ 2868 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\ 2869 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\ 2870 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)") 2871 2872 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\ 2873 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)") 2874 2875 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)") 2876 2877 channel_contr_list = {} 2878 for gv_log in log_GV_files: 2879 logfile=open(gv_log,'r') 2880 log = logfile.read() 2881 logfile.close() 2882 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2883 vf_stats = None 2884 for vf_stats in re.finditer(virt_frac_finder, log): 2885 pass 2886 if not vf_stats is None: 2887 v_frac = safe_float(vf_stats.group('v_frac')) 2888 v_average = safe_float(vf_stats.group('v_average')) 2889 try: 2890 if v_frac < stats['virt_stats']['v_frac_min'][0]: 2891 stats['virt_stats']['v_frac_min']=(v_frac,channel_name) 2892 if v_frac > stats['virt_stats']['v_frac_max'][0]: 2893 stats['virt_stats']['v_frac_max']=(v_frac,channel_name) 2894 stats['virt_stats']['v_frac_avg'][0] += v_frac 2895 stats['virt_stats']['v_frac_avg'][1] += 1 2896 except KeyError: 2897 stats['virt_stats']['v_frac_min']=[v_frac,channel_name] 2898 stats['virt_stats']['v_frac_max']=[v_frac,channel_name] 2899 stats['virt_stats']['v_frac_avg']=[v_frac,1] 2900 2901 2902 ccontr_stats = None 2903 for ccontr_stats in re.finditer(channel_contr_finder, log): 2904 pass 2905 if not ccontr_stats is None: 2906 contrib = safe_float(ccontr_stats.group('v_contr')) 2907 try: 2908 if contrib>channel_contr_list[channel_name]: 2909 channel_contr_list[channel_name]=contrib 2910 except KeyError: 2911 channel_contr_list[channel_name]=contrib 2912 2913 2914 # Now build the list of relevant virt log files to look for the maxima 2915 # of virt fractions and such. 2916 average_contrib = 0.0 2917 for value in channel_contr_list.values(): 2918 average_contrib += value 2919 if len(channel_contr_list.values()) !=0: 2920 average_contrib = average_contrib / len(channel_contr_list.values()) 2921 2922 relevant_log_GV_files = [] 2923 excluded_channels = set([]) 2924 all_channels = set([]) 2925 for log_file in log_GV_files: 2926 channel_name = '/'.join(log_file.split('/')[-3:-1]) 2927 all_channels.add(channel_name) 2928 try: 2929 if channel_contr_list[channel_name] > (0.1*average_contrib): 2930 relevant_log_GV_files.append(log_file) 2931 else: 2932 excluded_channels.add(channel_name) 2933 except KeyError: 2934 relevant_log_GV_files.append(log_file) 2935 2936 # Now we want to use the latest occurence of accumulated result in the log file 2937 for gv_log in relevant_log_GV_files: 2938 logfile=open(gv_log,'r') 2939 log = logfile.read() 2940 logfile.close() 2941 channel_name = '/'.join(gv_log.split('/')[-3:-1]) 2942 2943 vt_stats = None 2944 for vt_stats in re.finditer(virt_tricks_finder, log): 2945 pass 2946 if not vt_stats is None: 2947 vt_stats_group = vt_stats.groupdict() 2948 v_ratio = safe_float(vt_stats.group('v_ratio')) 2949 v_ratio_err = safe_float(vt_stats.group('v_ratio_err')) 2950 v_contr = safe_float(vt_stats.group('v_abs_contr')) 2951 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err')) 2952 try: 2953 if v_ratio < stats['virt_stats']['v_ratio_min'][0]: 2954 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name) 2955 if v_ratio > stats['virt_stats']['v_ratio_max'][0]: 2956 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name) 2957 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]: 2958 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name) 2959 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]: 2960 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name) 2961 if v_contr < stats['virt_stats']['v_contr_min'][0]: 2962 stats['virt_stats']['v_contr_min']=(v_contr,channel_name) 2963 if v_contr > stats['virt_stats']['v_contr_max'][0]: 2964 stats['virt_stats']['v_contr_max']=(v_contr,channel_name) 2965 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]: 2966 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name) 2967 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]: 2968 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name) 2969 except KeyError: 2970 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name] 2971 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name] 2972 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name] 2973 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name] 2974 stats['virt_stats']['v_contr_min']=[v_contr,channel_name] 2975 stats['virt_stats']['v_contr_max']=[v_contr,channel_name] 2976 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name] 2977 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name] 2978 2979 vf_stats = None 2980 for vf_stats in re.finditer(virt_frac_finder, log): 2981 pass 2982 if not vf_stats is None: 2983 v_frac = safe_float(vf_stats.group('v_frac')) 2984 v_average = safe_float(vf_stats.group('v_average')) 2985 try: 2986 if v_average < stats['virt_stats']['v_average_min'][0]: 2987 stats['virt_stats']['v_average_min']=(v_average,channel_name) 2988 if v_average > stats['virt_stats']['v_average_max'][0]: 2989 stats['virt_stats']['v_average_max']=(v_average,channel_name) 2990 stats['virt_stats']['v_average_avg'][0] += v_average 2991 stats['virt_stats']['v_average_avg'][1] += 1 2992 except KeyError: 2993 stats['virt_stats']['v_average_min']=[v_average,channel_name] 2994 stats['virt_stats']['v_average_max']=[v_average,channel_name] 2995 stats['virt_stats']['v_average_avg']=[v_average,1] 2996 2997 try: 2998 debug_msg += '\n\n Statistics on virtual integration optimization : ' 2999 3000 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\ 3001 %tuple(stats['virt_stats']['v_frac_max']) 3002 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\ 3003 %tuple(stats['virt_stats']['v_frac_min']) 3004 debug_msg += '\n Average virt fraction computed %.3f'\ 3005 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1])) 3006 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\ 3007 (len(excluded_channels),len(all_channels)) 3008 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\ 3009 %tuple(stats['virt_stats']['v_average_max']) 3010 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\ 3011 %tuple(stats['virt_stats']['v_ratio_max']) 3012 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\ 3013 %tuple(stats['virt_stats']['v_ratio_err_max']) 3014 debug_msg += tmpStr 3015 # After all it was decided that it is better not to alarm the user unecessarily 3016 # with such printout of the statistics. 3017 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0 or \ 3018 # stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3019 # message += "\n Suspiciously large MC error in :" 3020 # if stats['virt_stats']['v_ratio_err_max'][0]>100.0: 3021 # message += tmpStr 3022 3023 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\ 3024 %tuple(stats['virt_stats']['v_contr_err_max']) 3025 debug_msg += tmpStr 3026 # if stats['virt_stats']['v_contr_err_max'][0]>100.0: 3027 # message += tmpStr 3028 3029 3030 except KeyError: 3031 debug_msg += '\n Could not find statistics on the integration optimization. ' 3032 3033 # ======================================= 3034 # == aMC@NLO timing profile statistics == 3035 # ======================================= 3036 3037 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\ 3038 "(?P<time>[\d\+-Eed\.]*)\s*") 3039 3040 for logf in log_GV_files: 3041 logfile=open(logf,'r') 3042 log = logfile.read() 3043 logfile.close() 3044 channel_name = '/'.join(logf.split('/')[-3:-1]) 3045 mint = re.search(mint_search,logf) 3046 if not mint is None: 3047 channel_name = channel_name+' [step %s]'%mint.group('ID') 3048 3049 for time_stats in re.finditer(timing_stat_finder, log): 3050 try: 3051 stats['timings'][time_stats.group('name')][channel_name]+=\ 3052 safe_float(time_stats.group('time')) 3053 except KeyError: 3054 if time_stats.group('name') not in stats['timings'].keys(): 3055 stats['timings'][time_stats.group('name')] = {} 3056 stats['timings'][time_stats.group('name')][channel_name]=\ 3057 safe_float(time_stats.group('time')) 3058 3059 # useful inline function 3060 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs))) 3061 try: 3062 totTimeList = [(time, chan) for chan, time in \ 3063 stats['timings']['Total'].items()] 3064 except KeyError: 3065 totTimeList = [] 3066 3067 totTimeList.sort() 3068 if len(totTimeList)>0: 3069 debug_msg += '\n\n Inclusive timing profile :' 3070 debug_msg += '\n Overall slowest channel %s (%s)'%\ 3071 (Tstr(totTimeList[-1][0]),totTimeList[-1][1]) 3072 debug_msg += '\n Average channel running time %s'%\ 3073 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList)) 3074 debug_msg += '\n Aggregated total running time %s'%\ 3075 Tstr(sum([el[0] for el in totTimeList])) 3076 else: 3077 debug_msg += '\n\n Inclusive timing profile non available.' 3078 3079 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \ 3080 sum(stats['timings'][stat].values()), reverse=True) 3081 for name in sorted_keys: 3082 if name=='Total': 3083 continue 3084 if sum(stats['timings'][name].values())<=0.0: 3085 debug_msg += '\n Zero time record for %s.'%name 3086 continue 3087 try: 3088 TimeList = [((100.0*time/stats['timings']['Total'][chan]), 3089 chan) for chan, time in stats['timings'][name].items()] 3090 except KeyError, ZeroDivisionError: 3091 debug_msg += '\n\n Timing profile for %s unavailable.'%name 3092 continue 3093 TimeList.sort() 3094 debug_msg += '\n Timing profile for <%s> :'%name 3095 try: 3096 debug_msg += '\n Overall fraction of time %.3f %%'%\ 3097 safe_float((100.0*(sum(stats['timings'][name].values())/ 3098 sum(stats['timings']['Total'].values())))) 3099 except KeyError, ZeroDivisionError: 3100 debug_msg += '\n Overall fraction of time unavailable.' 3101 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\ 3102 (TimeList[-1][0],TimeList[-1][1]) 3103 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\ 3104 (TimeList[0][0],TimeList[0][1]) 3105 3106 # ============================= 3107 # == log file eror detection == 3108 # ============================= 3109 3110 # Find the number of potential errors found in all log files 3111 # This re is a simple match on a case-insensitve 'error' but there is 3112 # also some veto added for excluding the sentence 3113 # "See Section 6 of paper for error calculation." 3114 # which appear in the header of lhapdf in the logs. 3115 err_finder = re.compile(\ 3116 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE) 3117 for log in all_log_files: 3118 logfile=open(log,'r') 3119 nErrors = len(re.findall(err_finder, logfile.read())) 3120 logfile.close() 3121 if nErrors != 0: 3122 stats['Errors'].append((str(log),nErrors)) 3123 3124 nErrors = sum([err[1] for err in stats['Errors']],0) 3125 if nErrors != 0: 3126 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\ 3127 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\ 3128 'found in the following log file%s:'%('s' if \ 3129 len(stats['Errors'])>1 else '') 3130 for error in stats['Errors'][:3]: 3131 log_name = '/'.join(error[0].split('/')[-5:]) 3132 debug_msg += '\n > %d error%s in %s'%\ 3133 (error[1],'s' if error[1]>1 else '',log_name) 3134 if len(stats['Errors'])>3: 3135 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0) 3136 nRemainingLogs = len(stats['Errors'])-3 3137 debug_msg += '\n And another %d error%s in %d other log file%s'%\ 3138 (nRemainingErrors, 's' if nRemainingErrors>1 else '', 3139 nRemainingLogs, 's ' if nRemainingLogs>1 else '') 3140 3141 return message, debug_msg 3142 3143
3144 - def reweight_and_collect_events(self, options, mode, nevents, event_norm):
3145 """this function calls the reweighting routines and creates the event file in the 3146 Event dir. Return the name of the event file created 3147 """ 3148 scale_pdf_info=[] 3149 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \ 3150 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1: 3151 scale_pdf_info = self.run_reweight(options['reweightonly']) 3152 self.update_status('Collecting events', level='parton', update_results=True) 3153 misc.compile(['collect_events'], 3154 cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile']) 3155 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'), 3156 stdin=subprocess.PIPE, 3157 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w')) 3158 if event_norm.lower() == 'sum': 3159 p.communicate(input = '1\n') 3160 elif event_norm.lower() == 'unity': 3161 p.communicate(input = '3\n') 3162 else: 3163 p.communicate(input = '2\n') 3164 3165 #get filename from collect events 3166 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1] 3167 3168 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)): 3169 raise aMCatNLOError('An error occurred during event generation. ' + \ 3170 'The event file has not been created. Check collect_events.log') 3171 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz') 3172 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file) 3173 if not options['reweightonly']: 3174 self.print_summary(options, 2, mode, scale_pdf_info) 3175 res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses')) 3176 for res_file in res_files: 3177 files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name)) 3178 3179 logger.info('The %s file has been generated.\n' % (evt_file)) 3180 self.results.add_detail('nb_event', nevents) 3181 self.update_status('Events generated', level='parton', update_results=True) 3182 return evt_file[:-3]
3183 3184
3185 - def run_mcatnlo(self, evt_file, options):
3186 """runs mcatnlo on the generated event file, to produce showered-events 3187 """ 3188 logger.info('Preparing MCatNLO run') 3189 try: 3190 misc.gunzip(evt_file) 3191 except Exception: 3192 pass 3193 3194 self.banner = banner_mod.Banner(evt_file) 3195 shower = self.banner.get_detail('run_card', 'parton_shower').upper() 3196 3197 #check that the number of split event files divides the number of 3198 # events, otherwise set it to 1 3199 if int(self.banner.get_detail('run_card', 'nevents') / \ 3200 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \ 3201 != self.banner.get_detail('run_card', 'nevents'): 3202 logger.warning(\ 3203 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \ 3204 'Setting it to 1.') 3205 self.shower_card['nsplit_jobs'] = 1 3206 3207 # don't split jobs if the user asks to shower only a part of the events 3208 if self.shower_card['nevents'] > 0 and \ 3209 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \ 3210 self.shower_card['nsplit_jobs'] != 1: 3211 logger.warning(\ 3212 'Only a part of the events will be showered.\n' + \ 3213 'Setting nsplit_jobs in the shower_card to 1.') 3214 self.shower_card['nsplit_jobs'] = 1 3215 3216 self.banner_to_mcatnlo(evt_file) 3217 3218 # if fastjet has to be linked (in extralibs) then 3219 # add lib /include dirs for fastjet if fastjet-config is present on the 3220 # system, otherwise add fjcore to the files to combine 3221 if 'fastjet' in self.shower_card['extralibs']: 3222 #first, check that stdc++ is also linked 3223 if not 'stdc++' in self.shower_card['extralibs']: 3224 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS') 3225 self.shower_card['extralibs'] += ' stdc++' 3226 # then check if options[fastjet] corresponds to a valid fj installation 3227 try: 3228 #this is for a complete fj installation 3229 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \ 3230 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3231 output, error = p.communicate() 3232 #remove the line break from output (last character) 3233 output = output[:-1] 3234 # add lib/include paths 3235 if not pjoin(output, 'lib') in self.shower_card['extrapaths']: 3236 logger.warning('Linking FastJet: updating EXTRAPATHS') 3237 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib') 3238 if not pjoin(output, 'include') in self.shower_card['includepaths']: 3239 logger.warning('Linking FastJet: updating INCLUDEPATHS') 3240 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include') 3241 # to be changed in the fortran wrapper 3242 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ' 3243 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ' 3244 except Exception: 3245 logger.warning('Linking FastJet: using fjcore') 3246 # this is for FJcore, so no FJ library has to be linked 3247 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '') 3248 if not 'fjcore.o' in self.shower_card['analyse']: 3249 self.shower_card['analyse'] += ' fjcore.o' 3250 # to be changed in the fortran wrapper 3251 include_line = '#include "fjcore.hh"//INCLUDE_FJ' 3252 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ' 3253 # change the fortran wrapper with the correct namespaces/include 3254 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n') 3255 for line in fjwrapper_lines: 3256 if '//INCLUDE_FJ' in line: 3257 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line 3258 if '//NAMESPACE_FJ' in line: 3259 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line 3260 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock: 3261 fsock.write('\n'.join(fjwrapper_lines) + '\n') 3262 3263 extrapaths = self.shower_card['extrapaths'].split() 3264 3265 # check that the path needed by HW++ and PY8 are set if one uses these shower 3266 if shower in ['HERWIGPP', 'PYTHIA8']: 3267 path_dict = {'HERWIGPP': ['hepmc_path', 3268 'thepeg_path', 3269 'hwpp_path'], 3270 'PYTHIA8': ['pythia8_path']} 3271 3272 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]): 3273 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \ 3274 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower]))) 3275 3276 if shower == 'HERWIGPP': 3277 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib')) 3278 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib') 3279 3280 # add the HEPMC path of the pythia8 installation 3281 if shower == 'PYTHIA8': 3282 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'], 3283 stdout = subprocess.PIPE).stdout.read().strip() 3284 #this gives all the flags, i.e. 3285 #-I/Path/to/HepMC/include -L/Path/to/HepMC/lib -lHepMC 3286 # we just need the path to the HepMC libraries 3287 extrapaths.append(hepmc.split()[1].replace('-L', '')) 3288 3289 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 3290 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib')) 3291 3292 # set the PATH for the dynamic libraries 3293 if sys.platform == 'darwin': 3294 ld_library_path = 'DYLD_LIBRARY_PATH' 3295 else: 3296 ld_library_path = 'LD_LIBRARY_PATH' 3297 if ld_library_path in os.environ.keys(): 3298 paths = os.environ[ld_library_path] 3299 else: 3300 paths = '' 3301 paths += ':' + ':'.join(extrapaths) 3302 os.putenv(ld_library_path, paths) 3303 3304 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat') 3305 self.shower_card.write_card(shower, shower_card_path) 3306 3307 # overwrite if shower_card_set.dat exists in MCatNLO 3308 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')): 3309 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'), 3310 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')) 3311 3312 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log') 3313 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower') 3314 3315 3316 # libdl may be needded for pythia 82xx 3317 #if shower == 'PYTHIA8' and not \ 3318 # os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \ 3319 # 'dl' not in self.shower_card['extralibs'].split(): 3320 # # 'dl' has to be linked with the extralibs 3321 # self.shower_card['extralibs'] += ' dl' 3322 # logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \ 3323 # "It is needed for the correct running of PY8.2xx.\n" + \ 3324 # "If this library cannot be found on your system, a crash will occur.") 3325 3326 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'), 3327 stderr=open(mcatnlo_log, 'w'), 3328 cwd=pjoin(self.me_dir, 'MCatNLO'), 3329 close_fds=True) 3330 3331 exe = 'MCATNLO_%s_EXE' % shower 3332 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \ 3333 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')): 3334 print open(mcatnlo_log).read() 3335 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log) 3336 logger.info(' ... done') 3337 3338 # create an empty dir where to run 3339 count = 1 3340 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3341 (shower, count))): 3342 count += 1 3343 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \ 3344 (shower, count)) 3345 os.mkdir(rundir) 3346 files.cp(shower_card_path, rundir) 3347 3348 #look for the event files (don't resplit if one asks for the 3349 # same number of event files as in the previous run) 3350 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3351 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']: 3352 logger.info('Cleaning old files and splitting the event file...') 3353 #clean the old files 3354 files.rm([f for f in event_files if 'events.lhe' not in f]) 3355 if self.shower_card['nsplit_jobs'] > 1: 3356 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile']) 3357 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')], 3358 stdin=subprocess.PIPE, 3359 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'), 3360 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3361 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs']) 3362 logger.info('Splitting done.') 3363 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name)) 3364 3365 event_files.sort() 3366 3367 self.update_status('Showering events...', level='shower') 3368 logger.info('(Running in %s)' % rundir) 3369 if shower != 'PYTHIA8': 3370 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir) 3371 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir) 3372 else: 3373 # special treatment for pythia8 3374 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir) 3375 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir) 3376 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): # this is PY8.1xxx 3377 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir) 3378 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir) 3379 else: # this is PY8.2xxx 3380 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir) 3381 #link the hwpp exe in the rundir 3382 if shower == 'HERWIGPP': 3383 try: 3384 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 3385 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir) 3386 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 3387 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir) 3388 except Exception: 3389 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.') 3390 3391 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')): 3392 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir) 3393 3394 files.ln(evt_file, rundir, 'events.lhe') 3395 for i, f in enumerate(event_files): 3396 files.ln(f, rundir,'events_%d.lhe' % (i + 1)) 3397 3398 if not self.shower_card['analyse']: 3399 # an hep/hepmc file as output 3400 out_id = 'HEP' 3401 else: 3402 # one or more .top file(s) as output 3403 if "HwU" in self.shower_card['analyse']: 3404 out_id = 'HWU' 3405 else: 3406 out_id = 'TOP' 3407 3408 # write the executable 3409 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock: 3410 # set the PATH for the dynamic libraries 3411 if sys.platform == 'darwin': 3412 ld_library_path = 'DYLD_LIBRARY_PATH' 3413 else: 3414 ld_library_path = 'LD_LIBRARY_PATH' 3415 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \ 3416 % {'ld_library_path': ld_library_path, 3417 'extralibs': ':'.join(extrapaths)}) 3418 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')]) 3419 3420 if event_files: 3421 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \ 3422 for i in range(len(event_files))] 3423 else: 3424 arg_list = [[shower, out_id, self.run_name]] 3425 3426 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower') 3427 self.njobs = 1 3428 self.wait_for_complete('shower') 3429 3430 # now collect the results 3431 message = '' 3432 warning = '' 3433 to_gzip = [evt_file] 3434 if out_id == 'HEP': 3435 #copy the showered stdhep/hepmc file back in events 3436 if shower in ['PYTHIA8', 'HERWIGPP']: 3437 hep_format = 'HEPMC' 3438 ext = 'hepmc' 3439 else: 3440 hep_format = 'StdHEP' 3441 ext = 'hep' 3442 3443 hep_file = '%s_%s_0.%s.gz' % \ 3444 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext) 3445 count = 0 3446 3447 # find the first available name for the output: 3448 # check existing results with or without event splitting 3449 while os.path.exists(hep_file) or \ 3450 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) : 3451 count +=1 3452 hep_file = '%s_%s_%d.%s.gz' % \ 3453 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext) 3454 3455 try: 3456 if self.shower_card['nsplit_jobs'] == 1: 3457 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file) 3458 message = ('The file %s has been generated. \nIt contains showered' + \ 3459 ' and hadronized events in the %s format obtained' + \ 3460 ' showering the parton-level event file %s.gz with %s') % \ 3461 (hep_file, hep_format, evt_file, shower) 3462 else: 3463 hep_list = [] 3464 for i in range(self.shower_card['nsplit_jobs']): 3465 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext))) 3466 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1]) 3467 message = ('The following files have been generated:\n %s\nThey contain showered' + \ 3468 ' and hadronized events in the %s format obtained' + \ 3469 ' showering the (split) parton-level event file %s.gz with %s') % \ 3470 ('\n '.join(hep_list), hep_format, evt_file, shower) 3471 3472 except OSError, IOError: 3473 raise aMCatNLOError('No file has been generated, an error occurred.'+\ 3474 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log')) 3475 3476 # run the plot creation in a secure way 3477 if hep_format == 'StdHEP': 3478 try: 3479 self.do_plot('%s -f' % self.run_name) 3480 except Exception, error: 3481 logger.info("Fail to make the plot. Continue...") 3482 pass 3483 3484 elif out_id == 'TOP' or out_id == 'HWU': 3485 #copy the topdrawer or HwU file(s) back in events 3486 if out_id=='TOP': 3487 ext='top' 3488 elif out_id=='HWU': 3489 ext='HwU' 3490 topfiles = [] 3491 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)] 3492 for top_tar in top_tars: 3493 topfiles.extend(top_tar.getnames()) 3494 3495 # safety check 3496 if len(top_tars) != self.shower_card['nsplit_jobs']: 3497 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \ 3498 (self.shower_card['nsplit_jobs'], len(top_tars))) 3499 3500 # find the first available name for the output: 3501 # check existing results with or without event splitting 3502 filename = 'plot_%s_%d_' % (shower, 1) 3503 count = 1 3504 while os.path.exists(pjoin(self.me_dir, 'Events', 3505 self.run_name, '%s0.%s' % (filename,ext))) or \ 3506 os.path.exists(pjoin(self.me_dir, 'Events', 3507 self.run_name, '%s0__1.%s' % (filename,ext))): 3508 count += 1 3509 filename = 'plot_%s_%d_' % (shower, count) 3510 3511 if out_id=='TOP': 3512 hist_format='TopDrawer format' 3513 elif out_id=='HWU': 3514 hist_format='HwU and GnuPlot formats' 3515 3516 if not topfiles: 3517 # if no topfiles are found just warn the user 3518 warning = 'No .top file has been generated. For the results of your ' +\ 3519 'run, please check inside %s' % rundir 3520 elif self.shower_card['nsplit_jobs'] == 1: 3521 # only one job for the shower 3522 top_tars[0].extractall(path = rundir) 3523 plotfiles = [] 3524 for i, file in enumerate(topfiles): 3525 if out_id=='TOP': 3526 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3527 '%s%d.top' % (filename, i)) 3528 files.mv(pjoin(rundir, file), plotfile) 3529 elif out_id=='HWU': 3530 out=pjoin(self.me_dir,'Events', 3531 self.run_name,'%s%d'% (filename,i)) 3532 histos=[{'dirname':pjoin(rundir,file)}] 3533 self.combine_plots_HwU(histos,out) 3534 try: 3535 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\ 3536 stdout=os.open(os.devnull, os.O_RDWR),\ 3537 stderr=os.open(os.devnull, os.O_RDWR),\ 3538 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3539 except Exception: 3540 pass 3541 plotfile=pjoin(self.me_dir,'Events',self.run_name, 3542 '%s%d.HwU'% (filename,i)) 3543 plotfiles.append(plotfile) 3544 3545 ffiles = 'files' 3546 have = 'have' 3547 if len(plotfiles) == 1: 3548 ffiles = 'file' 3549 have = 'has' 3550 3551 message = ('The %s %s %s been generated, with histograms in the' + \ 3552 ' %s, obtained by showering the parton-level' + \ 3553 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \ 3554 hist_format, evt_file, shower) 3555 else: 3556 # many jobs for the shower have been run 3557 topfiles_set = set(topfiles) 3558 plotfiles = [] 3559 for j, top_tar in enumerate(top_tars): 3560 top_tar.extractall(path = rundir) 3561 for i, file in enumerate(topfiles_set): 3562 plotfile = pjoin(self.me_dir, 'Events', self.run_name, 3563 '%s%d__%d.%s' % (filename, i, j + 1,ext)) 3564 files.mv(pjoin(rundir, file), plotfile) 3565 plotfiles.append(plotfile) 3566 3567 # check if the user asked to combine the .top into a single file 3568 if self.shower_card['combine_td']: 3569 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities')) 3570 3571 if self.banner.get('run_card', 'event_norm').lower() == 'sum': 3572 norm = 1. 3573 elif self.banner.get('run_card', 'event_norm').lower() == 'average': 3574 norm = 1./float(self.shower_card['nsplit_jobs']) 3575 3576 plotfiles2 = [] 3577 for i, file in enumerate(topfiles_set): 3578 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \ 3579 for j in range(self.shower_card['nsplit_jobs'])] 3580 if out_id=='TOP': 3581 infile="%d\n%s\n%s\n" % \ 3582 (self.shower_card['nsplit_jobs'], 3583 '\n'.join(filelist), 3584 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs'])) 3585 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')], 3586 stdin=subprocess.PIPE, 3587 stdout=os.open(os.devnull, os.O_RDWR), 3588 cwd=pjoin(self.me_dir, 'Events', self.run_name)) 3589 p.communicate(input = infile) 3590 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'), 3591 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i))) 3592 elif out_id=='HWU': 3593 out=pjoin(self.me_dir,'Events', 3594 self.run_name,'%s%d'% (filename,i)) 3595 histos=[] 3596 norms=[] 3597 for plotfile in plotfiles: 3598 histos.append({'dirname':plotfile}) 3599 norms.append(norm) 3600 self.combine_plots_HwU(histos,out,normalisation=norms) 3601 try: 3602 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\ 3603 stdout=os.open(os.devnull, os.O_RDWR),\ 3604 stderr=os.open(os.devnull, os.O_RDWR),\ 3605 cwd=pjoin(self.me_dir, 'Events',self.run_name)) 3606 except Exception: 3607 pass 3608 3609 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext))) 3610 tar = tarfile.open( 3611 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz') 3612 for f in filelist: 3613 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f) 3614 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist]) 3615 3616 tar.close() 3617 3618 ffiles = 'files' 3619 have = 'have' 3620 if len(plotfiles2) == 1: 3621 ffiles = 'file' 3622 have = 'has' 3623 3624 message = ('The %s %s %s been generated, with histograms in the' + \ 3625 ' %s, obtained by showering the parton-level' + \ 3626 ' file %s.gz with %s.\n' + \ 3627 'The files from the different shower ' + \ 3628 'jobs (before combining them) can be found inside %s.') % \ 3629 (ffiles, ', '.join(plotfiles2), have, hist_format,\ 3630 evt_file, shower, 3631 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2])) 3632 3633 else: 3634 message = ('The following files have been generated:\n %s\n' + \ 3635 'They contain histograms in the' + \ 3636 ' %s, obtained by showering the parton-level' + \ 3637 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \ 3638 hist_format, evt_file, shower) 3639 3640 # Now arxiv the shower card used if RunMaterial is present 3641 run_dir_path = pjoin(rundir, self.run_name) 3642 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')): 3643 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path) 3644 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'), 3645 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\ 3646 %(shower, count))) 3647 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'], 3648 cwd=run_dir_path) 3649 shutil.rmtree(pjoin(run_dir_path,'RunMaterial')) 3650 # end of the run, gzip files and print out the message/warning 3651 for f in to_gzip: 3652 misc.gzip(f) 3653 if message: 3654 logger.info(message) 3655 if warning: 3656 logger.warning(warning) 3657 3658 self.update_status('Run complete', level='shower', update_results=True)
3659 3660 ############################################################################
3661 - def set_run_name(self, name, tag=None, level='parton', reload_card=False):
3662 """define the run name, the run_tag, the banner and the results.""" 3663 3664 # when are we force to change the tag new_run:previous run requiring changes 3665 upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'], 3666 'shower': ['shower','delphes','madanalysis5_hadron'], 3667 'delphes':['delphes'], 3668 'madanalysis5_hadron':['madanalysis5_hadron'], 3669 'plot':[]} 3670 3671 if name == self.run_name: 3672 if reload_card: 3673 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3674 self.run_card = banner_mod.RunCardNLO(run_card) 3675 3676 #check if we need to change the tag 3677 if tag: 3678 self.run_card['run_tag'] = tag 3679 self.run_tag = tag 3680 self.results.add_run(self.run_name, self.run_card) 3681 else: 3682 for tag in upgrade_tag[level]: 3683 if getattr(self.results[self.run_name][-1], tag): 3684 tag = self.get_available_tag() 3685 self.run_card['run_tag'] = tag 3686 self.run_tag = tag 3687 self.results.add_run(self.run_name, self.run_card) 3688 break 3689 return # Nothing to do anymore 3690 3691 # save/clean previous run 3692 if self.run_name: 3693 self.store_result() 3694 # store new name 3695 self.run_name = name 3696 3697 # Read run_card 3698 run_card = pjoin(self.me_dir, 'Cards','run_card.dat') 3699 self.run_card = banner_mod.RunCardNLO(run_card) 3700 3701 new_tag = False 3702 # First call for this run -> set the banner 3703 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag) 3704 if 'mgruncard' in self.banner: 3705 self.run_card = self.banner.charge_card('run_card') 3706 if tag: 3707 self.run_card['run_tag'] = tag 3708 new_tag = True 3709 elif not self.run_name in self.results and level =='parton': 3710 pass # No results yet, so current tag is fine 3711 elif not self.run_name in self.results: 3712 #This is only for case when you want to trick the interface 3713 logger.warning('Trying to run data on unknown run.') 3714 self.results.add_run(name, self.run_card) 3715 self.results.update('add run %s' % name, 'all', makehtml=True) 3716 else: 3717 for tag in upgrade_tag[level]: 3718 3719 if getattr(self.results[self.run_name][-1], tag): 3720 # LEVEL is already define in the last tag -> need to switch tag 3721 tag = self.get_available_tag() 3722 self.run_card['run_tag'] = tag 3723 new_tag = True 3724 break 3725 if not new_tag: 3726 # We can add the results to the current run 3727 tag = self.results[self.run_name][-1]['tag'] 3728 self.run_card['run_tag'] = tag # ensure that run_tag is correct 3729 3730 3731 if name in self.results and not new_tag: 3732 self.results.def_current(self.run_name) 3733 else: 3734 self.results.add_run(self.run_name, self.run_card) 3735 3736 self.run_tag = self.run_card['run_tag'] 3737 3738 # Return the tag of the previous run having the required data for this 3739 # tag/run to working wel. 3740 if level == 'parton': 3741 return 3742 elif level == 'pythia': 3743 return self.results[self.run_name][0]['tag'] 3744 else: 3745 for i in range(-1,-len(self.results[self.run_name])-1,-1): 3746 tagRun = self.results[self.run_name][i] 3747 if tagRun.pythia: 3748 return tagRun['tag']
3749 3750
3751 - def store_result(self):
3752 """ tar the pythia results. This is done when we are quite sure that 3753 the pythia output will not be use anymore """ 3754 3755 if not self.run_name: 3756 return 3757 3758 self.results.save() 3759 3760 if not self.to_store: 3761 return 3762 3763 if 'event' in self.to_store: 3764 if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')): 3765 if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')): 3766 self.update_status('gzipping output file: events.lhe', level='parton', error=True) 3767 misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3768 else: 3769 os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')) 3770 if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')): 3771 os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe')) 3772 3773 3774 tag = self.run_card['run_tag'] 3775 3776 self.to_store = []
3777 3778
3779 - def get_init_dict(self, evt_file):
3780 """reads the info in the init block and returns them in a dictionary""" 3781 ev_file = open(evt_file) 3782 init = "" 3783 found = False 3784 while True: 3785 line = ev_file.readline() 3786 if "<init>" in line: 3787 found = True 3788 elif found and not line.startswith('#'): 3789 init += line 3790 if "</init>" in line or "<event>" in line: 3791 break 3792 ev_file.close() 3793 3794 # IDBMUP(1),IDBMUP(2),EBMUP(1),EBMUP(2), PDFGUP(1),PDFGUP(2), 3795 # PDFSUP(1),PDFSUP(2),IDWTUP,NPRUP 3796 # these are not included (so far) in the init_dict 3797 # XSECUP(1),XERRUP(1),XMAXUP(1),LPRUP(1) 3798 3799 init_dict = {} 3800 init_dict['idbmup1'] = int(init.split()[0]) 3801 init_dict['idbmup2'] = int(init.split()[1]) 3802 init_dict['ebmup1'] = float(init.split()[2]) 3803 init_dict['ebmup2'] = float(init.split()[3]) 3804 init_dict['pdfgup1'] = int(init.split()[4]) 3805 init_dict['pdfgup2'] = int(init.split()[5]) 3806 init_dict['pdfsup1'] = int(init.split()[6]) 3807 init_dict['pdfsup2'] = int(init.split()[7]) 3808 init_dict['idwtup'] = int(init.split()[8]) 3809 init_dict['nprup'] = int(init.split()[9]) 3810 3811 return init_dict
3812 3813
3814 - def banner_to_mcatnlo(self, evt_file):
3815 """creates the mcatnlo input script using the values set in the header of the event_file. 3816 It also checks if the lhapdf library is used""" 3817 shower = self.banner.get('run_card', 'parton_shower').upper() 3818 pdlabel = self.banner.get('run_card', 'pdlabel') 3819 itry = 0 3820 nevents = self.shower_card['nevents'] 3821 init_dict = self.get_init_dict(evt_file) 3822 3823 if nevents < 0 or \ 3824 nevents > self.banner.get_detail('run_card', 'nevents'): 3825 nevents = self.banner.get_detail('run_card', 'nevents') 3826 3827 nevents = nevents / self.shower_card['nsplit_jobs'] 3828 3829 mcmass_dict = {} 3830 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]: 3831 pdg = int(line.split()[0]) 3832 mass = float(line.split()[1]) 3833 mcmass_dict[pdg] = mass 3834 3835 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1]) 3836 content += 'NEVENTS=%d\n' % nevents 3837 content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\ 3838 self.shower_card['nsplit_jobs']) 3839 content += 'MCMODE=%s\n' % shower 3840 content += 'PDLABEL=%s\n' % pdlabel 3841 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value 3842 #content += 'PDFSET=%s\n' % self.banner.get_detail('run_card', 'lhaid') 3843 #content += 'PDFSET=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3844 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value 3845 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value 3846 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value 3847 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value 3848 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value 3849 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value 3850 try: 3851 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value 3852 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value 3853 except KeyError: 3854 content += 'HGGMASS=120.\n' 3855 content += 'HGGWIDTH=0.00575308848\n' 3856 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1') 3857 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2') 3858 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1') 3859 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2') 3860 content += 'DMASS=%s\n' % mcmass_dict[1] 3861 content += 'UMASS=%s\n' % mcmass_dict[2] 3862 content += 'SMASS=%s\n' % mcmass_dict[3] 3863 content += 'CMASS=%s\n' % mcmass_dict[4] 3864 content += 'BMASS=%s\n' % mcmass_dict[5] 3865 try: 3866 content += 'EMASS=%s\n' % mcmass_dict[11] 3867 content += 'MUMASS=%s\n' % mcmass_dict[13] 3868 content += 'TAUMASS=%s\n' % mcmass_dict[15] 3869 except KeyError: 3870 # this is for backward compatibility 3871 mcmass_lines = [l for l in \ 3872 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper()) 3873 ).read().split('\n') if l] 3874 new_mcmass_dict = {} 3875 for l in mcmass_lines: 3876 key, val = l.split('=') 3877 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip() 3878 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)'] 3879 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)'] 3880 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)'] 3881 3882 content += 'GMASS=%s\n' % mcmass_dict[21] 3883 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower() 3884 # check if need to link lhapdf 3885 if int(self.shower_card['pdfcode']) > 1 or \ 3886 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \ 3887 shower=='HERWIGPP' : 3888 # Use LHAPDF (should be correctly installed, because 3889 # either events were already generated with them, or the 3890 # user explicitly gives an LHAPDF number in the 3891 # shower_card). 3892 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3893 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3894 stdout = subprocess.PIPE).stdout.read().strip() 3895 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3896 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3897 if self.shower_card['pdfcode']==0: 3898 lhaid_list = '' 3899 content += '' 3900 elif self.shower_card['pdfcode']==1: 3901 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3902 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3903 else: 3904 lhaid_list = [abs(int(self.shower_card['pdfcode']))] 3905 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode'] 3906 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3907 elif int(self.shower_card['pdfcode'])==1: 3908 # Try to use LHAPDF because user wants to use the same PDF 3909 # as was used for the event generation. However, for the 3910 # event generation, LHAPDF was not used, so non-trivial to 3911 # see if if LHAPDF is available with the corresponding PDF 3912 # set. If not found, give a warning and use build-in PDF 3913 # set instead. 3914 try: 3915 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'], 3916 stdout = subprocess.PIPE).stdout.read().strip() 3917 self.link_lhapdf(pjoin(self.me_dir, 'lib')) 3918 content += 'LHAPDFPATH=%s\n' % lhapdfpath 3919 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 3920 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])] 3921 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']]) 3922 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 3923 except Exception: 3924 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\ 3925 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\ 3926 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\ 3927 ' same set as was used in the event generation install LHAPDF and set the path using'+\ 3928 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell') 3929 content += 'LHAPDFPATH=\n' 3930 content += 'PDFCODE=0\n' 3931 else: 3932 content += 'LHAPDFPATH=\n' 3933 content += 'PDFCODE=0\n' 3934 3935 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw') 3936 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj') 3937 # add the pythia8/hwpp path(s) 3938 if self.options['pythia8_path']: 3939 content+='PY8PATH=%s\n' % self.options['pythia8_path'] 3940 if self.options['hwpp_path']: 3941 content+='HWPPPATH=%s\n' % self.options['hwpp_path'] 3942 if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']: 3943 content+='THEPEGPATH=%s\n' % self.options['thepeg_path'] 3944 if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']: 3945 content+='HEPMCPATH=%s\n' % self.options['hepmc_path'] 3946 3947 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w') 3948 output.write(content) 3949 output.close() 3950 return shower
3951 3952
3953 - def run_reweight(self, only):
3954 """runs the reweight_xsec_events executables on each sub-event file generated 3955 to compute on the fly scale and/or PDF uncertainities""" 3956 logger.info(' Doing reweight') 3957 3958 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted') 3959 # if only doing reweight, copy back the nevents_unweighted file 3960 if only: 3961 if os.path.exists(nev_unw + '.orig'): 3962 files.cp(nev_unw + '.orig', nev_unw) 3963 else: 3964 raise aMCatNLOError('Cannot find event file information') 3965 3966 #read the nevents_unweighted file to get the list of event files 3967 file = open(nev_unw) 3968 lines = file.read().split('\n') 3969 file.close() 3970 # make copy of the original nevent_unweighted file 3971 files.cp(nev_unw, nev_unw + '.orig') 3972 # loop over lines (all but the last one whith is empty) and check that the 3973 # number of events is not 0 3974 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0'] 3975 evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0'] 3976 #prepare the job_dict 3977 job_dict = {} 3978 exe = 'reweight_xsec_events.local' 3979 for i, evt_file in enumerate(evt_files): 3980 path, evt = os.path.split(evt_file) 3981 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \ 3982 pjoin(self.me_dir, 'SubProcesses', path)) 3983 job_dict[path] = [exe] 3984 3985 self.run_all(job_dict, [[evt, '1']], 'Running reweight') 3986 3987 #check that the new event files are complete 3988 for evt_file in evt_files: 3989 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \ 3990 pjoin(self.me_dir, 'SubProcesses', evt_file)], \ 3991 stdout = subprocess.PIPE).stdout.read().strip() 3992 if last_line != "</LesHouchesEvents>": 3993 raise aMCatNLOError('An error occurred during reweight. Check the' + \ 3994 '\'reweight_xsec_events.output\' files inside the ' + \ 3995 '\'SubProcesses/P*/G*/ directories for details') 3996 3997 #update file name in nevents_unweighted 3998 newfile = open(nev_unw, 'w') 3999 for line in lines: 4000 if line: 4001 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n') 4002 newfile.close() 4003 4004 return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4005
4006 - def pdf_scale_from_reweighting(self, evt_files,evt_wghts):
4007 """This function takes the files with the scale and pdf values 4008 written by the reweight_xsec_events.f code 4009 (P*/G*/pdf_scale_dependence.dat) and computes the overall 4010 scale and PDF uncertainty (the latter is computed using the 4011 Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000)) 4012 and returns it in percents. The expected format of the file 4013 is: n_scales xsec_scale_central xsec_scale1 ... n_pdf 4014 xsec_pdf0 xsec_pdf1 ....""" 4015 4016 scales=[] 4017 pdfs=[] 4018 for i,evt_file in enumerate(evt_files): 4019 path, evt=os.path.split(evt_file) 4020 with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f: 4021 data_line=f.readline() 4022 if "scale variations:" in data_line: 4023 for j,scale in enumerate(self.run_card['dynamical_scale_choice']): 4024 data_line = f.readline().split() 4025 scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()] 4026 try: 4027 scales[j] = [a + b for a, b in zip(scales[j], scales_this)] 4028 except IndexError: 4029 scales+=[scales_this] 4030 data_line=f.readline() 4031 if "pdf variations:" in data_line: 4032 for j,pdf in enumerate(self.run_card['lhaid']): 4033 data_line = f.readline().split() 4034 pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()] 4035 try: 4036 pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)] 4037 except IndexError: 4038 pdfs+=[pdfs_this] 4039 4040 # get the scale uncertainty in percent 4041 scale_info=[] 4042 for j,scale in enumerate(scales): 4043 s_cen=scale[0] 4044 if s_cen != 0.0 and self.run_card['reweight_scale'][j]: 4045 # max and min of the full envelope 4046 s_max=(max(scale)/s_cen-1)*100 4047 s_min=(1-min(scale)/s_cen)*100 4048 # ren and fac scale dependence added in quadrature 4049 ren_var=[] 4050 fac_var=[] 4051 for i in range(len(self.run_card['rw_rscale'])): 4052 ren_var.append(scale[i]-s_cen) # central fac scale 4053 for i in range(len(self.run_card['rw_fscale'])): 4054 fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen) # central ren scale 4055 s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100 4056 s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100 4057 s_size=len(scale) 4058 else: 4059 s_max=0.0 4060 s_min=0.0 4061 s_max_q=0.0 4062 s_min_q=0.0 4063 s_size=len(scale) 4064 scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \ 4065 'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \ 4066 'label':self.run_card['dynamical_scale_choice'][j], \ 4067 'unc':self.run_card['reweight_scale'][j]}) 4068 4069 # check if we can use LHAPDF to compute the PDF uncertainty 4070 if any(self.run_card['reweight_pdf']): 4071 use_lhapdf=False 4072 lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\ 4073 stdout=subprocess.PIPE).stdout.read().strip() 4074 4075 try: 4076 candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \ 4077 if os.path.isdir(pjoin(lhapdf_libdir,dirname))] 4078 except OSError: 4079 candidates=[] 4080 for candidate in candidates: 4081 if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')): 4082 sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages')) 4083 try: 4084 import lhapdf 4085 use_lhapdf=True 4086 break 4087 except ImportError: 4088 sys.path.pop(0) 4089 continue 4090 4091 if not use_lhapdf: 4092 try: 4093 candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \ 4094 if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))] 4095 except OSError: 4096 candidates=[] 4097 for candidate in candidates: 4098 if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')): 4099 sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages')) 4100 try: 4101 import lhapdf 4102 use_lhapdf=True 4103 break 4104 except ImportError: 4105 sys.path.pop(0) 4106 continue 4107 4108 if not use_lhapdf: 4109 try: 4110 import lhapdf 4111 use_lhapdf=True 4112 except ImportError: 4113 logger.warning("Failed to access python version of LHAPDF: "\ 4114 "cannot compute PDF uncertainty from the "\ 4115 "weights in the events. The weights in the LHE " \ 4116 "event files will still cover all PDF set members, "\ 4117 "but there will be no PDF uncertainty printed in the run summary. \n "\ 4118 "If the python interface to LHAPDF is available on your system, try "\ 4119 "adding its location to the PYTHONPATH environment variable and the"\ 4120 "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).") 4121 use_lhapdf=False 4122 4123 # turn off lhapdf printing any messages 4124 if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0) 4125 4126 pdf_info=[] 4127 for j,pdfset in enumerate(pdfs): 4128 p_cen=pdfset[0] 4129 if p_cen != 0.0 and self.run_card['reweight_pdf'][j]: 4130 if use_lhapdf: 4131 pdfsetname=self.run_card['lhapdfsetname'][j] 4132 try: 4133 p=lhapdf.getPDFSet(pdfsetname) 4134 ep=p.uncertainty(pdfset,-1) 4135 p_cen=ep.central 4136 p_min=abs(ep.errminus/p_cen)*100 4137 p_max=abs(ep.errplus/p_cen)*100 4138 p_type=p.errorType 4139 p_size=p.size 4140 p_conf=p.errorConfLevel 4141 except: 4142 logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname) 4143 p_min=0.0 4144 p_max=0.0 4145 p_type='unknown' 4146 p_conf='unknown' 4147 p_size=len(pdfset) 4148 else: 4149 p_min=0.0 4150 p_max=0.0 4151 p_type='unknown' 4152 p_conf='unknown' 4153 p_size=len(pdfset) 4154 pdfsetname=self.run_card['lhaid'][j] 4155 else: 4156 p_min=0.0 4157 p_max=0.0 4158 p_type='none' 4159 p_conf='unknown' 4160 p_size=len(pdfset) 4161 pdfsetname=self.run_card['lhaid'][j] 4162 pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \ 4163 'unc':p_type, 'name':pdfsetname, 'size':p_size, \ 4164 'label':self.run_card['lhaid'][j], 'conf':p_conf}) 4165 4166 scale_pdf_info=[scale_info,pdf_info] 4167 return scale_pdf_info
4168 4169
4170 - def wait_for_complete(self, run_type):
4171 """this function waits for jobs on cluster to complete their run.""" 4172 starttime = time.time() 4173 #logger.info(' Waiting for submitted jobs to complete') 4174 update_status = lambda i, r, f: self.update_status((i, r, f, run_type), 4175 starttime=starttime, level='parton', update_results=True) 4176 try: 4177 self.cluster.wait(self.me_dir, update_status) 4178 except: 4179 self.cluster.remove() 4180 raise
4181
4182 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4183 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args""" 4184 self.ijob = 0 4185 if run_type != 'shower': 4186 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list) 4187 for args in arg_list: 4188 for Pdir, jobs in job_dict.items(): 4189 for job in jobs: 4190 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) ) 4191 if self.cluster_mode == 2: 4192 time.sleep(1) # security to allow all jobs to be launched 4193 else: 4194 self.njobs = len(arg_list) 4195 for args in arg_list: 4196 [(cwd, exe)] = job_dict.items() 4197 self.run_exe(exe, args, run_type, cwd) 4198 4199 self.wait_for_complete(run_type)
4200 4201 4202
4203 - def check_event_files(self,jobs):
4204 """check the integrity of the event files after splitting, and resubmit 4205 those which are not nicely terminated""" 4206 jobs_to_resubmit = [] 4207 for job in jobs: 4208 last_line = '' 4209 try: 4210 last_line = subprocess.Popen( 4211 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \ 4212 stdout = subprocess.PIPE).stdout.read().strip() 4213 except IOError: 4214 pass 4215 if last_line != "</LesHouchesEvents>": 4216 jobs_to_resubmit.append(job) 4217 self.njobs = 0 4218 if jobs_to_resubmit: 4219 run_type = 'Resubmitting broken jobs' 4220 logger.info('Some event files are broken, corresponding jobs will be resubmitted.') 4221 for job in jobs_to_resubmit: 4222 logger.debug('Resubmitting ' + job['dirname'] + '\n') 4223 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4224 4225
4226 - def find_jobs_to_split(self, pdir, job, arg):
4227 """looks into the nevents_unweighed_splitted file to check how many 4228 split jobs are needed for this (pdir, job). arg is F, B or V""" 4229 # find the number of the integration channel 4230 splittings = [] 4231 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read() 4232 pattern = re.compile('for i in (\d+) ; do') 4233 match = re.search(pattern, ajob) 4234 channel = match.groups()[0] 4235 # then open the nevents_unweighted_splitted file and look for the 4236 # number of splittings to be done 4237 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read() 4238 # This skips the channels with zero events, because they are 4239 # not of the form GFXX_YY, but simply GFXX 4240 pattern = re.compile(r"%s_(\d+)/events.lhe" % \ 4241 pjoin(pdir, 'G%s%s' % (arg,channel))) 4242 matches = re.findall(pattern, nevents_file) 4243 for m in matches: 4244 splittings.append(m) 4245 return splittings
4246 4247
4248 - def run_exe(self, exe, args, run_type, cwd=None):
4249 """this basic function launch locally/on cluster exe with args as argument. 4250 """ 4251 # first test that exe exists: 4252 execpath = None 4253 if cwd and os.path.exists(pjoin(cwd, exe)): 4254 execpath = pjoin(cwd, exe) 4255 elif not cwd and os.path.exists(exe): 4256 execpath = exe 4257 else: 4258 raise aMCatNLOError('Cannot find executable %s in %s' \ 4259 % (exe, os.getcwd())) 4260 # check that the executable has exec permissions 4261 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK): 4262 subprocess.call(['chmod', '+x', exe], cwd=cwd) 4263 # finally run it 4264 if self.cluster_mode == 0: 4265 #this is for the serial run 4266 misc.call(['./'+exe] + args, cwd=cwd) 4267 self.ijob += 1 4268 self.update_status((max([self.njobs - self.ijob - 1, 0]), 4269 min([1, self.njobs - self.ijob]), 4270 self.ijob, run_type), level='parton') 4271 4272 #this is for the cluster/multicore run 4273 elif 'reweight' in exe: 4274 # a reweight run 4275 # Find the correct PDF input file 4276 input_files, output_files = [], [] 4277 pdfinput = self.get_pdf_input_filename() 4278 if os.path.exists(pdfinput): 4279 input_files.append(pdfinput) 4280 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events')) 4281 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat')) 4282 input_files.append(args[0]) 4283 output_files.append('%s.rwgt' % os.path.basename(args[0])) 4284 output_files.append('reweight_xsec_events.output') 4285 output_files.append('scale_pdf_dependence.dat') 4286 4287 return self.cluster.submit2(exe, args, cwd=cwd, 4288 input_files=input_files, output_files=output_files, 4289 required_output=output_files) 4290 4291 elif 'ajob' in exe: 4292 # the 'standard' amcatnlo job 4293 # check if args is a list of string 4294 if type(args[0]) == str: 4295 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args) 4296 #submitting 4297 self.cluster.submit2(exe, args, cwd=cwd, 4298 input_files=input_files, output_files=output_files, 4299 required_output=required_output) 4300 4301 # # keep track of folders and arguments for splitted evt gen 4302 # subfolder=output_files[-1].split('/')[0] 4303 # if len(args) == 4 and '_' in subfolder: 4304 # self.split_folders[pjoin(cwd,subfolder)] = [exe] + args 4305 4306 elif 'shower' in exe: 4307 # a shower job 4308 # args are [shower, output(HEP or TOP), run_name] 4309 # cwd is the shower rundir, where the executable are found 4310 input_files, output_files = [], [] 4311 shower = args[0] 4312 # the input files 4313 if shower == 'PYTHIA8': 4314 input_files.append(pjoin(cwd, 'Pythia8.exe')) 4315 input_files.append(pjoin(cwd, 'Pythia8.cmd')) 4316 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')): 4317 input_files.append(pjoin(cwd, 'config.sh')) 4318 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc')) 4319 else: 4320 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc')) 4321 else: 4322 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower)) 4323 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower)) 4324 if shower == 'HERWIGPP': 4325 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')): 4326 input_files.append(pjoin(cwd, 'Herwig++')) 4327 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')): 4328 input_files.append(pjoin(cwd, 'Herwig')) 4329 input_files.append(pjoin(cwd, 'HepMCFortran.so')) 4330 if len(args) == 3: 4331 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')): 4332 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')) 4333 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')): 4334 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')) 4335 else: 4336 raise aMCatNLOError, 'Event file not present in %s' % \ 4337 pjoin(self.me_dir, 'Events', self.run_name) 4338 else: 4339 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3])) 4340 # the output files 4341 if len(args) == 3: 4342 output_files.append('mcatnlo_run.log') 4343 else: 4344 output_files.append('mcatnlo_run_%s.log' % args[3]) 4345 if args[1] == 'HEP': 4346 if len(args) == 3: 4347 fname = 'events' 4348 else: 4349 fname = 'events_%s' % args[3] 4350 if shower in ['PYTHIA8', 'HERWIGPP']: 4351 output_files.append(fname + '.hepmc.gz') 4352 else: 4353 output_files.append(fname + '.hep.gz') 4354 elif args[1] == 'TOP' or args[1] == 'HWU': 4355 if len(args) == 3: 4356 fname = 'histfile' 4357 else: 4358 fname = 'histfile_%s' % args[3] 4359 output_files.append(fname + '.tar') 4360 else: 4361 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1] 4362 #submitting 4363 self.cluster.submit2(exe, args, cwd=cwd, 4364 input_files=input_files, output_files=output_files) 4365 4366 else: 4367 return self.cluster.submit(exe, args, cwd=cwd)
4368
4369 - def getIO_ajob(self,exe,cwd, args):
4370 # use local disk if possible => need to stands what are the 4371 # input/output files 4372 4373 output_files = [] 4374 required_output = [] 4375 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'), 4376 pjoin(cwd, 'symfact.dat'), 4377 pjoin(cwd, 'iproc.dat'), 4378 pjoin(cwd, 'initial_states_map.dat'), 4379 pjoin(cwd, 'configs_and_props_info.dat'), 4380 pjoin(cwd, 'leshouche_info.dat'), 4381 pjoin(cwd, 'FKS_params.dat')] 4382 4383 # For GoSam interface, we must copy the SLHA card as well 4384 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')): 4385 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat')) 4386 4387 if os.path.exists(pjoin(cwd,'nevents.tar')): 4388 input_files.append(pjoin(cwd,'nevents.tar')) 4389 4390 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')): 4391 input_files.append(pjoin(cwd, 'OLE_order.olc')) 4392 4393 # File for the loop (might not be present if MadLoop is not used) 4394 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \ 4395 cluster.need_transfer(self.options): 4396 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 4397 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \ 4398 cluster.need_transfer(self.options): 4399 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz', 4400 dereference=True) 4401 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources') 4402 tf.close() 4403 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz')) 4404 4405 if args[1] == 'born' or args[1] == 'all': 4406 # MADEVENT MINT FO MODE 4407 input_files.append(pjoin(cwd, 'madevent_mintFO')) 4408 if args[2] == '0': 4409 current = '%s_G%s' % (args[1],args[0]) 4410 else: 4411 current = '%s_G%s_%s' % (args[1],args[0],args[2]) 4412 if os.path.exists(pjoin(cwd,current)): 4413 input_files.append(pjoin(cwd, current)) 4414 output_files.append(current) 4415 4416 required_output.append('%s/results.dat' % current) 4417 required_output.append('%s/res_%s.dat' % (current,args[3])) 4418 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4419 required_output.append('%s/mint_grids' % current) 4420 required_output.append('%s/grid.MC_integer' % current) 4421 if args[3] != '0': 4422 required_output.append('%s/scale_pdf_dependence.dat' % current) 4423 4424 elif args[1] == 'F' or args[1] == 'B': 4425 # MINTMC MODE 4426 input_files.append(pjoin(cwd, 'madevent_mintMC')) 4427 4428 if args[2] == '0': 4429 current = 'G%s%s' % (args[1],args[0]) 4430 else: 4431 current = 'G%s%s_%s' % (args[1],args[0],args[2]) 4432 if os.path.exists(pjoin(cwd,current)): 4433 input_files.append(pjoin(cwd, current)) 4434 output_files.append(current) 4435 if args[2] > '0': 4436 # this is for the split event generation 4437 output_files.append('G%s%s_%s' % (args[1], args[0], args[2])) 4438 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3])) 4439 4440 else: 4441 required_output.append('%s/log_MINT%s.txt' % (current,args[3])) 4442 if args[3] in ['0','1']: 4443 required_output.append('%s/results.dat' % current) 4444 if args[3] == '1': 4445 output_files.append('%s/results.dat' % current) 4446 4447 else: 4448 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args)) 4449 4450 #Find the correct PDF input file 4451 pdfinput = self.get_pdf_input_filename() 4452 if os.path.exists(pdfinput): 4453 input_files.append(pdfinput) 4454 return input_files, output_files, required_output, args
4455 4456
4457 - def compile(self, mode, options):
4458 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as 4459 specified in mode""" 4460 4461 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name)) 4462 4463 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name, 4464 '%s_%s_banner.txt' % (self.run_name, self.run_tag))) 4465 4466 self.get_characteristics(pjoin(self.me_dir, 4467 'SubProcesses', 'proc_characteristics')) 4468 4469 #define a bunch of log files 4470 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log') 4471 madloop_log = pjoin(self.me_dir, 'compile_madloop.log') 4472 reweight_log = pjoin(self.me_dir, 'compile_reweight.log') 4473 test_log = pjoin(self.me_dir, 'test.log') 4474 4475 # environmental variables to be included in make_opts 4476 self.make_opts_var = {} 4477 if self.proc_characteristics['has_loops'] and \ 4478 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4479 self.make_opts_var['madloop'] = 'true' 4480 4481 self.update_status('Compiling the code', level=None, update_results=True) 4482 4483 libdir = pjoin(self.me_dir, 'lib') 4484 sourcedir = pjoin(self.me_dir, 'Source') 4485 4486 #clean files 4487 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log]) 4488 #define which executable/tests to compile 4489 if '+' in mode: 4490 mode = mode.split('+')[0] 4491 if mode in ['NLO', 'LO']: 4492 exe = 'madevent_mintFO' 4493 tests = ['test_ME'] 4494 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts')) 4495 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']: 4496 exe = 'madevent_mintMC' 4497 tests = ['test_ME', 'test_MC'] 4498 # write an analyse_opts with a dummy analysis so that compilation goes through 4499 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock: 4500 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n') 4501 4502 #directory where to compile exe 4503 p_dirs = [d for d in \ 4504 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d] 4505 # create param_card.inc and run_card.inc 4506 self.do_treatcards('', amcatnlo=True, mode=mode) 4507 # if --nocompile option is specified, check here that all exes exists. 4508 # If they exists, return 4509 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \ 4510 for p_dir in p_dirs]) and options['nocompile']: 4511 return 4512 4513 # rm links to lhapdflib/ PDFsets if exist 4514 if os.path.exists(pjoin(libdir, 'PDFsets')): 4515 files.rm(pjoin(libdir, 'PDFsets')) 4516 4517 # read the run_card to find if lhapdf is used or not 4518 if self.run_card['pdlabel'] == 'lhapdf' and \ 4519 (self.banner.get_detail('run_card', 'lpp1') != 0 or \ 4520 self.banner.get_detail('run_card', 'lpp2') != 0): 4521 4522 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs]) 4523 pdfsetsdir = self.get_lhapdf_pdfsetsdir() 4524 lhaid_list = self.run_card['lhaid'] 4525 self.copy_lhapdf_set(lhaid_list, pdfsetsdir) 4526 4527 else: 4528 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']: 4529 logger.info('Using built-in libraries for PDFs') 4530 if self.run_card['lpp1'] == 0 == self.run_card['lpp2']: 4531 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.') 4532 self.make_opts_var['lhapdf'] = "" 4533 4534 # read the run_card to find if applgrid is used or not 4535 if self.run_card['iappl'] != 0: 4536 self.make_opts_var['applgrid'] = 'True' 4537 # check versions of applgrid and amcfast 4538 for code in ['applgrid','amcfast']: 4539 try: 4540 p = subprocess.Popen([self.options[code], '--version'], \ 4541 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 4542 except OSError: 4543 raise aMCatNLOError(('No valid %s installation found. \n' + \ 4544 'Please set the path to %s-config by using \n' + \ 4545 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code)) 4546 else: 4547 output, _ = p.communicate() 4548 if code is 'applgrid' and output < '1.4.63': 4549 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\ 4550 +' You are using %s',output) 4551 if code is 'amcfast' and output < '1.1.1': 4552 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\ 4553 +' You are using %s',output) 4554 4555 # set-up the Source/make_opts with the correct applgrid-config file 4556 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \ 4557 % (self.options['amcfast'],self.options['applgrid']) 4558 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines() 4559 text_out=[] 4560 for line in text: 4561 if line.strip().startswith('APPLLIBS=$'): 4562 line=appllibs 4563 text_out.append(line) 4564 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock: 4565 fsock.writelines(text_out) 4566 else: 4567 self.make_opts_var['applgrid'] = "" 4568 4569 if 'fastjet' in self.options.keys() and self.options['fastjet']: 4570 self.make_opts_var['fastjet_config'] = self.options['fastjet'] 4571 4572 # add the make_opts_var to make_opts 4573 self.update_make_opts() 4574 4575 # make Source 4576 self.update_status('Compiling source...', level=None) 4577 misc.compile(['clean4pdf'], cwd = sourcedir) 4578 misc.compile(cwd = sourcedir) 4579 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \ 4580 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \ 4581 and os.path.exists(pjoin(libdir, 'libmodel.a')) \ 4582 and os.path.exists(pjoin(libdir, 'libpdf.a')): 4583 logger.info(' ...done, continuing with P* directories') 4584 else: 4585 raise aMCatNLOError('Compilation failed') 4586 4587 # make StdHep (only necessary with MG option output_dependencies='internal') 4588 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib') 4589 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \ 4590 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))): 4591 if os.path.exists(pjoin(sourcedir,'StdHEP')): 4592 logger.info('Compiling StdHEP (can take a couple of minutes) ...') 4593 misc.compile(['StdHEP'], cwd = sourcedir) 4594 logger.info(' ...done.') 4595 else: 4596 raise aMCatNLOError('Could not compile StdHEP because its'+\ 4597 ' source directory could not be found in the SOURCE folder.\n'+\ 4598 " Check the MG5_aMC option 'output_dependencies.'") 4599 4600 # make CutTools (only necessary with MG option output_dependencies='internal') 4601 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4602 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4603 if os.path.exists(pjoin(sourcedir,'CutTools')): 4604 logger.info('Compiling CutTools (can take a couple of minutes) ...') 4605 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 4606 logger.info(' ...done.') 4607 else: 4608 raise aMCatNLOError('Could not compile CutTools because its'+\ 4609 ' source directory could not be found in the SOURCE folder.\n'+\ 4610 " Check the MG5_aMC option 'output_dependencies.'") 4611 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \ 4612 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))): 4613 raise aMCatNLOError('CutTools compilation failed.') 4614 4615 # Verify compatibility between current compiler and the one which was 4616 # used when last compiling CutTools (if specified). 4617 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4618 libdir, 'libcts.a')))),'compiler_version.log') 4619 if os.path.exists(compiler_log_path): 4620 compiler_version_used = open(compiler_log_path,'r').read() 4621 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4622 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4623 if os.path.exists(pjoin(sourcedir,'CutTools')): 4624 logger.info('CutTools was compiled with a different fortran'+\ 4625 ' compiler. Re-compiling it now...') 4626 misc.compile(['cleanCT'], cwd = sourcedir) 4627 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1) 4628 logger.info(' ...done.') 4629 else: 4630 raise aMCatNLOError("CutTools installation in %s"\ 4631 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\ 4632 " seems to have been compiled with a different compiler than"+\ 4633 " the one specified in MG5_aMC. Please recompile CutTools.") 4634 4635 # make IREGI (only necessary with MG option output_dependencies='internal') 4636 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \ 4637 and os.path.exists(pjoin(sourcedir,'IREGI')): 4638 logger.info('Compiling IREGI (can take a couple of minutes) ...') 4639 misc.compile(['IREGI'], cwd = sourcedir) 4640 logger.info(' ...done.') 4641 4642 if os.path.exists(pjoin(libdir, 'libiregi.a')): 4643 # Verify compatibility between current compiler and the one which was 4644 # used when last compiling IREGI (if specified). 4645 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin( 4646 libdir, 'libiregi.a')))),'compiler_version.log') 4647 if os.path.exists(compiler_log_path): 4648 compiler_version_used = open(compiler_log_path,'r').read() 4649 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\ 4650 pjoin(sourcedir,'make_opts')))) in compiler_version_used: 4651 if os.path.exists(pjoin(sourcedir,'IREGI')): 4652 logger.info('IREGI was compiled with a different fortran'+\ 4653 ' compiler. Re-compiling it now...') 4654 misc.compile(['cleanIR'], cwd = sourcedir) 4655 misc.compile(['IREGI'], cwd = sourcedir) 4656 logger.info(' ...done.') 4657 else: 4658 raise aMCatNLOError("IREGI installation in %s"\ 4659 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\ 4660 " seems to have been compiled with a different compiler than"+\ 4661 " the one specified in MG5_aMC. Please recompile IREGI.") 4662 4663 # check if MadLoop virtuals have been generated 4664 if self.proc_characteristics['has_loops'] and \ 4665 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')): 4666 if mode in ['NLO', 'aMC@NLO', 'noshower']: 4667 tests.append('check_poles') 4668 4669 # make and run tests (if asked for), gensym and make madevent in each dir 4670 self.update_status('Compiling directories...', level=None) 4671 4672 for test in tests: 4673 self.write_test_input(test) 4674 4675 try: 4676 import multiprocessing 4677 if not self.nb_core: 4678 try: 4679 self.nb_core = int(self.options['nb_core']) 4680 except TypeError: 4681 self.nb_core = multiprocessing.cpu_count() 4682 except ImportError: 4683 self.nb_core = 1 4684 4685 compile_options = copy.copy(self.options) 4686 compile_options['nb_core'] = self.nb_core 4687 compile_cluster = cluster.MultiCore(**compile_options) 4688 logger.info('Compiling on %d cores' % self.nb_core) 4689 4690 update_status = lambda i, r, f: self.donothing(i,r,f) 4691 for p_dir in p_dirs: 4692 compile_cluster.submit(prog = compile_dir, 4693 argument = [self.me_dir, p_dir, mode, options, 4694 tests, exe, self.options['run_mode']]) 4695 try: 4696 compile_cluster.wait(self.me_dir, update_status) 4697 except Exception, error: 4698 logger.warning("Fail to compile the Subprocesses") 4699 if __debug__: 4700 raise 4701 compile_cluster.remove() 4702 self.do_quit('') 4703 4704 logger.info('Checking test output:') 4705 for p_dir in p_dirs: 4706 logger.info(p_dir) 4707 for test in tests: 4708 logger.info(' Result for %s:' % test) 4709 4710 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir) 4711 #check that none of the tests failed 4712 self.check_tests(test, this_dir)
4713 4714
4715 - def donothing(*args):
4716 pass
4717 4718
4719 - def check_tests(self, test, dir):
4720 """just call the correct parser for the test log. 4721 Skip check_poles for LOonly folders""" 4722 if test in ['test_ME', 'test_MC']: 4723 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test)) 4724 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')): 4725 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4726 4727
4728 - def parse_test_mx_log(self, log):
4729 """read and parse the test_ME/MC.log file""" 4730 content = open(log).read() 4731 if 'FAILED' in content: 4732 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK') 4733 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \ 4734 'Please check that widths of final state particles (e.g. top) have been' + \ 4735 ' set to 0 in the param_card.dat.') 4736 else: 4737 lines = [l for l in content.split('\n') if 'PASSED' in l] 4738 logger.info(' Passed.') 4739 logger.debug('\n'+'\n'.join(lines))
4740 4741
4742 - def parse_check_poles_log(self, log):
4743 """reads and parse the check_poles.log file""" 4744 content = open(log).read() 4745 npass = 0 4746 nfail = 0 4747 for line in content.split('\n'): 4748 if 'PASSED' in line: 4749 npass +=1 4750 tolerance = float(line.split()[1]) 4751 if 'FAILED' in line: 4752 nfail +=1 4753 tolerance = float(line.split()[1]) 4754 4755 if nfail + npass == 0: 4756 logger.warning('0 points have been tried') 4757 return 4758 4759 if float(nfail)/float(nfail+npass) > 0.1: 4760 raise aMCatNLOError('Poles do not cancel, run cannot continue') 4761 else: 4762 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \ 4763 %(npass, nfail+npass, tolerance))
4764 4765
4766 - def write_test_input(self, test):
4767 """write the input files to run test_ME/MC or check_poles""" 4768 if test in ['test_ME', 'test_MC']: 4769 content = "-2 -2\n" #generate randomly energy/angle 4770 content+= "100 100\n" #run 100 points for soft and collinear tests 4771 content+= "0\n" #sum over helicities 4772 content+= "0\n" #all FKS configs 4773 content+= '\n'.join(["-1"] * 50) #random diagram 4774 elif test == 'check_poles': 4775 content = '20 \n -1\n' 4776 4777 file = open(pjoin(self.me_dir, '%s_input.txt' % test), 'w') 4778 if test == 'test_MC': 4779 shower = self.run_card['parton_shower'] 4780 MC_header = "%s\n " % shower + \ 4781 "1 \n1 -0.1\n-1 -0.1\n" 4782 file.write(MC_header + content) 4783 else: 4784 file.write(content) 4785 file.close()
4786 4787 4788 4789 ############################################################################
4790 - def find_model_name(self):
4791 """ return the model name """ 4792 if hasattr(self, 'model_name'): 4793 return self.model_name 4794 4795 model = 'sm' 4796 proc = [] 4797 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')): 4798 line = line.split('#')[0] 4799 #line = line.split('=')[0] 4800 if line.startswith('import') and 'model' in line: 4801 model = line.split()[2] 4802 proc = [] 4803 elif line.startswith('generate'): 4804 proc.append(line.split(None,1)[1]) 4805 elif line.startswith('add process'): 4806 proc.append(line.split(None,2)[2]) 4807 4808 self.model = model 4809 self.process = proc 4810 return model
4811 4812 4813 4814 ############################################################################
4815 - def ask_run_configuration(self, mode, options, switch={}):
4816 """Ask the question when launching generate_events/multi_run""" 4817 4818 if 'parton' not in options: 4819 options['parton'] = False 4820 if 'reweightonly' not in options: 4821 options['reweightonly'] = False 4822 4823 4824 void = 'Not installed' 4825 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight','madanalysis5'] 4826 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void, 4827 'madspin': void,'reweight':'OFF','madanalysis5':void} 4828 if not switch: 4829 switch = switch_default 4830 else: 4831 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch)) 4832 default_switch = ['ON', 'OFF'] 4833 4834 4835 allowed_switch_value = {'order': ['LO', 'NLO'], 4836 'fixed_order': default_switch, 4837 'shower': default_switch, 4838 'madspin': default_switch, 4839 'reweight': default_switch, 4840 'madanalysis5':['OFF','HADRON']} 4841 4842 if not os.path.exists(pjoin(self.me_dir, 'Cards', 4843 'madanalysis5_hadron_card_default.dat')): 4844 allowed_switch_value['madanalysis5']=[] 4845 4846 description = {'order': 'Perturbative order of the calculation:', 4847 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):', 4848 'shower': 'Shower the generated events:', 4849 'madspin': 'Decay particles with the MadSpin module:', 4850 'reweight': 'Add weights to the events based on changing model parameters:', 4851 'madanalysis5':'Run MadAnalysis5 on the events generated:'} 4852 4853 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'}, 4854 ('madspin', 'ON'): {'fixed_order':'OFF'}, 4855 ('reweight', 'ON'): {'fixed_order':'OFF'}, 4856 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF','madanalysis5':'OFF'}, 4857 ('madanalysis5','HADRON'): {'shower': 'ON','fixed_order':'OFF'}, 4858 ('shower','OFF'): {'madanalysis5': 'OFF'}, 4859 } 4860 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] 4861 4862 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void ) 4863 4864 if self.proc_characteristics['ninitial'] == 1: 4865 switch['fixed_order'] = 'ON' 4866 switch['shower'] = 'Not available for decay' 4867 switch['madspin'] = 'Not available for decay' 4868 switch['reweight'] = 'Not available for decay' 4869 switch['madanalysis5'] = 'Not available for decay' 4870 allowed_switch_value['fixed_order'] = ['ON'] 4871 allowed_switch_value['shower'] = ['OFF'] 4872 allowed_switch_value['madspin'] = ['OFF'] 4873 allowed_switch_value['reweight'] = ['OFF'] 4874 allowed_switch_value['madanalysis5'] = ['OFF'] 4875 available_mode = ['0','1'] 4876 special_values = ['LO', 'NLO'] 4877 else: 4878 # Init the switch value according to the current status 4879 available_mode = ['0', '1', '2','3'] 4880 4881 if mode == 'auto': 4882 mode = None 4883 if not mode and (options['parton'] or options['reweightonly']): 4884 mode = 'noshower' 4885 4886 4887 if '3' in available_mode: 4888 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')): 4889 switch['shower'] = 'ON' 4890 else: 4891 switch['shower'] = 'OFF' 4892 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card_default.dat')): 4893 available_mode.append('6') 4894 if os.path.exists(pjoin(self.me_dir, 'Cards', 'madanalysis5_hadron_card.dat')): 4895 switch['madanalysis5'] = 'HADRON' 4896 else: 4897 switch['madanalysis5'] = 'OFF' 4898 4899 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode: 4900 available_mode.append('4') 4901 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4902 switch['madspin'] = 'ON' 4903 else: 4904 switch['madspin'] = 'OFF' 4905 if misc.has_f2py() or self.options['f2py_compiler']: 4906 available_mode.append('5') 4907 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')): 4908 switch['reweight'] = 'ON' 4909 else: 4910 switch['reweight'] = 'OFF' 4911 else: 4912 switch['reweight'] = 'Not available (requires NumPy)' 4913 4914 if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode: 4915 if switch['reweight'] == "OFF": 4916 switch['reweight'] = "ON" 4917 elif switch['reweight'] != "ON": 4918 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight']) 4919 if 'do_madspin' in options and options['do_madspin']: 4920 if switch['madspin'] == "OFF": 4921 switch['madspin'] = 'ON' 4922 elif switch['madspin'] != "ON": 4923 logger.critical("Cannot run MadSpin module: %s" % switch['reweight']) 4924 4925 answers = list(available_mode) + ['auto', 'done'] 4926 alias = {} 4927 for id, key in enumerate(switch_order): 4928 if switch[key] != void and switch[key] in allowed_switch_value[key] and \ 4929 len(allowed_switch_value[key])>1: 4930 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]] 4931 #allow lower case for on/off 4932 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s)) 4933 for s in allowed_switch_value[key])) 4934 answers += special_values 4935 4936 def create_question(switch): 4937 switch_format = " %i %-61s %12s=%s\n" 4938 question = "The following switches determine which operations are executed:\n" 4939 for id, key in enumerate(switch_order): 4940 question += switch_format % (id+1, description[key], key, switch[key]) 4941 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1) 4942 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n' 4943 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n' 4944 return question
4945 4946 4947 def modify_switch(mode, answer, switch): 4948 if '=' in answer: 4949 key, status = answer.split('=') 4950 switch[key] = status 4951 if (key, status) in force_switch: 4952 for key2, status2 in force_switch[(key, status)].items(): 4953 if switch[key2] not in [status2, void]: 4954 logger.info('For coherence \'%s\' is set to \'%s\'' 4955 % (key2, status2), '$MG:color:BLACK') 4956 switch[key2] = status2 4957 elif answer in ['0', 'auto', 'done']: 4958 return 4959 elif answer in special_values: 4960 logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK') 4961 #assign_switch('reweight', 'OFF') 4962 #assign_switch('madspin', 'OFF') 4963 if answer == 'LO': 4964 switch['order'] = 'LO' 4965 switch['fixed_order'] = 'ON' 4966 assign_switch('shower', 'OFF') 4967 elif answer == 'NLO': 4968 switch['order'] = 'NLO' 4969 switch['fixed_order'] = 'ON' 4970 assign_switch('shower', 'OFF') 4971 elif answer == 'aMC@NLO': 4972 switch['order'] = 'NLO' 4973 switch['fixed_order'] = 'OFF' 4974 assign_switch('shower', 'ON') 4975 elif answer == 'aMC@LO': 4976 switch['order'] = 'LO' 4977 switch['fixed_order'] = 'OFF' 4978 assign_switch('shower', 'ON') 4979 elif answer == 'noshower': 4980 switch['order'] = 'NLO' 4981 switch['fixed_order'] = 'OFF' 4982 assign_switch('shower', 'OFF') 4983 elif answer == 'noshowerLO': 4984 switch['order'] = 'LO' 4985 switch['fixed_order'] = 'OFF' 4986 assign_switch('shower', 'OFF') 4987 if mode: 4988 return 4989 return switch 4990 4991 modify_switch(mode, self.last_mode, switch) 4992 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')): 4993 assign_switch('madspin', 'ON') 4994 4995 if not self.force: 4996 answer = '' 4997 while answer not in ['0', 'done', 'auto', 'onlyshower']: 4998 question = create_question(switch) 4999 if mode: 5000 answer = mode 5001 else: 5002 answer = self.ask(question, '0', answers, alias=alias) 5003 if answer.isdigit() and answer != '0': 5004 key = switch_order[int(answer) - 1] 5005 opt1 = allowed_switch_value[key][0] 5006 opt2 = allowed_switch_value[key][1] 5007 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2) 5008 5009 if not modify_switch(mode, answer, switch): 5010 break 5011 5012 #assign the mode depending of the switch 5013 if not mode or mode == 'auto': 5014 if switch['order'] == 'LO': 5015 if switch['shower'] == 'ON': 5016 mode = 'aMC@LO' 5017 elif switch['fixed_order'] == 'ON': 5018 mode = 'LO' 5019 else: 5020 mode = 'noshowerLO' 5021 elif switch['order'] == 'NLO': 5022 if switch['shower'] == 'ON': 5023 mode = 'aMC@NLO' 5024 elif switch['fixed_order'] == 'ON': 5025 mode = 'NLO' 5026 else: 5027 mode = 'noshower' 5028 logger.info('will run in mode: %s' % mode) 5029 5030 if mode == 'noshower': 5031 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical. 5032 Please, shower the Les Houches events before using them for physics analyses.""") 5033 5034 5035 # specify the cards which are needed for this run. 5036 cards = ['param_card.dat', 'run_card.dat'] 5037 ignore = [] 5038 if mode in ['LO', 'NLO']: 5039 options['parton'] = True 5040 ignore = ['shower_card.dat', 'madspin_card.dat'] 5041 cards.append('FO_analyse_card.dat') 5042 else: 5043 if switch['madspin'] == 'ON': 5044 cards.append('madspin_card.dat') 5045 if switch['reweight'] == 'ON': 5046 cards.append('reweight_card.dat') 5047 if switch['madanalysis5'] == 'HADRON': 5048 cards.append('madanalysis5_hadron_card.dat') 5049 if 'aMC@' in mode: 5050 cards.append('shower_card.dat') 5051 if mode == 'onlyshower': 5052 cards = ['shower_card.dat'] 5053 if options['reweightonly']: 5054 cards = ['run_card.dat'] 5055 5056 self.keep_cards(cards, ignore) 5057 5058 if mode =='onlyshower': 5059 cards = ['shower_card.dat'] 5060 5061 5062 # automatically switch to keep_wgt option 5063 first_cmd = [] # force to change some switch 5064 5065 if not options['force'] and not self.force: 5066 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd) 5067 5068 self.banner = banner_mod.Banner() 5069 5070 # store the cards in the banner 5071 for card in cards: 5072 self.banner.add(pjoin(self.me_dir, 'Cards', card)) 5073 # and the run settings 5074 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()]) 5075 self.banner.add_text('run_settings', run_settings) 5076 5077 if not mode =='onlyshower': 5078 self.run_card = self.banner.charge_card('run_card') 5079 self.run_tag = self.run_card['run_tag'] 5080 #this is if the user did not provide a name for the current run 5081 if not hasattr(self, 'run_name') or not self.run_name: 5082 self.run_name = self.find_available_run_name(self.me_dir) 5083 #add a tag in the run_name for distinguish run_type 5084 if self.run_name.startswith('run_'): 5085 if mode in ['LO','aMC@LO','noshowerLO']: 5086 self.run_name += '_LO' 5087 self.set_run_name(self.run_name, self.run_tag, 'parton') 5088 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']: 5089 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""") 5090 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']: 5091 logger.warning("""You are running with FxFx merging enabled. To be able to merge 5092 samples of various multiplicities without double counting, you 5093 have to remove some events after showering 'by hand'. Please 5094 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""") 5095 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q': 5096 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""") 5097 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8': 5098 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \ 5099 "Type \'n\' to stop or \'y\' to continue" 5100 answers = ['n','y'] 5101 answer = self.ask(question, 'n', answers, alias=alias) 5102 if answer == 'n': 5103 error = '''Stop opertation''' 5104 self.ask_run_configuration(mode, options) 5105 # raise aMCatNLOError(error) 5106 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']: 5107 # NNLL+NLO jet-veto only possible for LO event generation or fNLO runs. 5108 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""") 5109 if 'aMC@' in mode or mode == 'onlyshower': 5110 self.shower_card = self.banner.charge_card('shower_card') 5111 5112 elif mode in ['LO', 'NLO']: 5113 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat') 5114 self.analyse_card = self.banner.charge_card('FO_analyse_card') 5115 5116 return mode 5117 5118 5119 #=============================================================================== 5120 # aMCatNLOCmd 5121 #===============================================================================
5122 -class aMCatNLOCmdShell(aMCatNLOCmd, cmd.CmdShell):
5123 """The command line processor of MadGraph"""
5124 5125 _compile_usage = "compile [MODE] [options]\n" + \ 5126 "-- compiles aMC@NLO \n" + \ 5127 " MODE can be either FO, for fixed-order computations, \n" + \ 5128 " or MC for matching with parton-shower monte-carlos. \n" + \ 5129 " (if omitted, it is set to MC)\n" 5130 _compile_parser = misc.OptionParser(usage=_compile_usage) 5131 _compile_parser.add_option("-f", "--force", default=False, action='store_true', 5132 help="Use the card present in the directory for the launch, without editing them") 5133 5134 _launch_usage = "launch [MODE] [options]\n" + \ 5135 "-- execute aMC@NLO \n" + \ 5136 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 5137 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 5138 " computation of the total cross section and the filling of parton-level histograms \n" + \ 5139 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 5140 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 5141 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 5142 " in the run_card.dat\n" 5143 5144 _launch_parser = misc.OptionParser(usage=_launch_usage) 5145 _launch_parser.add_option("-f", "--force", default=False, action='store_true', 5146 help="Use the card present in the directory for the launch, without editing them") 5147 _launch_parser.add_option("-c", "--cluster", default=False, action='store_true', 5148 help="Submit the jobs on the cluster") 5149 _launch_parser.add_option("-m", "--multicore", default=False, action='store_true', 5150 help="Submit the jobs on multicore mode") 5151 _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true', 5152 help="Skip compilation. Ignored if no executable is found") 5153 _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 5154 help="Skip integration and event generation, just run reweight on the" + \ 5155 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 5156 _launch_parser.add_option("-p", "--parton", default=False, action='store_true', 5157 help="Stop the run after the parton level file generation (you need " + \ 5158 "to shower the file in order to get physical results)") 5159 _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true', 5160 help="Skip grid set up, just generate events starting from " + \ 5161 "the last available results") 5162 _launch_parser.add_option("-n", "--name", default=False, dest='run_name', 5163 help="Provide a name to the run") 5164 _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 5165 help="For use with APPLgrid only: start from existing grids") 5166 _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true', 5167 help="Run the reweight module (reweighting by different model parameters)") 5168 _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true', 5169 help="Run the madspin package") 5170 5171 5172 5173 _generate_events_usage = "generate_events [MODE] [options]\n" + \ 5174 "-- execute aMC@NLO \n" + \ 5175 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \ 5176 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \ 5177 " computation of the total cross section and the filling of parton-level histograms \n" + \ 5178 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \ 5179 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \ 5180 " event file is generated which will be showered with the MonteCarlo specified \n" + \ 5181 " in the run_card.dat\n" 5182 5183 _generate_events_parser = misc.OptionParser(usage=_generate_events_usage) 5184 _generate_events_parser.add_option("-f", "--force", default=False, action='store_true', 5185 help="Use the card present in the directory for the generate_events, without editing them") 5186 _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true', 5187 help="Submit the jobs on the cluster") 5188 _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true', 5189 help="Submit the jobs on multicore mode") 5190 _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true', 5191 help="Skip compilation. Ignored if no executable is found") 5192 _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true', 5193 help="Skip integration and event generation, just run reweight on the" + \ 5194 " latest generated event files (see list in SubProcesses/nevents_unweighted)") 5195 _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true', 5196 help="Stop the run after the parton level file generation (you need " + \ 5197 "to shower the file in order to get physical results)") 5198 _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true', 5199 help="Skip grid set up, just generate events starting from " + \ 5200 "the last available results") 5201 _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name', 5202 help="Provide a name to the run") 5203 5204 5205 5206 _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \ 5207 "-- calculate cross section up to ORDER.\n" + \ 5208 " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n" 5209 5210 _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage) 5211 _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true', 5212 help="Use the card present in the directory for the launch, without editing them") 5213 _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true', 5214 help="Submit the jobs on the cluster") 5215 _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true', 5216 help="Submit the jobs on multicore mode") 5217 _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true', 5218 help="Skip compilation. Ignored if no executable is found") 5219 _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name', 5220 help="Provide a name to the run") 5221 _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid', 5222 help="For use with APPLgrid only: start from existing grids") 5223 _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true', 5224 help="Skip grid set up, just generate events starting from " + \ 5225 "the last available results") 5226 5227 _shower_usage = 'shower run_name [options]\n' + \ 5228 '-- do shower/hadronization on parton-level file generated for run run_name\n' + \ 5229 ' all the information (e.g. number of events, MonteCarlo, ...\n' + \ 5230 ' are directly read from the header of the event file\n' 5231 _shower_parser = misc.OptionParser(usage=_shower_usage) 5232 _shower_parser.add_option("-f", "--force", default=False, action='store_true', 5233 help="Use the shower_card present in the directory for the launch, without editing") 5234 5235 if '__main__' == __name__: 5236 # Launch the interface without any check if one code is already running. 5237 # This can ONLY run a single command !! 5238 import sys 5239 if not sys.version_info[0] == 2 or sys.version_info[1] < 6: 5240 sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\ 5241 'Please upgrate your version of python.') 5242 5243 import os 5244 import optparse 5245 # Get the directory of the script real path (bin) 5246 # and add it to the current PYTHONPATH 5247 root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ ))) 5248 sys.path.insert(0, root_path) 5249
5250 - class MyOptParser(optparse.OptionParser):
5251 - class InvalidOption(Exception): pass
5252 - def error(self, msg=''):
5253 raise MyOptParser.InvalidOption(msg)
5254 # Write out nice usage message if called with -h or --help 5255 usage = "usage: %prog [options] [FILE] " 5256 parser = MyOptParser(usage=usage) 5257 parser.add_option("-l", "--logging", default='INFO', 5258 help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]") 5259 parser.add_option("","--web", action="store_true", default=False, dest='web', \ 5260 help='force toce to be in secure mode') 5261 parser.add_option("","--debug", action="store_true", default=False, dest='debug', \ 5262 help='force to launch debug mode') 5263 parser_error = '' 5264 done = False 5265 5266 for i in range(len(sys.argv)-1): 5267 try: 5268 (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i]) 5269 done = True 5270 except MyOptParser.InvalidOption, error: 5271 pass 5272 else: 5273 args += sys.argv[len(sys.argv)-i:] 5274 if not done: 5275 # raise correct error: 5276 try: 5277 (options, args) = parser.parse_args() 5278 except MyOptParser.InvalidOption, error: 5279 print error 5280 sys.exit(2) 5281 5282 if len(args) == 0: 5283 args = '' 5284 5285 import subprocess 5286 import logging 5287 import logging.config 5288 # Set logging level according to the logging level given by options 5289 #logging.basicConfig(level=vars(logging)[options.logging]) 5290 import internal.coloring_logging 5291 try: 5292 if __debug__ and options.logging == 'INFO': 5293 options.logging = 'DEBUG' 5294 if options.logging.isdigit(): 5295 level = int(options.logging) 5296 else: 5297 level = eval('logging.' + options.logging) 5298 print os.path.join(root_path, 'internal', 'me5_logging.conf') 5299 logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf')) 5300 logging.root.setLevel(level) 5301 logging.getLogger('madgraph').setLevel(level) 5302 except: 5303 raise 5304 pass 5305 5306 # Call the cmd interface main loop 5307 try: 5308 if args: 5309 # a single command is provided 5310 if '--web' in args: 5311 i = args.index('--web') 5312 args.pop(i) 5313 cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True) 5314 else: 5315 cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True) 5316 5317 if not hasattr(cmd_line, 'do_%s' % args[0]): 5318 if parser_error: 5319 print parser_error 5320 print 'and %s can not be interpreted as a valid command.' % args[0] 5321 else: 5322 print 'ERROR: %s not a valid command. Please retry' % args[0] 5323 else: 5324 cmd_line.use_rawinput = False 5325 cmd_line.run_cmd(' '.join(args)) 5326 cmd_line.run_cmd('quit') 5327 5328 except KeyboardInterrupt: 5329 print 'quit on KeyboardInterrupt' 5330 pass 5331