1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 from __future__ import absolute_import
21 from __future__ import print_function
# Standard-library imports (duplicated 'tarfile' and 'traceback' entries
# removed; one import per module, alphabetical order).
import atexit
import copy
import datetime
import glob
import logging
import math
import optparse
import os
import pydoc
import random
import re
import shutil
import signal
import subprocess
import sys
import tarfile
import time
import traceback

# Third-party py2/py3 compatibility layer.
import six
# Historical alias: parts of the code reach StringIO through the six module.
StringIO = six
from six.moves import range
from six.moves import zip
try:
    # Use the fast C implementation when it exists (python2's module is
    # spelled 'cPickle' -- the previous lowercase 'cpickle' could never be
    # imported, so the fast path was silently dead code).
    import cPickle as pickle
except ImportError:
    # python3 merged the C accelerator into the plain 'pickle' module.
    import pickle
50
try:
    import readline
    # libedit-based readline (e.g. macOS) does not mention GNU in its
    # docstring and uses a different word-splitting convention.
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except (ImportError, TypeError):
    # No readline at all (e.g. Windows), or a readline whose __doc__ is
    # None (the 'in' test then raises TypeError): assume GNU behaviour,
    # as the original bare 'except' did.
    GNU_SPLITTING = True
56
# MG5aMC root directory: two directory levels above the one containing this
# file; its bin/ sub-directory is made importable for the helper scripts.
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path,'bin'))


# shorthand used throughout this module
pjoin = os.path.join

# user-facing message stream and error stream
logger = logging.getLogger('madgraph.stdout')
logger_stderr = logging.getLogger('madgraph.stderr')
66
67 try:
68 import madgraph
69 except ImportError:
70 aMCatNLO = True
71 import internal.extended_cmd as cmd
72 import internal.common_run_interface as common_run
73 import internal.banner as banner_mod
74 import internal.misc as misc
75 from internal import InvalidCmd, MadGraph5Error
76 import internal.files as files
77 import internal.cluster as cluster
78 import internal.save_load_object as save_load_object
79 import internal.gen_crossxhtml as gen_crossxhtml
80 import internal.sum_html as sum_html
81 import internal.shower_card as shower_card
82 import internal.FO_analyse_card as analyse_card
83 import internal.lhe_parser as lhe_parser
84 else:
85
86 aMCatNLO = False
87 import madgraph.interface.extended_cmd as cmd
88 import madgraph.interface.common_run_interface as common_run
89 import madgraph.iolibs.files as files
90 import madgraph.iolibs.save_load_object as save_load_object
91 import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
92 import madgraph.madevent.sum_html as sum_html
93 import madgraph.various.banner as banner_mod
94 import madgraph.various.cluster as cluster
95 import madgraph.various.misc as misc
96 import madgraph.various.shower_card as shower_card
97 import madgraph.various.FO_analyse_card as analyse_card
98 import madgraph.various.lhe_parser as lhe_parser
99 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error,MG5DIR
103
106 """compile the direcory p_dir
107 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
108 this function needs not to be a class method in order to do
109 the compilation on multicore"""
110
111 if len(arguments) == 1:
112 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
113 elif len(arguments)==7:
114 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
115 else:
116 raise aMCatNLOError('not correct number of argument')
117 logger.info(' Compiling %s...' % p_dir)
118
119 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
120
121 try:
122
123
124 for test in tests:
125
126 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
127 continue
128 if test == 'test_ME' or test == 'test_MC':
129 test_exe='test_soft_col_limits'
130 else:
131 test_exe=test
132 misc.compile([test_exe], cwd = this_dir, job_specs = False)
133 input = pjoin(me_dir, '%s_input.txt' % test)
134
135 misc.call(['./%s' % (test_exe)], cwd=this_dir,
136 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'),
137 close_fds=True)
138 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
139 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
140 dereference=True)
141 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
142 tf.close()
143
144 if not options['reweightonly']:
145 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
146 misc.call(['./gensym'],cwd= this_dir,
147 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'),
148 close_fds=True)
149
150 misc.compile([exe], cwd=this_dir, job_specs = False)
151 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
152 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
153
154 logger.info(' %s done.' % p_dir)
155 return 0
156 except MadGraph5Error as msg:
157 return msg
158
161 """check that the current fortran compiler is gfortran 4.6 or later.
162 If block, stops the execution, otherwise just print a warning"""
163
164 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
165 'gfortran 4.6 or later installed.\n%s has been detected\n'+\
166 'Note that You can still run all MadEvent run without any problem!'
167
168 if options['fortran_compiler']:
169 compiler = options['fortran_compiler']
170 elif misc.which('gfortran'):
171 compiler = 'gfortran'
172 else:
173 compiler = ''
174
175 if 'gfortran' not in compiler:
176 if block:
177 raise aMCatNLOError(msg % compiler)
178 else:
179 logger.warning(msg % compiler)
180 else:
181 curr_version = misc.get_gfortran_version(compiler)
182 curr_version = curr_version.split('.')
183 if len(curr_version) == 1:
184 curr_version.append(0)
185
186 if int(curr_version[0]) < 5:
187 if int(curr_version[0]) == 4 and int(curr_version[1]) > 5:
188 return
189 if block:
190 raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
191 else:
192 logger.warning(msg % (compiler + ' ' + curr_version))
193
194
195
196
197
198
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # suggestion map used by the interactive help ('start' has no follow-up)
    next_possibility = {
        'start': [],
    }

    # file name where crash/debug information is dumped
    debug_output = 'ME5_debug'
    # message shown to the user when an unexpected error occurs
    error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    # message shown on configuration-related problems
    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'

    # printed when the user hits Ctrl-C
    keyboard_stop_msg = """stopping all operation
in order to quit MadGraph5_aMC@NLO please enter exit"""

    # exception classes the underlying cmd framework should use
    InvalidCmd = InvalidCmd
    ConfigurationError = aMCatNLOError
221
    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation"""

        # when True, every interactive question is answered by its default
        self.force = False

        # Build the version/date line of the banner: prefer the packaged
        # info, otherwise fall back on the MGMEVersion.txt file.
        info = misc.get_pkg_info()
        info_line = ""
        if info and 'version' in info and 'date' in info:
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                            (info['version'],
                            (30 - len_version - len_date) * ' ',
                            info['date'])
        else:
            version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                        (version, (24 - len(version)) * ' ')

        # Header written at the top of any saved command-history file.
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        # the on-screen banner has no leading '#', so strip it
        if info_line:
            info_line = info_line[1:]

        # Welcome banner printed at interface start-up.
        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
297
298
300 """return the history header"""
301 return self.history_header % misc.get_time_info()
302
304 """action to perform to close nicely on a keyboard interupt"""
305 try:
306 if hasattr(self, 'cluster'):
307 logger.info('rm jobs on queue')
308 self.cluster.remove()
309 if hasattr(self, 'results'):
310 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
311 self.add_error_log_in_html(KeyboardInterrupt)
312 except:
313 pass
314
315 - def postcmd(self, stop, line):
316 """ Update the status of the run for finishing interactive command """
317
318
319 self.force = False
320
321 if not self.use_rawinput:
322 return stop
323
324
325 arg = line.split()
326 if len(arg) == 0:
327 return stop
328 elif str(arg[0]) in ['exit','quit','EOF']:
329 return stop
330
331 try:
332 self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
333 level=None, error=True)
334 except Exception:
335 misc.sprint('self.update_status fails', log=logger)
336 pass
337
343
349
355
362 """ The Series of help routine for the aMCatNLOCmd"""
363
367
369 logger.info("syntax: banner_run Path|RUN [--run_options]")
370 logger.info("-- Reproduce a run following a given banner")
371 logger.info(" One of the following argument is require:")
372 logger.info(" Path should be the path of a valid banner.")
373 logger.info(" RUN should be the name of a run of the current directory")
374 self.run_options_help([('-f','answer all question by default'),
375 ('--name=X', 'Define the name associated with the new run')])
376
377
381
386
387
391
395
396
398 logger.info("syntax: open FILE ")
399 logger.info("-- open a file with the appropriate editor.")
400 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
401 logger.info(' the path to the last created/used directory is used')
402
404 if data:
405 logger.info('-- local options:')
406 for name, info in data:
407 logger.info(' %s : %s' % (name, info))
408
409 logger.info("-- session options:")
410 logger.info(" Note that those options will be kept for the current session")
411 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
412 logger.info(" --multicore : Run in multi-core configuration")
413 logger.info(" --nb_core=X : limit the number of core to use to X.")
414
422 """ The Series of check routine for the aMCatNLOCmd"""
423
425 """Check the validity of the line. args[0] is the run_directory"""
426
427 if options['force']:
428 self.force = True
429
430 if len(args) == 0:
431 self.help_shower()
432 raise self.InvalidCmd('Invalid syntax, please specify the run name')
433 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
434 raise self.InvalidCmd('Directory %s does not exists' % \
435 pjoin(os.getcwd(), 'Events', args[0]))
436
437 self.set_run_name(args[0], level= 'shower')
438 args[0] = pjoin(self.me_dir, 'Events', args[0])
439
441 """Check the argument for the plot command
442 plot run_name modes"""
443
444
445 madir = self.options['madanalysis_path']
446 td = self.options['td_path']
447
448 if not madir or not td:
449 logger.info('Retry to read configuration file to find madanalysis/td')
450 self.set_configuration()
451
452 madir = self.options['madanalysis_path']
453 td = self.options['td_path']
454
455 if not madir:
456 error_msg = 'No Madanalysis path correctly set.'
457 error_msg += 'Please use the set command to define the path and retry.'
458 error_msg += 'You can also define it in the configuration file.'
459 raise self.InvalidCmd(error_msg)
460 if not td:
461 error_msg = 'No path to td directory correctly set.'
462 error_msg += 'Please use the set command to define the path and retry.'
463 error_msg += 'You can also define it in the configuration file.'
464 raise self.InvalidCmd(error_msg)
465
466 if len(args) == 0:
467 if not hasattr(self, 'run_name') or not self.run_name:
468 self.help_plot()
469 raise self.InvalidCmd('No run name currently define. Please add this information.')
470 args.append('all')
471 return
472
473
474 if args[0] not in self._plot_mode:
475 self.set_run_name(args[0], level='plot')
476 del args[0]
477 if len(args) == 0:
478 args.append('all')
479 elif not self.run_name:
480 self.help_plot()
481 raise self.InvalidCmd('No run name currently define. Please add this information.')
482
483 for arg in args:
484 if arg not in self._plot_mode and arg != self.run_name:
485 self.help_plot()
486 raise self.InvalidCmd('unknown options %s' % arg)
487
489 """Check the argument for pythia command
490 syntax: pgs [NAME]
491 Note that other option are already remove at this point
492 """
493
494
495 if not self.options['pythia-pgs_path']:
496 logger.info('Retry to read configuration file to find pythia-pgs path')
497 self.set_configuration()
498
499 if not self.options['pythia-pgs_path'] or not \
500 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
501 error_msg = 'No pythia-pgs path correctly set.'
502 error_msg += 'Please use the set command to define the path and retry.'
503 error_msg += 'You can also define it in the configuration file.'
504 raise self.InvalidCmd(error_msg)
505
506 tag = [a for a in arg if a.startswith('--tag=')]
507 if tag:
508 arg.remove(tag[0])
509 tag = tag[0][6:]
510
511
512 if len(arg) == 0 and not self.run_name:
513 if self.results.lastrun:
514 arg.insert(0, self.results.lastrun)
515 else:
516 raise self.InvalidCmd('No run name currently define. Please add this information.')
517
518 if len(arg) == 1 and self.run_name == arg[0]:
519 arg.pop(0)
520
521 if not len(arg) and \
522 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
523 self.help_pgs()
524 raise self.InvalidCmd('''No file file pythia_events.hep currently available
525 Please specify a valid run_name''')
526
527 lock = None
528 if len(arg) == 1:
529 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
530 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
531
532 if not filenames:
533 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
534 else:
535 input_file = filenames[0]
536 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
537 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
538 argument=['-c', input_file],
539 close_fds=True)
540 else:
541 if tag:
542 self.run_card['run_tag'] = tag
543 self.set_run_name(self.run_name, tag, 'pgs')
544
545 return lock
546
547
549 """Check the argument for pythia command
550 syntax: delphes [NAME]
551 Note that other option are already remove at this point
552 """
553
554
555 if not self.options['delphes_path']:
556 logger.info('Retry to read configuration file to find delphes path')
557 self.set_configuration()
558
559 if not self.options['delphes_path']:
560 error_msg = 'No delphes path correctly set.'
561 error_msg += 'Please use the set command to define the path and retry.'
562 error_msg += 'You can also define it in the configuration file.'
563 raise self.InvalidCmd(error_msg)
564
565 tag = [a for a in arg if a.startswith('--tag=')]
566 if tag:
567 arg.remove(tag[0])
568 tag = tag[0][6:]
569
570
571 if len(arg) == 0 and not self.run_name:
572 if self.results.lastrun:
573 arg.insert(0, self.results.lastrun)
574 else:
575 raise self.InvalidCmd('No run name currently define. Please add this information.')
576
577 if len(arg) == 1 and self.run_name == arg[0]:
578 arg.pop(0)
579
580 if not len(arg) and \
581 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
582 self.help_pgs()
583 raise self.InvalidCmd('''No file file pythia_events.hep currently available
584 Please specify a valid run_name''')
585
586 if len(arg) == 1:
587 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
588 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))
589
590
591 if not filenames:
592 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
593 % (self.run_name, prev_tag,
594 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
595 else:
596 input_file = filenames[0]
597 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
598 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
599 argument=['-c', input_file],
600 close_fds=True)
601 else:
602 if tag:
603 self.run_card['run_tag'] = tag
604 self.set_run_name(self.run_name, tag, 'delphes')
605
607 """check the validity of the line. args is ORDER,
608 ORDER being LO or NLO. If no mode is passed, NLO is used"""
609
610
611
612 if options['force']:
613 self.force = True
614
615 if not args:
616 args.append('NLO')
617 return
618
619 if len(args) > 1:
620 self.help_calculate_xsect()
621 raise self.InvalidCmd('Invalid Syntax: Too many argument')
622
623 elif len(args) == 1:
624 if not args[0] in ['NLO', 'LO']:
625 raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[1])
626 mode = args[0]
627
628
629 if options['multicore'] and options['cluster']:
630 raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
631 ' are not compatible. Please choose one.')
632
633
635 """check the validity of the line. args is ORDER,
636 ORDER being LO or NLO. If no mode is passed, NLO is used"""
637
638
639
640 if not args:
641 args.append('NLO')
642 return
643
644 if len(args) > 1:
645 self.help_generate_events()
646 raise self.InvalidCmd('Invalid Syntax: Too many argument')
647
648 elif len(args) == 1:
649 if not args[0] in ['NLO', 'LO']:
650 raise self.InvalidCmd('%s is not a valid mode, please use "LO" or "NLO"' % args[1])
651 mode = args[0]
652
653
654 if options['multicore'] and options['cluster']:
655 raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
656 ' are not compatible. Please choose one.')
657
659 """check the validity of line"""
660
661 if len(args) == 0:
662 self.help_banner_run()
663 raise self.InvalidCmd('banner_run requires at least one argument.')
664
665 tag = [a[6:] for a in args if a.startswith('--tag=')]
666
667
668 if os.path.exists(args[0]):
669 type ='banner'
670 format = self.detect_card_type(args[0])
671 if format != 'banner':
672 raise self.InvalidCmd('The file is not a valid banner.')
673 elif tag:
674 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
675 (args[0], tag))
676 if not os.path.exists(args[0]):
677 raise self.InvalidCmd('No banner associates to this name and tag.')
678 else:
679 name = args[0]
680 type = 'run'
681 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
682 if not banners:
683 raise self.InvalidCmd('No banner associates to this name.')
684 elif len(banners) == 1:
685 args[0] = banners[0]
686 else:
687
688 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
689 tag = self.ask('which tag do you want to use?', tags[0], tags)
690 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
691 (args[0], tag))
692
693 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
694 if run_name:
695 try:
696 self.exec_cmd('remove %s all banner -f' % run_name)
697 except Exception:
698 pass
699 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
700 elif type == 'banner':
701 self.set_run_name(self.find_available_run_name(self.me_dir))
702 elif type == 'run':
703 if not self.results[name].is_empty():
704 run_name = self.find_available_run_name(self.me_dir)
705 logger.info('Run %s is not empty so will use run_name: %s' % \
706 (name, run_name))
707 self.set_run_name(run_name)
708 else:
709 try:
710 self.exec_cmd('remove %s all banner -f' % run_name)
711 except Exception:
712 pass
713 self.set_run_name(name)
714
715
716
718 """check the validity of the line. args is MODE
719 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
720
721
722
723 if options['force']:
724 self.force = True
725
726
727 if not args:
728 args.append('auto')
729 return
730
731 if len(args) > 1:
732 self.help_launch()
733 raise self.InvalidCmd('Invalid Syntax: Too many argument')
734
735 elif len(args) == 1:
736 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
737 raise self.InvalidCmd('%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0])
738 mode = args[0]
739
740
741 if options['multicore'] and options['cluster']:
742 raise self.InvalidCmd('options -m (--multicore) and -c (--cluster)' + \
743 ' are not compatible. Please choose one.')
744 if mode == 'NLO' and options['reweightonly']:
745 raise self.InvalidCmd('option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"')
746
747
749 """check the validity of the line. args is MODE
750 MODE being FO or MC. If no mode is passed, MC is used"""
751
752
753
754 if options['force']:
755 self.force = True
756
757 if not args:
758 args.append('MC')
759 return
760
761 if len(args) > 1:
762 self.help_compile()
763 raise self.InvalidCmd('Invalid Syntax: Too many argument')
764
765 elif len(args) == 1:
766 if not args[0] in ['MC', 'FO']:
767 raise self.InvalidCmd('%s is not a valid mode, please use "FO" or "MC"' % args[0])
768 mode = args[0]
769
777 """ The Series of help routine for the MadGraphCmd"""
778
780 """auto-completion for launch command"""
781
782 args = self.split_arg(line[0:begidx])
783 if len(args) == 1:
784
785 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
786 elif len(args) == 2 and line[begidx-1] == '@':
787 return self.list_completion(text,['LO','NLO'],line)
788 else:
789 opts = []
790 for opt in _launch_parser.option_list:
791 opts += opt._long_opts + opt._short_opts
792 return self.list_completion(text, opts, line)
793
795 "Complete the banner run command"
796 try:
797
798
799 args = self.split_arg(line[0:begidx], error=False)
800
801 if args[-1].endswith(os.path.sep):
802 return self.path_completion(text,
803 os.path.join('.',*[a for a in args \
804 if a.endswith(os.path.sep)]))
805
806
807 if len(args) > 1:
808
809 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
810 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
811
812 if args[-1] != '--tag=':
813 tags = ['--tag=%s' % t for t in tags]
814 else:
815 return self.list_completion(text, tags)
816 return self.list_completion(text, tags +['--name=','-f'], line)
817
818
819 possibilites = {}
820
821 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
822 if a.endswith(os.path.sep)]))
823 if os.path.sep in line:
824 return comp
825 else:
826 possibilites['Path from ./'] = comp
827
828 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
829 run_list = [n.rsplit('/',2)[1] for n in run_list]
830 possibilites['RUN Name'] = self.list_completion(text, run_list)
831
832 return self.deal_multiple_categories(possibilites, formatting)
833
834
835 except Exception as error:
836 print(error)
837
838
851
864
866 """auto-completion for generate_events command
867 call the compeltion for launch"""
868 self.complete_launch(text, line, begidx, endidx)
869
870
880
896
898 "Complete the pgs command"
899 args = self.split_arg(line[0:begidx], error=False)
900 if len(args) == 1:
901
902 data = misc.glob(pjoin('*', 'events_*.hep.gz'),
903 pjoin(self.me_dir, 'Events'))
904 data = [n.rsplit('/',2)[1] for n in data]
905 tmp1 = self.list_completion(text, data)
906 if not self.run_name:
907 return tmp1
908 else:
909 tmp2 = self.list_completion(text, self._run_options + ['-f',
910 '--tag=' ,'--no_default'], line)
911 return tmp1 + tmp2
912 else:
913 return self.list_completion(text, self._run_options + ['-f',
914 '--tag=','--no_default'], line)
915
916 complete_delphes = complete_pgs
917
920
922
923 to_control = [('order', 'Type of perturbative computation'),
924 ('fixed_order', 'No MC@[N]LO matching / event generation'),
925 ('shower', 'Shower the generated events'),
926 ('madspin', 'Decay onshell particles'),
927 ('reweight', 'Add weights to events for new hypp.'),
928 ('madanalysis','Run MadAnalysis5 on the events generated')]
929
930 quit_on = cmd.ControlSwitch.quit_on + ['onlyshower']
931
932 - def __init__(self, question, line_args=[], mode=None, force=False,
933 *args, **opt):
934
935 self.me_dir = opt['mother_interface'].me_dir
936 self.check_available_module(opt['mother_interface'].options)
937 self.last_mode = opt['mother_interface'].last_mode
938 self.proc_characteristics = opt['mother_interface'].proc_characteristics
939 self.run_card = banner_mod.RunCard(pjoin(self.me_dir,'Cards', 'run_card.dat'),
940 consistency='warning')
941 super(AskRunNLO,self).__init__(self.to_control, opt['mother_interface'],
942 *args, **opt)
943
944 @property
946
947 out = super(AskRunNLO, self).answer
948 if out['shower'] == 'HERWIG7':
949 out['shower'] = 'HERWIGPP'
950
951 if out['shower'] not in self.get_allowed('shower') or out['shower'] =='OFF':
952 out['runshower'] = False
953 else:
954 out['runshower'] = True
955 return out
956
957
959
960 self.available_module = set()
961 if options['madanalysis5_path']:
962 self.available_module.add('MA5')
963 if not aMCatNLO or ('mg5_path' in options and options['mg5_path']):
964
965 self.available_module.add('MadSpin')
966 if misc.has_f2py() or options['f2py_compiler']:
967 self.available_module.add('reweight')
968 if options['pythia8_path']:
969 self.available_module.add('PY8')
970 if options['hwpp_path'] and options['thepeg_path'] and options['hepmc_path']:
971 self.available_module.add('HW7')
972
973 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
974 if os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))):
975 self.available_module.add('StdHEP')
976
977
978
980 """ function called if the user type lo=value. or lo (then value is None)"""
981
982 if value is None:
983 self.switch['order'] = 'LO'
984 self.switch['fixed_order'] = 'ON'
985 self.set_switch('shower', 'OFF')
986 else:
987 logger.warning('Invalid command: lo=%s' % value)
988
990 if value is None:
991 self.switch['order'] = 'NLO'
992 self.switch['fixed_order'] = 'ON'
993 self.set_switch('shower', 'OFF')
994 else:
995 logger.warning('Invalid command: nlo=%s' % value)
996
998 if value is None:
999 self.switch['order'] = 'NLO'
1000 self.switch['fixed_order'] = 'OFF'
1001 self.set_switch('shower', 'ON')
1002 else:
1003 logger.warning('Invalid command: aMC@NLO=%s' % value)
1004
1006 if value is None:
1007 self.switch['order'] = 'LO'
1008 self.switch['fixed_order'] = 'OFF'
1009 self.set_switch('shower', 'ON')
1010 else:
1011 logger.warning('Invalid command: aMC@LO=%s' % value)
1012
1014 if value is None:
1015 self.switch['order'] = 'NLO'
1016 self.switch['fixed_order'] = 'OFF'
1017 self.set_switch('shower', 'OFF')
1018 else:
1019 logger.warning('Invalid command: noshower=%s' % value)
1020
1022 if value is None:
1023 self.switch['mode'] = 'onlyshower'
1024 self.switch['madspin'] = 'OFF'
1025 self.switch['reweight'] = 'OFF'
1026 else:
1027 logger.warning('Invalid command: onlyshower=%s' % value)
1028
1030 if value is None:
1031 self.switch['order'] = 'LO'
1032 self.switch['fixed_order'] = 'OFF'
1033 self.set_switch('shower', 'OFF')
1034 else:
1035 logger.warning('Invalid command: noshowerlo=%s' % value)
1036
1038 """ shortcut madanalysis5 -> madanalysis """
1039
1040 if value is None:
1041 return self.onecmd('madanalysis')
1042 else:
1043 self.set_switch('madanalysis', value)
1044
1045
1046
1048 return ["LO", "NLO"]
1049
1051
1052 if self.last_mode in ['LO', 'aMC@L0', 'noshowerLO']:
1053 self.switch['order'] = 'LO'
1054 self.switch['order'] = 'NLO'
1055
1058
1059
1060
1062 """ """
1063 if self.proc_characteristics['ninitial'] == 1:
1064 return ['ON']
1065 else:
1066 return ['ON', 'OFF']
1067
1069
1070 if self.last_mode in ['LO', 'NLO']:
1071 self.switch['fixed_order'] = 'ON'
1072 if self.proc_characteristics['ninitial'] == 1:
1073 self.switch['fixed_order'] = 'ON'
1074 else:
1075 self.switch['fixed_order'] = 'OFF'
1076
1078
1079 if switch_value in ['OFF']:
1080 return self.green % switch_value
1081 else:
1082 return self.red % switch_value
1083
1085
1086 if switch_value in ['ON']:
1087 return self.green % switch_value
1088 elif switch_value in self.get_allowed('shower'):
1089 return self.green % switch_value
1090 else:
1091 return self.red % switch_value
1092
1094 """ consistency_XX_YY(val_XX, val_YY)
1095 -> XX is the new key set by the user to a new value val_XX
1096 -> YY is another key set by the user.
1097 -> return value should be None or "replace_YY"
1098 """
1099
1100 if vfix == 'ON' and vshower != 'OFF' :
1101 return 'OFF'
1102 return None
1103
1104 consistency_fixed_order_madspin = consistency_fixed_order_shower
1105 consistency_fixed_order_reweight = consistency_fixed_order_shower
1106
1108
1109 if vfix == 'ON' and vma5 == 'ON' :
1110 return 'OFF'
1111 return None
1112
1113
1115 """ consistency_XX_YY(val_XX, val_YY)
1116 -> XX is the new key set by the user to a new value val_XX
1117 -> YY is another key set by the user.
1118 -> return value should be None or "replace_YY"
1119 """
1120
1121 if vshower != 'OFF' and vfix == 'ON':
1122 return 'OFF'
1123 return None
1124
1125 consistency_madspin_fixed_order = consistency_shower_fixed_order
1126 consistency_reweight_fixed_order = consistency_shower_fixed_order
1127 consistency_madanalysis_fixed_order = consistency_shower_fixed_order
1128
1129
1130
1131
1132
1134 """ """
1135
1136 if hasattr(self, 'allowed_shower'):
1137 return self.allowed_shower
1138
1139 if not misc.which('bc'):
1140 return ['OFF']
1141
1142 if self.proc_characteristics['ninitial'] == 1:
1143 self.allowed_shower = ['OFF']
1144 return ['OFF']
1145 else:
1146 if 'StdHEP' in self.available_module:
1147 allowed = ['HERWIG6','OFF', 'PYTHIA6Q', 'PYTHIA6PT', ]
1148 else:
1149 allowed = ['OFF']
1150 if 'PY8' in self.available_module:
1151 allowed.append('PYTHIA8')
1152 if 'HW7' in self.available_module:
1153 allowed.append('HERWIGPP')
1154
1155
1156 self.allowed_shower = allowed
1157
1158 return allowed
1159
1161 """ """
1162
1163 if value.upper() in self.get_allowed_shower():
1164 return True
1165 if value.upper() in ['PYTHIA8', 'HERWIGPP']:
1166 return True
1167 if value.upper() == 'ON':
1168 return self.run_card['parton_shower']
1169 if value.upper() in ['P8','PY8','PYTHIA_8']:
1170 return 'PYTHIA8'
1171 if value.upper() in ['PY6','P6','PY6PT', 'PYTHIA_6', 'PYTHIA_6PT','PYTHIA6PT','PYTHIA6_PT']:
1172 return 'PYTHIA6PT'
1173 if value.upper() in ['PY6Q', 'PYTHIA_6Q','PYTHIA6Q', 'PYTHIA6_Q']:
1174 return 'PYTHIA6Q'
1175 if value.upper() in ['HW7', 'HERWIG7']:
1176 return 'HERWIG7'
1177 if value.upper() in ['HW++', 'HWPP', 'HERWIG++']:
1178 return 'HERWIGPP'
1179 if value.upper() in ['HW6', 'HERWIG_6']:
1180 return 'HERWIG6'
1181
1183
1184 if self.last_mode in ['LO', 'NLO', 'noshower', 'noshowerLO']:
1185 self.switch['shower'] = 'OFF'
1186 return
1187
1188 if self.proc_characteristics['ninitial'] == 1:
1189 self.switch['shower'] = 'OFF'
1190 return
1191
1192 if not misc.which('bc'):
1193 logger.warning('bc command not available. Forbids to run the shower. please install it if you want to run the shower. (sudo apt-get install bc)')
1194 self.switch['shower'] = 'OFF'
1195 return
1196
1197 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
1198 self.switch['shower'] = self.run_card['parton_shower']
1199
1200 self.switch['fixed_order'] = "OFF"
1201 else:
1202 self.switch['shower'] = 'OFF'
1203
# NOTE(review): stripped `def` line (presumably
# `def consistency_shower_madanalysis(self, vshower, vma5):` — TODO confirm).
# Returns the value MA5 must be forced to when the shower changes, or None if
# the current combination is consistent.
1205 """ MA5 only possible with (N)LO+PS if shower is run"""
1206 
1207 if vshower == 'OFF' and vma5 == 'ON':
1208 return 'OFF'
1209 return None
1210
# NOTE(review): stripped `def` line (presumably
# `def consistency_madanalysis_shower(self, vma5, vshower):` — TODO confirm).
# Mirror of the check above: turning MA5 ON while the shower is OFF forces the
# shower back ON; returns None when no change is needed.
1212 
1213 if vma5=='ON' and vshower == 'OFF':
1214 return 'ON'
1215 return None
1216
# NOTE(review): stripped `def` line (a card-command hook for the 'shower'
# switch — TODO confirm exact name/signature).  Returns the list of interface
# commands needed to propagate the chosen shower into the run_card.
1218 """ adapt run_card according to this setup. return list of cmd to run"""
1219 
1220 if value != 'OFF':
1221 return ['set parton_shower %s' % self.switch['shower']]
1222 return []
1223
1224
1225
1226
# NOTE(review): stripped `def` line (presumably `def get_allowed_madspin(self):`
# — TODO confirm).  Lazily computes and caches self.allowed_madspin: empty when
# the MadSpin module is absent, only 'OFF' for decay (1->N) processes, and the
# full set otherwise.
1228 """Return (and cache) the list of valid values for the 'madspin' switch."""
1229 
1230 if hasattr(self, 'allowed_madspin'):
1231 return self.allowed_madspin
1232 
1233 self.allowed_madspin = []
1234 
1235 
1236 if 'MadSpin' not in self.available_module:
1237 return self.allowed_madspin
# for decay processes MadSpin is removed from the available modules entirely
1238 if self.proc_characteristics['ninitial'] == 1:
1239 self.available_module.remove('MadSpin')
1240 self.allowed_madspin = ['OFF']
1241 return self.allowed_madspin
1242 else:
1243 self.allowed_madspin = ['OFF', 'ON', 'onshell']
1244 return self.allowed_madspin
1245
# NOTE(review): stripped `def` line (presumably
# `def check_value_madspin(self, value):` — TODO confirm).  Returns True for an
# already-canonical value, the canonical spelling for a case shortcut, False
# when MadSpin is unusable, the spinmode names for their aliases, and falls
# through (None, i.e. invalid) otherwise.
1247 """handle alias and valid option not present in get_allowed_madspin
1248 remember that this mode should always be OFF for 1>N. (ON not in allowed value)"""
1249 
1250 if value.upper() in self.get_allowed_madspin():
1251 if value == value.upper():
1252 return True
1253 else:
1254 return value.upper()
1255 elif value.lower() in self.get_allowed_madspin():
1256 if value == value.lower():
1257 return True
1258 else:
1259 return value.lower()
1260 
# spinmode shortcuts are only meaningful when full MadSpin mode is available
1261 if 'MadSpin' not in self.available_module or \
1262 'ON' not in self.get_allowed_madspin():
1263 return False
1264 
1265 if value.lower() in ['madspin', 'full']:
1266 return 'full'
1267 elif value.lower() in ['none']:
1268 return 'none'
1269
1271
# NOTE(review): stripped `def` line (presumably `def set_default_madspin(self):`
# — TODO confirm).  Initial switch value: ON when a madspin card exists, OFF
# when the module is present but no card, 'Not Avail.' otherwise.
1272 if 'MadSpin' in self.available_module:
1273 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
1274 self.switch['madspin'] = 'ON'
1275 else:
1276 self.switch['madspin'] = 'OFF'
1277 else:
1278 self.switch['madspin'] = 'Not Avail.'
1279
# NOTE(review): stripped `def` line (a card-command hook for the 'madspin'
# switch — TODO confirm exact name/signature).  Translates the chosen spinmode
# into an 'edit madspin_card' command that pins the matching 'set spinmode'
# line before the first decay statement.
1281 """set some command to run before allowing the user to modify the cards."""
1282 
1283 if value == 'onshell':
1284 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode onshell"]
1285 elif value in ['full', 'madspin']:
1286 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode madspin"]
1287 elif value == 'none':
1288 return ["edit madspin_card --replace_line='set spinmode' --before_line='decay' set spinmode none"]
1289 else:
1290 return []
1291
1292
1293
1294
# NOTE(review): stripped `def` line (presumably `def get_allowed_reweight(self):`
# — TODO confirm).  Same lazy-cache pattern as the madspin accessor above.
1296 """set the valid (visible) options for reweight"""
1297 
1298 if hasattr(self, 'allowed_reweight'):
1299 return getattr(self, 'allowed_reweight')
1300 
1301 self.allowed_reweight = []
1302 if 'reweight' not in self.available_module:
1303 return self.allowed_reweight
# reweighting is not supported for decay (1->N) processes
1304 if self.proc_characteristics['ninitial'] == 1:
1305 self.available_module.remove('reweight')
1306 self.allowed_reweight.append('OFF')
1307 return self.allowed_reweight
1308 else:
1309 self.allowed_reweight = [ 'OFF', 'ON', 'NLO', 'NLO_TREE','LO']
1310 return self.allowed_reweight
1311
# NOTE(review): stripped `def` line (presumably `def set_default_reweight(self):`
# — TODO confirm).  Switch defaults to ON when a reweight card already exists.
1313 """initialise the switch for reweight"""
1314 
1315 if 'reweight' in self.available_module:
1316 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')):
1317 self.switch['reweight'] = 'ON'
1318 else:
1319 self.switch['reweight'] = 'OFF'
1320 else:
1321 self.switch['reweight'] = 'Not Avail.'
1322
# NOTE(review): stripped `def` line (a card-command hook for the 'reweight'
# switch — TODO confirm exact name/signature).  NLO modes additionally require
# the extra weight information to be stored in the event file.
1324 """ adapt run_card according to this setup. return list of cmd to run"""
1325 
1326 if value == 'LO':
1327 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode LO"]
1328 elif value == 'NLO':
1329 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO",
1330 "set store_rwgt_info T"]
1331 elif value == 'NLO_TREE':
1332 return ["edit reweight_card --replace_line='change mode' --before_line='launch' change mode NLO_tree",
1333 "set store_rwgt_info T"]
1334 return []
1335
1336
1337
1338
1340
# NOTE(review): stripped `def` line (presumably
# `def get_allowed_madanalysis(self):` — TODO confirm).  Lazy-cached list of
# valid values for the 'madanalysis' switch; MA5 is dropped for decay
# processes, mirroring the madspin/reweight accessors above.
1341 if hasattr(self, 'allowed_madanalysis'):
1342 return self.allowed_madanalysis
1343 
1344 self.allowed_madanalysis = []
1345 
1346 
1347 if 'MA5' not in self.available_module:
1348 return self.allowed_madanalysis
1349 
1350 if self.proc_characteristics['ninitial'] == 1:
1351 self.available_module.remove('MA5')
1352 self.allowed_madanalysis = ['OFF']
1353 return self.allowed_madanalysis
1354 else:
1355 self.allowed_madanalysis = ['OFF', 'ON']
1356 return self.allowed_madanalysis
1357
# NOTE(review): stripped `def` line (presumably
# `def set_default_madanalysis(self):` — TODO confirm).
1359 """initialise the switch for madanalysis (ON when a hadron-level MA5 card exists)"""
1360 
1361 if 'MA5' not in self.available_module:
1362 self.switch['madanalysis'] = 'Not Avail.'
1363 elif os.path.exists(pjoin(self.me_dir,'Cards', 'madanalysis5_hadron_card.dat')):
1364 self.switch['madanalysis'] = 'ON'
1365 else:
1366 self.switch['madanalysis'] = 'OFF'
1367
1369 """check an entry is valid. return the valid entry in case of shortcut"""
1370
1371 if value.upper() in self.get_allowed('madanalysis'):
1372 return True
1373 value = value.lower()
1374 if value == 'hadron':
1375 return 'ON' if 'ON' in self.get_allowed_madanalysis5 else False
1376 else:
1377 return False
1378
1379
1380
1381
1382
# Class-level configuration of the aMC@NLO command-line processor.  The class
# body continues beyond this listing chunk; only the header and the shared
# class attributes are documented here.
1383 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
1384 """The command line processor of MadGraph"""
1385 
1386 
# accepted spellings for a Fortran-style boolean "true"
1387     true = ['T','.true.',True,'true']
1388 
# tab-completion option lists for the various commands
1389     _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
1390     _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
1391     _calculate_decay_options = ['-f', '--accuracy=0.']
1392     _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
1393     _plot_mode = ['all', 'parton','shower','pgs','delphes']
1394     _clean_mode = _plot_mode + ['channel', 'banner']
1395     _display_opts = ['run_name', 'options', 'variable']
1396 
1397 
# shared run-state defaults (cluster_mode: 0 single, 1 cluster, 2 multicore —
# see do_launch below, which sets these values)
1398     web = False
1399     cluster_mode = 0
1400     queue = 'madgraph'
1401     nb_core = None
1402     make_opts_var = {}
1403 
# suggestions displayed to the user after each command
1404     next_possibility = {
1405         'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
1406                   'help generate_events'],
1407         'generate_events': ['generate_events [OPTIONS]', 'shower'],
1408         'launch': ['launch [OPTIONS]', 'shower'],
1409         'shower' : ['generate_events [OPTIONS]']
1410     }
1411
1412
1413
def __init__(self, me_dir=None, options=None, *completekey, **stdin):
    """Initialise the aMC@NLO command interface.

    *me_dir* is the process output directory, *options* the configuration
    dictionary; the remaining arguments are forwarded untouched to
    CmdExtended.__init__ (the cmd.Cmd machinery).
    """
    # BUGFIX: the original signature used the mutable default `options = {}`,
    # which is evaluated once and shared by every instance created without an
    # explicit dictionary.  A None sentinel preserves the call signature while
    # giving each instance a fresh dict.
    if options is None:
        options = {}

    self.start_time = 0
    CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)

    self.mode = 'aMCatNLO'
    self.nb_core = 0
    self.prompt = "%s>" % os.path.basename(pjoin(self.me_dir))

    # restore the results database of this process directory
    self.load_results_db()
    self.results.def_web_mode(self.web)

    # IDIOM: the original leaked the file handle; use a context manager
    with open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')) as fsock:
        proc_card = fsock.read()

    # run the (blocking) compiler sanity check unless the process card is a
    # pure '[real=QCD]' one
    if not '[real=QCD]' in proc_card:
        check_compiler(self.options, block=True)
1434
1435
# NOTE(review): stripped `def` line (presumably `def do_shower(self, line):` —
# TODO confirm).  Showers an existing parton-level events.lhe file: parses the
# launch options, asks for the 'onlyshower' configuration and hands the event
# file to run_mcatnlo.
1437 """ run the shower on a given parton level file """
1438 argss = self.split_arg(line)
1439 (options, argss) = _launch_parser.parse_args(argss)
1440 
1441 options = options.__dict__
1442 options['reweightonly'] = False
1443 self.check_shower(argss, options)
# argss[0] is the run directory holding the parton-level events
1444 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
1445 self.ask_run_configuration('onlyshower', options)
1446 self.run_mcatnlo(evt_file, options)
1447 
1448 self.update_status('', level='all', update_results=True)
1449
1450
# NOTE(review): stripped `def` line (presumably `def do_plot(self, line):` —
# TODO confirm).  Creates plots for a given run at the requested levels
# (parton / shower / pgs / delphes, or 'all').  Left byte-identical: the
# sequence of move/gunzip/gzip operations is order-sensitive.
1452 """Create the plot for a given run"""
1453 
1454 
1455 args = self.split_arg(line)
1456 
1457 self.check_plot(args)
1458 logger.info('plot for run %s' % self.run_name)
1459 
1460 if not self.force:
1461 self.ask_edit_cards([], args, plot=True)
1462 
# parton level: temporarily rename events.lhe to the name create_plot expects
1463 if any([arg in ['parton'] for arg in args]):
1464 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
1465 if os.path.exists(filename+'.gz'):
1466 misc.gunzip(filename)
1467 if os.path.exists(filename):
1468 logger.info('Found events.lhe file for run %s' % self.run_name)
1469 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
1470 self.create_plot('parton')
1471 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
1472 misc.gzip(filename)
1473 
# fixed-order topdrawer output, rendered through the MadAnalysis plot scripts
1474 if any([arg in ['all','parton'] for arg in args]):
1475 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
1476 if os.path.exists(filename):
1477 logger.info('Found MADatNLO.top file for run %s' % \
1478 self.run_name)
1479 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
1480 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
1481 
1482 if not os.path.isdir(plot_dir):
1483 os.makedirs(plot_dir)
1484 top_file = pjoin(plot_dir, 'plots.top')
1485 files.cp(filename, top_file)
1486 madir = self.options['madanalysis_path']
1487 tag = self.run_card['run_tag']
1488 td = self.options['td_path']
1489 misc.call(['%s/plot' % self.dirbin, madir, td],
1490 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1491 stderr = subprocess.STDOUT,
1492 cwd=plot_dir)
1493 
1494 misc.call(['%s/plot_page-pl' % self.dirbin,
1495 os.path.basename(plot_dir),
1496 'parton'],
1497 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1498 stderr = subprocess.STDOUT,
1499 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1500 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1501 output)
1502 
1503 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1504 
# shower level: locate the single showered event file (.lhe.gz or .hep.gz)
1505 if any([arg in ['all','shower'] for arg in args]):
1506 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
1507 if len(filenames) != 1:
1508 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
1509 if len(filenames) != 1:
1510 logger.info('No shower level file found for run %s' % \
1511 self.run_name)
1512 return
1513 filename = filenames[0]
1514 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1515 
# a pythia card is needed to convert HEP to LHE; borrow the LO default one
1516 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1517 if aMCatNLO and not self.options['mg5_path']:
1518 raise Exception("plotting NLO HEP file needs MG5 utilities")
1519 
1520 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1521 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1522 self.run_hep2lhe()
1523 else:
1524 filename = filenames[0]
1525 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1526 
1527 self.create_plot('shower')
1528 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1529 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1530 lhe_file_name)
1531 misc.gzip(lhe_file_name)
1532 
# detector level (PGS)
1533 if any([arg in ['all','pgs'] for arg in args]):
1534 filename = pjoin(self.me_dir, 'Events', self.run_name,
1535 '%s_pgs_events.lhco' % self.run_tag)
1536 if os.path.exists(filename+'.gz'):
1537 misc.gunzip(filename)
1538 if os.path.exists(filename):
1539 self.create_plot('PGS')
1540 misc.gzip(filename)
1541 else:
1542 logger.info('No valid files for pgs plot')
1543 
# detector level (Delphes)
1544 if any([arg in ['all','delphes'] for arg in args]):
1545 filename = pjoin(self.me_dir, 'Events', self.run_name,
1546 '%s_delphes_events.lhco' % self.run_tag)
1547 if os.path.exists(filename+'.gz'):
1548 misc.gunzip(filename)
1549 if os.path.exists(filename):
1550 
1551 self.create_plot('Delphes')
1552 
1553 misc.gzip(filename)
1554 else:
1555 logger.info('No valid files for delphes plot')
1556
1557
1558
# NOTE(review): stripped `def` line (presumably
# `def do_calculate_xsect(self, line):` — TODO confirm).
1560 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1561 this function wraps the do_launch one"""
1562 
1563 self.start_time = time.time()
1564 argss = self.split_arg(line)
1565 
1566 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1567 options = options.__dict__
# fixed-order cross-section only: no reweighting, stop at the parton level
1568 options['reweightonly'] = False
1569 options['parton'] = True
1570 self.check_calculate_xsect(argss, options)
1571 self.do_launch(line, options, argss)
1572
1573
# NOTE(review): stripped `def` line (presumably `def do_banner_run(self, line):`
# — TODO confirm).  Re-runs a previous generation from a banner file: the
# banner is split back into its cards and do_launch is invoked with the mode
# switches recorded in the banner.
1575 """Make a run from the banner file"""
1576 
1577 args = self.split_arg(line)
1578 
1579 self.check_banner_run(args)
1580 
1581 
# remove optional cards so only the ones present in the banner survive
1582 for name in ['shower_card.dat', 'madspin_card.dat']:
1583 try:
1584 os.remove(pjoin(self.me_dir, 'Cards', name))
1585 except Exception:
1586 pass
1587 
1588 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1589 
1590 
1591 if not self.force:
1592 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1593 if ans == 'n':
1594 self.force = True
1595 
1596 
# in forced mode, replay the switch settings stored in the banner
1597 if self.force:
1598 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1599 banner = banner_mod.Banner(args[0])
# NOTE(review): the loop variable `line` presumably shadows this command's
# `line` parameter (the stripped def) — harmless here since the parameter is
# no longer used, but worth renaming; TODO confirm.
1600 for line in banner['run_settings']:
1601 if '=' in line:
1602 mode, value = [t.strip() for t in line.split('=')]
1603 mode_status[mode] = value
1604 else:
1605 mode_status = {}
1606 
1607 
1608 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1609 switch=mode_status)
1610
1611
# NOTE(review): stripped `def` line (presumably
# `def do_generate_events(self, line):` — TODO confirm).  Thin alias kept for
# interface compatibility; all the work happens in do_launch.
1613 """Main commands: generate events
1614 this function just wraps the do_launch one"""
1615 self.do_launch(line)
1616
1617
1618
# NOTE(review): stripped `def` line (presumably
# `def do_treatcards(self, line, amcatnlo=True):` — TODO confirm).  The
# assignment of the local `mode` used on line 1628 sat on lines the listing
# tool dropped (1623-1627); body left byte-identical.
1620 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1621 
# refuse 'Auto' entries etc. before generating the include file
1622 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1623 
1624 
1625 
1626 
1627 
# propagate the FO-analysis-card entries into the (system-only) run_card keys
1628 if mode in ['LO', 'NLO']:
1629 name = 'fo_lhe_weight_ratio'
1630 FO_card = analyse_card.FOAnalyseCard(pjoin(self.me_dir,'Cards', 'FO_analyse_card.dat'))
1631 if name in FO_card:
1632 self.run_card.set(name, FO_card[name], user=False)
1633 name = 'fo_lhe_postprocessing'
1634 if name in FO_card:
1635 self.run_card.set(name, FO_card[name], user=False)
1636 
1637 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1638
1639
# NOTE(review): stripped `def` line (presumably
# `def set_configuration(self, amcatnlo=True, **opt):` — TODO confirm).
# Delegates to the common implementation, forcing aMC@NLO semantics.
1641 """assign all configuration variable from file
1642 loop over the different config file if config_file not define """
1643 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1644
1645
1646 - def do_launch(self, line, options={}, argss=[], switch={}):
1647 """Main commands: launch the full chain
1648 options and args are relevant if the function is called from other
1649 functions, such as generate_events or calculate_xsect
1650 mode gives the list of switch needed for the computation (usefull for banner_run)
1651 """
1652
1653 if not argss and not options:
1654 self.start_time = time.time()
1655 argss = self.split_arg(line)
1656
1657 (options, argss) = _launch_parser.parse_args(argss)
1658 options = options.__dict__
1659 self.check_launch(argss, options)
1660
1661
1662 if 'run_name' in list(options.keys()) and options['run_name']:
1663 self.run_name = options['run_name']
1664
1665
1666 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1667 logger.warning('Removing old run information in \n'+
1668 pjoin(self.me_dir, 'Events', self.run_name))
1669 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1670 self.results.delete_run(self.run_name)
1671 else:
1672 self.run_name = ''
1673
1674 if options['multicore']:
1675 self.cluster_mode = 2
1676 elif options['cluster']:
1677 self.cluster_mode = 1
1678
1679 if not switch:
1680 mode = argss[0]
1681
1682 if mode in ['LO', 'NLO']:
1683 options['parton'] = True
1684 mode = self.ask_run_configuration(mode, options)
1685 else:
1686 mode = self.ask_run_configuration('auto', options, switch)
1687
1688 self.results.add_detail('run_mode', mode)
1689
1690 self.update_status('Starting run', level=None, update_results=True)
1691
1692 if self.options['automatic_html_opening']:
1693 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1694 self.options['automatic_html_opening'] = False
1695
1696 if '+' in mode:
1697 mode = mode.split('+')[0]
1698 self.compile(mode, options)
1699 evt_file = self.run(mode, options)
1700
1701 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
1702 logger.info('No event file generated: grids have been set-up with a '\
1703 'relative precision of %s' % self.run_card['req_acc'])
1704 return
1705
1706 if not mode in ['LO', 'NLO']:
1707 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1708
1709 if self.run_card['systematics_program'] == 'systematics':
1710 self.exec_cmd('systematics %s %s ' % (self.run_name, ' '.join(self.run_card['systematics_arguments'])))
1711
1712 self.exec_cmd('reweight -from_cards', postcmd=False)
1713 self.exec_cmd('decay_events -from_cards', postcmd=False)
1714 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1715
1716 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1717 and not options['parton']:
1718 self.run_mcatnlo(evt_file, options)
1719 self.exec_cmd('madanalysis5_hadron --no_default', postcmd=False, printcmd=False)
1720
1721 elif mode == 'noshower':
1722 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1723 Please, shower the Les Houches events before using them for physics analyses.""")
1724
1725
1726 self.update_status('', level='all', update_results=True)
1727 if self.run_card['ickkw'] == 3 and \
1728 (mode in ['noshower'] or \
1729 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
1730 logger.warning("""You are running with FxFx merging enabled.
1731 To be able to merge samples of various multiplicities without double counting,
1732 you have to remove some events after showering 'by hand'.
1733 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1734
1735 self.store_result()
1736
1737 if self.param_card_iterator:
1738 cpath = pjoin(self.me_dir,'Cards','param_card.dat')
1739 param_card_iterator = self.param_card_iterator
1740 self.param_card_iterator = []
1741 param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
1742 error=self.results.current['error'],
1743 param_card_path=cpath)
1744 orig_name = self.run_name
1745
1746 with misc.TMP_variable(self, 'allow_notification_center', False):
1747 for i,card in enumerate(param_card_iterator):
1748 card.write(cpath)
1749 self.check_param_card(cpath, dependent=True)
1750 if not options['force']:
1751 options['force'] = True
1752 if options['run_name']:
1753 options['run_name'] = '%s_%s' % (orig_name, i+1)
1754 if not argss:
1755 argss = [mode, "-f"]
1756 elif argss[0] == "auto":
1757 argss[0] = mode
1758 self.do_launch("", options=options, argss=argss, switch=switch)
1759
1760 param_card_iterator.store_entry(self.run_name, self.results.current['cross'],
1761 error=self.results.current['error'],
1762 param_card_path=cpath)
1763
1764 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1765 name = misc.get_scan_name(orig_name, self.run_name)
1766 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
1767 logger.info("write all cross-section results in %s" % path, '$MG:BOLD')
1768 param_card_iterator.write_summary(path)
1769
1770 if self.allow_notification_center:
1771 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
1772 '%s: %s +- %s ' % (self.results.current['run_name'],
1773 self.results.current['cross'],
1774 self.results.current['error']))
1775
1776
1777
# NOTE(review): stripped `def` line (presumably `def do_compile(self, line):` —
# TODO confirm).  Compiles the executables for either fixed-order ('FO') or
# matched ('MC') running without launching a run.
1779 """Advanced commands: just compile the executables """
1780 argss = self.split_arg(line)
1781 
1782 (options, argss) = _compile_parser.parse_args(argss)
1783 options = options.__dict__
1784 options['reweightonly'] = False
1785 options['nocompile'] = False
1786 self.check_compile(argss, options)
1787 
# argss[0] is validated by check_compile to be 'FO' or 'MC'
1788 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1789 self.ask_run_configuration(mode, options)
1790 self.compile(mode, options)
1791 
1792 
1793 self.update_status('', level='all', update_results=True)
1794
1795
# NOTE(review): stripped `def` line (presumably `def update_random_seed(self):`
# — TODO confirm).  When iseed is 0, the previous seed stored in
# SubProcesses/randinit (format 'r=<int>', hence the [2:] slice) is read,
# incremented and written back; an explicit iseed from the run_card is written
# as-is.  NOTE(review): the open/close pairs would be safer as `with` blocks —
# left untouched here.
1797 """Update random number seed with the value from the run_card.
1798 If this is 0, update the number according to a fresh one"""
1799 iseed = self.run_card['iseed']
1800 if iseed == 0:
1801 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1802 iseed = int(randinit.read()[2:]) + 1
1803 randinit.close()
1804 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1805 randinit.write('r=%d' % iseed)
1806 randinit.close()
1807
1808
# Main driver of an aMC@NLO computation.  Left byte-identical: the interplay
# between grid setup, job splitting and MINT steps is order-sensitive.
1809 - def run(self, mode, options):
1810         """runs aMC@NLO. Returns the name of the event file created"""
1811         logger.info('Starting run')
1812 
1813         if not 'only_generation' in list(options.keys()):
1814             options['only_generation'] = False
1815 
1816 
# applgrid mode 2 re-uses existing grids, which implies only_generation
1817         if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
1818             options['only_generation'] = True
1819         self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
1820         self.setup_cluster_or_multicore()
1821         self.update_random_seed()
1822 
# naming convention of the integration-channel directories per mode
1823         folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
1824                         'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
1825         folder_names['noshower'] = folder_names['aMC@NLO']
1826         folder_names['noshowerLO'] = folder_names['aMC@LO']
1827         p_dirs = [d for d in \
1828                 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
1829 
1830         self.clean_previous_results(options,p_dirs,folder_names[mode])
1831 
1832         mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']
1833 
1834 
1835         if options['reweightonly']:
1836             event_norm=self.run_card['event_norm']
1837             nevents=self.run_card['nevents']
1838             return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1839 
# ---- fixed-order branch: iterate integration refinements until converged ----
1840         if mode in ['LO', 'NLO']:
1841 
1842             mode_dict = {'NLO': 'all', 'LO': 'born'}
1843             logger.info('Doing fixed order %s' % mode)
1844             req_acc = self.run_card['req_acc_FO']
1845 
1846 
1847 
1848             if self.run_card['iappl'] == 2:
1849                 self.applgrid_distribute(options,mode_dict[mode],p_dirs)
1850 
1851 
1852 
1853             integration_step=-1
1854             jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
1855                                 req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
1856             self.prepare_directories(jobs_to_run,mode)
1857 
1858 
1859 
1860 
# refine until collect_the_results reports no job left to run
1861             while True:
1862                 integration_step=integration_step+1
1863                 self.run_all_jobs(jobs_to_run,integration_step)
1864                 self.collect_log_files(jobs_to_run,integration_step)
1865                 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
1866                                         jobs_to_collect,integration_step,mode,mode_dict[mode])
1867                 if not jobs_to_run:
1868 
1869                     break
1870 
1871             self.finalise_run_FO(folder_names[mode],jobs_to_collect)
1872             self.update_status('Run complete', level='parton', update_results=True)
1873             return
1874 
# ---- event-generation branch (matched or unshowered NLO/LO events) ----
1875         elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
1876             if self.ninitial == 1:
1877                 raise aMCatNLOError('Decay processes can only be run at fixed order.')
1878             mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
1879                          'noshower': 'all', 'noshowerLO': 'born'}
1880             shower = self.run_card['parton_shower'].upper()
1881             nevents = self.run_card['nevents']
1882             req_acc = self.run_card['req_acc']
# sanity checks on the nevents / req_acc combination
1883             if nevents == 0 and req_acc < 0 :
1884                 raise aMCatNLOError('Cannot determine the required accuracy from the number '\
1885                                         'of events, because 0 events requested. Please set '\
1886                                         'the "req_acc" parameter in the run_card to a value '\
1887                                         'between 0 and 1')
1888             elif req_acc >1 or req_acc == 0 :
1889                 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
1890                                         'be between larger than 0 and smaller than 1, '\
1891                                         'or set to -1 for automatic determination. Current '\
1892                                         'value is %f' % req_acc)
1893 
1894             elif req_acc < 0 and nevents > 1000000 :
1895                 req_acc=0.001
1896 
1897             shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']
1898 
1899             if not shower in shower_list:
1900                 raise aMCatNLOError('%s is not a valid parton shower. '\
1901                                     'Please use one of the following: %s' \
1902                                     % (shower, ', '.join(shower_list)))
1903 
1904 
1905             if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
1906                 raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')
1907 
1908             if mode in ['aMC@NLO', 'aMC@LO']:
1909                 logger.info('Doing %s matched to parton shower' % mode[4:])
1910             elif mode in ['noshower','noshowerLO']:
1911                 logger.info('Generating events without running the shower.')
1912             elif options['only_generation']:
1913                 logger.info('Generating events starting from existing results')
1914 
1915             jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
1916                                 req_acc,mode_dict[mode],1,mode,fixed_order=False)
1917 
1918             if options['only_generation']:
1919                 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
1920                                     jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
1921             else:
1922                 self.prepare_directories(jobs_to_run,mode,fixed_order=False)
1923 
1924 
# the three MINT steps: grids, upper envelope, event generation
1925 
1926             for mint_step, status in enumerate(mcatnlo_status):
1927                 if options['only_generation'] and mint_step < 2:
1928                     continue
1929                 self.update_status(status, level='parton')
1930                 self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
1931                 self.collect_log_files(jobs_to_run,mint_step)
1932                 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
1933                                 jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)
1934                 if mint_step+1==2 and nevents==0:
1935                     self.print_summary(options,2,mode)
1936                     return
1937 
1938 
1939             self.check_event_files(jobs_to_collect)
1940 
# give shared filesystems time to sync the outputs of cluster nodes
1941             if self.cluster_mode == 1:
1942 
1943                 self.update_status(
1944                         'Waiting while files are transferred back from the cluster nodes',
1945                         level='parton')
1946                 time.sleep(10)
1947 
1948             event_norm=self.run_card['event_norm']
1949             return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1950
# Builds the bookkeeping dictionaries for every integration job.  Left
# byte-identical: the channel-splitting arithmetic is delicate.
1951 - def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
1952                              integration_step,mode,fixed_order=True):
1953         """Creates a list of dictionaries with all the jobs to be run"""
1954         jobs_to_run=[]
1955         if not options['only_generation']:
1956 
1957 
1958 
# grid parameters only used when req_acc == -1 (fixed npoints/niters)
1959             npoints = self.run_card['npoints_FO_grid']
1960             niters = self.run_card['niters_FO_grid']
1961             for p_dir in p_dirs:
1962                 try:
1963                     with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
1964                         channels=chan_file.readline().split()
1965                 except IOError:
1966                     logger.warning('No integration channels found for contribution %s' % p_dir)
1967                     continue
1968                 if fixed_order:
# bundle at most `maxchannels` channels per job (1 when applgrid is active)
1969                     lch=len(channels)
1970                     maxchannels=20
1971                     if self.run_card['iappl'] != 0: maxchannels=1
1972                     njobs=(int(lch/maxchannels)+1 if lch%maxchannels!= 0 \
1973                            else int(lch/maxchannels))
1974                     for nj in range(1,njobs+1):
1975                         job={}
1976                         job['p_dir']=p_dir
1977                         job['channel']=str(nj)
1978                         job['nchans']=(int(lch/njobs)+1 if nj <= lch%njobs else int(lch/njobs))
1979                         job['configs']=' '.join(channels[:job['nchans']])
1980                         del channels[:job['nchans']]
1981                         job['split']=0
1982                         if req_acc == -1:
1983                             job['accuracy']=0
1984                             job['niters']=niters
1985                             job['npoints']=npoints
1986                         elif req_acc > 0:
1987                             job['accuracy']=0.05
1988                             job['niters']=6
1989                             job['npoints']=-1
1990                         else:
1991                             raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
1992                                                 'between 0 and 1 or set it equal to -1.')
1993                         job['mint_mode']=0
1994                         job['run_mode']=run_mode
1995                         job['wgt_frac']=1.0
1996                         job['wgt_mult']=1.0
1997                         jobs_to_run.append(job)
# every channel must have been consumed by the splitting loop above
1998                     if channels:
1999                         raise aMCatNLOError('channels is not empty %s' % channels)
2000                 else:
2001                     for channel in channels:
2002                         job={}
2003                         job['p_dir']=p_dir
2004                         job['channel']=channel
2005                         job['split']=0
2006                         job['accuracy']=0.03
2007                         job['niters']=12
2008                         job['npoints']=-1
2009                         job['mint_mode']=0
2010                         job['run_mode']=run_mode
2011                         job['wgt_frac']=1.0
2012                         jobs_to_run.append(job)
2013             jobs_to_collect=copy.copy(jobs_to_run)
2014         else:
2015 
# restart mode: reload the job bookkeeping saved by collect_the_results
2016             try:
2017                 with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'rb') as f:
2018                     jobs_to_collect=pickle.load(f)
2019                     for job in jobs_to_collect:
2020                         job['dirname']=pjoin(self.me_dir,'SubProcesses',job['dirname'].rsplit('/SubProcesses/',1)[1])
2021                     jobs_to_run=copy.copy(jobs_to_collect)
2022             except:
2023                 raise aMCatNLOError('Cannot reconstruct saved job status in %s' % \
2024                                pjoin(self.me_dir,'SubProcesses','job_status.pkl'))
2025 
2026             if fixed_order:
2027                 jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
2028                                      jobs_to_collect,integration_step,mode,run_mode)
2029 
# recover the last completed integration step from the res_*.dat files
2030                 integration_step=1
2031                 for job in jobs_to_run:
2032                     while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
2033                         integration_step=integration_step+1
2034                 integration_step=integration_step-1
2035             else:
2036                 self.append_the_results(jobs_to_collect,integration_step)
2037         return jobs_to_run,jobs_to_collect,integration_step
2038
# NOTE(review): stripped `def` line (presumably
# `def prepare_directories(self, jobs_to_run, mode, fixed_order=True):` — TODO
# confirm).  Creates one G* directory per job, records it in job['dirname'],
# writes the input file and links/copies the parent grids for split jobs.
2040         """Set-up the G* directories for running"""
2041         name_suffix={'born' :'B' , 'all':'F'}
2042         for job in jobs_to_run:
# directory naming: <run_mode>_G<chan>[_<split>] (FO) or G<B|F><chan>[_<split>]
2043             if job['split'] == 0:
2044                 if fixed_order :
2045                     dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2046                                   job['run_mode']+'_G'+job['channel'])
2047                 else:
2048                     dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2049                                   'G'+name_suffix[job['run_mode']]+job['channel'])
2050             else:
2051                 if fixed_order :
2052                     dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2053                                   job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
2054                 else:
2055                     dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
2056                                   'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
2057             job['dirname']=dirname
2058             if not os.path.isdir(dirname):
2059                 os.makedirs(dirname)
2060             self.write_input_file(job,fixed_order)
2061 
# split jobs inherit the grids of their parent directory: symlinked for
# event generation, copied for fixed order
2062             if not fixed_order:
2063                 if job['split'] != 0:
2064                     for f in ['grid.MC_integer','mint_grids','res_1']:
2065                         if not os.path.isfile(pjoin(job['dirname'],f)):
2066                             files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2067             else:
2068                 if job['split'] != 0:
2069                     for f in ['grid.MC_integer','mint_grids']:
2070                         files.cp(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
2072
2107
2108
def run_all_jobs(self, jobs_to_run, integration_step, fixed_order=True):
    """Submit every job in *jobs_to_run* via 'run_exe', then wait for all
    of them to complete.  *integration_step* is the current refinement
    (fixed order) or MINT step (event generation)."""
    if fixed_order:
        # status message for the fixed-order refinement cycle
        if integration_step == 0:
            self.update_status('Setting up grids', level=None)
        else:
            self.update_status('Refining results, step %i' % integration_step, level=None)

    self.ijob = 0
    # label shown while waiting for the submitted jobs
    if fixed_order:
        run_type = "Fixed order integration step %s" % integration_step
    else:
        run_type = "MINT step %s" % integration_step

    # event-generation directories are tagged GB*/GF* (born/all)
    suffix = {'born': 'B', 'all': 'F'}
    self.njobs = len(jobs_to_run)

    for current_job in jobs_to_run:
        split_arg = str(current_job['split'])
        step_arg = str(integration_step)
        if fixed_order:
            job_args = [current_job['channel'], current_job['run_mode'],
                        split_arg, step_arg]
        else:
            job_args = [current_job['channel'], suffix[current_job['run_mode']],
                        split_arg, step_arg]
        self.run_exe('ajob1', job_args, run_type,
                     cwd=pjoin(self.me_dir, 'SubProcesses', current_job['p_dir']))

    # give multicore submission a moment before polling for completion
    if self.cluster_mode == 2:
        time.sleep(1)
    self.wait_for_complete(run_type)
2138
# Harvests the results of a finished step, updates the HTML pages, decides
# whether more refinement/splitting is needed.  Left byte-identical: the
# run/collect bookkeeping and its checkpointing order are delicate.
2139 - def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
2140                               integration_step,mode,run_mode,fixed_order=True):
2141         """Collect the results, make HTML pages, print the summary and
2142            determine if there are more jobs to run. Returns the list
2143            of the jobs that still need to be run, as well as the
2144            complete list of jobs that need to be collected to get the
2145            final answer.
2146         """
2147 
2148         self.append_the_results(jobs_to_run,integration_step)
2149         self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
2150 
# update the HTML cross-section tables and the stored totals
2151         if fixed_order:
2152             cross, error = self.make_make_all_html_results(folder_names=['%s*' % run_mode],
2153                                                            jobs=jobs_to_collect)
2154         else:
2155             name_suffix={'born' :'B' , 'all':'F'}
2156             cross, error = self.make_make_all_html_results(folder_names=['G%s*' % name_suffix[run_mode]])
2157         self.results.add_detail('cross', cross)
2158         self.results.add_detail('error', error)
2159 
2160         if fixed_order:
2161             jobs_to_run=self.combine_split_order_run(jobs_to_run)
2162 
2163         jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)
2164 
# checkpoint the job status so '--only_generation' restarts can resume
2165         if fixed_order:
2166 
2167 
2168             with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
2169                 pickle.dump(jobs_to_collect,f)
2170 
2171         if (not jobs_to_run_new) and fixed_order:
2172 
# converged: gather scale/PDF uncertainties and print the final summary
2173             scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
2174             self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
2175             return jobs_to_run_new,jobs_to_collect
2176         elif jobs_to_run_new:
2177 
2178             scale_pdf_info=[]
2179             self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
2180         else:
2181 
2182 
2183 
2184             scale_pdf_info=[]
2185 
# event generation about to start (end of MINT step 1): checkpoint and split
2186         if (not fixed_order) and integration_step+1 == 2 :
2187 
2188 
2189             with open(pjoin(self.me_dir,"SubProcesses","job_status.pkl"),'wb') as f:
2190                 pickle.dump(jobs_to_collect,f)
2191 
2192             jobs_to_run_new,jobs_to_collect_new= \
2193                 self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
2194             self.prepare_directories(jobs_to_run_new,mode,fixed_order)
2195             self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
2196             self.write_nevts_files(jobs_to_run_new)
2197         else:
2198             if fixed_order and self.run_card['iappl'] == 0 \
2199                            and self.run_card['req_acc_FO'] > 0:
2200                 jobs_to_run_new,jobs_to_collect= \
2201                     self.split_jobs_fixed_order(jobs_to_run_new,jobs_to_collect)
2202                 self.prepare_directories(jobs_to_run_new,mode,fixed_order)
2203             jobs_to_collect_new=jobs_to_collect
2204         return jobs_to_run_new,jobs_to_collect_new
2205
2206
        """writes the nevents_unweighted file in the SubProcesses directory.
        We also need to write the jobs that will generate 0 events,
        because that makes sure that the cross section from those channels
        is taken into account in the event weights (by collect_events.f).
        """
        content=[]
        # One line per event-producing job: relative path to its events.lhe,
        # the number of events, the weighted ABS cross section and the
        # weight fraction carried by this (possibly split) job.
        for job in jobs:
            # keep only the last two path components (P*-dir/G*-dir)
            path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
            lhefile=pjoin(path,'events.lhe')
            content.append(' %s %d %9e %9e' % \
                (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
        # Also include the zero-event jobs so their cross section enters
        # the normalisation of the event weights.
        for job in jobs0events:
            if job['nevents']==0:
                path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
                lhefile=pjoin(path,'events.lhe')
                content.append(' %s %d %9e %9e' % \
                    (lhefile.ljust(40),job['nevents'],job['resultABS'],1.))
        with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
            f.write('\n'.join(content)+'\n')
2227
        """write the nevts files in the SubProcesses/P*/G*/ directories"""
        for job in jobs:
            with open(pjoin(job['dirname'],'nevts'),'w') as f:
                # For bias-normalised events the fortran code also needs
                # the total ABS cross section next to the event count.
                if self.run_card['event_norm'].lower()=='bias':
                    f.write('%i %f\n' % (job['nevents'],self.cross_sect_dict['xseca']))
                else:
                    f.write('%i\n' % job['nevents'])
2236
        """Combines jobs and grids from split jobs that have been run"""
        # Group jobs by (p_dir, channel): split==0 jobs were never split
        # and are kept as-is; the split==1 job acts as the representative
        # that gathers all its siblings; splits > 1 are already part of
        # that group and are skipped.
        jobgroups_to_combine=[]
        jobs_to_run_new=[]
        for job in jobs_to_run:
            if job['split'] == 0:
                # unsplit job: mark it as "combined from a single job"
                job['combined']=1
                jobs_to_run_new.append(job)
            elif job['split'] == 1:
                jobgroups_to_combine.append([j for j in jobs_to_run if j['p_dir'] == job['p_dir'] and \
                                             j['channel'] == job['channel']])
            else:
                continue
        for job_group in jobgroups_to_combine:
            # merge the integration grids first, then the bookkeeping
            self.combine_split_order_grids(job_group)
            jobs_to_run_new.append(self.combine_split_order_jobs(job_group))
        return jobs_to_run_new
2258
        """combine the jobs in job_group and return a single summed job"""
        sum_job=copy.copy(job_group[0])
        # the combined job lives in the directory without the '_<split>' suffix
        sum_job['dirname']=pjoin(sum_job['dirname'].rsplit('_',1)[0])
        sum_job['split']=0
        sum_job['wgt_mult']=1.0
        sum_job['combined']=len(job_group)
        # quantities that add linearly ...
        keys=['niters_done','npoints_done','niters','npoints',\
              'result','resultABS','time_spend']
        # ... and quantities that add in quadrature (statistical errors)
        keys2=['error','errorABS']
        # square the errors of the first job so everything below can add
        for key in keys2:
            sum_job[key]=math.pow(sum_job[key],2)
        for i,job in enumerate(job_group):
            if i==0 : continue  # job_group[0] already seeded sum_job
            for key in keys:
                sum_job[key]+=job[key]
            for key in keys2:
                sum_job[key]+=math.pow(job[key],2)
        # back from squared errors to errors
        for key in keys2:
            sum_job[key]=math.sqrt(sum_job[key])
        sum_job['err_percABS'] = sum_job['errorABS']/sum_job['resultABS']*100.
        sum_job['err_perc'] = sum_job['error']/sum_job['result']*100.
        # iterations are averaged over the splits rather than summed
        sum_job['niters']=int(sum_job['niters_done']/len(job_group))
        sum_job['niters_done']=int(sum_job['niters_done']/len(job_group))
        return sum_job
2289
2290
        """Combines the mint_grids and MC-integer grids from the split order
        jobs (fixed order only).
        """
        files_mint_grids=[]
        files_MC_integer=[]
        location=None
        for job in job_group:
            files_mint_grids.append(pjoin(job['dirname'],'mint_grids'))
            files_MC_integer.append(pjoin(job['dirname'],'grid.MC_integer'))
            if not location:
                location=pjoin(job['dirname'].rsplit('_',1)[0])
            else:
                # sanity check: all splits must combine into one directory
                if location != pjoin(job['dirname'].rsplit('_',1)[0]) :
                    raise aMCatNLOError('Not all jobs have the same location. '\
                                        +'Cannot combine them.')
        # Combine the two grid files row by row across all split jobs.
        for j,fs in enumerate([files_mint_grids,files_MC_integer]):
            linesoffiles=[]
            for f in fs:
                with open(f,'r+') as fi:
                    linesoffiles.append(fi.readlines())
            to_write=[]
            for rowgrp in zip(*linesoffiles):
                try:
                    # A "special" row (cross section, error, points,
                    # iterations, ...) ends in an integer token; if the
                    # int() cast below raises ValueError we are on an
                    # ordinary grid-node row and fall through to the
                    # averaging branch in the except clause.
                    is_integer = [[int(row.strip().split()[-1])] for row in rowgrp]
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = list(zip(*floatsbyfile))
                    special=[]
                    for i,floatgrp in enumerate(floatgrps):
                        if i==0:
                            # column 0: results sum over the splits
                            special.append(sum(floatgrp))
                        elif i==1:
                            # column 1: errors add in quadrature
                            special.append(math.sqrt(sum([err**2 for err in floatgrp])))
                        elif i==2:
                            # column 2: averaged over the splits
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        elif i==3:
                            # column 3: summed over the splits
                            special.append(int(sum(floatgrp)))
                        elif i==4:
                            # column 4: averaged over the splits
                            special.append(int(sum(floatgrp)/len(floatgrp)))
                        else:
                            raise aMCatNLOError('"mint_grids" files not in correct format. '+\
                                                'Cannot combine them.')
                    to_write.append(" ".join(str(s) for s in special) + "\n")
                except ValueError:
                    # ordinary grid row: combine by a plain average
                    floatsbyfile = [[float(a) for a in row.strip().split()] for row in rowgrp]
                    floatgrps = list(zip(*floatsbyfile))
                    averages = [sum(floatgrp)/len(floatgrp) for floatgrp in floatgrps]
                    to_write.append(" ".join(str(a) for a in averages) + "\n")
            # write the combined grid into the unsplit directory
            if j==0:
                with open(pjoin(location,'mint_grids'),'w') as f:
                    f.writelines(to_write)
            elif j==1:
                with open(pjoin(location,'grid.MC_integer'),'w') as f:
                    f.writelines(to_write)
2355
2356
        """Looks in the jobs_to_run to see if there is the need to split the
        jobs, depending on the expected time they take. Updates
        jobs_to_run and jobs_to_collect to replace the split-job by
        its splits.
        """
        # Number of job slots available for this run mode.
        if self.options['run_mode'] ==2:
            nb_submit = int(self.options['nb_core'])
        elif self.options['run_mode'] ==1:
            nb_submit = int(self.options['cluster_size'])
        else:
            nb_submit =1
        # Total expected runtime of the next step, extrapolated from the
        # time spent so far per iteration and per point.
        time_expected=0
        for job in jobs_to_run:
            time_expected+=job['time_spend']*(job['niters']*job['npoints'])/ \
                           (job['niters_done']*job['npoints_done'])
        # Target time per job slot; the (1+len/2) factor damps the amount
        # of splitting when there are already many jobs.
        time_per_job=time_expected/(nb_submit*(1+len(jobs_to_run)/2))
        jobs_to_run_new=[]
        jobs_to_collect_new=copy.copy(jobs_to_collect)
        for job in jobs_to_run:
            # Remove this channel's existing entries from the collect list;
            # they are re-added below, either split or unsplit.
            for j in [j for j in jobs_to_collect_new if j['p_dir'] == job['p_dir'] and \
                      j['channel'] == job['channel']]:
                jobs_to_collect_new.remove(j)
            time_expected=job['time_spend']*(job['niters']*job['npoints'])/ \
                          (job['niters_done']*job['npoints_done'])
            # Split only if the job is expected to take longer than both the
            # per-slot target time and twice its previous per-split time.
            if time_expected > max(2*job['time_spend']/job['combined'],time_per_job):
                # number of splits: at least 2, at most the number of slots
                nsplit=min(max(int(time_expected/max(2*job['time_spend']/job['combined'],time_per_job)),2),nb_submit)
                for i in range(1,nsplit+1):
                    job_new=copy.copy(job)
                    job_new['split']=i
                    job_new['wgt_mult']=1./float(nsplit)
                    job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                    # each split may run with a looser accuracy (capped at 0.1)
                    job_new['accuracy']=min(job['accuracy']*math.sqrt(float(nsplit)),0.1)
                    # redistribute iterations/points so the total statistics
                    # over all splits stays roughly the same
                    if nsplit >= job['niters']:
                        job_new['npoints']=int(job['npoints']*job['niters']/nsplit)
                        job_new['niters']=1
                    else:
                        job_new['npoints']=int(job['npoints']/nsplit)
                    jobs_to_collect_new.append(job_new)
                    jobs_to_run_new.append(job_new)
            else:
                jobs_to_collect_new.append(job)
                jobs_to_run_new.append(job)
        return jobs_to_run_new,jobs_to_collect_new
2412
2413
        """Looks in the jobs_to_run to see if there is the need to split the
        event generation step. Updates jobs_to_run and
        jobs_to_collect to replace the split-job by its
        splits. Also removes jobs that do not need any events.
        """
        nevt_job=self.run_card['nevt_job']
        if nevt_job > 0:
            # A maximum number of events per job is set: split every job
            # that has to generate more events than that.
            jobs_to_collect_new=copy.copy(jobs_to_collect)
            for job in jobs_to_run:
                nevents=job['nevents']
                if nevents == 0:
                    # no events needed from this channel: drop it
                    jobs_to_collect_new.remove(job)
                elif nevents > nevt_job:
                    jobs_to_collect_new.remove(job)
                    # round the number of splits up if it does not divide evenly
                    if nevents % nevt_job != 0 :
                        nsplit=int(nevents/nevt_job)+1
                    else:
                        nsplit=int(nevents/nevt_job)
                    for i in range(1,nsplit+1):
                        job_new=copy.copy(job)
                        # spread the remainder over the first 'left_over' splits
                        left_over=nevents % nsplit
                        if i <= left_over:
                            job_new['nevents']=int(nevents/nsplit)+1
                            job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
                        else:
                            job_new['nevents']=int(nevents/nsplit)
                            job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
                        job_new['split']=i
                        job_new['dirname']=job['dirname']+'_%i' % job_new['split']
                        jobs_to_collect_new.append(job_new)
            jobs_to_run_new=copy.copy(jobs_to_collect_new)
        else:
            # no splitting requested: simply drop the zero-event jobs
            jobs_to_run_new=copy.copy(jobs_to_collect)
            for job in jobs_to_collect:
                if job['nevents'] == 0:
                    jobs_to_run_new.remove(job)
            jobs_to_collect_new=copy.copy(jobs_to_run_new)

        return jobs_to_run_new,jobs_to_collect_new
2454
2455
        """
        For (N)LO+PS: determines the number of events and/or the required
        accuracy per job.
        For fixed order: determines which jobs need higher precision and
        returns those with the newly requested precision.
        """
        err=self.cross_sect_dict['errt']
        tot=self.cross_sect_dict['xsect']
        errABS=self.cross_sect_dict['erra']
        totABS=self.cross_sect_dict['xseca']
        jobs_new=[]
        if fixed_order:
            if req_acc == -1:
                # No accuracy requested: use the fixed numbers of points
                # and iterations from the run_card (two steps at most).
                if step+1 == 1:
                    npoints = self.run_card['npoints_FO']
                    niters = self.run_card['niters_FO']
                    for job in jobs:
                        job['mint_mode']=-1
                        job['niters']=niters
                        job['npoints']=npoints
                        jobs_new.append(job)
                elif step+1 == 2:
                    pass
                elif step+1 > 2:
                    raise aMCatNLOError('Cannot determine number of iterations and PS points '+
                                        'for integration step %i' % step )
            elif ( req_acc > 0 and err/abs(tot) > req_acc*1.2 ) or step <= 0:
                # Requested accuracy not yet reached (20% tolerance):
                # assign each channel an accuracy proportional to its
                # contribution to the total ABS cross section.
                req_accABS=req_acc*abs(tot)/totABS
                for job in jobs:
                    job['mint_mode']=-1
                    job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
                    # Skip channels that already reached their accuracy
                    # (never on the first step, nor when filling APPLgrids).
                    if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
                        and not (step==-1 and self.run_card['iappl'] == 2):
                        continue
                    # Estimate the iterations still needed, assuming the
                    # error scales like 1/sqrt(statistics).
                    itmax_fl=job['niters_done']*math.pow(job['errorABS']/
                                                         (job['accuracy']*job['resultABS']),2)
                    if itmax_fl <= 4.0 :
                        job['niters']=max(int(round(itmax_fl)),2)
                        job['npoints']=job['npoints_done']*2
                    elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
                        job['niters']=4
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
                    else:
                        # cap the growth for very inaccurate channels
                        if itmax_fl > 100.0 : itmax_fl=50.0
                        job['niters']=int(round(math.sqrt(itmax_fl)))
                        job['npoints']=int(round(job['npoints_done']*itmax_fl/
                                                 round(math.sqrt(itmax_fl))))*2
                    # this channel needs to be run again
                    jobs_new.append(job)
            return jobs_new
        elif step+1 <= 2:
            # (N)LO+PS: at most two MINT steps.
            nevents=self.run_card['nevents']
            # Derive the inverse squared accuracy either from the number of
            # events wanted or from the requested accuracy itself.
            if req_acc<0:
                req_acc2_inv=nevents
            else:
                req_acc2_inv=1/(req_acc*req_acc)
            if step+1 == 1 or step+1 == 2 :
                # Set the accuracy per channel (capped at 20%).
                for job in jobs:
                    accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
                    job['accuracy']=accuracy
            if step+1 == 2:
                # Determine the number of events per channel by sampling
                # proportionally to the ABS cross section; seed the RNG with
                # the run's randinit seed for reproducibility.
                r=self.get_randinit_seed()
                random.seed(r)
                totevts=nevents
                for job in jobs:
                    job['nevents'] = 0
                while totevts :
                    target = random.random() * totABS
                    crosssum = 0.
                    i = 0
                    while i<len(jobs) and crosssum < target:
                        job = jobs[i]
                        crosssum += job['resultABS']
                        i += 1
                    totevts -= 1
                    i -= 1
                    jobs[i]['nevents'] += 1
            for job in jobs:
                job['mint_mode']=step+1
            return jobs
        else:
            return []
2547
2548
        """ Get the random number seed from the randinit file """
        with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
            # the file presumably contains "r=<seed>": the first two
            # characters are skipped -- TODO confirm the file format
            iseed = int(randinit.read()[2:])
        return iseed
2555
2556
        """Appends the results for each of the jobs in the job list"""
        error_found=False
        for job in jobs:
            try:
                if integration_step >= 0 :
                    with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
                        results=res_file.readline().split()
                else:
                    # negative step: read the plain res.dat instead
                    # (presumably the restart/'only_generation' path --
                    # TODO confirm)
                    with open(pjoin(job['dirname'],'res.dat')) as res_file:
                        results=res_file.readline().split()
            except IOError:
                # Missing result file: remember the failing channel but keep
                # going so that all failures are reported at once below.
                if not error_found:
                    error_found=True
                    error_log=[]
                error_log.append(pjoin(job['dirname'],'log.txt'))
                continue
            # res*.dat line layout: ABS xsec, its error, xsec, its error,
            # iterations done, points done, time spent.
            job['resultABS']=float(results[0])
            job['errorABS']=float(results[1])
            job['result']=float(results[2])
            job['error']=float(results[3])
            job['niters_done']=int(results[4])
            job['npoints_done']=int(results[5])
            job['time_spend']=float(results[6])
            job['err_percABS'] = job['errorABS']/job['resultABS']*100.
            job['err_perc'] = job['error']/job['result']*100.
        if error_found:
            raise aMCatNLOError('An error occurred during the collection of results.\n' +
                  'Please check the .log files inside the directories which failed:\n' +
                  '\n'.join(error_log)+'\n')
2589
2590
2591
        """writes the res.txt files in the SubProcess dir"""
        # channels with the largest absolute error come first
        jobs.sort(key = lambda job: -job['errorABS'])
        content=[]
        content.append('\n\nCross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
        content.append('\n\nABS cross section per integration channel:')
        for job in jobs:
            content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
        # totals: results weighted by the event fraction of each split job
        totABS=0
        errABS=0
        tot=0
        err=0
        for job in jobs:
            totABS+= job['resultABS']*job['wgt_frac']
            # NOTE(review): squared errors are weighted by a single power of
            # wgt_frac -- confirm this is the intended error combination
            errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
            tot+= job['result']*job['wgt_frac']
            err+= math.pow(job['error'],2)*job['wgt_frac']
        if jobs:
            content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
                           (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
                            tot, math.sqrt(err), math.sqrt(err)/tot *100.))
        with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
            res_file.write('\n'.join(content))
        randinit=self.get_randinit_seed()
        return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
                'erra':math.sqrt(errABS),'randinit':randinit}
2620
2621
        """read the scale_pdf_dependence.dat files and collects their results"""
        scale_pdf_info=[]
        # Only needed when scale/PDF reweighting was requested, or when more
        # than one central scale choice or PDF set was used.
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            evt_files=[]
            evt_wghts=[]
            for job in jobs:
                evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
                evt_wghts.append(job['wgt_frac'])
            scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts)
        return scale_pdf_info
2634
2635
        """combines the plots and puts them in the Events/run* directory"""
        # NOTE(review): devnull is never closed; a 'with' block would be safer
        devnull = open(os.devnull, 'w')
        # dispatch on the output format chosen in the FO analysis card
        if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
            topfiles = []
            for job in jobs:
                if job['dirname'].endswith('.top'):
                    topfiles.append(job['dirname'])
                else:
                    topfiles.append(pjoin(job['dirname'],'MADatNLO.top'))
            misc.call(['./combine_plots_FO.sh'] + topfiles, \
                        stdout=devnull,
                        cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the TopDrawer file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
            out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
            self.combine_plots_HwU(jobs,out)
            try:
                # gnuplot is optional: failures are deliberately ignored
                misc.call(['gnuplot','MADatNLO.gnuplot'],\
                          stdout=devnull,stderr=devnull,\
                          cwd=pjoin(self.me_dir, 'Events', self.run_name))
            except Exception:
                pass
            logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'root':
            rootfiles = []
            for job in jobs:
                if job['dirname'].endswith('.root'):
                    rootfiles.append(job['dirname'])
                else:
                    rootfiles.append(pjoin(job['dirname'],'MADatNLO.root'))
            # 'folder_name' is presumably a parameter of this method; it is
            # forwarded to the combination script -- TODO confirm signature
            misc.call(['./combine_root.sh'] + folder_name + rootfiles, \
                        stdout=devnull,
                        cwd=pjoin(self.me_dir, 'SubProcesses'))
            files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                     pjoin(self.me_dir, 'Events', self.run_name))
            logger.info('The results of this run and the ROOT file with the plots' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        elif self.analyse_card['fo_analysis_format'].lower() == 'lhe':
            self.combine_FO_lhe(jobs)
            logger.info('The results of this run and the LHE File (to be used for plotting only)' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
        else:
            logger.info('The results of this run' + \
                        ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
2686
        """combine the various lhe file generated in each directory.
        There are three steps:
        1) banner
        2) reweight each sample by the factor written at the end of each file
        3) concatenate each of the new files (gzip those).
        """

        logger.info('Combining lhe events for plotting analysis')
        start = time.time()
        self.run_card['fo_lhe_postprocessing'] = [i.lower() for i in self.run_card['fo_lhe_postprocessing']]
        output = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        if os.path.exists(output):
            os.remove(output)

        # 1. the banner: take the <initrwgt> block from the header written
        # by one of the jobs
        text = open(pjoin(jobs[0]['dirname'],'header.txt'),'r').read()
        i1, i2 = text.find('<initrwgt>'),text.find('</initrwgt>')
        self.banner['initrwgt'] = text[10+i1:i2]
        # the <init> block: total cross section and error
        # NOTE(review): the error is sqrt(sum of errors), not sqrt(sum of
        # squared errors) -- confirm this is intended
        cross = sum(j['result'] for j in jobs)
        error = math.sqrt(sum(j['error'] for j in jobs))
        self.banner['init'] = "0 0 0e0 0e0 0 0 0 0 -4 1\n %s %s %s 1" % (cross, error, cross)
        self.banner.write(output[:-3], close_tag=False)
        misc.gzip(output[:-3])

        # 2./3. reweight the events of each sample and append them to the
        # (gzipped) banner file written above
        fsock = lhe_parser.EventFile(output,'a')
        if 'nogrouping' in self.run_card['fo_lhe_postprocessing']:
            fsock.eventgroup = False
        else:
            fsock.eventgroup = True

        if 'norandom' in self.run_card['fo_lhe_postprocessing']:
            # keep the per-channel ordering: process one file after the other
            for job in jobs:
                dirname = job['dirname']
                # the last line of each events.lhe holds:
                # nb_event, sum of weights, cross section
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                # weight normalisation factor for this sample
                ratio = cross/sumwgt
                lhe = lhe_parser.EventFile(pjoin(dirname,'events.lhe'))
                lhe.eventgroup = True  # read the events by event group
                for eventsgroup in lhe:
                    neweventsgroup = []
                    for i,event in enumerate(eventsgroup):
                        event.rescale_weights(ratio)
                        # merge identical consecutive events (counter-events)
                        # unless 'noidentification' is requested
                        if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                             and event == neweventsgroup[-1]:
                            neweventsgroup[-1].wgt += event.wgt
                            for key in event.reweight_data:
                                neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                        else:
                            neweventsgroup.append(event)
                    fsock.write_events(neweventsgroup)
                lhe.close()
                os.remove(pjoin(dirname,'events.lhe'))
            # NOTE(review): in this branch fsock is not closed and no
            # closing </LesHouchesEvents> tag is written -- confirm
        else:
            # interleave the events of all channels randomly
            lhe = []
            lenlhe = []
            misc.sprint('need to combine %s event file' % len(jobs))
            globallhe = lhe_parser.MultiEventFile()
            globallhe.eventgroup = True
            for job in jobs:
                dirname = job['dirname']
                lastline = misc.BackRead(pjoin(dirname,'events.lhe')).readline()
                nb_event, sumwgt, cross = [float(i) for i in lastline.split()]
                lastlhe = globallhe.add(pjoin(dirname,'events.lhe'),cross, 0, cross,
                                        nb_event=int(nb_event), scale=cross/sumwgt)
            for eventsgroup in globallhe:
                neweventsgroup = []
                for i,event in enumerate(eventsgroup):
                    event.rescale_weights(event.sample_scale)
                    # merge identical consecutive events (counter-events)
                    # unless 'noidentification' is requested
                    if i>0 and 'noidentification' not in self.run_card['fo_lhe_postprocessing'] \
                         and event == neweventsgroup[-1]:
                        neweventsgroup[-1].wgt += event.wgt
                        for key in event.reweight_data:
                            neweventsgroup[-1].reweight_data[key] += event.reweight_data[key]
                    else:
                        neweventsgroup.append(event)
                fsock.write_events(neweventsgroup)
            globallhe.close()
            fsock.write('</LesHouchesEvents>\n')
            fsock.close()
            misc.sprint('combining lhe file done in ', time.time()-start)
            for job in jobs:
                dirname = job['dirname']
                os.remove(pjoin(dirname,'events.lhe'))

        misc.sprint('combining lhe file done in ', time.time()-start)
2789
2790
2791
2792
2793
2794
        """Sums all the plots in the HwU format."""
        logger.debug('Combining HwU plots.')

        # Build the command line for the bin/internal/histograms.py helper.
        command =  [sys.executable]
        command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
        for job in jobs:
            if job['dirname'].endswith('.HwU'):
                command.append(job['dirname'])
            else:
                command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
        command.append("--out="+out)
        command.append("--gnuplot")
        command.append("--band=[]")
        command.append("--lhapdf-config="+self.options['lhapdf'])
        # optional per-file multiplicative factors
        if normalisation:
            command.append("--multiply="+(','.join([str(n) for n in normalisation])))
        command.append("--sum")
        command.append("--keep_all_weights")
        command.append("--no_open")

        p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)

        # Relay the helper's output: tagged lines go straight to stdout,
        # anything else to the debug logger (only when __debug__).
        while p.poll() is None:
            line = p.stdout.readline().decode()
            if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
                print(line[:-1])
            elif __debug__ and line:
                logger.debug(line[:-1])
2825
2826
        """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
        logger.debug('Combining APPLgrids \n')
        # the applgrid-combine executable lives next to applgrid-config
        applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
                       'applgrid-combine')
        all_jobs=[]
        for job in jobs:
            all_jobs.append(job['dirname'])
        ngrids=len(all_jobs)
        # one *_out.root grid file per observable
        nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
        for obs in range(0,nobs):
            gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
            # iappl=1: write the optimised starting grids;
            # iappl=2: combine the filled grids into the final ones
            if self.run_card["iappl"] == 1:
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
                    "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
            elif self.run_card["iappl"] == 2:
                unc2_inv=pow(cross/error,2)
                # NOTE(review): unc2_inv_ngrids is computed but never used
                unc2_inv_ngrids=pow(cross/error,2)*ngrids
                misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
                    self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
                    str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
                for job in all_jobs:
                    os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
            else:
                raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
            # remove the per-channel grids once they have been combined
            for ggdir in gdir:
                os.remove(ggdir)
2856
2857
        """Distributes the APPLgrids ready to be filled by a second run of the code"""
        # If no starting grid was given explicitly, guess one: use the most
        # recently modified run that produced starting grids.
        if not('appl_start_grid' in list(options.keys()) and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))
            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # the run name is the parent directory of the newest grid file
            options['appl_start_grid']= \
                max(six.iterkeys(time_stamps), key=(lambda key:
                    time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                        'Guessing that start grid from run "%s" should be used.' \
                        % options['appl_start_grid'])

        if 'appl_start_grid' in list(options.keys()) and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # check that the run dir exists and contains at least one grid
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                                      pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                           start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        # Copy the starting grids into every G* directory of every P* dir.
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                      "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                      os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # the observable number is the third-to-last '_' token
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2902
2903
2904
2905
        """collect the log files and put them in a single, html-friendly file
        inside the Events/run_.../ directory"""
        log_file = pjoin(self.me_dir, 'Events', self.run_name,
                         'alllogs_%d.html' % integration_step)
        outfile = open(log_file, 'w')

        content = ''
        content += '<HTML><BODY>\n<font face="courier" size=2>'
        for job in jobs:
            # an anchor so individual channels can be linked to directly
            log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
            content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
                                              pjoin(self.me_dir,'SubProcesses'),''))
            # a red header line identifying the channel and the step
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                       (os.path.dirname(log).replace(pjoin(self.me_dir,
                                                           'SubProcesses'), ''),
                        integration_step)
            content += '</font>\n'
            # dump the raw log; write and reset 'content' after each job to
            # keep the in-memory string small
            with open(log) as l:
                content += '<PRE>\n' + l.read() + '\n</PRE>'
            content +='<br>\n'
            outfile.write(content)
            content=''

        outfile.write('</font>\n</BODY></HTML>\n')
        outfile.close()
2937
2938
        """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
        # Move the res_*.txt summary files to the Events/run* folder.
        res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))
        # Combine the plots of all channels.
        self.combine_plots_FO(folder_name,jobs)
        # When running with APPLgrid, also combine the grids.
        if self.run_card['iappl'] != 0:
            cross=self.cross_sect_dict['xsect']
            error=self.cross_sect_dict['errt']
            self.applgrid_combine(cross,error,jobs)
2953
2954
        """setup the number of cores for multicore, and the cluster-type for cluster runs"""
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            try:
                self.cluster = cluster.from_name[cluster_name](**self.options)
            except KeyError:
                # Unknown cluster type: check whether a plugin provides it.
                cluster_class = misc.from_plugin_import(self.plugin_path,
                                            'new_cluster', cluster_name,
                                            info = 'cluster handling will be done with PLUGIN: %{plug}s' )
                if cluster_class:
                    self.cluster = cluster_class(**self.options)

        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        # nb_core option not set: use all available cores
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                # multiprocessing unavailable: fall back to a single core
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using One.\n'+
                        'Use set nb_core X in order to set this number and be able to'+
                        'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)
2986
2987
        """Clean previous results.
        o. If doing only the reweighting step, do not delete anything and return directly.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            # for each folder pattern (e.g. 'G*', 'born_G*', 'all_G*')
            for obj in folder_name:
                # everything matching the pattern (removed only on a fresh run)
                # NOTE(review): isdir(...) or exists(...) is redundant, since
                # isdir implies exists -- the 'or exists' branch suffices
                to_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # the G*_* split-job leftovers (always removed)
                to_always_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and
                            '_' in file and not '_G' in file and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return
3020
3021
    def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
        """print a summary of the results contained in self.cross_sect_dict.
        step corresponds to the mintMC step, if =2 (i.e. after event generation)
        some additional infos are printed"""
        # NOTE(review): mutable default [] for scale_pdf_info is shared across
        # calls; it is only read here, so harmless, but flagged for awareness.

        # Reconstruct the process definition string from the saved proc_card;
        # each 'generate'/'add process' line is appended, ' ; '-separated.
        proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
        process = ''
        for line in proc_card_lines:
            if line.startswith('generate') or line.startswith('add process'):
                process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
        # Human-readable beam labels keyed by the run_card lpp codes.
        lpp = {0:'l', 1:'p', -1:'pbar', 2:'elastic photon from p', 3:'elastic photon from e'}
        if self.ninitial == 1:
            # Decay process: no collider information to display.
            # process[:-3] strips the trailing ' ; ' separator.
            proc_info = '\n Process %s' % process[:-3]
        else:
            proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
                (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
                 self.run_card['ebeam1'], self.run_card['ebeam2'])

        # Choose units and labels: decays are widths in GeV, collisions are
        # cross sections in pb. These keys feed the %-format templates below.
        if self.ninitial == 1:
            self.cross_sect_dict['unit']='GeV'
            self.cross_sect_dict['xsec_string']='(Partial) decay width'
            self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
        else:
            self.cross_sect_dict['unit']='pb'
            self.cross_sect_dict['xsec_string']='Total cross section'
            self.cross_sect_dict['axsec_string']='Total abs(cross section)'
        if self.run_card['event_norm'].lower()=='bias':
            self.cross_sect_dict['xsec_string']+=', incl. bias (DO NOT USE)'

        # Per-mode status lines indexed by 'step' (0, 1, or 2=final).
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            status = ['Determining the number of unweighted events per channel',
                      'Updating the number of unweighted events per channel',
                      'Summary:']
            computed='(computed from LHE events)'
        elif mode in ['NLO', 'LO']:
            status = ['Results after grid setup:','Current results:',
                      'Final results and run summary:']
            computed='(computed from histogram information)'
        # NOTE(review): for any other mode, 'status'/'computed' stay unbound and
        # their use below would raise NameError; presumably callers only pass
        # the modes listed above (an explicit raise exists further down).

        if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            # Intermediate event-generation report.
            message = status[step] + '\n\n Intermediate results:' + \
                ('\n Random seed: %(randinit)d' + \
                 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
                 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
                 % self.cross_sect_dict
        elif mode in ['NLO','LO'] and not done:
            # Intermediate fixed-order report.
            if step == 0:
                message = '\n ' + status[0] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict
            else:
                message = '\n ' + status[1] + \
                    '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                    self.cross_sect_dict

        else:
            # Final summary banner.
            message = '\n --------------------------------------------------------------'
            message = message + \
                '\n ' + status[2] + proc_info
            if mode not in ['LO', 'NLO']:
                message = message + \
                    '\n Number of events generated: %s' % self.run_card['nevents']
            message = message + \
                '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
                self.cross_sect_dict
            message = message + \
                '\n --------------------------------------------------------------'
            # Scale/PDF uncertainties are only statistically meaningful for
            # fixed-order runs or sufficiently large event samples.
            if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
                if scale_pdf_info[0]:
                    # scale_pdf_info[0]: list of scale-variation dicts.
                    message = message + '\n Scale variation %s:' % computed
                    for s in scale_pdf_info[0]:
                        if s['unc']:
                            if self.run_card['ickkw'] != -1:
                                message = message + \
                                    ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
                                     '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
                            else:
                                # FxFx-like merging: quote soft/hard quadrature sum.
                                message = message + \
                                    ('\n Soft and hard scale dependence (added in quadrature): '\
                                     '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
                        else:
                            message = message + \
                                ('\n Dynamical_scale_choice %(label)i: '\
                                 '\n %(cen)8.3e pb') % s

                if scale_pdf_info[1]:
                    # scale_pdf_info[1]: list of PDF-variation dicts.
                    message = message + '\n PDF variation %s:' % computed
                    for p in scale_pdf_info[1]:
                        if p['unc']=='none':
                            message = message + \
                                ('\n %(name)s (central value only): '\
                                 '\n %(cen)8.3e pb') % p
                        elif p['unc']=='unknown':
                            message = message + \
                                ('\n %(name)s (%(size)s members; combination method unknown): '\
                                 '\n %(cen)8.3e pb') % p
                        else:
                            message = message + \
                                ('\n %(name)s (%(size)s members; using %(unc)s method): '\
                                 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p

            message = message + \
                '\n --------------------------------------------------------------'

        # Intermediate reports: just log and return, skipping the advanced
        # statistics collection and summary-file writing below.
        if (mode in ['NLO', 'LO'] and not done) or \
           (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
            logger.info(message+'\n')
            return

        # Gather the relevant MINT log files, whose layout depends on the mode.
        if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
            log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'NLO':
            log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
                                     pjoin(self.me_dir, 'SubProcesses'))
            all_log_files = log_GV_files
        elif mode == 'LO':
            # No virtuals at LO: only the born logs exist.
            log_GV_files = ''
            all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
                                      pjoin(self.me_dir, 'SubProcesses'))
        else:
            raise aMCatNLOError('Running mode %s not supported.'%mode)

        # Advanced statistics are best-effort: never let a parsing failure
        # abort the run; report the backtrace in the debug message instead.
        try:
            message, debug_msg = \
                self.compile_advanced_stats(log_GV_files, all_log_files, message)
        except Exception as e:
            debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
            # StringIO is bound to the six module at the top of this file, so
            # StringIO.StringIO() is six.StringIO().
            err_string = StringIO.StringIO()
            traceback.print_exc(limit=4, file=err_string)
            debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
                %err_string.getvalue()

        logger.debug(debug_msg+'\n')
        logger.info(message+'\n')

        # Persist both the user summary and the full (debug) summary in the
        # run's Events directory, then archive the run-defining files.
        evt_path = pjoin(self.me_dir, 'Events', self.run_name)
        open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
        open(pjoin(evt_path, '.full_summary.txt'),
             'w').write(message+'\n\n'+debug_msg+'\n')

        self.archive_files(evt_path,mode)
3175
        """ Copies in the Events/Run_<xxx> directory relevant files characterizing
        the run."""
        # NOTE(review): the 'def' line of this method is not visible in this
        # chunk; from the call site it is invoked as
        # self.archive_files(evt_path, mode).

        # Paths (relative to the process directory) worth bundling with the run.
        files_to_arxiv = [pjoin('Cards','param_card.dat'),
                          pjoin('Cards','MadLoopParams.dat'),
                          pjoin('Cards','FKS_params.dat'),
                          pjoin('Cards','run_card.dat'),
                          # NOTE(review): the directory is spelled 'SubProcesses'
                          # elsewhere in this file; on a case-sensitive
                          # filesystem these two entries would never match and
                          # are silently skipped by the isfile() guard below —
                          # confirm intended spelling.
                          pjoin('Subprocesses','setscales.f'),
                          pjoin('Subprocesses','cuts.f')]

        if mode in ['NLO', 'LO']:
            # Fixed-order runs additionally carry the analysis card.
            files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))

        if not os.path.exists(pjoin(evt_path,'RunMaterial')):
            os.mkdir(pjoin(evt_path,'RunMaterial'))

        # Copy whatever exists; missing files are skipped silently.
        for path in files_to_arxiv:
            if os.path.isfile(pjoin(self.me_dir,path)):
                files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
        # Bundle and clean up: only RunMaterial.tar.gz remains in evt_path.
        misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
        shutil.rmtree(pjoin(evt_path,'RunMaterial'))
3198
        """ This functions goes through the log files given in arguments and
        compiles statistics about MadLoop stability, virtual integration
        optimization and detection of potential error messages into a nice
        debug message to printed at the end of the run """
        # NOTE(review): the 'def' line of this method is not visible in this
        # chunk; from the call site it takes (log_GV_files, all_log_files,
        # message) and returns the (message, debug_msg) pair.

        def safe_float(str_float):
            # Tolerant conversion: a malformed number found in a log is logged
            # and mapped to -1.0 instead of aborting the statistics pass.
            try:
                return float(str_float)
            except ValueError:
                logger.debug('Could not convert the following float during'+
                             ' advanced statistics printout: %s'%str(str_float))
                return -1.0

        # Accumulators filled below:
        #   'UPS'        : channel -> 10 MadLoop stability counters plus, at
        #                  index 10, a 10-slot per-return-unit histogram
        #   'Errors'     : list of (log file name, number of ERROR matches)
        #   'virt_stats' : running min/max/avg of virtual-integration figures
        #   'timings'    : timing category -> {channel: accumulated seconds}
        stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
        # Extracts the MINT step ID from a log file name.
        mint_search = re.compile(r"MINT(?P<ID>\d*).txt")

        # ==== MadLoop stability statistics ====
        # Parses the MadLoop summary table printed in each G* log.
        # NOTE(review): 'Satistics' is presumably the exact (misspelled) text
        # emitted in the logs being scanned — do not "fix" the spelling here.
        UPS_stat_finder = re.compile(
             r"Satistics from MadLoop:.*"+\
             r"Total points tried\:\s+(?P<ntot>\d+).*"+\
             r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
             r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
             r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
             r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
             r"Double precision used\:\s+(?P<nddp>\d+).*"+\
             r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
             r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
             r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
             r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)

        # Human-readable names for the MadLoop reduction-method unit codes.
        unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
                              1 : 'CutTools (double precision)',
                              2 : 'PJFry++',
                              3 : 'IREGI',
                              4 : 'Golem95',
                              5 : 'Samurai',
                              6 : 'Ninja (double precision)',
                              7 : 'COLLIER',
                              8 : 'Ninja (quadruple precision)',
                              9 : 'CutTools (quadruple precision)'}
        # Matches '#Unit <u> = <count>' occurrence lines.
        RetUnit_finder =re.compile(
                           r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")

        for gv_log in log_GV_files:
            # Channel identified by the last 4 path components above the file.
            channel_name = '/'.join(gv_log.split('/')[-5:-1])
            log=open(gv_log,'r').read()
            UPS_stats = re.search(UPS_stat_finder,log)
            for retunit_stats in re.finditer(RetUnit_finder, log):
                if channel_name not in list(stats['UPS'].keys()):
                    # 10 stability counters + per-unit histogram at index 10.
                    stats['UPS'][channel_name] = [0]*10+[[0]*10]
                stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
                    += int(retunit_stats.group('n_occurences'))
            if not UPS_stats is None:
                try:
                    stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
                    stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
                    stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
                    stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
                    stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
                    stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
                    stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
                    stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
                    stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
                    stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
                except KeyError:
                    # First time this channel is seen: create its counters.
                    stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
                        int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
                        int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
                        int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
                        int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
                        int(UPS_stats.group('n10')),[0]*10]
        debug_msg = ""
        if len(list(stats['UPS'].keys()))>0:
            # Aggregate each counter over all channels.
            nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
            nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
            nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
            nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
            nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
            nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
            nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
            nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
            nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
            nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
            nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
                     for i in range(10)]
            # Percentage of exceptional PS points (counter index 4) per channel;
            # here 'chan' is a (name, counters) item pair.
            UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
                safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
            maxUPS = max(UPSfracs, key = lambda w: w[1])

            tmpStr = ""
            tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
            tmpStr += '\n Stability unknown: %d'%nTotsun
            tmpStr += '\n Stable PS point: %d'%nTotsps
            tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
            tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
            tmpStr += '\n Only double precision used: %d'%nTotddp
            tmpStr += '\n Quadruple precision used: %d'%nTotqdp
            tmpStr += '\n Initialization phase-space points: %d'%nTotini
            tmpStr += '\n Reduction methods used:'
            # Only report reduction methods actually used, most-used first.
            red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
                           unit_code_meaning.keys() if nTot1[i]>0]
            for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
                tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
            if nTot100 != 0:
                debug_msg += '\n Unknown return code (100): %d'%nTot100
            if nTot10 != 0:
                debug_msg += '\n Unknown return code (10): %d'%nTot10
            nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
                               not in list(unit_code_meaning.keys()))
            if nUnknownUnit != 0:
                debug_msg += '\n Unknown return code (1): %d'\
                    %nUnknownUnit

            # Promote the stability report into the user-visible message only
            # when the worst channel exceeds 0.1% unstable points; otherwise it
            # stays in the debug summary.
            if maxUPS[1]>0.001:
                message += tmpStr
                message += '\n Total number of unstable PS point detected:'+\
                    ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS)
                message += '\n Maximum fraction of UPS points in '+\
                    'channel %s (%4.2f%%)'%maxUPS
                message += '\n Please report this to the authors while '+\
                    'providing the file'
                message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
                                             maxUPS[0],'UPS.log'))
            else:
                debug_msg += tmpStr

        # ==== Virtual integration optimization statistics ====
        # Regexes for the accumulated virtual-ratio/ABS-virtual report lines.
        virt_tricks_finder = re.compile(
            r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
            r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
            r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
            r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")

        virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
            "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")

        channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")

        channel_contr_list = {}
        for gv_log in log_GV_files:
            logfile=open(gv_log,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])
            vf_stats = None
            # Exhaust the iterator so vf_stats holds the LAST (final) match.
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                try:
                    if v_frac < stats['virt_stats']['v_frac_min'][0]:
                        stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
                    if v_frac > stats['virt_stats']['v_frac_max'][0]:
                        stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
                    stats['virt_stats']['v_frac_avg'][0] += v_frac
                    stats['virt_stats']['v_frac_avg'][1] += 1
                except KeyError:
                    # First channel: initialise the running min/max/avg.
                    stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
                    stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
                    stats['virt_stats']['v_frac_avg']=[v_frac,1]

            # Record each channel's largest final ABS contribution.
            ccontr_stats = None
            for ccontr_stats in re.finditer(channel_contr_finder, log):
                pass
            if not ccontr_stats is None:
                contrib = safe_float(ccontr_stats.group('v_contr'))
                try:
                    if contrib>channel_contr_list[channel_name]:
                        channel_contr_list[channel_name]=contrib
                except KeyError:
                    channel_contr_list[channel_name]=contrib

        # Mean ABS contribution over all channels; used below to exclude
        # channels contributing less than 10% of the average.
        average_contrib = 0.0
        for value in channel_contr_list.values():
            average_contrib += value
        if len(list(channel_contr_list.values())) !=0:
            average_contrib = average_contrib / len(list(channel_contr_list.values()))

        relevant_log_GV_files = []
        excluded_channels = set([])
        all_channels = set([])
        for log_file in log_GV_files:
            channel_name = '/'.join(log_file.split('/')[-3:-1])
            all_channels.add(channel_name)
            try:
                if channel_contr_list[channel_name] > (0.1*average_contrib):
                    relevant_log_GV_files.append(log_file)
                else:
                    excluded_channels.add(channel_name)
            except KeyError:
                # No contribution recorded: keep the channel to be safe.
                relevant_log_GV_files.append(log_file)

        # Second pass over the non-negligible channels only.
        for gv_log in relevant_log_GV_files:
            logfile=open(gv_log,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(gv_log.split('/')[-3:-1])

            vt_stats = None
            for vt_stats in re.finditer(virt_tricks_finder, log):
                pass
            if not vt_stats is None:
                vt_stats_group = vt_stats.groupdict()
                v_ratio = safe_float(vt_stats.group('v_ratio'))
                v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
                v_contr = safe_float(vt_stats.group('v_abs_contr'))
                v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
                try:
                    # NOTE(review): the v_ratio_err_min/max updates below test
                    # v_ratio but store v_ratio_err — presumably the condition
                    # was meant to compare v_ratio_err; confirm before fixing.
                    if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
                        stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
                    if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
                        stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
                    if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
                        stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
                    if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
                        stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
                    if v_contr < stats['virt_stats']['v_contr_min'][0]:
                        stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
                    if v_contr > stats['virt_stats']['v_contr_max'][0]:
                        stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
                    if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
                        stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
                    if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
                        stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
                except KeyError:
                    # First channel: seed all eight running extrema at once.
                    stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
                    stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
                    stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
                    stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
                    stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
                    stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
                    stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
                    stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]

            vf_stats = None
            for vf_stats in re.finditer(virt_frac_finder, log):
                pass
            if not vf_stats is None:
                v_frac = safe_float(vf_stats.group('v_frac'))
                v_average = safe_float(vf_stats.group('v_average'))
                try:
                    if v_average < stats['virt_stats']['v_average_min'][0]:
                        stats['virt_stats']['v_average_min']=(v_average,channel_name)
                    if v_average > stats['virt_stats']['v_average_max'][0]:
                        stats['virt_stats']['v_average_max']=(v_average,channel_name)
                    stats['virt_stats']['v_average_avg'][0] += v_average
                    stats['virt_stats']['v_average_avg'][1] += 1
                except KeyError:
                    stats['virt_stats']['v_average_min']=[v_average,channel_name]
                    stats['virt_stats']['v_average_max']=[v_average,channel_name]
                    stats['virt_stats']['v_average_avg']=[v_average,1]

        # Format the harvested virtual statistics; a missing key means no
        # statistics could be collected at all.
        try:
            debug_msg += '\n\n Statistics on virtual integration optimization : '

            debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
                %tuple(stats['virt_stats']['v_frac_max'])
            debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
                %tuple(stats['virt_stats']['v_frac_min'])
            debug_msg += '\n Average virt fraction computed %.3f'\
                %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
            debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
                (len(excluded_channels),len(all_channels))
            debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
                %tuple(stats['virt_stats']['v_average_max'])
            debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
                %tuple(stats['virt_stats']['v_ratio_max'])
            tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
                %tuple(stats['virt_stats']['v_ratio_err_max'])
            debug_msg += tmpStr
            tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
                %tuple(stats['virt_stats']['v_contr_err_max'])
            debug_msg += tmpStr
        except KeyError:
            debug_msg += '\n Could not find statistics on the integration optimization. '

        # ==== Timing profile ====
        # Matches 'Time spent in <name> : <seconds>' lines.
        timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
            "(?P<time>[\d\+-Eed\.]*)\s*")

        for logf in log_GV_files:
            logfile=open(logf,'r')
            log = logfile.read()
            logfile.close()
            channel_name = '/'.join(logf.split('/')[-3:-1])
            # Tag the channel with its MINT step if identifiable from the name.
            mint = re.search(mint_search,logf)
            if not mint is None:
                channel_name = channel_name+' [step %s]'%mint.group('ID')

            for time_stats in re.finditer(timing_stat_finder, log):
                try:
                    stats['timings'][time_stats.group('name')][channel_name]+=\
                        safe_float(time_stats.group('time'))
                except KeyError:
                    if time_stats.group('name') not in list(stats['timings'].keys()):
                        stats['timings'][time_stats.group('name')] = {}
                    stats['timings'][time_stats.group('name')][channel_name]=\
                        safe_float(time_stats.group('time'))

        # Format whole seconds as H:MM:SS.
        Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
        try:
            totTimeList = [(time, chan) for chan, time in \
                           stats['timings']['Total'].items()]
        except KeyError:
            totTimeList = []

        totTimeList.sort()
        if len(totTimeList)>0:
            debug_msg += '\n\n Inclusive timing profile :'
            debug_msg += '\n Overall slowest channel %s (%s)'%\
                (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
            debug_msg += '\n Average channel running time %s'%\
                Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
            debug_msg += '\n Aggregated total running time %s'%\
                Tstr(sum([el[0] for el in totTimeList]))
        else:
            debug_msg += '\n\n Inclusive timing profile non available.'

        # Report each timing category (largest aggregate first) as a fraction
        # of the 'Total' time, per channel.
        sorted_keys = sorted(list(stats['timings'].keys()), key= lambda stat: \
                             sum(stats['timings'][stat].values()), reverse=True)
        for name in sorted_keys:
            if name=='Total':
                continue
            if sum(stats['timings'][name].values())<=0.0:
                debug_msg += '\n Zero time record for %s.'%name
                continue
            try:
                # NOTE(review): 'except KeyError as ZeroDivisionError' (here and
                # below) only catches KeyError and rebinds the builtin name
                # ZeroDivisionError locally; presumably
                # 'except (KeyError, ZeroDivisionError):' was intended, so a
                # genuine division by zero would currently propagate.
                TimeList = [((100.0*time/stats['timings']['Total'][chan]),
                             chan) for chan, time in stats['timings'][name].items()]
            except KeyError as ZeroDivisionError:
                debug_msg += '\n\n Timing profile for %s unavailable.'%name
                continue
            TimeList.sort()
            debug_msg += '\n Timing profile for <%s> :'%name
            try:
                debug_msg += '\n Overall fraction of time %.3f %%'%\
                    safe_float((100.0*(sum(stats['timings'][name].values())/
                    sum(stats['timings']['Total'].values()))))
            except KeyError as ZeroDivisionError:
                debug_msg += '\n Overall fraction of time unavailable.'
            debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
                (TimeList[-1][0],TimeList[-1][1])
            debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
                (TimeList[0][0],TimeList[0][1])

        # ==== Error detection ====
        # Count the word ERROR (case-insensitive); the look-behind/look-ahead
        # exclude the two known benign phrases surrounding it.
        err_finder = re.compile(\
            r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
        for log in all_log_files:
            logfile=open(log,'r')
            nErrors = len(re.findall(err_finder, logfile.read()))
            logfile.close()
            if nErrors != 0:
                stats['Errors'].append((str(log),nErrors))

        nErrors = sum([err[1] for err in stats['Errors']],0)
        if nErrors != 0:
            debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
                %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
                'found in the following log file%s:'%('s' if \
                len(stats['Errors'])>1 else '')
            # List at most the first three offending logs explicitly.
            for error in stats['Errors'][:3]:
                log_name = '/'.join(error[0].split('/')[-5:])
                debug_msg += '\n > %d error%s in %s'%\
                    (error[1],'s' if error[1]>1 else '',log_name)
            if len(stats['Errors'])>3:
                nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
                nRemainingLogs = len(stats['Errors'])-3
                debug_msg += '\n And another %d error%s in %d other log file%s'%\
                    (nRemainingErrors, 's' if nRemainingErrors>1 else '',
                     nRemainingLogs, 's ' if nRemainingLogs>1 else '')

        return message, debug_msg
3616
3617
        """this function calls the reweighting routines and creates the event file in the
        Event dir. Return the name of the event file created
        """
        # NOTE(review): the 'def' line of this method is not visible in this
        # chunk; the body reads names (options, event_norm, mode, nevents) that
        # must be bound by the missing signature — confirm against the full file.
        scale_pdf_info=[]
        # Run reweighting only if the run_card actually requests any scale/PDF
        # variation or stores reweight info.
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1\
           or self.run_card['store_rwgt_info']:
            scale_pdf_info = self.run_reweight(options['reweightonly'])
        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                     cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                       stdin=subprocess.PIPE,
                       stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        # The collect_events executable reads an event-normalisation menu
        # choice from stdin; map the run_card setting onto that choice.
        if event_norm.lower() == 'sum':
            p.communicate(input = '1\n'.encode())
        elif event_norm.lower() == 'unity':
            p.communicate(input = '3\n'.encode())
        elif event_norm.lower() == 'bias':
            p.communicate(input = '0\n'.encode())
        else:
            # Default choice ('average'-like normalisation).
            p.communicate(input = '2\n'.encode())

        # The last whitespace-separated token of the log is the name of the
        # event file the executable produced.
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                    'The event file has not been created. Check collect_events.log')
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            # step=2 triggers the final (post-generation) summary printout.
            self.print_summary(options, 2, mode, scale_pdf_info)
            res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
            for res_file in res_files:
                files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

        logger.info('The %s file has been generated.\n' % (evt_file))
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        # Strip the trailing '.gz': callers receive the uncompressed path name.
        return evt_file[:-3]
3660
3661
3663 """runs mcatnlo on the generated event file, to produce showered-events
3664 """
3665 logger.info('Preparing MCatNLO run')
3666 try:
3667 misc.gunzip(evt_file)
3668 except Exception:
3669 pass
3670
3671 self.banner = banner_mod.Banner(evt_file)
3672 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
3673
3674
3675
3676 if int(self.banner.get_detail('run_card', 'nevents') / \
3677 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
3678 != self.banner.get_detail('run_card', 'nevents'):
3679 logger.warning(\
3680 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
3681 'Setting it to 1.')
3682 self.shower_card['nsplit_jobs'] = 1
3683
3684
3685 if self.shower_card['nevents'] > 0 and \
3686 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
3687 self.shower_card['nsplit_jobs'] != 1:
3688 logger.warning(\
3689 'Only a part of the events will be showered.\n' + \
3690 'Setting nsplit_jobs in the shower_card to 1.')
3691 self.shower_card['nsplit_jobs'] = 1
3692
3693 self.banner_to_mcatnlo(evt_file)
3694
3695
3696
3697
3698 if 'fastjet' in self.shower_card['extralibs']:
3699
3700 if not 'stdc++' in self.shower_card['extralibs']:
3701 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
3702 self.shower_card['extralibs'] += ' stdc++'
3703
3704 try:
3705
3706 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
3707 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
3708 output, error = p.communicate()
3709
3710 output = output.decode()[:-1]
3711
3712 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
3713 logger.warning('Linking FastJet: updating EXTRAPATHS')
3714 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
3715 if not pjoin(output, 'include') in self.shower_card['includepaths']:
3716 logger.warning('Linking FastJet: updating INCLUDEPATHS')
3717 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
3718
3719 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
3720 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
3721 except Exception:
3722 logger.warning('Linking FastJet: using fjcore')
3723
3724 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
3725 if not 'fjcore.o' in self.shower_card['analyse']:
3726 self.shower_card['analyse'] += ' fjcore.o'
3727
3728 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
3729 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
3730
3731 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
3732 for line in fjwrapper_lines:
3733 if '//INCLUDE_FJ' in line:
3734 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
3735 if '//NAMESPACE_FJ' in line:
3736 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
3737 with open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w') as fsock:
3738 fsock.write('\n'.join(fjwrapper_lines) + '\n')
3739
3740 extrapaths = self.shower_card['extrapaths'].split()
3741
3742
3743 if shower in ['HERWIGPP', 'PYTHIA8']:
3744 path_dict = {'HERWIGPP': ['hepmc_path',
3745 'thepeg_path',
3746 'hwpp_path'],
3747 'PYTHIA8': ['pythia8_path']}
3748
3749 if not all([self.options[ppath] and os.path.exists(self.options[ppath]) for ppath in path_dict[shower]]):
3750 raise aMCatNLOError('Some paths are missing or invalid in the configuration file.\n' + \
3751 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))
3752
3753 if shower == 'HERWIGPP':
3754 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
3755 self.shower_card['extrapaths'] += ' %s' % pjoin(self.options['hepmc_path'], 'lib')
3756
3757
3758 if shower == 'PYTHIA8':
3759 hepmc = subprocess.Popen([pjoin(self.options['pythia8_path'], 'bin', 'pythia8-config'), '--hepmc2'],
3760 stdout = subprocess.PIPE).stdout.read().decode().strip()
3761
3762
3763
3764 extrapaths.append(hepmc.split()[1].replace('-L', ''))
3765
3766 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3767 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
3768
3769
3770 if sys.platform == 'darwin':
3771 ld_library_path = 'DYLD_LIBRARY_PATH'
3772 else:
3773 ld_library_path = 'LD_LIBRARY_PATH'
3774 if ld_library_path in list(os.environ.keys()):
3775 paths = os.environ[ld_library_path]
3776 else:
3777 paths = ''
3778 paths += ':' + ':'.join(extrapaths)
3779 os.putenv(ld_library_path, paths)
3780
3781 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
3782 self.shower_card.write_card(shower, shower_card_path)
3783
3784
3785 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
3786 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
3787 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))
3788
3789 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
3790 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
3791
3792
3793
3794
3795
3796
3797
3798
3799
3800
3801
3802
3803 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
3804 stderr=open(mcatnlo_log, 'w'),
3805 cwd=pjoin(self.me_dir, 'MCatNLO'),
3806 close_fds=True)
3807
3808 exe = 'MCATNLO_%s_EXE' % shower
3809 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
3810 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
3811 print(open(mcatnlo_log).read())
3812 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
3813 logger.info(' ... done')
3814
3815
3816 count = 1
3817 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
3818 (shower, count))):
3819 count += 1
3820 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
3821 (shower, count))
3822 os.mkdir(rundir)
3823 files.cp(shower_card_path, rundir)
3824
3825
3826
3827 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
3828 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
3829 logger.info('Cleaning old files and splitting the event file...')
3830
3831 files.rm([f for f in event_files if 'events.lhe' not in f])
3832 if self.shower_card['nsplit_jobs'] > 1:
3833 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
3834 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
3835 stdin=subprocess.PIPE,
3836 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
3837 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3838 p.communicate(input = ('events.lhe\n%d\n' % self.shower_card['nsplit_jobs']).encode())
3839 logger.info('Splitting done.')
3840 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
3841
3842 event_files.sort()
3843
3844 self.update_status('Showering events...', level='shower')
3845 logger.info('(Running in %s)' % rundir)
3846 if shower != 'PYTHIA8':
3847 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
3848 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
3849 else:
3850
3851 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
3852 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
3853 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3854 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
3855 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
3856 else:
3857 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
3858
3859 if shower == 'HERWIGPP':
3860 try:
3861 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
3862 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
3863 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
3864 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig'), rundir)
3865 except Exception:
3866 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
3867
3868 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
3869 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
3870
3871 files.ln(evt_file, rundir, 'events.lhe')
3872 for i, f in enumerate(event_files):
3873 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
3874
3875 if not self.shower_card['analyse']:
3876
3877 out_id = 'HEP'
3878 else:
3879
3880 if "HwU" in self.shower_card['analyse']:
3881 out_id = 'HWU'
3882 else:
3883 out_id = 'TOP'
3884
3885
3886 with open(pjoin(rundir, 'shower.sh'), 'w') as fsock:
3887
3888 if sys.platform == 'darwin':
3889 ld_library_path = 'DYLD_LIBRARY_PATH'
3890 else:
3891 ld_library_path = 'LD_LIBRARY_PATH'
3892 fsock.write(open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
3893 % {'ld_library_path': ld_library_path,
3894 'extralibs': ':'.join(extrapaths)})
3895 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
3896
3897 if event_files:
3898 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
3899 for i in range(len(event_files))]
3900 else:
3901 arg_list = [[shower, out_id, self.run_name]]
3902
3903 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
3904 self.njobs = 1
3905 self.wait_for_complete('shower')
3906
3907
3908 message = ''
3909 warning = ''
3910 to_gzip = [evt_file]
3911 if out_id == 'HEP':
3912
3913 if shower in ['PYTHIA8', 'HERWIGPP']:
3914 hep_format = 'HEPMC'
3915 ext = 'hepmc'
3916 else:
3917 hep_format = 'StdHEP'
3918 ext = 'hep'
3919
3920 hep_file = '%s_%s_0.%s.gz' % \
3921 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
3922 count = 0
3923
3924
3925
3926 while os.path.exists(hep_file) or \
3927 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
3928 count +=1
3929 hep_file = '%s_%s_%d.%s.gz' % \
3930 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
3931
3932 try:
3933 if self.shower_card['nsplit_jobs'] == 1:
3934 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
3935 message = ('The file %s has been generated. \nIt contains showered' + \
3936 ' and hadronized events in the %s format obtained' + \
3937 ' showering the parton-level event file %s.gz with %s') % \
3938 (hep_file, hep_format, evt_file, shower)
3939 else:
3940 hep_list = []
3941 for i in range(self.shower_card['nsplit_jobs']):
3942 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
3943 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
3944 message = ('The following files have been generated:\n %s\nThey contain showered' + \
3945 ' and hadronized events in the %s format obtained' + \
3946 ' showering the (split) parton-level event file %s.gz with %s') % \
3947 ('\n '.join(hep_list), hep_format, evt_file, shower)
3948
3949 except OSError as IOError:
3950 raise aMCatNLOError('No file has been generated, an error occurred.'+\
3951 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
3952
3953
3954 if hep_format == 'StdHEP':
3955 try:
3956 self.do_plot('%s -f' % self.run_name)
3957 except Exception as error:
3958 logger.info("Fail to make the plot. Continue...")
3959 pass
3960
3961 elif out_id == 'TOP' or out_id == 'HWU':
3962
3963 if out_id=='TOP':
3964 ext='top'
3965 elif out_id=='HWU':
3966 ext='HwU'
3967 topfiles = []
3968 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
3969 for top_tar in top_tars:
3970 topfiles.extend(top_tar.getnames())
3971
3972
3973 if len(top_tars) != self.shower_card['nsplit_jobs']:
3974 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
3975 (self.shower_card['nsplit_jobs'], len(top_tars)))
3976
3977
3978
3979 filename = 'plot_%s_%d_' % (shower, 1)
3980 count = 1
3981 while os.path.exists(pjoin(self.me_dir, 'Events',
3982 self.run_name, '%s0.%s' % (filename,ext))) or \
3983 os.path.exists(pjoin(self.me_dir, 'Events',
3984 self.run_name, '%s0__1.%s' % (filename,ext))):
3985 count += 1
3986 filename = 'plot_%s_%d_' % (shower, count)
3987
3988 if out_id=='TOP':
3989 hist_format='TopDrawer format'
3990 elif out_id=='HWU':
3991 hist_format='HwU and GnuPlot formats'
3992
3993 if not topfiles:
3994
3995 warning = 'No .top file has been generated. For the results of your ' +\
3996 'run, please check inside %s' % rundir
3997 elif self.shower_card['nsplit_jobs'] == 1:
3998
3999 top_tars[0].extractall(path = rundir)
4000 plotfiles = []
4001 for i, file in enumerate(topfiles):
4002 if out_id=='TOP':
4003 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
4004 '%s%d.top' % (filename, i))
4005 files.mv(pjoin(rundir, file), plotfile)
4006 elif out_id=='HWU':
4007 out=pjoin(self.me_dir,'Events',
4008 self.run_name,'%s%d'% (filename,i))
4009 histos=[{'dirname':pjoin(rundir,file)}]
4010 self.combine_plots_HwU(histos,out)
4011 try:
4012 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
4013 stdout=os.open(os.devnull, os.O_RDWR),\
4014 stderr=os.open(os.devnull, os.O_RDWR),\
4015 cwd=pjoin(self.me_dir, 'Events', self.run_name))
4016 except Exception:
4017 pass
4018 plotfile=pjoin(self.me_dir,'Events',self.run_name,
4019 '%s%d.HwU'% (filename,i))
4020 plotfiles.append(plotfile)
4021
4022 ffiles = 'files'
4023 have = 'have'
4024 if len(plotfiles) == 1:
4025 ffiles = 'file'
4026 have = 'has'
4027
4028 message = ('The %s %s %s been generated, with histograms in the' + \
4029 ' %s, obtained by showering the parton-level' + \
4030 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
4031 hist_format, evt_file, shower)
4032 else:
4033
4034 topfiles_set = set(topfiles)
4035 plotfiles = []
4036 for j, top_tar in enumerate(top_tars):
4037 top_tar.extractall(path = rundir)
4038 for i, file in enumerate(topfiles_set):
4039 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
4040 '%s%d__%d.%s' % (filename, i, j + 1,ext))
4041 files.mv(pjoin(rundir, file), plotfile)
4042 plotfiles.append(plotfile)
4043
4044
4045 if self.shower_card['combine_td']:
4046 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
4047
4048 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
4049 norm = 1.
4050 else:
4051 norm = 1./float(self.shower_card['nsplit_jobs'])
4052
4053 plotfiles2 = []
4054 for i, file in enumerate(topfiles_set):
4055 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
4056 for j in range(self.shower_card['nsplit_jobs'])]
4057 if out_id=='TOP':
4058 infile="%d\n%s\n%s\n" % \
4059 (self.shower_card['nsplit_jobs'],
4060 '\n'.join(filelist),
4061 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
4062 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
4063 stdin=subprocess.PIPE,
4064 stdout=os.open(os.devnull, os.O_RDWR),
4065 cwd=pjoin(self.me_dir, 'Events', self.run_name))
4066 p.communicate(input = infile.encode())
4067 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
4068 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
4069 elif out_id=='HWU':
4070 out=pjoin(self.me_dir,'Events',
4071 self.run_name,'%s%d'% (filename,i))
4072 histos=[]
4073 norms=[]
4074 for plotfile in plotfiles:
4075 histos.append({'dirname':plotfile})
4076 norms.append(norm)
4077 self.combine_plots_HwU(histos,out,normalisation=norms)
4078 try:
4079 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
4080 stdout=os.open(os.devnull, os.O_RDWR),\
4081 stderr=os.open(os.devnull, os.O_RDWR),\
4082 cwd=pjoin(self.me_dir, 'Events',self.run_name))
4083 except Exception:
4084 pass
4085
4086 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
4087 tar = tarfile.open(
4088 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
4089 for f in filelist:
4090 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
4091 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
4092
4093 tar.close()
4094
4095 ffiles = 'files'
4096 have = 'have'
4097 if len(plotfiles2) == 1:
4098 ffiles = 'file'
4099 have = 'has'
4100
4101 message = ('The %s %s %s been generated, with histograms in the' + \
4102 ' %s, obtained by showering the parton-level' + \
4103 ' file %s.gz with %s.\n' + \
4104 'The files from the different shower ' + \
4105 'jobs (before combining them) can be found inside %s.') % \
4106 (ffiles, ', '.join(plotfiles2), have, hist_format,\
4107 evt_file, shower,
4108 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))
4109
4110 else:
4111 message = ('The following files have been generated:\n %s\n' + \
4112 'They contain histograms in the' + \
4113 ' %s, obtained by showering the parton-level' + \
4114 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
4115 hist_format, evt_file, shower)
4116
4117
4118 run_dir_path = pjoin(rundir, self.run_name)
4119 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
4120 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
4121 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
4122 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
4123 %(shower, count)))
4124 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
4125 cwd=run_dir_path)
4126 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
4127
4128 for f in to_gzip:
4129 misc.gzip(f)
4130 if message:
4131 logger.info(message)
4132 if warning:
4133 logger.warning(warning)
4134
4135 self.update_status('Run complete', level='shower', update_results=True)
4136
4137
    def set_run_name(self, name, tag=None, level='parton', reload_card=False,**opts):
        """Define the run name, the run_tag, the banner and the results.

        If *name* is the current run, only the tag bookkeeping is refreshed;
        otherwise the previous run is stored, the run_card is (re)loaded and a
        banner is recovered for the new run.  Returns None for parton level,
        otherwise the tag of the run holding the data needed for *level*.
        """

        # levels which, once already present in the results for the current
        # tag, force a fresh tag so that existing output is not overwritten
        upgrade_tag = {'parton': ['parton','delphes','shower','madanalysis5_hadron'],
                       'shower': ['shower','delphes','madanalysis5_hadron'],
                       'delphes':['delphes'],
                       'madanalysis5_hadron':['madanalysis5_hadron'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # same run name: only the tag may need updating
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                for tag in upgrade_tag[level]:
                    # this level already produced output for the latest tag:
                    # switch to a brand new tag
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return

        # save/compress the output of the previous run before switching
        if self.run_name:
            self.store_result()

        self.run_name = name

        # the run_card on disk is re-read for the new run (possibly overridden
        # by the one stored in the recovered banner just below)
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # recover an existing banner for this run/level, when available
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if 'mgruncard' in self.banner:
            self.run_card = self.banner.charge_card('run_card')
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass # No results yet: the default tag is fine
        elif not self.run_name in self.results:
            # requesting a post-parton level on a run never registered
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:
                # data for this level already exists for the latest tag:
                # generate a new tag to avoid overwriting it
                if getattr(self.results[self.run_name][-1], tag):
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # by default, keep the tag of the latest existing entry
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag

        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # return the tag of the run which contains the data needed to start
        # the computation at the requested level
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the entries backwards and pick the most recent one with
            # shower-level (pythia) output
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
4226
4227
        """ tar the pythia results. This is done when we are quite sure that
        the pythia output will not be use anymore """

        if not self.run_name:
            return

        self.results.save()

        # self.to_store lists the post-processing actions still pending
        if not self.to_store:
            return

        if 'event' in self.to_store:
            # gzip the parton-level event file of the current run, or simply
            # remove the uncompressed copy when a gzipped one already exists
            if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
                if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
                    self.update_status('gzipping output file: events.lhe', level='parton', error=True)
                    misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
                else:
                    os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
            # drop the temporary reweight file, if any
            if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
                os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))

        # NOTE(review): 'tag' is assigned but not used below — confirm whether
        # it can be removed or whether code using it was dropped
        tag = self.run_card['run_tag']

        self.to_store = []
4254
4255
4256
4258 """get the list of Gdirectory if not yet saved."""
4259
4260 if hasattr(self, "Gdirs"):
4261 if self.me_dir in self.Gdirs:
4262 if Pdir is None:
4263 return sum(self.Gdirs.values())
4264 else:
4265 return self.Gdirs[Pdir]
4266
4267 Pdirs = self.get_Pdir()
4268 Gdirs = {self.me_dir:[]}
4269 for P in Pdirs:
4270 Gdirs[P] = [pjoin(P,G) for G in os.listdir(P) if G.startswith('G') and
4271 os.path.isdir(pjoin(P,G))]
4272
4273 self.Gdirs = Gdirs
4274 return self.getGdir(Pdir)
4275
4276
4278 """reads the info in the init block and returns them in a dictionary"""
4279 ev_file = open(evt_file)
4280 init = ""
4281 found = False
4282 while True:
4283 line = ev_file.readline()
4284 if "<init>" in line:
4285 found = True
4286 elif found and not line.startswith('#'):
4287 init += line
4288 if "</init>" in line or "<event>" in line:
4289 break
4290 ev_file.close()
4291
4292
4293
4294
4295
4296
4297 init_dict = {}
4298 init_dict['idbmup1'] = int(init.split()[0])
4299 init_dict['idbmup2'] = int(init.split()[1])
4300 init_dict['ebmup1'] = float(init.split()[2])
4301 init_dict['ebmup2'] = float(init.split()[3])
4302 init_dict['pdfgup1'] = int(init.split()[4])
4303 init_dict['pdfgup2'] = int(init.split()[5])
4304 init_dict['pdfsup1'] = int(init.split()[6])
4305 init_dict['pdfsup2'] = int(init.split()[7])
4306 init_dict['idwtup'] = int(init.split()[8])
4307 init_dict['nprup'] = int(init.split()[9])
4308
4309 return init_dict
4310
4311
        """creates the mcatnlo input script using the values set in the header of the event_file.
        It also checks if the lhapdf library is used"""

        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        # NOTE(review): 'itry' is never used below — confirm before removing
        itry = 0
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        # never shower more events than were generated
        if nevents < 0 or \
           nevents > self.banner.get_detail('run_card', 'nevents'):
            nevents = self.banner.get_detail('run_card', 'nevents')

        # events are evenly distributed among the split shower jobs
        # (true division — the value is truncated by the %d formats below)
        nevents = nevents / self.shower_card['nsplit_jobs']

        # Monte-Carlo masses as written in the banner: one "pdg mass" per line
        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        # key=value lines consumed by the MCatNLO shower driver script
        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                             self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
4340
4341 try:
4342 aewm1 = self.banner.get_detail('param_card', 'sminputs', 1).value
4343 raise KeyError
4344 except KeyError:
4345 mod = self.get_model()
4346 if not hasattr(mod, 'parameter_dict'):
4347 from models import model_reader
4348 mod = model_reader.ModelReader(mod)
4349 mod.set_parameters_and_couplings(self.banner.param_card)
4350 aewm1 = 0
4351 for key in ['aEWM1', 'AEWM1', 'aEWm1', 'aewm1']:
4352 if key in mod['parameter_dict']:
4353 aewm1 = mod['parameter_dict'][key]
4354 break
4355 elif 'mdl_%s' % key in mod['parameter_dict']:
4356 aewm1 = mod['parameter_dict']['mod_%s' % key]
4357 break
4358 else:
4359 for key in ['aEW', 'AEW', 'aEw', 'aew']:
4360 if key in mod['parameter_dict']:
4361 aewm1 = 1./mod['parameter_dict'][key]
4362 break
4363 elif 'mdl_%s' % key in mod['parameter_dict']:
4364 aewm1 = 1./mod['parameter_dict']['mod_%s' % key]
4365 break
4366
        content += 'ALPHAEW=%s\n' % aewm1

        # masses and widths of the heavy SM particles, from the param_card
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            # no pdg 25 in the param_card: fall back on hard-coded defaults
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        # Monte-Carlo masses of the light quarks, from the banner
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # banner without lepton Monte-Carlo masses: read the defaults from
            # the shower-specific include file instead
            mcmass_lines = [l for l in \
                    open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                         ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                # fortran doubles use 'd' exponents: convert for python floats
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if a specific LHAPDF set must be linked for the shower
        # (HERWIG++ always needs LHAPDF)
        if int(self.shower_card['pdfcode']) > 1 or \
            (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1) or \
            shower=='HERWIGPP' :
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                         stdout = subprocess.PIPE).stdout.read().decode().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode']==0:
                # pdfcode 0 (reachable here only for HERWIG++): no PDFCODE line
                lhaid_list = ''
                content += ''
            elif self.shower_card['pdfcode']==1:
                # pdfcode 1: re-use the PDF of the event generation (<init> block)
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode'])==1 or \
            int(self.shower_card['pdfcode'])==-1 and True:
            # NOTE(review): by operator precedence this reads "==1 or ==-1";
            # the trailing "and True" has no effect — confirm intent.
            # Try to use the same PDF as the event generation; fall back on the
            # shower internal PDF when no working LHAPDF installation is found.
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                             stdout = subprocess.PIPE).stdout.read().decode().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                               ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                               ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                               ' same set as was used in the event generation install LHAPDF and set the path using'+\
                               ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            # use the shower internal default PDF
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # pass along the configured shower/HepMC installation paths
        if self.options['pythia8_path']:
            content+='PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content+='HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path'] and self.options['thepeg_path'] != self.options['hwpp_path']:
            content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path'] and self.options['hepmc_path'] != self.options['hwpp_path']:
            content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

        # write the input script consumed by the MCatNLO driver and return the
        # selected shower name
        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
4478
4479
        """runs the reweight_xsec_events executables on each sub-event file generated
        to compute on the fly scale and/or PDF uncertainities"""
        logger.info(' Doing reweight')

        # bookkeeping file: one line per channel event file
        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if 'only' is set, restore the original bookkeeping file saved by a
        # previous pass (this function rewrites it below)
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        # read the nevents_unweighted file to get the list of event files
        file = open(nev_unw)
        lines = file.read().split('\n')
        file.close()
        # keep a backup copy before rewriting it with the '.rwgt' names
        files.cp(nev_unw, nev_unw + '.orig')

        # each line: <path> <nevents> ... <weight>; skip empty channels
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']
        if self.run_card['event_norm'].lower()=='bias' and self.run_card['nevents'] != 0:
            # for biased generation the per-event weight is simply 1/nevents
            evt_wghts[:]=[1./float(self.run_card['nevents']) for wgt in evt_wghts]

        # link the reweight executable inside each channel directory
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        # NOTE(review): 'evt' here is the value left over from the last loop
        # iteration — this relies on all channels using the same event-file
        # name; confirm.
        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        # check that the new event files are complete (a valid LHE file must
        # end with the closing root tag)
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
                    pjoin(self.me_dir, 'SubProcesses', evt_file)], \
                    stdout = subprocess.PIPE).stdout.read().decode().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the' + \
                        '\'reweight_xsec_events.output\' files inside the ' + \
                        '\'SubProcesses/P*/G*/ directories for details')

        # rewrite the bookkeeping file so that it points to the '.rwgt' files
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()
        return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
4533
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/pdf_scale_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents. The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
        xsec_pdf0 xsec_pdf1 ...."""

        scales=[]
        pdfs=[]
        # accumulate, channel by channel, the event-weighted cross sections
        # for every scale/pdf variation
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    for j,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        # fortran doubles use 'D' exponents: convert for float()
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            # add this channel to the running sums
                            scales[j] = [a + b for a, b in zip(scales[j], scales_this)]
                        except IndexError:
                            # first channel: initialise the sums
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    for j,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[j] = [a + b for a, b in zip(pdfs[j], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # compute the scale uncertainty for each dynamical scale choice
        scale_info=[]
        for j,scale in enumerate(scales):
            # entry 0 is the central value
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max/min of the full variation envelope, in percent
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac variations combined in quadrature; the list is
                # ordered with the renormalisation scale running fastest
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen)
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen)
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                # reweighting disabled (or vanishing central value): report zeros
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # the PDF uncertainty is delegated to the python interface of LHAPDF,
        # when available
        if any(self.run_card['reweight_pdf']):
            lhapdf = misc.import_python_lhapdf(self.options['lhapdf'])
            if lhapdf:
                use_lhapdf = True
            else:
                logger.warning("Failed to access python version of LHAPDF: "\
                               "cannot compute PDF uncertainty from the "\
                               "weights in the events. The weights in the LHE " \
                               "event files will still cover all PDF set members, "\
                               "but there will be no PDF uncertainty printed in the run summary. \n "\
                               "If the python interface to LHAPDF is available on your system, try "\
                               "adding its location to the PYTHONPATH environment variable and the"\
                               "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                use_lhapdf=False

        # silence LHAPDF banners
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            # entry 0 is the central member
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        # let LHAPDF combine the members according to the set's
                        # own error convention (Hessian or replicas)
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
4658
4659
4671
4672 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
4673 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
4674 self.ijob = 0
4675 if run_type != 'shower':
4676 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
4677 for args in arg_list:
4678 for Pdir, jobs in job_dict.items():
4679 for job in jobs:
4680 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
4681 if self.cluster_mode == 2:
4682 time.sleep(1)
4683 else:
4684 self.njobs = len(arg_list)
4685 for args in arg_list:
4686 [(cwd, exe)] = list(job_dict.items())
4687 self.run_exe(exe, args, run_type, cwd)
4688
4689 self.wait_for_complete(run_type)
4690
4691
4692
        """check the integrity of the event files after splitting, and resubmit
        those which are not nicely terminated"""
        jobs_to_resubmit = []
        for job in jobs:
            last_line = ''
            try:
                # a complete LHE file must end with the closing root tag
                last_line = subprocess.Popen(
                        ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
                            stdout = subprocess.PIPE).stdout.read().decode().strip()
            except IOError:
                # missing/unreadable file: last_line stays '' and the job is
                # resubmitted below
                pass
            if last_line != "</LesHouchesEvents>":
                jobs_to_resubmit.append(job)
        self.njobs = 0
        if jobs_to_resubmit:
            # NOTE(review): 'run_type' is assigned but not used afterwards —
            # confirm whether it was meant to be passed to run_all_jobs
            run_type = 'Resubmitting broken jobs'
            logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
            for job in jobs_to_resubmit:
                logger.debug('Resubmitting ' + job['dirname'] + '\n')
            self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
4714
4715
4717 """looks into the nevents_unweighed_splitted file to check how many
4718 split jobs are needed for this (pdir, job). arg is F, B or V"""
4719
4720 splittings = []
4721 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
4722 pattern = re.compile('for i in (\d+) ; do')
4723 match = re.search(pattern, ajob)
4724 channel = match.groups()[0]
4725
4726
4727 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
4728
4729
4730 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
4731 pjoin(pdir, 'G%s%s' % (arg,channel)))
4732 matches = re.findall(pattern, nevents_file)
4733 for m in matches:
4734 splittings.append(m)
4735 return splittings
4736
4737
4738 - def run_exe(self, exe, args, run_type, cwd=None):
4739 """this basic function launch locally/on cluster exe with args as argument.
4740 """
4741
4742 execpath = None
4743 if cwd and os.path.exists(pjoin(cwd, exe)):
4744 execpath = pjoin(cwd, exe)
4745 elif not cwd and os.path.exists(exe):
4746 execpath = exe
4747 else:
4748 raise aMCatNLOError('Cannot find executable %s in %s' \
4749 % (exe, os.getcwd()))
4750
4751 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
4752 subprocess.call(['chmod', '+x', exe], cwd=cwd)
4753
4754 if self.cluster_mode == 0:
4755
4756 misc.call(['./'+exe] + args, cwd=cwd)
4757 self.ijob += 1
4758 self.update_status((max([self.njobs - self.ijob - 1, 0]),
4759 min([1, self.njobs - self.ijob]),
4760 self.ijob, run_type), level='parton')
4761
4762
4763 elif 'reweight' in exe:
4764
4765
4766 input_files, output_files = [], []
4767 pdfinput = self.get_pdf_input_filename()
4768 if os.path.exists(pdfinput):
4769 input_files.append(pdfinput)
4770 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
4771 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
4772 input_files.append(args[0])
4773 output_files.append('%s.rwgt' % os.path.basename(args[0]))
4774 output_files.append('reweight_xsec_events.output')
4775 output_files.append('scale_pdf_dependence.dat')
4776
4777 return self.cluster.submit2(exe, args, cwd=cwd,
4778 input_files=input_files, output_files=output_files,
4779 required_output=output_files)
4780
4781 elif 'ajob' in exe:
4782
4783
4784 if type(args[0]) == str:
4785 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
4786
4787 self.cluster.submit2(exe, args, cwd=cwd,
4788 input_files=input_files, output_files=output_files,
4789 required_output=required_output)
4790
4791
4792
4793
4794
4795
4796 elif 'shower' in exe:
4797
4798
4799
4800 input_files, output_files = [], []
4801 shower = args[0]
4802
4803 if shower == 'PYTHIA8':
4804 input_files.append(pjoin(cwd, 'Pythia8.exe'))
4805 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
4806 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
4807 input_files.append(pjoin(cwd, 'config.sh'))
4808 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
4809 else:
4810 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
4811 else:
4812 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
4813 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
4814 if shower == 'HERWIGPP':
4815 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++')):
4816 input_files.append(pjoin(cwd, 'Herwig++'))
4817 if os.path.exists(pjoin(self.options['hwpp_path'], 'bin', 'Herwig')):
4818 input_files.append(pjoin(cwd, 'Herwig'))
4819 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
4820 if len(args) == 3:
4821 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
4822 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
4823 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
4824 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
4825 else:
4826 raise aMCatNLOError('Event file not present in %s' % \
4827 pjoin(self.me_dir, 'Events', self.run_name))
4828 else:
4829 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
4830
4831 if len(args) == 3:
4832 output_files.append('mcatnlo_run.log')
4833 else:
4834 output_files.append('mcatnlo_run_%s.log' % args[3])
4835 if args[1] == 'HEP':
4836 if len(args) == 3:
4837 fname = 'events'
4838 else:
4839 fname = 'events_%s' % args[3]
4840 if shower in ['PYTHIA8', 'HERWIGPP']:
4841 output_files.append(fname + '.hepmc.gz')
4842 else:
4843 output_files.append(fname + '.hep.gz')
4844 elif args[1] == 'TOP' or args[1] == 'HWU':
4845 if len(args) == 3:
4846 fname = 'histfile'
4847 else:
4848 fname = 'histfile_%s' % args[3]
4849 output_files.append(fname + '.tar')
4850 else:
4851 raise aMCatNLOError('Not a valid output argument for shower job : %d' % args[1])
4852
4853 self.cluster.submit2(exe, args, cwd=cwd,
4854 input_files=input_files, output_files=output_files)
4855
4856 else:
4857 return self.cluster.submit(exe, args, cwd=cwd)
4858
4860
4861
4862
        # Build the lists of files to transfer for an 'ajob' cluster job.
        # args layout (all strings): args[0] = channel, args[1] = job kind
        # ('born'/'all' for fixed order, 'F'/'B' for MC event generation),
        # args[2] = split id ('0' means unsplit), args[3] = MINT mode.
        output_files = []
        required_output = []
        # files always needed by the madevent executables
        input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                     pjoin(cwd, 'symfact.dat'),
                     pjoin(cwd, 'iproc.dat'),
                     pjoin(cwd, 'initial_states_map.dat'),
                     pjoin(cwd, 'configs_and_props_info.dat'),
                     pjoin(cwd, 'leshouche_info.dat'),
                     pjoin(cwd, 'FKS_params.dat')]

        # the param_card is needed when GoSam provides the virtuals
        if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
            input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # MadLoop resources: ship the tarball if present, otherwise create it
        # from the directory (only when the cluster requires file transfer)
        if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
                            cluster.need_transfer(self.options):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
        elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
                            cluster.need_transfer(self.options):
            tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                                                               dereference=True)
            tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
            tf.close()
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))

        if args[1] == 'born' or args[1] == 'all':
            # fixed-order (mintFO) job
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            if args[2] == '0':
                current = '%s_G%s' % (args[1],args[0])
            else:
                current = '%s_G%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)

            # files that must come back for the run to be considered done
            required_output.append('%s/results.dat' % current)
            required_output.append('%s/res_%s.dat' % (current,args[3]))
            required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
            required_output.append('%s/mint_grids' % current)
            required_output.append('%s/grid.MC_integer' % current)
            if args[3] != '0':
                required_output.append('%s/scale_pdf_dependence.dat' % current)

        elif args[1] == 'F' or args[1] == 'B':
            # event-generation (mintMC) job
            input_files.append(pjoin(cwd, 'madevent_mintMC'))

            if args[2] == '0':
                current = 'G%s%s' % (args[1],args[0])
            else:
                current = 'G%s%s_%s' % (args[1],args[0],args[2])
            if os.path.exists(pjoin(cwd,current)):
                input_files.append(pjoin(cwd, current))
            output_files.append(current)
            if args[2] > '0':
                # this is for the split-event generation
                output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
                required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))

            else:
                required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
                if args[3] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[3] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError('not valid arguments: %s' %(', '.join(args)))

        # add the PDF set when locally available
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)
        return input_files, output_files, required_output, args
4945
4946
4947 - def compile(self, mode, options):
4948 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
4949 specified in mode"""
4950
4951 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
4952
4953 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
4954 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
4955
4956 self.get_characteristics(pjoin(self.me_dir,
4957 'SubProcesses', 'proc_characteristics'))
4958
4959
4960 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
4961 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
4962 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
4963 test_log = pjoin(self.me_dir, 'test.log')
4964
4965
4966 self.make_opts_var = {}
4967 if self.proc_characteristics['has_loops'] and \
4968 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4969 self.make_opts_var['madloop'] = 'true'
4970
4971 self.update_status('Compiling the code', level=None, update_results=True)
4972
4973 libdir = pjoin(self.me_dir, 'lib')
4974 sourcedir = pjoin(self.me_dir, 'Source')
4975
4976
4977 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
4978
4979 if '+' in mode:
4980 mode = mode.split('+')[0]
4981 if mode in ['NLO', 'LO']:
4982 exe = 'madevent_mintFO'
4983 tests = ['test_ME']
4984 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
4985 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
4986 exe = 'madevent_mintMC'
4987 tests = ['test_ME', 'test_MC']
4988
4989 with open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w') as fsock:
4990 fsock.write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')
4991
4992
4993 p_dirs = [d for d in \
4994 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
4995
4996 self.do_treatcards('', amcatnlo=True, mode=mode)
4997
4998
4999 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
5000 for p_dir in p_dirs]) and options['nocompile']:
5001 return
5002
5003
5004 if os.path.exists(pjoin(libdir, 'PDFsets')):
5005 files.rm(pjoin(libdir, 'PDFsets'))
5006
5007
5008 if self.run_card['pdlabel'] == 'lhapdf' and \
5009 (self.banner.get_detail('run_card', 'lpp1') != 0 or \
5010 self.banner.get_detail('run_card', 'lpp2') != 0):
5011
5012 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
5013 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
5014 lhaid_list = self.run_card['lhaid']
5015 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
5016
5017 else:
5018 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
5019 logger.info('Using built-in libraries for PDFs')
5020
5021 self.make_opts_var['lhapdf'] = ""
5022
5023
5024 if self.run_card['iappl'] != 0:
5025 self.make_opts_var['applgrid'] = 'True'
5026
5027 for code in ['applgrid','amcfast']:
5028 try:
5029 p = subprocess.Popen([self.options[code], '--version'], \
5030 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
5031 except OSError:
5032 raise aMCatNLOError(('No valid %s installation found. \n' + \
5033 'Please set the path to %s-config by using \n' + \
5034 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
5035 else:
5036 output, _ = p.communicate()
5037 output.decode()
5038 if code == 'applgrid' and output < '1.4.63':
5039 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
5040 +' You are using %s',output)
5041 if code == 'amcfast' and output < '1.1.1':
5042 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
5043 +' You are using %s',output)
5044
5045
5046 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
5047 % (self.options['amcfast'],self.options['applgrid'])
5048 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
5049 text_out=[]
5050 for line in text:
5051 if line.strip().startswith('APPLLIBS=$'):
5052 line=appllibs
5053 text_out.append(line)
5054 with open(pjoin(self.me_dir,'Source','make_opts'),'w') as fsock:
5055 fsock.writelines(text_out)
5056 else:
5057 self.make_opts_var['applgrid'] = ""
5058
5059 if 'fastjet' in list(self.options.keys()) and self.options['fastjet']:
5060 self.make_opts_var['fastjet_config'] = self.options['fastjet']
5061
5062
5063 self.update_make_opts()
5064
5065
5066 self.update_status('Compiling source...', level=None)
5067 misc.compile(['clean4pdf'], cwd = sourcedir)
5068 misc.compile(cwd = sourcedir)
5069 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
5070 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
5071 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
5072 and os.path.exists(pjoin(libdir, 'libpdf.a')):
5073 logger.info(' ...done, continuing with P* directories')
5074 else:
5075 raise aMCatNLOError('Compilation failed')
5076
5077
5078 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
5079 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
5080 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
5081 if os.path.exists(pjoin(sourcedir,'StdHEP')):
5082 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
5083 try:
5084 misc.compile(['StdHEP'], cwd = sourcedir)
5085 except Exception as error:
5086 logger.debug(str(error))
5087 logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6")
5088 logger.info("details on the compilation error are available if the code is run with --debug flag")
5089 else:
5090 logger.info(' ...done.')
5091 else:
5092 logger.warning('Could not compile StdHEP because its'+\
5093 ' source directory could not be found in the SOURCE folder.\n'+\
5094 " Check the MG5_aMC option 'output_dependencies'.\n"+\
5095 " This will prevent the use of HERWIG6/Pythia6 shower.")
5096
5097
5098
5099 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
5100 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
5101 if os.path.exists(pjoin(sourcedir,'CutTools')):
5102 logger.info('Compiling CutTools (can take a couple of minutes) ...')
5103 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
5104 logger.info(' ...done.')
5105 else:
5106 raise aMCatNLOError('Could not compile CutTools because its'+\
5107 ' source directory could not be found in the SOURCE folder.\n'+\
5108 " Check the MG5_aMC option 'output_dependencies.'")
5109 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
5110 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
5111 raise aMCatNLOError('CutTools compilation failed.')
5112
5113
5114
5115 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
5116 libdir, 'libcts.a')))),'compiler_version.log')
5117 if os.path.exists(compiler_log_path):
5118 compiler_version_used = open(compiler_log_path,'r').read()
5119 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
5120 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
5121 if os.path.exists(pjoin(sourcedir,'CutTools')):
5122 logger.info('CutTools was compiled with a different fortran'+\
5123 ' compiler. Re-compiling it now...')
5124 misc.compile(['cleanCT'], cwd = sourcedir)
5125 misc.compile(['CutTools','-j1'], cwd = sourcedir, nb_core=1)
5126 logger.info(' ...done.')
5127 else:
5128 raise aMCatNLOError("CutTools installation in %s"\
5129 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
5130 " seems to have been compiled with a different compiler than"+\
5131 " the one specified in MG5_aMC. Please recompile CutTools.")
5132
5133
5134 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
5135 and os.path.exists(pjoin(sourcedir,'IREGI')):
5136 logger.info('Compiling IREGI (can take a couple of minutes) ...')
5137 misc.compile(['IREGI'], cwd = sourcedir)
5138 logger.info(' ...done.')
5139
5140 if os.path.exists(pjoin(libdir, 'libiregi.a')):
5141
5142
5143 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
5144 libdir, 'libiregi.a')))),'compiler_version.log')
5145 if os.path.exists(compiler_log_path):
5146 compiler_version_used = open(compiler_log_path,'r').read()
5147 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
5148 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
5149 if os.path.exists(pjoin(sourcedir,'IREGI')):
5150 logger.info('IREGI was compiled with a different fortran'+\
5151 ' compiler. Re-compiling it now...')
5152 misc.compile(['cleanIR'], cwd = sourcedir)
5153 misc.compile(['IREGI'], cwd = sourcedir)
5154 logger.info(' ...done.')
5155 else:
5156 raise aMCatNLOError("IREGI installation in %s"\
5157 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
5158 " seems to have been compiled with a different compiler than"+\
5159 " the one specified in MG5_aMC. Please recompile IREGI.")
5160
5161
5162 if self.proc_characteristics['has_loops'] and \
5163 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
5164 if mode in ['NLO', 'aMC@NLO', 'noshower']:
5165 tests.append('check_poles')
5166
5167
5168 self.update_status('Compiling directories...', level=None)
5169
5170 for test in tests:
5171 self.write_test_input(test)
5172
5173 try:
5174 import multiprocessing
5175 if not self.nb_core:
5176 try:
5177 self.nb_core = int(self.options['nb_core'])
5178 except TypeError:
5179 self.nb_core = multiprocessing.cpu_count()
5180 except ImportError:
5181 self.nb_core = 1
5182
5183 compile_options = copy.copy(self.options)
5184 compile_options['nb_core'] = self.nb_core
5185 compile_cluster = cluster.MultiCore(**compile_options)
5186 logger.info('Compiling on %d cores' % self.nb_core)
5187
5188 update_status = lambda i, r, f: self.donothing(i,r,f)
5189 for p_dir in p_dirs:
5190 compile_cluster.submit(prog = compile_dir,
5191 argument = [self.me_dir, p_dir, mode, options,
5192 tests, exe, self.options['run_mode']])
5193 try:
5194 compile_cluster.wait(self.me_dir, update_status)
5195 except Exception as error:
5196 logger.warning("Fail to compile the Subprocesses")
5197 if __debug__:
5198 raise
5199 compile_cluster.remove()
5200 self.do_quit('')
5201
5202 logger.info('Checking test output:')
5203 for p_dir in p_dirs:
5204 logger.info(p_dir)
5205 for test in tests:
5206 logger.info(' Result for %s:' % test)
5207
5208 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
5209
5210 self.check_tests(test, this_dir)
5211
5212
5215
5216
        """just call the correct parser for the test log.
        Skip check_poles for LOonly folders"""
        # test_ME and test_MC logs share the same format/parser
        if test in ['test_ME', 'test_MC']:
            return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
        # per the docstring, the presence of parton_lum_0.f marks an LOonly
        # folder, where the pole-cancellation check is skipped
        elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
            return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
5224
5225
        """read and parse the test_ME/MC.log file"""
        content = open(log).read()
        if 'FAILED' in content:
            # show the failing log in full and abort the run
            logger.info('Output of the failing test:\n'+content[:-1],'$MG:BOLD')
            raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
                'Please check that widths of final state particles (e.g. top) have been' + \
                ' set to 0 in the param_card.dat.')
        else:
            # collect the PASSED lines for the debug output
            lines = [l for l in content.split('\n') if 'PASSED' in l]
            logger.info(' Passed.')
            logger.debug('\n'+'\n'.join(lines))
5238
5239
        """reads and parse the check_poles.log file"""
        content = open(log).read()
        npass = 0
        nfail = 0
        # count passed/failed phase-space points; the second token of each
        # PASSED/FAILED line is the tolerance used for the check
        for line in content.split('\n'):
            if 'PASSED' in line:
                npass +=1
                tolerance = float(line.split()[1])
            if 'FAILED' in line:
                nfail +=1
                tolerance = float(line.split()[1])

        if nfail + npass == 0:
            logger.warning('0 points have been tried')
            return

        # abort when more than 10% of the points show non-cancelling poles
        if float(nfail)/float(nfail+npass) > 0.1:
            raise aMCatNLOError('Poles do not cancel, run cannot continue')
        else:
            logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
                 %(npass, nfail+npass, tolerance))
5262
5263
5285
5286
5287 action_switcher = AskRunNLO
5288
5290 """Ask the question when launching generate_events/multi_run"""
5291
5292 if 'parton' not in options:
5293 options['parton'] = False
5294 if 'reweightonly' not in options:
5295 options['reweightonly'] = False
5296
5297 if mode == 'auto':
5298 mode = None
5299 if not mode and (options['parton'] or options['reweightonly']):
5300 mode = 'noshower'
5301
5302 passing_cmd = []
5303 for key,value in switch.keys():
5304 passing_cmd.append('%s=%s' % (key,value))
5305
5306 if 'do_reweight' in options and options['do_reweight']:
5307 passing_cmd.append('reweight=ON')
5308 if 'do_madspin' in options and options['do_madspin']:
5309 passing_cmd.append('madspin=ON')
5310
5311 force = self.force
5312 if mode == 'onlyshower':
5313 passing_cmd.append('onlyshower')
5314 force = True
5315 elif mode:
5316 passing_cmd.append(mode)
5317
5318 switch, cmd_switch = self.ask('', '0', [], ask_class = self.action_switcher,
5319 mode=mode, force=force,
5320 first_cmd=passing_cmd,
5321 return_instance=True)
5322
5323 if 'mode' in switch:
5324 mode = switch['mode']
5325
5326
5327 if not mode or mode == 'auto':
5328 if switch['order'] == 'LO':
5329 if switch['runshower']:
5330 mode = 'aMC@LO'
5331 elif switch['fixed_order'] == 'ON':
5332 mode = 'LO'
5333 else:
5334 mode = 'noshowerLO'
5335 elif switch['order'] == 'NLO':
5336 if switch['runshower']:
5337 mode = 'aMC@NLO'
5338 elif switch['fixed_order'] == 'ON':
5339 mode = 'NLO'
5340 else:
5341 mode = 'noshower'
5342 logger.info('will run in mode: %s' % mode)
5343
5344 if mode == 'noshower':
5345 if switch['shower'] == 'OFF':
5346 logger.warning("""You have chosen not to run a parton shower.
5347 NLO events without showering are NOT physical.
5348 Please, shower the LesHouches events before using them for physics analyses.
5349 You have to choose NOW which parton-shower you WILL use and specify it in the run_card.""")
5350 else:
5351 logger.info("""Your Parton-shower choice is not available for running.
5352 The events will be generated for the associated Parton-Shower.
5353 Remember that NLO events without showering are NOT physical.""", '$MG:BOLD')
5354
5355
5356
5357 cards = ['param_card.dat', 'run_card.dat']
5358 ignore = []
5359 if mode in ['LO', 'NLO']:
5360 options['parton'] = True
5361 ignore = ['shower_card.dat', 'madspin_card.dat']
5362 cards.append('FO_analyse_card.dat')
5363 else:
5364 if switch['madspin'] != 'OFF':
5365 cards.append('madspin_card.dat')
5366 if switch['reweight'] != 'OFF':
5367 cards.append('reweight_card.dat')
5368 if switch['madanalysis'] in ['HADRON', 'ON']:
5369 cards.append('madanalysis5_hadron_card.dat')
5370 if 'aMC@' in mode:
5371 cards.append('shower_card.dat')
5372 if mode == 'onlyshower':
5373 cards = ['shower_card.dat']
5374 if options['reweightonly']:
5375 cards = ['run_card.dat']
5376
5377 self.keep_cards(cards, ignore)
5378
5379 if mode =='onlyshower':
5380 cards = ['shower_card.dat']
5381
5382
5383
5384 first_cmd = cmd_switch.get_cardcmd()
5385
5386 if not options['force'] and not self.force:
5387 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)
5388
5389 self.banner = banner_mod.Banner()
5390
5391
5392 for card in cards:
5393 self.banner.add(pjoin(self.me_dir, 'Cards', card))
5394
5395 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
5396 self.banner.add_text('run_settings', run_settings)
5397
5398 if not mode =='onlyshower':
5399 self.run_card = self.banner.charge_card('run_card')
5400 self.run_tag = self.run_card['run_tag']
5401
5402 if not hasattr(self, 'run_name') or not self.run_name:
5403 self.run_name = self.find_available_run_name(self.me_dir)
5404
5405 if self.run_name.startswith('run_'):
5406 if mode in ['LO','aMC@LO','noshowerLO']:
5407 self.run_name += '_LO'
5408 self.set_run_name(self.run_name, self.run_tag, 'parton')
5409 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
5410 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
5411 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
5412 logger.warning("""You are running with FxFx merging enabled. To be able to merge
5413 samples of various multiplicities without double counting, you
5414 have to remove some events after showering 'by hand'. Please
5415 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
5416 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
5417 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
5418 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8' and self.run_card['parton_shower'].upper() != 'HERWIGPP':
5419 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
5420 "Type \'n\' to stop or \'y\' to continue"
5421 answers = ['n','y']
5422 answer = self.ask(question, 'n', answers)
5423 if answer == 'n':
5424 error = '''Stop opertation'''
5425 self.ask_run_configuration(mode, options)
5426
5427 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
5428
5429 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
5430 if 'aMC@' in mode or mode == 'onlyshower':
5431 self.shower_card = self.banner.charge_card('shower_card')
5432
5433 elif mode in ['LO', 'NLO']:
5434 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
5435 self.analyse_card = self.banner.charge_card('FO_analyse_card')
5436
5437 return mode
5438
    """The command line processor of MadGraph"""

    # --- 'compile' command: usage text and option parser ------------------
    _compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                "   MODE can be either FO, for fixed-order computations, \n" + \
                "   or MC for matching with parton-shower monte-carlos. \n" + \
                "   (if omitted, it is set to MC)\n"
    _compile_parser = misc.OptionParser(usage=_compile_usage)
    _compile_parser.add_option("-f", "--force", default=False, action='store_true',
                                help="Use the card present in the directory for the launch, without editing them")

    # --- 'launch' command: usage text and option parser -------------------
    _launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

    _launch_parser = misc.OptionParser(usage=_launch_usage)
    _launch_parser.add_option("-f", "--force", default=False, action='store_true',
                                help="Use the card present in the directory for the launch, without editing them")
    _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                    "to shower the file in order to get physical results)")
    _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                            "the last available results")
    _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
    _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")

    # --- 'generate_events' command: same semantics as 'launch' ------------
    _generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

    _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
    _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                                help="Use the card present in the directory for the generate_events, without editing them")
    _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                    "to shower the file in order to get physical results)")
    _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                            "the last available results")
    _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")

    # --- 'calculate_xsect' command ----------------------------------------
    _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

    _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
    _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                                help="Use the card present in the directory for the launch, without editing them")
    _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                            "the last available results")

    # --- 'shower' command --------------------------------------------------
    _shower_usage = 'shower run_name [options]\n' + \
            '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
            '   all the information (e.g. number of events, MonteCarlo, ...\n' + \
            '   are directly read from the header of the event file\n'
    _shower_parser = misc.OptionParser(usage=_shower_usage)
    _shower_parser.add_option("-f", "--force", default=False, action='store_true',
                                help="Use the shower_card present in the directory for the launch, without editing")

if '__main__' == __name__:
    # Running as a standalone script: check the interpreter, set up the
    # python path, then parse options and dispatch (below).

    # Check that the python version is valid.
    import sys
    # NOTE(review): this guard exits on any python 3.x interpreter even
    # though the module imports six/__future__ for 2/3 compatibility --
    # confirm whether the check should accept python 3.
    if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
        sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
               'Please upgrate your version of python.')

    import os
    import optparse
    # Get the parent directory of the script's real location (the process
    # directory) and add it to the python path.
    root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
    sys.path.insert(0, root_path)
5573 - def error(self, msg=''):
5575
    # Command-line option definitions for the standalone script.
    usage = "usage: %prog [options] [FILE] "
    parser = MyOptParser(usage=usage)
    parser.add_option("-l", "--logging", default='INFO',
                      help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
    parser.add_option("","--web", action="store_true", default=False, dest='web', \
                     help='force toce to be in secure mode')
    parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
                     help='force to launch debug mode')
    parser_error = ''
    done = False
    # Try parsing progressively shorter prefixes of argv; on the first
    # successful parse the remaining tokens are appended to args so they can
    # be interpreted as a command instead of options.
    for i in range(len(sys.argv)-1):
        try:
            (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
            done = True
        except MyOptParser.InvalidOption as error:
            pass
        else:
            args += sys.argv[len(sys.argv)-i:]
    if not done:
        # no prefix parsed cleanly: parse once more to report the error
        try:
            (options, args) = parser.parse_args()
        except MyOptParser.InvalidOption as error:
            print(error)
            sys.exit(2)

    if len(args) == 0:
        args = ''

    import subprocess
    import logging
    import logging.config
    # Configure logging according to the --logging option (name or number).
    import internal.coloring_logging
    try:
        if __debug__ and options.logging == 'INFO':
            options.logging = 'DEBUG'
        if options.logging.isdigit():
            level = int(options.logging)
        else:
            level = eval('logging.' + options.logging)
        print(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
        logging.root.setLevel(level)
        logging.getLogger('madgraph').setLevel(level)
    except:
        raise
        # NOTE(review): this 'pass' is unreachable (the raise above always
        # leaves the handler) -- kept as-is, flagged for cleanup.
        pass

    # Dispatch the command-line arguments to the aMC@NLO interpreter.
    try:
        if args:
            # a command was provided directly on the command line
            if '--web' in args:
                i = args.index('--web')
                args.pop(i)
                cmd_line = aMCatNLOCmd(me_dir=os.path.dirname(root_path),force_run=True)
            else:
                cmd_line = aMCatNLOCmdShell(me_dir=os.path.dirname(root_path),force_run=True)

            # check that the first token corresponds to a do_* command
            if not hasattr(cmd_line, 'do_%s' % args[0]):
                if parser_error:
                    print(parser_error)
                    print('and %s can not be interpreted as a valid command.' % args[0])
                else:
                    print('ERROR: %s  not a valid command. Please retry' % args[0])
            else:
                cmd_line.use_rawinput = False
                cmd_line.run_cmd(' '.join(args))
                cmd_line.run_cmd('quit')

    except KeyboardInterrupt:
        print('quit on KeyboardInterrupt')
        pass
5652