15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39
40 try:
41 import readline
42 GNU_SPLITTING = ('GNU' in readline.__doc__)
43 except:
44 GNU_SPLITTING = True
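# GNU readline and BSD libedit split the completion line differently; default to
# GNU-style splitting when the readline module (or its docstring) is unavailable.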
45
46 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
47 root_path = os.path.split(root_path)[0]
48 sys.path.insert(0, os.path.join(root_path,'bin'))
49
50
51 pjoin = os.path.join
52
53 logger = logging.getLogger('madgraph.stdout')
54 logger_stderr = logging.getLogger('madgraph.stderr')
55
56 try:
57
58 import madgraph.interface.extended_cmd as cmd
59 import madgraph.interface.common_run_interface as common_run
60 import madgraph.iolibs.files as files
61 import madgraph.iolibs.save_load_object as save_load_object
62 import madgraph.various.banner as banner_mod
63 import madgraph.various.cluster as cluster
64 import madgraph.various.misc as misc
65 import madgraph.various.gen_crossxhtml as gen_crossxhtml
66 import madgraph.various.sum_html as sum_html
67 import madgraph.various.shower_card as shower_card
68 import madgraph.various.FO_analyse_card as analyse_card
69
70 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error
71 aMCatNLO = False
72 except ImportError, error:
73 logger.debug(error)
74
75 import internal.extended_cmd as cmd
76 import internal.common_run_interface as common_run
77 import internal.banner as banner_mod
78 import internal.misc as misc
79 from internal import InvalidCmd, MadGraph5Error
80 import internal.files as files
81 import internal.cluster as cluster
82 import internal.save_load_object as save_load_object
83 import internal.gen_crossxhtml as gen_crossxhtml
84 import internal.sum_html as sum_html
85 import internal.shower_card as shower_card
86 import internal.FO_analyse_card as analyse_card
87 aMCatNLO = True
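# The madgraph.* modules are used when running inside a full MadGraph5_aMC@NLO
# installation (aMCatNLO = False); otherwise the internal.* copies shipped with the
# process directory are used and aMCatNLO is set to True.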
88
91
92
94 """compile the directory p_dir
95 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
96 this function must not be a class method, so that the compilation
97 can be run on several cores"""
98
99 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
100 logger.info(' Compiling %s...' % p_dir)
101
102 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
103
104 try:
105
106
107 for test in tests:
108 misc.compile([test], cwd = this_dir, job_specs = False)
109 input = pjoin(me_dir, '%s_input.txt' % test)
110
111 misc.call(['./%s' % (test)], cwd=this_dir,
112 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'))
113
114 if not options['reweightonly']:
115 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
116 open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
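# gensym reads the run mode from gensym_input.txt on its stdin; its screen output
# is kept in gensym.log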
117 misc.call(['./gensym'],cwd= this_dir,
118 stdin=open(pjoin(this_dir, 'gensym_input.txt')),
119 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'))
120
121 misc.compile([exe], cwd=this_dir, job_specs = False)
122 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
123 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
124
125 logger.info(' %s done.' % p_dir)
126 return 0
127 except MadGraph5Error, msg:
128 return msg
129
130
131 def check_compiler(options, block=False):
132 """check that the current fortran compiler is gfortran 4.6 or later.
133 If block is True, stop the execution, otherwise just print a warning"""
134
135 msg = 'In order to be able to run MadGraph5_aMC@NLO at NLO, you need to have ' + \
136 'gfortran 4.6 or later installed.\n%s has been detected\n'+\
137 'Note that you can still run all MadEvent runs without any problem!'
138
139 if options['fortran_compiler']:
140 compiler = options['fortran_compiler']
141 elif misc.which('gfortran'):
142 compiler = 'gfortran'
143 else: compiler = ''  # avoid a NameError when no fortran compiler is found
144 if 'gfortran' not in compiler:
145 if block:
146 raise aMCatNLOError(msg % compiler)
147 else:
148 logger.warning(msg % compiler)
149 else:
150 curr_version = misc.get_gfortran_version(compiler)
151 if [int(v) for v in curr_version.split('.')[:2] if v.isdigit()] < [4, 6]:  # numeric check: a plain string comparison fails for e.g. gfortran 4.10
152 if block:
153 raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
154 else:
155 logger.warning(msg % (compiler + ' ' + curr_version))
156
157
158
159
160
161
162 class CmdExtended(common_run.CommonRunCmd):
163 """Particularisation of the cmd command for aMCatNLO"""
164
165
166 next_possibility = {
167 'start': [],
168 }
169
170 debug_output = 'ME5_debug'
171 error_debug = 'Please report this bug on https://bugs.launchpad.net/madgraph5\n'
172 error_debug += 'More information is found in \'%(debug)s\'.\n'
173 error_debug += 'Please attach this file to your report.'
174
175 config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/madgraph5\n'
176
177
178 keyboard_stop_msg = """stopping all operations
179 in order to quit MadGraph5_aMC@NLO please enter exit"""
180
181
182 InvalidCmd = InvalidCmd
183 ConfigurationError = aMCatNLOError
184
185 def __init__(self, me_dir, options, *arg, **opt):
186 """Init history and line continuation"""
187
188
189 self.force = False
190
191
192
193 info = misc.get_pkg_info()
194 info_line = ""
195 if info and info.has_key('version') and info.has_key('date'):
196 len_version = len(info['version'])
197 len_date = len(info['date'])
198 if len_version + len_date < 30:
199 info_line = "#* VERSION %s %s %s *\n" % \
200 (info['version'],
201 (30 - len_version - len_date) * ' ',
202 info['date'])
203 else:
204 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
205 info_line = "#* VERSION %s %s *\n" % \
206 (version, (24 - len(version)) * ' ')
207
208
209
210 self.history_header = \
211 '#************************************************************\n' + \
212 '#* MadGraph5_aMC@NLO *\n' + \
213 '#* *\n' + \
214 "#* * * *\n" + \
215 "#* * * * * *\n" + \
216 "#* * * * * 5 * * * * *\n" + \
217 "#* * * * * *\n" + \
218 "#* * * *\n" + \
219 "#* *\n" + \
220 "#* *\n" + \
221 info_line + \
222 "#* *\n" + \
223 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
224 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
225 "#* and *\n" + \
226 "#* http://amcatnlo.cern.ch *\n" + \
227 '#* *\n' + \
228 '#************************************************************\n' + \
229 '#* *\n' + \
230 '#* Command File for aMCatNLO *\n' + \
231 '#* *\n' + \
232 '#* run as ./bin/aMCatNLO.py filename *\n' + \
233 '#* *\n' + \
234 '#************************************************************\n'
235
236 if info_line:
237 info_line = info_line[1:]
238
239 logger.info(\
240 "************************************************************\n" + \
241 "* *\n" + \
242 "* W E L C O M E to M A D G R A P H 5 *\n" + \
243 "* a M C @ N L O *\n" + \
244 "* *\n" + \
245 "* * * *\n" + \
246 "* * * * * *\n" + \
247 "* * * * * 5 * * * * *\n" + \
248 "* * * * * *\n" + \
249 "* * * *\n" + \
250 "* *\n" + \
251 info_line + \
252 "* *\n" + \
253 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
254 "* http://amcatnlo.cern.ch *\n" + \
255 "* *\n" + \
256 "* Type 'help' for in-line help. *\n" + \
257 "* *\n" + \
258 "************************************************************")
259 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
260
261
263 """return the history header"""
264 return self.history_header % misc.get_time_info()
265
267 """action to perform to close nicely on a keyboard interrupt"""
268 try:
269 if hasattr(self, 'cluster'):
270 logger.info('rm jobs on queue')
271 self.cluster.remove()
272 if hasattr(self, 'results'):
273 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
274 self.add_error_log_in_html(KeyboardInterrupt)
275 except:
276 pass
277
278 def postcmd(self, stop, line):
279 """ Update the status of the run for finishing interactive command """
280
281
282 self.force = False
283
284 if not self.use_rawinput:
285 return stop
286
287
288 arg = line.split()
289 if len(arg) == 0:
290 return stop
291 elif str(arg[0]) in ['exit','quit','EOF']:
292 return stop
293
294 try:
295 self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
296 level=None, error=True)
297 except Exception:
298 misc.sprint('self.update_status fails', log=logger)
299 pass
300
306
312
318
319
320
321
322
323
324 class HelpToCmd(object):
325 """ The Series of help routine for the aMCatNLOCmd"""
326
330
331 def help_banner_run(self):
332 logger.info("syntax: banner_run Path|RUN [--run_options]")
333 logger.info("-- Reproduce a run following a given banner")
334 logger.info(" One of the following arguments is required:")
335 logger.info(" Path should be the path of a valid banner.")
336 logger.info(" RUN should be the name of a run of the current directory")
337 self.run_options_help([('-f','answer all questions by default'),
338 ('--name=X', 'Define the name associated with the new run')])
339
340
344
349
350
354
358
359
360 def help_open(self):
361 logger.info("syntax: open FILE ")
362 logger.info("-- open a file with the appropriate editor.")
363 logger.info(' If FILE is one of index.html, param_card.dat, run_card.dat,')
364 logger.info(' the path to the last created/used directory is used')
365
366 def run_options_help(self, data):
367 if data:
368 logger.info('-- local options:')
369 for name, info in data:
370 logger.info(' %s : %s' % (name, info))
371
372 logger.info("-- session options:")
373 logger.info(" Note that those options will be kept for the current session")
374 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
375 logger.info(" --multicore : Run in multi-core configuration")
376 logger.info(" --nb_core=X : limit the number of cores to use to X.")
377
378
379
380
381
382
383
384 class CheckValidForCmd(object):
385 """ The Series of check routine for the aMCatNLOCmd"""
386
387 def check_shower(self, args, options):
388 """Check the validity of the line. args[0] is the run_directory"""
389
390 if options['force']:
391 self.force = True
392
393 if len(args) == 0:
394 self.help_shower()
395 raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
396 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
397 raise self.InvalidCmd, 'Directory %s does not exist' % \
398 pjoin(os.getcwd(), 'Events', args[0])
399
400 self.set_run_name(args[0], level= 'shower')
401 args[0] = pjoin(self.me_dir, 'Events', args[0])
402
403 def check_plot(self, args):
404 """Check the argument for the plot command
405 plot run_name modes"""
406
407
408 madir = self.options['madanalysis_path']
409 td = self.options['td_path']
410
411 if not madir or not td:
412 logger.info('Retry to read configuration file to find madanalysis/td')
413 self.set_configuration()
414
415 madir = self.options['madanalysis_path']
416 td = self.options['td_path']
417
418 if not madir:
419 error_msg = 'No Madanalysis path correctly set. '
420 error_msg += 'Please use the set command to define the path and retry. '
421 error_msg += 'You can also define it in the configuration file.'
422 raise self.InvalidCmd(error_msg)
423 if not td:
424 error_msg = 'No path to td directory correctly set. '
425 error_msg += 'Please use the set command to define the path and retry. '
426 error_msg += 'You can also define it in the configuration file.'
427 raise self.InvalidCmd(error_msg)
428
429 if len(args) == 0:
430 if not hasattr(self, 'run_name') or not self.run_name:
431 self.help_plot()
432 raise self.InvalidCmd('No run name currently defined. Please add this information.')
433 args.append('all')
434 return
435
436
437 if args[0] not in self._plot_mode:
438 self.set_run_name(args[0], level='plot')
439 del args[0]
440 if len(args) == 0:
441 args.append('all')
442 elif not self.run_name:
443 self.help_plot()
444 raise self.InvalidCmd('No run name currently defined. Please add this information.')
445
446 for arg in args:
447 if arg not in self._plot_mode and arg != self.run_name:
448 self.help_plot()
449 raise self.InvalidCmd('unknown option %s' % arg)
450
451 def check_pgs(self, arg):
452 """Check the arguments for the pgs command
453 syntax: pgs [NAME]
454 Note that other options have already been removed at this point
455 """
456
457
458 if not self.options['pythia-pgs_path']:
459 logger.info('Retry to read configuration file to find pythia-pgs path')
460 self.set_configuration()
461
462 if not self.options['pythia-pgs_path'] or not \
463 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
464 error_msg = 'No pythia-pgs path correctly set. '
465 error_msg += 'Please use the set command to define the path and retry. '
466 error_msg += 'You can also define it in the configuration file.'
467 raise self.InvalidCmd(error_msg)
468
469 tag = [a for a in arg if a.startswith('--tag=')]
470 if tag:
471 arg.remove(tag[0])
472 tag = tag[0][6:]
473
474
475 if len(arg) == 0 and not self.run_name:
476 if self.results.lastrun:
477 arg.insert(0, self.results.lastrun)
478 else:
479 raise self.InvalidCmd('No run name currently defined. Please add this information.')
480
481 if len(arg) == 1 and self.run_name == arg[0]:
482 arg.pop(0)
483
484 if not len(arg) and \
485 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
486 self.help_pgs()
487 raise self.InvalidCmd('''No pythia_events.hep file currently available.
488 Please specify a valid run_name''')
489
490 lock = None
491 if len(arg) == 1:
492 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
493 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
494 'events_*.hep.gz'))
495 if not filenames:
496 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
497 else:
498 input_file = filenames[0]
499 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
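# gunzip the stored .hep.gz events into Events/pythia_events.hep in the background;
# the handle returned by asyncrone_launch is passed back to the caller as 'lock'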
500 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
501 argument=['-c', input_file])
502 else:
503 if tag:
504 self.run_card['run_tag'] = tag
505 self.set_run_name(self.run_name, tag, 'pgs')
506
507 return lock
508
509
510 def check_delphes(self, arg):
511 """Check the arguments for the delphes command
512 syntax: delphes [NAME]
513 Note that other options have already been removed at this point
514 """
515
516
517 if not self.options['delphes_path']:
518 logger.info('Retry to read configuration file to find delphes path')
519 self.set_configuration()
520
521 if not self.options['delphes_path']:
522 error_msg = 'No delphes path correctly set. '
523 error_msg += 'Please use the set command to define the path and retry. '
524 error_msg += 'You can also define it in the configuration file.'
525 raise self.InvalidCmd(error_msg)
526
527 tag = [a for a in arg if a.startswith('--tag=')]
528 if tag:
529 arg.remove(tag[0])
530 tag = tag[0][6:]
531
532
533 if len(arg) == 0 and not self.run_name:
534 if self.results.lastrun:
535 arg.insert(0, self.results.lastrun)
536 else:
537 raise self.InvalidCmd('No run name currently defined. Please add this information.')
538
539 if len(arg) == 1 and self.run_name == arg[0]:
540 arg.pop(0)
541
542 if not len(arg) and \
543 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
544 self.help_pgs()
545 raise self.InvalidCmd('''No pythia_events.hep file currently available.
546 Please specify a valid run_name''')
547
548 if len(arg) == 1:
549 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
550 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
551 'events_*.hep.gz'))
552 if not filenames:
553 raise self.InvalidCmd('No events file corresponding to %s run with tag %s: %s '\
554 % (self.run_name, prev_tag,
555 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
556 else:
557 input_file = filenames[0]
558 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
559 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
560 argument=['-c', input_file])
561 else:
562 if tag:
563 self.run_card['run_tag'] = tag
564 self.set_run_name(self.run_name, tag, 'delphes')
565
566 def check_calculate_xsect(self, args, options):
567 """check the validity of the line. args is ORDER,
568 ORDER being LO or NLO. If no mode is passed, NLO is used"""
569
570
571
572 if options['force']:
573 self.force = True
574
575 if not args:
576 args.append('NLO')
577 return
578
579 if len(args) > 1:
580 self.help_calculate_xsect()
581 raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'
582
583 elif len(args) == 1:
584 if not args[0] in ['NLO', 'LO']:
585 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
586 mode = args[0]
587
588
589 if options['multicore'] and options['cluster']:
590 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
591 ' are not compatible. Please choose one.'
592
593
594 def check_generate_events(self, args, options):
595 """check the validity of the line. args is ORDER,
596 ORDER being LO or NLO. If no mode is passed, NLO is used"""
597
598
599
600 if not args:
601 args.append('NLO')
602 return
603
604 if len(args) > 1:
605 self.help_generate_events()
606 raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'
607
608 elif len(args) == 1:
609 if not args[0] in ['NLO', 'LO']:
610 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[0]
611 mode = args[0]
612
613
614 if options['multicore'] and options['cluster']:
615 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
616 ' are not compatible. Please choose one.'
617
618 def check_banner_run(self, args):
619 """check the validity of the line"""
620
621 if len(args) == 0:
622 self.help_banner_run()
623 raise self.InvalidCmd('banner_run requires at least one argument.')
624
625 tag = [a[6:] for a in args if a.startswith('--tag=')]
626
627
628 if os.path.exists(args[0]):
629 type ='banner'
630 format = self.detect_card_type(args[0])
631 if format != 'banner':
632 raise self.InvalidCmd('The file is not a valid banner.')
633 elif tag:
634 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
635 (args[0], tag))
636 if not os.path.exists(args[0]):
637 raise self.InvalidCmd('No banner associated with this name and tag.')
638 else:
639 name = args[0]
640 type = 'run'
641 banners = glob.glob(pjoin(self.me_dir,'Events', args[0], '*_banner.txt'))
642 if not banners:
643 raise self.InvalidCmd('No banner associated with this name.')
644 elif len(banners) == 1:
645 args[0] = banners[0]
646 else:
647
648 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
649 tag = self.ask('which tag do you want to use?', tags[0], tags)
650 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
651 (args[0], tag))
652
653 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
654 if run_name:
655 try:
656 self.exec_cmd('remove %s all banner -f' % run_name)
657 except Exception:
658 pass
659 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
660 elif type == 'banner':
661 self.set_run_name(self.find_available_run_name(self.me_dir))
662 elif type == 'run':
663 if not self.results[name].is_empty():
664 run_name = self.find_available_run_name(self.me_dir)
665 logger.info('Run %s is not empty so will use run_name: %s' % \
666 (name, run_name))
667 self.set_run_name(run_name)
668 else:
669 try:
670 self.exec_cmd('remove %s all banner -f' % run_name)
671 except Exception:
672 pass
673 self.set_run_name(name)
674
675
676
677 def check_launch(self, args, options):
678 """check the validity of the line. args is MODE
679 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
680
681
682
683 if options['force']:
684 self.force = True
685
686
687 if not args:
688 args.append('auto')
689 return
690
691 if len(args) > 1:
692 self.help_launch()
693 raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'
694
695 elif len(args) == 1:
696 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
697 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
698 mode = args[0]
699
700
701 if options['multicore'] and options['cluster']:
702 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
703 ' are not compatible. Please choose one.'
704 if mode == 'NLO' and options['reweightonly']:
705 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
706
707
708 def check_compile(self, args, options):
709 """check the validity of the line. args is MODE
710 MODE being FO or MC. If no mode is passed, MC is used"""
711
712
713
714 if options['force']:
715 self.force = True
716
717 if not args:
718 args.append('MC')
719 return
720
721 if len(args) > 1:
722 self.help_compile()
723 raise self.InvalidCmd, 'Invalid Syntax: Too many arguments'
724
725 elif len(args) == 1:
726 if not args[0] in ['MC', 'FO']:
727 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
728 mode = args[0]
729
730
731
732
733
734
735
736 class CompleteForCmd(CheckValidForCmd):
737 """ The Series of help routine for the MadGraphCmd"""
738
739 def complete_launch(self, text, line, begidx, endidx):
740 """auto-completion for launch command"""
741
742 args = self.split_arg(line[0:begidx])
743 if len(args) == 1:
744
745 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
746 elif len(args) == 2 and line[begidx-1] == '@':
747 return self.list_completion(text,['LO','NLO'],line)
748 else:
749 opts = []
750 for opt in _launch_parser.option_list:
751 opts += opt._long_opts + opt._short_opts
752 return self.list_completion(text, opts, line)
753
754 def complete_banner_run(self, text, line, begidx, endidx):
755 "Complete the banner run command"
756 try:
757
758
759 args = self.split_arg(line[0:begidx], error=False)
760
761 if args[-1].endswith(os.path.sep):
762 return self.path_completion(text,
763 os.path.join('.',*[a for a in args \
764 if a.endswith(os.path.sep)]))
765
766
767 if len(args) > 1:
768
769 tags = glob.glob(pjoin(self.me_dir, 'Events' , args[1],'%s_*_banner.txt' % args[1]))
770 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
771
772 if args[-1] != '--tag=':
773 tags = ['--tag=%s' % t for t in tags]
774 else:
775 return self.list_completion(text, tags)
776 return self.list_completion(text, tags +['--name=','-f'], line)
777
778
779 possibilites = {}
780
781 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
782 if a.endswith(os.path.sep)]))
783 if os.path.sep in line:
784 return comp
785 else:
786 possibilites['Path from ./'] = comp
787
788 run_list = glob.glob(pjoin(self.me_dir, 'Events', '*','*_banner.txt'))
789 run_list = [n.rsplit('/',2)[1] for n in run_list]
790 possibilites['RUN Name'] = self.list_completion(text, run_list)
791
792 return self.deal_multiple_categories(possibilites)
793
794
795 except Exception, error:
796 print error
797
798
811
824
825 def complete_generate_events(self, text, line, begidx, endidx):
826 """auto-completion for generate_events command
827 call the completion for launch"""
828 return self.complete_launch(text, line, begidx, endidx)
829
830
832 args = self.split_arg(line[0:begidx])
833 if len(args) == 1:
834
835 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe.gz'))
836 data = [n.rsplit('/',2)[1] for n in data]
837 tmp1 = self.list_completion(text, data)
838 if not self.run_name:
839 return tmp1
840
841 def complete_plot(self, text, line, begidx, endidx):
842 """ Complete the plot command """
843
844 args = self.split_arg(line[0:begidx], error=False)
845
846 if len(args) == 1:
847
848 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe*'))
849 data = [n.rsplit('/',2)[1] for n in data]
850 tmp1 = self.list_completion(text, data)
851 if not self.run_name:
852 return tmp1
853
854 if len(args) > 1:
855 return self.list_completion(text, self._plot_mode)
856
857 def complete_pgs(self, text, line, begidx, endidx):
858 "Complete the pgs command"
859 args = self.split_arg(line[0:begidx], error=False)
860 if len(args) == 1:
861
862 data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events_*.hep.gz'))
863 data = [n.rsplit('/',2)[1] for n in data]
864 tmp1 = self.list_completion(text, data)
865 if not self.run_name:
866 return tmp1
867 else:
868 tmp2 = self.list_completion(text, self._run_options + ['-f',
869 '--tag=' ,'--no_default'], line)
870 return tmp1 + tmp2
871 else:
872 return self.list_completion(text, self._run_options + ['-f',
873 '--tag=','--no_default'], line)
874
875 complete_delphes = complete_pgs
876
879
880
881
882
883 class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
884 """The command line processor of MadGraph"""
885
886
887 true = ['T','.true.',True,'true']
888
889 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
890 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
891 _calculate_decay_options = ['-f', '--accuracy=0.']
892 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
893 _plot_mode = ['all', 'parton','shower','pgs','delphes']
894 _clean_mode = _plot_mode + ['channel', 'banner']
895 _display_opts = ['run_name', 'options', 'variable']
896
897
898 web = False
899 cluster_mode = 0
900 queue = 'madgraph'
901 nb_core = None
902
903 next_possibility = {
904 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
905 'help generate_events'],
906 'generate_events': ['generate_events [OPTIONS]', 'shower'],
907 'launch': ['launch [OPTIONS]', 'shower'],
908 'shower' : ['generate_events [OPTIONS]']
909 }
910
911
912
913 def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
914 """ add information to the cmd """
915
916 self.start_time = 0
917 CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
918
919
920 self.mode = 'aMCatNLO'
921 self.nb_core = 0
922 self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))
923
924
925 if os.path.exists(pjoin(self.me_dir,'HTML','results.pkl')):
926 self.results = save_load_object.load_from_file(pjoin(self.me_dir,'HTML','results.pkl'))
927 self.results.resetall(self.me_dir)
928 self.last_mode = self.results[self.results.lastrun][-1]['run_mode']
929 else:
930 model = self.find_model_name()
931 process = self.process
932 self.results = gen_crossxhtml.AllResultsNLO(model, process, self.me_dir)
933 self.last_mode = ''
934 self.results.def_web_mode(self.web)
935
936 proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()
937
938 if not '[real=QCD]' in proc_card:
939 check_compiler(self.options, block=True)
940
941
942
943 def do_shower(self, line):
944 """ run the shower on a given parton level file """
945 argss = self.split_arg(line)
946 (options, argss) = _launch_parser.parse_args(argss)
947
948 options = options.__dict__
949 options['reweightonly'] = False
950 self.check_shower(argss, options)
951 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
952 self.ask_run_configuration('onlyshower', options)
953 self.run_mcatnlo(evt_file)
954
955 self.update_status('', level='all', update_results=True)
956
957
958 def do_plot(self, line):
959 """Create the plot for a given run"""
960
961
962 args = self.split_arg(line)
963
964 self.check_plot(args)
965 logger.info('plot for run %s' % self.run_name)
966
967 if not self.force:
968 self.ask_edit_cards([], args, plot=True)
969
970 if any([arg in ['parton'] for arg in args]):
971 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
972 if os.path.exists(filename+'.gz'):
973 misc.gunzip(filename)
974 if os.path.exists(filename):
975 logger.info('Found events.lhe file for run %s' % self.run_name)
976 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
977 self.create_plot('parton')
978 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
979 misc.gzip(filename)
980
981 if any([arg in ['all','parton'] for arg in args]):
982 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
983 if os.path.exists(filename):
984 logger.info('Found MADatNLO.top file for run %s' % \
985 self.run_name)
986 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
987 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
988
989 if not os.path.isdir(plot_dir):
990 os.makedirs(plot_dir)
991 top_file = pjoin(plot_dir, 'plots.top')
992 files.cp(filename, top_file)
993 madir = self.options['madanalysis_path']
994 tag = self.run_card['run_tag']
995 td = self.options['td_path']
996 misc.call(['%s/plot' % self.dirbin, madir, td],
997 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
998 stderr = subprocess.STDOUT,
999 cwd=plot_dir)
1000
1001 misc.call(['%s/plot_page-pl' % self.dirbin,
1002 os.path.basename(plot_dir),
1003 'parton'],
1004 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1005 stderr = subprocess.STDOUT,
1006 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1007 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1008 output)
1009
1010 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1011
1012 if any([arg in ['all','shower'] for arg in args]):
1013 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
1014 'events_*.lhe.gz'))
1015 if len(filenames) != 1:
1016 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
1017 'events_*.hep.gz'))
1018 if len(filenames) != 1:
1019 logger.info('No shower level file found for run %s' % \
1020 self.run_name)
1021 return
1022 filename = filenames[0]
1023 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1024
1025 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1026 if aMCatNLO and not self.options['mg5_path']:
1027 raise aMCatNLOError('plotting NLO HEP file needs MG5 utilities')
1028
1029 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1030 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1031 self.run_hep2lhe()
1032 else:
1033 filename = filenames[0]
1034 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1035
1036 self.create_plot('shower')
1037 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1038 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1039 lhe_file_name)
1040 misc.gzip(lhe_file_name)
1041
1042 if any([arg in ['all','pgs'] for arg in args]):
1043 filename = pjoin(self.me_dir, 'Events', self.run_name,
1044 '%s_pgs_events.lhco' % self.run_tag)
1045 if os.path.exists(filename+'.gz'):
1046 misc.gunzip(filename)
1047 if os.path.exists(filename):
1048 self.create_plot('PGS')
1049 misc.gzip(filename)
1050 else:
1051 logger.info('No valid files for pgs plot')
1052
1053 if any([arg in ['all','delphes'] for arg in args]):
1054 filename = pjoin(self.me_dir, 'Events', self.run_name,
1055 '%s_delphes_events.lhco' % self.run_tag)
1056 if os.path.exists(filename+'.gz'):
1057 misc.gunzip(filename)
1058 if os.path.exists(filename):
1059
1060 self.create_plot('Delphes')
1061
1062 misc.gzip(filename)
1063 else:
1064 logger.info('No valid files for delphes plot')
1065
1066
1067
1068 def do_calculate_xsect(self, line):
1069 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1070 this function wraps the do_launch one"""
1071
1072 self.start_time = time.time()
1073 argss = self.split_arg(line)
1074
1075 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1076 options = options.__dict__
1077 options['reweightonly'] = False
1078 options['parton'] = True
1079 self.check_calculate_xsect(argss, options)
1080 self.do_launch(line, options, argss)
1081
1082
1083 def do_banner_run(self, line):
1084 """Make a run from the banner file"""
1085
1086 args = self.split_arg(line)
1087
1088 self.check_banner_run(args)
1089
1090
1091 for name in ['shower_card.dat', 'madspin_card.dat']:
1092 try:
1093 os.remove(pjoin(self.me_dir, 'Cards', name))
1094 except Exception:
1095 pass
1096
1097 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1098
1099
1100 if not self.force:
1101 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1102 if ans == 'n':
1103 self.force = True
1104
1105
1106 if self.force:
1107 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1108 banner = banner_mod.Banner(args[0])
1109 for line in banner['run_settings']:
1110 if '=' in line:
1111 mode, value = [t.strip() for t in line.split('=')]
1112 mode_status[mode] = value
1113 else:
1114 mode_status = {}
1115
1116
1117 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1118 switch=mode_status)
1119
1120
1121 def do_generate_events(self, line):
1122 """Main commands: generate events
1123 this function just wraps the do_launch one"""
1124 self.do_launch(line)
1125
1126
1127
1128 def do_treatcards(self, line, amcatnlo=True):
1129 """Advanced commands: this is for creating the correct run_card.inc from the NLO format"""
1130
1131 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1132 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1133
1134
1135 def set_configuration(self, amcatnlo=True, **opt):
1136 """assign all configuration variables from file;
1137 loop over the different config files if config_file is not defined"""
1138 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1139
1140
1141 def do_launch(self, line, options={}, argss=[], switch={}):
1142 """Main commands: launch the full chain
1143 options and args are relevant if the function is called from other
1144 functions, such as generate_events or calculate_xsect
1145 mode gives the list of switches needed for the computation (useful for banner_run)
1146 """
1147
1148 if not argss and not options:
1149 self.start_time = time.time()
1150 argss = self.split_arg(line)
1151
1152 (options, argss) = _launch_parser.parse_args(argss)
1153 options = options.__dict__
1154 self.check_launch(argss, options)
1155
1156 if 'run_name' in options.keys() and options['run_name']:
1157 self.run_name = options['run_name']
1158
1159
1160 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1161 logger.warning('Removing old run information in \n'+
1162 pjoin(self.me_dir, 'Events', self.run_name))
1163 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1164 self.results.delete_run(self.run_name)
1165
1166 if options['multicore']:
1167 self.cluster_mode = 2
1168 elif options['cluster']:
1169 self.cluster_mode = 1
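# cluster_mode: 0 = single machine (default), 1 = cluster submission, 2 = multicore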
1170
1171 if not switch:
1172 mode = argss[0]
1173 if mode in ['LO', 'NLO']:
1174 options['parton'] = True
1175 mode = self.ask_run_configuration(mode, options)
1176 else:
1177 mode = self.ask_run_configuration('auto', options, switch)
1178
1179 self.results.add_detail('run_mode', mode)
1180
1181 self.update_status('Starting run', level=None, update_results=True)
1182
1183 if self.options['automatic_html_opening']:
1184 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1185 self.options['automatic_html_opening'] = False
1186
1187 if '+' in mode:
1188 mode = mode.split('+')[0]
1189 self.compile(mode, options)
1190 evt_file = self.run(mode, options)
1191
1192 if int(self.run_card['nevents']) == 0 and not mode in ['LO', 'NLO']:
1193 logger.info('No event file generated: grids have been set up with a '\
1194 'relative precision of %s' % self.run_card['req_acc'])
1195 return
1196
1197 if not mode in ['LO', 'NLO']:
1198 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1199 self.exec_cmd('decay_events -from_cards', postcmd=False)
1200 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1201
1202 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1203 and not options['parton']:
1204 self.run_mcatnlo(evt_file)
1205 elif mode == 'noshower':
1206 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1207 Please, shower the Les Houches events before using them for physics analyses.""")
1208
1209
1210 self.update_status('', level='all', update_results=True)
1211 if int(self.run_card['ickkw']) == 3 and mode in ['noshower', 'aMC@NLO']:
1212 logger.warning("""You are running with FxFx merging enabled.
1213 To be able to merge samples of various multiplicities without double counting,
1214 you have to remove some events after showering 'by hand'.
1215 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1216
1217
1218
1219
1220 def do_compile(self, line):
1221 """Advanced commands: just compile the executables """
1222 argss = self.split_arg(line)
1223
1224 (options, argss) = _compile_parser.parse_args(argss)
1225 options = options.__dict__
1226 options['reweightonly'] = False
1227 options['nocompile'] = False
1228 self.check_compile(argss, options)
1229
1230 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1231 self.ask_run_configuration(mode, options)
1232 self.compile(mode, options)
1233
1234
1235 self.update_status('', level='all', update_results=True)
1236
1237 def print_results_in_shell(self, data):
1238 """Print a nice summary of the results in the shell;
1239 data should be of type: gen_crossxhtml.OneTagResults"""
1240 if not data:
1241 return
1242 logger.info(" === Results Summary for run: %s tag: %s ===\n" % (data['run_name'],data['tag']))
1243 if self.ninitial == 1:
1244 logger.info(" Width : %.4g +- %.4g GeV" % (data['cross'], data['error']))
1245 else:
1246 logger.info(" Cross-section : %.4g +- %.4g pb" % (data['cross'], data['error']))
1247 logger.info(" Nb of events : %s" % data['nb_event'] )
1248
1249
1250
1251
1252
1253
1254
1255
1256 logger.info(" " )
1257
1258 def print_results_in_file(self, data, path, mode='w'):
1259 """Print a nice summary of the results in a file;
1260 data should be of type: gen_crossxhtml.OneTagResults"""
1261 if not data:
1262 return
1263
1264 fsock = open(path, mode)
1265
1266 fsock.write(" === Results Summary for run: %s tag: %s process: %s ===\n" % \
1267 (data['run_name'],data['tag'], os.path.basename(self.me_dir)))
1268
1269 if self.ninitial == 1:
1270 fsock.write(" Width : %.4g +- %.4g GeV\n" % (data['cross'], data['error']))
1271 else:
1272 fsock.write(" Cross-section : %.4g +- %.4g pb\n" % (data['cross'], data['error']))
1273 fsock.write(" Nb of events : %s\n" % data['nb_event'] )
1274
1275
1276
1277
1278
1279
1280 fsock.write(" \n" )
1281
1282
1283
1284
1285
1286 def update_random_seed(self):
1287 """Update the random number seed with the value from the run_card.
1288 If it is 0, pick a fresh seed by incrementing the one stored in SubProcesses/randinit"""
1289 iseed = int(self.run_card['iseed'])
1290 if iseed == 0:
1291 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1292 iseed = int(randinit.read()[2:]) + 1
1293 randinit.close()
1294 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1295 randinit.write('r=%d' % iseed)
1296 randinit.close()
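# the randinit file stores the seed as a single line of the form 'r=<seed>';
# read()[2:] above strips the leading 'r=' before the integer is parsed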
1297
1298
1299 def get_characteristics(self, file):
1300 """reads the proc_characteristics file and initialises the corresponding
1301 dictionary"""
1302 lines = [l for l in open(file).read().split('\n') if l and not l.startswith('#')]
1303 self.proc_characteristics = {}
1304 for l in lines:
1305 key, value = l.split('=')
1306 self.proc_characteristics[key.strip()] = value.strip()
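# proc_characteristics contains plain 'key = value' lines; lines starting with '#' are comments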
1307
1308
1309 def run(self, mode, options):
1310 """runs aMC@NLO. Returns the name of the event file created"""
1311 logger.info('Starting run')
1312
1313 if not 'only_generation' in options.keys():
1314 options['only_generation'] = False
1315
1316 if mode in ['LO', 'NLO'] and self.run_card['iappl'] == '2' and not options['only_generation']:
1317 options['only_generation'] = True
1318 self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
1319
1320 if self.cluster_mode == 1:
1321 cluster_name = self.options['cluster_type']
1322 self.cluster = cluster.from_name[cluster_name](**self.options)
1323 if self.cluster_mode == 2:
1324 try:
1325 import multiprocessing
1326 if not self.nb_core:
1327 try:
1328 self.nb_core = int(self.options['nb_core'])
1329 except TypeError:
1330 self.nb_core = multiprocessing.cpu_count()
1331 logger.info('Using %d cores' % self.nb_core)
1332 except ImportError:
1333 self.nb_core = 1
1334 logger.warning('Impossible to detect the number of cores => Using one.\n'+
1335 'Use set nb_core X in order to set this number and be able to '+
1336 'run in multicore.')
1337
1338 self.cluster = cluster.MultiCore(**self.options)
1339 self.update_random_seed()
1340
1341 folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
1342 'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
1343 folder_names['noshower'] = folder_names['aMC@NLO']
1344 folder_names['noshowerLO'] = folder_names['aMC@LO']
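# the integration channels live in SubProcesses/P*/ inside G-type folders whose prefix
# depends on the run mode: born_G*/all_G* for fixed order, GB*/GF* for event generation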
1345 job_dict = {}
1346 p_dirs = [d for d in \
1347 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
1348
1349 if not options['only_generation'] and not options['reweightonly']:
1350 self.update_status('Cleaning previous results', level=None)
1351 for dir in p_dirs:
1352 job_dict[dir] = [file for file in \
1353 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
1354 if file.startswith('ajob')]
1355
1356 for obj in folder_names[mode]:
1357 to_rm = [file for file in \
1358 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
1359 if file.startswith(obj[:-1]) and \
1360 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
1361 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
1362
1363
1364
1365 to_always_rm = [file for file in \
1366 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
1367 if file.startswith(obj[:-1]) and
1368 '_' in file and not '_G' in file and \
1369 (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
1370 os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
1371
1372 if not options['only_generation'] and not options['reweightonly']:
1373 to_always_rm.extend(to_rm)
1374 if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
1375 to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
1376 files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
1377
1378 mcatnlo_status = ['Setting up grid', 'Computing upper envelope', 'Generating events']
1379
1380 if self.run_card['iappl']=='2':
1381 self.applgrid_distribute(options,mode,p_dirs)
1382
1383 if options['reweightonly']:
1384 event_norm=self.run_card['event_norm']
1385 nevents=int(self.run_card['nevents'])
1386 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1387
1388 devnull = os.open(os.devnull, os.O_RDWR)
1389 if mode in ['LO', 'NLO']:
1390
1391 mode_dict = {'NLO': 'all', 'LO': 'born'}
1392 logger.info('Doing fixed order %s' % mode)
1393 req_acc = self.run_card['req_acc_FO']
1394 if not options['only_generation'] and req_acc != '-1':
1395 self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, '-1', '6','0.10')
1396 self.update_status('Setting up grids', level=None)
1397 self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
1398 elif not options['only_generation']:
1399 npoints = self.run_card['npoints_FO_grid']
1400 niters = self.run_card['niters_FO_grid']
1401 self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, npoints, niters)
1402 self.update_status('Setting up grids', level=None)
1403 self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
1404
1405 npoints = self.run_card['npoints_FO']
1406 niters = self.run_card['niters_FO']
1407 self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], -1, npoints, niters)
1408
1409 self.collect_log_files(folder_names[mode], 0)
1410 p = misc.Popen(['./combine_results_FO.sh', req_acc, '%s_G*' % mode_dict[mode]], \
1411 stdout=subprocess.PIPE, \
1412 cwd=pjoin(self.me_dir, 'SubProcesses'))
1413 output = p.communicate()
1414
1415 self.cross_sect_dict = self.read_results(output, mode)
1416 self.print_summary(options, 0, mode)
1417 cross, error = sum_html.make_all_html_results(self, ['%s*' % mode_dict[mode]])
1418 self.results.add_detail('cross', cross)
1419 self.results.add_detail('error', error)
1420
1421 self.update_status('Computing cross-section', level=None)
1422 self.run_all(job_dict, [['0', mode_dict[mode], '0', mode_dict[mode]]], 'Computing cross-section')
1423
1424
1425 self.collect_log_files(folder_names[mode], 1)
1426 p = misc.Popen(['./combine_results_FO.sh', '-1'] + folder_names[mode], \
1427 stdout=subprocess.PIPE,
1428 cwd=pjoin(self.me_dir, 'SubProcesses'))
1429 output = p.communicate()
1430 self.cross_sect_dict = self.read_results(output, mode)
1431
1432
1433 scale_pdf_info={}
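# when scale and/or PDF reweighting is enabled, collect the scale_pdf_dependence.dat
# files written by every channel and combine them into the uncertainty summary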
1434 if self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.':
1435 data_files=[]
1436 for dir in p_dirs:
1437 for obj in folder_names[mode]:
1438 for file in os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)):
1439 if file.startswith(obj[:-1]) and \
1440 (os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file,'scale_pdf_dependence.dat'))):
1441 data_files.append(pjoin(dir,file,'scale_pdf_dependence.dat'))
1442 scale_pdf_info = self.pdf_scale_from_reweighting(data_files)
1443
1444 self.print_summary(options, 1, mode, scale_pdf_info)
1445
1446 files.cp(pjoin(self.me_dir, 'SubProcesses', 'res.txt'),
1447 pjoin(self.me_dir, 'Events', self.run_name))
1448
1449 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
1450 misc.call(['./combine_plots_FO.sh'] + folder_names[mode], \
1451 stdout=devnull,
1452 cwd=pjoin(self.me_dir, 'SubProcesses'))
1453 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
1454 pjoin(self.me_dir, 'Events', self.run_name))
1455 logger.info('The results of this run and the TopDrawer file with the plots' + \
1456 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1457 elif self.analyse_card['fo_analysis_format'].lower() == 'root':
1458 misc.call(['./combine_root.sh'] + folder_names[mode], \
1459 stdout=devnull,
1460 cwd=pjoin(self.me_dir, 'SubProcesses'))
1461 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
1462 pjoin(self.me_dir, 'Events', self.run_name))
1463 logger.info('The results of this run and the ROOT file with the plots' + \
1464 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1465 else:
1466 logger.info('The results of this run' + \
1467 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1468
1469 cross, error = sum_html.make_all_html_results(self, folder_names[mode])
1470 self.results.add_detail('cross', cross)
1471 self.results.add_detail('error', error)
1472 if self.run_card['iappl'] != '0':
1473 self.applgrid_combine(cross,error)
1474 self.update_status('Run complete', level='parton', update_results=True)
1475
1476 return
1477
1478 elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
1479 shower = self.run_card['parton_shower'].upper()
1480 nevents = int(self.run_card['nevents'])
1481 req_acc = self.run_card['req_acc']
1482 if nevents == 0 and float(req_acc) < 0 :
1483 raise aMCatNLOError('Cannot determine the required accuracy from the number '\
1484 'of events, because 0 events are requested. Please set '\
1485 'the "req_acc" parameter in the run_card to a value between 0 and 1')
1486 elif float(req_acc) >1 or float(req_acc) == 0 :
1487 raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
1488 'be larger than 0 and smaller than 1, '\
1489 'or set to -1 for automatic determination. Current value is %s' % req_acc)
1490
1491 shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']
1492
1493 if not shower in shower_list:
1494 raise aMCatNLOError('%s is not a valid parton shower. Please use one of the following: %s' \
1495 % (shower, ', '.join(shower_list)))
1496
1497
1498 if shower == 'PYTHIA6PT' and \
1499 self.proc_characteristics['has_fsr'] == 'true':
1500 raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')
1501
1502 if mode in ['aMC@NLO', 'aMC@LO']:
1503 logger.info('Doing %s matched to parton shower' % mode[4:])
1504 elif mode in ['noshower','noshowerLO']:
1505 logger.info('Generating events without running the shower.')
1506 elif options['only_generation']:
1507 logger.info('Generating events starting from existing results')
1508
1509
1510 for i, status in enumerate(mcatnlo_status):
1511
1512
1513 try:
1514 nevents_unweighted = open(pjoin(self.me_dir,
1515 'SubProcesses',
1516 'nevents_unweighted')).read().split('\n')
1517 except IOError:
1518 nevents_unweighted = []
1519
1520 split = i == 2 and \
1521 int(self.run_card['nevt_job']) > 0 and \
1522 any([int(l.split()[1]) > int(self.run_card['nevt_job']) \
1523 for l in nevents_unweighted if l])
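# split the event generation into smaller jobs when nevt_job is set and at least
# one channel has to produce more events than nevt_job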
1524
1525 if i == 2 or not options['only_generation']:
1526
1527
1528 if i==2 and nevents==0:
1529 self.print_summary(options, 2,mode)
1530 return
1531
1532 if split:
1533
1534 misc.call([pjoin(self.me_dir, 'bin', 'internal', 'split_jobs.py')] + \
1535 [self.run_card['nevt_job']],
1536 stdout = devnull,
1537 cwd = pjoin(self.me_dir, 'SubProcesses'))
1538 assert os.path.exists(pjoin(self.me_dir, 'SubProcesses',
1539 'nevents_unweighted_splitted'))
1540
1541 self.update_status(status, level='parton')
1542 if mode in ['aMC@NLO', 'noshower']:
1543 self.write_madinMMC_file(pjoin(self.me_dir, 'SubProcesses'), 'all', i)
1544 self.run_all(job_dict, [['2', 'F', '%d' % i]], status, split_jobs = split)
1545
1546 elif mode in ['aMC@LO', 'noshowerLO']:
1547 self.write_madinMMC_file(
1548 pjoin(self.me_dir, 'SubProcesses'), 'born', i)
1549 self.run_all(job_dict,
1550 [['2', 'B', '%d' % i]],
1551 '%s at LO' % status, split_jobs = split)
1552
1553 if (i < 2 and not options['only_generation']) or i == 1 :
1554
1555 self.collect_log_files(folder_names[mode], i)
1556 p = misc.Popen(['./combine_results.sh'] + \
1557 ['%d' % i,'%d' % nevents, '%s' % req_acc ] + \
1558 folder_names[mode],
1559 stdout=subprocess.PIPE,
1560 cwd = pjoin(self.me_dir, 'SubProcesses'))
1561 output = p.communicate()
1562 files.cp(pjoin(self.me_dir, 'SubProcesses', 'res_%d.txt' % i), \
1563 pjoin(self.me_dir, 'Events', self.run_name))
1564
1565 self.cross_sect_dict = self.read_results(output, mode)
1566 self.print_summary(options, i, mode)
1567
1568 cross, error = sum_html.make_all_html_results(self, folder_names[mode])
1569 self.results.add_detail('cross', cross)
1570 self.results.add_detail('error', error)
1571
1572
1573 if split:
1574 self.check_event_files()
1575
1576 if self.cluster_mode == 1:
1577
1578 self.update_status(
1579 'Waiting while files are transferred back from the cluster nodes',
1580 level='parton')
1581 time.sleep(10)
1582 if split:
1583 files.cp(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted'), \
1584 pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted'))
1585
1586
1587 event_norm=self.run_card['event_norm']
1588 self.collect_log_files(folder_names[mode], 2)
1589 return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1590
1591
1592 def applgrid_combine(self, cross, error):
1593 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
1594 logger.debug('Combining APPLgrids \n')
1595 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),'applgrid-combine')
1596 with open(pjoin(self.me_dir,'SubProcesses','dirs.txt')) as dirf:
1597 all_jobs=dirf.readlines()
1598 ngrids=len(all_jobs)
1599 nobs =len([name for name in os.listdir(pjoin(self.me_dir,'SubProcesses',all_jobs[0].rstrip())) \
1600 if name.endswith("_out.root")])
1601 for obs in range(0,nobs):
1602 gdir = [pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
1603
1604 if self.run_card["iappl"] == "1":
1605 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
1606 elif self.run_card["iappl"] == "2":
1607 unc2_inv=pow(cross/error,2)
1608 unc2_inv_ngrids=pow(cross/error,2)*ngrids
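# (cross/error)^2, the inverse squared relative uncertainty, is used as both the
# scale (-s) and the weight (--weight) when combining the re-filled grids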
1609 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
1610 for job in all_jobs:
1611 os.remove(pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_in.root"))
1612 else:
1613 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
1614
1615 for ggdir in gdir:
1616 os.remove(ggdir)
1617
1618
1619 def applgrid_distribute(self, options, mode, p_dirs):
1620 """Distributes the APPLgrids ready to be filled by a second run of the code"""
1621
1622 if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
1623 gfiles=glob.glob(pjoin(self.me_dir, 'Events','*','aMCfast_obs_0_starting_grid.root'))
1624 time_stamps={}
1625 for root_file in gfiles:
1626 time_stamps[root_file]=os.path.getmtime(root_file)
1627 options['appl_start_grid']= \
1628 max(time_stamps.iterkeys(), key=(lambda key: time_stamps[key])).split('/')[-2]
1629 logger.info('No --appl_start_grid option given. Guessing that start grid from run "%s" should be used.' \
1630 % options['appl_start_grid'])
1631
1632 if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
1633 self.appl_start_grid = options['appl_start_grid']
1634 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
1635
1636 if not os.path.exists(pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')):
1637 raise self.InvalidCmd('APPLgrid file not found: %s' % \
1638 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
1639 else:
1640 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir(start_grid_dir) \
1641 if name.endswith("_starting_grid.root")]
1642 nobs =len(all_grids)
1643 gstring=" ".join(all_grids)
1644 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
1645 raise self.InvalidCmd('No APPLgrid name currently defined. Please provide this information.')
1646 if mode == 'NLO':
1647 gdir='all_G'
1648 elif mode == 'LO':
1649 gdir='born_G'
1650
1651 for pdir in p_dirs:
1652 g_dirs = [file for file in os.listdir(pjoin(self.me_dir,"SubProcesses",pdir)) \
1653 if file.startswith(gdir) and os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
1654 for g_dir in g_dirs:
1655 for grid in all_grids:
1656 obs=grid.split('_')[-3]
1657 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,'grid_obs_'+obs+'_in.root'))
1658
1659
1660 def collect_log_files(self, folders, istep):
1661 """collect the log files and put them in a single, html-friendly file inside the run_...
1662 directory"""
1663 step_list = ['Grid setting', 'Cross-section computation', 'Event generation']
1664 log_file = pjoin(self.me_dir, 'Events', self.run_name,
1665 'alllogs_%d.html' % istep)
1666
1667 channel_dict = {}
1668 log_files = []
1669 for folder in folders:
1670 log_files += glob.glob(pjoin(self.me_dir, 'SubProcesses', 'P*', folder, 'log.txt'))
1671
1672 content = ''
1673
1674 content += '<HTML><BODY>\n<font face="courier" size=2>'
1675 for log in log_files:
1676 channel_dict[os.path.dirname(log)] = [istep]
1677
1678 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'),''))
1679
1680 content += '<font color="red">\n'
1681 content += '<br>LOG file for integration channel %s, %s <br>' % \
1682 (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'), ''),
1683 step_list[istep])
1684 content += '</font>\n'
1685
1686
1687 content += '<PRE>\n' + open(log).read() + '\n</PRE>'
1688 content +='<br>\n'
1689
1690 content += '</font>\n</BODY></HTML>\n'
1691 open(log_file, 'w').write(content)
1692
1693
1694 def read_results(self, output, mode):
1695 """extract results (cross-section, absolute cross-section and errors)
1696 from output, which should be formatted as
1697 Found 4 correctly terminated jobs
1698 random seed found in 'randinit' is 33
1699 Integrated abs(cross-section)
1700 7.94473937e+03 +- 2.9953e+01 (3.7702e-01%)
1701 Integrated cross-section
1702 6.63392298e+03 +- 3.7669e+01 (5.6782e-01%)
1703 for aMC@NLO/aMC@LO, and as
1704
1705 for NLO/LO
1706 The cross_sect_dict is returned"""
1707 res = {}
1708 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
1709 pat = re.compile(\
1710 '''Found (\d+) correctly terminated jobs
1711 random seed found in 'randinit' is (\d+)
1712 Integrated abs\(cross-section\)
1713 \s*(\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\d+\.\d+e[+-]\d+)\%\)
1714 Integrated cross-section
1715 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
1716 else:
1717 pat = re.compile(\
1718 '''Found (\d+) correctly terminated jobs
1719 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
1720 pass
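# for event generation the regexp groups are (n_jobs, seed, abs_xsec, abs_err, abs_err%,
# xsec, err, err%); for fixed-order runs they are (n_jobs, xsec, err, err%)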
1721
1722 match = re.search(pat, output[0])
1723 if not match or output[1]:
1724 logger.info('Return code of the event collection: '+str(output[1]))
1725 logger.info('Output of the event collection:\n'+output[0])
1726 raise aMCatNLOError('An error occurred during the collection of results.\n' +
1727 'Please check the .log files inside the directories which failed.')
1728
1729
1730 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
1731 return {'randinit' : int(match.groups()[1]),
1732 'xseca' : float(match.groups()[2]),
1733 'erra' : float(match.groups()[3]),
1734 'xsect' : float(match.groups()[5]),
1735 'errt' : float(match.groups()[6])}
1736 else:
1737 return {'xsect' : float(match.groups()[1]),
1738 'errt' : float(match.groups()[2])}
1739
1740 def print_summary(self, options, step, mode, scale_pdf_info={}):
1741 """print a summary of the results contained in self.cross_sect_dict.
1742 step corresponds to the mintMC step; if =2 (i.e. after event generation)
1743 some additional information is printed"""
1744
1745 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
1746 process = ''
1747 for line in proc_card_lines:
1748 if line.startswith('generate') or line.startswith('add process'):
1749 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
1750 lpp = {'0':'l', '1':'p', '-1':'pbar'}
1751 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
1752 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
1753 self.run_card['ebeam1'], self.run_card['ebeam2'])
1754
1755
1756 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
1757 log_GV_files = glob.glob(pjoin(self.me_dir, \
1758 'SubProcesses', 'P*','G*','log_MINT*.txt'))
1759 all_log_files = glob.glob(pjoin(self.me_dir, \
1760 'SubProcesses', 'P*','G*','log*.txt'))
1761 elif mode == 'NLO':
1762 log_GV_files = glob.glob(pjoin(self.me_dir, \
1763 'SubProcesses', 'P*','all_G*','log*.txt'))
1764 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
1765 '%sG*'%foldName,'log*.txt')) for foldName in ['all_']],[])
1766 elif mode == 'LO':
1767 log_GV_files = ''
1768 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
1769 '%sG*'%foldName,'log*.txt')) for foldName in ['born_']],[])
1770 else:
1771 raise aMCatNLOError('Running mode %s not supported.' % mode)
1772
1773
1774 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
1775 status = ['Determining the number of unweighted events per channel',
1776 'Updating the number of unweighted events per channel',
1777 'Summary:']
1778 if step != 2:
1779 message = status[step] + '\n\n Intermediate results:' + \
1780 ('\n Random seed: %(randinit)d' + \
1781 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' + \
1782 '\n Total abs(cross-section): %(xseca)8.3e +- %(erra)6.1e pb \n') \
1783 % self.cross_sect_dict
1784 else:
1785
1786 message = '\n ' + status[step] + proc_info + \
1787 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1788 self.cross_sect_dict
1789
1790 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_scale']=='.true.' and int(self.run_card['ickkw']) != 4:
1791 message = message + \
1792 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \
1793 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low'])
1794 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_PDF']=='.true.' and int(self.run_card['ickkw']) != 4:
1795 message = message + \
1796 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \
1797 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low'])
1798
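# Fraction of negative-weight events: since the unweighted events all carry
# the same absolute weight (up to the sign), sum(|w|) = xseca and
# sum(w) = xsect give N_neg/N_tot = (xseca - xsect) / (2 * xseca).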
1799 neg_frac = (self.cross_sect_dict['xseca'] - self.cross_sect_dict['xsect'])/\
1800 (2. * self.cross_sect_dict['xseca'])
1801 message = message + \
1802 ('\n Number of events generated: %s' + \
1803 '\n Parton shower to be used: %s' + \
1804 '\n Fraction of negative weights: %4.2f' + \
1805 '\n Total running time : %s') % \
1806 (self.run_card['nevents'],
1807 self.run_card['parton_shower'],
1808 neg_frac,
1809 misc.format_timer(time.time()-self.start_time))
1810
1811 elif mode in ['NLO', 'LO']:
1812 status = ['Results after grid setup (cross-section is non-physical):',
1813 'Final results and run summary:']
1814 if step == 0:
1815 message = '\n ' + status[step] + \
1816 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1817 self.cross_sect_dict
1818 elif step == 1:
1819 message = '\n ' + status[step] + proc_info + \
1820 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1821 self.cross_sect_dict
1822 if self.run_card['reweight_scale']=='.true.':
1823 message = message + \
1824 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \
1825 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low'])
1826 if self.run_card['reweight_PDF']=='.true.':
1827 message = message + \
1828 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \
1829 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low'])
1830
1831 if (mode in ['NLO', 'LO'] and step!=1) or \
1832 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
1833 logger.info(message+'\n')
1834 return
1835
1836
1837
1838
1839 try:
1840 message, debug_msg = \
1841 self.compile_advanced_stats(log_GV_files, all_log_files, message)
1842 except Exception as e:
1843 debug_msg = 'Advanced statistics collection failed with error "%s"'%str(e)
1844
1845 logger.debug(debug_msg+'\n')
1846 logger.info(message+'\n')
1847
1848
1849 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
1850 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
1851 open(pjoin(evt_path, '.full_summary.txt'),
1852 'w').write(message+'\n\n'+debug_msg+'\n')
1853
1854 self.archive_files(evt_path,mode)
1855
1857 """ Copies the relevant files characterizing the run into the
1858 Events/Run_<xxx> directory."""
1859
1860 files_to_arxiv = [pjoin('Cards','param_card.dat'),
1861 pjoin('Cards','MadLoopParams.dat'),
1862 pjoin('Cards','FKS_params.dat'),
1863 pjoin('Cards','run_card.dat'),
1864 pjoin('Subprocesses','setscales.f'),
1865 pjoin('Subprocesses','cuts.f')]
1866
1867 if mode in ['NLO', 'LO']:
1868 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))
1869
1870 if not os.path.exists(pjoin(evt_path,'RunMaterial')):
1871 os.mkdir(pjoin(evt_path,'RunMaterial'))
1872
1873 for path in files_to_arxiv:
1874 if os.path.isfile(pjoin(self.me_dir,path)):
1875 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
1876 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
1877 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
1878
1880 """ This function goes through the log files given as arguments and
1881 compiles statistics about MadLoop stability, virtual integration
1882 optimization and potential error messages into a
1883 debug message to be printed at the end of the run """
1884
1885
1886
1887 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
1888 mint_search = re.compile(r"MINT(?P<ID>\d*).txt")
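# The regexp above extracts the MINT step ID from the log file name, while
# the one below parses the per-channel MadLoop stability summary. Note that
# 'Satistics' (sic) apparently matches the spelling printed in the MadLoop
# log itself, so it should not be 'corrected' here.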
1889
1890
1891
1892
1893
1894
1895
1896 UPS_stat_finder = re.compile(
1897 r"Satistics from MadLoop:.*"+\
1898 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
1899 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
1900 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
1901 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
1902 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
1903 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
1904 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
1905 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
1906 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
1907 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)
1908
1909 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
1910 1 : 'CutTools (double precision)',
1911 2 : 'PJFry++',
1912 3 : 'IREGI',
1913 4 : 'Golem95',
1914 9 : 'CutTools (quadruple precision)'}
1915 RetUnit_finder =re.compile(
1916 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")
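# Per-channel entries in stats['UPS'] are lists of 10 integer counters
# (ntot, nsun, nsps, nups, neps, nddp, nqdp, nini, n100, n10) followed, at
# index 10, by a list of 10 counters of the '#Unit <n>' return codes.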
1917
1918
1919 for gv_log in log_GV_files:
1920 channel_name = '/'.join(gv_log.split('/')[-5:-1])
1921 log=open(gv_log,'r').read()
1922 UPS_stats = re.search(UPS_stat_finder,log)
1923 for retunit_stats in re.finditer(RetUnit_finder, log):
1924 if channel_name not in stats['UPS'].keys():
1925 stats['UPS'][channel_name] = [0]*10+[[0]*10]
1926 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
1927 += int(retunit_stats.group('n_occurences'))
1928 if not UPS_stats is None:
1929 try:
1930 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
1931 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
1932 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
1933 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
1934 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
1935 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
1936 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
1937 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
1938 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
1939 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
1940 except KeyError:
1941 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
1942 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
1943 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
1944 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
1945 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
1946 int(UPS_stats.group('n10')),[0]*10]
1947 debug_msg = ""
1948 if len(stats['UPS'].keys())>0:
1949 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
1950 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
1951 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
1952 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
1953 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
1954 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
1955 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
1956 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
1957 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
1958 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
1959 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
1960 for i in range(10)]
1961 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
1962 float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
1963 maxUPS = max(UPSfracs, key = lambda w: w[1])
1964
1965 tmpStr = ""
1966 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
1967 tmpStr += '\n Stability unknown: %d'%nTotsun
1968 tmpStr += '\n Stable PS point: %d'%nTotsps
1969 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
1970 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
1971 tmpStr += '\n Only double precision used: %d'%nTotddp
1972 tmpStr += '\n Quadruple precision used: %d'%nTotqdp
1973 tmpStr += '\n Initialization phase-space points: %d'%nTotini
1974 tmpStr += '\n Reduction methods used:'
1975 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
1976 unit_code_meaning.keys() if nTot1[i]>0]
1977 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
1978 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
1979 if nTot100 != 0:
1980 debug_msg += '\n Unknown return code (100): %d'%nTot100
1981 if nTot10 != 0:
1982 debug_msg += '\n Unknown return code (10): %d'%nTot10
1983 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
1984 not in unit_code_meaning.keys())
1985 if nUnknownUnit != 0:
1986 debug_msg += '\n Unknown return code (1): %d'\
1987 %nUnknownUnit
1988
1989 if maxUPS[1]>0.001:
1990 message += tmpStr
1991 message += '\n Total number of unstable PS point detected:'+\
1992 ' %d (%4.2f%%)'%(nToteps,float(100*nToteps)/nTotPS)
1993 message += '\n Maximum fraction of UPS points in '+\
1994 'channel %s (%4.2f%%)'%maxUPS
1995 message += '\n Please report this to the authors while '+\
1996 'providing the file'
1997 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
1998 maxUPS[0],'UPS.log'))
1999 else:
2000 debug_msg += tmpStr
2001
2002
2003
2004
2005
2006
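# Regexps used to mine the MINT logs for virtual-integration statistics:
# the accumulated virtual/born ratio and ABS virtual contribution (with
# their MC errors), the successive 'update virtual fraction' lines, and
# the final per-channel [ABS] result used to weed out negligible channels.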
2007 virt_tricks_finder = re.compile(
2008 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
2009 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
2010 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
2011 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")
2012
2013 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
2014 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")
2015
2016 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")
2017
2018 channel_contr_list = {}
2019 for gv_log in log_GV_files:
2020 logfile=open(gv_log,'r')
2021 log = logfile.read()
2022 logfile.close()
2023 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2024 vf_stats = None
2025 for vf_stats in re.finditer(virt_frac_finder, log):
2026 pass
2027 if not vf_stats is None:
2028 v_frac = float(vf_stats.group('v_frac'))
2029 v_average = float(vf_stats.group('v_average'))
2030 try:
2031 if v_frac < stats['virt_stats']['v_frac_min'][0]:
2032 stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
2033 if v_frac > stats['virt_stats']['v_frac_max'][0]:
2034 stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
2035 stats['virt_stats']['v_frac_avg'][0] += v_frac
2036 stats['virt_stats']['v_frac_avg'][1] += 1
2037 except KeyError:
2038 stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
2039 stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
2040 stats['virt_stats']['v_frac_avg']=[v_frac,1]
2041
2042
2043 ccontr_stats = None
2044 for ccontr_stats in re.finditer(channel_contr_finder, log):
2045 pass
2046 if not ccontr_stats is None:
2047 contrib = float(ccontr_stats.group('v_contr'))
2048 try:
2049 if contrib>channel_contr_list[channel_name]:
2050 channel_contr_list[channel_name]=contrib
2051 except KeyError:
2052 channel_contr_list[channel_name]=contrib
2053
2054
2055
2056
2057 average_contrib = 0.0
2058 for value in channel_contr_list.values():
2059 average_contrib += value
2060 if len(channel_contr_list.values()) !=0:
2061 average_contrib = average_contrib / len(channel_contr_list.values())
2062
2063 relevant_log_GV_files = []
2064 excluded_channels = set([])
2065 all_channels = set([])
2066 for log_file in log_GV_files:
2067 channel_name = '/'.join(log_file.split('/')[-3:-1])
2068 all_channels.add(channel_name)
2069 try:
2070 if channel_contr_list[channel_name] > (0.1*average_contrib):
2071 relevant_log_GV_files.append(log_file)
2072 else:
2073 excluded_channels.add(channel_name)
2074 except KeyError:
2075 relevant_log_GV_files.append(log_file)
2076
2077
2078 for gv_log in relevant_log_GV_files:
2079 logfile=open(gv_log,'r')
2080 log = logfile.read()
2081 logfile.close()
2082 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2083
2084 vt_stats = None
2085 for vt_stats in re.finditer(virt_tricks_finder, log):
2086 pass
2087 if not vt_stats is None:
2088 vt_stats_group = vt_stats.groupdict()
2089 v_ratio = float(vt_stats.group('v_ratio'))
2090 v_ratio_err = float(vt_stats.group('v_ratio_err'))
2091 v_contr = float(vt_stats.group('v_abs_contr'))
2092 v_contr_err = float(vt_stats.group('v_abs_contr_err'))
2093 try:
2094 if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
2095 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
2096 if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
2097 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
2098 if v_ratio_err < stats['virt_stats']['v_ratio_err_min'][0]:
2099 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
2100 if v_ratio_err > stats['virt_stats']['v_ratio_err_max'][0]:
2101 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
2102 if v_contr < stats['virt_stats']['v_contr_min'][0]:
2103 stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
2104 if v_contr > stats['virt_stats']['v_contr_max'][0]:
2105 stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
2106 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
2107 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
2108 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
2109 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
2110 except KeyError:
2111 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
2112 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
2113 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
2114 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
2115 stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
2116 stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
2117 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
2118 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]
2119
2120 vf_stats = None
2121 for vf_stats in re.finditer(virt_frac_finder, log):
2122 pass
2123 if not vf_stats is None:
2124 v_frac = float(vf_stats.group('v_frac'))
2125 v_average = float(vf_stats.group('v_average'))
2126 try:
2127 if v_average < stats['virt_stats']['v_average_min'][0]:
2128 stats['virt_stats']['v_average_min']=(v_average,channel_name)
2129 if v_average > stats['virt_stats']['v_average_max'][0]:
2130 stats['virt_stats']['v_average_max']=(v_average,channel_name)
2131 stats['virt_stats']['v_average_avg'][0] += v_average
2132 stats['virt_stats']['v_average_avg'][1] += 1
2133 except KeyError:
2134 stats['virt_stats']['v_average_min']=[v_average,channel_name]
2135 stats['virt_stats']['v_average_max']=[v_average,channel_name]
2136 stats['virt_stats']['v_average_avg']=[v_average,1]
2137
2138 try:
2139 debug_msg += '\n\n Statistics on virtual integration optimization : '
2140
2141 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
2142 %tuple(stats['virt_stats']['v_frac_max'])
2143 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
2144 %tuple(stats['virt_stats']['v_frac_min'])
2145 debug_msg += '\n Average virt fraction computed %.3f'\
2146 %float(stats['virt_stats']['v_frac_avg'][0]/float(stats['virt_stats']['v_frac_avg'][1]))
2147 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
2148 (len(excluded_channels),len(all_channels))
2149 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
2150 %tuple(stats['virt_stats']['v_average_max'])
2151 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
2152 %tuple(stats['virt_stats']['v_ratio_max'])
2153 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
2154 %tuple(stats['virt_stats']['v_ratio_err_max'])
2155 debug_msg += tmpStr
2156
2157
2158
2159
2160
2161
2162
2163
2164 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
2165 %tuple(stats['virt_stats']['v_contr_err_max'])
2166 debug_msg += tmpStr
2167
2168
2169
2170
2171 except KeyError:
2172 debug_msg += '\n Could not find statistics on the integration optimization. '
2173
2174
2175
2176
2177
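# Timing statistics: collect every 'Time spent in <name> : <time>' line of
# the MINT logs and aggregate the times per contribution name and channel.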
2178 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
2179 "(?P<time>[\d\+-Eed\.]*)\s*")
2180
2181 for logf in log_GV_files:
2182 logfile=open(logf,'r')
2183 log = logfile.read()
2184 logfile.close()
2185 channel_name = '/'.join(logf.split('/')[-3:-1])
2186 mint = re.search(mint_search,logf)
2187 if not mint is None:
2188 channel_name = channel_name+' [step %s]'%mint.group('ID')
2189
2190 for time_stats in re.finditer(timing_stat_finder, log):
2191 try:
2192 stats['timings'][time_stats.group('name')][channel_name]+=\
2193 float(time_stats.group('time'))
2194 except KeyError:
2195 if time_stats.group('name') not in stats['timings'].keys():
2196 stats['timings'][time_stats.group('name')] = {}
2197 stats['timings'][time_stats.group('name')][channel_name]=\
2198 float(time_stats.group('time'))
2199
2200
2201 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
2202 try:
2203 totTimeList = [(time, chan) for chan, time in \
2204 stats['timings']['Total'].items()]
2205 except KeyError:
2206 totTimeList = []
2207
2208 totTimeList.sort()
2209 if len(totTimeList)>0:
2210 debug_msg += '\n\n Inclusive timing profile :'
2211 debug_msg += '\n Overall slowest channel %s (%s)'%\
2212 (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
2213 debug_msg += '\n Average channel running time %s'%\
2214 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
2215 debug_msg += '\n Aggregated total running time %s'%\
2216 Tstr(sum([el[0] for el in totTimeList]))
2217 else:
2218 debug_msg += '\n\n Inclusive timing profile not available.'
2219
2220 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
2221 sum(stats['timings'][stat].values()), reverse=True)
2222 for name in sorted_keys:
2223 if name=='Total':
2224 continue
2225 if sum(stats['timings'][name].values())<=0.0:
2226 debug_msg += '\n Zero time record for %s.'%name
2227 continue
2228 try:
2229 TimeList = [((100.0*time/stats['timings']['Total'][chan]),
2230 chan) for chan, time in stats['timings'][name].items()]
2231 except (KeyError, ZeroDivisionError):
2232 debug_msg += '\n\n Timing profile for %s unavailable.'%name
2233 continue
2234 TimeList.sort()
2235 debug_msg += '\n Timing profile for <%s> :'%name
2236 try:
2237 debug_msg += '\n Overall fraction of time %.3f %%'%\
2238 float((100.0*(sum(stats['timings'][name].values())/
2239 sum(stats['timings']['Total'].values()))))
2240 except (KeyError, ZeroDivisionError):
2241 debug_msg += '\n Overall fraction of time unavailable.'
2242 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
2243 (TimeList[-1][0],TimeList[-1][1])
2244 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
2245 (TimeList[0][0],TimeList[0][1])
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
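# Count occurrences of 'ERROR' (case-insensitive) in all the log files; the
# look-behind/look-ahead exclude the legitimate phrases
# '... of paper for ERROR ...' and 'ERROR calculation.'.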
2256 err_finder = re.compile(\
2257 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
2258 for log in all_log_files:
2259 logfile=open(log,'r')
2260 nErrors = len(re.findall(err_finder, logfile.read()))
2261 logfile.close()
2262 if nErrors != 0:
2263 stats['Errors'].append((str(log),nErrors))
2264
2265 nErrors = sum([err[1] for err in stats['Errors']],0)
2266 if nErrors != 0:
2267 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
2268 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
2269 'found in the following log file%s:'%('s' if \
2270 len(stats['Errors'])>1 else '')
2271 for error in stats['Errors'][:3]:
2272 log_name = '/'.join(error[0].split('/')[-5:])
2273 debug_msg += '\n > %d error%s in %s'%\
2274 (error[1],'s' if error[1]>1 else '',log_name)
2275 if len(stats['Errors'])>3:
2276 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
2277 nRemainingLogs = len(stats['Errors'])-3
2278 debug_msg += '\n And another %d error%s in %d other log file%s'%\
2279 (nRemainingErrors, 's' if nRemainingErrors>1 else '',
2280 nRemainingLogs, 's ' if nRemainingLogs>1 else '')
2281
2282 return message, debug_msg
2283
2284
2286 """this function calls the reweighting routines and creates the event file in the
2287 Events dir. Returns the name of the event file created
2288 """
2289 scale_pdf_info={}
2290 if (self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.') and int(self.run_card['ickkw']) != 4 :
2291 scale_pdf_info = self.run_reweight(options['reweightonly'])
2292
2293 self.update_status('Collecting events', level='parton', update_results=True)
2294 misc.compile(['collect_events'],
2295 cwd=pjoin(self.me_dir, 'SubProcesses'))
2296 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
2297 stdin=subprocess.PIPE,
2298 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
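# collect_events asks interactively for the normalization of the event
# weights; feed it '1' for event_norm=sum, '3' for unity and '2' otherwise
# (normally the 'average' normalization).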
2299 if event_norm.lower() == 'sum':
2300 p.communicate(input = '1\n')
2301 elif event_norm.lower() == 'unity':
2302 p.communicate(input = '3\n')
2303 else:
2304 p.communicate(input = '2\n')
2305
2306
2307 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]
2308
2309 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
2310 raise aMCatNLOError('An error occurred during event generation. ' + \
2311 'The event file has not been created. Check collect_events.log')
2312 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
2313 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
2314 if not options['reweightonly']:
2315 self.print_summary(options, 2, mode, scale_pdf_info)
2316 logger.info('The %s file has been generated.\n' % (evt_file))
2317 self.results.add_detail('nb_event', nevents)
2318 self.update_status('Events generated', level='parton', update_results=True)
2319 return evt_file[:-3]
2320
2321
2323 """runs MCatNLO on the generated event file to produce showered events
2324 """
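# Sketch of what follows: read the banner of the event file, sanity-check
# nsplit_jobs against the number of events, set up FastJet (or fjcore)
# linking if requested, compile the MCatNLO package for the chosen shower,
# create a fresh RUN_<shower>_<i> directory, optionally split the event
# file, run the shower jobs and finally collect the HepMC/StdHEP or
# TopDrawer output.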
2325 logger.info('Preparing MCatNLO run')
2326 try:
2327 misc.gunzip(evt_file)
2328 except Exception:
2329 pass
2330
2331 self.banner = banner_mod.Banner(evt_file)
2332 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
2333
2334
2335
2336 if int(int(self.banner.get_detail('run_card', 'nevents')) / \
2337 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
2338 != int(self.banner.get_detail('run_card', 'nevents')):
2339 logger.warning(\
2340 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
2341 'Setting it to 1.')
2342 self.shower_card['nsplit_jobs'] = 1
2343
2344
2345 if self.shower_card['nevents'] > 0 and \
2346 self.shower_card['nevents'] < int(self.banner.get_detail('run_card', 'nevents')) and \
2347 self.shower_card['nsplit_jobs'] != 1:
2348 logger.warning(\
2349 'Only a part of the events will be showered.\n' + \
2350 'Setting nsplit_jobs in the shower_card to 1.')
2351 self.shower_card['nsplit_jobs'] = 1
2352
2353 self.banner_to_mcatnlo(evt_file)
2354
2355
2356
2357
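# FastJet linking: if 'fastjet' is listed in the shower_card EXTRALIBS,
# query the executable configured in self.options['fastjet'] (normally
# fastjet-config) for its installation prefix and add the corresponding
# lib/ and include/ paths; if that fails, fall back to the bundled fjcore.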
2358 if 'fastjet' in self.shower_card['extralibs']:
2359
2360 if not 'stdc++' in self.shower_card['extralibs']:
2361 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
2362 self.shower_card['extralibs'] += ' stdc++'
2363
2364 try:
2365
2366 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
2367 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2368 output, error = p.communicate()
2369
2370 output = output[:-1]
2371
2372 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
2373 logger.warning('Linking FastJet: updating EXTRAPATHS')
2374 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
2375 if not pjoin(output, 'include') in self.shower_card['includepaths']:
2376 logger.warning('Linking FastJet: updating INCLUDEPATHS')
2377 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
2378
2379 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
2380 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
2381 except Exception:
2382 logger.warning('Linking FastJet: using fjcore')
2383
2384 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
2385 if not 'fjcore.o' in self.shower_card['analyse']:
2386 self.shower_card['analyse'] += ' fjcore.o'
2387
2388 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
2389 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
2390
2391 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
2392 for line in fjwrapper_lines:
2393 if '//INCLUDE_FJ' in line:
2394 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
2395 if '//NAMESPACE_FJ' in line:
2396 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
2397 open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\
2398 '\n'.join(fjwrapper_lines) + '\n')
2399
2400 extrapaths = self.shower_card['extrapaths'].split()
2401 if shower == 'HERWIGPP':
2402 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
2403
2404 if 'LD_LIBRARY_PATH' in os.environ.keys():
2405 ldlibrarypath = os.environ['LD_LIBRARY_PATH']
2406 else:
2407 ldlibrarypath = ''
2408 ldlibrarypath += ':' + ':'.join(extrapaths)
2409 os.putenv('LD_LIBRARY_PATH', ldlibrarypath)
2410
2411 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
2412 self.shower_card.write_card(shower, shower_card_path)
2413
2414 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
2415 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
2416 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
2417 stderr=open(mcatnlo_log, 'w'),
2418 cwd=pjoin(self.me_dir, 'MCatNLO'))
2419
2420 exe = 'MCATNLO_%s_EXE' % shower
2421 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
2422 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
2423 print open(mcatnlo_log).read()
2424 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
2425 logger.info(' ... done')
2426
2427
2428 count = 1
2429 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2430 (shower, count))):
2431 count += 1
2432 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2433 (shower, count))
2434 os.mkdir(rundir)
2435 files.cp(shower_card_path, rundir)
2436
2437
2438
2439 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
2440 'events_*.lhe'))
2441 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
2442 logger.info('Cleaning old files and splitting the event file...')
2443
2444 files.rm([f for f in event_files if 'events.lhe' not in f])
2445 if self.shower_card['nsplit_jobs'] > 1:
2446 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'))
2447 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
2448 stdin=subprocess.PIPE,
2449 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
2450 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2451 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
2452 logger.info('Splitting done.')
2453 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
2454 'events_*.lhe'))
2455
2456 event_files.sort()
2457
2458 self.update_status('Showering events...', level='shower')
2459 logger.info('(Running in %s)' % rundir)
2460 if shower != 'PYTHIA8':
2461 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
2462 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
2463 else:
2464
2465 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
2466 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
2467 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
2468 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
2469
2470 if shower == 'HERWIGPP':
2471 try:
2472 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
2473 except Exception:
2474 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
2475
2476 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
2477 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
2478
2479 files.ln(evt_file, rundir, 'events.lhe')
2480 for i, f in enumerate(event_files):
2481 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
2482
2483 if not self.shower_card['analyse']:
2484
2485 out_id = 'HEP'
2486 else:
2487
2488 out_id = 'TOP'
2489
2490
2491 open(pjoin(rundir, 'shower.sh'), 'w').write(\
2492 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
2493 % {'extralibs': ':'.join(extrapaths)})
2494 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
2495
2496 if event_files:
2497 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
2498 for i in range(len(event_files))]
2499 else:
2500 arg_list = [[shower, out_id, self.run_name]]
2501
2502 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
2503 self.njobs = 1
2504 self.wait_for_complete('shower')
2505
2506
2507 message = ''
2508 warning = ''
2509 to_gzip = [evt_file]
2510 if out_id == 'HEP':
2511
2512 if shower in ['PYTHIA8', 'HERWIGPP']:
2513 hep_format = 'HEPMC'
2514 ext = 'hepmc'
2515 else:
2516 hep_format = 'StdHEP'
2517 ext = 'hep'
2518
2519 hep_file = '%s_%s_0.%s.gz' % \
2520 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
2521 count = 0
2522
2523
2524
2525 while os.path.exists(hep_file) or \
2526 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
2527 count +=1
2528 hep_file = '%s_%s_%d.%s.gz' % \
2529 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
2530
2531 try:
2532 if self.shower_card['nsplit_jobs'] == 1:
2533 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
2534 message = ('The file %s has been generated. \nIt contains showered' + \
2535 ' and hadronized events in the %s format obtained' + \
2536 ' showering the parton-level event file %s.gz with %s') % \
2537 (hep_file, hep_format, evt_file, shower)
2538 else:
2539 hep_list = []
2540 for i in range(self.shower_card['nsplit_jobs']):
2541 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
2542 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
2543 message = ('The following files have been generated:\n %s\nThey contain showered' + \
2544 ' and hadronized events in the %s format obtained' + \
2545 ' showering the (split) parton-level event file %s.gz with %s') % \
2546 ('\n '.join(hep_list), hep_format, evt_file, shower)
2547
2548 except (OSError, IOError):
2549 raise aMCatNLOError('No file has been generated, an error occurred.'+\
2550 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
2551
2552
2553 try:
2554 self.do_plot('%s -f' % self.run_name)
2555 except Exception, error:
2556 logger.info("Fail to make the plot. Continue...")
2557 pass
2558
2559 elif out_id == 'TOP':
2560
2561 topfiles = []
2562 top_tars = [tarfile.TarFile(f) for f in glob.glob(pjoin(rundir, 'topfile*.tar'))]
2563 for top_tar in top_tars:
2564 topfiles.extend(top_tar.getnames())
2565
2566
2567 if len(top_tars) != self.shower_card['nsplit_jobs']:
2568 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
2569 (self.shower_card['nsplit_jobs'], len(top_tars)))
2570
2571
2572
2573 filename = 'plot_%s_%d_' % (shower, 1)
2574 count = 1
2575 while os.path.exists(pjoin(self.me_dir, 'Events',
2576 self.run_name, '%s0.top' % filename)) or \
2577 os.path.exists(pjoin(self.me_dir, 'Events',
2578 self.run_name, '%s0__1.top' % filename)):
2579 count += 1
2580 filename = 'plot_%s_%d_' % (shower, count)
2581
2582 if not topfiles:
2583
2584 warning = 'No .top file has been generated. For the results of your ' +\
2585 'run, please check inside %s' % rundir
2586
2587 elif self.shower_card['nsplit_jobs'] == 1:
2588
2589 top_tars[0].extractall(path = rundir)
2590 plotfiles = []
2591 for i, file in enumerate(topfiles):
2592 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
2593 '%s%d.top' % (filename, i))
2594 files.mv(pjoin(rundir, file), plotfile)
2595 plotfiles.append(plotfile)
2596
2597 ffiles = 'files'
2598 have = 'have'
2599 if len(plotfiles) == 1:
2600 ffiles = 'file'
2601 have = 'has'
2602
2603 message = ('The %s %s %s been generated, with histograms in the' + \
2604 ' TopDrawer format, obtained by showering the parton-level' + \
2605 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
2606 evt_file, shower)
2607 else:
2608
2609 topfiles_set = set(topfiles)
2610 plotfiles = []
2611 for j, top_tar in enumerate(top_tars):
2612 top_tar.extractall(path = rundir)
2613 for i, file in enumerate(topfiles_set):
2614 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
2615 '%s%d__%d.top' % (filename, i, j + 1))
2616 files.mv(pjoin(rundir, file), plotfile)
2617 plotfiles.append(plotfile)
2618
2619
2620 if self.shower_card['combine_td']:
2621 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
2622
2623 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
2624 norm = 1.
2625 elif self.banner.get('run_card', 'event_norm').lower() == 'average':
2626 norm = 1./float(self.shower_card['nsplit_jobs'])
2627
2628 plotfiles = []
2629 for i, file in enumerate(topfiles_set):
2630 filelist = ['%s%d__%d.top' % (filename, i, j + 1) \
2631 for j in range(self.shower_card['nsplit_jobs'])]
2632 infile="%d\n%s\n%s\n" % \
2633 (self.shower_card['nsplit_jobs'],
2634 '\n'.join(filelist),
2635 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
2636
2637 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
2638 stdin=subprocess.PIPE,
2639 stdout=os.open(os.devnull, os.O_RDWR),
2640 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2641 p.communicate(input = infile)
2642 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
2643 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
2644 plotfiles.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
2645 tar = tarfile.open(
2646 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
2647 for f in filelist:
2648 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
2649 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
2650
2651 tar.close()
2652
2653 ffiles = 'files'
2654 have = 'have'
2655 if len(plotfiles) == 1:
2656 ffiles = 'file'
2657 have = 'has'
2658
2659 message = ('The %s %s %s been generated, with histograms in the' + \
2660 ' TopDrawer format, obtained by showering the parton-level' + \
2661 ' file %s.gz with %s.\n' + \
2662 'The files from the different shower ' + \
2663 'jobs (before combining them) can be found inside %s.') % \
2664 (ffiles, ', '.join(plotfiles), have, \
2665 evt_file, shower,
2666 ', '.join([f.replace('top', 'tar.gz') for f in plotfiles]))
2667
2668 else:
2669 message = ('The following files have been generated:\n %s\n' + \
2670 'They contain histograms in the' + \
2671 ' TopDrawer format, obtained by showering the parton-level' + \
2672 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
2673 evt_file, shower)
2674
2675
2676 run_dir_path = pjoin(rundir, self.run_name)
2677 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
2678 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
2679 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
2680 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
2681 %(shower, count)))
2682 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
2683 cwd=run_dir_path)
2684 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
2685
2686 for f in to_gzip:
2687 misc.gzip(f)
2688 if message:
2689 logger.info(message)
2690 if warning:
2691 logger.warning(warning)
2692
2693 self.update_status('Run complete', level='shower', update_results=True)
2694
2695
2696
2697 - def set_run_name(self, name, tag=None, level='parton', reload_card=False):
2698 """define the run name, the run_tag, the banner and the results."""
2699
2700
2701 upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'],
2702 'pythia': ['pythia','pgs','delphes'],
2703 'shower': ['shower'],
2704 'pgs': ['pgs'],
2705 'delphes':['delphes'],
2706 'plot':[]}
2707
2708
2709
2710 if name == self.run_name:
2711 if reload_card:
2712 run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
2713 self.run_card = banner_mod.RunCardNLO(run_card)
2714
2715
2716 if tag:
2717 self.run_card['run_tag'] = tag
2718 self.run_tag = tag
2719 self.results.add_run(self.run_name, self.run_card)
2720 else:
2721 for tag in upgrade_tag[level]:
2722 if getattr(self.results[self.run_name][-1], tag):
2723 tag = self.get_available_tag()
2724 self.run_card['run_tag'] = tag
2725 self.run_tag = tag
2726 self.results.add_run(self.run_name, self.run_card)
2727 break
2728 return
2729
2730
2731 if self.run_name:
2732 self.store_result()
2733
2734 self.run_name = name
2735
2736
2737 run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
2738 self.run_card = banner_mod.RunCardNLO(run_card)
2739
2740 new_tag = False
2741
2742 self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
2743 if tag:
2744 self.run_card['run_tag'] = tag
2745 new_tag = True
2746 elif not self.run_name in self.results and level =='parton':
2747 pass
2748 elif not self.run_name in self.results:
2749
2750 logger.warning('Trying to run data on unknown run.')
2751 self.results.add_run(name, self.run_card)
2752 self.results.update('add run %s' % name, 'all', makehtml=True)
2753 else:
2754 for tag in upgrade_tag[level]:
2755
2756 if getattr(self.results[self.run_name][-1], tag):
2757
2758 tag = self.get_available_tag()
2759 self.run_card['run_tag'] = tag
2760 new_tag = True
2761 break
2762 if not new_tag:
2763
2764 tag = self.results[self.run_name][-1]['tag']
2765 self.run_card['run_tag'] = tag
2766
2767
2768 if name in self.results and not new_tag:
2769 self.results.def_current(self.run_name)
2770 else:
2771 self.results.add_run(self.run_name, self.run_card)
2772
2773 self.run_tag = self.run_card['run_tag']
2774
2775
2776
2777 if level == 'parton':
2778 return
2779 elif level == 'pythia':
2780 return self.results[self.run_name][0]['tag']
2781 else:
2782 for i in range(-1,-len(self.results[self.run_name])-1,-1):
2783 tagRun = self.results[self.run_name][i]
2784 if tagRun.pythia:
2785 return tagRun['tag']
2786
2787
2789 """ tar the pythia results. This is done when we are quite sure that
2790 the pythia output will not be used anymore """
2791
2792 if not self.run_name:
2793 return
2794
2795 self.results.save()
2796
2797 if not self.to_store:
2798 return
2799
2800 tag = self.run_card['run_tag']
2801
2802 self.to_store = []
2803
2804
2806 """reads the info in the init block and returns it in a dictionary"""
2807 ev_file = open(evt_file)
2808 init = ""
2809 found = False
2810 while True:
2811 line = ev_file.readline()
2812 if "<init>" in line:
2813 found = True
2814 elif found and not line.startswith('#'):
2815 init += line
2816 if "</init>" in line or "<event>" in line:
2817 break
2818 ev_file.close()
2819
2820
2821
2822
2823
2824
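# The numbers in the <init> block follow the Les Houches Event File accord:
# beam PDG ids (idbmup), beam energies (ebmup), PDF author group and set ids
# (pdfgup, pdfsup), the weighting strategy (idwtup) and the number of
# subprocesses (nprup).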
2825 init_dict = {}
2826 init_dict['idbmup1'] = int(init.split()[0])
2827 init_dict['idbmup2'] = int(init.split()[1])
2828 init_dict['ebmup1'] = float(init.split()[2])
2829 init_dict['ebmup2'] = float(init.split()[3])
2830 init_dict['pdfgup1'] = int(init.split()[4])
2831 init_dict['pdfgup2'] = int(init.split()[5])
2832 init_dict['pdfsup1'] = int(init.split()[6])
2833 init_dict['pdfsup2'] = int(init.split()[7])
2834 init_dict['idwtup'] = int(init.split()[8])
2835 init_dict['nprup'] = int(init.split()[9])
2836
2837 return init_dict
2838
2839
2841 """creates the MCatNLO input script using the values set in the header of the event_file.
2842 It also checks whether the LHAPDF library is used"""
2843 shower = self.banner.get('run_card', 'parton_shower').upper()
2844 pdlabel = self.banner.get('run_card', 'pdlabel')
2845 itry = 0
2846 nevents = self.shower_card['nevents']
2847 init_dict = self.get_init_dict(evt_file)
2848
2849 if nevents < 0 or \
2850 nevents > int(self.banner.get_detail('run_card', 'nevents')):
2851 nevents = int(self.banner.get_detail('run_card', 'nevents'))
2852
2853 nevents = nevents / self.shower_card['nsplit_jobs']
2854
2855 mcmass_dict = {}
2856 for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
2857 pdg = int(line.split()[0])
2858 mass = float(line.split()[1])
2859 mcmass_dict[pdg] = mass
2860
2861 content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
2862 content += 'NEVENTS=%d\n' % nevents
2863 content += 'NEVENTS_TOT=%d\n' % (int(self.banner.get_detail('run_card', 'nevents')) /\
2864 self.shower_card['nsplit_jobs'])
2865 content += 'MCMODE=%s\n' % shower
2866 content += 'PDLABEL=%s\n' % pdlabel
2867 content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
2868
2869
2870 content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
2871 content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
2872 content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
2873 content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
2874 content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
2875 content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
2876 try:
2877 content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
2878 content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
2879 except KeyError:
2880 content += 'HGGMASS=120.\n'
2881 content += 'HGGWIDTH=0.00575308848\n'
2882 content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
2883 content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
2884 content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
2885 content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
2886 content += 'DMASS=%s\n' % mcmass_dict[1]
2887 content += 'UMASS=%s\n' % mcmass_dict[2]
2888 content += 'SMASS=%s\n' % mcmass_dict[3]
2889 content += 'CMASS=%s\n' % mcmass_dict[4]
2890 content += 'BMASS=%s\n' % mcmass_dict[5]
2891 try:
2892 content += 'EMASS=%s\n' % mcmass_dict[11]
2893 content += 'MUMASS=%s\n' % mcmass_dict[13]
2894 content += 'TAUMASS=%s\n' % mcmass_dict[15]
2895 except KeyError:
2896
2897 mcmass_lines = [l for l in \
2898 open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
2899 ).read().split('\n') if l]
2900 new_mcmass_dict = {}
2901 for l in mcmass_lines:
2902 key, val = l.split('=')
2903 new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
2904 content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
2905 content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
2906 content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']
2907
2908 content += 'GMASS=%s\n' % mcmass_dict[21]
2909 content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
2910
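# PDF choice passed to the shower, driven by the shower_card 'pdfcode':
#  > 1 (or ==1 with pdlabel=lhapdf): use LHAPDF, linking the library and
#    copying the requested set (or the one used for the generation);
#  ==1 otherwise: try to reuse the generation PDF through LHAPDF, falling
#    back to the shower's internal PDF if no valid LHAPDF is found;
#  otherwise: use the shower's internal default PDF (PDFCODE=0).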
2911 if int(self.shower_card['pdfcode']) > 1 or \
2912 (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1):
2913
2914
2915
2916
2917 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
2918 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
2919 stdout = subprocess.PIPE).stdout.read().strip()
2920 content += 'LHAPDFPATH=%s\n' % lhapdfpath
2921 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
2922 if self.shower_card['pdfcode']==1:
2923 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
2924 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
2925 else:
2926 lhaid_list = [abs(int(self.shower_card['pdfcode']))]
2927 content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
2928 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
2929 elif int(self.shower_card['pdfcode'])==1:
2930
2931
2932
2933
2934
2935
2936 try:
2937 lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
2938 stdout = subprocess.PIPE).stdout.read().strip()
2939 self.link_lhapdf(pjoin(self.me_dir, 'lib'))
2940 content += 'LHAPDFPATH=%s\n' % lhapdfpath
2941 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
2942 lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
2943 content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
2944 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
2945 except Exception:
2946 logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
2947 ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
2948 ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
2949 ' same set as was used in the event generation install LHAPDF and set the path using'+\
2950 ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
2951 content += 'LHAPDFPATH=\n'
2952 content += 'PDFCODE=0\n'
2953 else:
2954 content += 'LHAPDFPATH=\n'
2955 content += 'PDFCODE=0\n'
2956
2957 content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
2958 content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
2959
2960 if self.options['pythia8_path']:
2961 content+='PY8PATH=%s\n' % self.options['pythia8_path']
2962 if self.options['hwpp_path']:
2963 content+='HWPPPATH=%s\n' % self.options['hwpp_path']
2964 if self.options['thepeg_path']:
2965 content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
2966 if self.options['hepmc_path']:
2967 content+='HEPMCPATH=%s\n' % self.options['hepmc_path']
2968
2969 output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
2970 output.write(content)
2971 output.close()
2972 return shower
2973
2974
2976 """runs the reweight_xsec_events executables on each sub-event file generated
2977 to compute on the fly the scale and/or PDF uncertainties"""
2978 logger.info(' Doing reweight')
2979
2980 nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
2981
2982 if only:
2983 if os.path.exists(nev_unw + '.orig'):
2984 files.cp(nev_unw + '.orig', nev_unw)
2985 else:
2986 raise aMCatNLOError('Cannot find event file information')
2987
2988
2989 file = open(nev_unw)
2990 lines = file.read().split('\n')
2991 file.close()
2992
2993 files.cp(nev_unw, nev_unw + '.orig')
2994
2995
2996 evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
2997
2998 job_dict = {}
2999 exe = 'reweight_xsec_events.local'
3000 for i, evt_file in enumerate(evt_files):
3001 path, evt = os.path.split(evt_file)
3002 files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
3003 pjoin(self.me_dir, 'SubProcesses', path))
3004 job_dict[path] = [exe]
3005
3006 self.run_all(job_dict, [[evt, '1']], 'Running reweight')
3007
3008
3009 for evt_file in evt_files:
3010 last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
3011 pjoin(self.me_dir, 'SubProcesses', evt_file)], \
3012 stdout = subprocess.PIPE).stdout.read().strip()
3013 if last_line != "</LesHouchesEvents>":
3014 raise aMCatNLOError('An error occurred during reweight. Check the ' + \
3015 '\'reweight_xsec_events.output\' files inside the ' + \
3016 '\'SubProcesses/P*/G*/\' directories for details')
3017
3018
3019 newfile = open(nev_unw, 'w')
3020 for line in lines:
3021 if line:
3022 newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
3023 newfile.close()
3024
3025 return self.pdf_scale_from_reweighting(evt_files)
3026
3028 """This function takes the files with the scale and pdf values
3029 written by the reweight_xsec_events.f code
3030 (P*/G*/scale_pdf_dependence.dat) and computes the overall
3031 scale and PDF uncertainties (the latter computed with the
3032 Hessian method if lhaid<=90000, or as a Gaussian standard
3033 deviation if lhaid>90000), returning them in percent. The expected
3034 format of the file is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
3035 xsec_pdf0 xsec_pdf1 ...."""
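# Expected layout of scale_pdf_dependence.dat, as parsed below (values are
# illustrative; Fortran 'D' exponents are converted to 'E' before parsing):
#   9
#   1.234D+02 1.250D+02 ...   (n_scales values, central first)
#   101
#   1.234D+02 1.236D+02 ...   (n_pdf values, central first)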
3036 scale_pdf_info={}
3037 scales=[]
3038 pdfs=[]
3039 numofpdf = 0
3040 numofscales = 0
3041 for evt_file in evt_files:
3042 path, evt=os.path.split(evt_file)
3043 data_file=open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat')).read()
3044 lines = data_file.replace("D", "E").split("\n")
3045 if not numofscales:
3046 numofscales = int(lines[0])
3047 if not numofpdf:
3048 numofpdf = int(lines[2])
3049 scales_this = [float(val) for val in lines[1].split()]
3050 pdfs_this = [float(val) for val in lines[3].split()]
3051
3052 if numofscales != len(scales_this) or numofpdf !=len(pdfs_this):
3053
3054 logger.info(data_file)
3055 logger.info((' Expected # of scales: %d\n'+
3056 ' Found # of scales: %d\n'+
3057 ' Expected # of pdfs: %d\n'+
3058 ' Found # of pdfs: %d\n') %
3059 (numofscales, len(scales_this), numofpdf, len(pdfs_this)))
3060 raise aMCatNLOError('inconsistent scale_pdf_dependence.dat')
3061 if not scales:
3062 scales = [0.] * numofscales
3063 if not pdfs:
3064 pdfs = [0.] * numofpdf
3065
3066 scales = [a + b for a, b in zip(scales, scales_this)]
3067 pdfs = [a + b for a, b in zip(pdfs, pdfs_this)]
3068
3069
3070 if numofscales>0 and numofpdf==0:
3071 cntrl_val=scales[0]
3072 elif numofpdf>0 and numofscales==0:
3073 cntrl_val=pdfs[0]
3074 elif numofpdf>0 and numofscales>0:
3075 if abs(1-scales[0]/pdfs[0])>0.0001:
3076 raise aMCatNLOError('Central values for scale and PDF variation not identical')
3077 else:
3078 cntrl_val=scales[0]
3079
3080
3081 scale_upp=0.0
3082 scale_low=0.0
3083 if numofscales>0:
3084 scale_pdf_info['scale_upp'] = (max(scales)/cntrl_val-1)*100
3085 scale_pdf_info['scale_low'] = (1-min(scales)/cntrl_val)*100
3086
3087
3088 lhaid=int(self.run_card['lhaid'])
3089 pdf_upp=0.0
3090 pdf_low=0.0
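# Asymmetric Hessian prescription (PDF error sets come in +/- pairs):
#   pdf_upp = 100/X0 * sqrt( sum_i max(0, X_{2i+1}-X0, X_{2i+2}-X0)^2 )
#   pdf_low = 100/X0 * sqrt( sum_i max(0, X0-X_{2i+1}, X0-X_{2i+2})^2 )
# For replica (Gaussian) sets the symmetric standard deviation of the
# members around the central value X0 is used instead.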
3091 if lhaid <= 90000:
3092
3093 if numofpdf>1:
3094 for i in range(int(numofpdf/2)):
3095 pdf_upp=pdf_upp+math.pow(max(0.0,pdfs[2*i+1]-cntrl_val,pdfs[2*i+2]-cntrl_val),2)
3096 pdf_low=pdf_low+math.pow(max(0.0,cntrl_val-pdfs[2*i+1],cntrl_val-pdfs[2*i+2]),2)
3097 scale_pdf_info['pdf_upp'] = math.sqrt(pdf_upp)/cntrl_val*100
3098 scale_pdf_info['pdf_low'] = math.sqrt(pdf_low)/cntrl_val*100
3099 else:
3100
3101 pdf_stdev=0.0
3102 for i in range(int(numofpdf-1)):
3103 pdf_stdev = pdf_stdev + pow(pdfs[i+1] - cntrl_val,2)
3104 pdf_stdev = math.sqrt(pdf_stdev/int(numofpdf-2))
3105 scale_pdf_info['pdf_upp'] = pdf_stdev/cntrl_val*100
3106 scale_pdf_info['pdf_low'] = scale_pdf_info['pdf_upp']
3107 return scale_pdf_info
3108
3109
3111 """this function waits for the jobs on the cluster to complete their run."""
3112
3113 starttime = time.time()
3114
3115 update_status = lambda i, r, f: self.update_status((i, r, f, run_type),
3116 starttime=starttime, level='parton', update_results=True)
3117 try:
3118 self.cluster.wait(self.me_dir, update_status)
3119 except:
3120 self.cluster.remove()
3121 raise
3122
3123 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3124 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
3125 njob_split = 0
3126 self.ijob = 0
3127
3128
3129
3130 self.split_folders = {}
3131
3132 if run_type != 'shower':
3133 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
3134 for args in arg_list:
3135 for Pdir, jobs in job_dict.items():
3136 for job in jobs:
3137 if not split_jobs:
3138 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3139 else:
3140 for n in self.find_jobs_to_split(Pdir, job, args[1]):
3141 self.run_exe(job, args + [n], run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3142 njob_split += 1
3143
3144 if self.cluster_mode == 2:
3145 time.sleep(1)
3146 if njob_split > 0:
3147 self.njobs = njob_split
3148 else:
3149 self.njobs = len(arg_list)
3150 for args in arg_list:
3151 [(cwd, exe)] = job_dict.items()
3152 self.run_exe(exe, args, run_type, cwd)
3153
3154 self.wait_for_complete(run_type)
3155
3156
3157
3159 """checks the integrity of the event files after splitting, and resubmits
3160 the jobs whose event files are not correctly terminated"""
3161 to_resubmit = []
3162 for dir in self.split_folders.keys():
3163 last_line = ''
3164 try:
3165 last_line = subprocess.Popen(
3166 ['tail', '-n1', pjoin(dir, 'events.lhe')], \
3167 stdout = subprocess.PIPE).stdout.read().strip()
3168 except IOError:
3169 pass
3170
3171 if last_line != "</LesHouchesEvents>":
3172 to_resubmit.append(dir)
3173
3174 self.njobs = 0
3175 if to_resubmit:
3176 run_type = 'Resubmitting broken jobs'
3177 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
3178 logger.debug('Resubmitting\n' + '\n'.join(to_resubmit) + '\n')
3179 for dir in to_resubmit:
3180 files.rm([dir])
3181 job = self.split_folders[dir][0]
3182 args = self.split_folders[dir][1:]
3183 run_type = 'monitor'
3184 cwd = os.path.split(dir)[0]
3185 self.run_exe(job, args, run_type, cwd=cwd )
3186 self.njobs +=1
3187
3188 self.wait_for_complete(run_type)
3189
3190
3192 """looks into the nevents_unweighted_splitted file to check how many
3193 split jobs are needed for this (pdir, job). arg is F, B or V"""
3194
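# The channel number is read from the 'for i in <N> ; do' line of the ajob
# script; the entries G<arg><channel>_<n>/events.lhe listed in
# nevents_unweighted_splitted then give the split-job indices to run.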
3195 splittings = []
3196 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
3197 pattern = re.compile('for i in (\d+) ; do')
3198 match = re.search(pattern, ajob)
3199 channel = match.groups()[0]
3200
3201
3202 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
3203
3204
3205 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
3206 pjoin(pdir, 'G%s%s' % (arg,channel)))
3207 matches = re.findall(pattern, nevents_file)
3208 for m in matches:
3209 splittings.append(m)
3210 return splittings
3211
3212
3213 - def run_exe(self, exe, args, run_type, cwd=None):
3214 """this basic function launches exe with args as arguments, either locally
3215 or on the cluster."""
3216
3217
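# Dispatch: with cluster_mode == 0 the executable is run locally through
# misc.call; otherwise the job is handed to self.cluster, with explicit
# input/output file lists for the 'reweight', 'ajob' and 'shower' jobs so
# that only the needed files are transferred to the worker nodes.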
3218 execpath = None
3219 if cwd and os.path.exists(pjoin(cwd, exe)):
3220 execpath = pjoin(cwd, exe)
3221 elif not cwd and os.path.exists(exe):
3222 execpath = exe
3223 else:
3224 raise aMCatNLOError('Cannot find executable %s in %s' \
3225 % (exe, os.getcwd()))
3226
3227 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
3228 subprocess.call(['chmod', '+x', exe], cwd=cwd)
3229
3230 if self.cluster_mode == 0:
3231
3232 misc.call(['./'+exe] + args, cwd=cwd)
3233 self.ijob += 1
3234 self.update_status((max([self.njobs - self.ijob - 1, 0]),
3235 min([1, self.njobs - self.ijob]),
3236 self.ijob, run_type), level='parton')
3237
3238
3239 elif 'reweight' in exe:
3240
3241
3242 input_files, output_files = [], []
3243 pdfinput = self.get_pdf_input_filename()
3244 if os.path.exists(pdfinput):
3245 input_files.append(pdfinput)
3246 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
3247 input_files.append(args[0])
3248 output_files.append('%s.rwgt' % os.path.basename(args[0]))
3249 output_files.append('reweight_xsec_events.output')
3250 output_files.append('scale_pdf_dependence.dat')
3251
3252 return self.cluster.submit2(exe, args, cwd=cwd,
3253 input_files=input_files, output_files=output_files)
3254
3255 elif 'ajob' in exe:
3256
3257
3258 if type(args[0]) == str:
3259 input_files, output_files, args = self.getIO_ajob(exe,cwd, args)
3260
3261 self.cluster.submit2(exe, args, cwd=cwd,
3262 input_files=input_files, output_files=output_files)
3263
3264
3265 if len(args) == 4 and '_' in output_files[-1]:
3266 self.split_folders[pjoin(cwd,output_files[-1])] = [exe] + args
3267
3268 elif 'shower' in exe:
3269
3270
3271
3272 input_files, output_files = [], []
3273 shower = args[0]
3274
3275 if shower == 'PYTHIA8':
3276 input_files.append(pjoin(cwd, 'Pythia8.exe'))
3277 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
3278 input_files.append(pjoin(cwd, 'config.sh'))
3279 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
3280 else:
3281 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
3282 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
3283 if shower == 'HERWIGPP':
3284 input_files.append(pjoin(cwd, 'Herwig++'))
3285 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
3286 if len(args) == 3:
3287 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
3288 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
3289 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
3290 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
3291 else:
3292 raise aMCatNLOError('Event file not present in %s' % \
3293 pjoin(self.me_dir, 'Events', self.run_name))
3294 else:
3295 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
3296
3297 if len(args) == 3:
3298 output_files.append('mcatnlo_run.log')
3299 else:
3300 output_files.append('mcatnlo_run_%s.log' % args[3])
3301 if args[1] == 'HEP':
3302 if len(args) == 3:
3303 fname = 'events'
3304 else:
3305 fname = 'events_%s' % args[3]
3306 if shower in ['PYTHIA8', 'HERWIGPP']:
3307 output_files.append(fname + '.hepmc.gz')
3308 else:
3309 output_files.append(fname + '.hep.gz')
3310 elif args[1] == 'TOP':
3311 if len(args) == 3:
3312 fname = 'topfile'
3313 else:
3314 fname = 'topfile_%s' % args[3]
3315 output_files.append(fname + '.tar')
3316 else:
3317 raise aMCatNLOError('Not a valid output argument for shower job: %s' % args[1])
3318
3319 self.cluster.submit2(exe, args, cwd=cwd,
3320 input_files=input_files, output_files=output_files)
3321
3322 else:
3323 return self.cluster.submit(exe, args, cwd=cwd)
3324
3325 def getIO_ajob(self, exe, cwd, args):
3326
3327
3328
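# build the lists of input files to ship with the job and of output files to retrieve once it has run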
3329 keep_fourth_arg = False
3330 output_files = []
3331 input_files = [pjoin(self.me_dir, 'MGMEVersion.txt'),
3332 pjoin(self.me_dir, 'SubProcesses', 'randinit'),
3333 pjoin(cwd, 'symfact.dat'),
3334 pjoin(cwd, 'iproc.dat'),
3335 pjoin(cwd, 'initial_states_map.dat'),
3336 pjoin(cwd, 'param_card.dat'),
3337 pjoin(cwd, 'FKS_params.dat')]
3338
3339 if os.path.exists(pjoin(cwd,'nevents.tar')):
3340 input_files.append(pjoin(cwd,'nevents.tar'))
3341
3342 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
3343 input_files.append(pjoin(cwd, 'OLE_order.olc'))
3344
3345
3346 if os.path.exists(pjoin(cwd,'MadLoop5_resources')):
3347 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
3348 if not os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')):
3349 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
3350 dereference=True)
3351 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
3352 tf.close()
3353
3354 Ire = re.compile("for i in ([\d\s]*) ; do")
3355 try :
3356 fsock = open(exe)
3357 except IOError:
3358 fsock = open(pjoin(cwd,exe))
3359 text = fsock.read()
3360 data = Ire.findall(text)
3361 subdir = ' '.join(data).split()
3362
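# args[0] selects the job type: '0' -> fixed-order run (madevent_mintFO), '2' -> event generation (madevent_mintMC)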
3363 if args[0] == '0':
3364
3365 input_files.append(pjoin(cwd, 'madevent_mintFO'))
3366 input_files.append(pjoin(self.me_dir, 'SubProcesses','madin.%s' % args[1]))
3367
3368 for i in subdir:
3369 current = '%s_G%s' % (args[1],i)
3370 if os.path.exists(pjoin(cwd,current)):
3371 input_files.append(pjoin(cwd, current))
3372 output_files.append(current)
3373 if len(args) == 4:
3374 args[2] = '-1'
3375
3376 base = '%s_G%s' % (args[3],i)
3377 if args[0] == '0':
3378 to_move = ['grid.MC_integer','mint_grids']
3379 elif args[0] == '1':
3380 to_move = ['mint_grids', 'grid.MC_integer']
3381 else:
3382 to_move = []
3383 if self.run_card['iappl'] =='2':
3384 for grid in glob.glob(pjoin(cwd,base,'grid_obs_*_in.root')):
3385 to_move.append(grid)
3386 if not os.path.exists(pjoin(cwd,current)):
3387 os.mkdir(pjoin(cwd,current))
3388 input_files.append(pjoin(cwd, current))
3389 for name in to_move:
3390 files.cp(pjoin(cwd,base, name),
3391 pjoin(cwd,current))
3392 files.cp(pjoin(cwd,base, 'grid.MC_integer'),
3393 pjoin(cwd,current))
3394
3395 elif args[0] == '2':
3396
3397 input_files.append(pjoin(cwd, 'madevent_mintMC'))
3398 if args[2] in ['0','2']:
3399 input_files.append(pjoin(self.me_dir, 'SubProcesses','madinMMC_%s.2' % args[1]))
3400
3401 for i in subdir:
3402 current = 'G%s%s' % (args[1], i)
3403 if os.path.exists(pjoin(cwd,current)):
3404 input_files.append(pjoin(cwd, current))
3405 output_files.append(current)
3406 if len(args) == 4 and args[3] in ['H','S','V','B','F']:
3407
3408 base = '%s_%s' % (args[3],i)
3409 files.ln(pjoin(cwd,base,'mint_grids'), name = 'preset_mint_grids',
3410 starting_dir=pjoin(cwd,current))
3411 files.ln(pjoin(cwd,base,'grid.MC_integer'),
3412 starting_dir=pjoin(cwd,current))
3413 elif len(args) ==4:
3414 keep_fourth_arg = True
3415
3416 output_files.append('G%s%s_%s' % (args[1], i, args[3]))
3417
3418 else:
3419 raise aMCatNLOError('Invalid arguments: %s' % (', '.join(args)))
3420
3421
3422 pdfinput = self.get_pdf_input_filename()
3423 if os.path.exists(pdfinput):
3424 input_files.append(pdfinput)
3425
3426 if len(args) == 4 and not keep_fourth_arg:
3427 args = args[:3]
3428
3429 return input_files, output_files, args
3430
3431 def write_madinMMC_file(self, path, run_mode, mint_mode):
3432 """writes the madinMMC_?.2 file"""
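# Illustrative example (hypothetical values): write_madinMMC_file(path, 'all', 0) would write
# 'madinMMC_F.2' (run_mode 'all' -> suffix F) with MINT imode 0, i.e. grid set-up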
3433
3434 run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB']
3435 if run_mode not in run_modes:
3436 raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
3437 % (run_mode, ', '.join(run_modes)))
3438 mint_modes = [0, 1, 2]
3439 if mint_mode not in mint_modes:
3440 raise aMCatNLOError('%s is not a valid mode for mintMC. Please use one of the following: %s' \
3441 % (mint_mode, ', '.join(str(m) for m in mint_modes)))
3442 if run_mode in ['born']:
3443 name_suffix = 'B'
3444 elif run_mode in ['virt', 'viSB']:
3445 name_suffix = 'V'
3446 else:
3447 name_suffix = 'F'
3448
3449 content = \
3450 """-1 12 ! points, iterations
3451 0.03 ! desired fractional accuracy
3452 1 -0.1 ! alpha, beta for Gsoft
3453 -1 -0.1 ! alpha, beta for Gazi
3454 1 ! Suppress amplitude (0 no, 1 yes)?
3455 1 ! Exact helicity sum (0 yes, n = number/event)?
3456 1 ! Enter Configuration Number:
3457 %1d ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
3458 1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
3459 %s ! all, born, real, virt
3460 """ \
3461 % (mint_mode, run_mode)
3462 file = open(pjoin(path, 'madinMMC_%s.2' % name_suffix), 'w')
3463 file.write(content)
3464 file.close()
3465
3466 def write_madin_file(self, path, run_mode, vegas_mode, npoints, niters, accuracy='0'):
3467 """writes the madin.run_mode file"""
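# Illustrative example (hypothetical values): write_madin_file(path, 'all', 1, '-1', '12') would
# write 'madin.all' with npoints=-1, niters=12, the default accuracy '0' and vegas mode 1 ('new run')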
3468
3469 run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB', 'grid']
3470 if run_mode not in run_modes:
3471 raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
3472 % (run_mode, ', '.join(run_modes)))
3473 name_suffix = run_mode
3474
3475 content = \
3476 """%s %s ! points, iterations
3477 %s ! accuracy
3478 2 ! 0 fixed grid 2 adjust
3479 1 ! 1 suppress amp, 0 doesnt
3480 1 ! 0 for exact hel sum
3481 1 ! hel configuration numb
3482 'test'
3483 1 ! 1 to save grids
3484 %s ! 0 to exclude, 1 for new run, 2 to restart, 3 to reset w/ keeping grid
3485 %s ! all, born, real, virt
3486 """ \
3487 % (npoints,niters,accuracy,vegas_mode,run_mode)
3488 file = open(pjoin(path, 'madin.%s' % name_suffix), 'w')
3489 file.write(content)
3490 file.close()
3491
3492 def compile(self, mode, options):
3493 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
3494 specified in mode"""
3495
3496 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
3497
3498 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
3499 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
3500
3501 self.get_characteristics(pjoin(self.me_dir,
3502 'SubProcesses', 'proc_characteristics'))
3503
3504
3505 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
3506 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
3507 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
3508 test_log = pjoin(self.me_dir, 'test.log')
3509
3510 self.update_status('Compiling the code', level=None, update_results=True)
3511
3512
3513 libdir = pjoin(self.me_dir, 'lib')
3514 sourcedir = pjoin(self.me_dir, 'Source')
3515
3516
3517 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
3518
3519 if '+' in mode:
3520 mode = mode.split('+')[0]
3521 if mode in ['NLO', 'LO']:
3522 exe = 'madevent_mintFO'
3523 tests = ['test_ME']
3524 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
3525 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
3526 exe = 'madevent_mintMC'
3527 tests = ['test_ME', 'test_MC']
3528
3529 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o\n')
3530
3531
3532 p_dirs = [d for d in \
3533 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
3534
3535 self.do_treatcards('', amcatnlo=True)
3536
3537
3538 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
3539 for p_dir in p_dirs]) and options['nocompile']:
3540 return
3541
3542
3543 if os.path.exists(pjoin(libdir, 'PDFsets')):
3544 files.rm(pjoin(libdir, 'PDFsets'))
3545
3546
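# PDF setup: if pdlabel is 'lhapdf' and at least one beam requires a PDF, link LHAPDF and copy
# the needed sets; otherwise fall back to the internal PDF library (or to no PDF for lepton beams)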
3547 if self.run_card['pdlabel'] == 'lhapdf' and \
3548 (self.banner.get_detail('run_card', 'lpp1') != '0' or \
3549 self.banner.get_detail('run_card', 'lpp2') != '0'):
3550
3551 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
3552 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3553 lhaid_list = [int(self.run_card['lhaid'])]
3554 if self.run_card['reweight_PDF'].lower() == '.true.':
3555 lhaid_list.append(int(self.run_card['PDF_set_min']))
3556 lhaid_list.append(int(self.run_card['PDF_set_max']))
3557 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3558
3559 else:
3560 if self.run_card['lpp1'] == '1' == self.run_card['lpp2']:
3561 logger.info('Using built-in libraries for PDFs')
3562 if self.run_card['lpp1'] == '0' == self.run_card['lpp2']:
3563 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.')
3564 try:
3565 del os.environ['lhapdf']
3566 except KeyError:
3567 pass
3568
3569
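# APPLgrid/aMCfast setup: when interpolation grids are requested (iappl != 0), check that the
# applgrid-config and amcfast-config tools are recent enough and patch APPLLIBS in Source/make_opts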
3570 if self.run_card['iappl'] != '0':
3571 os.environ['applgrid'] = 'True'
3572
3573 for code in ['applgrid','amcfast']:
3574 try:
3575 p = subprocess.Popen([self.options[code], '--version'], \
3576 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
3577 output, error = p.communicate()
3578 if code == 'applgrid' and output < '1.4.63':
3579 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
3580 +' You are using %s' % output)
3581 if code == 'amcfast' and output < '1.1.1':
3582 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
3583 +' You are using %s' % output)
3584 except Exception:
3585 raise aMCatNLOError(('No valid %s installation found. \n' + \
3586 'Please set the path to %s-config by using \n' + \
3587 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
3588
3589 appllibs=" APPLLIBS=$(shell %s --ldcflags) $(shell %s --ldflags) \n" \
3590 % (self.options['applgrid'],self.options['amcfast'])
3591 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
3592 text_out=[]
3593 for line in text:
3594 if line.strip().startswith('APPLLIBS=$'):
3595 line=appllibs
3596 text_out.append(line)
3597 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out)
3598 else:
3599 try:
3600 del os.environ['applgrid']
3601 except KeyError:
3602 pass
3603
3604 try:
3605 os.environ['fastjet_config'] = self.options['fastjet']
3606 except (TypeError, KeyError):
3607 if 'fastjet_config' in os.environ:
3608 del os.environ['fastjet_config']
3609 os.unsetenv('fastjet_config')
3610
3611
3612 self.update_status('Compiling source...', level=None)
3613 misc.compile(['clean4pdf'], cwd = sourcedir)
3614 misc.compile(cwd = sourcedir)
3615 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
3616 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
3617 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
3618 and os.path.exists(pjoin(libdir, 'libpdf.a')):
3619 logger.info(' ...done, continuing with P* directories')
3620 else:
3621 raise aMCatNLOError('Compilation failed')
3622
3623
3624 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
3625 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
3626 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
3627 if os.path.exists(pjoin(sourcedir,'StdHEP')):
3628 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
3629 misc.compile(['StdHEP'], cwd = sourcedir)
3630 logger.info(' ...done.')
3631 else:
3632 raise aMCatNLOError('Could not compile StdHEP because its'+\
3633 ' source directory could not be found in the SOURCE folder.\n'+\
3634 " Check the MG5_aMC option 'output_dependencies.'")
3635
3636
3637 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
3638 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
3639 if os.path.exists(pjoin(sourcedir,'CutTools')):
3640 logger.info('Compiling CutTools (can take a couple of minutes) ...')
3641 misc.compile(['CutTools'], cwd = sourcedir)
3642 logger.info(' ...done.')
3643 else:
3644 raise aMCatNLOError('Could not compile CutTools because its'+\
3645 ' source directory could not be found in the SOURCE folder.\n'+\
3646 " Check the MG5_aMC option 'output_dependencies.'")
3647 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
3648 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
3649 raise aMCatNLOError('CutTools compilation failed.')
3650
3651
3652
3653 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
3654 libdir, 'libcts.a')))),'compiler_version.log')
3655 if os.path.exists(compiler_log_path):
3656 compiler_version_used = open(compiler_log_path,'r').read()
3657 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
3658 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
3659 if os.path.exists(pjoin(sourcedir,'CutTools')):
3660 logger.info('CutTools was compiled with a different fortran'+\
3661 ' compiler. Re-compiling it now...')
3662 misc.compile(['cleanCT'], cwd = sourcedir)
3663 misc.compile(['CutTools'], cwd = sourcedir)
3664 logger.info(' ...done.')
3665 else:
3666 raise aMCatNLOError("CutTools installation in %s"\
3667 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
3668 " seems to have been compiled with a different compiler than"+\
3669 " the one specified in MG5_aMC. Please recompile CutTools.")
3670
3671
3672 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
3673 and os.path.exists(pjoin(sourcedir,'IREGI')):
3674 logger.info('Compiling IREGI (can take a couple of minutes) ...')
3675 misc.compile(['IREGI'], cwd = sourcedir)
3676 logger.info(' ...done.')
3677
3678 if os.path.exists(pjoin(libdir, 'libiregi.a')):
3679
3680
3681 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
3682 libdir, 'libiregi.a')))),'compiler_version.log')
3683 if os.path.exists(compiler_log_path):
3684 compiler_version_used = open(compiler_log_path,'r').read()
3685 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
3686 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
3687 if os.path.exists(pjoin(sourcedir,'IREGI')):
3688 logger.info('IREGI was compiled with a different fortran'+\
3689 ' compiler. Re-compiling it now...')
3690 misc.compile(['cleanIR'], cwd = sourcedir)
3691 misc.compile(['IREGI'], cwd = sourcedir)
3692 logger.info(' ...done.')
3693 else:
3694 raise aMCatNLOError("IREGI installation in %s"\
3695 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
3696 " seems to have been compiled with a different compiler than"+\
3697 " the one specified in MG5_aMC. Please recompile IREGI.")
3698
3699
3700 if self.proc_characteristics['has_loops'].lower() == 'true' and \
3701 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
3702 os.environ['madloop'] = 'true'
3703 if mode in ['NLO', 'aMC@NLO', 'noshower']:
3704 tests.append('check_poles')
3705 else:
3706 os.unsetenv('madloop')
3707
3708
3709 self.update_status('Compiling directories...', level=None)
3710
3711 for test in tests:
3712 self.write_test_input(test)
3713
3714 try:
3715 import multiprocessing
3716 if not self.nb_core:
3717 try:
3718 self.nb_core = int(self.options['nb_core'])
3719 except TypeError:
3720 self.nb_core = multiprocessing.cpu_count()
3721 except ImportError:
3722 self.nb_core = 1
3723
3724 compile_options = copy.copy(self.options)
3725 compile_options['nb_core'] = self.nb_core
3726 compile_cluster = cluster.MultiCore(**compile_options)
3727 logger.info('Compiling on %d cores' % self.nb_core)
3728
3729 update_status = lambda i, r, f: self.donothing(i,r,f)
3730 for p_dir in p_dirs:
3731 compile_cluster.submit(prog = compile_dir,
3732 argument = [self.me_dir, p_dir, mode, options,
3733 tests, exe, self.options['run_mode']])
3734 try:
3735 compile_cluster.wait(self.me_dir, update_status)
3736
3737 except:
3738 compile_cluster.remove()
3739 self.quit()
3740
3741 logger.info('Checking test output:')
3742 for p_dir in p_dirs:
3743 logger.info(p_dir)
3744 for test in tests:
3745 logger.info(' Result for %s:' % test)
3746
3747 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
3748
3749 self.check_tests(test, this_dir)
3750
3751
3752 def donothing(self, *args):
3753 pass
3754
3755
3756 def check_tests(self, test, dir):
3757 """just call the correct parser for the test log"""
3758 if test in ['test_ME', 'test_MC']:
3759 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
3760 elif test == 'check_poles':
3761 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
3762
3763
3764 def parse_test_mx_log(self, log):
3765 """read and parse the test_ME/MC.log file"""
3766 content = open(log).read()
3767 if 'FAILED' in content:
3768 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK')
3769 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
3770 'Please check that widths of final state particles (e.g. top) have been' + \
3771 ' set to 0 in the param_card.dat.')
3772 else:
3773 lines = [l for l in content.split('\n') if 'PASSED' in l]
3774 logger.info(' Passed.')
3775 logger.debug('\n'+'\n'.join(lines))
3776
3777
3778 def parse_check_poles_log(self, log):
3779 """reads and parses the check_poles.log file"""
3780 content = open(log).read()
3781 npass = 0
3782 nfail = 0
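# each tested phase-space point is reported as PASSED or FAILED together with the tolerance used;
# the run is aborted if more than 10% of the points fail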
3783 for line in content.split('\n'):
3784 if 'PASSED' in line:
3785 npass +=1
3786 tolerance = float(line.split()[1])
3787 if 'FAILED' in line:
3788 nfail +=1
3789 tolerance = float(line.split()[1])
3790
3791 if nfail + npass == 0:
3792 logger.warning('0 points have been tried')
3793 return
3794
3795 if float(nfail)/float(nfail+npass) > 0.1:
3796 raise aMCatNLOError('Poles do not cancel, run cannot continue')
3797 else:
3798 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
3799 %(npass, nfail+npass, tolerance))
3800
3801
3822
3823
3824
3825
3826 def find_model_name(self):
3827 """ return the model name """
3828 if hasattr(self, 'model_name'):
3829 return self.model_name
3830
3831 model = 'sm'
3832 proc = []
3833 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')):
3834 line = line.split('#')[0]
3835
3836 if line.startswith('import') and 'model' in line:
3837 model = line.split()[2]
3838 proc = []
3839 elif line.startswith('generate'):
3840 proc.append(line.split(None,1)[1])
3841 elif line.startswith('add process'):
3842 proc.append(line.split(None,2)[2])
3843
3844 self.model = model
3845 self.process = proc
3846 return model
3847
3848
3849
3850
3851 def ask_run_configuration(self, mode, options, switch={}):
3852 """Ask the question when launching generate_events/multi_run"""
3853
3854 if 'parton' not in options:
3855 options['parton'] = False
3856 if 'reweightonly' not in options:
3857 options['reweightonly'] = False
3858
3859
3860 void = 'NOT INSTALLED'
3861 switch_order = ['order', 'fixed_order', 'shower','madspin']
3862 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
3863 'madspin': void}
3864 if not switch:
3865 switch = switch_default
3866 else:
3867 switch.update(dict((k, v) for k, v in switch_default.items() if k not in switch))
3868
3869 default_switch = ['ON', 'OFF']
3870 allowed_switch_value = {'order': ['LO', 'NLO'],
3871 'fixed_order': default_switch,
3872 'shower': default_switch,
3873 'madspin': default_switch}
3874
3875 description = {'order': 'Perturbative order of the calculation:',
3876 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
3877 'shower': 'Shower the generated events:',
3878 'madspin': 'Decay particles with the MadSpin module:' }
3879
3880 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
3881 ('madspin', 'ON'): {'fixed_order':'OFF'},
3882 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF'}
3883 }
3884 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
3885
3886 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
3887
3888
3889 if mode == 'auto':
3890 mode = None
3891 if not mode and (options['parton'] or options['reweightonly']):
3892 mode = 'noshower'
3893
3894
3895 available_mode = ['0', '1', '2']
3896 available_mode.append('3')
3897 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
3898 switch['shower'] = 'ON'
3899 else:
3900 switch['shower'] = 'OFF'
3901
3902 if not aMCatNLO or self.options['mg5_path']:
3903 available_mode.append('4')
3904 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
3905 switch['madspin'] = 'ON'
3906 else:
3907 switch['madspin'] = 'OFF'
3908
3909 answers = list(available_mode) + ['auto', 'done']
3910 alias = {}
3911 for id, key in enumerate(switch_order):
3912 if switch[key] != void:
3913 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
3914
3915 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
3916 for s in allowed_switch_value[key]))
3917 answers += special_values
3918
3919 def create_question(switch):
3920 switch_format = " %i %-60s %12s=%s\n"
3921 question = "The following switches determine which operations are executed:\n"
3922 for id, key in enumerate(switch_order):
3923 question += switch_format % (id+1, description[key], key, switch[key])
3924 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1)
3925 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n'
3926 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n'
3927 return question
3928
3929
3930 def modify_switch(mode, answer, switch):
3931 if '=' in answer:
3932 key, status = answer.split('=')
3933 switch[key] = status
3934 if (key, status) in force_switch:
3935 for key2, status2 in force_switch[(key, status)].items():
3936 if switch[key2] not in [status2, void]:
3937 logger.info('For coherence \'%s\' is set to \'%s\''
3938 % (key2, status2), '$MG:color:BLACK')
3939 switch[key2] = status2
3940 elif answer in ['0', 'auto', 'done']:
3941 return
3942 elif answer in special_values:
3943 logger.info('Enter mode value: Go to the related mode', '$MG:color:BLACK')
3944 if answer == 'LO':
3945 switch['order'] = 'LO'
3946 switch['fixed_order'] = 'ON'
3947 assign_switch('shower', 'OFF')
3948 assign_switch('madspin', 'OFF')
3949 elif answer == 'NLO':
3950 switch['order'] = 'NLO'
3951 switch['fixed_order'] = 'ON'
3952 assign_switch('shower', 'OFF')
3953 assign_switch('madspin', 'OFF')
3954 elif answer == 'aMC@NLO':
3955 switch['order'] = 'NLO'
3956 switch['fixed_order'] = 'OFF'
3957 assign_switch('shower', 'ON')
3958 assign_switch('madspin', 'OFF')
3959 elif answer == 'aMC@LO':
3960 switch['order'] = 'LO'
3961 switch['fixed_order'] = 'OFF'
3962 assign_switch('shower', 'ON')
3963 assign_switch('madspin', 'OFF')
3964 elif answer == 'noshower':
3965 switch['order'] = 'NLO'
3966 switch['fixed_order'] = 'OFF'
3967 assign_switch('shower', 'OFF')
3968 assign_switch('madspin', 'OFF')
3969 elif answer == 'noshowerLO':
3970 switch['order'] = 'LO'
3971 switch['fixed_order'] = 'OFF'
3972 assign_switch('shower', 'OFF')
3973 assign_switch('madspin', 'OFF')
3974 if mode:
3975 return
3976 return switch
3977
3978
3979 modify_switch(mode, self.last_mode, switch)
3980 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
3981 assign_switch('madspin', 'ON')
3982
3983 if not self.force:
3984 answer = ''
3985 while answer not in ['0', 'done', 'auto', 'onlyshower']:
3986 question = create_question(switch)
3987 if mode:
3988 answer = mode
3989 else:
3990 answer = self.ask(question, '0', answers, alias=alias)
3991 if answer.isdigit() and answer != '0':
3992 key = switch_order[int(answer) - 1]
3993 opt1 = allowed_switch_value[key][0]
3994 opt2 = allowed_switch_value[key][1]
3995 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)
3996
3997 if not modify_switch(mode, answer, switch):
3998 break
3999
4000
4001 if not mode or mode == 'auto':
4002 if switch['order'] == 'LO':
4003 if switch['shower'] == 'ON':
4004 mode = 'aMC@LO'
4005 elif switch['fixed_order'] == 'ON':
4006 mode = 'LO'
4007 else:
4008 mode = 'noshowerLO'
4009 elif switch['order'] == 'NLO':
4010 if switch['shower'] == 'ON':
4011 mode = 'aMC@NLO'
4012 elif switch['fixed_order'] == 'ON':
4013 mode = 'NLO'
4014 else:
4015 mode = 'noshower'
4016 logger.info('will run in mode: %s' % mode)
4017
4018 if mode == 'noshower':
4019 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
4020 Please, shower the Les Houches events before using them for physics analyses.""")
4021
4022
4023
4024 cards = ['param_card.dat', 'run_card.dat']
4025 ignore = []
4026 if mode in ['LO', 'NLO']:
4027 options['parton'] = True
4028 ignore = ['shower_card.dat', 'madspin_card.dat']
4029 cards.append('FO_analyse_card.dat')
4030 elif switch['madspin'] == 'ON':
4031 cards.append('madspin_card.dat')
4032 if 'aMC@' in mode:
4033 cards.append('shower_card.dat')
4034 if mode == 'onlyshower':
4035 cards = ['shower_card.dat']
4036 if options['reweightonly']:
4037 cards = ['run_card.dat']
4038
4039 self.keep_cards(cards, ignore)
4040
4041 if mode =='onlyshower':
4042 cards = ['shower_card.dat']
4043
4044 if not options['force'] and not self.force:
4045 self.ask_edit_cards(cards, plot=False)
4046
4047 self.banner = banner_mod.Banner()
4048
4049
4050 for card in cards:
4051 self.banner.add(pjoin(self.me_dir, 'Cards', card))
4052
4053 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
4054 self.banner.add_text('run_settings', run_settings)
4055
4056 if not mode =='onlyshower':
4057 self.run_card = self.banner.charge_card('run_card')
4058 self.run_tag = self.run_card['run_tag']
4059
4060 if not hasattr(self, 'run_name') or not self.run_name:
4061 self.run_name = self.find_available_run_name(self.me_dir)
4062
4063 if self.run_name.startswith('run_'):
4064 if mode in ['LO','aMC@LO','noshowerLO']:
4065 self.run_name += '_LO'
4066 self.set_run_name(self.run_name, self.run_tag, 'parton')
4067 if int(self.run_card['ickkw']) == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4068 logger.error("""FxFx merging (ickkw=3) not allowed at LO""")
4069 raise self.InvalidCmd('FxFx merging (ickkw=3) not allowed at LO')
4070 elif int(self.run_card['ickkw']) == 3 and mode in ['aMC@NLO', 'noshower']:
4071 logger.warning("""You are running with FxFx merging enabled. To be able to merge
4072 samples of various multiplicities without double counting, you
4073 have to remove some events after showering 'by hand'. Please
4074 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
4075 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
4076 logger.error("""FxFx merging does not work with Q-squared ordered showers.""")
4077 raise self.InvalidCmd('FxFx merging does not work with Q-squared ordered showers.')
4078 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
4079 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
4080 "Type \'n\' to stop or \'y\' to continue"
4081 answers = ['n','y']
4082 answer = self.ask(question, 'n', answers, alias=alias)
4083 if answer == 'n':
4084 error = '''Stop operation'''
4085 self.ask_run_configuration(mode, options)
4086
4087 if 'aMC@' in mode or mode == 'onlyshower':
4088 self.shower_card = self.banner.charge_card('shower_card')
4089
4090 elif mode in ['LO', 'NLO']:
4091 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
4092 self.analyse_card = self.banner.charge_card('FO_analyse_card')
4093
4094
4095 return mode
4096
4097
4098
4099
4100
4102 """The command line processor of MadGraph"""
4103
4104 _compile_usage = "compile [MODE] [options]\n" + \
4105 "-- compiles aMC@NLO \n" + \
4106 " MODE can be either FO, for fixed-order computations, \n" + \
4107 " or MC for matching with parton-shower Monte Carlos. \n" + \
4108 " (if omitted, it is set to MC)\n"
4109 _compile_parser = misc.OptionParser(usage=_compile_usage)
4110 _compile_parser.add_option("-f", "--force", default=False, action='store_true',
4111 help="Use the cards present in the directory for the launch, without editing them")
4112
4113 _launch_usage = "launch [MODE] [options]\n" + \
4114 "-- execute aMC@NLO \n" + \
4115 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
4116 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
4117 " computation of the total cross-section and the filling of parton-level histograms \n" + \
4118 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
4119 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
4120 " event file is generated which will be showered with the MonteCarlo specified \n" + \
4121 " in the run_card.dat\n"
4122
4123 _launch_parser = misc.OptionParser(usage=_launch_usage)
4124 _launch_parser.add_option("-f", "--force", default=False, action='store_true',
4125 help="Use the cards present in the directory for the launch, without editing them")
4126 _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
4127 help="Submit the jobs on the cluster")
4128 _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
4129 help="Submit the jobs on multicore mode")
4130 _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
4131 help="Skip compilation. Ignored if no executable is found")
4132 _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
4133 help="Skip integration and event generation, just run reweight on the" + \
4134 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
4135 _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
4136 help="Stop the run after the parton level file generation (you need " + \
4137 "to shower the file in order to get physical results)")
4138 _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
4139 help="Skip grid set up, just generate events starting from " + \
4140 "the last available results")
4141 _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
4142 help="Provide a name to the run")
4143 _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
4144 help="For use with APPLgrid only: start from existing grids")
4145
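# Illustrative usage (assuming the standard bin/aMCatNLO wrapper of a process directory):
#   launch aMC@NLO -f -p -n my_run
# runs in aMC@NLO mode with the existing cards (-f), stops after the parton-level LHE file (-p)
# and names the run 'my_run' (-n)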
4146
4147 _generate_events_usage = "generate_events [MODE] [options]\n" + \
4148 "-- execute aMC@NLO \n" + \
4149 " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
4150 " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
4151 " computation of the total cross-section and the filling of parton-level histograms \n" + \
4152 " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
4153 " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
4154 " event file is generated which will be showered with the MonteCarlo specified \n" + \
4155 " in the run_card.dat\n"
4156
4157 _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
4158 _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
4159 help="Use the cards present in the directory for generate_events, without editing them")
4160 _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
4161 help="Submit the jobs on the cluster")
4162 _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
4163 help="Submit the jobs on multicore mode")
4164 _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
4165 help="Skip compilation. Ignored if no executable is found")
4166 _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
4167 help="Skip integration and event generation, just run reweight on the" + \
4168 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
4169 _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
4170 help="Stop the run after the parton level file generation (you need " + \
4171 "to shower the file in order to get physical results)")
4172 _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
4173 help="Skip grid set up, just generate events starting from " + \
4174 "the last available results")
4175 _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
4176 help="Provide a name to the run")
4177
4178
4179
4180 _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
4181 "-- calculate cross-section up to ORDER.\n" + \
4182 " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"
4183
4184 _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
4185 _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
4186 help="Use the cards present in the directory for the launch, without editing them")
4187 _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
4188 help="Submit the jobs on the cluster")
4189 _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
4190 help="Submit the jobs on multicore mode")
4191 _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
4192 help="Skip compilation. Ignored if no executable is found")
4193 _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
4194 help="Provide a name to the run")
4195 _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
4196 help="For use with APPLgrid only: start from existing grids")
4197 _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
4198 help="Skip grid set up, just generate events starting from " + \
4199 "the last available results")
4200
4201 _shower_usage = 'shower run_name [options]\n' + \
4202 '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
4203 ' all the information (e.g. number of events, MonteCarlo, ...)\n' + \
4204 ' is directly read from the header of the event file\n'
4205 _shower_parser = misc.OptionParser(usage=_shower_usage)
4206 _shower_parser.add_option("-f", "--force", default=False, action='store_true',
4207 help="Use the shower_card present in the directory for the launch, without editing it")
4208