1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39 import traceback
40 import StringIO
41
42 try:
43 import readline
44 GNU_SPLITTING = ('GNU' in readline.__doc__)
45 except:
46 GNU_SPLITTING = True
47
48 root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
49 root_path = os.path.split(root_path)[0]
50 sys.path.insert(0, os.path.join(root_path,'bin'))
51
52
53 pjoin = os.path.join
54
55 logger = logging.getLogger('madgraph.stdout')
56 logger_stderr = logging.getLogger('madgraph.stderr')
57
58 try:
59 import madgraph
60 except ImportError:
61 aMCatNLO = True
62 import internal.extended_cmd as cmd
63 import internal.common_run_interface as common_run
64 import internal.banner as banner_mod
65 import internal.misc as misc
66 from internal import InvalidCmd, MadGraph5Error
67 import internal.files as files
68 import internal.cluster as cluster
69 import internal.save_load_object as save_load_object
70 import internal.gen_crossxhtml as gen_crossxhtml
71 import internal.sum_html as sum_html
72 import internal.shower_card as shower_card
73 import internal.FO_analyse_card as analyse_card
74 import internal.histograms as histograms
75 else:
76
77 aMCatNLO = False
78 import madgraph.interface.extended_cmd as cmd
79 import madgraph.interface.common_run_interface as common_run
80 import madgraph.iolibs.files as files
81 import madgraph.iolibs.save_load_object as save_load_object
82 import madgraph.madevent.gen_crossxhtml as gen_crossxhtml
83 import madgraph.madevent.sum_html as sum_html
84 import madgraph.various.banner as banner_mod
85 import madgraph.various.cluster as cluster
86 import madgraph.various.misc as misc
87 import madgraph.various.shower_card as shower_card
88 import madgraph.various.FO_analyse_card as analyse_card
89 import madgraph.various.histograms as histograms
90 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error
91
94
95
97 """compile the direcory p_dir
98 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
99 this function needs not to be a class method in order to do
100 the compilation on multicore"""
101
102 if len(arguments) == 1:
103 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments[0]
104 elif len(arguments)==7:
105 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
106 else:
107 raise aMCatNLOError, 'not correct number of argument'
108 logger.info(' Compiling %s...' % p_dir)
109
110 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
111
112 try:
113
114
115 for test in tests:
116
117 if test == 'check_poles' and os.path.exists(pjoin(this_dir, 'parton_lum_0.f')):
118 continue
119 misc.compile([test], cwd = this_dir, job_specs = False)
120 input = pjoin(me_dir, '%s_input.txt' % test)
121
122 misc.call(['./%s' % (test)], cwd=this_dir,
123 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'))
124 if test == 'check_poles' and os.path.exists(pjoin(this_dir,'MadLoop5_resources')) :
125 tf=tarfile.open(pjoin(this_dir,'MadLoop5_resources.tar.gz'),'w:gz',
126 dereference=True)
127 tf.add(pjoin(this_dir,'MadLoop5_resources'),arcname='MadLoop5_resources')
128 tf.close()
129
130 if not options['reweightonly']:
131 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
132 open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
133 misc.call(['./gensym'],cwd= this_dir,
134 stdin=open(pjoin(this_dir, 'gensym_input.txt')),
135 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'))
136
137 misc.compile([exe], cwd=this_dir, job_specs = False)
138 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
139 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
140
141 logger.info(' %s done.' % p_dir)
142 return 0
143 except MadGraph5Error, msg:
144 return msg
145
146
148 """check that the current fortran compiler is gfortran 4.6 or later.
149 If block, stops the execution, otherwise just print a warning"""
150
151 msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
152 'gfortran 4.6 or later installed.\n%s has been detected\n'+\
153 'Note that You can still run all MadEvent run without any problem!'
154
155 if options['fortran_compiler']:
156 compiler = options['fortran_compiler']
157 elif misc.which('gfortran'):
158 compiler = 'gfortran'
159 else:
160 compiler = ''
161
162 if 'gfortran' not in compiler:
163 if block:
164 raise aMCatNLOError(msg % compiler)
165 else:
166 logger.warning(msg % compiler)
167 else:
168 curr_version = misc.get_gfortran_version(compiler)
169 if not ''.join(curr_version.split('.')) >= '46':
170 if block:
171 raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
172 else:
173 logger.warning(msg % (compiler + ' ' + curr_version))
174
175
176
177
178
179
181 """Particularisation of the cmd command for aMCatNLO"""
182
183
184 next_possibility = {
185 'start': [],
186 }
187
188 debug_output = 'ME5_debug'
189 error_debug = 'Please report this bug on https://bugs.launchpad.net/mg5amcnlo\n'
190 error_debug += 'More information is found in \'%(debug)s\'.\n'
191 error_debug += 'Please attach this file to your report.'
192
193 config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/mg5amcnlo\n'
194
195
196 keyboard_stop_msg = """stopping all operation
197 in order to quit MadGraph5_aMC@NLO please enter exit"""
198
199
200 InvalidCmd = InvalidCmd
201 ConfigurationError = aMCatNLOError
202
    def __init__(self, me_dir, options, *arg, **opt):
        """Init history and line continuation.

        me_dir:  path of the process output directory
        options: configuration dictionary forwarded to the parent cmd class
        Also prints the welcome banner and prepares the header written at
        the top of saved command-history files.
        """

        # whether questions should be answered with their defaults (set by -f)
        self.force = False

        # retrieve version/date from the package info when available,
        # otherwise fall back to the MGMEVersion.txt file
        info = misc.get_pkg_info()
        info_line = ""
        if info and info.has_key('version') and info.has_key('date'):
            len_version = len(info['version'])
            len_date = len(info['date'])
            if len_version + len_date < 30:
                info_line = "#* VERSION %s %s %s *\n" % \
                    (info['version'],
                    (30 - len_version - len_date) * ' ',
                    info['date'])
        else:
            version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
            info_line = "#* VERSION %s %s *\n" % \
                (version, (24 - len(version)) * ' ')

        # header prepended to every saved command-history file;
        # later formatted with misc.get_time_info() (see get_history_header)
        self.history_header = \
        '#************************************************************\n' + \
        '#* MadGraph5_aMC@NLO *\n' + \
        '#* *\n' + \
        "#* * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * * * 5 * * * * *\n" + \
        "#* * * * * *\n" + \
        "#* * * *\n" + \
        "#* *\n" + \
        "#* *\n" + \
        info_line + \
        "#* *\n" + \
        "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
        "#* and *\n" + \
        "#* http://amcatnlo.cern.ch *\n" + \
        '#* *\n' + \
        '#************************************************************\n' + \
        '#* *\n' + \
        '#* Command File for aMCatNLO *\n' + \
        '#* *\n' + \
        '#* run as ./bin/aMCatNLO.py filename *\n' + \
        '#* *\n' + \
        '#************************************************************\n'

        # drop the leading '#' so the line fits the welcome banner style below
        if info_line:
            info_line = info_line[1:]

        # interactive welcome banner
        logger.info(\
        "************************************************************\n" + \
        "* *\n" + \
        "* W E L C O M E to M A D G R A P H 5 *\n" + \
        "* a M C @ N L O *\n" + \
        "* *\n" + \
        "* * * *\n" + \
        "* * * * * *\n" + \
        "* * * * * 5 * * * * *\n" + \
        "* * * * * *\n" + \
        "* * * *\n" + \
        "* *\n" + \
        info_line + \
        "* *\n" + \
        "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
        "* http://amcatnlo.cern.ch *\n" + \
        "* *\n" + \
        "* Type 'help' for in-line help. *\n" + \
        "* *\n" + \
        "************************************************************")
        super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
279
281 """return the history header"""
282 return self.history_header % misc.get_time_info()
283
285 """action to perform to close nicely on a keyboard interupt"""
286 try:
287 if hasattr(self, 'cluster'):
288 logger.info('rm jobs on queue')
289 self.cluster.remove()
290 if hasattr(self, 'results'):
291 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
292 self.add_error_log_in_html(KeyboardInterrupt)
293 except:
294 pass
295
296 - def postcmd(self, stop, line):
297 """ Update the status of the run for finishing interactive command """
298
299
300 self.force = False
301
302 if not self.use_rawinput:
303 return stop
304
305
306 arg = line.split()
307 if len(arg) == 0:
308 return stop
309 elif str(arg[0]) in ['exit','quit','EOF']:
310 return stop
311
312 try:
313 self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
314 level=None, error=True)
315 except Exception:
316 misc.sprint('self.update_status fails', log=logger)
317 pass
318
324
330
336
337
338
339
340
341
343 """ The Series of help routine for the aMCatNLOCmd"""
344
348
350 logger.info("syntax: banner_run Path|RUN [--run_options]")
351 logger.info("-- Reproduce a run following a given banner")
352 logger.info(" One of the following argument is require:")
353 logger.info(" Path should be the path of a valid banner.")
354 logger.info(" RUN should be the name of a run of the current directory")
355 self.run_options_help([('-f','answer all question by default'),
356 ('--name=X', 'Define the name associated with the new run')])
357
358
362
367
368
372
376
377
379 logger.info("syntax: open FILE ")
380 logger.info("-- open a file with the appropriate editor.")
381 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
382 logger.info(' the path to the last created/used directory is used')
383
385 if data:
386 logger.info('-- local options:')
387 for name, info in data:
388 logger.info(' %s : %s' % (name, info))
389
390 logger.info("-- session options:")
391 logger.info(" Note that those options will be kept for the current session")
392 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
393 logger.info(" --multicore : Run in multi-core configuration")
394 logger.info(" --nb_core=X : limit the number of core to use to X.")
395
396
397
398
399
400
401
403 """ The Series of check routine for the aMCatNLOCmd"""
404
406 """Check the validity of the line. args[0] is the run_directory"""
407
408 if options['force']:
409 self.force = True
410
411 if len(args) == 0:
412 self.help_shower()
413 raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
414 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
415 raise self.InvalidCmd, 'Directory %s does not exists' % \
416 pjoin(os.getcwd(), 'Events', args[0])
417
418 self.set_run_name(args[0], level= 'shower')
419 args[0] = pjoin(self.me_dir, 'Events', args[0])
420
422 """Check the argument for the plot command
423 plot run_name modes"""
424
425
426 madir = self.options['madanalysis_path']
427 td = self.options['td_path']
428
429 if not madir or not td:
430 logger.info('Retry to read configuration file to find madanalysis/td')
431 self.set_configuration()
432
433 madir = self.options['madanalysis_path']
434 td = self.options['td_path']
435
436 if not madir:
437 error_msg = 'No Madanalysis path correctly set.'
438 error_msg += 'Please use the set command to define the path and retry.'
439 error_msg += 'You can also define it in the configuration file.'
440 raise self.InvalidCmd(error_msg)
441 if not td:
442 error_msg = 'No path to td directory correctly set.'
443 error_msg += 'Please use the set command to define the path and retry.'
444 error_msg += 'You can also define it in the configuration file.'
445 raise self.InvalidCmd(error_msg)
446
447 if len(args) == 0:
448 if not hasattr(self, 'run_name') or not self.run_name:
449 self.help_plot()
450 raise self.InvalidCmd('No run name currently define. Please add this information.')
451 args.append('all')
452 return
453
454
455 if args[0] not in self._plot_mode:
456 self.set_run_name(args[0], level='plot')
457 del args[0]
458 if len(args) == 0:
459 args.append('all')
460 elif not self.run_name:
461 self.help_plot()
462 raise self.InvalidCmd('No run name currently define. Please add this information.')
463
464 for arg in args:
465 if arg not in self._plot_mode and arg != self.run_name:
466 self.help_plot()
467 raise self.InvalidCmd('unknown options %s' % arg)
468
470 """Check the argument for pythia command
471 syntax: pgs [NAME]
472 Note that other option are already remove at this point
473 """
474
475
476 if not self.options['pythia-pgs_path']:
477 logger.info('Retry to read configuration file to find pythia-pgs path')
478 self.set_configuration()
479
480 if not self.options['pythia-pgs_path'] or not \
481 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
482 error_msg = 'No pythia-pgs path correctly set.'
483 error_msg += 'Please use the set command to define the path and retry.'
484 error_msg += 'You can also define it in the configuration file.'
485 raise self.InvalidCmd(error_msg)
486
487 tag = [a for a in arg if a.startswith('--tag=')]
488 if tag:
489 arg.remove(tag[0])
490 tag = tag[0][6:]
491
492
493 if len(arg) == 0 and not self.run_name:
494 if self.results.lastrun:
495 arg.insert(0, self.results.lastrun)
496 else:
497 raise self.InvalidCmd('No run name currently define. Please add this information.')
498
499 if len(arg) == 1 and self.run_name == arg[0]:
500 arg.pop(0)
501
502 if not len(arg) and \
503 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
504 self.help_pgs()
505 raise self.InvalidCmd('''No file file pythia_events.hep currently available
506 Please specify a valid run_name''')
507
508 lock = None
509 if len(arg) == 1:
510 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
511 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
512
513 if not filenames:
514 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
515 else:
516 input_file = filenames[0]
517 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
518 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
519 argument=['-c', input_file])
520 else:
521 if tag:
522 self.run_card['run_tag'] = tag
523 self.set_run_name(self.run_name, tag, 'pgs')
524
525 return lock
526
527
529 """Check the argument for pythia command
530 syntax: delphes [NAME]
531 Note that other option are already remove at this point
532 """
533
534
535 if not self.options['delphes_path']:
536 logger.info('Retry to read configuration file to find delphes path')
537 self.set_configuration()
538
539 if not self.options['delphes_path']:
540 error_msg = 'No delphes path correctly set.'
541 error_msg += 'Please use the set command to define the path and retry.'
542 error_msg += 'You can also define it in the configuration file.'
543 raise self.InvalidCmd(error_msg)
544
545 tag = [a for a in arg if a.startswith('--tag=')]
546 if tag:
547 arg.remove(tag[0])
548 tag = tag[0][6:]
549
550
551 if len(arg) == 0 and not self.run_name:
552 if self.results.lastrun:
553 arg.insert(0, self.results.lastrun)
554 else:
555 raise self.InvalidCmd('No run name currently define. Please add this information.')
556
557 if len(arg) == 1 and self.run_name == arg[0]:
558 arg.pop(0)
559
560 if not len(arg) and \
561 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
562 self.help_pgs()
563 raise self.InvalidCmd('''No file file pythia_events.hep currently available
564 Please specify a valid run_name''')
565
566 if len(arg) == 1:
567 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
568 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events'))
569
570
571 if not filenames:
572 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
573 % (self.run_name, prev_tag,
574 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
575 else:
576 input_file = filenames[0]
577 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
578 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
579 argument=['-c', input_file])
580 else:
581 if tag:
582 self.run_card['run_tag'] = tag
583 self.set_run_name(self.run_name, tag, 'delphes')
584
586 """check the validity of the line. args is ORDER,
587 ORDER being LO or NLO. If no mode is passed, NLO is used"""
588
589
590
591 if options['force']:
592 self.force = True
593
594 if not args:
595 args.append('NLO')
596 return
597
598 if len(args) > 1:
599 self.help_calculate_xsect()
600 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
601
602 elif len(args) == 1:
603 if not args[0] in ['NLO', 'LO']:
604 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
605 mode = args[0]
606
607
608 if options['multicore'] and options['cluster']:
609 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
610 ' are not compatible. Please choose one.'
611
612
614 """check the validity of the line. args is ORDER,
615 ORDER being LO or NLO. If no mode is passed, NLO is used"""
616
617
618
619 if not args:
620 args.append('NLO')
621 return
622
623 if len(args) > 1:
624 self.help_generate_events()
625 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
626
627 elif len(args) == 1:
628 if not args[0] in ['NLO', 'LO']:
629 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
630 mode = args[0]
631
632
633 if options['multicore'] and options['cluster']:
634 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
635 ' are not compatible. Please choose one.'
636
638 """check the validity of line"""
639
640 if len(args) == 0:
641 self.help_banner_run()
642 raise self.InvalidCmd('banner_run requires at least one argument.')
643
644 tag = [a[6:] for a in args if a.startswith('--tag=')]
645
646
647 if os.path.exists(args[0]):
648 type ='banner'
649 format = self.detect_card_type(args[0])
650 if format != 'banner':
651 raise self.InvalidCmd('The file is not a valid banner.')
652 elif tag:
653 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
654 (args[0], tag))
655 if not os.path.exists(args[0]):
656 raise self.InvalidCmd('No banner associates to this name and tag.')
657 else:
658 name = args[0]
659 type = 'run'
660 banners = misc.glob('*_banner.txt', pjoin(self.me_dir,'Events', args[0]))
661 if not banners:
662 raise self.InvalidCmd('No banner associates to this name.')
663 elif len(banners) == 1:
664 args[0] = banners[0]
665 else:
666
667 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
668 tag = self.ask('which tag do you want to use?', tags[0], tags)
669 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
670 (args[0], tag))
671
672 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
673 if run_name:
674 try:
675 self.exec_cmd('remove %s all banner -f' % run_name)
676 except Exception:
677 pass
678 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
679 elif type == 'banner':
680 self.set_run_name(self.find_available_run_name(self.me_dir))
681 elif type == 'run':
682 if not self.results[name].is_empty():
683 run_name = self.find_available_run_name(self.me_dir)
684 logger.info('Run %s is not empty so will use run_name: %s' % \
685 (name, run_name))
686 self.set_run_name(run_name)
687 else:
688 try:
689 self.exec_cmd('remove %s all banner -f' % run_name)
690 except Exception:
691 pass
692 self.set_run_name(name)
693
694
695
697 """check the validity of the line. args is MODE
698 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
699
700
701
702 if options['force']:
703 self.force = True
704
705
706 if not args:
707 args.append('auto')
708 return
709
710 if len(args) > 1:
711 self.help_launch()
712 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
713
714 elif len(args) == 1:
715 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
716 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
717 mode = args[0]
718
719
720 if options['multicore'] and options['cluster']:
721 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
722 ' are not compatible. Please choose one.'
723 if mode == 'NLO' and options['reweightonly']:
724 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
725
726
728 """check the validity of the line. args is MODE
729 MODE being FO or MC. If no mode is passed, MC is used"""
730
731
732
733 if options['force']:
734 self.force = True
735
736 if not args:
737 args.append('MC')
738 return
739
740 if len(args) > 1:
741 self.help_compile()
742 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
743
744 elif len(args) == 1:
745 if not args[0] in ['MC', 'FO']:
746 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
747 mode = args[0]
748
749
750
751
752
753
754
756 """ The Series of help routine for the MadGraphCmd"""
757
759 """auto-completion for launch command"""
760
761 args = self.split_arg(line[0:begidx])
762 if len(args) == 1:
763
764 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
765 elif len(args) == 2 and line[begidx-1] == '@':
766 return self.list_completion(text,['LO','NLO'],line)
767 else:
768 opts = []
769 for opt in _launch_parser.option_list:
770 opts += opt._long_opts + opt._short_opts
771 return self.list_completion(text, opts, line)
772
774 "Complete the banner run command"
775 try:
776
777
778 args = self.split_arg(line[0:begidx], error=False)
779
780 if args[-1].endswith(os.path.sep):
781 return self.path_completion(text,
782 os.path.join('.',*[a for a in args \
783 if a.endswith(os.path.sep)]))
784
785
786 if len(args) > 1:
787
788 tags = misc.glob('%s_*_banner.txt' % args[1],pjoin(self.me_dir, 'Events' , args[1]))
789 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
790
791 if args[-1] != '--tag=':
792 tags = ['--tag=%s' % t for t in tags]
793 else:
794 return self.list_completion(text, tags)
795 return self.list_completion(text, tags +['--name=','-f'], line)
796
797
798 possibilites = {}
799
800 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
801 if a.endswith(os.path.sep)]))
802 if os.path.sep in line:
803 return comp
804 else:
805 possibilites['Path from ./'] = comp
806
807 run_list = misc.glob(pjoin('*','*_banner.txt'), pjoin(self.me_dir, 'Events'))
808 run_list = [n.rsplit('/',2)[1] for n in run_list]
809 possibilites['RUN Name'] = self.list_completion(text, run_list)
810
811 return self.deal_multiple_categories(possibilites)
812
813
814 except Exception, error:
815 print error
816
817
830
843
845 """auto-completion for generate_events command
846 call the compeltion for launch"""
847 self.complete_launch(text, line, begidx, endidx)
848
849
859
875
877 "Complete the pgs command"
878 args = self.split_arg(line[0:begidx], error=False)
879 if len(args) == 1:
880
881 data = misc.glob(pjoin('*', 'events_*.hep.gz'),
882 pjoin(self.me_dir, 'Events'))
883 data = [n.rsplit('/',2)[1] for n in data]
884 tmp1 = self.list_completion(text, data)
885 if not self.run_name:
886 return tmp1
887 else:
888 tmp2 = self.list_completion(text, self._run_options + ['-f',
889 '--tag=' ,'--no_default'], line)
890 return tmp1 + tmp2
891 else:
892 return self.list_completion(text, self._run_options + ['-f',
893 '--tag=','--no_default'], line)
894
895 complete_delphes = complete_pgs
896
899
900
901
902
903 -class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
904 """The command line processor of MadGraph"""
905
906
907 true = ['T','.true.',True,'true']
908
909 _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
910 _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
911 _calculate_decay_options = ['-f', '--accuracy=0.']
912 _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
913 _plot_mode = ['all', 'parton','shower','pgs','delphes']
914 _clean_mode = _plot_mode + ['channel', 'banner']
915 _display_opts = ['run_name', 'options', 'variable']
916
917
918 web = False
919 cluster_mode = 0
920 queue = 'madgraph'
921 nb_core = None
922 make_opts_var = {}
923
924 next_possibility = {
925 'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
926 'help generate_events'],
927 'generate_events': ['generate_events [OPTIONS]', 'shower'],
928 'launch': ['launch [OPTIONS]', 'shower'],
929 'shower' : ['generate_events [OPTIONS]']
930 }
931
932
933
934 - def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
935 """ add information to the cmd """
936
937 self.start_time = 0
938 CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)
939
940
941 self.mode = 'aMCatNLO'
942 self.nb_core = 0
943 self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))
944
945
946 self.load_results_db()
947 self.results.def_web_mode(self.web)
948
949 proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()
950
951 if not '[real=QCD]' in proc_card:
952 check_compiler(self.options, block=True)
953
954
955
957 """ run the shower on a given parton level file """
958 argss = self.split_arg(line)
959 (options, argss) = _launch_parser.parse_args(argss)
960
961 options = options.__dict__
962 options['reweightonly'] = False
963 self.check_shower(argss, options)
964 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
965 self.ask_run_configuration('onlyshower', options)
966 self.run_mcatnlo(evt_file, options)
967
968 self.update_status('', level='all', update_results=True)
969
970
972 """Create the plot for a given run"""
973
974
975 args = self.split_arg(line)
976
977 self.check_plot(args)
978 logger.info('plot for run %s' % self.run_name)
979
980 if not self.force:
981 self.ask_edit_cards([], args, plot=True)
982
983 if any([arg in ['parton'] for arg in args]):
984 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
985 if os.path.exists(filename+'.gz'):
986 misc.gunzip(filename)
987 if os.path.exists(filename):
988 logger.info('Found events.lhe file for run %s' % self.run_name)
989 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
990 self.create_plot('parton')
991 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
992 misc.gzip(filename)
993
994 if any([arg in ['all','parton'] for arg in args]):
995 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
996 if os.path.exists(filename):
997 logger.info('Found MADatNLO.top file for run %s' % \
998 self.run_name)
999 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
1000 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
1001
1002 if not os.path.isdir(plot_dir):
1003 os.makedirs(plot_dir)
1004 top_file = pjoin(plot_dir, 'plots.top')
1005 files.cp(filename, top_file)
1006 madir = self.options['madanalysis_path']
1007 tag = self.run_card['run_tag']
1008 td = self.options['td_path']
1009 misc.call(['%s/plot' % self.dirbin, madir, td],
1010 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1011 stderr = subprocess.STDOUT,
1012 cwd=plot_dir)
1013
1014 misc.call(['%s/plot_page-pl' % self.dirbin,
1015 os.path.basename(plot_dir),
1016 'parton'],
1017 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1018 stderr = subprocess.STDOUT,
1019 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1020 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1021 output)
1022
1023 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1024
1025 if any([arg in ['all','shower'] for arg in args]):
1026 filenames = misc.glob('events_*.lhe.gz', pjoin(self.me_dir, 'Events', self.run_name))
1027 if len(filenames) != 1:
1028 filenames = misc.glob('events_*.hep.gz', pjoin(self.me_dir, 'Events', self.run_name))
1029 if len(filenames) != 1:
1030 logger.info('No shower level file found for run %s' % \
1031 self.run_name)
1032 return
1033 filename = filenames[0]
1034 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1035
1036 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1037 if aMCatNLO and not self.options['mg5_path']:
1038 raise "plotting NLO HEP file needs MG5 utilities"
1039
1040 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1041 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1042 self.run_hep2lhe()
1043 else:
1044 filename = filenames[0]
1045 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1046
1047 self.create_plot('shower')
1048 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1049 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1050 lhe_file_name)
1051 misc.gzip(lhe_file_name)
1052
1053 if any([arg in ['all','pgs'] for arg in args]):
1054 filename = pjoin(self.me_dir, 'Events', self.run_name,
1055 '%s_pgs_events.lhco' % self.run_tag)
1056 if os.path.exists(filename+'.gz'):
1057 misc.gunzip(filename)
1058 if os.path.exists(filename):
1059 self.create_plot('PGS')
1060 misc.gzip(filename)
1061 else:
1062 logger.info('No valid files for pgs plot')
1063
1064 if any([arg in ['all','delphes'] for arg in args]):
1065 filename = pjoin(self.me_dir, 'Events', self.run_name,
1066 '%s_delphes_events.lhco' % self.run_tag)
1067 if os.path.exists(filename+'.gz'):
1068 misc.gunzip(filename)
1069 if os.path.exists(filename):
1070
1071 self.create_plot('Delphes')
1072
1073 misc.gzip(filename)
1074 else:
1075 logger.info('No valid files for delphes plot')
1076
1077
1078
1080 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1081 this function wraps the do_launch one"""
1082
1083 self.start_time = time.time()
1084 argss = self.split_arg(line)
1085
1086 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1087 options = options.__dict__
1088 options['reweightonly'] = False
1089 options['parton'] = True
1090 self.check_calculate_xsect(argss, options)
1091 self.do_launch(line, options, argss)
1092
1093
1095 """Make a run from the banner file"""
1096
1097 args = self.split_arg(line)
1098
1099 self.check_banner_run(args)
1100
1101
1102 for name in ['shower_card.dat', 'madspin_card.dat']:
1103 try:
1104 os.remove(pjoin(self.me_dir, 'Cards', name))
1105 except Exception:
1106 pass
1107
1108 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1109
1110
1111 if not self.force:
1112 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1113 if ans == 'n':
1114 self.force = True
1115
1116
1117 if self.force:
1118 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1119 banner = banner_mod.Banner(args[0])
1120 for line in banner['run_settings']:
1121 if '=' in line:
1122 mode, value = [t.strip() for t in line.split('=')]
1123 mode_status[mode] = value
1124 else:
1125 mode_status = {}
1126
1127
1128 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1129 switch=mode_status)
1130
1131
1133 """Main commands: generate events
1134 this function just wraps the do_launch one"""
1135 self.do_launch(line)
1136
1137
1138
1140 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1141
1142 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1143 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1144
1145
1147 """assign all configuration variable from file
1148 loop over the different config file if config_file not define """
1149 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1150
1151
1152 - def do_launch(self, line, options={}, argss=[], switch={}):
1153 """Main commands: launch the full chain
1154 options and args are relevant if the function is called from other
1155 functions, such as generate_events or calculate_xsect
1156 mode gives the list of switch needed for the computation (usefull for banner_run)
1157 """
1158
1159 if not argss and not options:
1160 self.start_time = time.time()
1161 argss = self.split_arg(line)
1162
1163 (options, argss) = _launch_parser.parse_args(argss)
1164 options = options.__dict__
1165 self.check_launch(argss, options)
1166
1167
1168 if 'run_name' in options.keys() and options['run_name']:
1169 self.run_name = options['run_name']
1170
1171
1172 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1173 logger.warning('Removing old run information in \n'+
1174 pjoin(self.me_dir, 'Events', self.run_name))
1175 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1176 self.results.delete_run(self.run_name)
1177 else:
1178 self.run_name = ''
1179
1180 if options['multicore']:
1181 self.cluster_mode = 2
1182 elif options['cluster']:
1183 self.cluster_mode = 1
1184
1185 if not switch:
1186 mode = argss[0]
1187
1188 if mode in ['LO', 'NLO']:
1189 options['parton'] = True
1190 mode = self.ask_run_configuration(mode, options)
1191 else:
1192 mode = self.ask_run_configuration('auto', options, switch)
1193
1194 self.results.add_detail('run_mode', mode)
1195
1196 self.update_status('Starting run', level=None, update_results=True)
1197
1198 if self.options['automatic_html_opening']:
1199 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1200 self.options['automatic_html_opening'] = False
1201
1202 if '+' in mode:
1203 mode = mode.split('+')[0]
1204 self.compile(mode, options)
1205 evt_file = self.run(mode, options)
1206
1207 if self.run_card['nevents'] == 0 and not mode in ['LO', 'NLO']:
1208 logger.info('No event file generated: grids have been set-up with a '\
1209 'relative precision of %s' % self.run_card['req_acc'])
1210 return
1211
1212 if not mode in ['LO', 'NLO']:
1213 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1214 self.exec_cmd('reweight -from_cards', postcmd=False)
1215 self.exec_cmd('decay_events -from_cards', postcmd=False)
1216 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1217
1218 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1219 and not options['parton']:
1220 self.run_mcatnlo(evt_file, options)
1221 elif mode == 'noshower':
1222 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1223 Please, shower the Les Houches events before using them for physics analyses.""")
1224
1225
1226 self.update_status('', level='all', update_results=True)
1227 if self.run_card['ickkw'] == 3 and \
1228 (mode in ['noshower'] or \
1229 (('PYTHIA8' not in self.run_card['parton_shower'].upper()) and (mode in ['aMC@NLO']))):
1230 logger.warning("""You are running with FxFx merging enabled.
1231 To be able to merge samples of various multiplicities without double counting,
1232 you have to remove some events after showering 'by hand'.
1233 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1234
1235 self.store_result()
1236
1237 if self.param_card_iterator:
1238 param_card_iterator = self.param_card_iterator
1239 self.param_card_iterator = []
1240 param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
1241 orig_name = self.run_name
1242
1243 with misc.TMP_variable(self, 'allow_notification_center', False):
1244 for i,card in enumerate(param_card_iterator):
1245 card.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1246 if not options['force']:
1247 options['force'] = True
1248 if options['run_name']:
1249 options['run_name'] = '%s_%s' % (orig_name, i+1)
1250 if not argss:
1251 argss = [mode, "-f"]
1252 elif argss[0] == "auto":
1253 argss[0] = mode
1254 self.do_launch("", options=options, argss=argss, switch=switch)
1255
1256 param_card_iterator.store_entry(self.run_name, self.results.current['cross'])
1257
1258 param_card_iterator.write(pjoin(self.me_dir,'Cards','param_card.dat'))
1259 name = misc.get_scan_name(orig_name, self.run_name)
1260 path = pjoin(self.me_dir, 'Events','scan_%s.txt' % name)
1261 logger.info("write all cross-section results in %s" % path, '$MG:color:BLACK')
1262 param_card_iterator.write_summary(path)
1263
1264 if self.allow_notification_center:
1265 misc.apple_notify('Run %s finished' % os.path.basename(self.me_dir),
1266 '%s: %s +- %s ' % (self.results.current['run_name'],
1267 self.results.current['cross'],
1268 self.results.current['error']))
1269
1270
1271
1273 """Advanced commands: just compile the executables """
1274 argss = self.split_arg(line)
1275
1276 (options, argss) = _compile_parser.parse_args(argss)
1277 options = options.__dict__
1278 options['reweightonly'] = False
1279 options['nocompile'] = False
1280 self.check_compile(argss, options)
1281
1282 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1283 self.ask_run_configuration(mode, options)
1284 self.compile(mode, options)
1285
1286
1287 self.update_status('', level='all', update_results=True)
1288
1289
1291 """Update random number seed with the value from the run_card.
1292 If this is 0, update the number according to a fresh one"""
1293 iseed = self.run_card['iseed']
1294 if iseed == 0:
1295 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1296 iseed = int(randinit.read()[2:]) + 1
1297 randinit.close()
1298 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1299 randinit.write('r=%d' % iseed)
1300 randinit.close()
1301
1302
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created"""
        # mode is one of 'LO', 'NLO' (fixed order) or 'aMC@NLO', 'aMC@LO',
        # 'noshower', 'noshowerLO' (event generation).  For fixed order this
        # returns None; for event generation it returns the path of the
        # collected event file (via reweight_and_collect_events).
        logger.info('Starting run')

        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # in the second APPLgrid pass (iappl == 2) the grids already exist,
        # so force 'only_generation' to reuse the previous integration setup
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == 2 and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))
        self.setup_cluster_or_multicore()
        self.update_random_seed()
        # glob patterns of the channel directories produced by each run mode
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                        'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        # list of P* subprocess directories, one per line in subproc.mg
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
        # remove results left over from a previous run of the same mode
        self.clean_previous_results(options,p_dirs,folder_names[mode])

        # human-readable labels for the three MINT steps (0, 1, 2)
        mcatnlo_status = ['Setting up grids', 'Computing upper envelope', 'Generating events']

        if options['reweightonly']:
            # skip integration entirely; just reweight/collect existing events
            event_norm=self.run_card['event_norm']
            nevents=self.run_card['nevents']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        devnull = os.open(os.devnull, os.O_RDWR)

        if mode in ['LO', 'NLO']:
            # ---- fixed-order branch ----
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']

            # re-distribute the starting grids for the 2nd APPLgrid pass
            if self.run_card['iappl'] == 2:
                self.applgrid_distribute(options,mode_dict[mode],p_dirs)

            # build the list of job dictionaries to be run
            integration_step=-1
            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                req_acc,mode_dict[mode],integration_step,mode,fixed_order=True)
            self.prepare_directories(jobs_to_run,mode)

            # iterate: run, collect, refine — until collect_the_results
            # returns no jobs that still need more accuracy
            while True:
                integration_step=integration_step+1
                self.run_all_jobs(jobs_to_run,integration_step)
                self.collect_log_files(jobs_to_run,integration_step)
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,integration_step,mode,mode_dict[mode])
                if not jobs_to_run:
                    # required accuracy reached for every channel
                    break
            # combine plots/grids and finish; fixed order returns no event file
            self.finalise_run_FO(folder_names[mode],jobs_to_collect)
            self.update_status('Run complete', level='parton', update_results=True)
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            # ---- event-generation branch ----
            if self.ninitial == 1:
                raise aMCatNLOError('Decay processes can only be run at fixed order.')
            mode_dict = {'aMC@NLO': 'all', 'aMC@LO': 'born',\
                         'noshower': 'all', 'noshowerLO': 'born'}
            shower = self.run_card['parton_shower'].upper()
            nevents = self.run_card['nevents']
            req_acc = self.run_card['req_acc']
            if nevents == 0 and req_acc < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value '\
                                        'between 0 and 1')
            elif req_acc >1 or req_acc == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current '\
                                        'value is %f' % req_acc)
            # for very large samples cap the automatic accuracy at 0.1%
            elif req_acc < 0 and nevents > 1000000 :
                req_acc=0.001

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. '\
                                    'Please use one of the following: %s' \
                                    % (shower, ', '.join(shower_list)))

            # PYTHIA6PT cannot handle final-state radiation processes
            if shower == 'PYTHIA6PT' and self.proc_characteristics['has_fsr']:
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            jobs_to_run,jobs_to_collect,integration_step = self.create_jobs_to_run(options,p_dirs, \
                                        req_acc,mode_dict[mode],1,mode,fixed_order=False)

            # when restarting from existing grids, re-collect step-1 results;
            # otherwise set-up fresh G* directories
            if options['only_generation']:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,1,mode,mode_dict[mode],fixed_order=False)
            else:
                self.prepare_directories(jobs_to_run,mode,fixed_order=False)

            # main loop over the three MINT steps
            # ('only_generation' skips straight to step 2, event generation)
            for mint_step, status in enumerate(mcatnlo_status):
                if options['only_generation'] and mint_step < 2:
                    continue
                self.update_status(status, level='parton')
                self.run_all_jobs(jobs_to_run,mint_step,fixed_order=False)
                self.collect_log_files(jobs_to_run,mint_step)
                if mint_step+1==2 and nevents==0:
                    # grids set-up only: nothing to generate, print and stop
                    self.print_summary(options,2,mode)
                    return
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run, \
                                jobs_to_collect,mint_step,mode,mode_dict[mode],fixed_order=False)

            # sanity check on the produced event files
            self.check_event_files(jobs_to_collect)

            if self.cluster_mode == 1:
                # give cluster nodes time to transfer the event files back
                self.update_status(
                    'Waiting while files are transferred back from the cluster nodes',
                    level='parton')
                time.sleep(10)

            event_norm=self.run_card['event_norm']
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1446
    def create_jobs_to_run(self,options,p_dirs,req_acc,run_mode,\
                           integration_step,mode,fixed_order=True):
        """Creates a list of dictionaries with all the jobs to be run"""
        # Returns (jobs_to_run, jobs_to_collect, integration_step).  Each
        # job dict carries at least: p_dir, channel, split, run_mode,
        # wgt_frac and (fresh runs) accuracy/niters/npoints/mint_mode.
        jobs_to_run=[]
        if not options['only_generation']:
            # fresh run: one job per channel listed in each P*/channels.txt,
            # initialised with the default grid-setup inputs
            npoints = self.run_card['npoints_FO_grid']
            niters = self.run_card['niters_FO_grid']
            for p_dir in p_dirs:
                try:
                    with open(pjoin(self.me_dir,'SubProcesses',p_dir,'channels.txt')) as chan_file:
                        channels=chan_file.readline().split()
                except IOError:
                    # contribution without channels: skip it (best effort)
                    logger.warning('No integration channels found for contribution %s' % p_dir)
                    continue
                for channel in channels:
                    job={}
                    job['p_dir']=p_dir
                    job['channel']=channel
                    job['split']=0
                    if fixed_order and req_acc == -1:
                        # fixed iterations/points taken from the run_card
                        job['accuracy']=0
                        job['niters']=niters
                        job['npoints']=npoints
                    elif fixed_order and req_acc > 0:
                        # accuracy-driven fixed-order run: loose first pass
                        job['accuracy']=0.10
                        job['niters']=6
                        job['npoints']=-1
                    elif not fixed_order:
                        # (N)LO+PS grid set-up defaults
                        job['accuracy']=0.03
                        job['niters']=12
                        job['npoints']=-1
                    else:
                        raise aMCatNLOError('No consistent "req_acc_FO" set. Use a value '+
                                            'between 0 and 1 or set it equal to -1.')
                    job['mint_mode']=0
                    job['run_mode']=run_mode
                    job['wgt_frac']=1.0
                    jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run)
        else:
            # restart: rebuild the job list from the existing channel
            # directories on disk (born_G*/all_G* for fixed order,
            # GB*/GF* for event generation, possibly with _<split> suffix)
            name_suffix={'born' :'B', 'all':'F'}
            for p_dir in p_dirs:
                for chan_dir in os.listdir(pjoin(self.me_dir,'SubProcesses',p_dir)):
                    if ((chan_dir.startswith(run_mode+'_G') and fixed_order) or\
                        (chan_dir.startswith('G'+name_suffix[run_mode]) and (not fixed_order))) and \
                       (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)) or \
                        os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir))):
                        job={}
                        job['p_dir']=p_dir
                        if fixed_order:
                            # e.g. 'all_G12_3' -> channel '12', split 3
                            channel=chan_dir.split('_')[1]
                            job['channel']=channel[1:]
                            if len(chan_dir.split('_')) == 3:
                                split=int(chan_dir.split('_')[2])
                            else:
                                split=0
                        else:
                            # e.g. 'GF12_3' -> channel '12', split 3
                            if len(chan_dir.split('_')) == 2:
                                split=int(chan_dir.split('_')[1])
                                channel=chan_dir.split('_')[0]
                                job['channel']=channel[2:]
                            else:
                                job['channel']=chan_dir[2:]
                                split=0
                        job['split']=split
                        job['run_mode']=run_mode
                        job['dirname']=pjoin(self.me_dir, 'SubProcesses', p_dir, chan_dir)
                        job['wgt_frac']=1.0
                        if not fixed_order: job['mint_mode']=1
                        jobs_to_run.append(job)
            jobs_to_collect=copy.copy(jobs_to_run)
            if fixed_order:
                jobs_to_run,jobs_to_collect=self.collect_the_results(options,req_acc,jobs_to_run,
                                                 jobs_to_collect,integration_step,mode,run_mode)
                # advance integration_step past the existing res_*.dat files
                # so previous results are not overwritten
                integration_step=1
                for job in jobs_to_run:
                    while os.path.exists(pjoin(job['dirname'],'res_%s.dat' % integration_step)):
                        integration_step=integration_step+1
                integration_step=integration_step-1
            else:
                self.append_the_results(jobs_to_collect,integration_step)
        return jobs_to_run,jobs_to_collect,integration_step
1536
1538 """Set-up the G* directories for running"""
1539 name_suffix={'born' :'B' , 'all':'F'}
1540 for job in jobs_to_run:
1541 if job['split'] == 0:
1542 if fixed_order :
1543 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1544 job['run_mode']+'_G'+job['channel'])
1545 else:
1546 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1547 'G'+name_suffix[job['run_mode']]+job['channel'])
1548 else:
1549 if fixed_order :
1550 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1551 job['run_mode']+'_G'+job['channel']+'_'+str(job['split']))
1552 else:
1553 dirname=pjoin(self.me_dir,'SubProcesses',job['p_dir'],
1554 'G'+name_suffix[job['run_mode']]+job['channel']+'_'+str(job['split']))
1555 job['dirname']=dirname
1556 if not os.path.isdir(dirname):
1557 os.makedirs(dirname)
1558 self.write_input_file(job,fixed_order)
1559 if not fixed_order:
1560
1561 if job['split'] != 0:
1562 for f in ['grid.MC_integer','mint_grids','res_1']:
1563 if not os.path.isfile(pjoin(job['dirname'],f)):
1564 files.ln(pjoin(job['dirname'].rsplit("_",1)[0],f),job['dirname'])
1565
1566
1599
1600
1601 - def run_all_jobs(self,jobs_to_run,integration_step,fixed_order=True):
1602 """Loops over the jobs_to_run and executes them using the function 'run_exe'"""
1603 if fixed_order:
1604 if integration_step == 0:
1605 self.update_status('Setting up grids', level=None)
1606 else:
1607 self.update_status('Refining results, step %i' % integration_step, level=None)
1608 self.ijob = 0
1609 name_suffix={'born' :'B', 'all':'F'}
1610 if fixed_order:
1611 run_type="Fixed order integration step %s" % integration_step
1612 else:
1613 run_type="MINT step %s" % integration_step
1614 self.njobs=len(jobs_to_run)
1615 for job in jobs_to_run:
1616 executable='ajob1'
1617 if fixed_order:
1618 arguments=[job['channel'],job['run_mode'], \
1619 str(job['split']),str(integration_step)]
1620 else:
1621 arguments=[job['channel'],name_suffix[job['run_mode']], \
1622 str(job['split']),str(integration_step)]
1623 self.run_exe(executable,arguments,run_type,
1624 cwd=pjoin(self.me_dir,'SubProcesses',job['p_dir']))
1625
1626 if self.cluster_mode == 2:
1627 time.sleep(1)
1628 self.wait_for_complete(run_type)
1629
1630
    def collect_the_results(self,options,req_acc,jobs_to_run,jobs_to_collect,\
                            integration_step,mode,run_mode,fixed_order=True):
        """Collect the results, make HTML pages, print the summary and
        determine if there are more jobs to run. Returns the list
        of the jobs that still need to be run, as well as the
        complete list of jobs that need to be collected to get the
        final answer.
        """
        # read the res_*.dat results of this step into the job dicts and
        # write the combined res_<step>.txt summary file
        self.append_the_results(jobs_to_run,integration_step)
        self.cross_sect_dict = self.write_res_txt_file(jobs_to_collect,integration_step)
        # regenerate the HTML results pages and store cross/error
        if fixed_order:
            cross, error = sum_html.make_all_html_results(self, ['%s*' % run_mode])
        else:
            name_suffix={'born' :'B' , 'all':'F'}
            cross, error = sum_html.make_all_html_results(self, ['G%s*' % name_suffix[run_mode]])
        self.results.add_detail('cross', cross)
        self.results.add_detail('error', error)
        # decide which jobs need another iteration (and with what inputs)
        jobs_to_run_new=self.update_jobs_to_run(req_acc,integration_step,jobs_to_run,fixed_order)

        if (not jobs_to_run_new) and fixed_order:
            # fixed order and converged: print the final summary
            # (including scale/PDF uncertainties) and stop here
            scale_pdf_info=self.collect_scale_pdf_info(options,jobs_to_collect)
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=True)
            return jobs_to_run_new,jobs_to_collect
        elif jobs_to_run_new:
            # more work to do: print an intermediate summary
            scale_pdf_info=[]
            self.print_summary(options,integration_step,mode,scale_pdf_info,done=False)
        else:
            # (N)LO+PS and done: the summary is printed later, after the
            # events have been reweighted and collected
            scale_pdf_info=[]

        if (not fixed_order) and integration_step+1 == 2 :
            # next MINT step is event generation: possibly split jobs over
            # several directories and write the event-count bookkeeping files
            jobs_to_run_new,jobs_to_collect_new= \
                self.check_the_need_to_split(jobs_to_run_new,jobs_to_collect)
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            self.write_nevents_unweighted_file(jobs_to_collect_new,jobs_to_collect)
            self.write_nevts_files(jobs_to_run_new)
        else:
            self.prepare_directories(jobs_to_run_new,mode,fixed_order)
            jobs_to_collect_new=jobs_to_collect
        return jobs_to_run_new,jobs_to_collect_new
1680
1681
1683 """writes the nevents_unweighted file in the SubProcesses directory.
1684 We also need to write the jobs that will generate 0 events,
1685 because that makes sure that the cross section from those channels
1686 is taken into account in the event weights (by collect_events.f).
1687 """
1688 content=[]
1689 for job in jobs:
1690 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1691 lhefile=pjoin(path,'events.lhe')
1692 content.append(' %s %d %9e %9e' % \
1693 (lhefile.ljust(40),job['nevents'],job['resultABS']*job['wgt_frac'],job['wgt_frac']))
1694 for job in jobs0events:
1695 if job['nevents']==0:
1696 path=pjoin(job['dirname'].split('/')[-2],job['dirname'].split('/')[-1])
1697 lhefile=pjoin(path,'events.lhe')
1698 content.append(' %s %d %9e %9e' % \
1699 (lhefile.ljust(40),job['nevents'],job['resultABS'],1.))
1700 with open(pjoin(self.me_dir,'SubProcesses',"nevents_unweighted"),'w') as f:
1701 f.write('\n'.join(content)+'\n')
1702
1704 """write the nevts files in the SubProcesses/P*/G*/ directories"""
1705 for job in jobs:
1706 with open(pjoin(job['dirname'],'nevts'),'w') as f:
1707 f.write('%i\n' % job['nevents'])
1708
1710 """Looks in the jobs_to_run to see if there is the need to split the
1711 event generation step. Updates jobs_to_run and
1712 jobs_to_collect to replace the split-job by its
1713 splits. Also removes jobs that do not need any events.
1714 """
1715 nevt_job=self.run_card['nevt_job']
1716 if nevt_job > 0:
1717 jobs_to_collect_new=copy.copy(jobs_to_collect)
1718 for job in jobs_to_run:
1719 nevents=job['nevents']
1720 if nevents == 0:
1721 jobs_to_collect_new.remove(job)
1722 elif nevents > nevt_job:
1723 jobs_to_collect_new.remove(job)
1724 if nevents % nevt_job != 0 :
1725 nsplit=int(nevents/nevt_job)+1
1726 else:
1727 nsplit=int(nevents/nevt_job)
1728 for i in range(1,nsplit+1):
1729 job_new=copy.copy(job)
1730 left_over=nevents % nsplit
1731 if i <= left_over:
1732 job_new['nevents']=int(nevents/nsplit)+1
1733 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1734 else:
1735 job_new['nevents']=int(nevents/nsplit)
1736 job_new['wgt_frac']=float(job_new['nevents'])/float(nevents)
1737 job_new['split']=i
1738 job_new['dirname']=job['dirname']+'_%i' % job_new['split']
1739 jobs_to_collect_new.append(job_new)
1740 jobs_to_run_new=copy.copy(jobs_to_collect_new)
1741 else:
1742 jobs_to_run_new=copy.copy(jobs_to_collect)
1743 for job in jobs_to_collect:
1744 if job['nevents'] == 0:
1745 jobs_to_run_new.remove(job)
1746 jobs_to_collect_new=copy.copy(jobs_to_run_new)
1747
1748 return jobs_to_run_new,jobs_to_collect_new
1749
1750
1752 """
1753 For (N)LO+PS: determines the number of events and/or the required
1754 accuracy per job.
1755 For fixed order: determines which jobs need higher precision and
1756 returns those with the newly requested precision.
1757 """
1758 err=self.cross_sect_dict['errt']
1759 tot=self.cross_sect_dict['xsect']
1760 errABS=self.cross_sect_dict['erra']
1761 totABS=self.cross_sect_dict['xseca']
1762 jobs_new=[]
1763 if fixed_order:
1764 if req_acc == -1:
1765 if step+1 == 1:
1766 npoints = self.run_card['npoints_FO']
1767 niters = self.run_card['niters_FO']
1768 for job in jobs:
1769 job['mint_mode']=-1
1770 job['niters']=niters
1771 job['npoints']=npoints
1772 jobs_new.append(job)
1773 elif step+1 == 2:
1774 pass
1775 elif step+1 > 2:
1776 raise aMCatNLOError('Cannot determine number of iterations and PS points '+
1777 'for integration step %i' % step )
1778 elif ( req_acc > 0 and err/tot > req_acc*1.2 ) or step <= 0:
1779 req_accABS=req_acc*abs(tot)/totABS
1780 for job in jobs:
1781 job['mint_mode']=-1
1782
1783 job['accuracy']=req_accABS*math.sqrt(totABS/job['resultABS'])
1784
1785
1786 if (job['accuracy'] > job['errorABS']/job['resultABS'] and step != 0) \
1787 and not (step==-1 and self.run_card['iappl'] == 2):
1788 continue
1789
1790 itmax_fl=job['niters_done']*math.pow(job['errorABS']/
1791 (job['accuracy']*job['resultABS']),2)
1792 if itmax_fl <= 4.0 :
1793 job['niters']=max(int(round(itmax_fl)),2)
1794 job['npoints']=job['npoints_done']*2
1795 elif itmax_fl > 4.0 and itmax_fl <= 16.0 :
1796 job['niters']=4
1797 job['npoints']=int(round(job['npoints_done']*itmax_fl/4.0))*2
1798 else:
1799 if itmax_fl > 100.0 : itmax_fl=50.0
1800 job['niters']=int(round(math.sqrt(itmax_fl)))
1801 job['npoints']=int(round(job['npoints_done']*itmax_fl/
1802 round(math.sqrt(itmax_fl))))*2
1803
1804 jobs_new.append(job)
1805 return jobs_new
1806 elif step+1 <= 2:
1807 nevents=self.run_card['nevents']
1808
1809 if req_acc<0:
1810 req_acc2_inv=nevents
1811 else:
1812 req_acc2_inv=1/(req_acc*req_acc)
1813 if step+1 == 1 or step+1 == 2 :
1814
1815 for job in jobs:
1816 accuracy=min(math.sqrt(totABS/(req_acc2_inv*job['resultABS'])),0.2)
1817 job['accuracy']=accuracy
1818 if step+1 == 2:
1819
1820
1821 r=self.get_randinit_seed()
1822 random.seed(r)
1823 totevts=nevents
1824 for job in jobs:
1825 job['nevents'] = 0
1826 while totevts :
1827 target = random.random() * totABS
1828 crosssum = 0.
1829 i = 0
1830 while i<len(jobs) and crosssum < target:
1831 job = jobs[i]
1832 crosssum += job['resultABS']
1833 i += 1
1834 totevts -= 1
1835 i -= 1
1836 jobs[i]['nevents'] += 1
1837 for job in jobs:
1838 job['mint_mode']=step+1
1839 return jobs
1840 else:
1841 return []
1842
1843
1845 """ Get the random number seed from the randinit file """
1846 with open(pjoin(self.me_dir,"SubProcesses","randinit")) as randinit:
1847
1848 iseed = int(randinit.read()[2:])
1849 return iseed
1850
1851
1853 """Appends the results for each of the jobs in the job list"""
1854 error_found=False
1855 for job in jobs:
1856 try:
1857 if integration_step >= 0 :
1858 with open(pjoin(job['dirname'],'res_%s.dat' % integration_step)) as res_file:
1859 results=res_file.readline().split()
1860 else:
1861
1862
1863 with open(pjoin(job['dirname'],'res.dat')) as res_file:
1864 results=res_file.readline().split()
1865 except IOError:
1866 if not error_found:
1867 error_found=True
1868 error_log=[]
1869 error_log.append(pjoin(job['dirname'],'log.txt'))
1870 continue
1871 job['resultABS']=float(results[0])
1872 job['errorABS']=float(results[1])
1873 job['result']=float(results[2])
1874 job['error']=float(results[3])
1875 job['niters_done']=int(results[4])
1876 job['npoints_done']=int(results[5])
1877 job['time_spend']=float(results[6])
1878 job['err_percABS'] = job['errorABS']/job['resultABS']*100.
1879 job['err_perc'] = job['error']/job['result']*100.
1880 if error_found:
1881 raise aMCatNLOError('An error occurred during the collection of results.\n' +
1882 'Please check the .log files inside the directories which failed:\n' +
1883 '\n'.join(error_log)+'\n')
1884
1885
1886
1888 """writes the res.txt files in the SubProcess dir"""
1889 jobs.sort(key = lambda job: -job['errorABS'])
1890 content=[]
1891 content.append('\n\nCross section per integration channel:')
1892 for job in jobs:
1893 content.append('%(p_dir)20s %(channel)15s %(result)10.8e %(error)6.4e %(err_perc)6.4f%% ' % job)
1894 content.append('\n\nABS cross section per integration channel:')
1895 for job in jobs:
1896 content.append('%(p_dir)20s %(channel)15s %(resultABS)10.8e %(errorABS)6.4e %(err_percABS)6.4f%% ' % job)
1897 totABS=0
1898 errABS=0
1899 tot=0
1900 err=0
1901 for job in jobs:
1902 totABS+= job['resultABS']*job['wgt_frac']
1903 errABS+= math.pow(job['errorABS'],2)*job['wgt_frac']
1904 tot+= job['result']*job['wgt_frac']
1905 err+= math.pow(job['error'],2)*job['wgt_frac']
1906 if jobs:
1907 content.append('\nTotal ABS and \nTotal: \n %10.8e +- %6.4e (%6.4e%%)\n %10.8e +- %6.4e (%6.4e%%) \n' %\
1908 (totABS, math.sqrt(errABS), math.sqrt(errABS)/totABS *100.,\
1909 tot, math.sqrt(err), math.sqrt(err)/tot *100.))
1910 with open(pjoin(self.me_dir,'SubProcesses','res_%s.txt' % integration_step),'w') as res_file:
1911 res_file.write('\n'.join(content))
1912 randinit=self.get_randinit_seed()
1913 return {'xsect':tot,'xseca':totABS,'errt':math.sqrt(err),\
1914 'erra':math.sqrt(errABS),'randinit':randinit}
1915
1916
1918 """read the scale_pdf_dependence.dat files and collects there results"""
1919 scale_pdf_info=[]
1920 if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
1921 len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
1922 evt_files=[]
1923 evt_wghts=[]
1924 for job in jobs:
1925 evt_files.append(pjoin(job['dirname'],'scale_pdf_dependence.dat'))
1926 evt_wghts.append(job['wgt_frac'])
1927 scale_pdf_info = self.pdf_scale_from_reweighting(evt_files,evt_wghts)
1928 return scale_pdf_info
1929
1930
1932 """combines the plots and puts then in the Events/run* directory"""
1933 devnull = os.open(os.devnull, os.O_RDWR)
1934 if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
1935 misc.call(['./combine_plots_FO.sh'] + folder_name, \
1936 stdout=devnull,
1937 cwd=pjoin(self.me_dir, 'SubProcesses'))
1938 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
1939 pjoin(self.me_dir, 'Events', self.run_name))
1940 logger.info('The results of this run and the TopDrawer file with the plots' + \
1941 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1942 elif self.analyse_card['fo_analysis_format'].lower() == 'hwu':
1943 out=pjoin(self.me_dir,'Events',self.run_name,'MADatNLO')
1944 self.combine_plots_HwU(jobs,out)
1945 try:
1946 misc.call(['gnuplot','MADatNLO.gnuplot'],\
1947 stdout=devnull,stderr=devnull,\
1948 cwd=pjoin(self.me_dir, 'Events', self.run_name))
1949 except Exception:
1950 pass
1951 logger.info('The results of this run and the HwU and GnuPlot files with the plots' + \
1952 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1953 elif self.analyse_card['fo_analysis_format'].lower() == 'root':
1954 misc.call(['./combine_root.sh'] + folder_name, \
1955 stdout=devnull,
1956 cwd=pjoin(self.me_dir, 'SubProcesses'))
1957 files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
1958 pjoin(self.me_dir, 'Events', self.run_name))
1959 logger.info('The results of this run and the ROOT file with the plots' + \
1960 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1961 else:
1962 logger.info('The results of this run' + \
1963 ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
1964
1965
1967 """Sums all the plots in the HwU format."""
1968 logger.debug('Combining HwU plots.')
1969
1970 command = []
1971 command.append(pjoin(self.me_dir, 'bin', 'internal','histograms.py'))
1972 for job in jobs:
1973 if job['dirname'].endswith('.HwU'):
1974 command.append(job['dirname'])
1975 else:
1976 command.append(pjoin(job['dirname'],'MADatNLO.HwU'))
1977 command.append("--out="+out)
1978 command.append("--gnuplot")
1979 command.append("--band=[]")
1980 command.append("--lhapdf-config="+self.options['lhapdf'])
1981 if normalisation:
1982 command.append("--multiply="+(','.join([str(n) for n in normalisation])))
1983 command.append("--sum")
1984 command.append("--keep_all_weights")
1985 command.append("--no_open")
1986
1987 p = misc.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT, cwd=self.me_dir)
1988
1989 while p.poll() is None:
1990 line = p.stdout.readline()
1991 if any(t in line for t in ['INFO:','WARNING:','CRITICAL:','ERROR:','KEEP:']):
1992 print line[:-1]
1993 elif __debug__ and line:
1994 logger.debug(line[:-1])
1995
1996
1998 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
1999 logger.debug('Combining APPLgrids \n')
2000 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),
2001 'applgrid-combine')
2002 all_jobs=[]
2003 for job in jobs:
2004 all_jobs.append(job['dirname'])
2005 ngrids=len(all_jobs)
2006 nobs =len([name for name in os.listdir(all_jobs[0]) if name.endswith("_out.root")])
2007 for obs in range(0,nobs):
2008 gdir = [pjoin(job,"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
2009
2010 if self.run_card["iappl"] == 1:
2011 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,
2012 "aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
2013 elif self.run_card["iappl"] == 2:
2014 unc2_inv=pow(cross/error,2)
2015 unc2_inv_ngrids=pow(cross/error,2)*ngrids
2016 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",
2017 self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',
2018 str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
2019 for job in all_jobs:
2020 os.remove(pjoin(job,"grid_obs_"+str(obs)+"_in.root"))
2021 else:
2022 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
2023
2024 for ggdir in gdir:
2025 os.remove(ggdir)
2026
2027
        """Distributes the APPLgrids ready to be filled by a second run of the code"""
        # NOTE(review): the 'def' header of this method is not visible in this
        # view; 'options', 'p_dirs' and 'mode' presumably come from its
        # signature -- confirm against the full file.
        # If no starting grid was given explicitly, guess it: take the run
        # directory whose 'aMCfast_obs_0_starting_grid.root' is the newest.
        if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
            gfiles = misc.glob(pjoin('*', 'aMCfast_obs_0_starting_grid.root'),
                               pjoin(self.me_dir,'Events'))
            # map each candidate grid file to its modification time
            time_stamps={}
            for root_file in gfiles:
                time_stamps[root_file]=os.path.getmtime(root_file)
            # the run name is the parent directory of the newest grid file
            options['appl_start_grid']= \
                max(time_stamps.iterkeys(), key=(lambda key:
                    time_stamps[key])).split('/')[-2]
            logger.info('No --appl_start_grid option given. '+\
                    'Guessing that start grid from run "%s" should be used.' \
                     % options['appl_start_grid'])

        if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
            self.appl_start_grid = options['appl_start_grid']
            start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
            # sanity check: at least observable 0 must have a starting grid
            if not os.path.exists(pjoin(start_grid_dir,
                                        'aMCfast_obs_0_starting_grid.root')):
                raise self.InvalidCmd('APPLgrid file not found: %s' % \
                            pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
            else:
                all_grids=[pjoin(start_grid_dir,name) for name in os.listdir( \
                         start_grid_dir) if name.endswith("_starting_grid.root")]
                nobs =len(all_grids)
                gstring=" ".join(all_grids)
        if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
            raise self.InvalidCmd('No APPLgrid name currently defined.'+
                                  'Please provide this information.')
        # copy every starting grid into each <mode>_G* channel directory,
        # renamed to the 'grid_obs_<n>_in.root' name the fortran code reads
        for pdir in p_dirs:
            g_dirs = [file for file in os.listdir(pjoin(self.me_dir,
                     "SubProcesses",pdir)) if file.startswith(mode+'_G') and
                     os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
            for g_dir in g_dirs:
                for grid in all_grids:
                    # observable number is the 3rd-to-last '_'-separated token
                    obs=grid.split('_')[-3]
                    files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,
                                        'grid_obs_'+obs+'_in.root'))
2073
2074
2075
        """collect the log files and put them in a single, html-friendly file
        inside the Events/run_.../ directory"""
        # one aggregated file per MINT integration step
        log_file = pjoin(self.me_dir, 'Events', self.run_name,
                         'alllogs_%d.html' % integration_step)
        outfile = open(log_file, 'w')

        content = ''
        content += '<HTML><BODY>\n<font face="courier" size=2>'
        for job in jobs:
            # add a named anchor so each channel log can be linked directly
            log=pjoin(job['dirname'],'log_MINT%s.txt' % integration_step)
            content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(
                pjoin(self.me_dir,'SubProcesses'),''))
            # red header identifying the channel and the integration step
            content += '<font color="red">\n'
            content += '<br>LOG file for integration channel %s, %s <br>' % \
                    (os.path.dirname(log).replace(pjoin(self.me_dir,
                        'SubProcesses'), ''),
                     integration_step)
            content += '</font>\n'
            # dump the raw log verbatim inside <PRE> tags
            content += '<PRE>\n' + open(log).read() + '\n</PRE>'
            content +='<br>\n'
            # flush after every job so memory stays bounded for many channels
            outfile.write(content)
            content=''

        outfile.write('</font>\n</BODY></HTML>\n')
        outfile.close()
2106
2107
        """Combine the plots and put the res*.txt files in the Events/run.../ folder."""
        # move the per-channel res_*.txt summaries next to the run's events
        res_files = misc.glob('res_*.txt', pjoin(self.me_dir, 'SubProcesses'))
        for res_file in res_files:
            files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

        self.combine_plots_FO(folder_name,jobs)

        # if APPLgrid filling was requested (iappl != 0), combine the grids too
        if self.run_card['iappl'] != 0:
            cross=self.cross_sect_dict['xsect']
            error=self.cross_sect_dict['errt']
            self.applgrid_combine(cross,error,jobs)
2122
2123
        """setup the number of cores for multicore, and the cluster-type for cluster runs"""
        # cluster_mode == 1: batch-cluster run, instantiate the scheduler
        # class registered under the configured cluster_type
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            self.cluster = cluster.from_name[cluster_name](**self.options)
        # cluster_mode == 2: local multicore run
        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        # nb_core option unset (None): auto-detect core count
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                # no multiprocessing module available: fall back to one core
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using One.\n'+
                        'Use set nb_core X in order to set this number and be able to'+
                        'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)
2145
2146
        """Clean previous results.
        o. If doing only the reweighting step, do not delete anything and return directlty.
        o. Always remove all the G*_* files (from split event generation).
        o. Remove the G* (or born_G* or all_G*) only when NOT doing only_generation or reweight_only."""
        if options['reweightonly']:
            return
        if not options['only_generation']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            # obj is a glob-like pattern ('G*', 'born_G*', 'all_G*'); obj[:-1]
            # strips the trailing '*' to get the literal prefix
            for obj in folder_name:
                # everything matching the channel prefix (dirs or files)
                to_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]
                # split-generation leftovers (G*_* but not *_G*): always removed
                to_always_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and
                            '_' in file and not '_G' in file and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation']:
                    # full cleanup: also drop the channel dirs themselves and
                    # the cached MadLoop resources tarball
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])
        return
2179
2180
2181 - def print_summary(self, options, step, mode, scale_pdf_info=[], done=True):
2182 """print a summary of the results contained in self.cross_sect_dict.
2183 step corresponds to the mintMC step, if =2 (i.e. after event generation)
2184 some additional infos are printed"""
2185
2186 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
2187 process = ''
2188 for line in proc_card_lines:
2189 if line.startswith('generate') or line.startswith('add process'):
2190 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
2191 lpp = {0:'l', 1:'p', -1:'pbar'}
2192 if self.ninitial == 1:
2193 proc_info = '\n Process %s' % process[:-3]
2194 else:
2195 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
2196 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
2197 self.run_card['ebeam1'], self.run_card['ebeam2'])
2198
2199 if self.ninitial == 1:
2200 self.cross_sect_dict['unit']='GeV'
2201 self.cross_sect_dict['xsec_string']='(Partial) decay width'
2202 self.cross_sect_dict['axsec_string']='(Partial) abs(decay width)'
2203 else:
2204 self.cross_sect_dict['unit']='pb'
2205 self.cross_sect_dict['xsec_string']='Total cross section'
2206 self.cross_sect_dict['axsec_string']='Total abs(cross section)'
2207
2208 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2209 status = ['Determining the number of unweighted events per channel',
2210 'Updating the number of unweighted events per channel',
2211 'Summary:']
2212 computed='(computed from LHE events)'
2213 elif mode in ['NLO', 'LO']:
2214 status = ['Results after grid setup:','Current results:',
2215 'Final results and run summary:']
2216 computed='(computed from histogram information)'
2217
2218 if step != 2 and mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2219 message = status[step] + '\n\n Intermediate results:' + \
2220 ('\n Random seed: %(randinit)d' + \
2221 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' + \
2222 '\n %(axsec_string)s: %(xseca)8.3e +- %(erra)6.1e %(unit)s \n') \
2223 % self.cross_sect_dict
2224 elif mode in ['NLO','LO'] and not done:
2225 if step == 0:
2226 message = '\n ' + status[0] + \
2227 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2228 self.cross_sect_dict
2229 else:
2230 message = '\n ' + status[1] + \
2231 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2232 self.cross_sect_dict
2233
2234 else:
2235 message = '\n --------------------------------------------------------------'
2236 message = message + \
2237 '\n ' + status[2] + proc_info
2238 if mode not in ['LO', 'NLO']:
2239 message = message + \
2240 '\n Number of events generated: %s' % self.run_card['nevents']
2241 message = message + \
2242 '\n %(xsec_string)s: %(xsect)8.3e +- %(errt)6.1e %(unit)s' % \
2243 self.cross_sect_dict
2244 message = message + \
2245 '\n --------------------------------------------------------------'
2246 if scale_pdf_info and (self.run_card['nevents']>=10000 or mode in ['NLO', 'LO']):
2247 if scale_pdf_info[0]:
2248
2249 message = message + '\n Scale variation %s:' % computed
2250 for s in scale_pdf_info[0]:
2251 if s['unc']:
2252 if self.run_card['ickkw'] != -1:
2253 message = message + \
2254 ('\n Dynamical_scale_choice %(label)i (envelope of %(size)s values): '\
2255 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % s
2256 else:
2257 message = message + \
2258 ('\n Soft and hard scale dependence (added in quadrature): '\
2259 '\n %(cen)8.3e pb +%(max_q)0.1f%% -%(min_q)0.1f%%') % s
2260
2261 else:
2262 message = message + \
2263 ('\n Dynamical_scale_choice %(label)i: '\
2264 '\n %(cen)8.3e pb') % s
2265
2266 if scale_pdf_info[1]:
2267 message = message + '\n PDF variation %s:' % computed
2268 for p in scale_pdf_info[1]:
2269 if p['unc']=='none':
2270 message = message + \
2271 ('\n %(name)s (central value only): '\
2272 '\n %(cen)8.3e pb') % p
2273
2274 elif p['unc']=='unknown':
2275 message = message + \
2276 ('\n %(name)s (%(size)s members; combination method unknown): '\
2277 '\n %(cen)8.3e pb') % p
2278 else:
2279 message = message + \
2280 ('\n %(name)s (%(size)s members; using %(unc)s method): '\
2281 '\n %(cen)8.3e pb +%(max)0.1f%% -%(min)0.1f%%') % p
2282
2283 message = message + \
2284 '\n --------------------------------------------------------------'
2285
2286
2287 if (mode in ['NLO', 'LO'] and not done) or \
2288 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
2289 logger.info(message+'\n')
2290 return
2291
2292
2293
2294
2295
2296 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
2297 log_GV_files = misc.glob(pjoin('P*','G*','log_MINT*.txt'),
2298 pjoin(self.me_dir, 'SubProcesses'))
2299 all_log_files = log_GV_files
2300 elif mode == 'NLO':
2301 log_GV_files = misc.glob(pjoin('P*','all_G*','log_MINT*.txt'),
2302 pjoin(self.me_dir, 'SubProcesses'))
2303 all_log_files = log_GV_files
2304
2305 elif mode == 'LO':
2306 log_GV_files = ''
2307 all_log_files = misc.glob(pjoin('P*','born_G*','log_MINT*.txt'),
2308 pjoin(self.me_dir, 'SubProcesses'))
2309 else:
2310 raise aMCatNLOError, 'Running mode %s not supported.'%mode
2311
2312 try:
2313 message, debug_msg = \
2314 self.compile_advanced_stats(log_GV_files, all_log_files, message)
2315 except Exception as e:
2316 debug_msg = 'Advanced statistics collection failed with error "%s"\n'%str(e)
2317 err_string = StringIO.StringIO()
2318 traceback.print_exc(limit=4, file=err_string)
2319 debug_msg += 'Please report this backtrace to a MadGraph developer:\n%s'\
2320 %err_string.getvalue()
2321
2322 logger.debug(debug_msg+'\n')
2323 logger.info(message+'\n')
2324
2325
2326 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
2327 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
2328 open(pjoin(evt_path, '.full_summary.txt'),
2329 'w').write(message+'\n\n'+debug_msg+'\n')
2330
2331 self.archive_files(evt_path,mode)
2332
        """ Copies in the Events/Run_<xxx> directory relevant files characterizing
        the run."""
        # cards and user-editable fortran files that define this run
        # NOTE(review): 'Subprocesses' (lowercase p) does not match the
        # 'SubProcesses' spelling used elsewhere in this file; since missing
        # files are silently skipped by the isfile() guard below, these two
        # fortran files may never actually be archived. TODO confirm.
        files_to_arxiv = [pjoin('Cards','param_card.dat'),
                          pjoin('Cards','MadLoopParams.dat'),
                          pjoin('Cards','FKS_params.dat'),
                          pjoin('Cards','run_card.dat'),
                          pjoin('Subprocesses','setscales.f'),
                          pjoin('Subprocesses','cuts.f')]
        # fixed-order runs also keep the analysis card
        if mode in ['NLO', 'LO']:
            files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))

        if not os.path.exists(pjoin(evt_path,'RunMaterial')):
            os.mkdir(pjoin(evt_path,'RunMaterial'))
        # copy whatever exists, then tar it up and remove the staging dir
        for path in files_to_arxiv:
            if os.path.isfile(pjoin(self.me_dir,path)):
                files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
        misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
        shutil.rmtree(pjoin(evt_path,'RunMaterial'))
2355
2357 """ This functions goes through the log files given in arguments and
2358 compiles statistics about MadLoop stability, virtual integration
2359 optimization and detection of potential error messages into a nice
2360 debug message to printed at the end of the run """
2361
2362 def safe_float(str_float):
2363 try:
2364 return float(str_float)
2365 except ValueError:
2366 logger.debug('Could not convert the following float during'+
2367 ' advanced statistics printout: %s'%str(str_float))
2368 return -1.0
2369
2370
2371
2372
2373 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
2374 mint_search = re.compile(r"MINT(?P<ID>\d*).txt")
2375
2376
2377
2378
2379
2380
2381
2382 UPS_stat_finder = re.compile(
2383 r"Satistics from MadLoop:.*"+\
2384 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
2385 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
2386 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
2387 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
2388 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
2389 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
2390 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
2391 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
2392 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
2393 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)
2394
2395 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
2396 1 : 'CutTools (double precision)',
2397 2 : 'PJFry++',
2398 3 : 'IREGI',
2399 4 : 'Golem95',
2400 5 : 'Samurai',
2401 6 : 'Ninja (double precision)',
2402 8 : 'Ninja (quadruple precision)',
2403 9 : 'CutTools (quadruple precision)'}
2404 RetUnit_finder =re.compile(
2405 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")
2406
2407
2408 for gv_log in log_GV_files:
2409 channel_name = '/'.join(gv_log.split('/')[-5:-1])
2410 log=open(gv_log,'r').read()
2411 UPS_stats = re.search(UPS_stat_finder,log)
2412 for retunit_stats in re.finditer(RetUnit_finder, log):
2413 if channel_name not in stats['UPS'].keys():
2414 stats['UPS'][channel_name] = [0]*10+[[0]*10]
2415 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
2416 += int(retunit_stats.group('n_occurences'))
2417 if not UPS_stats is None:
2418 try:
2419 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
2420 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
2421 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
2422 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
2423 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
2424 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
2425 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
2426 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
2427 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
2428 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
2429 except KeyError:
2430 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
2431 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
2432 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
2433 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
2434 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
2435 int(UPS_stats.group('n10')),[0]*10]
2436 debug_msg = ""
2437 if len(stats['UPS'].keys())>0:
2438 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
2439 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
2440 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
2441 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
2442 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
2443 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
2444 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
2445 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
2446 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
2447 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
2448 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
2449 for i in range(10)]
2450 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
2451 safe_float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
2452 maxUPS = max(UPSfracs, key = lambda w: w[1])
2453
2454 tmpStr = ""
2455 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
2456 tmpStr += '\n Stability unknown: %d'%nTotsun
2457 tmpStr += '\n Stable PS point: %d'%nTotsps
2458 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
2459 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
2460 tmpStr += '\n Only double precision used: %d'%nTotddp
2461 tmpStr += '\n Quadruple precision used: %d'%nTotqdp
2462 tmpStr += '\n Initialization phase-space points: %d'%nTotini
2463 tmpStr += '\n Reduction methods used:'
2464 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
2465 unit_code_meaning.keys() if nTot1[i]>0]
2466 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
2467 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
2468 if nTot100 != 0:
2469 debug_msg += '\n Unknown return code (100): %d'%nTot100
2470 if nTot10 != 0:
2471 debug_msg += '\n Unknown return code (10): %d'%nTot10
2472 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
2473 not in unit_code_meaning.keys())
2474 if nUnknownUnit != 0:
2475 debug_msg += '\n Unknown return code (1): %d'\
2476 %nUnknownUnit
2477
2478 if maxUPS[1]>0.001:
2479 message += tmpStr
2480 message += '\n Total number of unstable PS point detected:'+\
2481 ' %d (%4.2f%%)'%(nToteps,safe_float(100*nToteps)/nTotPS)
2482 message += '\n Maximum fraction of UPS points in '+\
2483 'channel %s (%4.2f%%)'%maxUPS
2484 message += '\n Please report this to the authors while '+\
2485 'providing the file'
2486 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
2487 maxUPS[0],'UPS.log'))
2488 else:
2489 debug_msg += tmpStr
2490
2491
2492
2493
2494
2495
2496 virt_tricks_finder = re.compile(
2497 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
2498 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
2499 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
2500 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")
2501
2502 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
2503 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")
2504
2505 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")
2506
2507 channel_contr_list = {}
2508 for gv_log in log_GV_files:
2509 logfile=open(gv_log,'r')
2510 log = logfile.read()
2511 logfile.close()
2512 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2513 vf_stats = None
2514 for vf_stats in re.finditer(virt_frac_finder, log):
2515 pass
2516 if not vf_stats is None:
2517 v_frac = safe_float(vf_stats.group('v_frac'))
2518 v_average = safe_float(vf_stats.group('v_average'))
2519 try:
2520 if v_frac < stats['virt_stats']['v_frac_min'][0]:
2521 stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
2522 if v_frac > stats['virt_stats']['v_frac_max'][0]:
2523 stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
2524 stats['virt_stats']['v_frac_avg'][0] += v_frac
2525 stats['virt_stats']['v_frac_avg'][1] += 1
2526 except KeyError:
2527 stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
2528 stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
2529 stats['virt_stats']['v_frac_avg']=[v_frac,1]
2530
2531
2532 ccontr_stats = None
2533 for ccontr_stats in re.finditer(channel_contr_finder, log):
2534 pass
2535 if not ccontr_stats is None:
2536 contrib = safe_float(ccontr_stats.group('v_contr'))
2537 try:
2538 if contrib>channel_contr_list[channel_name]:
2539 channel_contr_list[channel_name]=contrib
2540 except KeyError:
2541 channel_contr_list[channel_name]=contrib
2542
2543
2544
2545
2546 average_contrib = 0.0
2547 for value in channel_contr_list.values():
2548 average_contrib += value
2549 if len(channel_contr_list.values()) !=0:
2550 average_contrib = average_contrib / len(channel_contr_list.values())
2551
2552 relevant_log_GV_files = []
2553 excluded_channels = set([])
2554 all_channels = set([])
2555 for log_file in log_GV_files:
2556 channel_name = '/'.join(log_file.split('/')[-3:-1])
2557 all_channels.add(channel_name)
2558 try:
2559 if channel_contr_list[channel_name] > (0.1*average_contrib):
2560 relevant_log_GV_files.append(log_file)
2561 else:
2562 excluded_channels.add(channel_name)
2563 except KeyError:
2564 relevant_log_GV_files.append(log_file)
2565
2566
2567 for gv_log in relevant_log_GV_files:
2568 logfile=open(gv_log,'r')
2569 log = logfile.read()
2570 logfile.close()
2571 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2572
2573 vt_stats = None
2574 for vt_stats in re.finditer(virt_tricks_finder, log):
2575 pass
2576 if not vt_stats is None:
2577 vt_stats_group = vt_stats.groupdict()
2578 v_ratio = safe_float(vt_stats.group('v_ratio'))
2579 v_ratio_err = safe_float(vt_stats.group('v_ratio_err'))
2580 v_contr = safe_float(vt_stats.group('v_abs_contr'))
2581 v_contr_err = safe_float(vt_stats.group('v_abs_contr_err'))
2582 try:
2583 if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
2584 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
2585 if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
2586 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
2587 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
2588 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
2589 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
2590 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
2591 if v_contr < stats['virt_stats']['v_contr_min'][0]:
2592 stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
2593 if v_contr > stats['virt_stats']['v_contr_max'][0]:
2594 stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
2595 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
2596 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
2597 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
2598 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
2599 except KeyError:
2600 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
2601 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
2602 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
2603 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
2604 stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
2605 stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
2606 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
2607 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]
2608
2609 vf_stats = None
2610 for vf_stats in re.finditer(virt_frac_finder, log):
2611 pass
2612 if not vf_stats is None:
2613 v_frac = safe_float(vf_stats.group('v_frac'))
2614 v_average = safe_float(vf_stats.group('v_average'))
2615 try:
2616 if v_average < stats['virt_stats']['v_average_min'][0]:
2617 stats['virt_stats']['v_average_min']=(v_average,channel_name)
2618 if v_average > stats['virt_stats']['v_average_max'][0]:
2619 stats['virt_stats']['v_average_max']=(v_average,channel_name)
2620 stats['virt_stats']['v_average_avg'][0] += v_average
2621 stats['virt_stats']['v_average_avg'][1] += 1
2622 except KeyError:
2623 stats['virt_stats']['v_average_min']=[v_average,channel_name]
2624 stats['virt_stats']['v_average_max']=[v_average,channel_name]
2625 stats['virt_stats']['v_average_avg']=[v_average,1]
2626
2627 try:
2628 debug_msg += '\n\n Statistics on virtual integration optimization : '
2629
2630 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
2631 %tuple(stats['virt_stats']['v_frac_max'])
2632 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
2633 %tuple(stats['virt_stats']['v_frac_min'])
2634 debug_msg += '\n Average virt fraction computed %.3f'\
2635 %safe_float(stats['virt_stats']['v_frac_avg'][0]/safe_float(stats['virt_stats']['v_frac_avg'][1]))
2636 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
2637 (len(excluded_channels),len(all_channels))
2638 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
2639 %tuple(stats['virt_stats']['v_average_max'])
2640 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
2641 %tuple(stats['virt_stats']['v_ratio_max'])
2642 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
2643 %tuple(stats['virt_stats']['v_ratio_err_max'])
2644 debug_msg += tmpStr
2645
2646
2647
2648
2649
2650
2651
2652
2653 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
2654 %tuple(stats['virt_stats']['v_contr_err_max'])
2655 debug_msg += tmpStr
2656
2657
2658
2659
2660 except KeyError:
2661 debug_msg += '\n Could not find statistics on the integration optimization. '
2662
2663
2664
2665
2666
2667 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
2668 "(?P<time>[\d\+-Eed\.]*)\s*")
2669
2670 for logf in log_GV_files:
2671 logfile=open(logf,'r')
2672 log = logfile.read()
2673 logfile.close()
2674 channel_name = '/'.join(logf.split('/')[-3:-1])
2675 mint = re.search(mint_search,logf)
2676 if not mint is None:
2677 channel_name = channel_name+' [step %s]'%mint.group('ID')
2678
2679 for time_stats in re.finditer(timing_stat_finder, log):
2680 try:
2681 stats['timings'][time_stats.group('name')][channel_name]+=\
2682 safe_float(time_stats.group('time'))
2683 except KeyError:
2684 if time_stats.group('name') not in stats['timings'].keys():
2685 stats['timings'][time_stats.group('name')] = {}
2686 stats['timings'][time_stats.group('name')][channel_name]=\
2687 safe_float(time_stats.group('time'))
2688
2689
2690 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
2691 try:
2692 totTimeList = [(time, chan) for chan, time in \
2693 stats['timings']['Total'].items()]
2694 except KeyError:
2695 totTimeList = []
2696
2697 totTimeList.sort()
2698 if len(totTimeList)>0:
2699 debug_msg += '\n\n Inclusive timing profile :'
2700 debug_msg += '\n Overall slowest channel %s (%s)'%\
2701 (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
2702 debug_msg += '\n Average channel running time %s'%\
2703 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
2704 debug_msg += '\n Aggregated total running time %s'%\
2705 Tstr(sum([el[0] for el in totTimeList]))
2706 else:
2707 debug_msg += '\n\n Inclusive timing profile non available.'
2708
2709 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
2710 sum(stats['timings'][stat].values()), reverse=True)
2711 for name in sorted_keys:
2712 if name=='Total':
2713 continue
2714 if sum(stats['timings'][name].values())<=0.0:
2715 debug_msg += '\n Zero time record for %s.'%name
2716 continue
2717 try:
2718 TimeList = [((100.0*time/stats['timings']['Total'][chan]),
2719 chan) for chan, time in stats['timings'][name].items()]
2720 except KeyError, ZeroDivisionError:
2721 debug_msg += '\n\n Timing profile for %s unavailable.'%name
2722 continue
2723 TimeList.sort()
2724 debug_msg += '\n Timing profile for <%s> :'%name
2725 try:
2726 debug_msg += '\n Overall fraction of time %.3f %%'%\
2727 safe_float((100.0*(sum(stats['timings'][name].values())/
2728 sum(stats['timings']['Total'].values()))))
2729 except KeyError, ZeroDivisionError:
2730 debug_msg += '\n Overall fraction of time unavailable.'
2731 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
2732 (TimeList[-1][0],TimeList[-1][1])
2733 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
2734 (TimeList[0][0],TimeList[0][1])
2735
2736
2737
2738
2739
2740
2741
2742
2743
2744
2745 err_finder = re.compile(\
2746 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
2747 for log in all_log_files:
2748 logfile=open(log,'r')
2749 nErrors = len(re.findall(err_finder, logfile.read()))
2750 logfile.close()
2751 if nErrors != 0:
2752 stats['Errors'].append((str(log),nErrors))
2753
2754 nErrors = sum([err[1] for err in stats['Errors']],0)
2755 if nErrors != 0:
2756 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
2757 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
2758 'found in the following log file%s:'%('s' if \
2759 len(stats['Errors'])>1 else '')
2760 for error in stats['Errors'][:3]:
2761 log_name = '/'.join(error[0].split('/')[-5:])
2762 debug_msg += '\n > %d error%s in %s'%\
2763 (error[1],'s' if error[1]>1 else '',log_name)
2764 if len(stats['Errors'])>3:
2765 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
2766 nRemainingLogs = len(stats['Errors'])-3
2767 debug_msg += '\n And another %d error%s in %d other log file%s'%\
2768 (nRemainingErrors, 's' if nRemainingErrors>1 else '',
2769 nRemainingLogs, 's ' if nRemainingLogs>1 else '')
2770
2771 return message, debug_msg
2772
2773
        """this function calls the reweighting routines and creates the event file in the
        Event dir. Return the name of the event file created
        """
        # NOTE(review): the 'def' header of this method is not visible here;
        # 'options', 'event_norm', 'mode' and 'nevents' presumably come from
        # its signature -- confirm against the full file.
        scale_pdf_info=[]
        # run the reweighting step only when scale/PDF variations or multiple
        # dynamical scales / PDF sets were requested in the run card
        if any(self.run_card['reweight_scale']) or any(self.run_card['reweight_PDF']) or \
           len(self.run_card['dynamical_scale_choice']) > 1 or len(self.run_card['lhaid']) > 1:
            scale_pdf_info = self.run_reweight(options['reweightonly'])
        self.update_status('Collecting events', level='parton', update_results=True)
        misc.compile(['collect_events'],
                    cwd=pjoin(self.me_dir, 'SubProcesses'), nocompile=options['nocompile'])
        p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
                stdin=subprocess.PIPE,
                stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
        # answer collect_events' interactive normalisation prompt:
        # 'sum' -> '1', 'unity' -> '3', anything else -> '2'
        if event_norm.lower() == 'sum':
            p.communicate(input = '1\n')
        elif event_norm.lower() == 'unity':
            p.communicate(input = '3\n')
        else:
            p.communicate(input = '2\n')

        # collect_events prints the produced file name as the last token of
        # its log output
        filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]

        if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
            raise aMCatNLOError('An error occurred during event generation. ' + \
                    'The event file has not been created. Check collect_events.log')
        evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
        misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
        if not options['reweightonly']:
            # step=2 triggers the post-generation summary printout
            self.print_summary(options, 2, mode, scale_pdf_info)
            res_files = misc.glob('res*.txt', pjoin(self.me_dir, 'SubProcesses'))
            for res_file in res_files:
                files.mv(res_file,pjoin(self.me_dir, 'Events', self.run_name))

        logger.info('The %s file has been generated.\n' % (evt_file))
        self.results.add_detail('nb_event', nevents)
        self.update_status('Events generated', level='parton', update_results=True)
        # strip the '.gz' suffix: caller gets the uncompressed file name
        return evt_file[:-3]
2813
2814
2816 """runs mcatnlo on the generated event file, to produce showered-events
2817 """
2818 logger.info('Preparing MCatNLO run')
2819 try:
2820 misc.gunzip(evt_file)
2821 except Exception:
2822 pass
2823
2824 self.banner = banner_mod.Banner(evt_file)
2825 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
2826
2827
2828
2829 if int(self.banner.get_detail('run_card', 'nevents') / \
2830 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
2831 != self.banner.get_detail('run_card', 'nevents'):
2832 logger.warning(\
2833 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
2834 'Setting it to 1.')
2835 self.shower_card['nsplit_jobs'] = 1
2836
2837
2838 if self.shower_card['nevents'] > 0 and \
2839 self.shower_card['nevents'] < self.banner.get_detail('run_card', 'nevents') and \
2840 self.shower_card['nsplit_jobs'] != 1:
2841 logger.warning(\
2842 'Only a part of the events will be showered.\n' + \
2843 'Setting nsplit_jobs in the shower_card to 1.')
2844 self.shower_card['nsplit_jobs'] = 1
2845
2846 self.banner_to_mcatnlo(evt_file)
2847
2848
2849
2850
2851 if 'fastjet' in self.shower_card['extralibs']:
2852
2853 if not 'stdc++' in self.shower_card['extralibs']:
2854 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
2855 self.shower_card['extralibs'] += ' stdc++'
2856
2857 try:
2858
2859 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
2860 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2861 output, error = p.communicate()
2862
2863 output = output[:-1]
2864
2865 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
2866 logger.warning('Linking FastJet: updating EXTRAPATHS')
2867 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
2868 if not pjoin(output, 'include') in self.shower_card['includepaths']:
2869 logger.warning('Linking FastJet: updating INCLUDEPATHS')
2870 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
2871
2872 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
2873 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
2874 except Exception:
2875 logger.warning('Linking FastJet: using fjcore')
2876
2877 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
2878 if not 'fjcore.o' in self.shower_card['analyse']:
2879 self.shower_card['analyse'] += ' fjcore.o'
2880
2881 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
2882 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
2883
2884 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
2885 for line in fjwrapper_lines:
2886 if '//INCLUDE_FJ' in line:
2887 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
2888 if '//NAMESPACE_FJ' in line:
2889 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
2890 open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\
2891 '\n'.join(fjwrapper_lines) + '\n')
2892
2893 extrapaths = self.shower_card['extrapaths'].split()
2894
2895
2896 if shower in ['HERWIGPP', 'PYTHIA8']:
2897 path_dict = {'HERWIGPP': ['hepmc_path',
2898 'thepeg_path',
2899 'hwpp_path'],
2900 'PYTHIA8': ['pythia8_path']}
2901
2902 if not all([self.options[ppath] for ppath in path_dict[shower]]):
2903 raise aMCatNLOError('Some paths are missing in the configuration file.\n' + \
2904 ('Please make sure you have set these variables: %s' % ', '.join(path_dict[shower])))
2905
2906 if shower == 'HERWIGPP':
2907 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
2908
2909 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2910 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
2911
2912 if 'LD_LIBRARY_PATH' in os.environ.keys():
2913 ldlibrarypath = os.environ['LD_LIBRARY_PATH']
2914 else:
2915 ldlibrarypath = ''
2916 ldlibrarypath += ':' + ':'.join(extrapaths)
2917 os.putenv('LD_LIBRARY_PATH', ldlibrarypath)
2918
2919 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
2920 self.shower_card.write_card(shower, shower_card_path)
2921
2922
2923 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat')):
2924 files.mv(pjoin(self.me_dir, 'MCatNLO', 'shower_card_set.dat'),
2925 pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat'))
2926
2927 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
2928 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
2929
2930
2931
2932 if shower == 'PYTHIA8' and not \
2933 os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')) and \
2934 'dl' not in self.shower_card['extralibs'].split():
2935
2936 self.shower_card['extralibs'] += ' dl'
2937 logger.warning("'dl' was added to extralibs from the shower_card.dat.\n" + \
2938 "It is needed for the correct running of PY8.2xx.\n" + \
2939 "If this library cannot be found on your system, a crash will occur.")
2940
2941 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
2942 stderr=open(mcatnlo_log, 'w'),
2943 cwd=pjoin(self.me_dir, 'MCatNLO'))
2944
2945 exe = 'MCATNLO_%s_EXE' % shower
2946 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
2947 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
2948 print open(mcatnlo_log).read()
2949 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
2950 logger.info(' ... done')
2951
2952
2953 count = 1
2954 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2955 (shower, count))):
2956 count += 1
2957 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2958 (shower, count))
2959 os.mkdir(rundir)
2960 files.cp(shower_card_path, rundir)
2961
2962
2963
2964 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
2965 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
2966 logger.info('Cleaning old files and splitting the event file...')
2967
2968 files.rm([f for f in event_files if 'events.lhe' not in f])
2969 if self.shower_card['nsplit_jobs'] > 1:
2970 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'), nocompile=options['nocompile'])
2971 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
2972 stdin=subprocess.PIPE,
2973 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
2974 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2975 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
2976 logger.info('Splitting done.')
2977 event_files = misc.glob('events_*.lhe', pjoin(self.me_dir, 'Events', self.run_name))
2978
2979 event_files.sort()
2980
2981 self.update_status('Showering events...', level='shower')
2982 logger.info('(Running in %s)' % rundir)
2983 if shower != 'PYTHIA8':
2984 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
2985 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
2986 else:
2987
2988 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
2989 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
2990 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2991 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
2992 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
2993 else:
2994 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
2995
2996 if shower == 'HERWIGPP':
2997 try:
2998 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
2999 except Exception:
3000 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
3001
3002 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
3003 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
3004
3005 files.ln(evt_file, rundir, 'events.lhe')
3006 for i, f in enumerate(event_files):
3007 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
3008
3009 if not self.shower_card['analyse']:
3010
3011 out_id = 'HEP'
3012 else:
3013
3014 if "HwU" in self.shower_card['analyse']:
3015 out_id = 'HWU'
3016 else:
3017 out_id = 'TOP'
3018
3019
3020 open(pjoin(rundir, 'shower.sh'), 'w').write(\
3021 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
3022 % {'extralibs': ':'.join(extrapaths)})
3023 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
3024
3025 if event_files:
3026 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
3027 for i in range(len(event_files))]
3028 else:
3029 arg_list = [[shower, out_id, self.run_name]]
3030
3031 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
3032 self.njobs = 1
3033 self.wait_for_complete('shower')
3034
3035
3036 message = ''
3037 warning = ''
3038 to_gzip = [evt_file]
3039 if out_id == 'HEP':
3040
3041 if shower in ['PYTHIA8', 'HERWIGPP']:
3042 hep_format = 'HEPMC'
3043 ext = 'hepmc'
3044 else:
3045 hep_format = 'StdHEP'
3046 ext = 'hep'
3047
3048 hep_file = '%s_%s_0.%s.gz' % \
3049 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
3050 count = 0
3051
3052
3053
3054 while os.path.exists(hep_file) or \
3055 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
3056 count +=1
3057 hep_file = '%s_%s_%d.%s.gz' % \
3058 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
3059
3060 try:
3061 if self.shower_card['nsplit_jobs'] == 1:
3062 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
3063 message = ('The file %s has been generated. \nIt contains showered' + \
3064 ' and hadronized events in the %s format obtained' + \
3065 ' showering the parton-level event file %s.gz with %s') % \
3066 (hep_file, hep_format, evt_file, shower)
3067 else:
3068 hep_list = []
3069 for i in range(self.shower_card['nsplit_jobs']):
3070 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
3071 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
3072 message = ('The following files have been generated:\n %s\nThey contain showered' + \
3073 ' and hadronized events in the %s format obtained' + \
3074 ' showering the (split) parton-level event file %s.gz with %s') % \
3075 ('\n '.join(hep_list), hep_format, evt_file, shower)
3076
3077 except OSError, IOError:
3078 raise aMCatNLOError('No file has been generated, an error occurred.'+\
3079 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
3080
3081
3082 if hep_format == 'StdHEP':
3083 try:
3084 self.do_plot('%s -f' % self.run_name)
3085 except Exception, error:
3086 logger.info("Fail to make the plot. Continue...")
3087 pass
3088
3089 elif out_id == 'TOP' or out_id == 'HWU':
3090
3091 if out_id=='TOP':
3092 ext='top'
3093 elif out_id=='HWU':
3094 ext='HwU'
3095 topfiles = []
3096 top_tars = [tarfile.TarFile(f) for f in misc.glob('histfile*.tar', rundir)]
3097 for top_tar in top_tars:
3098 topfiles.extend(top_tar.getnames())
3099
3100
3101 if len(top_tars) != self.shower_card['nsplit_jobs']:
3102 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
3103 (self.shower_card['nsplit_jobs'], len(top_tars)))
3104
3105
3106
3107 filename = 'plot_%s_%d_' % (shower, 1)
3108 count = 1
3109 while os.path.exists(pjoin(self.me_dir, 'Events',
3110 self.run_name, '%s0.%s' % (filename,ext))) or \
3111 os.path.exists(pjoin(self.me_dir, 'Events',
3112 self.run_name, '%s0__1.%s' % (filename,ext))):
3113 count += 1
3114 filename = 'plot_%s_%d_' % (shower, count)
3115
3116 if out_id=='TOP':
3117 hist_format='TopDrawer format'
3118 elif out_id=='HWU':
3119 hist_format='HwU and GnuPlot formats'
3120
3121 if not topfiles:
3122
3123 warning = 'No .top file has been generated. For the results of your ' +\
3124 'run, please check inside %s' % rundir
3125 elif self.shower_card['nsplit_jobs'] == 1:
3126
3127 top_tars[0].extractall(path = rundir)
3128 plotfiles = []
3129 for i, file in enumerate(topfiles):
3130 if out_id=='TOP':
3131 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
3132 '%s%d.top' % (filename, i))
3133 files.mv(pjoin(rundir, file), plotfile)
3134 elif out_id=='HWU':
3135 out=pjoin(self.me_dir,'Events',
3136 self.run_name,'%s%d'% (filename,i))
3137 histos=[{'dirname':pjoin(rundir,file)}]
3138 self.combine_plots_HwU(histos,out)
3139 try:
3140 misc.call(['gnuplot','%s%d.gnuplot' % (filename,i)],\
3141 stdout=os.open(os.devnull, os.O_RDWR),\
3142 stderr=os.open(os.devnull, os.O_RDWR),\
3143 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3144 except Exception:
3145 pass
3146 plotfile=pjoin(self.me_dir,'Events',self.run_name,
3147 '%s%d.HwU'% (filename,i))
3148 plotfiles.append(plotfile)
3149
3150 ffiles = 'files'
3151 have = 'have'
3152 if len(plotfiles) == 1:
3153 ffiles = 'file'
3154 have = 'has'
3155
3156 message = ('The %s %s %s been generated, with histograms in the' + \
3157 ' %s, obtained by showering the parton-level' + \
3158 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
3159 hist_format, evt_file, shower)
3160 else:
3161
3162 topfiles_set = set(topfiles)
3163 plotfiles = []
3164 for j, top_tar in enumerate(top_tars):
3165 top_tar.extractall(path = rundir)
3166 for i, file in enumerate(topfiles_set):
3167 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
3168 '%s%d__%d.%s' % (filename, i, j + 1,ext))
3169 files.mv(pjoin(rundir, file), plotfile)
3170 plotfiles.append(plotfile)
3171
3172
3173 if self.shower_card['combine_td']:
3174 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
3175
3176 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
3177 norm = 1.
3178 elif self.banner.get('run_card', 'event_norm').lower() == 'average':
3179 norm = 1./float(self.shower_card['nsplit_jobs'])
3180
3181 plotfiles2 = []
3182 for i, file in enumerate(topfiles_set):
3183 filelist = ['%s%d__%d.%s' % (filename, i, j + 1,ext) \
3184 for j in range(self.shower_card['nsplit_jobs'])]
3185 if out_id=='TOP':
3186 infile="%d\n%s\n%s\n" % \
3187 (self.shower_card['nsplit_jobs'],
3188 '\n'.join(filelist),
3189 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
3190 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
3191 stdin=subprocess.PIPE,
3192 stdout=os.open(os.devnull, os.O_RDWR),
3193 cwd=pjoin(self.me_dir, 'Events', self.run_name))
3194 p.communicate(input = infile)
3195 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
3196 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
3197 elif out_id=='HWU':
3198 out=pjoin(self.me_dir,'Events',
3199 self.run_name,'%s%d'% (filename,i))
3200 histos=[]
3201 norms=[]
3202 for plotfile in plotfiles:
3203 histos.append({'dirname':plotfile})
3204 norms.append(norm)
3205 self.combine_plots_HwU(histos,out,normalisation=norms)
3206 try:
3207 misc.call(['gnuplot','%s%d.gnuplot' % (filename, i)],\
3208 stdout=os.open(os.devnull, os.O_RDWR),\
3209 stderr=os.open(os.devnull, os.O_RDWR),\
3210 cwd=pjoin(self.me_dir, 'Events',self.run_name))
3211 except Exception:
3212 pass
3213
3214 plotfiles2.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.%s' % (filename, i,ext)))
3215 tar = tarfile.open(
3216 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
3217 for f in filelist:
3218 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
3219 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
3220
3221 tar.close()
3222
3223 ffiles = 'files'
3224 have = 'have'
3225 if len(plotfiles2) == 1:
3226 ffiles = 'file'
3227 have = 'has'
3228
3229 message = ('The %s %s %s been generated, with histograms in the' + \
3230 ' %s, obtained by showering the parton-level' + \
3231 ' file %s.gz with %s.\n' + \
3232 'The files from the different shower ' + \
3233 'jobs (before combining them) can be found inside %s.') % \
3234 (ffiles, ', '.join(plotfiles2), have, hist_format,\
3235 evt_file, shower,
3236 ', '.join([f.replace('%s' % ext, 'tar.gz') for f in plotfiles2]))
3237
3238 else:
3239 message = ('The following files have been generated:\n %s\n' + \
3240 'They contain histograms in the' + \
3241 ' %s, obtained by showering the parton-level' + \
3242 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
3243 hist_format, evt_file, shower)
3244
3245
3246 run_dir_path = pjoin(rundir, self.run_name)
3247 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
3248 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
3249 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
3250 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
3251 %(shower, count)))
3252 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
3253 cwd=run_dir_path)
3254 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
3255
3256 for f in to_gzip:
3257 misc.gzip(f)
3258 if message:
3259 logger.info(message)
3260 if warning:
3261 logger.warning(warning)
3262
3263 self.update_status('Run complete', level='shower', update_results=True)
3264
3265
3266
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """Define the run name, the run_tag, the banner and the results.

        If `name` matches the current run, only the tag bookkeeping (and,
        on request, the run_card) is refreshed.  Otherwise the previous run
        is stored, a fresh run_card/banner is loaded and the run is
        registered in self.results.  For levels beyond 'parton' the tag of
        the earlier run providing the required input data is returned.
        """

        # When the last tag of a run already holds data at one of the listed
        # levels, adding new data at `level` forces a fresh tag
        # (key: level being added; value: levels whose presence triggers the
        # tag switch).
        upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'],
                       'pythia': ['pythia','pgs','delphes'],
                       'shower': ['shower'],
                       'pgs': ['pgs'],
                       'delphes':['delphes'],
                       'plot':[]}

        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        # data already present at this level -> pick a new tag
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return  # nothing else to do for an unchanged run name

        # save/clean the previous run before switching to the new one
        if self.run_name:
            self.store_result()
        # store the new name
        self.run_name = name

        # read the run_card
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # recover (or build) the banner associated to this run/tag
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            pass  # brand-new run at parton level: the default tag is fine
        elif not self.run_name in self.results:
            # running post-parton data on a run unknown to the results DB
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:
                if getattr(self.results[self.run_name][-1], tag):
                    # LEVEL is already defined in the last tag -> switch tag
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # we can add the results to the current tag
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag  # ensure that run_tag is correct

        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # return the tag of the previous run holding the input data needed at
        # this level
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # most recent tag with pythia output
            # NOTE(review): implicitly returns None when no tag has pythia
            # output -- confirm callers handle that.
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
3356
3357
        """Persist the results DB and compress/clean stored event files.
        This is done when we are quite sure that the output of the current
        run will not be used anymore (e.g. when switching run name)."""

        if not self.run_name:
            return

        self.results.save()

        if not self.to_store:
            return

        # 'event' flag: gzip the parton-level LHE file, or simply drop the
        # uncompressed copy when a .gz version already exists
        if 'event' in self.to_store:
            if os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')):
                if not os.path.exists(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz')):
                    self.update_status('gzipping output file: events.lhe', level='parton', error=True)
                    misc.gzip(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
                else:
                    os.remove(pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'))
            if os.path.exists(pjoin(self.me_dir,'Events','reweight.lhe')):
                os.remove(pjoin(self.me_dir,'Events', 'reweight.lhe'))

        # NOTE(review): `tag` is assigned but never used below -- verify
        # whether tag-dependent cleanup was dropped intentionally.
        tag = self.run_card['run_tag']

        self.to_store = []
3384
3385
        """Read the <init> block of the LHE file `evt_file` and return its
        first-line entries as a dictionary: beam PDG ids and energies, PDF
        author-group and set ids, weighting strategy and number of
        subprocesses."""
        ev_file = open(evt_file)
        init = ""
        found = False
        # accumulate the non-comment lines between <init> and </init>;
        # also stop at <event> as a safety net for malformed headers
        while True:
            line = ev_file.readline()
            if "<init>" in line:
                found = True
            elif found and not line.startswith('#'):
                init += line
            if "</init>" in line or "<event>" in line:
                break
        ev_file.close()

        # The first ten whitespace-separated tokens of the block are the
        # standard Les Houches initialization record; per-process lines that
        # may follow are ignored here.
        init_dict = {}
        init_dict['idbmup1'] = int(init.split()[0])
        init_dict['idbmup2'] = int(init.split()[1])
        init_dict['ebmup1'] = float(init.split()[2])
        init_dict['ebmup2'] = float(init.split()[3])
        init_dict['pdfgup1'] = int(init.split()[4])
        init_dict['pdfgup2'] = int(init.split()[5])
        init_dict['pdfsup1'] = int(init.split()[6])
        init_dict['pdfsup2'] = int(init.split()[7])
        init_dict['idwtup'] = int(init.split()[8])
        init_dict['nprup'] = int(init.split()[9])

        return init_dict
3419
3420
        """creates the mcatnlo input script using the values set in the header of the event_file.
        It also checks if the lhapdf library is used, linking it (and copying
        the needed PDF sets) when required.  Returns the shower name read
        from the run_card in the banner."""
        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        itry = 0
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        # cap the number of events to shower at the number generated
        if nevents < 0 or \
           nevents > self.banner.get_detail('run_card', 'nevents'):
            nevents = self.banner.get_detail('run_card', 'nevents')

        # events to shower per split job (true division: the file imports
        # division from __future__; the %d formatting below truncates)
        nevents = nevents / self.shower_card['nsplit_jobs']

        # Monte-Carlo masses, one "pdg mass" pair per banner line
        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        # build the key=value input script consumed by the MCatNLO driver
        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (self.banner.get_detail('run_card', 'nevents') /\
                                             self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
        content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value
        # masses/widths of t (pdg 6), Z (23) and W (24) from the param_card
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            # no Higgs (pdg 25) in the param_card: fall back to defaults
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # leptons absent from the banner: recover their MC masses from
            # the shower-specific include file (fortran d-exponents -> e)
            mcmass_lines = [l for l in \
                    open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                         ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if we need to link lhapdf for the shower
        if int(self.shower_card['pdfcode']) > 1 or \
           (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1):
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                          stdout = subprocess.PIPE).stdout.read().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode']==1:
                # pdfcode 1: reuse the PDF set recorded in the event <init>
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode'])==1:
            # events generated with an internal PDF but pdfcode=1 requested:
            # try to shower with the generation PDF through LHAPDF, falling
            # back to the shower's internal PDF if LHAPDF is unusable
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                                              stdout = subprocess.PIPE).stdout.read().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                               ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                               ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                               ' same set as was used in the event generation install LHAPDF and set the path using'+\
                               ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            # internal shower PDF
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # pass the configured shower installation paths, when set
        if self.options['pythia8_path']:
            content+='PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content+='HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path']:
            content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path']:
            content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

        # write the input script for the MCatNLO driver
        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
3554
3555
        """runs the reweight_xsec_events executables on each sub-event file generated
        to compute on the fly scale and/or PDF uncertainities.
        Returns the [scale_info, pdf_info] structure computed by
        pdf_scale_from_reweighting."""
        logger.info(' Doing reweight')

        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if this is just a reweight run, restore the original bookkeeping
        # file saved by a previous pass
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        # read nevents_unweighted to get the list of event files
        file = open(nev_unw)
        lines = file.read().split('\n')
        file.close()
        # keep a copy of the original bookkeeping file
        files.cp(nev_unw, nev_unw + '.orig')

        # event-file names and weights; entries with 0 events are skipped
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        evt_wghts = [float(line.split()[3]) for line in lines[:-1] if line.split()[1] != '0']

        # prepare one job per channel directory, each running the local
        # reweight executable (linked into the channel directory)
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        # NOTE(review): `evt` here is the basename from the *last* loop
        # iteration and is passed to every job -- confirm all event files
        # share the same basename so this is intentional.
        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        # check that each reweighted event file is complete (well-terminated)
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
                    pjoin(self.me_dir, 'SubProcesses', evt_file)], \
                    stdout = subprocess.PIPE).stdout.read().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the' + \
                        '\'reweight_xsec_events.output\' files inside the ' + \
                        '\'SubProcesses/P*/G*/ directories for details')

        # update the file names in nevents_unweighted to the .rwgt versions
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()

        return self.pdf_scale_from_reweighting(evt_files,evt_wghts)
3608
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/pdf_scale_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents. The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
        xsec_pdf0 xsec_pdf1 ...."""

        # accumulate, over all channel files, the weighted cross sections for
        # each scale choice and each PDF set
        scales=[]
        pdfs=[]
        for i,evt_file in enumerate(evt_files):
            path, evt=os.path.split(evt_file)
            with open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat'),'r') as f:
                data_line=f.readline()
                if "scale variations:" in data_line:
                    # NOTE(review): the inner `i` shadows the file index, so
                    # evt_wghts[i] is indexed by the scale-choice position --
                    # verify this is the intended weight.
                    for i,scale in enumerate(self.run_card['dynamical_scale_choice']):
                        data_line = f.readline().split()
                        # fortran 'D' exponents -> python 'E'
                        scales_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            scales[i] = [a + b for a, b in zip(scales[i], scales_this)]
                        except IndexError:
                            scales+=[scales_this]
                    data_line=f.readline()
                if "pdf variations:" in data_line:
                    # same shadowing pattern as above, for the PDF sets
                    for i,pdf in enumerate(self.run_card['lhaid']):
                        data_line = f.readline().split()
                        pdfs_this = [float(val)*evt_wghts[i] for val in f.readline().replace("D", "E").split()]
                        try:
                            pdfs[i] = [a + b for a, b in zip(pdfs[i], pdfs_this)]
                        except IndexError:
                            pdfs+=[pdfs_this]

        # scale uncertainty, in percent of the central value
        scale_info=[]
        for j,scale in enumerate(scales):
            s_cen=scale[0]
            if s_cen != 0.0 and self.run_card['reweight_scale'][j]:
                # max and min of the full envelope
                s_max=(max(scale)/s_cen-1)*100
                s_min=(1-min(scale)/s_cen)*100
                # ren and fac scale dependence added in quadrature
                ren_var=[]
                fac_var=[]
                for i in range(len(self.run_card['rw_rscale'])):
                    ren_var.append(scale[i]-s_cen)
                for i in range(len(self.run_card['rw_fscale'])):
                    fac_var.append(scale[i*len(self.run_card['rw_rscale'])]-s_cen)
                s_max_q=((s_cen+math.sqrt(math.pow(max(ren_var),2)+math.pow(max(fac_var),2)))/s_cen-1)*100
                s_min_q=(1-(s_cen-math.sqrt(math.pow(min(ren_var),2)+math.pow(min(fac_var),2)))/s_cen)*100
                s_size=len(scale)
            else:
                # no reweighting requested (or vanishing central value)
                s_max=0.0
                s_min=0.0
                s_max_q=0.0
                s_min_q=0.0
                s_size=len(scale)
            scale_info.append({'cen':s_cen, 'min':s_min, 'max':s_max, \
                               'min_q':s_min_q, 'max_q':s_max_q, 'size':s_size, \
                               'label':self.run_card['dynamical_scale_choice'][j], \
                               'unc':self.run_card['reweight_scale'][j]})

        # try to locate the LHAPDF python bindings, needed to compute the
        # PDF uncertainty from the per-member cross sections
        if any(self.run_card['reweight_pdf']):
            use_lhapdf=False
            lhapdf_libdir=subprocess.Popen([self.options['lhapdf'],'--libdir'],\
                                           stdout=subprocess.PIPE).stdout.read().strip()
            # look for a pythonX.Y/site-packages/lhapdf.so under --libdir
            try:
                candidates=[dirname for dirname in os.listdir(lhapdf_libdir) \
                            if os.path.isdir(pjoin(lhapdf_libdir,dirname))]
            except OSError:
                candidates=[]
            for candidate in candidates:
                if os.path.isfile(pjoin(lhapdf_libdir,candidate,'site-packages','lhapdf.so')):
                    sys.path.insert(0,pjoin(lhapdf_libdir,candidate,'site-packages'))
                    try:
                        import lhapdf
                        use_lhapdf=True
                        break
                    except ImportError:
                        sys.path.pop(0)
                        continue
            # same search under the lib64 variant of the directory
            if not use_lhapdf:
                try:
                    candidates=[dirname for dirname in os.listdir(lhapdf_libdir+'64') \
                                if os.path.isdir(pjoin(lhapdf_libdir+'64',dirname))]
                except OSError:
                    candidates=[]
                for candidate in candidates:
                    if os.path.isfile(pjoin(lhapdf_libdir+'64',candidate,'site-packages','lhapdf.so')):
                        sys.path.insert(0,pjoin(lhapdf_libdir+'64',candidate,'site-packages'))
                        try:
                            import lhapdf
                            use_lhapdf=True
                            break
                        except ImportError:
                            sys.path.pop(0)
                            continue
            # last resort: lhapdf may already be importable from PYTHONPATH
            if not use_lhapdf:
                try:
                    import lhapdf
                    use_lhapdf=True
                except ImportError:
                    logger.warning("Failed to access python version of LHAPDF: "\
                                   "cannot compute PDF uncertainty from the "\
                                   "weights in the events. The weights in the LHE " \
                                   "event files will still cover all PDF set members, "\
                                   "but there will be no PDF uncertainty printed in the run summary. \n "\
                                   "If the python interface to LHAPDF is available on your system, try "\
                                   "adding its location to the PYTHONPATH environment variable and the"\
                                   "LHAPDF library location to LD_LIBRARY_PATH (linux) or DYLD_LIBRARY_PATH (mac os x).")
                    use_lhapdf=False

        # silence LHAPDF banners before querying the sets
        if any(self.run_card['reweight_pdf']) and use_lhapdf: lhapdf.setVerbosity(0)

        # PDF uncertainty, in percent of the central value, one entry per set
        pdf_info=[]
        for j,pdfset in enumerate(pdfs):
            p_cen=pdfset[0]
            if p_cen != 0.0 and self.run_card['reweight_pdf'][j]:
                if use_lhapdf:
                    pdfsetname=self.run_card['lhapdfsetname'][j]
                    try:
                        # let LHAPDF combine the members with the error
                        # recipe appropriate for the set
                        p=lhapdf.getPDFSet(pdfsetname)
                        ep=p.uncertainty(pdfset,-1)
                        p_cen=ep.central
                        p_min=abs(ep.errminus/p_cen)*100
                        p_max=abs(ep.errplus/p_cen)*100
                        p_type=p.errorType
                        p_size=p.size
                        p_conf=p.errorConfLevel
                    except:
                        logger.warning("Could not access LHAPDF to compute uncertainties for %s" % pdfsetname)
                        p_min=0.0
                        p_max=0.0
                        p_type='unknown'
                        p_conf='unknown'
                        p_size=len(pdfset)
                else:
                    p_min=0.0
                    p_max=0.0
                    p_type='unknown'
                    p_conf='unknown'
                    p_size=len(pdfset)
                    pdfsetname=self.run_card['lhaid'][j]
            else:
                # no reweighting requested (or vanishing central value)
                p_min=0.0
                p_max=0.0
                p_type='none'
                p_conf='unknown'
                p_size=len(pdfset)
                pdfsetname=self.run_card['lhaid'][j]
            pdf_info.append({'cen':p_cen, 'min':p_min, 'max':p_max, \
                             'unc':p_type, 'name':pdfsetname, 'size':p_size, \
                             'label':self.run_card['lhaid'][j], 'conf':p_conf})

        scale_pdf_info=[scale_info,pdf_info]
        return scale_pdf_info
3771
3772
3784
3785 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3786 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
3787 self.ijob = 0
3788 if run_type != 'shower':
3789 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
3790 for args in arg_list:
3791 for Pdir, jobs in job_dict.items():
3792 for job in jobs:
3793 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3794 if self.cluster_mode == 2:
3795 time.sleep(1)
3796 else:
3797 self.njobs = len(arg_list)
3798 for args in arg_list:
3799 [(cwd, exe)] = job_dict.items()
3800 self.run_exe(exe, args, run_type, cwd)
3801
3802 self.wait_for_complete(run_type)
3803
3804
3805
3807 """check the integrity of the event files after splitting, and resubmit
3808 those which are not nicely terminated"""
3809 jobs_to_resubmit = []
3810 for job in jobs:
3811 last_line = ''
3812 try:
3813 last_line = subprocess.Popen(
3814 ['tail', '-n1', pjoin(job['dirname'], 'events.lhe')], \
3815 stdout = subprocess.PIPE).stdout.read().strip()
3816 except IOError:
3817 pass
3818 if last_line != "</LesHouchesEvents>":
3819 jobs_to_resubmit.append(job)
3820 self.njobs = 0
3821 if jobs_to_resubmit:
3822 run_type = 'Resubmitting broken jobs'
3823 logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
3824 for job in jobs_to_resubmit:
3825 logger.debug('Resubmitting ' + job['dirname'] + '\n')
3826 self.run_all_jobs(jobs_to_resubmit,2,fixed_order=False)
3827
3828
3830 """looks into the nevents_unweighed_splitted file to check how many
3831 split jobs are needed for this (pdir, job). arg is F, B or V"""
3832
3833 splittings = []
3834 ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
3835 pattern = re.compile('for i in (\d+) ; do')
3836 match = re.search(pattern, ajob)
3837 channel = match.groups()[0]
3838
3839
3840 nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()
3841
3842
3843 pattern = re.compile(r"%s_(\d+)/events.lhe" % \
3844 pjoin(pdir, 'G%s%s' % (arg,channel)))
3845 matches = re.findall(pattern, nevents_file)
3846 for m in matches:
3847 splittings.append(m)
3848 return splittings
3849
3850
3851 - def run_exe(self, exe, args, run_type, cwd=None):
3852 """this basic function launch locally/on cluster exe with args as argument.
3853 """
3854
3855
3856 execpath = None
3857 if cwd and os.path.exists(pjoin(cwd, exe)):
3858 execpath = pjoin(cwd, exe)
3859 elif not cwd and os.path.exists(exe):
3860 execpath = exe
3861 else:
3862 raise aMCatNLOError('Cannot find executable %s in %s' \
3863 % (exe, os.getcwd()))
3864
3865 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
3866 subprocess.call(['chmod', '+x', exe], cwd=cwd)
3867
3868 if self.cluster_mode == 0:
3869
3870 misc.call(['./'+exe] + args, cwd=cwd)
3871 self.ijob += 1
3872 self.update_status((max([self.njobs - self.ijob - 1, 0]),
3873 min([1, self.njobs - self.ijob]),
3874 self.ijob, run_type), level='parton')
3875
3876
3877 elif 'reweight' in exe:
3878
3879
3880 input_files, output_files = [], []
3881 pdfinput = self.get_pdf_input_filename()
3882 if os.path.exists(pdfinput):
3883 input_files.append(pdfinput)
3884 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
3885 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
3886 input_files.append(args[0])
3887 output_files.append('%s.rwgt' % os.path.basename(args[0]))
3888 output_files.append('reweight_xsec_events.output')
3889 output_files.append('scale_pdf_dependence.dat')
3890
3891 return self.cluster.submit2(exe, args, cwd=cwd,
3892 input_files=input_files, output_files=output_files,
3893 required_output=output_files)
3894
3895 elif 'ajob' in exe:
3896
3897
3898 if type(args[0]) == str:
3899 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd,args)
3900
3901 self.cluster.submit2(exe, args, cwd=cwd,
3902 input_files=input_files, output_files=output_files,
3903 required_output=required_output)
3904
3905
3906
3907
3908
3909
3910 elif 'shower' in exe:
3911
3912
3913
3914 input_files, output_files = [], []
3915 shower = args[0]
3916
3917 if shower == 'PYTHIA8':
3918 input_files.append(pjoin(cwd, 'Pythia8.exe'))
3919 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
3920 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3921 input_files.append(pjoin(cwd, 'config.sh'))
3922 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
3923 else:
3924 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
3925 else:
3926 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
3927 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
3928 if shower == 'HERWIGPP':
3929 input_files.append(pjoin(cwd, 'Herwig++'))
3930 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
3931 if len(args) == 3:
3932 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
3933 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
3934 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
3935 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
3936 else:
3937 raise aMCatNLOError, 'Event file not present in %s' % \
3938 pjoin(self.me_dir, 'Events', self.run_name)
3939 else:
3940 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
3941
3942 if len(args) == 3:
3943 output_files.append('mcatnlo_run.log')
3944 else:
3945 output_files.append('mcatnlo_run_%s.log' % args[3])
3946 if args[1] == 'HEP':
3947 if len(args) == 3:
3948 fname = 'events'
3949 else:
3950 fname = 'events_%s' % args[3]
3951 if shower in ['PYTHIA8', 'HERWIGPP']:
3952 output_files.append(fname + '.hepmc.gz')
3953 else:
3954 output_files.append(fname + '.hep.gz')
3955 elif args[1] == 'TOP' or args[1] == 'HWU':
3956 if len(args) == 3:
3957 fname = 'histfile'
3958 else:
3959 fname = 'histfile_%s' % args[3]
3960 output_files.append(fname + '.tar')
3961 else:
3962 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
3963
3964 self.cluster.submit2(exe, args, cwd=cwd,
3965 input_files=input_files, output_files=output_files)
3966
3967 else:
3968 return self.cluster.submit(exe, args, cwd=cwd)
3969
3971
3972
3973
3974 output_files = []
3975 required_output = []
3976 input_files = [pjoin(self.me_dir, 'SubProcesses', 'randinit'),
3977 pjoin(cwd, 'symfact.dat'),
3978 pjoin(cwd, 'iproc.dat'),
3979 pjoin(cwd, 'initial_states_map.dat'),
3980 pjoin(cwd, 'configs_and_props_info.dat'),
3981 pjoin(cwd, 'leshouche_info.dat'),
3982 pjoin(cwd, 'FKS_params.dat')]
3983
3984
3985 if os.path.exists(pjoin(self.me_dir,'OLP_virtuals','gosam.rc')):
3986 input_files.append(pjoin(self.me_dir, 'Cards', 'param_card.dat'))
3987
3988 if os.path.exists(pjoin(cwd,'nevents.tar')):
3989 input_files.append(pjoin(cwd,'nevents.tar'))
3990
3991 if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
3992 input_files.append(pjoin(cwd, 'OLE_order.olc'))
3993
3994
3995 if os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')) and \
3996 cluster.need_transfer(self.options):
3997 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
3998 elif os.path.exists(pjoin(cwd,'MadLoop5_resources')) and \
3999 cluster.need_transfer(self.options):
4000 tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
4001 dereference=True)
4002 tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
4003 tf.close()
4004 input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
4005
4006 if args[1] == 'born' or args[1] == 'all':
4007
4008 input_files.append(pjoin(cwd, 'madevent_mintFO'))
4009 if args[2] == '0':
4010 current = '%s_G%s' % (args[1],args[0])
4011 else:
4012 current = '%s_G%s_%s' % (args[1],args[0],args[2])
4013 if os.path.exists(pjoin(cwd,current)):
4014 input_files.append(pjoin(cwd, current))
4015 output_files.append(current)
4016
4017 required_output.append('%s/results.dat' % current)
4018 required_output.append('%s/res_%s.dat' % (current,args[3]))
4019 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4020 required_output.append('%s/mint_grids' % current)
4021 required_output.append('%s/grid.MC_integer' % current)
4022 if args[3] != '0':
4023 required_output.append('%s/scale_pdf_dependence.dat' % current)
4024
4025 elif args[1] == 'F' or args[1] == 'B':
4026
4027 input_files.append(pjoin(cwd, 'madevent_mintMC'))
4028
4029 if args[2] == '0':
4030 current = 'G%s%s' % (args[1],args[0])
4031 else:
4032 current = 'G%s%s_%s' % (args[1],args[0],args[2])
4033 if os.path.exists(pjoin(cwd,current)):
4034 input_files.append(pjoin(cwd, current))
4035 output_files.append(current)
4036 if args[2] > '0':
4037
4038 output_files.append('G%s%s_%s' % (args[1], args[0], args[2]))
4039 required_output.append('G%s%s_%s/log_MINT%s.txt' % (args[1],args[0],args[2],args[3]))
4040
4041 else:
4042 required_output.append('%s/log_MINT%s.txt' % (current,args[3]))
4043 if args[3] in ['0','1']:
4044 required_output.append('%s/results.dat' % current)
4045 if args[3] == '1':
4046 output_files.append('%s/results.dat' % current)
4047
4048 else:
4049 raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))
4050
4051
4052 pdfinput = self.get_pdf_input_filename()
4053 if os.path.exists(pdfinput):
4054 input_files.append(pdfinput)
4055 return input_files, output_files, required_output, args
4056
4057
4058 - def compile(self, mode, options):
4059 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
4060 specified in mode"""
4061
4062 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
4063
4064 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
4065 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
4066
4067 self.get_characteristics(pjoin(self.me_dir,
4068 'SubProcesses', 'proc_characteristics'))
4069
4070
4071 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
4072 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
4073 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
4074 test_log = pjoin(self.me_dir, 'test.log')
4075
4076
4077 self.make_opts_var = {}
4078 if self.proc_characteristics['has_loops'] and \
4079 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4080 self.make_opts_var['madloop'] = 'true'
4081
4082 self.update_status('Compiling the code', level=None, update_results=True)
4083
4084 libdir = pjoin(self.me_dir, 'lib')
4085 sourcedir = pjoin(self.me_dir, 'Source')
4086
4087
4088 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
4089
4090 if '+' in mode:
4091 mode = mode.split('+')[0]
4092 if mode in ['NLO', 'LO']:
4093 exe = 'madevent_mintFO'
4094 tests = ['test_ME']
4095 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
4096 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
4097 exe = 'madevent_mintMC'
4098 tests = ['test_ME', 'test_MC']
4099
4100 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o HwU_dummy.o\n')
4101
4102
4103 p_dirs = [d for d in \
4104 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
4105
4106 self.do_treatcards('', amcatnlo=True)
4107
4108
4109 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
4110 for p_dir in p_dirs]) and options['nocompile']:
4111 return
4112
4113
4114 if os.path.exists(pjoin(libdir, 'PDFsets')):
4115 files.rm(pjoin(libdir, 'PDFsets'))
4116
4117
4118 if self.run_card['pdlabel'] == 'lhapdf' and \
4119 (self.banner.get_detail('run_card', 'lpp1') != 0 or \
4120 self.banner.get_detail('run_card', 'lpp2') != 0):
4121
4122 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
4123 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
4124 lhaid_list = self.run_card['lhaid']
4125 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
4126
4127 else:
4128 if self.run_card['lpp1'] == 1 == self.run_card['lpp2']:
4129 logger.info('Using built-in libraries for PDFs')
4130 if self.run_card['lpp1'] == 0 == self.run_card['lpp2']:
4131 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.')
4132 self.make_opts_var['lhapdf'] = ""
4133
4134
4135 if self.run_card['iappl'] != 0:
4136 self.make_opts_var['applgrid'] = 'True'
4137
4138 for code in ['applgrid','amcfast']:
4139 try:
4140 p = subprocess.Popen([self.options[code], '--version'], \
4141 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
4142 except OSError:
4143 raise aMCatNLOError(('No valid %s installation found. \n' + \
4144 'Please set the path to %s-config by using \n' + \
4145 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
4146 else:
4147 output, _ = p.communicate()
4148 if code is 'applgrid' and output < '1.4.63':
4149 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
4150 +' You are using %s',output)
4151 if code is 'amcfast' and output < '1.1.1':
4152 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
4153 +' You are using %s',output)
4154
4155
4156 appllibs=" APPLLIBS=$(shell %s --ldflags) $(shell %s --ldcflags) \n" \
4157 % (self.options['amcfast'],self.options['applgrid'])
4158 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
4159 text_out=[]
4160 for line in text:
4161 if line.strip().startswith('APPLLIBS=$'):
4162 line=appllibs
4163 text_out.append(line)
4164 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out)
4165 else:
4166 self.make_opts_var['applgrid'] = ""
4167
4168 if 'fastjet' in self.options.keys() and self.options['fastjet']:
4169 self.make_opts_var['fastjet_config'] = self.options['fastjet']
4170
4171
4172 self.update_make_opts()
4173
4174
4175 self.update_status('Compiling source...', level=None)
4176 misc.compile(['clean4pdf'], cwd = sourcedir)
4177 misc.compile(cwd = sourcedir)
4178 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
4179 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
4180 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
4181 and os.path.exists(pjoin(libdir, 'libpdf.a')):
4182 logger.info(' ...done, continuing with P* directories')
4183 else:
4184 raise aMCatNLOError('Compilation failed')
4185
4186
4187 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
4188 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
4189 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
4190 if os.path.exists(pjoin(sourcedir,'StdHEP')):
4191 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
4192 misc.compile(['StdHEP'], cwd = sourcedir)
4193 logger.info(' ...done.')
4194 else:
4195 raise aMCatNLOError('Could not compile StdHEP because its'+\
4196 ' source directory could not be found in the SOURCE folder.\n'+\
4197 " Check the MG5_aMC option 'output_dependencies.'")
4198
4199
4200 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4201 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4202 if os.path.exists(pjoin(sourcedir,'CutTools')):
4203 logger.info('Compiling CutTools (can take a couple of minutes) ...')
4204 misc.compile(['CutTools'], cwd = sourcedir)
4205 logger.info(' ...done.')
4206 else:
4207 raise aMCatNLOError('Could not compile CutTools because its'+\
4208 ' source directory could not be found in the SOURCE folder.\n'+\
4209 " Check the MG5_aMC option 'output_dependencies.'")
4210 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
4211 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
4212 raise aMCatNLOError('CutTools compilation failed.')
4213
4214
4215
4216 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4217 libdir, 'libcts.a')))),'compiler_version.log')
4218 if os.path.exists(compiler_log_path):
4219 compiler_version_used = open(compiler_log_path,'r').read()
4220 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4221 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4222 if os.path.exists(pjoin(sourcedir,'CutTools')):
4223 logger.info('CutTools was compiled with a different fortran'+\
4224 ' compiler. Re-compiling it now...')
4225 misc.compile(['cleanCT'], cwd = sourcedir)
4226 misc.compile(['CutTools'], cwd = sourcedir)
4227 logger.info(' ...done.')
4228 else:
4229 raise aMCatNLOError("CutTools installation in %s"\
4230 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
4231 " seems to have been compiled with a different compiler than"+\
4232 " the one specified in MG5_aMC. Please recompile CutTools.")
4233
4234
4235 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
4236 and os.path.exists(pjoin(sourcedir,'IREGI')):
4237 logger.info('Compiling IREGI (can take a couple of minutes) ...')
4238 misc.compile(['IREGI'], cwd = sourcedir)
4239 logger.info(' ...done.')
4240
4241 if os.path.exists(pjoin(libdir, 'libiregi.a')):
4242
4243
4244 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
4245 libdir, 'libiregi.a')))),'compiler_version.log')
4246 if os.path.exists(compiler_log_path):
4247 compiler_version_used = open(compiler_log_path,'r').read()
4248 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
4249 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
4250 if os.path.exists(pjoin(sourcedir,'IREGI')):
4251 logger.info('IREGI was compiled with a different fortran'+\
4252 ' compiler. Re-compiling it now...')
4253 misc.compile(['cleanIR'], cwd = sourcedir)
4254 misc.compile(['IREGI'], cwd = sourcedir)
4255 logger.info(' ...done.')
4256 else:
4257 raise aMCatNLOError("IREGI installation in %s"\
4258 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
4259 " seems to have been compiled with a different compiler than"+\
4260 " the one specified in MG5_aMC. Please recompile IREGI.")
4261
4262
4263 if self.proc_characteristics['has_loops'] and \
4264 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
4265 if mode in ['NLO', 'aMC@NLO', 'noshower']:
4266 tests.append('check_poles')
4267
4268
4269 self.update_status('Compiling directories...', level=None)
4270
4271 for test in tests:
4272 self.write_test_input(test)
4273
4274 try:
4275 import multiprocessing
4276 if not self.nb_core:
4277 try:
4278 self.nb_core = int(self.options['nb_core'])
4279 except TypeError:
4280 self.nb_core = multiprocessing.cpu_count()
4281 except ImportError:
4282 self.nb_core = 1
4283
4284 compile_options = copy.copy(self.options)
4285 compile_options['nb_core'] = self.nb_core
4286 compile_cluster = cluster.MultiCore(**compile_options)
4287 logger.info('Compiling on %d cores' % self.nb_core)
4288
4289 update_status = lambda i, r, f: self.donothing(i,r,f)
4290 for p_dir in p_dirs:
4291 compile_cluster.submit(prog = compile_dir,
4292 argument = [self.me_dir, p_dir, mode, options,
4293 tests, exe, self.options['run_mode']])
4294 try:
4295 compile_cluster.wait(self.me_dir, update_status)
4296 except Exception, error:
4297 logger.warning("Fail to compile the Subprocesses")
4298 if __debug__:
4299 raise
4300 compile_cluster.remove()
4301 self.do_quit('')
4302
4303 logger.info('Checking test output:')
4304 for p_dir in p_dirs:
4305 logger.info(p_dir)
4306 for test in tests:
4307 logger.info(' Result for %s:' % test)
4308
4309 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
4310
4311 self.check_tests(test, this_dir)
4312
4313
4316
4317
4319 """just call the correct parser for the test log.
4320 Skip check_poles for LOonly folders"""
4321 if test in ['test_ME', 'test_MC']:
4322 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
4323 elif test == 'check_poles' and not os.path.exists(pjoin(dir,'parton_lum_0.f')):
4324 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
4325
4326
4328 """read and parse the test_ME/MC.log file"""
4329 content = open(log).read()
4330 if 'FAILED' in content:
4331 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK')
4332 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
4333 'Please check that widths of final state particles (e.g. top) have been' + \
4334 ' set to 0 in the param_card.dat.')
4335 else:
4336 lines = [l for l in content.split('\n') if 'PASSED' in l]
4337 logger.info(' Passed.')
4338 logger.debug('\n'+'\n'.join(lines))
4339
4340
4342 """reads and parse the check_poles.log file"""
4343 content = open(log).read()
4344 npass = 0
4345 nfail = 0
4346 for line in content.split('\n'):
4347 if 'PASSED' in line:
4348 npass +=1
4349 tolerance = float(line.split()[1])
4350 if 'FAILED' in line:
4351 nfail +=1
4352 tolerance = float(line.split()[1])
4353
4354 if nfail + npass == 0:
4355 logger.warning('0 points have been tried')
4356 return
4357
4358 if float(nfail)/float(nfail+npass) > 0.1:
4359 raise aMCatNLOError('Poles do not cancel, run cannot continue')
4360 else:
4361 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
4362 %(npass, nfail+npass, tolerance))
4363
4364
4385
4386
4387
4388
4390 """ return the model name """
4391 if hasattr(self, 'model_name'):
4392 return self.model_name
4393
4394 model = 'sm'
4395 proc = []
4396 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')):
4397 line = line.split('#')[0]
4398
4399 if line.startswith('import') and 'model' in line:
4400 model = line.split()[2]
4401 proc = []
4402 elif line.startswith('generate'):
4403 proc.append(line.split(None,1)[1])
4404 elif line.startswith('add process'):
4405 proc.append(line.split(None,2)[2])
4406
4407 self.model = model
4408 self.process = proc
4409 return model
4410
4411
4412
4413
4415 """Ask the question when launching generate_events/multi_run"""
4416
4417 if 'parton' not in options:
4418 options['parton'] = False
4419 if 'reweightonly' not in options:
4420 options['reweightonly'] = False
4421
4422
4423 void = 'NOT INSTALLED'
4424 switch_order = ['order', 'fixed_order', 'shower','madspin', 'reweight']
4425 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
4426 'madspin': void,'reweight':'OFF'}
4427 if not switch:
4428 switch = switch_default
4429 else:
4430 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch))
4431 default_switch = ['ON', 'OFF']
4432
4433
4434 allowed_switch_value = {'order': ['LO', 'NLO'],
4435 'fixed_order': default_switch,
4436 'shower': default_switch,
4437 'madspin': default_switch,
4438 'reweight': default_switch}
4439
4440 description = {'order': 'Perturbative order of the calculation:',
4441 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
4442 'shower': 'Shower the generated events:',
4443 'madspin': 'Decay particles with the MadSpin module:',
4444 'reweight': 'Add weights to the events based on changing model parameters:'}
4445
4446 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
4447 ('madspin', 'ON'): {'fixed_order':'OFF'},
4448 ('reweight', 'ON'): {'fixed_order':'OFF'},
4449 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF', 'reweight':'OFF'}
4450 }
4451 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
4452
4453 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
4454
4455 if self.proc_characteristics['ninitial'] == 1:
4456 switch['fixed_order'] = 'ON'
4457 switch['shower'] = 'Not available for decay'
4458 switch['madspin'] = 'Not available for decay'
4459 switch['reweight'] = 'Not available for decay'
4460 allowed_switch_value['fixed_order'] = ['ON']
4461 allowed_switch_value['shower'] = ['OFF']
4462 allowed_switch_value['madspin'] = ['OFF']
4463 allowed_switch_value['reweight'] = ['OFF']
4464 available_mode = ['0','1']
4465 special_values = ['LO', 'NLO']
4466 else:
4467
4468 available_mode = ['0', '1', '2','3']
4469
4470 if mode == 'auto':
4471 mode = None
4472 if not mode and (options['parton'] or options['reweightonly']):
4473 mode = 'noshower'
4474
4475
4476 if '3' in available_mode:
4477 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
4478 switch['shower'] = 'ON'
4479 else:
4480 switch['shower'] = 'OFF'
4481
4482 if (not aMCatNLO or self.options['mg5_path']) and '3' in available_mode:
4483 available_mode.append('4')
4484 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4485 switch['madspin'] = 'ON'
4486 else:
4487 switch['madspin'] = 'OFF'
4488 if misc.has_f2py() or self.options['f2py_compiler']:
4489 available_mode.append('5')
4490 if os.path.exists(pjoin(self.me_dir,'Cards','reweight_card.dat')):
4491 switch['reweight'] = 'ON'
4492 else:
4493 switch['reweight'] = 'OFF'
4494 else:
4495 switch['reweight'] = 'Not available (requires NumPy)'
4496
4497 if 'do_reweight' in options and options['do_reweight'] and '3' in available_mode:
4498 if switch['reweight'] == "OFF":
4499 switch['reweight'] = "ON"
4500 elif switch['reweight'] != "ON":
4501 logger.critical("Cannot run REWEIGHT: %s" % switch['reweight'])
4502 if 'do_madspin' in options and options['do_madspin']:
4503 if switch['madspin'] == "OFF":
4504 switch['madspin'] = 'ON'
4505 elif switch['madspin'] != "ON":
4506 logger.critical("Cannot run MadSpin module: %s" % switch['reweight'])
4507
4508 answers = list(available_mode) + ['auto', 'done']
4509 alias = {}
4510 for id, key in enumerate(switch_order):
4511 if switch[key] != void and switch[key] in allowed_switch_value[key] and \
4512 len(allowed_switch_value[key]) >1:
4513 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
4514
4515 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
4516 for s in allowed_switch_value[key]))
4517 answers += special_values
4518
4519 def create_question(switch):
4520 switch_format = " %i %-61s %12s=%s\n"
4521 question = "The following switches determine which operations are executed:\n"
4522 for id, key in enumerate(switch_order):
4523 question += switch_format % (id+1, description[key], key, switch[key])
4524 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1)
4525 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n'
4526 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n'
4527 return question
4528
4529
4530 def modify_switch(mode, answer, switch):
4531 if '=' in answer:
4532 key, status = answer.split('=')
4533 switch[key] = status
4534 if (key, status) in force_switch:
4535 for key2, status2 in force_switch[(key, status)].items():
4536 if switch[key2] not in [status2, void]:
4537 logger.info('For coherence \'%s\' is set to \'%s\''
4538 % (key2, status2), '$MG:color:BLACK')
4539 switch[key2] = status2
4540 elif answer in ['0', 'auto', 'done']:
4541 return
4542 elif answer in special_values:
4543 logger.info('Enter mode value: %s. Go to the related mode' % answer, '$MG:color:BLACK')
4544
4545
4546 if answer == 'LO':
4547 switch['order'] = 'LO'
4548 switch['fixed_order'] = 'ON'
4549 assign_switch('shower', 'OFF')
4550 elif answer == 'NLO':
4551 switch['order'] = 'NLO'
4552 switch['fixed_order'] = 'ON'
4553 assign_switch('shower', 'OFF')
4554 elif answer == 'aMC@NLO':
4555 switch['order'] = 'NLO'
4556 switch['fixed_order'] = 'OFF'
4557 assign_switch('shower', 'ON')
4558 elif answer == 'aMC@LO':
4559 switch['order'] = 'LO'
4560 switch['fixed_order'] = 'OFF'
4561 assign_switch('shower', 'ON')
4562 elif answer == 'noshower':
4563 switch['order'] = 'NLO'
4564 switch['fixed_order'] = 'OFF'
4565 assign_switch('shower', 'OFF')
4566 elif answer == 'noshowerLO':
4567 switch['order'] = 'LO'
4568 switch['fixed_order'] = 'OFF'
4569 assign_switch('shower', 'OFF')
4570 if mode:
4571 return
4572 return switch
4573
4574 modify_switch(mode, self.last_mode, switch)
4575 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4576 assign_switch('madspin', 'ON')
4577
4578 if not self.force:
4579 answer = ''
4580 while answer not in ['0', 'done', 'auto', 'onlyshower']:
4581 question = create_question(switch)
4582 if mode:
4583 answer = mode
4584 else:
4585 answer = self.ask(question, '0', answers, alias=alias)
4586 if answer.isdigit() and answer != '0':
4587 key = switch_order[int(answer) - 1]
4588 opt1 = allowed_switch_value[key][0]
4589 opt2 = allowed_switch_value[key][1]
4590 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)
4591
4592 if not modify_switch(mode, answer, switch):
4593 break
4594
4595
4596 if not mode or mode == 'auto':
4597 if switch['order'] == 'LO':
4598 if switch['shower'] == 'ON':
4599 mode = 'aMC@LO'
4600 elif switch['fixed_order'] == 'ON':
4601 mode = 'LO'
4602 else:
4603 mode = 'noshowerLO'
4604 elif switch['order'] == 'NLO':
4605 if switch['shower'] == 'ON':
4606 mode = 'aMC@NLO'
4607 elif switch['fixed_order'] == 'ON':
4608 mode = 'NLO'
4609 else:
4610 mode = 'noshower'
4611 logger.info('will run in mode: %s' % mode)
4612
4613 if mode == 'noshower':
4614 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
4615 Please, shower the Les Houches events before using them for physics analyses.""")
4616
4617
4618
4619 cards = ['param_card.dat', 'run_card.dat']
4620 ignore = []
4621 if mode in ['LO', 'NLO']:
4622 options['parton'] = True
4623 ignore = ['shower_card.dat', 'madspin_card.dat']
4624 cards.append('FO_analyse_card.dat')
4625 else:
4626 if switch['madspin'] == 'ON':
4627 cards.append('madspin_card.dat')
4628 if switch['reweight'] == 'ON':
4629 cards.append('reweight_card.dat')
4630 if 'aMC@' in mode:
4631 cards.append('shower_card.dat')
4632 if mode == 'onlyshower':
4633 cards = ['shower_card.dat']
4634 if options['reweightonly']:
4635 cards = ['run_card.dat']
4636
4637 self.keep_cards(cards, ignore)
4638
4639 if mode =='onlyshower':
4640 cards = ['shower_card.dat']
4641
4642
4643
4644 first_cmd = []
4645
4646 if not options['force'] and not self.force:
4647 self.ask_edit_cards(cards, plot=False, first_cmd=first_cmd)
4648
4649
4650 self.banner = banner_mod.Banner()
4651
4652
4653 for card in cards:
4654 self.banner.add(pjoin(self.me_dir, 'Cards', card))
4655
4656 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
4657 self.banner.add_text('run_settings', run_settings)
4658
4659 if not mode =='onlyshower':
4660 self.run_card = self.banner.charge_card('run_card')
4661 self.run_tag = self.run_card['run_tag']
4662
4663 if not hasattr(self, 'run_name') or not self.run_name:
4664 self.run_name = self.find_available_run_name(self.me_dir)
4665
4666 if self.run_name.startswith('run_'):
4667 if mode in ['LO','aMC@LO','noshowerLO']:
4668 self.run_name += '_LO'
4669 self.set_run_name(self.run_name, self.run_tag, 'parton')
4670 if self.run_card['ickkw'] == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4671 raise self.InvalidCmd("""FxFx merging (ickkw=3) not allowed at LO""")
4672 elif self.run_card['ickkw'] == 3 and mode in ['aMC@NLO', 'noshower']:
4673 logger.warning("""You are running with FxFx merging enabled. To be able to merge
4674 samples of various multiplicities without double counting, you
4675 have to remove some events after showering 'by hand'. Please
4676 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
4677 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
4678 raise self.InvalidCmd("""FxFx merging does not work with Q-squared ordered showers.""")
4679 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
4680 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
4681 "Type \'n\' to stop or \'y\' to continue"
4682 answers = ['n','y']
4683 answer = self.ask(question, 'n', answers, alias=alias)
4684 if answer == 'n':
4685 error = '''Stop opertation'''
4686 self.ask_run_configuration(mode, options)
4687
4688 elif self.run_card['ickkw'] == -1 and mode in ['aMC@NLO', 'noshower']:
4689
4690 raise self.InvalidCmd("""NNLL+NLO jet veto runs (ickkw=-1) only possible for fNLO or LO.""")
4691 if 'aMC@' in mode or mode == 'onlyshower':
4692 self.shower_card = self.banner.charge_card('shower_card')
4693
4694 elif mode in ['LO', 'NLO']:
4695 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
4696 self.analyse_card = self.banner.charge_card('FO_analyse_card')
4697
4698 return mode
4699
4700
4701
4702
4703
    """The command line processor of MadGraph"""

    # ------------------------------------------------------------------
    # Class-level usage strings and option parsers for the user-facing
    # commands.  They are built once at class-creation time and shared by
    # all instances; the corresponding do_* methods use them to parse
    # their argument line.
    # ------------------------------------------------------------------

    # 'compile' command: only build the aMC@NLO executables, do not run.
    _compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                "   MODE can be either FO, for fixed-order computations, \n" + \
                "   or MC for matching with parton-shower monte-carlos. \n" + \
                "   (if omitted, it is set to MC)\n"
    _compile_parser = misc.OptionParser(usage=_compile_usage)
    _compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")

    # 'launch' command: run the full chain (cross section, event
    # generation and, in the aMC@ modes, parton shower).
    _launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

    _launch_parser = misc.OptionParser(usage=_launch_usage)
    _launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _launch_parser.add_option("-R", "--reweight", default=False, dest='do_reweight', action='store_true',
                            help="Run the reweight module (reweighting by different model parameters)")
    _launch_parser.add_option("-M", "--madspin", default=False, dest='do_madspin', action='store_true',
                            help="Run the madspin package")

    # 'generate_events' command: same semantics as 'launch' (kept as a
    # separate parser; note it exposes a subset of the launch options —
    # no APPLgrid / reweight / madspin switches).
    _generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                "   MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                "     If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                "     computation of the total cross section and the filling of parton-level histograms \n" + \
                "     specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                "     If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                "     event file is generated which will be showered with the MonteCarlo specified \n" + \
                "     in the run_card.dat\n"

    _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
    _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
    _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")

    # 'calculate_xsect' command: fixed-order cross section only.
    _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross section up to ORDER.\n" + \
                "   ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

    _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
    _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")

    # 'shower' command: shower/hadronize an existing parton-level run;
    # run settings are read back from the event-file header.
    _shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                '   all the information (e.g. number of events, MonteCarlo, ...\n' + \
                '   are directly read from the header of the event file\n'
    _shower_parser = misc.OptionParser(usage=_shower_usage)
    _shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")
4816
4817 if '__main__' == __name__:
4818
4819
4820 import sys
4821 if not sys.version_info[0] == 2 or sys.version_info[1] < 6:
4822 sys.exit('MadGraph/MadEvent 5 works only with python 2.6 or later (but not python 3.X).\n'+\
4823 'Please upgrate your version of python.')
4824
4825 import os
4826 import optparse
4827
4828
4829 root_path = os.path.dirname(os.path.dirname(os.path.realpath( __file__ )))
4830 sys.path.insert(0, root_path)
4831
4834 - def error(self, msg=''):
4836
4837 usage = "usage: %prog [options] [FILE] "
4838 parser = MyOptParser(usage=usage)
4839 parser.add_option("-l", "--logging", default='INFO',
4840 help="logging level (DEBUG|INFO|WARNING|ERROR|CRITICAL) [%default]")
4841 parser.add_option("","--web", action="store_true", default=False, dest='web', \
4842 help='force toce to be in secure mode')
4843 parser.add_option("","--debug", action="store_true", default=False, dest='debug', \
4844 help='force to launch debug mode')
4845 parser_error = ''
4846 done = False
4847
4848 for i in range(len(sys.argv)-1):
4849 try:
4850 (options, args) = parser.parse_args(sys.argv[1:len(sys.argv)-i])
4851 done = True
4852 except MyOptParser.InvalidOption, error:
4853 pass
4854 else:
4855 args += sys.argv[len(sys.argv)-i:]
4856 if not done:
4857
4858 try:
4859 (options, args) = parser.parse_args()
4860 except MyOptParser.InvalidOption, error:
4861 print error
4862 sys.exit(2)
4863
4864 if len(args) == 0:
4865 args = ''
4866
4867 import subprocess
4868 import logging
4869 import logging.config
4870
4871
4872 import internal.coloring_logging
4873 try:
4874 if __debug__ and options.logging == 'INFO':
4875 options.logging = 'DEBUG'
4876 if options.logging.isdigit():
4877 level = int(options.logging)
4878 else:
4879 level = eval('logging.' + options.logging)
4880 print os.path.join(root_path, 'internal', 'me5_logging.conf')
4881 logging.config.fileConfig(os.path.join(root_path, 'internal', 'me5_logging.conf'))
4882 logging.root.setLevel(level)
4883 logging.getLogger('madgraph').setLevel(level)
4884 except:
4885 raise
4886 pass
4887
4888
4889 try:
4890 if args:
4891
4892 if '--web' in args:
4893 i = args.index('--web')
4894 args.pop(i)
4895 cmd_line = aMCatNLOCmd(force_run=True)
4896 else:
4897 cmd_line = aMCatNLOCmdShell(force_run=True)
4898
4899 if not hasattr(cmd_line, 'do_%s' % args[0]):
4900 if parser_error:
4901 print parser_error
4902 print 'and %s can not be interpreted as a valid command.' % args[0]
4903 else:
4904 print 'ERROR: %s not a valid command. Please retry' % args[0]
4905 else:
4906 cmd_line.use_rawinput = False
4907 cmd_line.run_cmd(' '.join(args))
4908 cmd_line.run_cmd('quit')
4909
4910 except KeyboardInterrupt:
4911 print 'quit on KeyboardInterrupt'
4912 pass
4913