1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """A user friendly command line interface to access MadGraph5_aMC@NLO features.
16 Uses the cmd package for command interpretation and tab completion.
17 """
18 from __future__ import division
19
20 import atexit
21 import glob
22 import logging
23 import math
24 import optparse
25 import os
26 import pydoc
27 import random
28 import re
29 import shutil
30 import subprocess
31 import sys
32 import traceback
33 import time
34 import signal
35 import tarfile
36 import copy
37 import datetime
38 import tarfile
39
try:
    import readline
    # GNU readline advertises itself in its module docstring; libedit-based
    # builds (e.g. the macOS default) split completion words differently.
    GNU_SPLITTING = ('GNU' in readline.__doc__)
except Exception:
    # Narrowed from a bare 'except:' (which would also swallow SystemExit /
    # KeyboardInterrupt).  Covers both ImportError (no readline) and
    # TypeError ('in' on a None __doc__): default to the GNU behaviour.
    GNU_SPLITTING = True
45
# Locate the MadGraph5_aMC@NLO root directory (two levels above this file)
# and make its bin/ directory importable.
root_path = os.path.split(os.path.dirname(os.path.realpath( __file__ )))[0]
root_path = os.path.split(root_path)[0]
sys.path.insert(0, os.path.join(root_path,'bin'))


# Shorthand used throughout this module for building filesystem paths.
pjoin = os.path.join

# Loggers for normal and error output respectively.
logger = logging.getLogger('madgraph.stdout')
logger_stderr = logging.getLogger('madgraph.stderr')
55
56 try:
57
58 import madgraph.interface.extended_cmd as cmd
59 import madgraph.interface.common_run_interface as common_run
60 import madgraph.iolibs.files as files
61 import madgraph.iolibs.save_load_object as save_load_object
62 import madgraph.various.banner as banner_mod
63 import madgraph.various.cluster as cluster
64 import madgraph.various.misc as misc
65 import madgraph.various.gen_crossxhtml as gen_crossxhtml
66 import madgraph.various.sum_html as sum_html
67 import madgraph.various.shower_card as shower_card
68 import madgraph.various.FO_analyse_card as analyse_card
69
70 from madgraph import InvalidCmd, aMCatNLOError, MadGraph5Error
71 aMCatNLO = False
72 except ImportError, error:
73 logger.debug(error)
74
75 import internal.extended_cmd as cmd
76 import internal.common_run_interface as common_run
77 import internal.banner as banner_mod
78 import internal.misc as misc
79 from internal import InvalidCmd, MadGraph5Error
80 import internal.files as files
81 import internal.cluster as cluster
82 import internal.save_load_object as save_load_object
83 import internal.gen_crossxhtml as gen_crossxhtml
84 import internal.sum_html as sum_html
85 import internal.shower_card as shower_card
86 import internal.FO_analyse_card as analyse_card
87 aMCatNLO = True
88
91
92
94 """compile the direcory p_dir
95 arguments is the tuple (me_dir, p_dir, mode, options, tests, exe, run_mode)
96 this function needs not to be a class method in order to do
97 the compilation on multicore"""
98
99 (me_dir, p_dir, mode, options, tests, exe, run_mode) = arguments
100 logger.info(' Compiling %s...' % p_dir)
101
102 this_dir = pjoin(me_dir, 'SubProcesses', p_dir)
103
104 try:
105
106
107 for test in tests:
108 misc.compile([test], cwd = this_dir, job_specs = False)
109 input = pjoin(me_dir, '%s_input.txt' % test)
110
111 misc.call(['./%s' % (test)], cwd=this_dir,
112 stdin = open(input), stdout=open(pjoin(this_dir, '%s.log' % test), 'w'))
113
114 if not options['reweightonly']:
115 misc.compile(['gensym'], cwd=this_dir, job_specs = False)
116 open(pjoin(this_dir, 'gensym_input.txt'), 'w').write('%s\n' % run_mode)
117 misc.call(['./gensym'],cwd= this_dir,
118 stdin=open(pjoin(this_dir, 'gensym_input.txt')),
119 stdout=open(pjoin(this_dir, 'gensym.log'), 'w'))
120
121 misc.compile([exe], cwd=this_dir, job_specs = False)
122 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
123 misc.compile(['reweight_xsec_events'], cwd=this_dir, job_specs = False)
124
125 logger.info(' %s done.' % p_dir)
126 return 0
127 except MadGraph5Error, msg:
128 return msg
129
130
def check_compiler(options, block=False):
    """check that the current fortran compiler is gfortran 4.6 or later.
    If block, stops the execution, otherwise just print a warning"""

    msg = 'In order to be able to run at NLO MadGraph5_aMC@NLO, you need to have ' + \
          'gfortran 4.6 or later installed.\n%s has been detected\n'+\
          'Note that You can still run all MadEvent run without any problem!'

    # resolve which compiler to test: explicit option first, then PATH lookup
    if options['fortran_compiler']:
        compiler = options['fortran_compiler']
    elif misc.which('gfortran'):
        compiler = 'gfortran'
    else:
        # bug fix: 'compiler' used to be left unbound here, so the check
        # crashed with a NameError when no compiler could be found
        compiler = ''

    if 'gfortran' not in compiler:
        if block:
            raise aMCatNLOError(msg % compiler)
        else:
            logger.warning(msg % compiler)
    else:
        curr_version = misc.get_gfortran_version(compiler)
        # bug fix: the old check joined the dotted version and compared it as
        # a *string* against '46', which misorders modern versions
        # (e.g. gfortran 10 -> '10' < '46').  Compare numeric (major, minor).
        try:
            version_pair = tuple(int(x) for x in curr_version.split('.')[:2])
        except ValueError:
            version_pair = (0, 0)
        if version_pair < (4, 6):
            if block:
                raise aMCatNLOError(msg % (compiler + ' ' + curr_version))
            else:
                logger.warning(msg % (compiler + ' ' + curr_version))
156
157
158
159
160
161
class CmdExtended(common_run.CommonRunCmd):
    """Particularisation of the cmd command for aMCatNLO"""

    # proposed follow-up commands (none before the first command)
    next_possibility = {
        'start': [],
    }

    # file where the crash report is written
    debug_output = 'ME5_debug'
    error_debug = 'Please report this bug on https://bugs.launchpad.net/madgraph5\n'
    error_debug += 'More information is found in \'%(debug)s\'.\n'
    error_debug += 'Please attach this file to your report.'

    config_debug = 'If you need help with this issue please contact us on https://answers.launchpad.net/madgraph5\n'

    keyboard_stop_msg = """stopping all operation
in order to quit MadGraph5_aMC@NLO please enter exit"""

    # exception classes used by the extended_cmd error-handling machinery
    InvalidCmd = InvalidCmd
    # NOTE(review): aMCatNLOError is only imported in the madgraph branch of
    # the top-of-file try/except, not the internal one -- confirm availability
    ConfigurationError = aMCatNLOError
184
185 - def __init__(self, me_dir, options, *arg, **opt):
186 """Init history and line continuation"""
187
188
189 self.force = False
190
191
192
193 info = misc.get_pkg_info()
194 info_line = ""
195 if info and info.has_key('version') and info.has_key('date'):
196 len_version = len(info['version'])
197 len_date = len(info['date'])
198 if len_version + len_date < 30:
199 info_line = "#* VERSION %s %s %s *\n" % \
200 (info['version'],
201 (30 - len_version - len_date) * ' ',
202 info['date'])
203 else:
204 version = open(pjoin(root_path,'MGMEVersion.txt')).readline().strip()
205 info_line = "#* VERSION %s %s *\n" % \
206 (version, (24 - len(version)) * ' ')
207
208
209
210 self.history_header = \
211 '#************************************************************\n' + \
212 '#* MadGraph5_aMC@NLO *\n' + \
213 '#* *\n' + \
214 "#* * * *\n" + \
215 "#* * * * * *\n" + \
216 "#* * * * * 5 * * * * *\n" + \
217 "#* * * * * *\n" + \
218 "#* * * *\n" + \
219 "#* *\n" + \
220 "#* *\n" + \
221 info_line + \
222 "#* *\n" + \
223 "#* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
224 "#* https://server06.fynu.ucl.ac.be/projects/madgraph *\n" + \
225 "#* and *\n" + \
226 "#* http://amcatnlo.cern.ch *\n" + \
227 '#* *\n' + \
228 '#************************************************************\n' + \
229 '#* *\n' + \
230 '#* Command File for aMCatNLO *\n' + \
231 '#* *\n' + \
232 '#* run as ./bin/aMCatNLO.py filename *\n' + \
233 '#* *\n' + \
234 '#************************************************************\n'
235
236 if info_line:
237 info_line = info_line[1:]
238
239 logger.info(\
240 "************************************************************\n" + \
241 "* *\n" + \
242 "* W E L C O M E to M A D G R A P H 5 *\n" + \
243 "* a M C @ N L O *\n" + \
244 "* *\n" + \
245 "* * * *\n" + \
246 "* * * * * *\n" + \
247 "* * * * * 5 * * * * *\n" + \
248 "* * * * * *\n" + \
249 "* * * *\n" + \
250 "* *\n" + \
251 info_line + \
252 "* *\n" + \
253 "* The MadGraph5_aMC@NLO Development Team - Find us at *\n" + \
254 "* http://amcatnlo.cern.ch *\n" + \
255 "* *\n" + \
256 "* Type 'help' for in-line help. *\n" + \
257 "* *\n" + \
258 "************************************************************")
259 super(CmdExtended, self).__init__(me_dir, options, *arg, **opt)
260
261
263 """return the history header"""
264 return self.history_header % misc.get_time_info()
265
267 """action to perform to close nicely on a keyboard interupt"""
268 try:
269 if hasattr(self, 'cluster'):
270 logger.info('rm jobs on queue')
271 self.cluster.remove()
272 if hasattr(self, 'results'):
273 self.update_status('Stop by the user', level=None, makehtml=True, error=True)
274 self.add_error_log_in_html(KeyboardInterrupt)
275 except:
276 pass
277
    def postcmd(self, stop, line):
        """ Update the status of the run for finishing interactive command """

        # 'force' only applies to a single command; reset it afterwards
        self.force = False

        # nothing to refresh when reading commands from a script
        if not self.use_rawinput:
            return stop

        arg = line.split()
        if len(arg) == 0:
            return stop
        elif str(arg[0]) in ['exit','quit','EOF']:
            return stop

        # refresh the HTML status page; failures here must not abort the CLI
        try:
            self.update_status('Command \'%s\' done.<br> Waiting for instruction.' % arg[0],
                               level=None, error=True)
        except Exception:
            misc.sprint('self.update_status fails', log=logger)
            pass
300
306
312
318
319
320
321
322
323
325 """ The Series of help routine for the aMCatNLOCmd"""
326
330
332 logger.info("syntax: banner_run Path|RUN [--run_options]")
333 logger.info("-- Reproduce a run following a given banner")
334 logger.info(" One of the following argument is require:")
335 logger.info(" Path should be the path of a valid banner.")
336 logger.info(" RUN should be the name of a run of the current directory")
337 self.run_options_help([('-f','answer all question by default'),
338 ('--name=X', 'Define the name associated with the new run')])
339
340
344
349
350
354
358
359
361 logger.info("syntax: open FILE ")
362 logger.info("-- open a file with the appropriate editor.")
363 logger.info(' If FILE belongs to index.html, param_card.dat, run_card.dat')
364 logger.info(' the path to the last created/used directory is used')
365
367 if data:
368 logger.info('-- local options:')
369 for name, info in data:
370 logger.info(' %s : %s' % (name, info))
371
372 logger.info("-- session options:")
373 logger.info(" Note that those options will be kept for the current session")
374 logger.info(" --cluster : Submit to the cluster. Current cluster: %s" % self.options['cluster_type'])
375 logger.info(" --multicore : Run in multi-core configuration")
376 logger.info(" --nb_core=X : limit the number of core to use to X.")
377
378
379
380
381
382
383
385 """ The Series of check routine for the aMCatNLOCmd"""
386
388 """Check the validity of the line. args[0] is the run_directory"""
389
390 if options['force']:
391 self.force = True
392
393 if len(args) == 0:
394 self.help_shower()
395 raise self.InvalidCmd, 'Invalid syntax, please specify the run name'
396 if not os.path.isdir(pjoin(self.me_dir, 'Events', args[0])):
397 raise self.InvalidCmd, 'Directory %s does not exists' % \
398 pjoin(os.getcwd(), 'Events', args[0])
399
400 self.set_run_name(args[0], level= 'shower')
401 args[0] = pjoin(self.me_dir, 'Events', args[0])
402
404 """Check the argument for the plot command
405 plot run_name modes"""
406
407
408 madir = self.options['madanalysis_path']
409 td = self.options['td_path']
410
411 if not madir or not td:
412 logger.info('Retry to read configuration file to find madanalysis/td')
413 self.set_configuration()
414
415 madir = self.options['madanalysis_path']
416 td = self.options['td_path']
417
418 if not madir:
419 error_msg = 'No Madanalysis path correctly set.'
420 error_msg += 'Please use the set command to define the path and retry.'
421 error_msg += 'You can also define it in the configuration file.'
422 raise self.InvalidCmd(error_msg)
423 if not td:
424 error_msg = 'No path to td directory correctly set.'
425 error_msg += 'Please use the set command to define the path and retry.'
426 error_msg += 'You can also define it in the configuration file.'
427 raise self.InvalidCmd(error_msg)
428
429 if len(args) == 0:
430 if not hasattr(self, 'run_name') or not self.run_name:
431 self.help_plot()
432 raise self.InvalidCmd('No run name currently define. Please add this information.')
433 args.append('all')
434 return
435
436
437 if args[0] not in self._plot_mode:
438 self.set_run_name(args[0], level='plot')
439 del args[0]
440 if len(args) == 0:
441 args.append('all')
442 elif not self.run_name:
443 self.help_plot()
444 raise self.InvalidCmd('No run name currently define. Please add this information.')
445
446 for arg in args:
447 if arg not in self._plot_mode and arg != self.run_name:
448 self.help_plot()
449 raise self.InvalidCmd('unknown options %s' % arg)
450
452 """Check the argument for pythia command
453 syntax: pgs [NAME]
454 Note that other option are already remove at this point
455 """
456
457
458 if not self.options['pythia-pgs_path']:
459 logger.info('Retry to read configuration file to find pythia-pgs path')
460 self.set_configuration()
461
462 if not self.options['pythia-pgs_path'] or not \
463 os.path.exists(pjoin(self.options['pythia-pgs_path'],'src')):
464 error_msg = 'No pythia-pgs path correctly set.'
465 error_msg += 'Please use the set command to define the path and retry.'
466 error_msg += 'You can also define it in the configuration file.'
467 raise self.InvalidCmd(error_msg)
468
469 tag = [a for a in arg if a.startswith('--tag=')]
470 if tag:
471 arg.remove(tag[0])
472 tag = tag[0][6:]
473
474
475 if len(arg) == 0 and not self.run_name:
476 if self.results.lastrun:
477 arg.insert(0, self.results.lastrun)
478 else:
479 raise self.InvalidCmd('No run name currently define. Please add this information.')
480
481 if len(arg) == 1 and self.run_name == arg[0]:
482 arg.pop(0)
483
484 if not len(arg) and \
485 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
486 self.help_pgs()
487 raise self.InvalidCmd('''No file file pythia_events.hep currently available
488 Please specify a valid run_name''')
489
490 lock = None
491 if len(arg) == 1:
492 prev_tag = self.set_run_name(arg[0], tag, 'pgs')
493 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
494 'events_*.hep.gz'))
495 if not filenames:
496 raise self.InvalidCmd('No events file corresponding to %s run with tag %s. '% (self.run_name, prev_tag))
497 else:
498 input_file = filenames[0]
499 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
500 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
501 argument=['-c', input_file])
502 else:
503 if tag:
504 self.run_card['run_tag'] = tag
505 self.set_run_name(self.run_name, tag, 'pgs')
506
507 return lock
508
509
511 """Check the argument for pythia command
512 syntax: delphes [NAME]
513 Note that other option are already remove at this point
514 """
515
516
517 if not self.options['delphes_path']:
518 logger.info('Retry to read configuration file to find delphes path')
519 self.set_configuration()
520
521 if not self.options['delphes_path']:
522 error_msg = 'No delphes path correctly set.'
523 error_msg += 'Please use the set command to define the path and retry.'
524 error_msg += 'You can also define it in the configuration file.'
525 raise self.InvalidCmd(error_msg)
526
527 tag = [a for a in arg if a.startswith('--tag=')]
528 if tag:
529 arg.remove(tag[0])
530 tag = tag[0][6:]
531
532
533 if len(arg) == 0 and not self.run_name:
534 if self.results.lastrun:
535 arg.insert(0, self.results.lastrun)
536 else:
537 raise self.InvalidCmd('No run name currently define. Please add this information.')
538
539 if len(arg) == 1 and self.run_name == arg[0]:
540 arg.pop(0)
541
542 if not len(arg) and \
543 not os.path.exists(pjoin(self.me_dir,'Events','pythia_events.hep')):
544 self.help_pgs()
545 raise self.InvalidCmd('''No file file pythia_events.hep currently available
546 Please specify a valid run_name''')
547
548 if len(arg) == 1:
549 prev_tag = self.set_run_name(arg[0], tag, 'delphes')
550 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
551 'events_*.hep.gz'))
552 if not filenames:
553 raise self.InvalidCmd('No events file corresponding to %s run with tag %s.:%s '\
554 % (self.run_name, prev_tag,
555 pjoin(self.me_dir,'Events',self.run_name, '%s_pythia_events.hep.gz' % prev_tag)))
556 else:
557 input_file = filenames[0]
558 output_file = pjoin(self.me_dir, 'Events', 'pythia_events.hep')
559 lock = cluster.asyncrone_launch('gunzip',stdout=open(output_file,'w'),
560 argument=['-c', input_file])
561 else:
562 if tag:
563 self.run_card['run_tag'] = tag
564 self.set_run_name(self.run_name, tag, 'delphes')
565
567 """check the validity of the line. args is ORDER,
568 ORDER being LO or NLO. If no mode is passed, NLO is used"""
569
570
571
572 if options['force']:
573 self.force = True
574
575 if not args:
576 args.append('NLO')
577 return
578
579 if len(args) > 1:
580 self.help_calculate_xsect()
581 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
582
583 elif len(args) == 1:
584 if not args[0] in ['NLO', 'LO']:
585 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
586 mode = args[0]
587
588
589 if options['multicore'] and options['cluster']:
590 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
591 ' are not compatible. Please choose one.'
592
593
595 """check the validity of the line. args is ORDER,
596 ORDER being LO or NLO. If no mode is passed, NLO is used"""
597
598
599
600 if not args:
601 args.append('NLO')
602 return
603
604 if len(args) > 1:
605 self.help_generate_events()
606 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
607
608 elif len(args) == 1:
609 if not args[0] in ['NLO', 'LO']:
610 raise self.InvalidCmd, '%s is not a valid mode, please use "LO" or "NLO"' % args[1]
611 mode = args[0]
612
613
614 if options['multicore'] and options['cluster']:
615 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
616 ' are not compatible. Please choose one.'
617
619 """check the validity of line"""
620
621 if len(args) == 0:
622 self.help_banner_run()
623 raise self.InvalidCmd('banner_run requires at least one argument.')
624
625 tag = [a[6:] for a in args if a.startswith('--tag=')]
626
627
628 if os.path.exists(args[0]):
629 type ='banner'
630 format = self.detect_card_type(args[0])
631 if format != 'banner':
632 raise self.InvalidCmd('The file is not a valid banner.')
633 elif tag:
634 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
635 (args[0], tag))
636 if not os.path.exists(args[0]):
637 raise self.InvalidCmd('No banner associates to this name and tag.')
638 else:
639 name = args[0]
640 type = 'run'
641 banners = glob.glob(pjoin(self.me_dir,'Events', args[0], '*_banner.txt'))
642 if not banners:
643 raise self.InvalidCmd('No banner associates to this name.')
644 elif len(banners) == 1:
645 args[0] = banners[0]
646 else:
647
648 tags = [os.path.basename(p)[len(args[0])+1:-11] for p in banners]
649 tag = self.ask('which tag do you want to use?', tags[0], tags)
650 args[0] = pjoin(self.me_dir,'Events', args[0], '%s_%s_banner.txt' % \
651 (args[0], tag))
652
653 run_name = [arg[7:] for arg in args if arg.startswith('--name=')]
654 if run_name:
655 try:
656 self.exec_cmd('remove %s all banner -f' % run_name)
657 except Exception:
658 pass
659 self.set_run_name(args[0], tag=None, level='parton', reload_card=True)
660 elif type == 'banner':
661 self.set_run_name(self.find_available_run_name(self.me_dir))
662 elif type == 'run':
663 if not self.results[name].is_empty():
664 run_name = self.find_available_run_name(self.me_dir)
665 logger.info('Run %s is not empty so will use run_name: %s' % \
666 (name, run_name))
667 self.set_run_name(run_name)
668 else:
669 try:
670 self.exec_cmd('remove %s all banner -f' % run_name)
671 except Exception:
672 pass
673 self.set_run_name(name)
674
675
676
678 """check the validity of the line. args is MODE
679 MODE being LO, NLO, aMC@NLO or aMC@LO. If no mode is passed, auto is used"""
680
681
682
683 if options['force']:
684 self.force = True
685
686
687 if not args:
688 args.append('auto')
689 return
690
691 if len(args) > 1:
692 self.help_launch()
693 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
694
695 elif len(args) == 1:
696 if not args[0] in ['LO', 'NLO', 'aMC@NLO', 'aMC@LO','auto']:
697 raise self.InvalidCmd, '%s is not a valid mode, please use "LO", "NLO", "aMC@NLO" or "aMC@LO"' % args[0]
698 mode = args[0]
699
700
701 if options['multicore'] and options['cluster']:
702 raise self.InvalidCmd, 'options -m (--multicore) and -c (--cluster)' + \
703 ' are not compatible. Please choose one.'
704 if mode == 'NLO' and options['reweightonly']:
705 raise self.InvalidCmd, 'option -r (--reweightonly) needs mode "aMC@NLO" or "aMC@LO"'
706
707
709 """check the validity of the line. args is MODE
710 MODE being FO or MC. If no mode is passed, MC is used"""
711
712
713
714 if options['force']:
715 self.force = True
716
717 if not args:
718 args.append('MC')
719 return
720
721 if len(args) > 1:
722 self.help_compile()
723 raise self.InvalidCmd, 'Invalid Syntax: Too many argument'
724
725 elif len(args) == 1:
726 if not args[0] in ['MC', 'FO']:
727 raise self.InvalidCmd, '%s is not a valid mode, please use "FO" or "MC"' % args[0]
728 mode = args[0]
729
730
731
732
733
734
735
737 """ The Series of help routine for the MadGraphCmd"""
738
740 """auto-completion for launch command"""
741
742 args = self.split_arg(line[0:begidx])
743 if len(args) == 1:
744
745 return self.list_completion(text,['LO','NLO','aMC@NLO','aMC@LO'],line)
746 elif len(args) == 2 and line[begidx-1] == '@':
747 return self.list_completion(text,['LO','NLO'],line)
748 else:
749 opts = []
750 for opt in _launch_parser.option_list:
751 opts += opt._long_opts + opt._short_opts
752 return self.list_completion(text, opts, line)
753
755 "Complete the banner run command"
756 try:
757
758
759 args = self.split_arg(line[0:begidx], error=False)
760
761 if args[-1].endswith(os.path.sep):
762 return self.path_completion(text,
763 os.path.join('.',*[a for a in args \
764 if a.endswith(os.path.sep)]))
765
766
767 if len(args) > 1:
768
769 tags = glob.glob(pjoin(self.me_dir, 'Events' , args[1],'%s_*_banner.txt' % args[1]))
770 tags = ['%s' % os.path.basename(t)[len(args[1])+1:-11] for t in tags]
771
772 if args[-1] != '--tag=':
773 tags = ['--tag=%s' % t for t in tags]
774 else:
775 return self.list_completion(text, tags)
776 return self.list_completion(text, tags +['--name=','-f'], line)
777
778
779 possibilites = {}
780
781 comp = self.path_completion(text, os.path.join('.',*[a for a in args \
782 if a.endswith(os.path.sep)]))
783 if os.path.sep in line:
784 return comp
785 else:
786 possibilites['Path from ./'] = comp
787
788 run_list = glob.glob(pjoin(self.me_dir, 'Events', '*','*_banner.txt'))
789 run_list = [n.rsplit('/',2)[1] for n in run_list]
790 possibilites['RUN Name'] = self.list_completion(text, run_list)
791
792 return self.deal_multiple_categories(possibilites)
793
794
795 except Exception, error:
796 print error
797
798
811
824
826 """auto-completion for generate_events command
827 call the compeltion for launch"""
828 self.complete_launch(text, line, begidx, endidx)
829
830
832 args = self.split_arg(line[0:begidx])
833 if len(args) == 1:
834
835 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe.gz'))
836 data = [n.rsplit('/',2)[1] for n in data]
837 tmp1 = self.list_completion(text, data)
838 if not self.run_name:
839 return tmp1
840
842 """ Complete the plot command """
843
844 args = self.split_arg(line[0:begidx], error=False)
845
846 if len(args) == 1:
847
848 data = glob.glob(pjoin(self.me_dir, 'Events', '*','events.lhe*'))
849 data = [n.rsplit('/',2)[1] for n in data]
850 tmp1 = self.list_completion(text, data)
851 if not self.run_name:
852 return tmp1
853
854 if len(args) > 1:
855 return self.list_completion(text, self._plot_mode)
856
858 "Complete the pgs command"
859 args = self.split_arg(line[0:begidx], error=False)
860 if len(args) == 1:
861
862 data = glob.glob(pjoin(self.me_dir, 'Events', '*', 'events_*.hep.gz'))
863 data = [n.rsplit('/',2)[1] for n in data]
864 tmp1 = self.list_completion(text, data)
865 if not self.run_name:
866 return tmp1
867 else:
868 tmp2 = self.list_completion(text, self._run_options + ['-f',
869 '--tag=' ,'--no_default'], line)
870 return tmp1 + tmp2
871 else:
872 return self.list_completion(text, self._run_options + ['-f',
873 '--tag=','--no_default'], line)
874
875 complete_delphes = complete_pgs
876
879
880
881
882
class aMCatNLOCmd(CmdExtended, HelpToCmd, CompleteForCmd, common_run.CommonRunCmd):
    """The command line processor of MadGraph"""

    # string/bool spellings accepted as 'true' when parsing cards
    true = ['T','.true.',True,'true']
    # generic command-line options shared by the run commands
    _run_options = ['--cluster','--multicore','--nb_core=','--nb_core=2', '-c', '-m']
    _generate_options = ['-f', '--laststep=parton', '--laststep=pythia', '--laststep=pgs', '--laststep=delphes']
    _calculate_decay_options = ['-f', '--accuracy=0.']
    _set_options = ['stdout_level','fortran_compiler','cpp_compiler','timeout']
    # valid arguments of the plot command (also reused by clean)
    _plot_mode = ['all', 'parton','shower','pgs','delphes']
    _clean_mode = _plot_mode + ['channel', 'banner']
    _display_opts = ['run_name', 'options', 'variable']

    # class-level defaults for the run state
    web = False
    cluster_mode = 0
    queue = 'madgraph'
    nb_core = None

    # follow-up commands proposed to the user after each command
    next_possibility = {
        'start': ['generate_events [OPTIONS]', 'calculate_crossx [OPTIONS]', 'launch [OPTIONS]',
                  'help generate_events'],
        'generate_events': ['generate_events [OPTIONS]', 'shower'],
        'launch': ['launch [OPTIONS]', 'shower'],
        'shower' : ['generate_events [OPTIONS]']
    }
910
911
912
    # NOTE(review): mutable default for 'options' is shared across calls;
    # harmless as long as it is never mutated -- confirm.
    def __init__(self, me_dir = None, options = {}, *completekey, **stdin):
        """ add information to the cmd """

        self.start_time = 0
        CmdExtended.__init__(self, me_dir, options, *completekey, **stdin)

        self.mode = 'aMCatNLO'
        self.nb_core = 0
        self.prompt = "%s>"%os.path.basename(pjoin(self.me_dir))

        # load the existing results database if present, otherwise start fresh
        if os.path.exists(pjoin(self.me_dir,'HTML','results.pkl')):
            self.results = save_load_object.load_from_file(pjoin(self.me_dir,'HTML','results.pkl'))
            self.results.resetall(self.me_dir)
            self.last_mode = self.results[self.results.lastrun][-1]['run_mode']
        else:
            model = self.find_model_name()
            process = self.process
            self.results = gen_crossxhtml.AllResultsNLO(model, process, self.me_dir)
            self.last_mode = ''
        self.results.def_web_mode(self.web)

        proc_card = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read()
        # enforce the gfortran >= 4.6 requirement except for [real=QCD] processes
        if not '[real=QCD]' in proc_card:
            check_compiler(self.options, block=True)
940
941
942
944 """ run the shower on a given parton level file """
945 argss = self.split_arg(line)
946 (options, argss) = _launch_parser.parse_args(argss)
947
948 options = options.__dict__
949 options['reweightonly'] = False
950 self.check_shower(argss, options)
951 evt_file = pjoin(os.getcwd(), argss[0], 'events.lhe')
952 self.ask_run_configuration('onlyshower', options)
953 self.run_mcatnlo(evt_file)
954
955 self.update_status('', level='all', update_results=True)
956
957
959 """Create the plot for a given run"""
960
961
962 args = self.split_arg(line)
963
964 self.check_plot(args)
965 logger.info('plot for run %s' % self.run_name)
966
967 if not self.force:
968 self.ask_edit_cards([], args, plot=True)
969
970 if any([arg in ['parton'] for arg in args]):
971 filename = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')
972 if os.path.exists(filename+'.gz'):
973 misc.gunzip(filename)
974 if os.path.exists(filename):
975 logger.info('Found events.lhe file for run %s' % self.run_name)
976 shutil.move(filename, pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'))
977 self.create_plot('parton')
978 shutil.move(pjoin(self.me_dir, 'Events', 'unweighted_events.lhe'), filename)
979 misc.gzip(filename)
980
981 if any([arg in ['all','parton'] for arg in args]):
982 filename = pjoin(self.me_dir, 'Events', self.run_name, 'MADatNLO.top')
983 if os.path.exists(filename):
984 logger.info('Found MADatNLO.top file for run %s' % \
985 self.run_name)
986 output = pjoin(self.me_dir, 'HTML',self.run_name, 'plots_parton.html')
987 plot_dir = pjoin(self.me_dir, 'HTML', self.run_name, 'plots_parton')
988
989 if not os.path.isdir(plot_dir):
990 os.makedirs(plot_dir)
991 top_file = pjoin(plot_dir, 'plots.top')
992 files.cp(filename, top_file)
993 madir = self.options['madanalysis_path']
994 tag = self.run_card['run_tag']
995 td = self.options['td_path']
996 misc.call(['%s/plot' % self.dirbin, madir, td],
997 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
998 stderr = subprocess.STDOUT,
999 cwd=plot_dir)
1000
1001 misc.call(['%s/plot_page-pl' % self.dirbin,
1002 os.path.basename(plot_dir),
1003 'parton'],
1004 stdout = open(pjoin(plot_dir, 'plot.log'),'a'),
1005 stderr = subprocess.STDOUT,
1006 cwd=pjoin(self.me_dir, 'HTML', self.run_name))
1007 shutil.move(pjoin(self.me_dir, 'HTML',self.run_name ,'plots.html'),
1008 output)
1009
1010 os.remove(pjoin(self.me_dir, 'Events', 'plots.top'))
1011
1012 if any([arg in ['all','shower'] for arg in args]):
1013 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
1014 'events_*.lhe.gz'))
1015 if len(filenames) != 1:
1016 filenames = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
1017 'events_*.hep.gz'))
1018 if len(filenames) != 1:
1019 logger.info('No shower level file found for run %s' % \
1020 self.run_name)
1021 return
1022 filename = filenames[0]
1023 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1024
1025 if not os.path.exists(pjoin(self.me_dir, 'Cards', 'pythia_card.dat')):
1026 if aMCatNLO and not self.options['mg5_path']:
1027 raise "plotting NLO HEP file needs MG5 utilities"
1028
1029 files.cp(pjoin(self.options['mg5_path'], 'Template','LO', 'Cards', 'pythia_card_default.dat'),
1030 pjoin(self.me_dir, 'Cards', 'pythia_card.dat'))
1031 self.run_hep2lhe()
1032 else:
1033 filename = filenames[0]
1034 misc.gunzip(filename, keep=True, stdout=pjoin(self.me_dir, 'Events','pythia_events.hep'))
1035
1036 self.create_plot('shower')
1037 lhe_file_name = filename.replace('.hep.gz', '.lhe')
1038 shutil.move(pjoin(self.me_dir, 'Events','pythia_events.lhe'),
1039 lhe_file_name)
1040 misc.gzip(lhe_file_name)
1041
1042 if any([arg in ['all','pgs'] for arg in args]):
1043 filename = pjoin(self.me_dir, 'Events', self.run_name,
1044 '%s_pgs_events.lhco' % self.run_tag)
1045 if os.path.exists(filename+'.gz'):
1046 misc.gunzip(filename)
1047 if os.path.exists(filename):
1048 self.create_plot('PGS')
1049 misc.gzip(filename)
1050 else:
1051 logger.info('No valid files for pgs plot')
1052
1053 if any([arg in ['all','delphes'] for arg in args]):
1054 filename = pjoin(self.me_dir, 'Events', self.run_name,
1055 '%s_delphes_events.lhco' % self.run_tag)
1056 if os.path.exists(filename+'.gz'):
1057 misc.gunzip(filename)
1058 if os.path.exists(filename):
1059
1060 self.create_plot('Delphes')
1061
1062 misc.gzip(filename)
1063 else:
1064 logger.info('No valid files for delphes plot')
1065
1066
1067
1069 """Main commands: calculates LO/NLO cross-section, using madevent_mintFO
1070 this function wraps the do_launch one"""
1071
1072 self.start_time = time.time()
1073 argss = self.split_arg(line)
1074
1075 (options, argss) = _calculate_xsect_parser.parse_args(argss)
1076 options = options.__dict__
1077 options['reweightonly'] = False
1078 options['parton'] = True
1079 self.check_calculate_xsect(argss, options)
1080 self.do_launch(line, options, argss)
1081
1082
1084 """Make a run from the banner file"""
1085
1086 args = self.split_arg(line)
1087
1088 self.check_banner_run(args)
1089
1090
1091 for name in ['shower_card.dat', 'madspin_card.dat']:
1092 try:
1093 os.remove(pjoin(self.me_dir, 'Cards', name))
1094 except Exception:
1095 pass
1096
1097 banner_mod.split_banner(args[0], self.me_dir, proc_card=False)
1098
1099
1100 if not self.force:
1101 ans = self.ask('Do you want to modify the Cards/Run Type?', 'n', ['y','n'])
1102 if ans == 'n':
1103 self.force = True
1104
1105
1106 if self.force:
1107 mode_status = {'order': 'NLO', 'fixed_order': False, 'madspin':False, 'shower':True}
1108 banner = banner_mod.Banner(args[0])
1109 for line in banner['run_settings']:
1110 if '=' in line:
1111 mode, value = [t.strip() for t in line.split('=')]
1112 mode_status[mode] = value
1113 else:
1114 mode_status = {}
1115
1116
1117 self.do_launch('-n %s %s' % (self.run_name, '-f' if self.force else ''),
1118 switch=mode_status)
1119
1120
1122 """Main commands: generate events
1123 this function just wraps the do_launch one"""
1124 self.do_launch(line)
1125
1126
1127
1129 """Advanced commands: this is for creating the correct run_card.inc from the nlo format"""
1130
1131 self.check_param_card(pjoin(self.me_dir, 'Cards','param_card.dat'))
1132 return super(aMCatNLOCmd,self).do_treatcards(line, amcatnlo)
1133
1134
1136 """assign all configuration variable from file
1137 loop over the different config file if config_file not define """
1138 return super(aMCatNLOCmd,self).set_configuration(amcatnlo=amcatnlo, **opt)
1139
1140
1141 - def do_launch(self, line, options={}, argss=[], switch={}):
1142 """Main commands: launch the full chain
1143 options and args are relevant if the function is called from other
1144 functions, such as generate_events or calculate_xsect
1145 mode gives the list of switch needed for the computation (usefull for banner_run)
1146 """
1147
1148 if not argss and not options:
1149 self.start_time = time.time()
1150 argss = self.split_arg(line)
1151
1152 (options, argss) = _launch_parser.parse_args(argss)
1153 options = options.__dict__
1154 self.check_launch(argss, options)
1155
1156 if 'run_name' in options.keys() and options['run_name']:
1157 self.run_name = options['run_name']
1158
1159
1160 if os.path.isdir(pjoin(self.me_dir, 'Events', self.run_name)):
1161 logger.warning('Removing old run information in \n'+
1162 pjoin(self.me_dir, 'Events', self.run_name))
1163 files.rm(pjoin(self.me_dir, 'Events', self.run_name))
1164 self.results.delete_run(self.run_name)
1165
1166 if options['multicore']:
1167 self.cluster_mode = 2
1168 elif options['cluster']:
1169 self.cluster_mode = 1
1170
1171 if not switch:
1172 mode = argss[0]
1173 if mode in ['LO', 'NLO']:
1174 options['parton'] = True
1175 mode = self.ask_run_configuration(mode, options)
1176 else:
1177 mode = self.ask_run_configuration('auto', options, switch)
1178
1179 self.results.add_detail('run_mode', mode)
1180
1181 self.update_status('Starting run', level=None, update_results=True)
1182
1183 if self.options['automatic_html_opening']:
1184 misc.open_file(os.path.join(self.me_dir, 'crossx.html'))
1185 self.options['automatic_html_opening'] = False
1186
1187 if '+' in mode:
1188 mode = mode.split('+')[0]
1189 self.compile(mode, options)
1190 evt_file = self.run(mode, options)
1191
1192 if int(self.run_card['nevents']) == 0 and not mode in ['LO', 'NLO']:
1193 logger.info('No event file generated: grids have been set-up with a '\
1194 'relative precision of %s' % self.run_card['req_acc'])
1195 return
1196
1197 if not mode in ['LO', 'NLO']:
1198 assert evt_file == pjoin(self.me_dir,'Events', self.run_name, 'events.lhe'), '%s != %s' %(evt_file, pjoin(self.me_dir,'Events', self.run_name, 'events.lhe.gz'))
1199 self.exec_cmd('decay_events -from_cards', postcmd=False)
1200 evt_file = pjoin(self.me_dir,'Events', self.run_name, 'events.lhe')
1201
1202 if not mode in ['LO', 'NLO', 'noshower', 'noshowerLO'] \
1203 and not options['parton']:
1204 self.run_mcatnlo(evt_file)
1205 elif mode == 'noshower':
1206 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
1207 Please, shower the Les Houches events before using them for physics analyses.""")
1208
1209
1210 self.update_status('', level='all', update_results=True)
1211 if int(self.run_card['ickkw']) == 3 and mode in ['noshower', 'aMC@NLO']:
1212 logger.warning("""You are running with FxFx merging enabled.
1213 To be able to merge samples of various multiplicities without double counting,
1214 you have to remove some events after showering 'by hand'.
1215 Please read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
1216
1217
1218
1219
1221 """Advanced commands: just compile the executables """
1222 argss = self.split_arg(line)
1223
1224 (options, argss) = _compile_parser.parse_args(argss)
1225 options = options.__dict__
1226 options['reweightonly'] = False
1227 options['nocompile'] = False
1228 self.check_compile(argss, options)
1229
1230 mode = {'FO': 'NLO', 'MC': 'aMC@NLO'}[argss[0]]
1231 self.ask_run_configuration(mode, options)
1232 self.compile(mode, options)
1233
1234
1235 self.update_status('', level='all', update_results=True)
1236
1238 """Have a nice results prints in the shell,
1239 data should be of type: gen_crossxhtml.OneTagResults"""
1240 if not data:
1241 return
1242 logger.info(" === Results Summary for run: %s tag: %s ===\n" % (data['run_name'],data['tag']))
1243 if self.ninitial == 1:
1244 logger.info(" Width : %.4g +- %.4g GeV" % (data['cross'], data['error']))
1245 else:
1246 logger.info(" Cross-section : %.4g +- %.4g pb" % (data['cross'], data['error']))
1247 logger.info(" Nb of events : %s" % data['nb_event'] )
1248
1249
1250
1251
1252
1253
1254
1255
1256 logger.info(" " )
1257
1259 """Have a nice results prints in the shell,
1260 data should be of type: gen_crossxhtml.OneTagResults"""
1261 if not data:
1262 return
1263
1264 fsock = open(path, mode)
1265
1266 fsock.write(" === Results Summary for run: %s tag: %s process: %s ===\n" % \
1267 (data['run_name'],data['tag'], os.path.basename(self.me_dir)))
1268
1269 if self.ninitial == 1:
1270 fsock.write(" Width : %.4g +- %.4g GeV\n" % (data['cross'], data['error']))
1271 else:
1272 fsock.write(" Cross-section : %.4g +- %.4g pb\n" % (data['cross'], data['error']))
1273 fsock.write(" Nb of events : %s\n" % data['nb_event'] )
1274
1275
1276
1277
1278
1279
1280 fsock.write(" \n" )
1281
1282
1283
1284
1285
1287 """Update random number seed with the value from the run_card.
1288 If this is 0, update the number according to a fresh one"""
1289 iseed = int(self.run_card['iseed'])
1290 if iseed == 0:
1291 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'))
1292 iseed = int(randinit.read()[2:]) + 1
1293 randinit.close()
1294 randinit = open(pjoin(self.me_dir, 'SubProcesses', 'randinit'), 'w')
1295 randinit.write('r=%d' % iseed)
1296 randinit.close()
1297
1298
1300 """reads the proc_characteristics file and initialises the correspondant
1301 dictionary"""
1302 lines = [l for l in open(file).read().split('\n') if l and not l.startswith('#')]
1303 self.proc_characteristics = {}
1304 for l in lines:
1305 key, value = l.split('=')
1306 self.proc_characteristics[key.strip()] = value.strip()
1307
1308
    def run(self, mode, options):
        """runs aMC@NLO. Returns the name of the event file created

        mode is one of 'LO'/'NLO' (fixed order) or
        'aMC@LO'/'aMC@NLO'/'noshowerLO'/'noshower' (event generation);
        options is the parsed launch-option dictionary.
        For fixed order the method returns None after writing results;
        for event generation it returns the path produced by
        reweight_and_collect_events.
        """
        logger.info('Starting run')

        # make sure the flag exists before it is read below
        if not 'only_generation' in options.keys():
            options['only_generation'] = False

        # for applgrid filling (iappl == 2) the grids must already exist, so
        # force generation-only mode (run_card values are strings here)
        if mode in ['LO', 'NLO'] and self.run_card['iappl'] == '2' and not options['only_generation']:
            options['only_generation'] = True
        self.get_characteristics(pjoin(self.me_dir, 'SubProcesses', 'proc_characteristics'))

        # set up the cluster (mode 1) or multicore (mode 2) runner
        if self.cluster_mode == 1:
            cluster_name = self.options['cluster_type']
            self.cluster = cluster.from_name[cluster_name](**self.options)
        if self.cluster_mode == 2:
            try:
                import multiprocessing
                if not self.nb_core:
                    try:
                        self.nb_core = int(self.options['nb_core'])
                    except TypeError:
                        # nb_core option unset (None): use all available cores
                        self.nb_core = multiprocessing.cpu_count()
                logger.info('Using %d cores' % self.nb_core)
            except ImportError:
                self.nb_core = 1
                logger.warning('Impossible to detect the number of cores => Using One.\n'+
                        'Use set nb_core X in order to set this number and be able to'+
                        'run in multicore.')

            self.cluster = cluster.MultiCore(**self.options)
        self.update_random_seed()

        # glob patterns of the integration-channel directories per run mode
        folder_names = {'LO': ['born_G*'], 'NLO': ['all_G*'],
                    'aMC@LO': ['GB*'], 'aMC@NLO': ['GF*']}
        folder_names['noshower'] = folder_names['aMC@NLO']
        folder_names['noshowerLO'] = folder_names['aMC@LO']
        job_dict = {}
        p_dirs = [d for d in \
                open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]

        # find the ajob scripts in each P* dir and clean previous results
        if not options['only_generation'] and not options['reweightonly']:
            self.update_status('Cleaning previous results', level=None)
        for dir in p_dirs:
            job_dict[dir] = [file for file in \
                                 os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                                 if file.startswith('ajob')]
            # old channel folders to be removed for a fresh run
            for obj in folder_names[mode]:
                to_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                # leftovers of the splitted event generation ('_' but no '_G')
                # are always removed, even when only generating events
                to_always_rm = [file for file in \
                             os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)) \
                             if file.startswith(obj[:-1]) and
                             '_' in file and not '_G' in file and \
                            (os.path.isdir(pjoin(self.me_dir, 'SubProcesses', dir, file)) or \
                             os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file)))]

                if not options['only_generation'] and not options['reweightonly']:
                    to_always_rm.extend(to_rm)
                    if os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz')):
                        to_always_rm.append(pjoin(self.me_dir, 'SubProcesses', dir,'MadLoop5_resources.tar.gz'))
                files.rm([pjoin(self.me_dir, 'SubProcesses', dir, d) for d in to_always_rm])

        mcatnlo_status = ['Setting up grid', 'Computing upper envelope', 'Generating events']

        if self.run_card['iappl']=='2':
            self.applgrid_distribute(options,mode,p_dirs)

        # reweight-only runs skip the integration entirely
        if options['reweightonly']:
            event_norm=self.run_card['event_norm']
            nevents=int(self.run_card['nevents'])
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)

        devnull = os.open(os.devnull, os.O_RDWR)
        if mode in ['LO', 'NLO']:
            # ---- fixed-order computation -------------------------------
            mode_dict = {'NLO': 'all', 'LO': 'born'}
            logger.info('Doing fixed order %s' % mode)
            req_acc = self.run_card['req_acc_FO']
            # step 0: set up integration grids (unless only generating).
            # With a target accuracy, grid iterations are steered by req_acc;
            # otherwise use the explicit npoints/niters from the run_card.
            if not options['only_generation'] and req_acc != '-1':
                self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, '-1', '6','0.10')
                self.update_status('Setting up grids', level=None)
                self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')
            elif not options['only_generation']:
                npoints = self.run_card['npoints_FO_grid']
                niters = self.run_card['niters_FO_grid']
                self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], 0, npoints, niters)
                self.update_status('Setting up grids', level=None)
                self.run_all(job_dict, [['0', mode_dict[mode], '0']], 'Setting up grids')

            npoints = self.run_card['npoints_FO']
            niters = self.run_card['niters_FO']
            self.write_madin_file(pjoin(self.me_dir, 'SubProcesses'), mode_dict[mode], -1, npoints, niters)

            # collect logs and combine the grid-setup results
            self.collect_log_files(folder_names[mode], 0)
            p = misc.Popen(['./combine_results_FO.sh', req_acc, '%s_G*' % mode_dict[mode]], \
                                stdout=subprocess.PIPE, \
                                cwd=pjoin(self.me_dir, 'SubProcesses'))
            output = p.communicate()

            self.cross_sect_dict = self.read_results(output, mode)
            self.print_summary(options, 0, mode)
            cross, error = sum_html.make_all_html_results(self, ['%s*' % mode_dict[mode]])
            self.results.add_detail('cross', cross)
            self.results.add_detail('error', error)

            # step 1: the actual cross-section computation
            self.update_status('Computing cross-section', level=None)
            self.run_all(job_dict, [['0', mode_dict[mode], '0', mode_dict[mode]]], 'Computing cross-section')

            # collect logs and combine the final results
            self.collect_log_files(folder_names[mode], 1)
            p = misc.Popen(['./combine_results_FO.sh', '-1'] + folder_names[mode], \
                                stdout=subprocess.PIPE,
                                cwd=pjoin(self.me_dir, 'SubProcesses'))
            output = p.communicate()
            self.cross_sect_dict = self.read_results(output, mode)

            # collect the scale and PDF uncertainties from the reweighting data
            scale_pdf_info={}
            if self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.':
                data_files=[]
                for dir in p_dirs:
                    for obj in folder_names[mode]:
                        for file in os.listdir(pjoin(self.me_dir, 'SubProcesses', dir)):
                            if file.startswith(obj[:-1]) and \
                                    (os.path.exists(pjoin(self.me_dir, 'SubProcesses', dir, file,'scale_pdf_dependence.dat'))):
                                data_files.append(pjoin(dir,file,'scale_pdf_dependence.dat'))
                scale_pdf_info = self.pdf_scale_from_reweighting(data_files)

            # print the final summary and store results/plots in the run dir
            self.print_summary(options, 1, mode, scale_pdf_info)

            files.cp(pjoin(self.me_dir, 'SubProcesses', 'res.txt'),
                     pjoin(self.me_dir, 'Events', self.run_name))

            if self.analyse_card['fo_analysis_format'].lower() == 'topdrawer':
                misc.call(['./combine_plots_FO.sh'] + folder_names[mode], \
                                stdout=devnull,
                                cwd=pjoin(self.me_dir, 'SubProcesses'))
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.top'),
                         pjoin(self.me_dir, 'Events', self.run_name))
                logger.info('The results of this run and the TopDrawer file with the plots' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
            elif self.analyse_card['fo_analysis_format'].lower() == 'root':
                misc.call(['./combine_root.sh'] + folder_names[mode], \
                                stdout=devnull,
                                cwd=pjoin(self.me_dir, 'SubProcesses'))
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'MADatNLO.root'),
                         pjoin(self.me_dir, 'Events', self.run_name))
                logger.info('The results of this run and the ROOT file with the plots' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))
            else:
                logger.info('The results of this run' + \
                            ' have been saved in %s' % pjoin(self.me_dir, 'Events', self.run_name))

            cross, error = sum_html.make_all_html_results(self, folder_names[mode])
            self.results.add_detail('cross', cross)
            self.results.add_detail('error', error)
            if self.run_card['iappl'] != '0':
                self.applgrid_combine(cross,error)
            self.update_status('Run complete', level='parton', update_results=True)

            # fixed order: no event file is produced
            return

        elif mode in ['aMC@NLO','aMC@LO','noshower','noshowerLO']:
            # ---- event generation (MINT steps 0/1/2) -------------------
            shower = self.run_card['parton_shower'].upper()
            nevents = int(self.run_card['nevents'])
            req_acc = self.run_card['req_acc']
            # sanity checks on the nevents / req_acc combination
            if nevents == 0 and float(req_acc) < 0 :
                raise aMCatNLOError('Cannot determine the required accuracy from the number '\
                                        'of events, because 0 events requested. Please set '\
                                        'the "req_acc" parameter in the run_card to a value between 0 and 1')
            elif float(req_acc) >1 or float(req_acc) == 0 :
                raise aMCatNLOError('Required accuracy ("req_acc" in the run_card) should '\
                                        'be between larger than 0 and smaller than 1, '\
                                        'or set to -1 for automatic determination. Current value is %s' % req_acc)

            shower_list = ['HERWIG6', 'HERWIGPP', 'PYTHIA6Q', 'PYTHIA6PT', 'PYTHIA8']

            if not shower in shower_list:
                raise aMCatNLOError('%s is not a valid parton shower. Please use one of the following: %s' \
                    % (shower, ', '.join(shower_list)))

            # PYTHIA6PT cannot shower processes with final-state radiation
            if shower == 'PYTHIA6PT' and \
                self.proc_characteristics['has_fsr'] == 'true':
                raise aMCatNLOError('PYTHIA6PT does not support processes with FSR')

            if mode in ['aMC@NLO', 'aMC@LO']:
                logger.info('Doing %s matched to parton shower' % mode[4:])
            elif mode in ['noshower','noshowerLO']:
                logger.info('Generating events without running the shower.')
            elif options['only_generation']:
                logger.info('Generating events starting from existing results')

            # i = 0: grid setup, i = 1: upper envelope, i = 2: event generation
            for i, status in enumerate(mcatnlo_status):

                # check whether the event-generation jobs need to be split
                try:
                    nevents_unweighted = open(pjoin(self.me_dir,
                                                'SubProcesses',
                                                'nevents_unweighted')).read().split('\n')
                except IOError:
                    nevents_unweighted = []

                split = i == 2 and \
                        int(self.run_card['nevt_job']) > 0 and \
                        any([int(l.split()[1]) > int(self.run_card['nevt_job']) \
                            for l in nevents_unweighted if l])

                if i == 2 or not options['only_generation']:

                    # zero events requested: grids are set up, nothing to generate
                    if i==2 and nevents==0:
                        self.print_summary(options, 2,mode)
                        return

                    if split:
                        # split the channels into several jobs of at most
                        # nevt_job events each
                        misc.call([pjoin(self.me_dir, 'bin', 'internal', 'split_jobs.py')] + \
                                   [self.run_card['nevt_job']],
                                   stdout = devnull,
                                   cwd = pjoin(self.me_dir, 'SubProcesses'))
                        assert os.path.exists(pjoin(self.me_dir, 'SubProcesses',
                                    'nevents_unweighted_splitted'))

                    self.update_status(status, level='parton')
                    if mode in ['aMC@NLO', 'noshower']:
                        self.write_madinMMC_file(pjoin(self.me_dir, 'SubProcesses'), 'all', i)
                        self.run_all(job_dict, [['2', 'F', '%d' % i]], status, split_jobs = split)

                    elif mode in ['aMC@LO', 'noshowerLO']:
                        self.write_madinMMC_file(
                                    pjoin(self.me_dir, 'SubProcesses'), 'born', i)
                        self.run_all(job_dict,
                                     [['2', 'B', '%d' % i]],
                                     '%s at LO' % status, split_jobs = split)

                # combine intermediate results after steps 0 and 1
                if (i < 2 and not options['only_generation']) or i == 1 :

                    self.collect_log_files(folder_names[mode], i)
                    p = misc.Popen(['./combine_results.sh'] + \
                                   ['%d' % i,'%d' % nevents, '%s' % req_acc ] + \
                                   folder_names[mode],
                                   stdout=subprocess.PIPE,
                                   cwd = pjoin(self.me_dir, 'SubProcesses'))
                    output = p.communicate()
                    files.cp(pjoin(self.me_dir, 'SubProcesses', 'res_%d.txt' % i), \
                             pjoin(self.me_dir, 'Events', self.run_name))

                    self.cross_sect_dict = self.read_results(output, mode)
                    self.print_summary(options, i, mode)

                    cross, error = sum_html.make_all_html_results(self, folder_names[mode])
                    self.results.add_detail('cross', cross)
                    self.results.add_detail('error', error)

                # verify that the splitted jobs all terminated correctly
                if split:
                    self.check_event_files()

            if self.cluster_mode == 1:
                # wait so that event files are transferred back from the nodes
                self.update_status(
                        'Waiting while files are transferred back from the cluster nodes',
                        level='parton')
                time.sleep(10)
            if split:
                files.cp(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted'), \
                         pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted'))

            # final step: reweight and collect the generated events
            event_norm=self.run_card['event_norm']
            self.collect_log_files(folder_names[mode], 2)
            return self.reweight_and_collect_events(options, mode, nevents, event_norm)
1590
1591
1593 """Combines the APPLgrids in all the SubProcess/P*/all_G*/ directories"""
1594 logger.debug('Combining APPLgrids \n')
1595 applcomb=pjoin(self.options['applgrid'].rstrip('applgrid-config'),'applgrid-combine')
1596 with open(pjoin(self.me_dir,'SubProcesses','dirs.txt')) as dirf:
1597 all_jobs=dirf.readlines()
1598 ngrids=len(all_jobs)
1599 nobs =len([name for name in os.listdir(pjoin(self.me_dir,'SubProcesses',all_jobs[0].rstrip())) \
1600 if name.endswith("_out.root")])
1601 for obs in range(0,nobs):
1602 gdir = [pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_out.root") for job in all_jobs]
1603
1604 if self.run_card["iappl"] == "1":
1605 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+"_starting_grid.root"), '--optimise']+ gdir)
1606 elif self.run_card["iappl"] == "2":
1607 unc2_inv=pow(cross/error,2)
1608 unc2_inv_ngrids=pow(cross/error,2)*ngrids
1609 misc.call([applcomb,'-o', pjoin(self.me_dir,"Events",self.run_name,"aMCfast_obs_"+str(obs)+".root"),'-s',str(unc2_inv),'--weight',str(unc2_inv)]+ gdir)
1610 for job in all_jobs:
1611 os.remove(pjoin(self.me_dir,'SubProcesses',job.rstrip(),"grid_obs_"+str(obs)+"_in.root"))
1612 else:
1613 raise aMCatNLOError('iappl parameter can only be 0, 1 or 2')
1614
1615 for ggdir in gdir:
1616 os.remove(ggdir)
1617
1618
1620 """Distributes the APPLgrids ready to be filled by a second run of the code"""
1621
1622 if not('appl_start_grid' in options.keys() and options['appl_start_grid']):
1623 gfiles=glob.glob(pjoin(self.me_dir, 'Events','*','aMCfast_obs_0_starting_grid.root'))
1624 time_stamps={}
1625 for root_file in gfiles:
1626 time_stamps[root_file]=os.path.getmtime(root_file)
1627 options['appl_start_grid']= \
1628 max(time_stamps.iterkeys(), key=(lambda key: time_stamps[key])).split('/')[-2]
1629 logger.info('No --appl_start_grid option given. Guessing that start grid from run "%s" should be used.' \
1630 % options['appl_start_grid'])
1631
1632 if 'appl_start_grid' in options.keys() and options['appl_start_grid']:
1633 self.appl_start_grid = options['appl_start_grid']
1634 start_grid_dir=pjoin(self.me_dir, 'Events', self.appl_start_grid)
1635
1636 if not os.path.exists(pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root')):
1637 raise self.InvalidCmd('APPLgrid file not found: %s' % \
1638 pjoin(start_grid_dir,'aMCfast_obs_0_starting_grid.root'))
1639 else:
1640 all_grids=[pjoin(start_grid_dir,name) for name in os.listdir(start_grid_dir) \
1641 if name.endswith("_starting_grid.root")]
1642 nobs =len(all_grids)
1643 gstring=" ".join(all_grids)
1644 if not hasattr(self, 'appl_start_grid') or not self.appl_start_grid:
1645 raise self.InvalidCmd('No APPLgrid name currently defined. Please provide this information.')
1646 if mode == 'NLO':
1647 gdir='all_G'
1648 elif mode == 'LO':
1649 gdir='born_G'
1650
1651 for pdir in p_dirs:
1652 g_dirs = [file for file in os.listdir(pjoin(self.me_dir,"SubProcesses",pdir)) \
1653 if file.startswith(gdir) and os.path.isdir(pjoin(self.me_dir,"SubProcesses",pdir, file))]
1654 for g_dir in g_dirs:
1655 for grid in all_grids:
1656 obs=grid.split('_')[-3]
1657 files.cp(grid,pjoin(self.me_dir,"SubProcesses",pdir,g_dir,'grid_obs_'+obs+'_in.root'))
1658
1659
1661 """collect the log files and put them in a single, html-friendly file inside the run_...
1662 directory"""
1663 step_list = ['Grid setting', 'Cross-section computation', 'Event generation']
1664 log_file = pjoin(self.me_dir, 'Events', self.run_name,
1665 'alllogs_%d.html' % istep)
1666
1667 channel_dict = {}
1668 log_files = []
1669 for folder in folders:
1670 log_files += glob.glob(pjoin(self.me_dir, 'SubProcesses', 'P*', folder, 'log.txt'))
1671
1672 content = ''
1673
1674 content += '<HTML><BODY>\n<font face="courier" size=2>'
1675 for log in log_files:
1676 channel_dict[os.path.dirname(log)] = [istep]
1677
1678 content += '<a name=%s></a>\n' % (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'),''))
1679
1680 content += '<font color="red">\n'
1681 content += '<br>LOG file for integration channel %s, %s <br>' % \
1682 (os.path.dirname(log).replace(pjoin(self.me_dir,'SubProcesses'), ''),
1683 step_list[istep])
1684 content += '</font>\n'
1685
1686
1687 content += '<PRE>\n' + open(log).read() + '\n</PRE>'
1688 content +='<br>\n'
1689
1690 content += '</font>\n</BODY></HTML>\n'
1691 open(log_file, 'w').write(content)
1692
1693
1695 """extract results (cross-section, absolute cross-section and errors)
1696 from output, which should be formatted as
1697 Found 4 correctly terminated jobs
1698 random seed found in 'randinit' is 33
1699 Integrated abs(cross-section)
1700 7.94473937e+03 +- 2.9953e+01 (3.7702e-01%)
1701 Integrated cross-section
1702 6.63392298e+03 +- 3.7669e+01 (5.6782e-01%)
1703 for aMC@NLO/aMC@LO, and as
1704
1705 for NLO/LO
1706 The cross_sect_dict is returned"""
1707 res = {}
1708 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
1709 pat = re.compile(\
1710 '''Found (\d+) correctly terminated jobs
1711 random seed found in 'randinit' is (\d+)
1712 Integrated abs\(cross-section\)
1713 \s*(\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\d+\.\d+e[+-]\d+)\%\)
1714 Integrated cross-section
1715 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
1716 else:
1717 pat = re.compile(\
1718 '''Found (\d+) correctly terminated jobs
1719 \s*(\-?\d+\.\d+e[+-]\d+) \+\- (\d+\.\d+e[+-]\d+) \((\-?\d+\.\d+e[+-]\d+)\%\)''')
1720 pass
1721
1722 match = re.search(pat, output[0])
1723 if not match or output[1]:
1724 logger.info('Return code of the event collection: '+str(output[1]))
1725 logger.info('Output of the event collection:\n'+output[0])
1726 raise aMCatNLOError('An error occurred during the collection of results.\n' +
1727 'Please check the .log files inside the directories which failed.')
1728
1729
1730 if mode in ['aMC@LO', 'aMC@NLO', 'noshower', 'noshowerLO']:
1731 return {'randinit' : int(match.groups()[1]),
1732 'xseca' : float(match.groups()[2]),
1733 'erra' : float(match.groups()[3]),
1734 'xsect' : float(match.groups()[5]),
1735 'errt' : float(match.groups()[6])}
1736 else:
1737 return {'xsect' : float(match.groups()[1]),
1738 'errt' : float(match.groups()[2])}
1739
1740 - def print_summary(self, options, step, mode, scale_pdf_info={}):
1741 """print a summary of the results contained in self.cross_sect_dict.
1742 step corresponds to the mintMC step, if =2 (i.e. after event generation)
1743 some additional infos are printed"""
1744
1745 proc_card_lines = open(pjoin(self.me_dir, 'Cards', 'proc_card_mg5.dat')).read().split('\n')
1746 process = ''
1747 for line in proc_card_lines:
1748 if line.startswith('generate') or line.startswith('add process'):
1749 process = process+(line.replace('generate ', '')).replace('add process ','')+' ; '
1750 lpp = {'0':'l', '1':'p', '-1':'pbar'}
1751 proc_info = '\n Process %s\n Run at %s-%s collider (%s + %s GeV)' % \
1752 (process[:-3], lpp[self.run_card['lpp1']], lpp[self.run_card['lpp2']],
1753 self.run_card['ebeam1'], self.run_card['ebeam2'])
1754
1755
1756 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
1757 log_GV_files = glob.glob(pjoin(self.me_dir, \
1758 'SubProcesses', 'P*','G*','log_MINT*.txt'))
1759 all_log_files = glob.glob(pjoin(self.me_dir, \
1760 'SubProcesses', 'P*','G*','log*.txt'))
1761 elif mode == 'NLO':
1762 log_GV_files = glob.glob(pjoin(self.me_dir, \
1763 'SubProcesses', 'P*','all_G*','log*.txt'))
1764 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
1765 '%sG*'%foldName,'log*.txt')) for foldName in ['all_']],[])
1766 elif mode == 'LO':
1767 log_GV_files = ''
1768 all_log_files = sum([glob.glob(pjoin(self.me_dir,'SubProcesses', 'P*',
1769 '%sG*'%foldName,'log*.txt')) for foldName in ['born_']],[])
1770 else:
1771 raise aMCatNLOError, 'Running mode %s not supported.'%mode
1772
1773
1774 if mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']:
1775 status = ['Determining the number of unweighted events per channel',
1776 'Updating the number of unweighted events per channel',
1777 'Summary:']
1778 if step != 2:
1779 message = status[step] + '\n\n Intermediate results:' + \
1780 ('\n Random seed: %(randinit)d' + \
1781 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' + \
1782 '\n Total abs(cross-section): %(xseca)8.3e +- %(erra)6.1e pb \n') \
1783 % self.cross_sect_dict
1784 else:
1785
1786 message = '\n ' + status[step] + proc_info + \
1787 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1788 self.cross_sect_dict
1789
1790 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_scale']=='.true.' and int(self.run_card['ickkw']) != 4:
1791 message = message + \
1792 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \
1793 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low'])
1794 if int(self.run_card['nevents'])>=10000 and self.run_card['reweight_PDF']=='.true.' and int(self.run_card['ickkw']) != 4:
1795 message = message + \
1796 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \
1797 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low'])
1798
1799 neg_frac = (self.cross_sect_dict['xseca'] - self.cross_sect_dict['xsect'])/\
1800 (2. * self.cross_sect_dict['xseca'])
1801 message = message + \
1802 ('\n Number of events generated: %s' + \
1803 '\n Parton shower to be used: %s' + \
1804 '\n Fraction of negative weights: %4.2f' + \
1805 '\n Total running time : %s') % \
1806 (self.run_card['nevents'],
1807 self.run_card['parton_shower'],
1808 neg_frac,
1809 misc.format_timer(time.time()-self.start_time))
1810
1811 elif mode in ['NLO', 'LO']:
1812 status = ['Results after grid setup (cross-section is non-physical):',
1813 'Final results and run summary:']
1814 if step == 0:
1815 message = '\n ' + status[step] + \
1816 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1817 self.cross_sect_dict
1818 elif step == 1:
1819 message = '\n ' + status[step] + proc_info + \
1820 '\n Total cross-section: %(xsect)8.3e +- %(errt)6.1e pb' % \
1821 self.cross_sect_dict
1822 if self.run_card['reweight_scale']=='.true.':
1823 message = message + \
1824 ('\n Ren. and fac. scale uncertainty: +%0.1f%% -%0.1f%%') % \
1825 (scale_pdf_info['scale_upp'], scale_pdf_info['scale_low'])
1826 if self.run_card['reweight_PDF']=='.true.':
1827 message = message + \
1828 ('\n PDF uncertainty: +%0.1f%% -%0.1f%%') % \
1829 (scale_pdf_info['pdf_upp'], scale_pdf_info['pdf_low'])
1830
1831 if (mode in ['NLO', 'LO'] and step!=1) or \
1832 (mode in ['aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO'] and step!=2):
1833 logger.info(message+'\n')
1834 return
1835
1836
1837
1838
1839 try:
1840 message, debug_msg = \
1841 self.compile_advanced_stats(log_GV_files, all_log_files, message)
1842 except Exception as e:
1843 debug_msg = 'Advanced statistics collection failed with error "%s"'%str(e)
1844
1845 logger.debug(debug_msg+'\n')
1846 logger.info(message+'\n')
1847
1848
1849 evt_path = pjoin(self.me_dir, 'Events', self.run_name)
1850 open(pjoin(evt_path, 'summary.txt'),'w').write(message+'\n')
1851 open(pjoin(evt_path, '.full_summary.txt'),
1852 'w').write(message+'\n\n'+debug_msg+'\n')
1853
1854 self.archive_files(evt_path,mode)
1855
1857 """ Copies in the Events/Run_<xxx> directory relevant files characterizing
1858 the run."""
1859
1860 files_to_arxiv = [pjoin('Cards','param_card.dat'),
1861 pjoin('Cards','MadLoopParams.dat'),
1862 pjoin('Cards','FKS_params.dat'),
1863 pjoin('Cards','run_card.dat'),
1864 pjoin('Subprocesses','setscales.f'),
1865 pjoin('Subprocesses','cuts.f')]
1866
1867 if mode in ['NLO', 'LO']:
1868 files_to_arxiv.append(pjoin('Cards','FO_analyse_card.dat'))
1869
1870 if not os.path.exists(pjoin(evt_path,'RunMaterial')):
1871 os.mkdir(pjoin(evt_path,'RunMaterial'))
1872
1873 for path in files_to_arxiv:
1874 if os.path.isfile(pjoin(self.me_dir,path)):
1875 files.cp(pjoin(self.me_dir,path),pjoin(evt_path,'RunMaterial'))
1876 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],cwd=evt_path)
1877 shutil.rmtree(pjoin(evt_path,'RunMaterial'))
1878
1880 """ This functions goes through the log files given in arguments and
1881 compiles statistics about MadLoop stability, virtual integration
1882 optimization and detection of potential error messages into a nice
1883 debug message to printed at the end of the run """
1884
1885
1886
1887 stats = {'UPS':{}, 'Errors':[], 'virt_stats':{}, 'timings':{}}
1888 mint_search = re.compile(r"MINT(?P<ID>\d*).txt")
1889
1890
1891
1892
1893
1894
1895
1896 UPS_stat_finder = re.compile(
1897 r"Satistics from MadLoop:.*"+\
1898 r"Total points tried\:\s+(?P<ntot>\d+).*"+\
1899 r"Stability unknown\:\s+(?P<nsun>\d+).*"+\
1900 r"Stable PS point\:\s+(?P<nsps>\d+).*"+\
1901 r"Unstable PS point \(and rescued\)\:\s+(?P<nups>\d+).*"+\
1902 r"Exceptional PS point \(unstable and not rescued\)\:\s+(?P<neps>\d+).*"+\
1903 r"Double precision used\:\s+(?P<nddp>\d+).*"+\
1904 r"Quadruple precision used\:\s+(?P<nqdp>\d+).*"+\
1905 r"Initialization phase\-space points\:\s+(?P<nini>\d+).*"+\
1906 r"Unknown return code \(100\)\:\s+(?P<n100>\d+).*"+\
1907 r"Unknown return code \(10\)\:\s+(?P<n10>\d+).*",re.DOTALL)
1908
1909 unit_code_meaning = { 0 : 'Not identified (CTModeRun != -1)',
1910 1 : 'CutTools (double precision)',
1911 2 : 'PJFry++',
1912 3 : 'IREGI',
1913 4 : 'Golem95',
1914 9 : 'CutTools (quadruple precision)'}
1915 RetUnit_finder =re.compile(
1916 r"#Unit\s*(?P<unit>\d+)\s*=\s*(?P<n_occurences>\d+)")
1917
1918
1919 for gv_log in log_GV_files:
1920 channel_name = '/'.join(gv_log.split('/')[-5:-1])
1921 log=open(gv_log,'r').read()
1922 UPS_stats = re.search(UPS_stat_finder,log)
1923 for retunit_stats in re.finditer(RetUnit_finder, log):
1924 if channel_name not in stats['UPS'].keys():
1925 stats['UPS'][channel_name] = [0]*10+[[0]*10]
1926 stats['UPS'][channel_name][10][int(retunit_stats.group('unit'))] \
1927 += int(retunit_stats.group('n_occurences'))
1928 if not UPS_stats is None:
1929 try:
1930 stats['UPS'][channel_name][0] += int(UPS_stats.group('ntot'))
1931 stats['UPS'][channel_name][1] += int(UPS_stats.group('nsun'))
1932 stats['UPS'][channel_name][2] += int(UPS_stats.group('nsps'))
1933 stats['UPS'][channel_name][3] += int(UPS_stats.group('nups'))
1934 stats['UPS'][channel_name][4] += int(UPS_stats.group('neps'))
1935 stats['UPS'][channel_name][5] += int(UPS_stats.group('nddp'))
1936 stats['UPS'][channel_name][6] += int(UPS_stats.group('nqdp'))
1937 stats['UPS'][channel_name][7] += int(UPS_stats.group('nini'))
1938 stats['UPS'][channel_name][8] += int(UPS_stats.group('n100'))
1939 stats['UPS'][channel_name][9] += int(UPS_stats.group('n10'))
1940 except KeyError:
1941 stats['UPS'][channel_name] = [int(UPS_stats.group('ntot')),
1942 int(UPS_stats.group('nsun')),int(UPS_stats.group('nsps')),
1943 int(UPS_stats.group('nups')),int(UPS_stats.group('neps')),
1944 int(UPS_stats.group('nddp')),int(UPS_stats.group('nqdp')),
1945 int(UPS_stats.group('nini')),int(UPS_stats.group('n100')),
1946 int(UPS_stats.group('n10')),[0]*10]
1947 debug_msg = ""
1948 if len(stats['UPS'].keys())>0:
1949 nTotPS = sum([chan[0] for chan in stats['UPS'].values()],0)
1950 nTotsun = sum([chan[1] for chan in stats['UPS'].values()],0)
1951 nTotsps = sum([chan[2] for chan in stats['UPS'].values()],0)
1952 nTotups = sum([chan[3] for chan in stats['UPS'].values()],0)
1953 nToteps = sum([chan[4] for chan in stats['UPS'].values()],0)
1954 nTotddp = sum([chan[5] for chan in stats['UPS'].values()],0)
1955 nTotqdp = sum([chan[6] for chan in stats['UPS'].values()],0)
1956 nTotini = sum([chan[7] for chan in stats['UPS'].values()],0)
1957 nTot100 = sum([chan[8] for chan in stats['UPS'].values()],0)
1958 nTot10 = sum([chan[9] for chan in stats['UPS'].values()],0)
1959 nTot1 = [sum([chan[10][i] for chan in stats['UPS'].values()],0) \
1960 for i in range(10)]
1961 UPSfracs = [(chan[0] , 0.0 if chan[1][0]==0 else \
1962 float(chan[1][4]*100)/chan[1][0]) for chan in stats['UPS'].items()]
1963 maxUPS = max(UPSfracs, key = lambda w: w[1])
1964
1965 tmpStr = ""
1966 tmpStr += '\n Number of loop ME evaluations (by MadLoop): %d'%nTotPS
1967 tmpStr += '\n Stability unknown: %d'%nTotsun
1968 tmpStr += '\n Stable PS point: %d'%nTotsps
1969 tmpStr += '\n Unstable PS point (and rescued): %d'%nTotups
1970 tmpStr += '\n Unstable PS point (and not rescued): %d'%nToteps
1971 tmpStr += '\n Only double precision used: %d'%nTotddp
1972 tmpStr += '\n Quadruple precision used: %d'%nTotqdp
1973 tmpStr += '\n Initialization phase-space points: %d'%nTotini
1974 tmpStr += '\n Reduction methods used:'
1975 red_methods = [(unit_code_meaning[i],nTot1[i]) for i in \
1976 unit_code_meaning.keys() if nTot1[i]>0]
1977 for method, n in sorted(red_methods, key= lambda l: l[1], reverse=True):
1978 tmpStr += '\n > %s%s%s'%(method,' '*(33-len(method)),n)
1979 if nTot100 != 0:
1980 debug_msg += '\n Unknown return code (100): %d'%nTot100
1981 if nTot10 != 0:
1982 debug_msg += '\n Unknown return code (10): %d'%nTot10
1983 nUnknownUnit = sum(nTot1[u] for u in range(10) if u \
1984 not in unit_code_meaning.keys())
1985 if nUnknownUnit != 0:
1986 debug_msg += '\n Unknown return code (1): %d'\
1987 %nUnknownUnit
1988
1989 if maxUPS[1]>0.001:
1990 message += tmpStr
1991 message += '\n Total number of unstable PS point detected:'+\
1992 ' %d (%4.2f%%)'%(nToteps,float(100*nToteps)/nTotPS)
1993 message += '\n Maximum fraction of UPS points in '+\
1994 'channel %s (%4.2f%%)'%maxUPS
1995 message += '\n Please report this to the authors while '+\
1996 'providing the file'
1997 message += '\n %s'%str(pjoin(os.path.dirname(self.me_dir),
1998 maxUPS[0],'UPS.log'))
1999 else:
2000 debug_msg += tmpStr
2001
2002
2003
2004
2005
2006
2007 virt_tricks_finder = re.compile(
2008 r"accumulated results Virtual ratio\s*=\s*-?(?P<v_ratio>[\d\+-Eed\.]*)"+\
2009 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_ratio_err>[\d\+-Eed\.]*)\s*\%\)\s*\n"+\
2010 r"accumulated results ABS virtual\s*=\s*-?(?P<v_abs_contr>[\d\+-Eed\.]*)"+\
2011 r"\s*\+/-\s*-?[\d\+-Eed\.]*\s*\(\s*-?(?P<v_abs_contr_err>[\d\+-Eed\.]*)\s*\%\)")
2012
2013 virt_frac_finder = re.compile(r"update virtual fraction to\s*:\s*"+\
2014 "-?(?P<v_frac>[\d\+-Eed\.]*)\s*-?(?P<v_average>[\d\+-Eed\.]*)")
2015
2016 channel_contr_finder = re.compile(r"Final result \[ABS\]\s*:\s*-?(?P<v_contr>[\d\+-Eed\.]*)")
2017
2018 channel_contr_list = {}
2019 for gv_log in log_GV_files:
2020 logfile=open(gv_log,'r')
2021 log = logfile.read()
2022 logfile.close()
2023 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2024 vf_stats = None
2025 for vf_stats in re.finditer(virt_frac_finder, log):
2026 pass
2027 if not vf_stats is None:
2028 v_frac = float(vf_stats.group('v_frac'))
2029 v_average = float(vf_stats.group('v_average'))
2030 try:
2031 if v_frac < stats['virt_stats']['v_frac_min'][0]:
2032 stats['virt_stats']['v_frac_min']=(v_frac,channel_name)
2033 if v_frac > stats['virt_stats']['v_frac_max'][0]:
2034 stats['virt_stats']['v_frac_max']=(v_frac,channel_name)
2035 stats['virt_stats']['v_frac_avg'][0] += v_frac
2036 stats['virt_stats']['v_frac_avg'][1] += 1
2037 except KeyError:
2038 stats['virt_stats']['v_frac_min']=[v_frac,channel_name]
2039 stats['virt_stats']['v_frac_max']=[v_frac,channel_name]
2040 stats['virt_stats']['v_frac_avg']=[v_frac,1]
2041
2042
2043 ccontr_stats = None
2044 for ccontr_stats in re.finditer(channel_contr_finder, log):
2045 pass
2046 if not ccontr_stats is None:
2047 contrib = float(ccontr_stats.group('v_contr'))
2048 try:
2049 if contrib>channel_contr_list[channel_name]:
2050 channel_contr_list[channel_name]=contrib
2051 except KeyError:
2052 channel_contr_list[channel_name]=contrib
2053
2054
2055
2056
2057 average_contrib = 0.0
2058 for value in channel_contr_list.values():
2059 average_contrib += value
2060 if len(channel_contr_list.values()) !=0:
2061 average_contrib = average_contrib / len(channel_contr_list.values())
2062
2063 relevant_log_GV_files = []
2064 excluded_channels = set([])
2065 all_channels = set([])
2066 for log_file in log_GV_files:
2067 channel_name = '/'.join(log_file.split('/')[-3:-1])
2068 all_channels.add(channel_name)
2069 try:
2070 if channel_contr_list[channel_name] > (0.1*average_contrib):
2071 relevant_log_GV_files.append(log_file)
2072 else:
2073 excluded_channels.add(channel_name)
2074 except KeyError:
2075 relevant_log_GV_files.append(log_file)
2076
2077
2078 for gv_log in relevant_log_GV_files:
2079 logfile=open(gv_log,'r')
2080 log = logfile.read()
2081 logfile.close()
2082 channel_name = '/'.join(gv_log.split('/')[-3:-1])
2083
2084 vt_stats = None
2085 for vt_stats in re.finditer(virt_tricks_finder, log):
2086 pass
2087 if not vt_stats is None:
2088 vt_stats_group = vt_stats.groupdict()
2089 v_ratio = float(vt_stats.group('v_ratio'))
2090 v_ratio_err = float(vt_stats.group('v_ratio_err'))
2091 v_contr = float(vt_stats.group('v_abs_contr'))
2092 v_contr_err = float(vt_stats.group('v_abs_contr_err'))
2093 try:
2094 if v_ratio < stats['virt_stats']['v_ratio_min'][0]:
2095 stats['virt_stats']['v_ratio_min']=(v_ratio,channel_name)
2096 if v_ratio > stats['virt_stats']['v_ratio_max'][0]:
2097 stats['virt_stats']['v_ratio_max']=(v_ratio,channel_name)
2098 if v_ratio < stats['virt_stats']['v_ratio_err_min'][0]:
2099 stats['virt_stats']['v_ratio_err_min']=(v_ratio_err,channel_name)
2100 if v_ratio > stats['virt_stats']['v_ratio_err_max'][0]:
2101 stats['virt_stats']['v_ratio_err_max']=(v_ratio_err,channel_name)
2102 if v_contr < stats['virt_stats']['v_contr_min'][0]:
2103 stats['virt_stats']['v_contr_min']=(v_contr,channel_name)
2104 if v_contr > stats['virt_stats']['v_contr_max'][0]:
2105 stats['virt_stats']['v_contr_max']=(v_contr,channel_name)
2106 if v_contr_err < stats['virt_stats']['v_contr_err_min'][0]:
2107 stats['virt_stats']['v_contr_err_min']=(v_contr_err,channel_name)
2108 if v_contr_err > stats['virt_stats']['v_contr_err_max'][0]:
2109 stats['virt_stats']['v_contr_err_max']=(v_contr_err,channel_name)
2110 except KeyError:
2111 stats['virt_stats']['v_ratio_min']=[v_ratio,channel_name]
2112 stats['virt_stats']['v_ratio_max']=[v_ratio,channel_name]
2113 stats['virt_stats']['v_ratio_err_min']=[v_ratio_err,channel_name]
2114 stats['virt_stats']['v_ratio_err_max']=[v_ratio_err,channel_name]
2115 stats['virt_stats']['v_contr_min']=[v_contr,channel_name]
2116 stats['virt_stats']['v_contr_max']=[v_contr,channel_name]
2117 stats['virt_stats']['v_contr_err_min']=[v_contr_err,channel_name]
2118 stats['virt_stats']['v_contr_err_max']=[v_contr_err,channel_name]
2119
2120 vf_stats = None
2121 for vf_stats in re.finditer(virt_frac_finder, log):
2122 pass
2123 if not vf_stats is None:
2124 v_frac = float(vf_stats.group('v_frac'))
2125 v_average = float(vf_stats.group('v_average'))
2126 try:
2127 if v_average < stats['virt_stats']['v_average_min'][0]:
2128 stats['virt_stats']['v_average_min']=(v_average,channel_name)
2129 if v_average > stats['virt_stats']['v_average_max'][0]:
2130 stats['virt_stats']['v_average_max']=(v_average,channel_name)
2131 stats['virt_stats']['v_average_avg'][0] += v_average
2132 stats['virt_stats']['v_average_avg'][1] += 1
2133 except KeyError:
2134 stats['virt_stats']['v_average_min']=[v_average,channel_name]
2135 stats['virt_stats']['v_average_max']=[v_average,channel_name]
2136 stats['virt_stats']['v_average_avg']=[v_average,1]
2137
2138 try:
2139 debug_msg += '\n\n Statistics on virtual integration optimization : '
2140
2141 debug_msg += '\n Maximum virt fraction computed %.3f (%s)'\
2142 %tuple(stats['virt_stats']['v_frac_max'])
2143 debug_msg += '\n Minimum virt fraction computed %.3f (%s)'\
2144 %tuple(stats['virt_stats']['v_frac_min'])
2145 debug_msg += '\n Average virt fraction computed %.3f'\
2146 %float(stats['virt_stats']['v_frac_avg'][0]/float(stats['virt_stats']['v_frac_avg'][1]))
2147 debug_msg += '\n Stats below exclude negligible channels (%d excluded out of %d)'%\
2148 (len(excluded_channels),len(all_channels))
2149 debug_msg += '\n Maximum virt ratio used %.2f (%s)'\
2150 %tuple(stats['virt_stats']['v_average_max'])
2151 debug_msg += '\n Maximum virt ratio found from grids %.2f (%s)'\
2152 %tuple(stats['virt_stats']['v_ratio_max'])
2153 tmpStr = '\n Max. MC err. on virt ratio from grids %.1f %% (%s)'\
2154 %tuple(stats['virt_stats']['v_ratio_err_max'])
2155 debug_msg += tmpStr
2156
2157
2158
2159
2160
2161
2162
2163
2164 tmpStr = '\n Maximum MC error on abs virt %.1f %% (%s)'\
2165 %tuple(stats['virt_stats']['v_contr_err_max'])
2166 debug_msg += tmpStr
2167
2168
2169
2170
2171 except KeyError:
2172 debug_msg += '\n Could not find statistics on the integration optimization. '
2173
2174
2175
2176
2177
2178 timing_stat_finder = re.compile(r"\s*Time spent in\s*(?P<name>\w*)\s*:\s*"+\
2179 "(?P<time>[\d\+-Eed\.]*)\s*")
2180
2181 for logf in log_GV_files:
2182 logfile=open(logf,'r')
2183 log = logfile.read()
2184 logfile.close()
2185 channel_name = '/'.join(logf.split('/')[-3:-1])
2186 mint = re.search(mint_search,logf)
2187 if not mint is None:
2188 channel_name = channel_name+' [step %s]'%mint.group('ID')
2189
2190 for time_stats in re.finditer(timing_stat_finder, log):
2191 try:
2192 stats['timings'][time_stats.group('name')][channel_name]+=\
2193 float(time_stats.group('time'))
2194 except KeyError:
2195 if time_stats.group('name') not in stats['timings'].keys():
2196 stats['timings'][time_stats.group('name')] = {}
2197 stats['timings'][time_stats.group('name')][channel_name]=\
2198 float(time_stats.group('time'))
2199
2200
2201 Tstr = lambda secs: str(datetime.timedelta(seconds=int(secs)))
2202 try:
2203 totTimeList = [(time, chan) for chan, time in \
2204 stats['timings']['Total'].items()]
2205 except KeyError:
2206 totTimeList = []
2207
2208 totTimeList.sort()
2209 if len(totTimeList)>0:
2210 debug_msg += '\n\n Inclusive timing profile :'
2211 debug_msg += '\n Overall slowest channel %s (%s)'%\
2212 (Tstr(totTimeList[-1][0]),totTimeList[-1][1])
2213 debug_msg += '\n Average channel running time %s'%\
2214 Tstr(sum([el[0] for el in totTimeList])/len(totTimeList))
2215 debug_msg += '\n Aggregated total running time %s'%\
2216 Tstr(sum([el[0] for el in totTimeList]))
2217 else:
2218 debug_msg += '\n\n Inclusive timing profile non available.'
2219
2220 sorted_keys = sorted(stats['timings'].keys(), key= lambda stat: \
2221 sum(stats['timings'][stat].values()), reverse=True)
2222 for name in sorted_keys:
2223 if name=='Total':
2224 continue
2225 if sum(stats['timings'][name].values())<=0.0:
2226 debug_msg += '\n Zero time record for %s.'%name
2227 continue
2228 try:
2229 TimeList = [((100.0*time/stats['timings']['Total'][chan]),
2230 chan) for chan, time in stats['timings'][name].items()]
2231 except KeyError, ZeroDivisionError:
2232 debug_msg += '\n\n Timing profile for %s unavailable.'%name
2233 continue
2234 TimeList.sort()
2235 debug_msg += '\n Timing profile for <%s> :'%name
2236 try:
2237 debug_msg += '\n Overall fraction of time %.3f %%'%\
2238 float((100.0*(sum(stats['timings'][name].values())/
2239 sum(stats['timings']['Total'].values()))))
2240 except KeyError, ZeroDivisionError:
2241 debug_msg += '\n Overall fraction of time unavailable.'
2242 debug_msg += '\n Largest fraction of time %.3f %% (%s)'%\
2243 (TimeList[-1][0],TimeList[-1][1])
2244 debug_msg += '\n Smallest fraction of time %.3f %% (%s)'%\
2245 (TimeList[0][0],TimeList[0][1])
2246
2247
2248
2249
2250
2251
2252
2253
2254
2255
2256 err_finder = re.compile(\
2257 r"(?<!of\spaper\sfor\s)\bERROR\b(?!\scalculation\.)",re.IGNORECASE)
2258 for log in all_log_files:
2259 logfile=open(log,'r')
2260 nErrors = len(re.findall(err_finder, logfile.read()))
2261 logfile.close()
2262 if nErrors != 0:
2263 stats['Errors'].append((str(log),nErrors))
2264
2265 nErrors = sum([err[1] for err in stats['Errors']],0)
2266 if nErrors != 0:
2267 debug_msg += '\n WARNING:: A total of %d error%s ha%s been '\
2268 %(nErrors,'s' if nErrors>1 else '','ve' if nErrors>1 else 's')+\
2269 'found in the following log file%s:'%('s' if \
2270 len(stats['Errors'])>1 else '')
2271 for error in stats['Errors'][:3]:
2272 log_name = '/'.join(error[0].split('/')[-5:])
2273 debug_msg += '\n > %d error%s in %s'%\
2274 (error[1],'s' if error[1]>1 else '',log_name)
2275 if len(stats['Errors'])>3:
2276 nRemainingErrors = sum([err[1] for err in stats['Errors']][3:],0)
2277 nRemainingLogs = len(stats['Errors'])-3
2278 debug_msg += '\n And another %d error%s in %d other log file%s'%\
2279 (nRemainingErrors, 's' if nRemainingErrors>1 else '',
2280 nRemainingLogs, 's ' if nRemainingLogs>1 else '')
2281
2282 return message, debug_msg
2283
2284
2286 """this function calls the reweighting routines and creates the event file in the
2287 Event dir. Return the name of the event file created
2288 """
2289 scale_pdf_info={}
2290 if (self.run_card['reweight_scale'] == '.true.' or self.run_card['reweight_PDF'] == '.true.') and int(self.run_card['ickkw']) != 4 :
2291 scale_pdf_info = self.run_reweight(options['reweightonly'])
2292
2293 self.update_status('Collecting events', level='parton', update_results=True)
2294 misc.compile(['collect_events'],
2295 cwd=pjoin(self.me_dir, 'SubProcesses'))
2296 p = misc.Popen(['./collect_events'], cwd=pjoin(self.me_dir, 'SubProcesses'),
2297 stdin=subprocess.PIPE,
2298 stdout=open(pjoin(self.me_dir, 'collect_events.log'), 'w'))
2299 if event_norm.lower() == 'sum':
2300 p.communicate(input = '1\n')
2301 elif event_norm.lower() == 'unity':
2302 p.communicate(input = '3\n')
2303 else:
2304 p.communicate(input = '2\n')
2305
2306
2307 filename = open(pjoin(self.me_dir, 'collect_events.log')).read().split()[-1]
2308
2309 if not os.path.exists(pjoin(self.me_dir, 'SubProcesses', filename)):
2310 raise aMCatNLOError('An error occurred during event generation. ' + \
2311 'The event file has not been created. Check collect_events.log')
2312 evt_file = pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')
2313 misc.gzip(pjoin(self.me_dir, 'SubProcesses', filename), stdout=evt_file)
2314 if not options['reweightonly']:
2315 self.print_summary(options, 2, mode, scale_pdf_info)
2316 logger.info('The %s file has been generated.\n' % (evt_file))
2317 self.results.add_detail('nb_event', nevents)
2318 self.update_status('Events generated', level='parton', update_results=True)
2319 return evt_file[:-3]
2320
2321
2323 """runs mcatnlo on the generated event file, to produce showered-events
2324 """
2325 logger.info('Preparing MCatNLO run')
2326 try:
2327 misc.gunzip(evt_file)
2328 except Exception:
2329 pass
2330
2331 self.banner = banner_mod.Banner(evt_file)
2332 shower = self.banner.get_detail('run_card', 'parton_shower').upper()
2333
2334
2335
2336 if int(int(self.banner.get_detail('run_card', 'nevents')) / \
2337 self.shower_card['nsplit_jobs']) * self.shower_card['nsplit_jobs'] \
2338 != int(self.banner.get_detail('run_card', 'nevents')):
2339 logger.warning(\
2340 'nsplit_jobs in the shower card is not a divisor of the number of events.\n' + \
2341 'Setting it to 1.')
2342 self.shower_card['nsplit_jobs'] = 1
2343
2344
2345 if self.shower_card['nevents'] > 0 and \
2346 self.shower_card['nevents'] < int(self.banner.get_detail('run_card', 'nevents')) and \
2347 self.shower_card['nsplit_jobs'] != 1:
2348 logger.warning(\
2349 'Only a part of the events will be showered.\n' + \
2350 'Setting nsplit_jobs in the shower_card to 1.')
2351 self.shower_card['nsplit_jobs'] = 1
2352
2353 self.banner_to_mcatnlo(evt_file)
2354
2355
2356
2357
2358 if 'fastjet' in self.shower_card['extralibs']:
2359
2360 if not 'stdc++' in self.shower_card['extralibs']:
2361 logger.warning('Linking FastJet: adding stdc++ to EXTRALIBS')
2362 self.shower_card['extralibs'] += ' stdc++'
2363
2364 try:
2365
2366 p = subprocess.Popen([self.options['fastjet'], '--prefix'], \
2367 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2368 output, error = p.communicate()
2369
2370 output = output[:-1]
2371
2372 if not pjoin(output, 'lib') in self.shower_card['extrapaths']:
2373 logger.warning('Linking FastJet: updating EXTRAPATHS')
2374 self.shower_card['extrapaths'] += ' ' + pjoin(output, 'lib')
2375 if not pjoin(output, 'include') in self.shower_card['includepaths']:
2376 logger.warning('Linking FastJet: updating INCLUDEPATHS')
2377 self.shower_card['includepaths'] += ' ' + pjoin(output, 'include')
2378
2379 include_line = '#include "fastjet/ClusterSequence.hh"//INCLUDE_FJ'
2380 namespace_line = 'namespace fj = fastjet;//NAMESPACE_FJ'
2381 except Exception:
2382 logger.warning('Linking FastJet: using fjcore')
2383
2384 self.shower_card['extralibs'] = self.shower_card['extralibs'].replace('fastjet', '')
2385 if not 'fjcore.o' in self.shower_card['analyse']:
2386 self.shower_card['analyse'] += ' fjcore.o'
2387
2388 include_line = '#include "fjcore.hh"//INCLUDE_FJ'
2389 namespace_line = 'namespace fj = fjcore;//NAMESPACE_FJ'
2390
2391 fjwrapper_lines = open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc')).read().split('\n')
2392 for line in fjwrapper_lines:
2393 if '//INCLUDE_FJ' in line:
2394 fjwrapper_lines[fjwrapper_lines.index(line)] = include_line
2395 if '//NAMESPACE_FJ' in line:
2396 fjwrapper_lines[fjwrapper_lines.index(line)] = namespace_line
2397 open(pjoin(self.me_dir, 'MCatNLO', 'srcCommon', 'myfastjetfortran.cc'), 'w').write(\
2398 '\n'.join(fjwrapper_lines) + '\n')
2399
2400 extrapaths = self.shower_card['extrapaths'].split()
2401 if shower == 'HERWIGPP':
2402 extrapaths.append(pjoin(self.options['hepmc_path'], 'lib'))
2403
2404 if shower == 'PYTHIA8' and not os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2405 extrapaths.append(pjoin(self.options['pythia8_path'], 'lib'))
2406
2407 if 'LD_LIBRARY_PATH' in os.environ.keys():
2408 ldlibrarypath = os.environ['LD_LIBRARY_PATH']
2409 else:
2410 ldlibrarypath = ''
2411 ldlibrarypath += ':' + ':'.join(extrapaths)
2412 os.putenv('LD_LIBRARY_PATH', ldlibrarypath)
2413
2414 shower_card_path = pjoin(self.me_dir, 'MCatNLO', 'shower_card.dat')
2415 self.shower_card.write_card(shower, shower_card_path)
2416
2417 mcatnlo_log = pjoin(self.me_dir, 'mcatnlo.log')
2418 self.update_status('Compiling MCatNLO for %s...' % shower, level='shower')
2419 misc.call(['./MCatNLO_MadFKS.inputs'], stdout=open(mcatnlo_log, 'w'),
2420 stderr=open(mcatnlo_log, 'w'),
2421 cwd=pjoin(self.me_dir, 'MCatNLO'))
2422
2423 exe = 'MCATNLO_%s_EXE' % shower
2424 if not os.path.exists(pjoin(self.me_dir, 'MCatNLO', exe)) and \
2425 not os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe')):
2426 print open(mcatnlo_log).read()
2427 raise aMCatNLOError('Compilation failed, check %s for details' % mcatnlo_log)
2428 logger.info(' ... done')
2429
2430
2431 count = 1
2432 while os.path.isdir(pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2433 (shower, count))):
2434 count += 1
2435 rundir = pjoin(self.me_dir, 'MCatNLO', 'RUN_%s_%d' % \
2436 (shower, count))
2437 os.mkdir(rundir)
2438 files.cp(shower_card_path, rundir)
2439
2440
2441
2442 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
2443 'events_*.lhe'))
2444 if max(len(event_files), 1) != self.shower_card['nsplit_jobs']:
2445 logger.info('Cleaning old files and splitting the event file...')
2446
2447 files.rm([f for f in event_files if 'events.lhe' not in f])
2448 if self.shower_card['nsplit_jobs'] > 1:
2449 misc.compile(['split_events'], cwd = pjoin(self.me_dir, 'Utilities'))
2450 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'split_events')],
2451 stdin=subprocess.PIPE,
2452 stdout=open(pjoin(self.me_dir, 'Events', self.run_name, 'split_events.log'), 'w'),
2453 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2454 p.communicate(input = 'events.lhe\n%d\n' % self.shower_card['nsplit_jobs'])
2455 logger.info('Splitting done.')
2456 event_files = glob.glob(pjoin(self.me_dir, 'Events', self.run_name,
2457 'events_*.lhe'))
2458
2459 event_files.sort()
2460
2461 self.update_status('Showering events...', level='shower')
2462 logger.info('(Running in %s)' % rundir)
2463 if shower != 'PYTHIA8':
2464 files.mv(pjoin(self.me_dir, 'MCatNLO', exe), rundir)
2465 files.mv(pjoin(self.me_dir, 'MCatNLO', 'MCATNLO_%s_input' % shower), rundir)
2466 else:
2467
2468 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.cmd'), rundir)
2469 files.mv(pjoin(self.me_dir, 'MCatNLO', 'Pythia8.exe'), rundir)
2470 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
2471 files.ln(pjoin(self.options['pythia8_path'], 'examples', 'config.sh'), rundir)
2472 files.ln(pjoin(self.options['pythia8_path'], 'xmldoc'), rundir)
2473 else:
2474 files.ln(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'), rundir)
2475
2476 if shower == 'HERWIGPP':
2477 try:
2478 files.ln(pjoin(self.options['hwpp_path'], 'bin', 'Herwig++'), rundir)
2479 except Exception:
2480 raise aMCatNLOError('The Herwig++ path set in the configuration file is not valid.')
2481
2482 if os.path.exists(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so')):
2483 files.cp(pjoin(self.me_dir, 'MCatNLO', 'HWPPAnalyzer', 'HepMCFortran.so'), rundir)
2484
2485 files.ln(evt_file, rundir, 'events.lhe')
2486 for i, f in enumerate(event_files):
2487 files.ln(f, rundir,'events_%d.lhe' % (i + 1))
2488
2489 if not self.shower_card['analyse']:
2490
2491 out_id = 'HEP'
2492 else:
2493
2494 out_id = 'TOP'
2495
2496
2497 open(pjoin(rundir, 'shower.sh'), 'w').write(\
2498 open(pjoin(self.me_dir, 'MCatNLO', 'shower_template.sh')).read() \
2499 % {'extralibs': ':'.join(extrapaths)})
2500 subprocess.call(['chmod', '+x', pjoin(rundir, 'shower.sh')])
2501
2502 if event_files:
2503 arg_list = [[shower, out_id, self.run_name, '%d' % (i + 1)] \
2504 for i in range(len(event_files))]
2505 else:
2506 arg_list = [[shower, out_id, self.run_name]]
2507
2508 self.run_all({rundir: 'shower.sh'}, arg_list, 'shower')
2509 self.njobs = 1
2510 self.wait_for_complete('shower')
2511
2512
2513 message = ''
2514 warning = ''
2515 to_gzip = [evt_file]
2516 if out_id == 'HEP':
2517
2518 if shower in ['PYTHIA8', 'HERWIGPP']:
2519 hep_format = 'HEPMC'
2520 ext = 'hepmc'
2521 else:
2522 hep_format = 'StdHEP'
2523 ext = 'hep'
2524
2525 hep_file = '%s_%s_0.%s.gz' % \
2526 (pjoin(os.path.dirname(evt_file), 'events'), shower, ext)
2527 count = 0
2528
2529
2530
2531 while os.path.exists(hep_file) or \
2532 os.path.exists(hep_file.replace('.%s.gz' % ext, '__1.%s.gz' % ext)) :
2533 count +=1
2534 hep_file = '%s_%s_%d.%s.gz' % \
2535 (pjoin(os.path.dirname(evt_file), 'events'), shower, count, ext)
2536
2537 try:
2538 if self.shower_card['nsplit_jobs'] == 1:
2539 files.mv(os.path.join(rundir, 'events.%s.gz' % ext), hep_file)
2540 message = ('The file %s has been generated. \nIt contains showered' + \
2541 ' and hadronized events in the %s format obtained' + \
2542 ' showering the parton-level event file %s.gz with %s') % \
2543 (hep_file, hep_format, evt_file, shower)
2544 else:
2545 hep_list = []
2546 for i in range(self.shower_card['nsplit_jobs']):
2547 hep_list.append(hep_file.replace('.%s.gz' % ext, '__%d.%s.gz' % (i + 1, ext)))
2548 files.mv(os.path.join(rundir, 'events_%d.%s.gz' % (i + 1, ext)), hep_list[-1])
2549 message = ('The following files have been generated:\n %s\nThey contain showered' + \
2550 ' and hadronized events in the %s format obtained' + \
2551 ' showering the (split) parton-level event file %s.gz with %s') % \
2552 ('\n '.join(hep_list), hep_format, evt_file, shower)
2553
2554 except OSError, IOError:
2555 raise aMCatNLOError('No file has been generated, an error occurred.'+\
2556 ' More information in %s' % pjoin(os.getcwd(), 'amcatnlo_run.log'))
2557
2558
2559 if hep_format == 'StdHEP':
2560 try:
2561 self.do_plot('%s -f' % self.run_name)
2562 except Exception, error:
2563 logger.info("Fail to make the plot. Continue...")
2564 pass
2565
2566 elif out_id == 'TOP':
2567
2568 topfiles = []
2569 top_tars = [tarfile.TarFile(f) for f in glob.glob(pjoin(rundir, 'topfile*.tar'))]
2570 for top_tar in top_tars:
2571 topfiles.extend(top_tar.getnames())
2572
2573
2574 if len(top_tars) != self.shower_card['nsplit_jobs']:
2575 raise aMCatNLOError('%d job(s) expected, %d file(s) found' % \
2576 (self.shower_card['nsplit_jobs'], len(top_tars)))
2577
2578
2579
2580 filename = 'plot_%s_%d_' % (shower, 1)
2581 count = 1
2582 while os.path.exists(pjoin(self.me_dir, 'Events',
2583 self.run_name, '%s0.top' % filename)) or \
2584 os.path.exists(pjoin(self.me_dir, 'Events',
2585 self.run_name, '%s0__1.top' % filename)):
2586 count += 1
2587 filename = 'plot_%s_%d_' % (shower, count)
2588
2589 if not topfiles:
2590
2591 waarning = 'No .top file has been generated. For the results of your ' +\
2592 'run, please check inside %s' % rundir
2593
2594 elif self.shower_card['nsplit_jobs'] == 1:
2595
2596 top_tars[0].extractall(path = rundir)
2597 plotfiles = []
2598 for i, file in enumerate(topfiles):
2599 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
2600 '%s%d.top' % (filename, i))
2601 files.mv(pjoin(rundir, file), plotfile)
2602 plotfiles.append(plotfile)
2603
2604 ffiles = 'files'
2605 have = 'have'
2606 if len(plotfiles) == 1:
2607 ffiles = 'file'
2608 have = 'has'
2609
2610 message = ('The %s %s %s been generated, with histograms in the' + \
2611 ' TopDrawer format, obtained by showering the parton-level' + \
2612 ' file %s.gz with %s.') % (ffiles, ', '.join(plotfiles), have, \
2613 evt_file, shower)
2614 else:
2615
2616 topfiles_set = set(topfiles)
2617 plotfiles = []
2618 for j, top_tar in enumerate(top_tars):
2619 top_tar.extractall(path = rundir)
2620 for i, file in enumerate(topfiles_set):
2621 plotfile = pjoin(self.me_dir, 'Events', self.run_name,
2622 '%s%d__%d.top' % (filename, i, j + 1))
2623 files.mv(pjoin(rundir, file), plotfile)
2624 plotfiles.append(plotfile)
2625
2626
2627 if self.shower_card['combine_td']:
2628 misc.compile(['sum_plots'], cwd = pjoin(self.me_dir, 'Utilities'))
2629
2630 if self.banner.get('run_card', 'event_norm').lower() == 'sum':
2631 norm = 1.
2632 elif self.banner.get('run_card', 'event_norm').lower() == 'average':
2633 norm = 1./float(self.shower_card['nsplit_jobs'])
2634
2635 plotfiles = []
2636 for i, file in enumerate(topfiles_set):
2637 filelist = ['%s%d__%d.top' % (filename, i, j + 1) \
2638 for j in range(self.shower_card['nsplit_jobs'])]
2639 infile="%d\n%s\n%s\n" % \
2640 (self.shower_card['nsplit_jobs'],
2641 '\n'.join(filelist),
2642 '\n'.join([str(norm)] * self.shower_card['nsplit_jobs']))
2643
2644 p = misc.Popen([pjoin(self.me_dir, 'Utilities', 'sum_plots')],
2645 stdin=subprocess.PIPE,
2646 stdout=os.open(os.devnull, os.O_RDWR),
2647 cwd=pjoin(self.me_dir, 'Events', self.run_name))
2648 p.communicate(input = infile)
2649 files.mv(pjoin(self.me_dir, 'Events', self.run_name, 'sum.top'),
2650 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
2651 plotfiles.append(pjoin(self.me_dir, 'Events', self.run_name, '%s%d.top' % (filename, i)))
2652 tar = tarfile.open(
2653 pjoin(self.me_dir, 'Events', self.run_name, '%s%d.tar.gz' % (filename, i)), 'w:gz')
2654 for f in filelist:
2655 tar.add(pjoin(self.me_dir, 'Events', self.run_name, f), arcname=f)
2656 files.rm([pjoin(self.me_dir, 'Events', self.run_name, f) for f in filelist])
2657
2658 tar.close()
2659
2660 ffiles = 'files'
2661 have = 'have'
2662 if len(plotfiles) == 1:
2663 ffiles = 'file'
2664 have = 'has'
2665
2666 message = ('The %s %s %s been generated, with histograms in the' + \
2667 ' TopDrawer format, obtained by showering the parton-level' + \
2668 ' file %s.gz with %s.\n' + \
2669 'The files from the different shower ' + \
2670 'jobs (before combining them) can be found inside %s.') % \
2671 (ffiles, ', '.join(plotfiles), have, \
2672 evt_file, shower,
2673 ', '.join([f.replace('top', 'tar.gz') for f in plotfiles]))
2674
2675 else:
2676 message = ('The following files have been generated:\n %s\n' + \
2677 'They contain histograms in the' + \
2678 ' TopDrawer format, obtained by showering the parton-level' + \
2679 ' file %s.gz with %s.') % ('\n '.join(plotfiles), \
2680 evt_file, shower)
2681
2682
2683 run_dir_path = pjoin(rundir, self.run_name)
2684 if os.path.exists(pjoin(run_dir_path,'RunMaterial.tar.gz')):
2685 misc.call(['tar','-xzpf','RunMaterial.tar.gz'],cwd=run_dir_path)
2686 files.cp(pjoin(self.me_dir,'Cards','shower_card.dat'),
2687 pjoin(run_dir_path,'RunMaterial','shower_card_for_%s_%d.dat'\
2688 %(shower, count)))
2689 misc.call(['tar','-czpf','RunMaterial.tar.gz','RunMaterial'],
2690 cwd=run_dir_path)
2691 shutil.rmtree(pjoin(run_dir_path,'RunMaterial'))
2692
2693 for f in to_gzip:
2694 misc.gzip(f)
2695 if message:
2696 logger.info(message)
2697 if warning:
2698 logger.warning(warning)
2699
2700 self.update_status('Run complete', level='shower', update_results=True)
2701
2702
2703
    def set_run_name(self, name, tag=None, level='parton', reload_card=False):
        """define the run name, the run_tag, the banner and the results."""

        # For a given starting level, the list of result attributes that, if
        # already filled in the last entry of this run, force a new tag.
        upgrade_tag = {'parton': ['parton','pythia','pgs','delphes','shower'],
                       'pythia': ['pythia','pgs','delphes'],
                       'shower': ['shower'],
                       'pgs': ['pgs'],
                       'delphes':['delphes'],
                       'plot':[]}

        # Same run as the current one: optionally reload the run_card and
        # only (re)assign the tag.
        if name == self.run_name:
            if reload_card:
                run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
                self.run_card = banner_mod.RunCardNLO(run_card)

            # check if we need to change the tag
            if tag:
                self.run_card['run_tag'] = tag
                self.run_tag = tag
                self.results.add_run(self.run_name, self.run_card)
            else:
                # a level already present in the last entry forces a fresh tag
                for tag in upgrade_tag[level]:
                    if getattr(self.results[self.run_name][-1], tag):
                        tag = self.get_available_tag()
                        self.run_card['run_tag'] = tag
                        self.run_tag = tag
                        self.results.add_run(self.run_name, self.run_card)
                        break
            return  # nothing else to do for an unchanged run name

        # New run: store the results of the previous one first.
        if self.run_name:
            self.store_result()

        self.run_name = name

        # (re)read the run_card for the new run
        run_card = pjoin(self.me_dir, 'Cards','run_card.dat')
        self.run_card = banner_mod.RunCardNLO(run_card)

        new_tag = False
        # recover the banner of the previous level of this run, if any
        self.banner = banner_mod.recover_banner(self.results, level, self.run_name, tag)
        if tag:
            self.run_card['run_tag'] = tag
            new_tag = True
        elif not self.run_name in self.results and level =='parton':
            # brand-new run starting at parton level: default tag is fine
            pass
        elif not self.run_name in self.results:
            # post-parton level requested on a run unknown to the results db
            logger.warning('Trying to run data on unknown run.')
            self.results.add_run(name, self.run_card)
            self.results.update('add run %s' % name, 'all', makehtml=True)
        else:
            for tag in upgrade_tag[level]:
                # this level already exists in the last tag -> switch tag
                if getattr(self.results[self.run_name][-1], tag):
                    tag = self.get_available_tag()
                    self.run_card['run_tag'] = tag
                    new_tag = True
                    break
            if not new_tag:
                # reuse the last tag of this run
                tag = self.results[self.run_name][-1]['tag']
                self.run_card['run_tag'] = tag

        # register (or re-select) the run in the results database
        if name in self.results and not new_tag:
            self.results.def_current(self.run_name)
        else:
            self.results.add_run(self.run_name, self.run_card)

        self.run_tag = self.run_card['run_tag']

        # Return the tag of the previous step that provides the data needed
        # by the requested level (None for parton).
        if level == 'parton':
            return
        elif level == 'pythia':
            return self.results[self.run_name][0]['tag']
        else:
            # walk the entries backwards looking for one with pythia output
            for i in range(-1,-len(self.results[self.run_name])-1,-1):
                tagRun = self.results[self.run_name][i]
                if tagRun.pythia:
                    return tagRun['tag']
2793
2794
        """ tar the pythia results. This is done when we are quite sure that
        the pythia output will not be used anymore """

        # nothing has been run yet
        if not self.run_name:
            return

        # persist the results database in any case
        self.results.save()

        if not self.to_store:
            return

        tag = self.run_card['run_tag']
        # NOTE(review): 'tag' is assigned but not used in this visible span;
        # presumably used by code outside this chunk -- confirm.

        self.to_store = []
2810
2811
        """reads the info in the <init> block of the (LHEF) event file and
        returns them in a dictionary"""
        ev_file = open(evt_file)
        init = ""
        found = False
        while True:
            line = ev_file.readline()
            if "<init>" in line:
                found = True
            elif found and not line.startswith('#'):
                # accumulate the non-comment lines inside the init block
                init += line
            if "</init>" in line or "<event>" in line:
                break
        ev_file.close()

        # The first <init> line follows the Les Houches Event File
        # convention:
        #   IDBMUP(1:2)  beam identities
        #   EBMUP(1:2)   beam energies
        #   PDFGUP(1:2)  PDF author groups
        #   PDFSUP(1:2)  PDF set ids
        #   IDWTUP       event-weighting strategy
        #   NPRUP        number of subprocesses
        init_dict = {}
        init_dict['idbmup1'] = int(init.split()[0])
        init_dict['idbmup2'] = int(init.split()[1])
        init_dict['ebmup1'] = float(init.split()[2])
        init_dict['ebmup2'] = float(init.split()[3])
        init_dict['pdfgup1'] = int(init.split()[4])
        init_dict['pdfgup2'] = int(init.split()[5])
        init_dict['pdfsup1'] = int(init.split()[6])
        init_dict['pdfsup2'] = int(init.split()[7])
        init_dict['idwtup'] = int(init.split()[8])
        init_dict['nprup'] = int(init.split()[9])

        return init_dict
2845
2846
        """creates the mcatnlo input script using the values set in the header of the event_file.
        It also checks if the lhapdf library is used"""
        shower = self.banner.get('run_card', 'parton_shower').upper()
        pdlabel = self.banner.get('run_card', 'pdlabel')
        itry = 0
        # NOTE(review): 'itry' is not used in this visible span -- confirm
        # whether it is dead code.
        nevents = self.shower_card['nevents']
        init_dict = self.get_init_dict(evt_file)

        # never shower more events than have been generated
        if nevents < 0 or \
           nevents > int(self.banner.get_detail('run_card', 'nevents')):
            nevents = int(self.banner.get_detail('run_card', 'nevents'))

        nevents = nevents / self.shower_card['nsplit_jobs']

        # masses used by the Monte Carlo, read from the banner (pdg -> mass)
        mcmass_dict = {}
        for line in [l for l in self.banner['montecarlomasses'].split('\n') if l]:
            pdg = int(line.split()[0])
            mass = float(line.split()[1])
            mcmass_dict[pdg] = mass

        content = 'EVPREFIX=%s\n' % pjoin(os.path.split(evt_file)[1])
        content += 'NEVENTS=%d\n' % nevents
        content += 'NEVENTS_TOT=%d\n' % (int(self.banner.get_detail('run_card', 'nevents')) /\
                                             self.shower_card['nsplit_jobs'])
        content += 'MCMODE=%s\n' % shower
        content += 'PDLABEL=%s\n' % pdlabel
        content += 'ALPHAEW=%s\n' % self.banner.get_detail('param_card', 'sminputs', 1).value

        # masses and widths from the param_card
        content += 'TMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 6).value
        content += 'TWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 6).value
        content += 'ZMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 23).value
        content += 'ZWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 23).value
        content += 'WMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 24).value
        content += 'WWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 24).value
        try:
            content += 'HGGMASS=%s\n' % self.banner.get_detail('param_card', 'mass', 25).value
            content += 'HGGWIDTH=%s\n' % self.banner.get_detail('param_card', 'decay', 25).value
        except KeyError:
            # no pdg 25 in the param_card: fall back to hard-coded defaults
            content += 'HGGMASS=120.\n'
            content += 'HGGWIDTH=0.00575308848\n'
        content += 'beammom1=%s\n' % self.banner.get_detail('run_card', 'ebeam1')
        content += 'beammom2=%s\n' % self.banner.get_detail('run_card', 'ebeam2')
        content += 'BEAM1=%s\n' % self.banner.get_detail('run_card', 'lpp1')
        content += 'BEAM2=%s\n' % self.banner.get_detail('run_card', 'lpp2')
        content += 'DMASS=%s\n' % mcmass_dict[1]
        content += 'UMASS=%s\n' % mcmass_dict[2]
        content += 'SMASS=%s\n' % mcmass_dict[3]
        content += 'CMASS=%s\n' % mcmass_dict[4]
        content += 'BMASS=%s\n' % mcmass_dict[5]
        try:
            content += 'EMASS=%s\n' % mcmass_dict[11]
            content += 'MUMASS=%s\n' % mcmass_dict[13]
            content += 'TAUMASS=%s\n' % mcmass_dict[15]
        except KeyError:
            # lepton masses absent from the banner: read them from the
            # shower-specific MCmasses include file instead
            mcmass_lines = [l for l in \
                    open(pjoin(self.me_dir, 'SubProcesses', 'MCmasses_%s.inc' % shower.upper())
                         ).read().split('\n') if l]
            new_mcmass_dict = {}
            for l in mcmass_lines:
                key, val = l.split('=')
                # fortran double-precision exponent 'd' -> python 'e'
                new_mcmass_dict[key.strip()] = val.replace('d', 'e').strip()
            content += 'EMASS=%s\n' % new_mcmass_dict['mcmass(11)']
            content += 'MUMASS=%s\n' % new_mcmass_dict['mcmass(13)']
            content += 'TAUMASS=%s\n' % new_mcmass_dict['mcmass(15)']

        content += 'GMASS=%s\n' % mcmass_dict[21]
        content += 'EVENT_NORM=%s\n' % self.banner.get_detail('run_card', 'event_norm').lower()
        # check if lhapdf needs to be linked for the shower
        if int(self.shower_card['pdfcode']) > 1 or \
            (pdlabel=='lhapdf' and int(self.shower_card['pdfcode'])==1):
            # an explicit lhapdf set was requested (or the events were
            # generated with lhapdf and pdfcode==1): link lhapdf and copy
            # the needed set
            self.link_lhapdf(pjoin(self.me_dir, 'lib'))
            lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                     stdout = subprocess.PIPE).stdout.read().strip()
            content += 'LHAPDFPATH=%s\n' % lhapdfpath
            pdfsetsdir = self.get_lhapdf_pdfsetsdir()
            if self.shower_card['pdfcode']==1:
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
            else:
                lhaid_list = [abs(int(self.shower_card['pdfcode']))]
                content += 'PDFCODE=%s\n' % self.shower_card['pdfcode']
            self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
        elif int(self.shower_card['pdfcode'])==1:
            # try to use in the shower the same PDF set as used in the event
            # generation; fall back to the shower's internal PDF when no
            # valid LHAPDF installation is available
            try:
                lhapdfpath = subprocess.Popen([self.options['lhapdf'], '--prefix'],
                         stdout = subprocess.PIPE).stdout.read().strip()
                self.link_lhapdf(pjoin(self.me_dir, 'lib'))
                content += 'LHAPDFPATH=%s\n' % lhapdfpath
                pdfsetsdir = self.get_lhapdf_pdfsetsdir()
                lhaid_list = [max([init_dict['pdfsup1'],init_dict['pdfsup2']])]
                content += 'PDFCODE=%s\n' % max([init_dict['pdfsup1'],init_dict['pdfsup2']])
                self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
            except Exception:
                logger.warning('Trying to shower events using the same PDF in the shower as used in the generation'+\
                                   ' of the events using LHAPDF. However, no valid LHAPDF installation found with the'+\
                                   ' needed PDF set. Will use default internal PDF for the shower instead. To use the'+\
                                   ' same set as was used in the event generation install LHAPDF and set the path using'+\
                                   ' "set /path_to_lhapdf/bin/lhapdf-config" from the MadGraph5_aMC@NLO python shell')
                content += 'LHAPDFPATH=\n'
                content += 'PDFCODE=0\n'
        else:
            # use the shower's internal PDF
            content += 'LHAPDFPATH=\n'
            content += 'PDFCODE=0\n'

        content += 'ICKKW=%s\n' % self.banner.get_detail('run_card', 'ickkw')
        content += 'PTJCUT=%s\n' % self.banner.get_detail('run_card', 'ptj')
        # pass the external shower installation paths when set
        if self.options['pythia8_path']:
            content+='PY8PATH=%s\n' % self.options['pythia8_path']
        if self.options['hwpp_path']:
            content+='HWPPPATH=%s\n' % self.options['hwpp_path']
        if self.options['thepeg_path']:
            content+='THEPEGPATH=%s\n' % self.options['thepeg_path']
        if self.options['hepmc_path']:
            content+='HEPMCPATH=%s\n' % self.options['hepmc_path']

        # write the shower input card and return the shower name
        output = open(pjoin(self.me_dir, 'MCatNLO', 'banner.dat'), 'w')
        output.write(content)
        output.close()
        return shower
2980
2981
        """Runs the reweight_xsec_events executables on each sub-event file
        generated, to compute on the fly scale and/or PDF uncertainties."""
        logger.info(' Doing reweight')

        nev_unw = pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted')
        # if only doing reweighting, restore the original bookkeeping file
        if only:
            if os.path.exists(nev_unw + '.orig'):
                files.cp(nev_unw + '.orig', nev_unw)
            else:
                raise aMCatNLOError('Cannot find event file information')

        # read nevents_unweighted to find the sub-event files
        file = open(nev_unw)
        lines = file.read().split('\n')
        file.close()
        # keep a copy of the original file around
        files.cp(nev_unw, nev_unw + '.orig')

        # event files (with their P*/G* path), skipping entries with 0 events
        evt_files = [line.split()[0] for line in lines[:-1] if line.split()[1] != '0']
        # prepare one reweight job per folder
        job_dict = {}
        exe = 'reweight_xsec_events.local'
        for i, evt_file in enumerate(evt_files):
            path, evt = os.path.split(evt_file)
            files.ln(pjoin(self.me_dir, 'SubProcesses', exe), \
                     pjoin(self.me_dir, 'SubProcesses', path))
            job_dict[path] = [exe]

        # 'evt' is the basename left over from the last loop iteration;
        # presumably all folders use the same event-file name -- confirm.
        self.run_all(job_dict, [[evt, '1']], 'Running reweight')

        # check that the reweighted event files are complete
        for evt_file in evt_files:
            last_line = subprocess.Popen(['tail', '-n1', '%s.rwgt' % \
                    pjoin(self.me_dir, 'SubProcesses', evt_file)], \
                    stdout = subprocess.PIPE).stdout.read().strip()
            if last_line != "</LesHouchesEvents>":
                raise aMCatNLOError('An error occurred during reweight. Check the' + \
                        '\'reweight_xsec_events.output\' files inside the ' + \
                        '\'SubProcesses/P*/G*/ directories for details')

        # update the file names in nevents_unweighted (add the .rwgt suffix)
        newfile = open(nev_unw, 'w')
        for line in lines:
            if line:
                newfile.write(line.replace(line.split()[0], line.split()[0] + '.rwgt') + '\n')
        newfile.close()

        return self.pdf_scale_from_reweighting(evt_files)
3033
        """This function takes the files with the scale and pdf values
        written by the reweight_xsec_events.f code
        (P*/G*/pdf_scale_dependence.dat) and computes the overall
        scale and PDF uncertainty (the latter is computed using the
        Hessian method (if lhaid<90000) or Gaussian (if lhaid>90000))
        and returns it in percents. The expected format of the file
        is: n_scales xsec_scale_central xsec_scale1 ... n_pdf
        xsec_pdf0 xsec_pdf1 ...."""
        scale_pdf_info={}
        scales=[]
        pdfs=[]
        numofpdf = 0
        numofscales = 0
        for evt_file in evt_files:
            path, evt=os.path.split(evt_file)
            data_file=open(pjoin(self.me_dir, 'SubProcesses', path, 'scale_pdf_dependence.dat')).read()
            # fortran writes exponents with 'D': convert to python notation
            lines = data_file.replace("D", "E").split("\n")
            if not numofscales:
                numofscales = int(lines[0])
            if not numofpdf:
                numofpdf = int(lines[2])
            scales_this = [float(val) for val in lines[1].split()]
            pdfs_this = [float(val) for val in lines[3].split()]
            # sanity check: every channel must report the same number of
            # scale and PDF entries
            if numofscales != len(scales_this) or numofpdf !=len(pdfs_this):
                logger.info(data_file)
                logger.info((' Expected # of scales: %d\n'+
                             ' Found # of scales: %d\n'+
                             ' Expected # of pdfs: %d\n'+
                             ' Found # of pdfs: %d\n') %
                        (numofscales, len(scales_this), numofpdf, len(pdfs_this)))
                raise aMCatNLOError('inconsistent scale_pdf_dependence.dat')
            if not scales:
                scales = [0.] * numofscales
            if not pdfs:
                pdfs = [0.] * numofpdf
            # sum the cross sections over all channels, entry by entry
            scales = [a + b for a, b in zip(scales, scales_this)]
            pdfs = [a + b for a, b in zip(pdfs, pdfs_this)]

        # determine the central value (entry 0 of either list)
        if numofscales>0 and numofpdf==0:
            cntrl_val=scales[0]
        elif numofpdf>0 and numofscales==0:
            cntrl_val=pdfs[0]
        elif numofpdf>0 and numofscales>0:
            if abs(1-scales[0]/pdfs[0])>0.0001:
                raise aMCatNLOError('Central values for scale and PDF variation not identical')
            else:
                cntrl_val=scales[0]

        # scale uncertainty in percent
        scale_upp=0.0
        scale_low=0.0
        if numofscales>0:
            scale_pdf_info['scale_upp'] = (max(scales)/cntrl_val-1)*100
            scale_pdf_info['scale_low'] = (1-min(scales)/cntrl_val)*100

        # PDF uncertainty in percent
        lhaid=int(self.run_card['lhaid'])
        pdf_upp=0.0
        pdf_low=0.0
        if lhaid <= 90000:
            # Hessian method: entries come in (up, down) eigenvector pairs
            # NOTE(review): the docstring says Hessian for lhaid<90000 but
            # the boundary value 90000 itself lands here -- confirm.
            if numofpdf>1:
                for i in range(int(numofpdf/2)):
                    pdf_upp=pdf_upp+math.pow(max(0.0,pdfs[2*i+1]-cntrl_val,pdfs[2*i+2]-cntrl_val),2)
                    pdf_low=pdf_low+math.pow(max(0.0,cntrl_val-pdfs[2*i+1],cntrl_val-pdfs[2*i+2]),2)
                scale_pdf_info['pdf_upp'] = math.sqrt(pdf_upp)/cntrl_val*100
                scale_pdf_info['pdf_low'] = math.sqrt(pdf_low)/cntrl_val*100
        else:
            # Gaussian method: standard deviation over the replicas
            pdf_stdev=0.0
            for i in range(int(numofpdf-1)):
                pdf_stdev = pdf_stdev + pow(pdfs[i+1] - cntrl_val,2)
            # NOTE(review): divides by numofpdf-2, which raises
            # ZeroDivisionError for numofpdf == 2; presumably replica sets
            # always have many members -- confirm.
            pdf_stdev = math.sqrt(pdf_stdev/int(numofpdf-2))
            scale_pdf_info['pdf_upp'] = pdf_stdev/cntrl_val*100
            scale_pdf_info['pdf_low'] = scale_pdf_info['pdf_upp']
        return scale_pdf_info
3115
3116
        """this function waits for jobs on cluster to complete their run."""

        starttime = time.time()
        # callback used by the cluster to refresh the status display
        update_status = lambda i, r, f: self.update_status((i, r, f, run_type),
                starttime=starttime, level='parton', update_results=True)
        try:
            self.cluster.wait(self.me_dir, update_status)
        except:
            # bare except on purpose: on any failure (including interrupts)
            # remove the pending cluster jobs before re-raising
            self.cluster.remove()
            raise
3129
3130 - def run_all(self, job_dict, arg_list, run_type='monitor', split_jobs = False):
3131 """runs the jobs in job_dict (organized as folder: [job_list]), with arguments args"""
3132 njob_split = 0
3133 self.ijob = 0
3134
3135
3136
3137 self.split_folders = {}
3138
3139 if run_type != 'shower':
3140 self.njobs = sum(len(jobs) for jobs in job_dict.values()) * len(arg_list)
3141 for args in arg_list:
3142 for Pdir, jobs in job_dict.items():
3143 for job in jobs:
3144 if not split_jobs:
3145 self.run_exe(job, args, run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3146 else:
3147 for n in self.find_jobs_to_split(Pdir, job, args[1]):
3148 self.run_exe(job, args + [n], run_type, cwd=pjoin(self.me_dir, 'SubProcesses', Pdir) )
3149 njob_split += 1
3150
3151 if self.cluster_mode == 2:
3152 time.sleep(1)
3153 if njob_split > 0:
3154 self.njobs = njob_split
3155 else:
3156 self.njobs = len(arg_list)
3157 for args in arg_list:
3158 [(cwd, exe)] = job_dict.items()
3159 self.run_exe(exe, args, run_type, cwd)
3160
3161 self.wait_for_complete(run_type)
3162
3163
3164
        """check the integrity of the event files after splitting, and resubmit
        those which are not nicely terminated"""
        to_resubmit = []
        for dir in self.split_folders.keys():
            last_line = ''
            try:
                last_line = subprocess.Popen(
                        ['tail', '-n1', pjoin(dir, 'events.lhe')], \
                    stdout = subprocess.PIPE).stdout.read().strip()
            except IOError:
                pass
            # a complete LHE file must end with the closing tag
            if last_line != "</LesHouchesEvents>":
                to_resubmit.append(dir)

        self.njobs = 0
        if to_resubmit:
            run_type = 'Resubmitting broken jobs'
            logger.info('Some event files are broken, corresponding jobs will be resubmitted.')
            logger.debug('Resubmitting\n' + '\n'.join(to_resubmit) + '\n')
            for dir in to_resubmit:
                # remove the broken folder and resubmit the job with the
                # folder/arguments recorded in split_folders
                files.rm([dir])
                job = self.split_folders[dir][0]
                args = self.split_folders[dir][1:]
                # NOTE(review): run_type is immediately overwritten here, so
                # 'Resubmitting broken jobs' is never shown -- confirm intent.
                run_type = 'monitor'
                cwd = os.path.split(dir)[0]
                self.run_exe(job, args, run_type, cwd=cwd )
                self.njobs +=1

            self.wait_for_complete(run_type)
3196
3197
        """looks into the nevents_unweighed_splitted file to check how many
        split jobs are needed for this (pdir, job). arg is F, B or V"""
        # find the number of the integration channel in the job script
        splittings = []
        ajob = open(pjoin(self.me_dir, 'SubProcesses', pdir, job)).read()
        pattern = re.compile('for i in (\d+) ; do')
        match = re.search(pattern, ajob)
        channel = match.groups()[0]

        # then look up the split folders for this channel
        nevents_file = open(pjoin(self.me_dir, 'SubProcesses', 'nevents_unweighted_splitted')).read()

        # matches entries of the form <pdir>/G<arg><channel>_<n>/events.lhe;
        # channels with zero events (no _<n> suffix) are skipped
        pattern = re.compile(r"%s_(\d+)/events.lhe" % \
                pjoin(pdir, 'G%s%s' % (arg,channel)))
        matches = re.findall(pattern, nevents_file)
        for m in matches:
            splittings.append(m)
        return splittings
3218
3219
3220 - def run_exe(self, exe, args, run_type, cwd=None):
3221 """this basic function launch locally/on cluster exe with args as argument.
3222 """
3223
3224
3225 execpath = None
3226 if cwd and os.path.exists(pjoin(cwd, exe)):
3227 execpath = pjoin(cwd, exe)
3228 elif not cwd and os.path.exists(exe):
3229 execpath = exe
3230 else:
3231 raise aMCatNLOError('Cannot find executable %s in %s' \
3232 % (exe, os.getcwd()))
3233
3234 if self.cluster_mode == 1 and not os.access(execpath, os.X_OK):
3235 subprocess.call(['chmod', '+x', exe], cwd=cwd)
3236
3237 if self.cluster_mode == 0:
3238
3239 misc.call(['./'+exe] + args, cwd=cwd)
3240 self.ijob += 1
3241 self.update_status((max([self.njobs - self.ijob - 1, 0]),
3242 min([1, self.njobs - self.ijob]),
3243 self.ijob, run_type), level='parton')
3244
3245
3246 elif 'reweight' in exe:
3247
3248
3249 input_files, output_files = [], []
3250 pdfinput = self.get_pdf_input_filename()
3251 if os.path.exists(pdfinput):
3252 input_files.append(pdfinput)
3253 input_files.append(pjoin(os.path.dirname(exe), os.path.pardir, 'reweight_xsec_events'))
3254 input_files.append(pjoin(cwd, os.path.pardir, 'leshouche_info.dat'))
3255 input_files.append(args[0])
3256 output_files.append('%s.rwgt' % os.path.basename(args[0]))
3257 output_files.append('reweight_xsec_events.output')
3258 output_files.append('scale_pdf_dependence.dat')
3259
3260 return self.cluster.submit2(exe, args, cwd=cwd,
3261 input_files=input_files, output_files=output_files,
3262 required_output=output_files)
3263
3264 elif 'ajob' in exe:
3265
3266
3267 if type(args[0]) == str:
3268 input_files, output_files, required_output, args = self.getIO_ajob(exe,cwd, args)
3269
3270 self.cluster.submit2(exe, args, cwd=cwd,
3271 input_files=input_files, output_files=output_files,
3272 required_output=required_output)
3273
3274
3275 subfolder=output_files[-1].split('/')[0]
3276 if len(args) == 4 and '_' in subfolder:
3277 self.split_folders[pjoin(cwd,subfolder)] = [exe] + args
3278
3279 elif 'shower' in exe:
3280
3281
3282
3283 input_files, output_files = [], []
3284 shower = args[0]
3285
3286 if shower == 'PYTHIA8':
3287 input_files.append(pjoin(cwd, 'Pythia8.exe'))
3288 input_files.append(pjoin(cwd, 'Pythia8.cmd'))
3289 input_files.append(pjoin(cwd, 'config.sh'))
3290 if os.path.exists(pjoin(self.options['pythia8_path'], 'xmldoc')):
3291 input_files.append(pjoin(self.options['pythia8_path'], 'xmldoc'))
3292 else:
3293 input_files.append(pjoin(self.options['pythia8_path'], 'share/Pythia8/xmldoc'))
3294 else:
3295 input_files.append(pjoin(cwd, 'MCATNLO_%s_EXE' % shower))
3296 input_files.append(pjoin(cwd, 'MCATNLO_%s_input' % shower))
3297 if shower == 'HERWIGPP':
3298 input_files.append(pjoin(cwd, 'Herwig++'))
3299 input_files.append(pjoin(cwd, 'HepMCFortran.so'))
3300 if len(args) == 3:
3301 if os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz')):
3302 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe.gz'))
3303 elif os.path.exists(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe')):
3304 input_files.append(pjoin(self.me_dir, 'Events', self.run_name, 'events.lhe'))
3305 else:
3306 raise aMCatNLOError, 'Event file not present in %s' % \
3307 pjoin(self.me_dir, 'Events', self.run_name)
3308 else:
3309 input_files.append(pjoin(cwd, 'events_%s.lhe' % args[3]))
3310
3311 if len(args) == 3:
3312 output_files.append('mcatnlo_run.log')
3313 else:
3314 output_files.append('mcatnlo_run_%s.log' % args[3])
3315 if args[1] == 'HEP':
3316 if len(args) == 3:
3317 fname = 'events'
3318 else:
3319 fname = 'events_%s' % args[3]
3320 if shower in ['PYTHIA8', 'HERWIGPP']:
3321 output_files.append(fname + '.hepmc.gz')
3322 else:
3323 output_files.append(fname + '.hep.gz')
3324 elif args[1] == 'TOP':
3325 if len(args) == 3:
3326 fname = 'topfile'
3327 else:
3328 fname = 'topfile_%s' % args[3]
3329 output_files.append(fname + '.tar')
3330 else:
3331 raise aMCatNLOError, 'Not a valid output argument for shower job : %d' % args[1]
3332
3333 self.cluster.submit2(exe, args, cwd=cwd,
3334 input_files=input_files, output_files=output_files)
3335
3336 else:
3337 return self.cluster.submit(exe, args, cwd=cwd)
3338
3340
3341
3342
        # collect the input/output files of an 'ajob' so the job can run on
        # the node's local disk; returns (input_files, output_files,
        # required_output, args)
        keep_fourth_arg = False
        output_files = []
        required_output = []
        # files always needed by madevent
        input_files = [pjoin(self.me_dir, 'MGMEVersion.txt'),
                       pjoin(self.me_dir, 'SubProcesses', 'randinit'),
                       pjoin(cwd, 'symfact.dat'),
                       pjoin(cwd, 'iproc.dat'),
                       pjoin(cwd, 'initial_states_map.dat'),
                       pjoin(cwd, 'configs_and_props_info.dat'),
                       pjoin(cwd, 'leshouche_info.dat'),
                       pjoin(cwd, 'param_card.dat'),
                       pjoin(cwd, 'FKS_params.dat')]

        if os.path.exists(pjoin(cwd,'nevents.tar')):
            input_files.append(pjoin(cwd,'nevents.tar'))

        if os.path.exists(pjoin(self.me_dir,'SubProcesses','OLE_order.olc')):
            input_files.append(pjoin(cwd, 'OLE_order.olc'))

        # ship the MadLoop resources as a tarball, creating it if missing
        if os.path.exists(pjoin(cwd,'MadLoop5_resources')):
            input_files.append(pjoin(cwd, 'MadLoop5_resources.tar.gz'))
            if not os.path.exists(pjoin(cwd,'MadLoop5_resources.tar.gz')):
                tf=tarfile.open(pjoin(cwd,'MadLoop5_resources.tar.gz'),'w:gz',
                                dereference=True)
                tf.add(pjoin(cwd,'MadLoop5_resources'),arcname='MadLoop5_resources')
                tf.close()

        # extract the integration-channel numbers from the job script
        Ire = re.compile("for i in ([\d\s]*) ; do")
        try :
            fsock = open(exe)
        except IOError:
            fsock = open(pjoin(cwd,exe))
        text = fsock.read()
        data = Ire.findall(text)
        subdir = ' '.join(data).split()

        if args[0] == '0':
            # fixed-order (mintFO) job
            input_files.append(pjoin(cwd, 'madevent_mintFO'))
            input_files.append(pjoin(self.me_dir, 'SubProcesses','madin.%s' % args[1]))

            for i in subdir:
                current = '%s_G%s' % (args[1],i)
                if os.path.exists(pjoin(cwd,current)):
                    input_files.append(pjoin(cwd, current))
                output_files.append(current)

                required_output.append('%s/results.dat' % current)
                required_output.append('%s/log.txt' % current)
                required_output.append('%s/mint_grids' % current)
                required_output.append('%s/grid.MC_integer' % current)
                if len(args) == 4:
                    # reuse the grids from a previous-level run (args[3])
                    required_output.append('%s/scale_pdf_dependence.dat' % current)
                    args[2] = '-1'

                    base = '%s_G%s' % (args[3],i)
                    if args[0] == '0':
                        to_move = ['grid.MC_integer','mint_grids']
                    elif args[0] == '1':
                        to_move = ['mint_grids', 'grid.MC_integer']
                    else:
                        to_move = []
                    if self.run_card['iappl'] =='2':
                        for grid in glob.glob(pjoin(cwd,base,'grid_obs_*_in.root')):
                            to_move.append(grid)
                    if not os.path.exists(pjoin(cwd,current)):
                        os.mkdir(pjoin(cwd,current))
                        input_files.append(pjoin(cwd, current))
                    for name in to_move:
                        files.cp(pjoin(cwd,base, name),
                                 pjoin(cwd,current))
                    files.cp(pjoin(cwd,base, 'grid.MC_integer'),
                             pjoin(cwd,current))

        elif args[0] == '2':
            # event-generation (mintMC) job
            input_files.append(pjoin(cwd, 'madevent_mintMC'))
            if args[2] in ['0','2']:
                input_files.append(pjoin(self.me_dir, 'SubProcesses','madinMMC_%s.2' % args[1]))

            for i in subdir:
                current = 'G%s%s' % (args[1], i)
                if os.path.exists(pjoin(cwd,current)):
                    input_files.append(pjoin(cwd, current))
                output_files.append(current)
                if len(args) == 4 and args[3] in ['H','S','V','B','F']:
                    # link the preset grids from the previous stage
                    base = '%s_%s' % (args[3],i)
                    files.ln(pjoin(cwd,base,'mint_grids'), name = 'preset_mint_grids',
                             starting_dir=pjoin(cwd,current))
                    files.ln(pjoin(cwd,base,'grid.MC_integer'),
                             starting_dir=pjoin(cwd,current))
                elif len(args) ==4:
                    # split event generation: keep the split index argument
                    keep_fourth_arg = True

                    output_files.append('G%s%s_%s' % (args[1], i, args[3]))
                required_output.append('%s/log_MINT%s.txt' % (current,args[2]))
                if args[2] in ['0','1']:
                    required_output.append('%s/results.dat' % current)
                if args[2] == '1':
                    output_files.append('%s/results.dat' % current)

        else:
            raise aMCatNLOError, 'not valid arguments: %s' %(', '.join(args))

        # find the correct PDF input file
        pdfinput = self.get_pdf_input_filename()
        if os.path.exists(pdfinput):
            input_files.append(pdfinput)

        if len(args) == 4 and not keep_fourth_arg:
            args = args[:3]

        return input_files, output_files, required_output, args
3458
        """writes the madinMMC_?.2 file"""
        # valid values for the run and mint modes
        run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB']
        if run_mode not in run_modes:
            raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
                    % (run_mode, ', '.join(run_modes)))
        mint_modes = [0, 1, 2]
        if mint_mode not in mint_modes:
            # NOTE(review): mint_modes holds ints, so ', '.join(mint_modes)
            # would itself raise TypeError if this branch is taken -- confirm.
            raise aMCatNLOError('%s is not a valid mode for mintMC. Please use one of the following: %s' \
                    % (mint_mode, ', '.join(mint_modes)))
        # the file-name suffix encodes the contribution type
        if run_mode in ['born']:
            name_suffix = 'B'
        elif run_mode in ['virt', 'viSB']:
            name_suffix = 'V'
        else:
            name_suffix = 'F'

        content = \
"""-1 12 ! points, iterations
0.03 ! desired fractional accuracy
1 -0.1 ! alpha, beta for Gsoft
-1 -0.1 ! alpha, beta for Gazi
1 ! Suppress amplitude (0 no, 1 yes)?
1 ! Exact helicity sum (0 yes, n = number/event)?
1 ! Enter Configuration Number:
%1d ! MINT imode: 0 to set-up grids, 1 to perform integral, 2 generate events
1 1 1 ! if imode is 1: Folding parameters for xi_i, phi_i and y_ij
%s ! all, born, real, virt
""" \
                    % (mint_mode, run_mode)
        file = open(pjoin(path, 'madinMMC_%s.2' % name_suffix), 'w')
        file.write(content)
        file.close()
3493
3494 - def write_madin_file(self, path, run_mode, vegas_mode, npoints, niters, accuracy='0'):
3495 """writes the madin.run_mode file"""
3496
3497 run_modes = ['born', 'virt', 'novi', 'all', 'viSB', 'novB', 'grid']
3498 if run_mode not in run_modes:
3499 raise aMCatNLOError('%s is not a valid mode for run. Please use one of the following: %s' \
3500 % (run_mode, ', '.join(run_modes)))
3501 name_suffix = run_mode
3502
3503 content = \
3504 """%s %s ! points, iterations
3505 %s ! accuracy
3506 2 ! 0 fixed grid 2 adjust
3507 1 ! 1 suppress amp, 0 doesnt
3508 1 ! 0 for exact hel sum
3509 1 ! hel configuration numb
3510 'test'
3511 1 ! 1 to save grids
3512 %s ! 0 to exclude, 1 for new run, 2 to restart, 3 to reset w/ keeping grid
3513 %s ! all, born, real, virt
3514 """ \
3515 % (npoints,niters,accuracy,vegas_mode,run_mode)
3516 file = open(pjoin(path, 'madin.%s' % name_suffix), 'w')
3517 file.write(content)
3518 file.close()
3519
3520 - def compile(self, mode, options):
3521 """compiles aMC@NLO to compute either NLO or NLO matched to shower, as
3522 specified in mode"""
3523
3524 os.mkdir(pjoin(self.me_dir, 'Events', self.run_name))
3525
3526 self.banner.write(pjoin(self.me_dir, 'Events', self.run_name,
3527 '%s_%s_banner.txt' % (self.run_name, self.run_tag)))
3528
3529 self.get_characteristics(pjoin(self.me_dir,
3530 'SubProcesses', 'proc_characteristics'))
3531
3532
3533 amcatnlo_log = pjoin(self.me_dir, 'compile_amcatnlo.log')
3534 madloop_log = pjoin(self.me_dir, 'compile_madloop.log')
3535 reweight_log = pjoin(self.me_dir, 'compile_reweight.log')
3536 test_log = pjoin(self.me_dir, 'test.log')
3537
3538 self.update_status('Compiling the code', level=None, update_results=True)
3539
3540
3541 libdir = pjoin(self.me_dir, 'lib')
3542 sourcedir = pjoin(self.me_dir, 'Source')
3543
3544
3545 files.rm([amcatnlo_log, madloop_log, reweight_log, test_log])
3546
3547 if '+' in mode:
3548 mode = mode.split('+')[0]
3549 if mode in ['NLO', 'LO']:
3550 exe = 'madevent_mintFO'
3551 tests = ['test_ME']
3552 self.analyse_card.write_card(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'))
3553 elif mode in ['aMC@NLO', 'aMC@LO','noshower','noshowerLO']:
3554 exe = 'madevent_mintMC'
3555 tests = ['test_ME', 'test_MC']
3556
3557 open(pjoin(self.me_dir, 'SubProcesses', 'analyse_opts'),'w').write('FO_ANALYSE=analysis_dummy.o dbook.o open_output_files_dummy.o\n')
3558
3559
3560 p_dirs = [d for d in \
3561 open(pjoin(self.me_dir, 'SubProcesses', 'subproc.mg')).read().split('\n') if d]
3562
3563 self.do_treatcards('', amcatnlo=True)
3564
3565
3566 if all([os.path.exists(pjoin(self.me_dir, 'SubProcesses', p_dir, exe)) \
3567 for p_dir in p_dirs]) and options['nocompile']:
3568 return
3569
3570
3571 if os.path.exists(pjoin(libdir, 'PDFsets')):
3572 files.rm(pjoin(libdir, 'PDFsets'))
3573
3574
3575 if self.run_card['pdlabel'] == 'lhapdf' and \
3576 (self.banner.get_detail('run_card', 'lpp1') != '0' or \
3577 self.banner.get_detail('run_card', 'lpp1') != '0'):
3578
3579 self.link_lhapdf(libdir, [pjoin('SubProcesses', p) for p in p_dirs])
3580 pdfsetsdir = self.get_lhapdf_pdfsetsdir()
3581 lhaid_list = [int(self.run_card['lhaid'])]
3582 if self.run_card['reweight_PDF'].lower() == '.true.':
3583 lhaid_list.append(int(self.run_card['PDF_set_min']))
3584 lhaid_list.append(int(self.run_card['PDF_set_max']))
3585 self.copy_lhapdf_set(lhaid_list, pdfsetsdir)
3586
3587 else:
3588 if self.run_card['lpp1'] == '1' == self.run_card['lpp2']:
3589 logger.info('Using built-in libraries for PDFs')
3590 if self.run_card['lpp1'] == '0' == self.run_card['lpp2']:
3591 logger.info('Lepton-Lepton collision: Ignoring \'pdlabel\' and \'lhaid\' in the run_card.')
3592 try:
3593 del os.environ['lhapdf']
3594 except KeyError:
3595 pass
3596
3597
3598 if self.run_card['iappl'] != '0':
3599 os.environ['applgrid'] = 'True'
3600
3601 for code in ['applgrid','amcfast']:
3602 try:
3603 p = subprocess.Popen([self.options[code], '--version'], \
3604 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
3605 output, error = p.communicate()
3606 if code is 'applgrid' and output < '1.4.63':
3607 raise aMCatNLOError('Version of APPLgrid is too old. Use 1.4.69 or later.'\
3608 +' You are using %s',output)
3609 if code is 'amcfast' and output < '1.1.1':
3610 raise aMCatNLOError('Version of aMCfast is too old. Use 1.1.1 or later.'\
3611 +' You are using %s',output)
3612 except Exception:
3613 raise aMCatNLOError(('No valid %s installation found. \n' + \
3614 'Please set the path to %s-config by using \n' + \
3615 'MG5_aMC> set <absolute-path-to-%s>/bin/%s-config \n') % (code,code,code,code))
3616
3617 appllibs=" APPLLIBS=$(shell %s --ldcflags) $(shell %s --ldflags) \n" \
3618 % (self.options['applgrid'],self.options['amcfast'])
3619 text=open(pjoin(self.me_dir,'Source','make_opts'),'r').readlines()
3620 text_out=[]
3621 for line in text:
3622 if line.strip().startswith('APPLLIBS=$'):
3623 line=appllibs
3624 text_out.append(line)
3625 open(pjoin(self.me_dir,'Source','make_opts'),'w').writelines(text_out)
3626 else:
3627 try:
3628 del os.environ['applgrid']
3629 except KeyError:
3630 pass
3631
3632 try:
3633 os.environ['fastjet_config'] = self.options['fastjet']
3634 except (TypeError, KeyError):
3635 if 'fastjet_config' in os.environ:
3636 del os.environ['fastjet_config']
3637 os.unsetenv('fastjet_config')
3638
3639
3640 self.update_status('Compiling source...', level=None)
3641 misc.compile(['clean4pdf'], cwd = sourcedir)
3642 misc.compile(cwd = sourcedir)
3643 if os.path.exists(pjoin(libdir, 'libdhelas.a')) \
3644 and os.path.exists(pjoin(libdir, 'libgeneric.a')) \
3645 and os.path.exists(pjoin(libdir, 'libmodel.a')) \
3646 and os.path.exists(pjoin(libdir, 'libpdf.a')):
3647 logger.info(' ...done, continuing with P* directories')
3648 else:
3649 raise aMCatNLOError('Compilation failed')
3650
3651
3652 MCatNLO_libdir = pjoin(self.me_dir, 'MCatNLO', 'lib')
3653 if not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libstdhep.a'))) or \
3654 not os.path.exists(os.path.realpath(pjoin(MCatNLO_libdir, 'libFmcfio.a'))):
3655 if os.path.exists(pjoin(sourcedir,'StdHEP')):
3656 logger.info('Compiling StdHEP (can take a couple of minutes) ...')
3657 misc.compile(['StdHEP'], cwd = sourcedir)
3658 logger.info(' ...done.')
3659 else:
3660 raise aMCatNLOError('Could not compile StdHEP because its'+\
3661 ' source directory could not be found in the SOURCE folder.\n'+\
3662 " Check the MG5_aMC option 'output_dependencies.'")
3663
3664
3665 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
3666 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
3667 if os.path.exists(pjoin(sourcedir,'CutTools')):
3668 logger.info('Compiling CutTools (can take a couple of minutes) ...')
3669 misc.compile(['CutTools'], cwd = sourcedir)
3670 logger.info(' ...done.')
3671 else:
3672 raise aMCatNLOError('Could not compile CutTools because its'+\
3673 ' source directory could not be found in the SOURCE folder.\n'+\
3674 " Check the MG5_aMC option 'output_dependencies.'")
3675 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libcts.a'))) or \
3676 not os.path.exists(os.path.realpath(pjoin(libdir, 'mpmodule.mod'))):
3677 raise aMCatNLOError('CutTools compilation failed.')
3678
3679
3680
3681 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
3682 libdir, 'libcts.a')))),'compiler_version.log')
3683 if os.path.exists(compiler_log_path):
3684 compiler_version_used = open(compiler_log_path,'r').read()
3685 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
3686 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
3687 if os.path.exists(pjoin(sourcedir,'CutTools')):
3688 logger.info('CutTools was compiled with a different fortran'+\
3689 ' compiler. Re-compiling it now...')
3690 misc.compile(['cleanCT'], cwd = sourcedir)
3691 misc.compile(['CutTools'], cwd = sourcedir)
3692 logger.info(' ...done.')
3693 else:
3694 raise aMCatNLOError("CutTools installation in %s"\
3695 %os.path.realpath(pjoin(libdir, 'libcts.a'))+\
3696 " seems to have been compiled with a different compiler than"+\
3697 " the one specified in MG5_aMC. Please recompile CutTools.")
3698
3699
3700 if not os.path.exists(os.path.realpath(pjoin(libdir, 'libiregi.a'))) \
3701 and os.path.exists(pjoin(sourcedir,'IREGI')):
3702 logger.info('Compiling IREGI (can take a couple of minutes) ...')
3703 misc.compile(['IREGI'], cwd = sourcedir)
3704 logger.info(' ...done.')
3705
3706 if os.path.exists(pjoin(libdir, 'libiregi.a')):
3707
3708
3709 compiler_log_path = pjoin(os.path.dirname((os.path.realpath(pjoin(
3710 libdir, 'libiregi.a')))),'compiler_version.log')
3711 if os.path.exists(compiler_log_path):
3712 compiler_version_used = open(compiler_log_path,'r').read()
3713 if not str(misc.get_gfortran_version(misc.detect_current_compiler(\
3714 pjoin(sourcedir,'make_opts')))) in compiler_version_used:
3715 if os.path.exists(pjoin(sourcedir,'IREGI')):
3716 logger.info('IREGI was compiled with a different fortran'+\
3717 ' compiler. Re-compiling it now...')
3718 misc.compile(['cleanIR'], cwd = sourcedir)
3719 misc.compile(['IREGI'], cwd = sourcedir)
3720 logger.info(' ...done.')
3721 else:
3722 raise aMCatNLOError("IREGI installation in %s"\
3723 %os.path.realpath(pjoin(libdir, 'libiregi.a'))+\
3724 " seems to have been compiled with a different compiler than"+\
3725 " the one specified in MG5_aMC. Please recompile IREGI.")
3726
3727
3728 if self.proc_characteristics['has_loops'].lower() == 'true' and \
3729 not os.path.exists(pjoin(self.me_dir,'OLP_virtuals')):
3730 os.environ['madloop'] = 'true'
3731 if mode in ['NLO', 'aMC@NLO', 'noshower']:
3732 tests.append('check_poles')
3733 else:
3734 os.unsetenv('madloop')
3735
3736
3737 self.update_status('Compiling directories...', level=None)
3738
3739 for test in tests:
3740 self.write_test_input(test)
3741
3742 try:
3743 import multiprocessing
3744 if not self.nb_core:
3745 try:
3746 self.nb_core = int(self.options['nb_core'])
3747 except TypeError:
3748 self.nb_core = multiprocessing.cpu_count()
3749 except ImportError:
3750 self.nb_core = 1
3751
3752 compile_options = copy.copy(self.options)
3753 compile_options['nb_core'] = self.nb_core
3754 compile_cluster = cluster.MultiCore(**compile_options)
3755 logger.info('Compiling on %d cores' % self.nb_core)
3756
3757 update_status = lambda i, r, f: self.donothing(i,r,f)
3758 for p_dir in p_dirs:
3759 compile_cluster.submit(prog = compile_dir,
3760 argument = [self.me_dir, p_dir, mode, options,
3761 tests, exe, self.options['run_mode']])
3762 try:
3763 compile_cluster.wait(self.me_dir, update_status)
3764
3765 except:
3766 compile_cluster.remove()
3767 self.quit()
3768
3769 logger.info('Checking test output:')
3770 for p_dir in p_dirs:
3771 logger.info(p_dir)
3772 for test in tests:
3773 logger.info(' Result for %s:' % test)
3774
3775 this_dir = pjoin(self.me_dir, 'SubProcesses', p_dir)
3776
3777 self.check_tests(test, this_dir)
3778
3779
3782
3783
3785 """just call the correct parser for the test log"""
3786 if test in ['test_ME', 'test_MC']:
3787 return self.parse_test_mx_log(pjoin(dir, '%s.log' % test))
3788 elif test == 'check_poles':
3789 return self.parse_check_poles_log(pjoin(dir, '%s.log' % test))
3790
3791
3793 """read and parse the test_ME/MC.log file"""
3794 content = open(log).read()
3795 if 'FAILED' in content:
3796 logger.info('Output of the failing test:\n'+content[:-1],'$MG:color:BLACK')
3797 raise aMCatNLOError('Some tests failed, run cannot continue.\n' + \
3798 'Please check that widths of final state particles (e.g. top) have been' + \
3799 ' set to 0 in the param_card.dat.')
3800 else:
3801 lines = [l for l in content.split('\n') if 'PASSED' in l]
3802 logger.info(' Passed.')
3803 logger.debug('\n'+'\n'.join(lines))
3804
3805
3807 """reads and parse the check_poles.log file"""
3808 content = open(log).read()
3809 npass = 0
3810 nfail = 0
3811 for line in content.split('\n'):
3812 if 'PASSED' in line:
3813 npass +=1
3814 tolerance = float(line.split()[1])
3815 if 'FAILED' in line:
3816 nfail +=1
3817 tolerance = float(line.split()[1])
3818
3819 if nfail + npass == 0:
3820 logger.warning('0 points have been tried')
3821 return
3822
3823 if float(nfail)/float(nfail+npass) > 0.1:
3824 raise aMCatNLOError('Poles do not cancel, run cannot continue')
3825 else:
3826 logger.info(' Poles successfully cancel for %d points over %d (tolerance=%2.1e)' \
3827 %(npass, nfail+npass, tolerance))
3828
3829
3850
3851
3852
3853
3855 """ return the model name """
3856 if hasattr(self, 'model_name'):
3857 return self.model_name
3858
3859 model = 'sm'
3860 proc = []
3861 for line in open(os.path.join(self.me_dir,'Cards','proc_card_mg5.dat')):
3862 line = line.split('#')[0]
3863
3864 if line.startswith('import') and 'model' in line:
3865 model = line.split()[2]
3866 proc = []
3867 elif line.startswith('generate'):
3868 proc.append(line.split(None,1)[1])
3869 elif line.startswith('add process'):
3870 proc.append(line.split(None,2)[2])
3871
3872 self.model = model
3873 self.process = proc
3874 return model
3875
3876
3877
3878
3880 """Ask the question when launching generate_events/multi_run"""
3881
3882 if 'parton' not in options:
3883 options['parton'] = False
3884 if 'reweightonly' not in options:
3885 options['reweightonly'] = False
3886
3887
3888 void = 'NOT INSTALLED'
3889 switch_order = ['order', 'fixed_order', 'shower','madspin']
3890 switch_default = {'order': 'NLO', 'fixed_order': 'OFF', 'shower': void,
3891 'madspin': void}
3892 if not switch:
3893 switch = switch_default
3894 else:
3895 switch.update(dict((k,value) for k,v in switch_default.items() if k not in switch))
3896
3897 default_switch = ['ON', 'OFF']
3898 allowed_switch_value = {'order': ['LO', 'NLO'],
3899 'fixed_order': default_switch,
3900 'shower': default_switch,
3901 'madspin': default_switch}
3902
3903 description = {'order': 'Perturbative order of the calculation:',
3904 'fixed_order': 'Fixed order (no event generation and no MC@[N]LO matching):',
3905 'shower': 'Shower the generated events:',
3906 'madspin': 'Decay particles with the MadSpin module:' }
3907
3908 force_switch = {('shower', 'ON'): {'fixed_order': 'OFF'},
3909 ('madspin', 'ON'): {'fixed_order':'OFF'},
3910 ('fixed_order', 'ON'): {'shower': 'OFF', 'madspin': 'OFF'}
3911 }
3912 special_values = ['LO', 'NLO', 'aMC@NLO', 'aMC@LO', 'noshower', 'noshowerLO']
3913
3914 assign_switch = lambda key, value: switch.__setitem__(key, value if switch[key] != void else void )
3915
3916
3917 if mode == 'auto':
3918 mode = None
3919 if not mode and (options['parton'] or options['reweightonly']):
3920 mode = 'noshower'
3921
3922
3923 available_mode = ['0', '1', '2']
3924 available_mode.append('3')
3925 if os.path.exists(pjoin(self.me_dir, 'Cards', 'shower_card.dat')):
3926 switch['shower'] = 'ON'
3927 else:
3928 switch['shower'] = 'OFF'
3929
3930 if not aMCatNLO or self.options['mg5_path']:
3931 available_mode.append('4')
3932 if os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
3933 switch['madspin'] = 'ON'
3934 else:
3935 switch['madspin'] = 'OFF'
3936
3937 answers = list(available_mode) + ['auto', 'done']
3938 alias = {}
3939 for id, key in enumerate(switch_order):
3940 if switch[key] != void:
3941 answers += ['%s=%s' % (key, s) for s in allowed_switch_value[key]]
3942
3943 alias.update(dict(('%s=%s' % (key, s.lower()), '%s=%s' % (key, s))
3944 for s in allowed_switch_value[key]))
3945 answers += special_values
3946
3947 def create_question(switch):
3948 switch_format = " %i %-60s %12s=%s\n"
3949 question = "The following switches determine which operations are executed:\n"
3950 for id, key in enumerate(switch_order):
3951 question += switch_format % (id+1, description[key], key, switch[key])
3952 question += ' Either type the switch number (1 to %s) to change its default setting,\n' % (id+1)
3953 question += ' or set any switch explicitly (e.g. type \'order=LO\' at the prompt)\n'
3954 question += ' Type \'0\', \'auto\', \'done\' or just press enter when you are done.\n'
3955 return question
3956
3957
3958 def modify_switch(mode, answer, switch):
3959 if '=' in answer:
3960 key, status = answer.split('=')
3961 switch[key] = status
3962 if (key, status) in force_switch:
3963 for key2, status2 in force_switch[(key, status)].items():
3964 if switch[key2] not in [status2, void]:
3965 logger.info('For coherence \'%s\' is set to \'%s\''
3966 % (key2, status2), '$MG:color:BLACK')
3967 switch[key2] = status2
3968 elif answer in ['0', 'auto', 'done']:
3969 return
3970 elif answer in special_values:
3971 logger.info('Enter mode value: Go to the related mode', '$MG:color:BLACK')
3972 if answer == 'LO':
3973 switch['order'] = 'LO'
3974 switch['fixed_order'] = 'ON'
3975 assign_switch('shower', 'OFF')
3976 assign_switch('madspin', 'OFF')
3977 elif answer == 'NLO':
3978 switch['order'] = 'NLO'
3979 switch['fixed_order'] = 'ON'
3980 assign_switch('shower', 'OFF')
3981 assign_switch('madspin', 'OFF')
3982 elif answer == 'aMC@NLO':
3983 switch['order'] = 'NLO'
3984 switch['fixed_order'] = 'OFF'
3985 assign_switch('shower', 'ON')
3986 assign_switch('madspin', 'OFF')
3987 elif answer == 'aMC@LO':
3988 switch['order'] = 'LO'
3989 switch['fixed_order'] = 'OFF'
3990 assign_switch('shower', 'ON')
3991 assign_switch('madspin', 'OFF')
3992 elif answer == 'noshower':
3993 switch['order'] = 'NLO'
3994 switch['fixed_order'] = 'OFF'
3995 assign_switch('shower', 'OFF')
3996 assign_switch('madspin', 'OFF')
3997 elif answer == 'noshowerLO':
3998 switch['order'] = 'LO'
3999 switch['fixed_order'] = 'OFF'
4000 assign_switch('shower', 'OFF')
4001 assign_switch('madspin', 'OFF')
4002 if mode:
4003 return
4004 return switch
4005
4006
4007 modify_switch(mode, self.last_mode, switch)
4008 if switch['madspin'] == 'OFF' and os.path.exists(pjoin(self.me_dir,'Cards','madspin_card.dat')):
4009 assign_switch('madspin', 'ON')
4010
4011 if not self.force:
4012 answer = ''
4013 while answer not in ['0', 'done', 'auto', 'onlyshower']:
4014 question = create_question(switch)
4015 if mode:
4016 answer = mode
4017 else:
4018 answer = self.ask(question, '0', answers, alias=alias)
4019 if answer.isdigit() and answer != '0':
4020 key = switch_order[int(answer) - 1]
4021 opt1 = allowed_switch_value[key][0]
4022 opt2 = allowed_switch_value[key][1]
4023 answer = '%s=%s' % (key, opt1 if switch[key] == opt2 else opt2)
4024
4025 if not modify_switch(mode, answer, switch):
4026 break
4027
4028
4029 if not mode or mode == 'auto':
4030 if switch['order'] == 'LO':
4031 if switch['shower'] == 'ON':
4032 mode = 'aMC@LO'
4033 elif switch['fixed_order'] == 'ON':
4034 mode = 'LO'
4035 else:
4036 mode = 'noshowerLO'
4037 elif switch['order'] == 'NLO':
4038 if switch['shower'] == 'ON':
4039 mode = 'aMC@NLO'
4040 elif switch['fixed_order'] == 'ON':
4041 mode = 'NLO'
4042 else:
4043 mode = 'noshower'
4044 logger.info('will run in mode: %s' % mode)
4045
4046 if mode == 'noshower':
4047 logger.warning("""You have chosen not to run a parton shower. NLO events without showering are NOT physical.
4048 Please, shower the Les Houches events before using them for physics analyses.""")
4049
4050
4051
4052 cards = ['param_card.dat', 'run_card.dat']
4053 ignore = []
4054 if mode in ['LO', 'NLO']:
4055 options['parton'] = True
4056 ignore = ['shower_card.dat', 'madspin_card.dat']
4057 cards.append('FO_analyse_card.dat')
4058 elif switch['madspin'] == 'ON':
4059 cards.append('madspin_card.dat')
4060 if 'aMC@' in mode:
4061 cards.append('shower_card.dat')
4062 if mode == 'onlyshower':
4063 cards = ['shower_card.dat']
4064 if options['reweightonly']:
4065 cards = ['run_card.dat']
4066
4067 self.keep_cards(cards, ignore)
4068
4069 if mode =='onlyshower':
4070 cards = ['shower_card.dat']
4071
4072 if not options['force'] and not self.force:
4073 self.ask_edit_cards(cards, plot=False)
4074
4075 self.banner = banner_mod.Banner()
4076
4077
4078 for card in cards:
4079 self.banner.add(pjoin(self.me_dir, 'Cards', card))
4080
4081 run_settings = '\n'.join(['%s = %s' % (k, v) for (k, v) in switch.items()])
4082 self.banner.add_text('run_settings', run_settings)
4083
4084 if not mode =='onlyshower':
4085 self.run_card = self.banner.charge_card('run_card')
4086 self.run_tag = self.run_card['run_tag']
4087
4088 if not hasattr(self, 'run_name') or not self.run_name:
4089 self.run_name = self.find_available_run_name(self.me_dir)
4090
4091 if self.run_name.startswith('run_'):
4092 if mode in ['LO','aMC@LO','noshowerLO']:
4093 self.run_name += '_LO'
4094 self.set_run_name(self.run_name, self.run_tag, 'parton')
4095 if int(self.run_card['ickkw']) == 3 and mode in ['LO', 'aMC@LO', 'noshowerLO']:
4096 logger.error("""FxFx merging (ickkw=3) not allowed at LO""")
4097 raise self.InvalidCmd(error)
4098 elif int(self.run_card['ickkw']) == 3 and mode in ['aMC@NLO', 'noshower']:
4099 logger.warning("""You are running with FxFx merging enabled. To be able to merge
4100 samples of various multiplicities without double counting, you
4101 have to remove some events after showering 'by hand'. Please
4102 read http://amcatnlo.cern.ch/FxFx_merging.htm for more details.""")
4103 if self.run_card['parton_shower'].upper() == 'PYTHIA6Q':
4104 logger.error("""FxFx merging does not work with Q-squared ordered showers.""")
4105 raise self.InvalidCmd(error)
4106 elif self.run_card['parton_shower'].upper() != 'HERWIG6' and self.run_card['parton_shower'].upper() != 'PYTHIA8':
4107 question="FxFx merging not tested for %s shower. Do you want to continue?\n" % self.run_card['parton_shower'] + \
4108 "Type \'n\' to stop or \'y\' to continue"
4109 answers = ['n','y']
4110 answer = self.ask(question, 'n', answers, alias=alias)
4111 if answer == 'n':
4112 error = '''Stop opertation'''
4113 self.ask_run_configuration(mode, options)
4114
4115 if 'aMC@' in mode or mode == 'onlyshower':
4116 self.shower_card = self.banner.charge_card('shower_card')
4117
4118 elif mode in ['LO', 'NLO']:
4119 analyse_card_path = pjoin(self.me_dir, 'Cards','FO_analyse_card.dat')
4120 self.analyse_card = self.banner.charge_card('FO_analyse_card')
4121
4122
4123 return mode
4124
4125
4126
4127
4128
    """The command line processor of MadGraph"""

    # ---- 'compile' command: usage text and optparse parser ----
    # (class-level attributes: one parser instance shared by all commands)
    _compile_usage = "compile [MODE] [options]\n" + \
                "-- compiles aMC@NLO \n" + \
                " MODE can be either FO, for fixed-order computations, \n" + \
                " or MC for matching with parton-shower monte-carlos. \n" + \
                " (if omitted, it is set to MC)\n"
    _compile_parser = misc.OptionParser(usage=_compile_usage)
    # -f: reuse the cards already in the directory without interactive editing
    _compile_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
4140
    # ---- 'launch' command: usage text and optparse parser ----
    _launch_usage = "launch [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross-section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _launch_parser = misc.OptionParser(usage=_launch_usage)
    # -f: reuse the cards already in the directory without interactive editing
    _launch_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    # -c / -m: override the run mode (cluster vs multicore submission)
    _launch_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _launch_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _launch_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _launch_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _launch_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _launch_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _launch_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _launch_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
4174
    # ---- 'generate_events' command: usage text and optparse parser ----
    # (same semantics as 'launch'; kept as a separate alias command)
    _generate_events_usage = "generate_events [MODE] [options]\n" + \
                "-- execute aMC@NLO \n" + \
                " MODE can be either LO, NLO, aMC@NLO or aMC@LO (if omitted, it is asked in a separate question)\n" + \
                " If mode is set to LO/NLO, no event generation will be performed, but only the \n" + \
                " computation of the total cross-section and the filling of parton-level histograms \n" + \
                " specified in the DIRPATH/SubProcesses/madfks_plot.f file.\n" + \
                " If mode is set to aMC@LO/aMC@NLO, after the cross-section computation, a .lhe \n" + \
                " event file is generated which will be showered with the MonteCarlo specified \n" + \
                " in the run_card.dat\n"

    _generate_events_parser = misc.OptionParser(usage=_generate_events_usage)
    # -f: reuse the cards already in the directory without interactive editing
    _generate_events_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the generate_events, without editing them")
    # -c / -m: override the run mode (cluster vs multicore submission)
    _generate_events_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _generate_events_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _generate_events_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _generate_events_parser.add_option("-r", "--reweightonly", default=False, action='store_true',
                            help="Skip integration and event generation, just run reweight on the" + \
                                 " latest generated event files (see list in SubProcesses/nevents_unweighted)")
    _generate_events_parser.add_option("-p", "--parton", default=False, action='store_true',
                            help="Stop the run after the parton level file generation (you need " + \
                                 "to shower the file in order to get physical results)")
    _generate_events_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
    _generate_events_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
4206
4207
    # ---- 'calculate_xsect' command: usage text and optparse parser ----
    _calculate_xsect_usage = "calculate_xsect [ORDER] [options]\n" + \
                "-- calculate cross-section up to ORDER.\n" + \
                " ORDER can be either LO or NLO (if omitted, it is set to NLO). \n"

    _calculate_xsect_parser = misc.OptionParser(usage=_calculate_xsect_usage)
    # -f: reuse the cards already in the directory without interactive editing
    _calculate_xsect_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the card present in the directory for the launch, without editing them")
    # -c / -m: override the run mode (cluster vs multicore submission)
    _calculate_xsect_parser.add_option("-c", "--cluster", default=False, action='store_true',
                            help="Submit the jobs on the cluster")
    _calculate_xsect_parser.add_option("-m", "--multicore", default=False, action='store_true',
                            help="Submit the jobs on multicore mode")
    _calculate_xsect_parser.add_option("-x", "--nocompile", default=False, action='store_true',
                            help="Skip compilation. Ignored if no executable is found")
    _calculate_xsect_parser.add_option("-n", "--name", default=False, dest='run_name',
                            help="Provide a name to the run")
    _calculate_xsect_parser.add_option("-a", "--appl_start_grid", default=False, dest='appl_start_grid',
                            help="For use with APPLgrid only: start from existing grids")
    _calculate_xsect_parser.add_option("-o", "--only_generation", default=False, action='store_true',
                            help="Skip grid set up, just generate events starting from " + \
                                 "the last available results")
4228
    # ---- 'shower' command: usage text and optparse parser ----
    # Shower settings are read from the event-file header, hence only -f here.
    _shower_usage = 'shower run_name [options]\n' + \
                '-- do shower/hadronization on parton-level file generated for run run_name\n' + \
                ' all the information (e.g. number of events, MonteCarlo, ...\n' + \
                ' are directly read from the header of the event file\n'
    _shower_parser = misc.OptionParser(usage=_shower_usage)
    # -f: reuse the shower_card already in the directory without editing
    _shower_parser.add_option("-f", "--force", default=False, action='store_true',
                            help="Use the shower_card present in the directory for the launch, without editing")