15 """Methods and classes to export matrix elements to fks format."""
16
17 from distutils import dir_util
18 import glob
19 import logging
20 import os
21 import re
22 import shutil
23 import subprocess
24 import string
25 import copy
26 import platform
27
28 import madgraph.core.color_algebra as color
29 import madgraph.core.helas_objects as helas_objects
30 import madgraph.core.base_objects as base_objects
31 import madgraph.fks.fks_helas_objects as fks_helas_objects
32 import madgraph.fks.fks_base as fks
33 import madgraph.fks.fks_common as fks_common
34 import madgraph.iolibs.drawing_eps as draw
35 import madgraph.iolibs.gen_infohtml as gen_infohtml
36 import madgraph.iolibs.files as files
37 import madgraph.various.misc as misc
38 import madgraph.iolibs.file_writers as writers
39 import madgraph.iolibs.template_files as template_files
40 import madgraph.iolibs.ufo_expression_parsers as parsers
41 import madgraph.iolibs.export_v4 as export_v4
42 import madgraph.loop.loop_exporters as loop_exporters
43 import madgraph.various.q_polynomial as q_polynomial
44 import madgraph.various.banner as banner_mod
45
46 import aloha.create_aloha as create_aloha
47
48 import models.write_param_card as write_param_card
49 import models.check_param_card as check_param_card
50 from madgraph import MadGraph5Error, MG5DIR, InvalidCmd
51 from madgraph.iolibs.files import cp, ln, mv
52
53 pjoin = os.path.join
54
55 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
56 logger = logging.getLogger('madgraph.export_fks')
57
58
60 Pdir = args[0]
61 old_pos = args[1]
62 dir_path = args[2]
63
64 devnull = os.open(os.devnull, os.O_RDWR)
65
66 os.chdir(Pdir)
67 subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
68 stdout = devnull)
69 os.chdir(os.path.pardir)
70
71
72
73
74
76 """Class to take care of exporting a set of matrix elements to
77 Fortran (v4) format."""
78
79
80
81
83 """create the directory run_name as a copy of the MadEvent
84 Template, and clean the directory.
85 For now it is just the same as copy_v4template, but it will be modified
86 """
87
88 mgme_dir = self.mgme_dir
89 dir_path = self.dir_path
90 clean =self.opt['clean']
91
92
93 if not os.path.isdir(dir_path):
94 if not mgme_dir:
95 raise MadGraph5Error, \
96 "No valid MG_ME path given for MG4 run directory creation."
97 logger.info('initialize a new directory: %s' % \
98 os.path.basename(dir_path))
99 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
100
101 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)
102
103 for card in ['plot_card']:
104 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
105 try:
106 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
107 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
108 except IOError:
109 logger.warning("Failed to copy " + card + ".dat to default")
110
111 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
112 if not mgme_dir:
113 raise MadGraph5Error, \
114 "No valid MG_ME path given for MG4 run directory creation."
115 try:
116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
117 except IOError:
118 MG5_version = misc.get_pkg_info()
119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
120 "5." + MG5_version['version'])
121
122
123 if clean:
124 logger.info('remove old information in %s' % os.path.basename(dir_path))
125 if os.environ.has_key('MADGRAPH_BASE'):
126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
127 '--web'],cwd=dir_path)
128 else:
129 try:
130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
131 cwd=dir_path)
132 except Exception, why:
133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
134 % (os.path.basename(dir_path),why))
135
136 MG_version = misc.get_pkg_info()
137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
138 MG_version['version'])
139
140
141 self.link_CutTools(dir_path)
142
143 link_tir_libs=[]
144 tir_libs=[]
145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
146 dirpath = os.path.join(self.dir_path, 'SubProcesses')
147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop')
148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
149 link_tir_libs,tir_libs)
150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
151 filename = pjoin(self.dir_path, 'Source','make_opts')
152 calls = self.write_make_opts(writers.MakefileWriter(filename),
153 link_tir_libs,tir_libs)
154
155
156 for card in ['FO_analyse_card', 'shower_card']:
157 try:
158 shutil.copy(pjoin(self.dir_path, 'Cards',
159 card + '.dat'),
160 pjoin(self.dir_path, 'Cards',
161 card + '_default.dat'))
162 except IOError:
163 logger.warning("Failed to copy " + card + ".dat to default")
164
165 cwd = os.getcwd()
166 dirpath = os.path.join(self.dir_path, 'SubProcesses')
167 try:
168 os.chdir(dirpath)
169 except os.error:
170 logger.error('Could not cd to directory %s' % dirpath)
171 return 0
172
173
174 cpfiles= ["SubProcesses/MadLoopParamReader.f",
175 "Cards/MadLoopParams.dat",
176 "SubProcesses/MadLoopParams.inc"]
177
178 for file in cpfiles:
179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
180 os.path.join(self.dir_path, file))
181
182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
184
185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
187 'Cards', 'MadLoopParams.dat'))
188
189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
190 "MadLoopParams.dat"))
191
192
193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
194 "SubProcesses","MadLoopCommons.inc")).read()
195 writer = writers.FortranWriter(os.path.join(self.dir_path,
196 "SubProcesses","MadLoopCommons.f"))
197 writer.writelines(MadLoopCommon%{
198 'print_banner_commands':self.MadLoop_banner},
199 context={'collier_available':False})
200 writer.close()
201
202
203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
204 writers.FortranWriter('cts_mpc.h'))
205
206
207
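# Note on the block below: the FKS_params.dat card copied from the template is
# edited in place, resetting whatever value it carries for #NHelForMCoverHels
# back to -1 (its default), so the exported process always starts from the
# default setting for the MC-over-helicities test.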
208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat')
209 FKS_card_file = open(FKS_card_path,'r')
210 FKS_card = FKS_card_file.read()
211 FKS_card_file.close()
212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
213 "#NHelForMCoverHels\n-1", FKS_card)
214 FKS_card_file = open(FKS_card_path,'w')
215 FKS_card_file.write(FKS_card)
216 FKS_card_file.close()
217
218
219 os.chdir(cwd)
220
221 self.copy_python_files()
222
223
224 self.write_pdf_opendata()
225
226
227
228
229
231 """ Create the file makefile_loop which links to the TIR libraries."""
232
233 file = open(os.path.join(self.mgme_dir,'Template','NLO',
234 'SubProcesses','makefile_loop.inc')).read()
235 replace_dict={}
236 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
237 replace_dict['tir_libs']=' '.join(tir_libs)
238 replace_dict['dotf']='%.f'
239 replace_dict['doto']='%.o'
240 replace_dict['tir_include']=' '.join(tir_include)
241 file=file%replace_dict
242 if writer:
243 writer.writelines(file)
244 else:
245 return file
246
247
249 """ Create the file make_opts which links to the TIR libraries."""
250 file = open(os.path.join(self.mgme_dir,'Template','NLO',
251 'Source','make_opts.inc')).read()
252 replace_dict={}
253 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
254 replace_dict['tir_libs']=' '.join(tir_libs)
255 replace_dict['dotf']='%.f'
256 replace_dict['doto']='%.o'
257 file=file%replace_dict
258 if writer:
259 writer.writelines(file)
260 else:
261 return file
262
263
264
265
267 """copy python files required for the Template"""
268
269 files_to_copy = [ \
270 pjoin('interface','amcatnlo_run_interface.py'),
271 pjoin('interface','extended_cmd.py'),
272 pjoin('interface','common_run_interface.py'),
273 pjoin('interface','coloring_logging.py'),
274 pjoin('various','misc.py'),
275 pjoin('various','shower_card.py'),
276 pjoin('various','FO_analyse_card.py'),
277 pjoin('various','histograms.py'),
278 pjoin('various','banner.py'),
279 pjoin('various','cluster.py'),
280 pjoin('various','systematics.py'),
281 pjoin('various','lhe_parser.py'),
282 pjoin('madevent','sum_html.py'),
283 pjoin('madevent','gen_crossxhtml.py'),
284 pjoin('iolibs','files.py'),
285 pjoin('iolibs','save_load_object.py'),
286 pjoin('iolibs','file_writers.py'),
287 pjoin('..','models','check_param_card.py'),
288 pjoin('__init__.py')
289 ]
290 cp(_file_path+'/interface/.mg5_logging.conf',
291 self.dir_path+'/bin/internal/me5_logging.conf')
292
293 for cp_file in files_to_copy:
294 cp(pjoin(_file_path,cp_file),
295 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
296
297 def convert_model(self, model, wanted_lorentz = [],
298 wanted_couplings = []):
299
300 super(ProcessExporterFortranFKS,self).convert_model(model,
301 wanted_lorentz, wanted_couplings)
302
303 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
304 try:
305 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
306 except OSError as error:
307 pass
308 model_path = model.get('modelpath')
309 shutil.copytree(model_path,
310 pjoin(self.dir_path,'bin','internal','ufomodel'),
311 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
312 if hasattr(model, 'restrict_card'):
313 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
314 'restrict_default.dat')
315 if isinstance(model.restrict_card, check_param_card.ParamCard):
316 model.restrict_card.write(out_path)
317 else:
318 files.cp(model.restrict_card, out_path)
319
320
321
322
323
324
325 def write_maxparticles_file(self, writer, maxparticles):
326 """Write the maxparticles.inc file for MadEvent"""
327
328 lines = "integer max_particles, max_branch\n"
329 lines += "parameter (max_particles=%d) \n" % maxparticles
330 lines += "parameter (max_branch=max_particles-1)"
331
332
333 writer.writelines(lines)
334
335 return True
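# Illustrative example of the include written above: with maxparticles=8 the
# generated maxparticles.inc would read
#   integer max_particles, max_branch
#   parameter (max_particles=8)
#   parameter (max_branch=max_particles-1)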
336
337
338
339
340
342 """Write the maxconfigs.inc file for MadEvent"""
343
344 lines = "integer lmaxconfigs\n"
345 lines += "parameter (lmaxconfigs=%d)" % maxconfigs
346
347
348 writer.writelines(lines)
349
350 return True
351
352
353
354
355
357 """ write an equivalent of the MG4 proc_card so that all the MadEvent4
358 Perl scripts still work properly for a pure MG5 run."""
359
360 proc_card_template = template_files.mg4_proc_card.mg4_template
361 process_template = template_files.mg4_proc_card.process_template
362 process_text = ''
363 coupling = ''
364 new_process_content = []
365
366
367
368 process_str = process_str.replace(' =', '=')
369 process_str = process_str.replace('= ', '=')
370 process_str = process_str.replace(',',' , ')
371
372 for info in process_str.split():
373 if '=' in info:
374 coupling += info + '\n'
375 else:
376 new_process_content.append(info)
377
378
379 process_str = ' '.join(new_process_content)
380
381
382 process_text += process_template.substitute({'process': process_str, \
383 'coupling': coupling})
384
385 text = proc_card_template.substitute({'process': process_text,
386 'model': modelname,
387 'multiparticle':''})
388 ff = open(file_pos, 'w')
389 ff.write(text)
390 ff.close()
391
392
393
394
395
397 """ Write an initial state process map. Each possible PDF
398 combination gets a unique identifier."""
399
400 text=''
401 for i,e in enumerate(initial_states):
402 text=text+str(i+1)+' '+str(len(e))
403 for t in e:
404 text=text+' '
405 try:
406 for p in t:
407 text=text+' '+str(p)
408 except TypeError:
409 text=text+' '+str(t)
410 text=text+'\n'
411
412 ff = open(file_pos, 'w')
413 ff.write(text)
414 ff.close()
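# Sketch of the resulting initial_states_map.dat format (values illustrative):
# each line carries the map index, the number of PDG pairs belonging to it, and
# then the beam-1/beam-2 PDG codes of each pair, e.g.
#   1 2  2 -2  -2 2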
415
417 """ A function returning a string uniquely identifying the matrix
418 element given as argument, so that it can be used as a prefix for all
419 MadLoop5 subroutines and common blocks related to it. This allows
420 several processes to be compiled into one library, as requested by the
421 BLHA (Binoth Les Houches Accord) guidelines. The MadFKS design
422 requires that there is no process prefix."""
423
424 return ''
425
426
427
428
430 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the
431 non-optimized mode"""
432 raise fks_common.FKSProcessError( \
433 "write_coef_specs should be called only in the loop-optimized mode")
434
435
436
437
438
439 def generate_directories_fks(self, matrix_element, fortran_model, me_number,
440 me_ntot, path=os.getcwd(),OLP='MadLoop'):
441 """Generate the Pxxxxx_i directories for a subprocess in MadFKS,
442 including the necessary matrix.f and various helper files"""
443 proc = matrix_element.born_matrix_element['processes'][0]
444
445 if not self.model:
446 self.model = matrix_element.get('processes')[0].get('model')
447
448 cwd = os.getcwd()
449 try:
450 os.chdir(path)
451 except OSError, error:
452 error_msg = "The directory %s should exist in order to be able " % path + \
453 "to \"export\" in it. If you see this error message after " + \
454 "typing the command \"export\", please consider using " + \
455 "the command \"output\" instead. "
456 raise MadGraph5Error, error_msg
457
458 calls = 0
459
460 self.fksdirs = []
461
462 borndir = "P%s" % \
463 (matrix_element.get('processes')[0].shell_string())
464 os.mkdir(borndir)
465 os.chdir(borndir)
466 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot))
467
468
469 self.generate_born_fks_files(matrix_element,
470 fortran_model, me_number, path)
471
472
473
474 if OLP=='NJET':
475 filename = 'OLE_order.lh'
476 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP)
477
478 if matrix_element.virt_matrix_element:
479 calls += self.generate_virt_directory( \
480 matrix_element.virt_matrix_element, \
481 fortran_model, \
482 os.path.join(path, borndir))
483
484
485
486 self.write_real_matrix_elements(matrix_element, fortran_model)
487
488 self.write_pdf_calls(matrix_element, fortran_model)
489
490 filename = 'nFKSconfigs.inc'
491 self.write_nfksconfigs_file(writers.FortranWriter(filename),
492 matrix_element,
493 fortran_model)
494
495 filename = 'iproc.dat'
496 self.write_iproc_file(writers.FortranWriter(filename),
497 me_number)
498
499 filename = 'fks_info.inc'
500 self.write_fks_info_file(writers.FortranWriter(filename),
501 matrix_element,
502 fortran_model)
503
504 filename = 'leshouche_info.dat'
505 nfksconfs,maxproc,maxflow,nexternal=\
506 self.write_leshouche_info_file(filename,matrix_element)
507
508
509
510 if nfksconfs == maxproc == maxflow == 0:
511 nfksconfs = 1
512 (dummylines, maxproc, maxflow) = self.get_leshouche_lines(
513 matrix_element.born_matrix_element, 1)
514
515 filename = 'leshouche_decl.inc'
516 self.write_leshouche_info_declarations(
517 writers.FortranWriter(filename),
518 nfksconfs,maxproc,maxflow,nexternal,
519 fortran_model)
520 filename = 'genps.inc'
521 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes()
522 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis')))
523 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\
524 ncolor,maxflow,fortran_model)
525
526 filename = 'configs_and_props_info.dat'
527 nconfigs,max_leg_number,nfksconfs=self.write_configs_and_props_info_file(
528 filename,
529 matrix_element)
530
531 filename = 'configs_and_props_decl.inc'
532 self.write_configs_and_props_info_declarations(
533 writers.FortranWriter(filename),
534 nconfigs,max_leg_number,nfksconfs,
535 fortran_model)
536
537 filename = 'real_from_born_configs.inc'
538 self.write_real_from_born_configs(
539 writers.FortranWriter(filename),
540 matrix_element,
541 fortran_model)
542
543 filename = 'ngraphs.inc'
544 self.write_ngraphs_file(writers.FortranWriter(filename),
545 nconfigs)
546
547
548 filename = 'real_me_chooser.f'
549 self.write_real_me_wrapper(writers.FortranWriter(filename),
550 matrix_element,
551 fortran_model)
552
553 filename = 'parton_lum_chooser.f'
554 self.write_pdf_wrapper(writers.FortranWriter(filename),
555 matrix_element,
556 fortran_model)
557
558 filename = 'get_color.f'
559 self.write_colors_file(writers.FortranWriter(filename),
560 matrix_element)
561
562 filename = 'nexternal.inc'
563 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
564 self.write_nexternal_file(writers.FortranWriter(filename),
565 nexternal, ninitial)
566 self.proc_characteristic['ninitial'] = ninitial
567 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)
568
569 filename = 'pmass.inc'
570 try:
571 self.write_pmass_file(writers.FortranWriter(filename),
572 matrix_element.real_processes[0].matrix_element)
573 except IndexError:
574 self.write_pmass_file(writers.FortranWriter(filename),
575 matrix_element.born_matrix_element)
576
577
578 self.draw_feynman_diagrams(matrix_element)
579
580 linkfiles = ['BinothLHADummy.f',
581 'check_poles.f',
582 'MCmasses_HERWIG6.inc',
583 'MCmasses_HERWIGPP.inc',
584 'MCmasses_PYTHIA6Q.inc',
585 'MCmasses_PYTHIA6PT.inc',
586 'MCmasses_PYTHIA8.inc',
587 'add_write_info.f',
588 'coupl.inc',
589 'cuts.f',
590 'FKS_params.dat',
591 'initial_states_map.dat',
592 'OLE_order.olc',
593 'FKSParams.inc',
594 'FKSParamReader.f',
595 'cuts.inc',
596 'unlops.inc',
597 'pythia_unlops.f',
598 'driver_mintMC.f',
599 'driver_mintFO.f',
600 'appl_interface.cc',
601 'appl_interface_dummy.f',
602 'appl_common.inc',
603 'reweight_appl.inc',
604 'fastjetfortran_madfks_core.cc',
605 'fastjetfortran_madfks_full.cc',
606 'fjcore.cc',
607 'fastjet_wrapper.f',
608 'fjcore.hh',
609 'fks_Sij.f',
610 'fks_powers.inc',
611 'fks_singular.f',
612 'veto_xsec.f',
613 'veto_xsec.inc',
614 'c_weight.inc',
615 'fks_inc_chooser.f',
616 'leshouche_inc_chooser.f',
617 'configs_and_props_inc_chooser.f',
618 'genps_fks.f',
619 'boostwdir2.f',
620 'madfks_mcatnlo.inc',
621 'open_output_files.f',
622 'open_output_files_dummy.f',
623 'HwU_dummy.f',
624 'madfks_plot.f',
625 'analysis_dummy.f',
626 'analysis_lhe.f',
627 'mint-integrator2.f',
628 'MC_integer.f',
629 'mint.inc',
630 'montecarlocounter.f',
631 'q_es.inc',
632 'recluster.cc',
633 'Boosts.h',
634 'reweight.inc',
635 'reweight0.inc',
636 'reweight1.inc',
637 'reweightNLO.inc',
638 'reweight_all.inc',
639 'reweight_xsec.f',
640 'reweight_xsec_events.f',
641 'reweight_xsec_events_pdf_dummy.f',
642 'iproc_map.f',
643 'run.inc',
644 'run_card.inc',
645 'setcuts.f',
646 'setscales.f',
647 'symmetry_fks_test_MC.f',
648 'symmetry_fks_test_ME.f',
649 'symmetry_fks_v3.f',
650 'trapfpe.c',
651 'vegas2.for',
652 'write_ajob.f',
653 'handling_lhe_events.f',
654 'write_event.f',
655 'fill_MC_mshell.f',
656 'maxparticles.inc',
657 'message.inc',
658 'initcluster.f',
659 'cluster.inc',
660 'cluster.f',
661 'reweight.f',
662 'randinit',
663 'sudakov.inc',
664 'maxconfigs.inc',
665 'timing_variables.inc']
666
667 for file in linkfiles:
668 ln('../' + file , '.')
669 os.system("ln -s ../../Cards/param_card.dat .")
670
671
672 os.system("ln -s ../makefile_fks_dir ./makefile")
673 if matrix_element.virt_matrix_element:
674 os.system("ln -s ../BinothLHA.f ./BinothLHA.f")
675 elif OLP!='MadLoop':
676 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f")
677 else:
678 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f")
679
680
681
682
683
684
685
686
687 os.chdir(os.path.pardir)
688
689 filename = 'subproc.mg'
690 files.append_to_file(filename,
691 self.write_subproc,
692 borndir)
693
694
695 os.chdir(cwd)
696
697 gen_infohtml.make_info_html_nlo(self.dir_path)
698
699
700 return calls
701
702
703
704
706 """Create the default run_card (both run_card.dat and run_card_default.dat) for this process."""
707
708 run_card = banner_mod.RunCardNLO()
709
710 run_card.create_default_for_process(self.proc_characteristic,
711 history,
712 processes)
713
714 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
715 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
716
717
728
729 def finalize(self, matrix_elements, history, mg5options, flaglist):
730 """Finalize the FKS directory by creating jpeg diagrams, html
731 pages, proc_card_mg5.dat and amcatnlo.tar.gz, and create the MA5 cards if
732 necessary."""
733
734 devnull = os.open(os.devnull, os.O_RDWR)
735 try:
736 res = misc.call([mg5options['lhapdf'], '--version'], \
737 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
738 except Exception:
739 res = 1
740 if res != 0:
741 logger.info('The value for lhapdf in the current configuration does not ' + \
742 'correspond to a valid executable.\nPlease set it correctly either in ' + \
743 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \
744 'and regenerate the process. \nTo avoid regeneration, edit the ' + \
745 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \
746 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n')
747
748 compiler_dict = {'fortran': mg5options['fortran_compiler'],
749 'cpp': mg5options['cpp_compiler'],
750 'f2py': mg5options['f2py_compiler']}
751
752 if 'nojpeg' in flaglist:
753 makejpg = False
754 else:
755 makejpg = True
756 output_dependencies = mg5options['output_dependencies']
757
758
759 self.proc_characteristic['grouped_matrix'] = False
760 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']
761
762 self.create_proc_charac()
763
764 self.create_run_card(matrix_elements.get_processes(), history)
765
766
767
768
769
770
771
772
773 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f')
774 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc')
775 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model)
776
777
778
779 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc')
780 self.write_maxconfigs_file(writers.FortranWriter(filename),
781 matrix_elements.get_max_configs())
782
783
784 filename = os.path.join(self.dir_path,'Source','maxparticles.inc')
785 self.write_maxparticles_file(writers.FortranWriter(filename),
786 matrix_elements.get_max_particles())
787
788
789 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses'))
790
791
792 fcompiler_chosen = self.set_fortran_compiler(compiler_dict)
793 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp'])
794
795 old_pos = os.getcwd()
796 os.chdir(os.path.join(self.dir_path, 'SubProcesses'))
797 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \
798 proc[0] == 'P']
799
800 devnull = os.open(os.devnull, os.O_RDWR)
801
802 if makejpg:
803 logger.info("Generate jpeg diagrams")
804 for Pdir in P_dir_list:
805 os.chdir(Pdir)
806 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
807 stdout = devnull)
808 os.chdir(os.path.pardir)
809
810 logger.info("Generate web pages")
811
812
813 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
814 stdout = devnull)
815
816 os.chdir(os.path.pardir)
817
818
819
820
821
822
823
824
825
826
827 if os.path.isdir('Cards'):
828 output_file = os.path.join('Cards', 'proc_card_mg5.dat')
829 history.write(output_file)
830
831
832 for card in ['run_card', 'FO_analyse_card', 'shower_card']:
833 try:
834 shutil.copy(pjoin(self.dir_path, 'Cards',
835 card + '.dat'),
836 pjoin(self.dir_path, 'Cards',
837 card + '_default.dat'))
838 except IOError:
839 logger.warning("Failed to copy " + card + ".dat to default")
840
841
842 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
843 stdout = devnull)
844
845
846 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')):
847 if os.path.exists('amcatnlo.tar.gz'):
848 os.remove('amcatnlo.tar.gz')
849 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')],
850 stdout = devnull)
851
852 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
853 stdout = devnull)
854
855
856 os.chdir(old_pos)
857
858
859
860 base_compiler= ['FC=g77','FC=gfortran']
861
862 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP')
863
864 if output_dependencies == 'external':
865
866 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \
867 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')):
868 if 'FC' not in os.environ or not os.environ['FC']:
869 path = os.path.join(StdHep_path, 'src', 'make_opts')
870 text = open(path).read()
871 for base in base_compiler:
872 text = text.replace(base,'FC=%s' % fcompiler_chosen)
873 open(path, 'w').writelines(text)
874
875 logger.info('Compiling StdHEP. This has to be done only once.')
876 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP'))
877 logger.info('Done.')
878
879 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \
880 pjoin(self.dir_path, 'MCatNLO', 'lib'))
881 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \
882 pjoin(self.dir_path, 'MCatNLO', 'lib'))
883
884 elif output_dependencies == 'internal':
885 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP')
886 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True)
887
888 linkfiles = ['libstdhep.a', 'libFmcfio.a']
889 for file in linkfiles:
890 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file),
891 os.path.join(self.dir_path, 'MCatNLO', 'lib'))
892 if 'FC' not in os.environ or not os.environ['FC']:
893 path = pjoin(StdHEP_internal_path, 'src', 'make_opts')
894 text = open(path).read()
895 for base in base_compiler:
896 text = text.replace(base,'FC=%s' % fcompiler_chosen)
897 open(path, 'w').writelines(text)
898
899 misc.compile(['clean'],cwd = StdHEP_internal_path)
900
901 elif output_dependencies == 'environment_paths':
902
903
904 libStdHep = misc.which_lib('libstdhep.a')
905 libFmcfio = misc.which_lib('libFmcfio.a')
906 if not libStdHep is None and not libFmcfio is None:
907 logger.info('MG5_aMC is using StdHep installation found at %s.'%\
908 os.path.dirname(libStdHep))
909 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
910 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
911 else:
912 raise InvalidCmd("Could not find the location of the files"+\
913 " libstdhep.a and libFmcfio.a in your environment paths.")
914
915 else:
916 raise MadGraph5Error, 'output_dependencies option %s not recognized'\
917 %output_dependencies
918
919
920 if 'madanalysis5_path' in self.opt and not \
921 self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
922
923 processes = sum([me.get('processes') if not isinstance(me, str) else [] \
924 for me in matrix_elements.get('matrix_elements')],[])
925
926
927
928 if len(processes)==0:
929 processes = self.born_processes
930 if len(processes)==0:
931 logger.warning(
932 """MG5aMC could not provide to Madanalysis5 the list of processes generated.
933 As a result, the default card will not be tailored to the process generated.
934 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""")
935
936
937 self.create_default_madanalysis5_cards(
938 history, self.proc_defs, [processes,]*len(self.proc_defs),
939 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
940 levels =['hadron'])
941
943 """Writes the real_from_born_configs.inc file that contains
944 the mapping from a given born configuration (as used
945 e.g. in the multi-channel phase-space integration) to the
946 corresponding real-emission diagram, i.e. the real-emission
947 diagram in which the combined parton ij is split into i_fks and
948 j_fks."""
949 lines=[]
950 lines2=[]
951 max_links=0
952 born_me=matrix_element.born_matrix_element
953 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
954 iFKS=iFKS+1
955 links=conf['fks_info']['rb_links']
956 max_links=max(max_links,len(links))
957 for i,diags in enumerate(links):
958 if not i == diags['born_conf']:
959 print links
960 raise MadGraph5Error, "born_conf should be canonically ordered"
961 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
962 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
963 % (iFKS,len(links),real_configs))
964
965 lines2.append("integer irfbc")
966 lines2.append("integer real_from_born_conf(%d,%d)" \
967 % (max_links,len(matrix_element.get_fks_info_list())))
968
969 writer.writelines(lines2+lines)
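# Illustrative example of a data statement written above: if FKS configuration 1
# has three born configurations mapped to real-emission configurations 2, 5 and 7
# (after the +1 offset), the generated line reads
#   data (real_from_born_conf(irfbc,1),irfbc=1,3) /2, 5, 7/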
970
971
972
973
974
975
977 """Write the get_mass_width_fcts.f file for MG4.
978 Also update the makeinc.inc file
979 """
980 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero']
981 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero']
982
983 iflines_mass = ''
984 iflines_width = ''
985
986 for i, part in enumerate(mass_particles):
987 if i == 0:
988 ifstring = 'if'
989 else:
990 ifstring = 'else if'
991 if part['self_antipart']:
992 iflines_mass += '%s (id.eq.%d) then\n' % \
993 (ifstring, part.get_pdg_code())
994 else:
995 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
996 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
997 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass')
998
999 for i, part in enumerate(width_particles):
1000 if i == 0:
1001 ifstring = 'if'
1002 else:
1003 ifstring = 'else if'
1004 if part['self_antipart']:
1005 iflines_width += '%s (id.eq.%d) then\n' % \
1006 (ifstring, part.get_pdg_code())
1007 else:
1008 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1009 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1010 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width')
1011
1012 replace_dict = {'iflines_mass' : iflines_mass,
1013 'iflines_width' : iflines_width}
1014
1015 file = open(os.path.join(_file_path, \
1016 'iolibs/template_files/get_mass_width_fcts.inc')).read()
1017 file = file % replace_dict
1018
1019
1020 writer.writelines(file)
1021
1022
1023 makeinc_content = open(makeinc).read()
1024 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ')
1025 open(makeinc, 'w').write(makeinc_content)
1026
1027 return
1028
1029
1031 """writes the declarations for the variables relevant for configs_and_props
1032 """
1033 lines = []
1034 lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
1035 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
1036 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
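# max_leg_number is the most negative internal-propagator label encountered, so
# its sign is flipped to obtain the (positive) maximum number of branches.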
1037 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
1038 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1039 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1040 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1041 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1042 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1043 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1044
1045 writer.writelines(lines)
1046
1047
1049 """writes the configs_and_props_info.dat file that contains
1050 all the (real-emission) configurations (IFOREST) as well as
1051 the masses and widths of intermediate particles"""
1052 lines = []
1053 lines.append("# C -> MAPCONFIG_D")
1054 lines.append("# F/D -> IFOREST_D")
1055 lines.append("# S -> SPROP_D")
1056 lines.append("# T -> TPRID_D")
1057 lines.append("# M -> PMASS_D/PWIDTH_D")
1058 lines.append("# P -> POW_D")
1059 lines2 = []
1060 nconfs = len(matrix_element.get_fks_info_list())
1061 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1062
1063 max_iconfig=0
1064 max_leg_number=0
1065
1066 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
1067 iFKS=iFKS+1
1068 iconfig = 0
1069 s_and_t_channels = []
1070 mapconfigs = []
1071 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
1072 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
1073 model = fks_matrix_element.get('base_amplitude').get('process').get('model')
1074 minvert = min([max([len(vert.get('legs')) for vert in \
1075 diag.get('vertices')]) for diag in base_diagrams])
1076
1077 lines.append("# ")
1078 lines.append("# nFKSprocess %d" % iFKS)
1079 for idiag, diag in enumerate(base_diagrams):
1080 if any([len(vert.get('legs')) > minvert for vert in
1081 diag.get('vertices')]):
1082
1083 continue
1084 iconfig = iconfig + 1
1085 helas_diag = fks_matrix_element.get('diagrams')[idiag]
1086 mapconfigs.append(helas_diag.get('number'))
1087 lines.append("# Diagram %d for nFKSprocess %d" % \
1088 (helas_diag.get('number'),iFKS))
1089
1090 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1091 helas_diag.get('number')))
1092
1093
1094
1095 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1096 get_s_and_t_channels(ninitial, model, 990)
1097
1098 s_and_t_channels.append([schannels, tchannels])
1099
1100
1101 allchannels = schannels
1102 if len(tchannels) > 1:
1103
1104 allchannels = schannels + tchannels
1105
1106 for vert in allchannels:
1107 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1108 last_leg = vert.get('legs')[-1]
1109 lines.append("F %4d %4d %4d %4d" % \
1110 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
1111 for d in daughters:
1112 lines.append("D %4d" % d)
1113 if vert in schannels:
1114 lines.append("S %4d %4d %4d %10d" % \
1115 (iFKS,last_leg.get('number'), iconfig,
1116 last_leg.get('id')))
1117 elif vert in tchannels[:-1]:
1118 lines.append("T %4d %4d %4d %10d" % \
1119 (iFKS,last_leg.get('number'), iconfig,
1120 abs(last_leg.get('id'))))
1121
1122
1123 max_leg_number = min(max_leg_number,last_leg.get('number'))
1124 max_iconfig = max(max_iconfig,iconfig)
1125
1126
1127 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1128 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1129
1130
1131 lines2.append("# ")
1132 particle_dict = fks_matrix_element.get('processes')[0].get('model').\
1133 get('particle_dict')
1134
1135 for iconf, configs in enumerate(s_and_t_channels):
1136 for vertex in configs[0] + configs[1][:-1]:
1137 leg = vertex.get('legs')[-1]
1138 if leg.get('id') not in particle_dict:
1139
1140 pow_part = 0
1141 else:
1142 particle = particle_dict[leg.get('id')]
1143
1144 pow_part = 1 + int(particle.is_boson())
1145
1146 lines2.append("M %4d %4d %4d %10d " % \
1147 (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
1148 lines2.append("P %4d %4d %4d %4d " % \
1149 (iFKS,leg.get('number'), iconf + 1, pow_part))
1150
1151
1152 open(filename,'w').write('\n'.join(lines+lines2))
1153
1154 return max_iconfig, max_leg_number, nconfs
1155
1156
1159 """writes the declarations for the variables relevant for leshouche_info
1160 """
1161 lines = []
1162 lines.append('integer maxproc_used, maxflow_used')
1163 lines.append('parameter (maxproc_used = %d)' % maxproc)
1164 lines.append('parameter (maxflow_used = %d)' % maxflow)
1165 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
1166 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
1167 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
1168 lines.append('integer niprocs_d(%d)' % (nfksconfs))
1169
1170 writer.writelines(lines)
1171
1172
1173 def write_genps(self, writer, maxproc, ngraphs, ncolor, maxflow, fortran_model):
1174 """writes the genps.inc file
1175 """
1176 lines = []
1177 lines.append("include 'maxparticles.inc'")
1178 lines.append("include 'maxconfigs.inc'")
1179 lines.append("integer maxproc,ngraphs,ncolor,maxflow")
1180 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \
1181 (maxproc,ngraphs,ncolor,maxflow))
1182 writer.writelines(lines)
1183
1184
1186 """writes the leshouche_info.dat file which contains
1187 the LHA information for all the real emission processes
1188 """
1189 lines = []
1190 lines.append("# I -> IDUP_D")
1191 lines.append("# M -> MOTHUP_D")
1192 lines.append("# C -> ICOLUP_D")
1193 nfksconfs = len(matrix_element.get_fks_info_list())
1194 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1195
1196 maxproc = 0
1197 maxflow = 0
1198 for i, conf in enumerate(matrix_element.get_fks_info_list()):
1199
1200 (newlines, nprocs, nflows) = self.get_leshouche_lines(
1201 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
1202 lines.extend(newlines)
1203 maxproc = max(maxproc, nprocs)
1204 maxflow = max(maxflow, nflows)
1205
1206
1207 open(filename,'w').write('\n'.join(lines))
1208
1209 return nfksconfs, maxproc, maxflow, nexternal
1210
1211
1213 """writes the wrapper which allows one to choose among the different parton luminosities"""
1214
1215 file = \
1216 """double precision function dlum()
1217 implicit none
1218 include 'timing_variables.inc'
1219 integer nfksprocess
1220 common/c_nfksprocess/nfksprocess
1221 call cpu_time(tbefore)
1222 """
1223 if matrix_element.real_processes:
1224 for n, info in enumerate(matrix_element.get_fks_info_list()):
1225 file += \
1226 """if (nfksprocess.eq.%(n)d) then
1227 call dlum_%(n_me)d(dlum)
1228 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1229 file += \
1230 """
1231 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
1232 stop
1233 endif
1234 call cpu_time(tAfter)
1235 tPDF = tPDF + (tAfter-tBefore)
1236 return
1237 end
1238 """
1239 else:
1240 file+= \
1241 """call dlum_0(dlum)
1242 call cpu_time(tAfter)
1243 tPDF = tPDF + (tAfter-tBefore)
1244 return
1245 end
1246 """
1247
1248
1249 writer.writelines(file)
1250 return 0
1251
1252
1254 """writes the wrapper which allows one to choose among the different real matrix elements"""
1255
1256 file = \
1257 """subroutine smatrix_real(p, wgt)
1258 implicit none
1259 include 'nexternal.inc'
1260 double precision p(0:3, nexternal)
1261 double precision wgt
1262 integer nfksprocess
1263 common/c_nfksprocess/nfksprocess
1264 """
1265 for n, info in enumerate(matrix_element.get_fks_info_list()):
1266 file += \
1267 """if (nfksprocess.eq.%(n)d) then
1268 call smatrix_%(n_me)d(p, wgt)
1269 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1270
1271 if matrix_element.real_processes:
1272 file += \
1273 """
1274 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
1275 stop
1276 endif
1277 return
1278 end
1279 """
1280 else:
1281 file += \
1282 """
1283 wgt=0d0
1284 return
1285 end
1286 """
1287
1288 writer.writelines(file)
1289 return 0
1290
1291
1293 """Create the ps files containing the Feynman diagrams for the born process,
1294 as well as for all the real emission processes"""
1295
1296 filename = 'born.ps'
1297 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
1298 get('base_amplitude').get('diagrams'),
1299 filename,
1300 model=matrix_element.born_matrix_element.\
1301 get('processes')[0].get('model'),
1302 amplitude=True, diagram_type='born')
1303 plot.draw()
1304
1305 for n, fksreal in enumerate(matrix_element.real_processes):
1306 filename = 'matrix_%d.ps' % (n + 1)
1307 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
1308 get('base_amplitude').get('diagrams'),
1309 filename,
1310 model=fksreal.matrix_element.\
1311 get('processes')[0].get('model'),
1312 amplitude=True, diagram_type='real')
1313 plot.draw()
1314
1315
1317 """writes the matrix_i.f files which contain the real matrix elements"""
1318
1319
1320
1321 for n, fksreal in enumerate(matrix_element.real_processes):
1322 filename = 'matrix_%d.f' % (n + 1)
1323 self.write_matrix_element_fks(writers.FortranWriter(filename),
1324 fksreal.matrix_element, n + 1,
1325 fortran_model)
1326
1328 """writes the parton_lum_i.f files which contain the parton luminosities.
1329 If no real emission exists, write the one for the born"""
1330
1331 if matrix_element.real_processes:
1332 for n, fksreal in enumerate(matrix_element.real_processes):
1333 filename = 'parton_lum_%d.f' % (n + 1)
1334 self.write_pdf_file(writers.FortranWriter(filename),
1335 fksreal.matrix_element, n + 1,
1336 fortran_model)
1337 else:
1338 filename = 'parton_lum_0.f'
1339 self.write_pdf_file(writers.FortranWriter(filename),
1340 matrix_element.born_matrix_element, 0,
1341 fortran_model)
1342
1343
1345 """generates the files needed for the born amplitude, which will
1346 also be used by the other files in the P* directory"""
1347 pathdir = os.getcwd()
1348
1349 filename = 'born.f'
1350 calls_born, ncolor_born = \
1351 self.write_born_fks(writers.FortranWriter(filename),\
1352 matrix_element,
1353 fortran_model)
1354
1355 filename = 'born_hel.f'
1356 self.write_born_hel(writers.FortranWriter(filename),\
1357 matrix_element,
1358 fortran_model)
1359
1360
1361 filename = 'born_conf.inc'
1362 nconfigs, mapconfigs, s_and_t_channels = \
1363 self.write_configs_file(
1364 writers.FortranWriter(filename),
1365 matrix_element.born_matrix_element,
1366 fortran_model)
1367
1368 filename = 'born_props.inc'
1369 self.write_props_file(writers.FortranWriter(filename),
1370 matrix_element.born_matrix_element,
1371 fortran_model,
1372 s_and_t_channels)
1373
1374 filename = 'born_decayBW.inc'
1375 self.write_decayBW_file(writers.FortranWriter(filename),
1376 s_and_t_channels)
1377
1378 filename = 'born_leshouche.inc'
1379 nflows = self.write_leshouche_file(writers.FortranWriter(filename),
1380 matrix_element.born_matrix_element,
1381 fortran_model)
1382
1383 filename = 'born_nhel.inc'
1384 self.write_born_nhel_file(writers.FortranWriter(filename),
1385 matrix_element.born_matrix_element, nflows,
1386 fortran_model,
1387 ncolor_born)
1388
1389 filename = 'born_ngraphs.inc'
1390 self.write_ngraphs_file(writers.FortranWriter(filename),
1391 matrix_element.born_matrix_element.get_number_of_amplitudes())
1392
1393 filename = 'ncombs.inc'
1394 self.write_ncombs_file(writers.FortranWriter(filename),
1395 matrix_element.born_matrix_element,
1396 fortran_model)
1397
1398 filename = 'born_maxamps.inc'
1399 maxamps = len(matrix_element.get('diagrams'))
1400 maxflows = ncolor_born
1401 self.write_maxamps_file(writers.FortranWriter(filename),
1402 maxamps,
1403 maxflows,
1404 max([len(matrix_element.get('processes')) for me in \
1405 matrix_element.born_matrix_element]),1)
1406
1407 filename = 'config_subproc_map.inc'
1408 self.write_config_subproc_map_file(writers.FortranWriter(filename),
1409 s_and_t_channels)
1410
1411 filename = 'coloramps.inc'
1412 self.write_coloramps_file(writers.FortranWriter(filename),
1413 mapconfigs,
1414 matrix_element.born_matrix_element,
1415 fortran_model)
1416
1417
1418 filename = ['sborn_sf.f', 'sborn_sf_dum.f']
1419 for i, links in enumerate([matrix_element.color_links, []]):
1420 self.write_sborn_sf(writers.FortranWriter(filename[i]),
1421 links,
1422 fortran_model)
1423 self.color_link_files = []
1424 for i in range(len(matrix_element.color_links)):
1425 filename = 'b_sf_%3.3d.f' % (i + 1)
1426 self.color_link_files.append(filename)
1427 self.write_b_sf_fks(writers.FortranWriter(filename),
1428 matrix_element, i,
1429 fortran_model)
1430
1431
1433 """Generates the library for computing the loop matrix elements
1434 necessary for this process using the OLP specified."""
1435
1436
1437 virtual_path = pjoin(export_path,'OLP_virtuals')
1438 if not os.path.exists(virtual_path):
1439 os.makedirs(virtual_path)
1440 filename = os.path.join(virtual_path,'OLE_order.lh')
1441 self.write_lh_order(filename, process_list, OLP)
1442
1443 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\
1444 'Please check the virt_generation.log file in %s.'\
1445 %str(pjoin(virtual_path,'virt_generation.log'))
1446
1447
1448 if OLP=='GoSam':
1449 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1450 'GoSam','makevirt'),pjoin(virtual_path,'makevirt'))
1451 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1452 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc'))
1453 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path)
1454
1455 logger.info('Generating the loop matrix elements with %s...'%OLP)
1456 virt_generation_log = \
1457 open(pjoin(virtual_path,'virt_generation.log'), 'w')
1458 retcode = subprocess.call(['./makevirt'],cwd=virtual_path,
1459 stdout=virt_generation_log, stderr=virt_generation_log)
1460 virt_generation_log.close()
1461
1462 possible_other_extensions = ['so','dylib']
1463 shared_lib_ext='so'
1464 for ext in possible_other_extensions:
1465 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib',
1466 'libgolem_olp.'+ext)):
1467 shared_lib_ext = ext
1468
1469
1470 files_to_check = ['olp_module.mod',str(pjoin('lib',
1471 'libgolem_olp.'+shared_lib_ext))]
1472 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
1473 'Virtuals',f)) for f in files_to_check]):
1474 raise fks_common.FKSProcessError(fail_msg)
1475
1476 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext),
1477 pjoin(export_path,'lib'))
1478
1479
1480 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read()
1481 make_opts=open(pjoin(export_path,'Source','make_opts'),'w')
1482 if OLP=='GoSam':
1483 if platform.system().lower()=='darwin':
1484
1485
1486 make_opts_content=make_opts_content.replace('libOLP=',
1487 'libOLP=-Wl,-lgolem_olp')
1488 else:
1489
1490
1491
1492
1493
1494
1495
1496 make_opts_content=make_opts_content.replace('libOLP=',
1497 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp')
1498
1499
1500 make_opts.write(make_opts_content)
1501 make_opts.close()
1502
1503
1504
1505
1506
1507 proc_to_label = self.parse_contract_file(
1508 pjoin(virtual_path,'OLE_order.olc'))
1509
1510 self.write_BinothLHA_inc(process_list,proc_to_label,\
1511 pjoin(export_path,'SubProcesses'))
1512
1513
1514 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1515
1517 """ Write the file Binoth_proc.inc in each SubProcess directory so as
1518 to provide the right process_label to use in the OLP call to get the
1519 loop matrix element evaluation. The proc_to_label argument is a dictionary
1520 in the format returned by the function parse_contract_file."""
1521
1522 for proc in processes:
1523 name = "P%s"%proc.shell_string()
1524 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \
1525 not leg.get('state')]),
1526 tuple([leg.get('id') for leg in proc.get('legs') if \
1527 leg.get('state')]))
1528 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w')
1529 try:
1530 incFile.write(
1531 """ INTEGER PROC_LABEL
1532 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs]))
1533 except KeyError:
1534 raise fks_common.FKSProcessError('Could not find the target'+\
1535 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\
1536 ' the proc_to_label argument in write_BinothLHA_inc.')
1537 incFile.close()
1538
1540 """ Parses the BLHA contract file, makes sure all parameters could be
1541 understood by the OLP and returns a mapping of the processes (characterized
1542 by the pdg's of the initial and final state particles) to their process
1543 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}.
1544 """
1545
1546 proc_def_to_label = {}
1547
1548 if not os.path.exists(contract_file_path):
1549 raise fks_common.FKSProcessError('Could not find the contract file'+\
1550 ' OLE_order.olc in %s.'%str(contract_file_path))
1551
1552 comment_re=re.compile(r"^\s*#")
1553 proc_def_re=re.compile(
1554 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+
1555 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
1556 line_OK_re=re.compile(r"^.*\|\s*OK")
1557 for line in file(contract_file_path):
1558
1559 if not comment_re.match(line) is None:
1560 continue
1561
1562 proc_def = proc_def_re.match(line)
1563 if not proc_def is None:
1564 if int(proc_def.group('proc_class'))!=1:
1565 raise fks_common.FKSProcessError(
1566 'aMCatNLO can only handle loop processes generated by the OLP with a '+\
1567 'process class attribute equal to 1. Found %s instead in: \n%s'\
1568 %(proc_def.group('proc_class'),line))
1569 in_pdgs=tuple([int(in_pdg) for in_pdg in \
1570 proc_def.group('in_pdgs').split()])
1571 out_pdgs=tuple([int(out_pdg) for out_pdg in \
1572 proc_def.group('out_pdgs').split()])
1573 proc_def_to_label[(in_pdgs,out_pdgs)]=\
1574 int(proc_def.group('proc_label'))
1575 continue
1576
1577 if line_OK_re.match(line) is None:
1578 raise fks_common.FKSProcessError(
1579 'The OLP could not process the following line: \n%s'%line)
1580
1581 return proc_def_to_label
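# Illustrative example of the parsing above: a contract-file line such as
#   21 21 -> 6 -6 | 1 3
# (two incoming gluons, a top pair in the final state, process class 1, process
# label 3) adds the entry {((21, 21), (6, -6)): 3} to the returned mapping.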
1582
1583
1585 """writes the V**** directory inside the P**** directories specified in
1586 dir_name"""
1587
1588 cwd = os.getcwd()
1589
1590 matrix_element = loop_matrix_element
1591
1592
1593 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
1594 try:
1595 os.mkdir(dirpath)
1596 except os.error as error:
1597 logger.warning(error.strerror + " " + dirpath)
1598
1599
1600 name = "V%s" % matrix_element.get('processes')[0].shell_string()
1601 dirpath = os.path.join(dir_name, name)
1602
1603 try:
1604 os.mkdir(dirpath)
1605 except os.error as error:
1606 logger.warning(error.strerror + " " + dirpath)
1607
1608 try:
1609 os.chdir(dirpath)
1610 except os.error:
1611 logger.error('Could not cd to directory %s' % dirpath)
1612 return 0
1613
1614 logger.info('Creating files in directory %s' % name)
1615
1616
1617 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1618
1619 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
1620
1621 filename = 'born_matrix.f'
1622 calls = self.write_bornmatrix(
1623 writers.FortranWriter(filename),
1624 matrix_element,
1625 fortran_model)
1626
1627 filename = 'nexternal.inc'
1628 self.write_nexternal_file(writers.FortranWriter(filename),
1629 nexternal, ninitial)
1630
1631 filename = 'pmass.inc'
1632 self.write_pmass_file(writers.FortranWriter(filename),
1633 matrix_element)
1634
1635 filename = 'ngraphs.inc'
1636 self.write_ngraphs_file(writers.FortranWriter(filename),
1637 len(matrix_element.get_all_amplitudes()))
1638
1639 filename = "loop_matrix.ps"
1640 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
1641 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
1642 filename,
1643 model=matrix_element.get('processes')[0].get('model'),
1644 amplitude='')
1645 logger.info("Drawing loop Feynman diagrams for " + \
1646 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1647 plot.draw()
1648
1649 filename = "born_matrix.ps"
1650 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
1651 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\
1652 get('model'),amplitude='')
1653 logger.info("Generating born Feynman diagrams for " + \
1654 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1655 plot.draw()
1656
1657
1658
1659
1660 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
1661 open('unique_id.inc','w').write(
1662 """ integer UNIQUE_ID
1663 parameter(UNIQUE_ID=1)""")
1664
1665 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
1666 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
1667 'MadLoopCommons.f','MadLoopParams.inc']
1668
1669
1670 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
1671 pjoin('..','MadLoop5_resources'))
1672
1673 for file in linkfiles:
1674 ln('../../%s' % file)
1675
1676 os.system("ln -s ../../makefile_loop makefile")
1677
1678 linkfiles = ['mpmodule.mod']
1679
1680 for file in linkfiles:
1681 ln('../../../lib/%s' % file)
1682
1683 linkfiles = ['coef_specs.inc']
1684
1685 for file in linkfiles:
1686 ln('../../../Source/DHELAS/%s' % file)
1687
1688
1689 os.chdir(cwd)
1690
1691 if not calls:
1692 calls = 0
1693 return calls
1694
1696 """computes the QED/QCD orders from the number of external particles
1697 and the weighted order"""
1698
1699
1700 QED = weighted - nexternal + 2
1701 QCD = weighted - 2 * QED
1702 return QED, QCD
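# Worked example (assuming the usual weighting QCD=1, QED=2 and a tree-level
# amplitude with nexternal-2 coupling powers): for g g > g g, nexternal=4 and
# WEIGHTED=2, so QED = 2 - 4 + 2 = 0 and QCD = 2 - 2*0 = 2.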
1703
1704
1705
1706
1707
1708
1709
1711 """Creates the OLE_order.lh file. This function should be edited according
1712 to the OLP which is used. For now it is generic."""
1713
1714
1715 if len(process_list)==0:
1716 raise fks_common.FKSProcessError('No matrix elements provided to '+\
1717 'the function write_lh_order.')
1718 return
1719
1720
1721
1722 orders = process_list[0].get('orders')
1723 if 'QED' in orders.keys() and 'QCD' in orders.keys():
1724 QED=orders['QED']
1725 QCD=orders['QCD']
1726 elif 'QED' in orders.keys():
1727 QED=orders['QED']
1728 QCD=0
1729 elif 'QCD' in orders.keys():
1730 QED=0
1731 QCD=orders['QCD']
1732 else:
1733 QED, QCD = self.get_qed_qcd_orders_from_weighted(\
1734 len(process_list[0].get('legs')),
1735 orders['WEIGHTED'])
1736
1737 replace_dict = {}
1738 replace_dict['mesq'] = 'CHaveraged'
1739 replace_dict['corr'] = ' '.join(process_list[0].\
1740 get('perturbation_couplings'))
1741 replace_dict['irreg'] = 'CDR'
1742 replace_dict['aspow'] = QCD
1743 replace_dict['aepow'] = QED
1744 replace_dict['modelfile'] = './param_card.dat'
1745 replace_dict['params'] = 'alpha_s'
1746 proc_lines=[]
1747 for proc in process_list:
1748 proc_lines.append('%s -> %s' % \
1749 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
1750 ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
1751 replace_dict['pdgs'] = '\n'.join(proc_lines)
1752 replace_dict['symfin'] = 'Yes'
1753 content = \
1754 "#OLE_order written by MadGraph5_aMC@NLO\n\
1755 \n\
1756 MatrixElementSquareType %(mesq)s\n\
1757 CorrectionType %(corr)s\n\
1758 IRregularisation %(irreg)s\n\
1759 AlphasPower %(aspow)d\n\
1760 AlphaPower %(aepow)d\n\
1761 NJetSymmetrizeFinal %(symfin)s\n\
1762 ModelFile %(modelfile)s\n\
1763 Parameters %(params)s\n\
1764 \n\
1765 # process\n\
1766 %(pdgs)s\n\
1767 " % replace_dict
1768
1769 file = open(filename, 'w')
1770 file.write(content)
1771 file.close()
1772 return
1773
1774
1775
1776
1777
1778
1780 """Export a matrix element to a born.f file in MadFKS format"""
1781
1782 matrix_element = fksborn.born_matrix_element
1783
1784 if not matrix_element.get('processes') or \
1785 not matrix_element.get('diagrams'):
1786 return 0
1787
1788 if not isinstance(writer, writers.FortranWriter):
1789 raise writers.FortranWriter.FortranWriterError(\
1790 "writer not FortranWriter")
1791
1792 writers.FortranWriter.downcase = False
1793
1794 replace_dict = {}
1795
1796
1797 info_lines = self.get_mg5_info_lines()
1798 replace_dict['info_lines'] = info_lines
1799
1800
1801 process_lines = self.get_process_info_lines(matrix_element)
1802 replace_dict['process_lines'] = process_lines
1803
1804
1805
1806 ncomb = matrix_element.get_helicity_combinations()
1807 replace_dict['ncomb'] = ncomb
1808
1809
1810 helicity_lines = self.get_helicity_lines(matrix_element)
1811 replace_dict['helicity_lines'] = helicity_lines
1812
1813
1814 ic_line = self.get_ic_line(matrix_element)
1815 replace_dict['ic_line'] = ic_line
1816
1817
1818
1819
1820
1821
1822 ngraphs = matrix_element.get_number_of_amplitudes()
1823 replace_dict['ngraphs'] = ngraphs
1824
1825
1826 nwavefuncs = matrix_element.get_number_of_wavefunctions()
1827 replace_dict['nwavefuncs'] = nwavefuncs
1828
1829
1830 ncolor = max(1, len(matrix_element.get('color_basis')))
1831 replace_dict['ncolor'] = ncolor
1832
1833
1834 color_data_lines = self.get_color_data_lines(matrix_element)
1835 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
1836
1837
1838 helas_calls = fortran_model.get_matrix_element_calls(\
1839 matrix_element)
1840 replace_dict['helas_calls'] = "\n".join(helas_calls)
1841
1842
1843 amp2_lines = self.get_amp2_lines(matrix_element)
1844 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
1845
1846
1847 jamp_lines = self.get_JAMP_lines(matrix_element)
1848 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
1849
1850
1851 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
1852 replace_dict['wavefunctionsize'] = 20
1853 else:
1854 replace_dict['wavefunctionsize'] = 8
1855
1856
1857 ij_lines = self.get_ij_lines(fksborn)
1858 replace_dict['ij_lines'] = '\n'.join(ij_lines)
1859
1860
1861 den_factor_lines = self.get_den_factor_lines(fksborn)
1862 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
1863
1864
1865 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1)
1866
1867 file = open(os.path.join(_file_path, \
1868 'iolibs/template_files/born_fks.inc')).read()
1869 file = file % replace_dict
1870
1871
1872 writer.writelines(file)
1873
1874 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
1875
1876
1878 """Export a matrix element to a born_hel.f file in MadFKS format"""
1879
1880 matrix_element = fksborn.born_matrix_element
1881
1882 if not matrix_element.get('processes') or \
1883 not matrix_element.get('diagrams'):
1884 return 0
1885
1886 if not isinstance(writer, writers.FortranWriter):
1887 raise writers.FortranWriter.FortranWriterError(\
1888 "writer not FortranWriter")
1889
1890 writers.FortranWriter.downcase = False
1891
1892 replace_dict = {}
1893
1894
1895 info_lines = self.get_mg5_info_lines()
1896 replace_dict['info_lines'] = info_lines
1897
1898
1899 process_lines = self.get_process_info_lines(matrix_element)
1900 replace_dict['process_lines'] = process_lines
1901
1902
1903
1904 ncomb = matrix_element.get_helicity_combinations()
1905 replace_dict['ncomb'] = ncomb
1906
1907
1908 helicity_lines = self.get_helicity_lines(matrix_element)
1909 replace_dict['helicity_lines'] = helicity_lines
1910
1911
1912 ic_line = self.get_ic_line(matrix_element)
1913 replace_dict['ic_line'] = ic_line
1914
1915
1916
1917
1918
1919
1920 ngraphs = matrix_element.get_number_of_amplitudes()
1921 replace_dict['ngraphs'] = ngraphs
1922
1923
1924 nwavefuncs = matrix_element.get_number_of_wavefunctions()
1925 replace_dict['nwavefuncs'] = nwavefuncs
1926
1927
1928 ncolor = max(1, len(matrix_element.get('color_basis')))
1929 replace_dict['ncolor'] = ncolor
1930
1931
1932 color_data_lines = self.get_color_data_lines(matrix_element)
1933 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
1934
1935
1936 amp2_lines = self.get_amp2_lines(matrix_element)
1937 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
1938
1939
1940 jamp_lines = self.get_JAMP_lines(matrix_element)
1941 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
1942
1943
1944 den_factor_lines = self.get_den_factor_lines(fksborn)
1945 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
1946
1947
1948 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
1949
1950 file = open(os.path.join(_file_path, \
1951 'iolibs/template_files/born_fks_hel.inc')).read()
1952 file = file % replace_dict
1953
1954
1955 writer.writelines(file)
1956
1957 return
1958
1959
1960
1961
1962
1963
1965 """Creates the sborn_sf.f file, containing the calls to the different
1966 color linked borns"""
1967
1968 replace_dict = {}
1969 nborns = len(color_links)
1970 ifkss = []
1971 iborns = []
1972 mms = []
1973 nns = []
1974 iflines = "\n"
1975
1976
1977 file = """subroutine sborn_sf(p_born,m,n,wgt)
1978 implicit none
1979 include "nexternal.inc"
1980 double precision p_born(0:3,nexternal-1),wgt
1981 double complex wgt1(2)
1982 integer m,n \n"""
1983
1984 if nborns > 0:
1985
1986 for i, c_link in enumerate(color_links):
1987 iborn = i+1
1988
1989 iff = {True : 'if', False : 'elseif'}[i==0]
1990
1991 m, n = c_link['link']
1992
1993 if m != n:
1994 iflines += \
1995 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
1996 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
1997 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
1998 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
1999 else:
2000 iflines += \
2001 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2002 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
2003 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2004 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2005
2006
2007 file += iflines + \
2008 """else
2009 wgt = 0d0
2010 endif
2011
2012 return
2013 end"""
2014 elif nborns == 0:
2015
2016 file+="""
2017 c This is a dummy function because
2018 c this subdir has no soft singularities
2019 wgt = 0d0
2020
2021 return
2022 end"""
2023
2024
2025 writer.writelines(file)
2026
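    # -------------------------------------------------------------------------
    # Illustrative sketch (not part of the exporter): assuming two hypothetical
    # color links, (1,2) and (2,2), the loop above would assemble an if/elseif
    # chain in sborn_sf.f of the form
    #
    #   c b_sf_001 links partons 1 and 2
    #   if ((m.eq.1 .and. n.eq.2).or.(m.eq.2 .and. n.eq.1)) then
    #   call sb_sf_001(p_born,wgt)
    #
    #   c b_sf_002 links partons 2 and 2
    #   elseif (m.eq.2 .and. n.eq.2) then
    #   call sb_sf_002(p_born,wgt)
    #
    # closed by the 'else ... wgt = 0d0 ... endif' block appended afterwards.
    # -------------------------------------------------------------------------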
2027
2028
2029
2030
2031
2033 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format"""
2034
2035 matrix_element = copy.copy(fksborn.born_matrix_element)
2036
2037 if not matrix_element.get('processes') or \
2038 not matrix_element.get('diagrams'):
2039 return 0
2040
2041 if not isinstance(writer, writers.FortranWriter):
2042 raise writers.FortranWriter.FortranWriterError(\
2043 "writer not FortranWriter")
2044
2045 writers.FortranWriter.downcase = False
2046
2047 iborn = i + 1
2048 link = fksborn.color_links[i]
2049
2050 replace_dict = {}
2051
2052 replace_dict['iborn'] = iborn
2053
2054
2055 info_lines = self.get_mg5_info_lines()
2056 replace_dict['info_lines'] = info_lines
2057
2058
2059 process_lines = self.get_process_info_lines(matrix_element)
2060 replace_dict['process_lines'] = process_lines + \
2061 "\nc spectators: %d %d \n" % tuple(link['link'])
2062
2063
2064 ncomb = matrix_element.get_helicity_combinations()
2065 replace_dict['ncomb'] = ncomb
2066
2067
2068 helicity_lines = self.get_helicity_lines(matrix_element)
2069 replace_dict['helicity_lines'] = helicity_lines
2070
2071
2072 ic_line = self.get_ic_line(matrix_element)
2073 replace_dict['ic_line'] = ic_line
2074
2075
2076 den_factor_lines = self.get_den_factor_lines(fksborn)
2077 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2078
2079
2080 ngraphs = matrix_element.get_number_of_amplitudes()
2081 replace_dict['ngraphs'] = ngraphs
2082
2083
2084 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2085 replace_dict['nwavefuncs'] = nwavefuncs
2086
2087
2088 ncolor1 = max(1, len(link['orig_basis']))
2089 replace_dict['ncolor1'] = ncolor1
2090 ncolor2 = max(1, len(link['link_basis']))
2091 replace_dict['ncolor2'] = ncolor2
2092
2093
2094 color_data_lines = self.get_color_data_lines_from_color_matrix(\
2095 link['link_matrix'])
2096 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2097
2098
2099 amp2_lines = self.get_amp2_lines(matrix_element)
2100 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2101
2102
2103 jamp_lines = self.get_JAMP_lines(matrix_element)
2104 new_jamp_lines = []
2105 for line in jamp_lines:
2106 line = string.replace(line, 'JAMP', 'JAMP1')
2107 new_jamp_lines.append(line)
2108 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines)
2109
2110 matrix_element.set('color_basis', link['link_basis'] )
2111 jamp_lines = self.get_JAMP_lines(matrix_element)
2112 new_jamp_lines = []
2113 for line in jamp_lines:
2114 line = string.replace(line, 'JAMP', 'JAMP2')
2115 new_jamp_lines.append(line)
2116 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines)
2117
2118
2119
2120 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2121
2122 file = open(os.path.join(_file_path, \
2123 'iolibs/template_files/b_sf_xxx_fks.inc')).read()
2124 file = file % replace_dict
2125
2126
2127 writer.writelines(file)
2128
2129 return 0 , ncolor1
2130
2131
2132
2133
2134
2135
2137 """Write the born_nhel.inc file for MG4."""
2138
2139 ncomb = matrix_element.get_helicity_combinations()
2140 file = " integer max_bhel, max_bcol \n"
2141 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \
2142 (ncomb, nflows)
2143
2144
2145 writer.writelines(file)
2146
2147 return True
2148
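    # Illustrative sketch (hypothetical values): for ncomb=16 helicity
    # combinations and nflows=2 color flows, the string passed to the writer
    # above is
    #
    #    integer max_bhel, max_bcol
    #   parameter (max_bhel=16)
    #   parameter(max_bcol=2)
    #
    # (the FortranWriter then takes care of the final formatting).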
2149
2150
2151
2153         """Writes the content of nFKSconfigs.inc, which just gives the
2154         total number of FKS configurations as a parameter.
2155         nFKSconfigs is always >= 1 (a fake configuration is used for LOonly)"""
2156 replace_dict = {}
2157 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)
2158 content = \
2159 """ INTEGER FKS_CONFIGS
2160 PARAMETER (FKS_CONFIGS=%(nconfs)d)
2161
2162 """ % replace_dict
2163
2164 writer.writelines(content)
2165
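    # Illustrative sketch (hypothetical count): for a process with 5 FKS
    # configurations, the content written to nFKSconfigs.inc above reduces to
    #
    #   INTEGER FKS_CONFIGS
    #   PARAMETER (FKS_CONFIGS=5)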
2166
2167
2168
2169
2171         """Writes the content of fks_info.inc, which lists the information on the
2172         possible splittings of the born ME.
2173         nconfs is always >= 1 (a fake configuration is used for LOonly).
2174         The fake configuration uses an 'antigluon' (id -21, color=8) as i_fks and
2175         the last colored particle as j_fks."""
2176
2177 replace_dict = {}
2178 fks_info_list = fksborn.get_fks_info_list()
2179 replace_dict['nconfs'] = max(len(fks_info_list), 1)
2180
2181
2182 if len(fks_info_list) > 0:
2183 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \
2184 for info in fks_info_list])
2185 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \
2186 for info in fks_info_list])
2187
2188 col_lines = []
2189 pdg_lines = []
2190 charge_lines = []
2191 fks_j_from_i_lines = []
2192 for i, info in enumerate(fks_info_list):
2193 col_lines.append( \
2194 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2195 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) ))
2196 pdg_lines.append( \
2197 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2198 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
2199 charge_lines.append(\
2200 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\
2201 % (i + 1, ', '.join('%19.15fd0' % charg\
2202 for charg in fksborn.real_processes[info['n_me']-1].charges) ))
2203 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\
2204 i + 1))
2205 else:
2206
2207
2208
2209 bornproc = fksborn.born_matrix_element.get('processes')[0]
2210 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
2211 colors = [l.get('color') for l in bornproc.get('legs')] + [8]
2212 charges = [0.] * len(colors)
2213
2214 fks_i = len(colors)
2215
2216
2217 fks_j=1
2218 for cpos, col in enumerate(colors[:-1]):
2219 if col != 1:
2220 fks_j = cpos+1
2221
2222 fks_i_values = str(fks_i)
2223 fks_j_values = str(fks_j)
2224 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2225 % ', '.join([str(col) for col in colors])]
2226 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2227 % ', '.join([str(pdg) for pdg in pdgs])]
2228 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2229 % ', '.join('%19.15fd0' % charg for charg in charges)]
2230 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \
2231 % (fks_i, fks_j)]
2232
2233
2234 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values
2235 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values
2236 replace_dict['col_lines'] = '\n'.join(col_lines)
2237 replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
2238 replace_dict['charge_lines'] = '\n'.join(charge_lines)
2239 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)
2240
2241 content = \
2242 """ INTEGER IPOS, JPOS
2243 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d)
2244 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL)
2245 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL)
2246 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL)
2247
2248 %(fks_i_line)s
2249 %(fks_j_line)s
2250
2251 %(fks_j_from_i_lines)s
2252
2253 C
2254 C Particle type:
2255 C octet = 8, triplet = 3, singlet = 1
2256 %(col_lines)s
2257
2258 C
2259 C Particle type according to PDG:
2260 C
2261 %(pdg_lines)s
2262
2263 C
2264 C Particle charge:
2265       C charge is set to 0. for QCD corrections, where it is irrelevant
2266 %(charge_lines)s
2267 """ % replace_dict
2268 if not isinstance(writer, writers.FortranWriter):
2269 raise writers.FortranWriter.FortranWriterError(\
2270 "writer not FortranWriter")
2271
2272 writers.FortranWriter.downcase = False
2273
2274 writer.writelines(content)
2275
2276 return True
2277
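    # Illustrative sketch (all values hypothetical): for a real-emission
    # process with nexternal=5 and two FKS configurations, the DATA statements
    # assembled above would look like
    #
    #   data fks_i_D / 5, 5 /
    #   data fks_j_D / 1, 2 /
    #   DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / 8, 8, 3, -3, 8 /
    #   DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / 21, 21, 2, -2, 21 /
    #
    # with one PARTICLE_TYPE_D/PDG_TYPE_D/PARTICLE_CHARGE_D line per
    # configuration, plus the FKS_J_FROM_I_D initialisation lines.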
2278
2279
2280
2281
2282
2284 """Export a matrix element to a matrix.f file in MG4 madevent format"""
2285
2286 if not matrix_element.get('processes') or \
2287 not matrix_element.get('diagrams'):
2288 return 0,0
2289
2290 if not isinstance(writer, writers.FortranWriter):
2291 raise writers.FortranWriter.FortranWriterError(\
2292 "writer not FortranWriter")
2293
2294 writers.FortranWriter.downcase = False
2295
2296 replace_dict = {}
2297 replace_dict['N_me'] = n
2298
2299
2300 info_lines = self.get_mg5_info_lines()
2301 replace_dict['info_lines'] = info_lines
2302
2303
2304 process_lines = self.get_process_info_lines(matrix_element)
2305 replace_dict['process_lines'] = process_lines
2306
2307
2308 ncomb = matrix_element.get_helicity_combinations()
2309 replace_dict['ncomb'] = ncomb
2310
2311
2312 helicity_lines = self.get_helicity_lines(matrix_element)
2313 replace_dict['helicity_lines'] = helicity_lines
2314
2315
2316 ic_line = self.get_ic_line(matrix_element)
2317 replace_dict['ic_line'] = ic_line
2318
2319
2320
2321 den_factor_line = self.get_den_factor_line(matrix_element)
2322 replace_dict['den_factor_line'] = den_factor_line
2323
2324
2325 ngraphs = matrix_element.get_number_of_amplitudes()
2326 replace_dict['ngraphs'] = ngraphs
2327
2328
2329 ncolor = max(1, len(matrix_element.get('color_basis')))
2330 replace_dict['ncolor'] = ncolor
2331
2332
2333 color_data_lines = self.get_color_data_lines(matrix_element)
2334 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2335
2336
2337 helas_calls = fortran_model.get_matrix_element_calls(\
2338 matrix_element)
2339 replace_dict['helas_calls'] = "\n".join(helas_calls)
2340
2341
2342
2343 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2344 replace_dict['nwavefuncs'] = nwavefuncs
2345
2346
2347 amp2_lines = self.get_amp2_lines(matrix_element)
2348 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2349
2350
2351 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2352 replace_dict['wavefunctionsize'] = 20
2353 else:
2354 replace_dict['wavefunctionsize'] = 8
2355
2356
2357 jamp_lines = self.get_JAMP_lines(matrix_element)
2358
2359 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2360
2361 realfile = open(os.path.join(_file_path, \
2362 'iolibs/template_files/realmatrix_fks.inc')).read()
2363
2364 realfile = realfile % replace_dict
2365
2366
2367 writer.writelines(realfile)
2368
2369 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2370
2371
2372
2373
2374
2376
2377 """Write the auto_dsig.f file for MadFKS, which contains
2378 pdf call information"""
2379
2380 if not matrix_element.get('processes') or \
2381 not matrix_element.get('diagrams'):
2382 return 0
2383
2384 nexternal, ninitial = matrix_element.get_nexternal_ninitial()
2385
2386 if ninitial < 1 or ninitial > 2:
2387 raise writers.FortranWriter.FortranWriterError, \
2388 """Need ninitial = 1 or 2 to write auto_dsig file"""
2389
2390 replace_dict = {}
2391
2392 replace_dict['N_me'] = n
2393
2394
2395 info_lines = self.get_mg5_info_lines()
2396 replace_dict['info_lines'] = info_lines
2397
2398
2399 process_lines = self.get_process_info_lines(matrix_element)
2400 replace_dict['process_lines'] = process_lines
2401
2402 pdf_vars, pdf_data, pdf_lines = \
2403 self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
2404 replace_dict['pdf_vars'] = pdf_vars
2405 replace_dict['pdf_data'] = pdf_data
2406 replace_dict['pdf_lines'] = pdf_lines
2407
2408 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \
2409 self.get_pdf_lines_mir(matrix_element, ninitial, False, True)
2410 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr
2411
2412 file = open(os.path.join(_file_path, \
2413 'iolibs/template_files/parton_lum_n_fks.inc')).read()
2414 file = file % replace_dict
2415
2416
2417 writer.writelines(file)
2418
2419
2420
2421
2422
2423
2424
2426 """Write the coloramps.inc file for MadEvent"""
2427
2428 lines = []
2429 lines.append( "logical icolamp(%d,%d,1)" % \
2430 (max(len(matrix_element.get('color_basis').keys()), 1),
2431 len(mapconfigs)))
2432
2433 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1)
2434
2435
2436 writer.writelines(lines)
2437
2438 return True
2439
2440
2441
2442
2443
2444
2446 """Write the leshouche.inc file for MG4"""
2447
2448
2449 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2450
2451 lines = []
2452 for iproc, proc in enumerate(matrix_element.get('processes')):
2453 legs = proc.get_legs_with_decays()
2454 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \
2455 (iproc + 1, nexternal,
2456 ",".join([str(l.get('id')) for l in legs])))
2457 for i in [1, 2]:
2458 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \
2459 (i, iproc + 1, nexternal,
2460 ",".join([ "%3r" % 0 ] * ninitial + \
2461 [ "%3r" % i ] * (nexternal - ninitial))))
2462
2463
2464
2465 if iproc == 0:
2466
2467 if not matrix_element.get('color_basis'):
2468 for i in [1, 2]:
2469 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \
2470 (i, nexternal,
2471 ",".join([ "%3r" % 0 ] * nexternal)))
2472 color_flow_list = []
2473
2474 else:
2475
2476 repr_dict = {}
2477 for l in legs:
2478 repr_dict[l.get('number')] = \
2479 proc.get('model').get_particle(l.get('id')).get_color()\
2480 * (-1)**(1+l.get('state'))
2481
2482 color_flow_list = \
2483 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2484 ninitial)
2485
2486 for cf_i, color_flow_dict in enumerate(color_flow_list):
2487 for i in [0, 1]:
2488 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \
2489 (i + 1, cf_i + 1, nexternal,
2490 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
2491 for l in legs])))
2492
2493
2494 writer.writelines(lines)
2495
2496 return len(color_flow_list)
2497
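    # Illustrative sketch (hypothetical 2 -> 2 process, g g > t t~, so
    # nexternal=4): the first lines written above are
    #
    #   DATA (IDUP(i,1),i=1,4)/21,21,6,-6/
    #   DATA (MOTHUP(1,i,  1),i=1, 4)/  0,  0,  1,  1/
    #   DATA (MOTHUP(2,i,  1),i=1, 4)/  0,  0,  2,  2/
    #
    # followed by one pair of ICOLUP lines per color flow of the first process.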
2498
2499
2500
2501
2502
2504 """Write the configs.inc file for MadEvent"""
2505
2506
2507 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2508 lines = []
2509
2510 iconfig = 0
2511
2512 s_and_t_channels = []
2513 mapconfigs = []
2514
2515 model = matrix_element.get('processes')[0].get('model')
2516
2517
2518 base_diagrams = matrix_element.get('base_amplitude').get('diagrams')
2519 model = matrix_element.get('base_amplitude').get('process').get('model')
2520 minvert = min([max([len(vert.get('legs')) for vert in \
2521 diag.get('vertices')]) for diag in base_diagrams])
2522
2523 for idiag, diag in enumerate(base_diagrams):
2524 if any([len(vert.get('legs')) > minvert for vert in
2525 diag.get('vertices')]):
2526
2527 continue
2528 iconfig = iconfig + 1
2529 helas_diag = matrix_element.get('diagrams')[idiag]
2530 mapconfigs.append(helas_diag.get('number'))
2531 lines.append("# Diagram %d, Amplitude %d" % \
2532 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number']))
2533
2534 lines.append("data mapconfig(%4d)/%4d/" % (iconfig,
2535 helas_diag.get('amplitudes')[0]['number']))
2536
2537
2538
2539 schannels, tchannels = helas_diag.get('amplitudes')[0].\
2540 get_s_and_t_channels(ninitial, model, 990)
2541
2542 s_and_t_channels.append([schannels, tchannels])
2543
2544
2545 allchannels = schannels
2546 if len(tchannels) > 1:
2547
2548 allchannels = schannels + tchannels
2549
2550 for vert in allchannels:
2551 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
2552 last_leg = vert.get('legs')[-1]
2553 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \
2554 (last_leg.get('number'), iconfig, len(daughters),
2555 ",".join(["%3d" % d for d in daughters])))
2556 if vert in schannels:
2557 lines.append("data sprop(%4d,%4d)/%8d/" % \
2558 (last_leg.get('number'), iconfig,
2559 last_leg.get('id')))
2560 elif vert in tchannels[:-1]:
2561 lines.append("data tprid(%4d,%4d)/%8d/" % \
2562 (last_leg.get('number'), iconfig,
2563 abs(last_leg.get('id'))))
2564
2565
2566 lines.append("# Number of configs")
2567 lines.append("data mapconfig(0)/%4d/" % iconfig)
2568
2569
2570 writer.writelines(lines)
2571
2572 return iconfig, mapconfigs, s_and_t_channels
2573
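    # Illustrative sketch (hypothetical single s-channel configuration where
    # legs 3 and 4 merge into an intermediate Z, pdg 23):
    #
    #   # Diagram 1, Amplitude 1
    #   data mapconfig(   1)/   1/
    #   data (iforest(i, -1,   1),i=1,2)/  3,  4/
    #   data sprop(  -1,   1)/      23/
    #   # Number of configs
    #   data mapconfig(0)/   1/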
2574
2575
2576
2577
2578
2580 """Write the decayBW.inc file for MadEvent"""
2581
2582 lines = []
2583
2584 booldict = {False: ".false.", True: ".false."}
2585
2586
2587 for iconf, config in enumerate(s_and_t_channels):
2588 schannels = config[0]
2589 for vertex in schannels:
2590
2591
2592 leg = vertex.get('legs')[-1]
2593 lines.append("data gForceBW(%d,%d)/%s/" % \
2594 (leg.get('number'), iconf + 1,
2595 booldict[leg.get('from_group')]))
2596
2597
2598 writer.writelines(lines)
2599
2600 return True
2601
2602
2603
2604
2605
2607 """Write the dname.mg file for MG4"""
2608
2609 line = "DIRNAME=P%s" % \
2610 matrix_element.get('processes')[0].shell_string()
2611
2612
2613 writer.write(line + "\n")
2614
2615 return True
2616
2617
2618
2619
2620
2622 """Write the iproc.dat file for MG4"""
2623
2624 line = "%d" % (me_number + 1)
2625
2626
2627 for line_to_write in writer.write_line(line):
2628 writer.write(line_to_write)
2629 return True
2630
2631
2632
2633
2634
2635
2636
2637
2638
2639
2640
2641     def get_fks_j_from_i_lines(self, me, i):
2642         """generate the lines for fks.inc describing the initialization of the
2643         fks_j_from_i array"""
2644 lines = []
2645 if not me.isfinite:
2646 for ii, js in me.fks_j_from_i.items():
2647 if js:
2648 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2649 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
2650 else:
2651 lines.append('DATA (FKS_J_FROM_I_D(%d, JPOS), JPOS = 0, %d) / %d, %s /' \
2652 % (2, 1, 1, '1'))
2653 lines.append('')
2654
2655 return lines
2656
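    # Illustrative sketch (hypothetical values): for configuration i=3 in which
    # parton ii=5 can arise from splittings of j in {1, 2, 4}, the line
    # generated above reads
    #
    #   DATA (FKS_J_FROM_I_D(3, 5, JPOS), JPOS = 0, 3) / 3, 1, 2, 4 /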
2657
2658
2659
2660
2662
2663         """Generate the leshouche information lines for MG4 for the ime-th matrix element
2664         (returned to the caller rather than written to file)"""
2664
2665
2666 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2667
2668 lines = []
2669 for iproc, proc in enumerate(matrix_element.get('processes')):
2670 legs = proc.get_legs_with_decays()
2671 lines.append("I %4d %4d %s" % \
2672 (ime, iproc + 1,
2673 " ".join([str(l.get('id')) for l in legs])))
2674 for i in [1, 2]:
2675 lines.append("M %4d %4d %4d %s" % \
2676 (ime, i, iproc + 1,
2677 " ".join([ "%3d" % 0 ] * ninitial + \
2678 [ "%3d" % i ] * (nexternal - ninitial))))
2679
2680
2681
2682 if iproc == 0:
2683
2684 if not matrix_element.get('color_basis'):
2685 for i in [1, 2]:
2686 lines.append("C %4d %4d 1 %s" % \
2687 (ime, i,
2688 " ".join([ "%3d" % 0 ] * nexternal)))
2689 color_flow_list = []
2690 nflow = 1
2691
2692 else:
2693
2694 repr_dict = {}
2695 for l in legs:
2696 repr_dict[l.get('number')] = \
2697 proc.get('model').get_particle(l.get('id')).get_color()\
2698 * (-1)**(1+l.get('state'))
2699
2700 color_flow_list = \
2701 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2702 ninitial)
2703
2704 for cf_i, color_flow_dict in enumerate(color_flow_list):
2705 for i in [0, 1]:
2706 lines.append("C %4d %4d %4d %s" % \
2707 (ime, i + 1, cf_i + 1,
2708 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2709 for l in legs])))
2710
2711 nflow = len(color_flow_list)
2712
2713 nproc = len(matrix_element.get('processes'))
2714
2715 return lines, nproc, nflow
2716
2717
2718
2719
2720
2721     def get_den_factor_lines(self, fks_born):
2722         """returns the lines with the information on the denominator, taking care
2723         of the identical-particle factors in the various real emissions"""
2724
2725 lines = []
2726 info_list = fks_born.get_fks_info_list()
2727 if info_list:
2728
2729 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list))
2730 lines.append('DATA IDEN_VALUES /' + \
2731 ', '.join(['%d' % (
2732 fks_born.born_matrix_element.get_denominator_factor() ) \
2733 for info in info_list]) + '/')
2734 else:
2735
2736 lines.append('INTEGER IDEN_VALUES(1)')
2737 lines.append('DATA IDEN_VALUES / %d /' \
2738 % fks_born.born_matrix_element.get_denominator_factor())
2739
2740 return lines
2741
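    # Illustrative sketch (hypothetical values): with 4 FKS configurations and
    # a born identical-particle factor of 2, the returned lines are
    #
    #   INTEGER IDEN_VALUES(4)
    #   DATA IDEN_VALUES /2, 2, 2, 2/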
2742
2743
2744
2745
2746     def get_ij_lines(self, fks_born):
2747         """returns the lines with the number (position) of the born particle
2748         that undergoes the splitting"""
2749 info_list = fks_born.get_fks_info_list()
2750 lines = []
2751 if info_list:
2752
2753 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list))
2754 lines.append('DATA IJ_VALUES /' + \
2755 ', '.join(['%d' % info['fks_info']['ij'] for info in info_list]) + '/')
2756 else:
2757
2758 lines.append('INTEGER IJ_VALUES(1)')
2759 lines.append('DATA IJ_VALUES / 1 /')
2760
2761 return lines
2762
2763
2764     def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
2765 mirror = False):
2766 """Generate the PDF lines for the auto_dsig.f file"""
2767
2768 processes = matrix_element.get('processes')
2769 model = processes[0].get('model')
2770
2771 pdf_definition_lines = ""
2772 pdf_data_lines = ""
2773 pdf_lines = ""
2774
2775 if ninitial == 1:
2776 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
2777 for i, proc in enumerate(processes):
2778 process_line = proc.base_string()
2779 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
2780 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
2781 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
2782 else:
2783
2784 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
2785 p in processes]))),
2786 sorted(list(set([p.get_initial_pdg(2) for \
2787 p in processes])))]
2788
2789
2790 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
2791 sum(initial_states,[])])
2792 for key,val in pdf_codes.items():
2793 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')
2794
2795
2796 pdgtopdf = {21: 0, 22: 7}
2797
2798 for pdg in sum(initial_states,[]):
2799 if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
2800 pdgtopdf[pdg] = pdg
2801 elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
2802
2803 pdgtopdf[pdg] = 6000000 + pdg
2804
2805
2806 for i in [0,1]:
2807 pdf_definition_lines += "DOUBLE PRECISION " + \
2808 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
2809 for pdg in \
2810 initial_states[i]]) + \
2811 "\n"
2812
2813
2814 for i in [0,1]:
2815 pdf_data_lines += "DATA " + \
2816 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
2817 for pdg in initial_states[i]]) + \
2818 "/%d*1D0/" % len(initial_states[i]) + \
2819 "\n"
2820
2821
2822 for i, init_states in enumerate(initial_states):
2823 if not mirror:
2824 ibeam = i + 1
2825 else:
2826 ibeam = 2 - i
2827 if subproc_group:
2828 pdf_lines = pdf_lines + \
2829 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
2830 % (ibeam, ibeam)
2831 else:
2832 pdf_lines = pdf_lines + \
2833 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
2834 % (ibeam, ibeam)
2835
2836 for initial_state in init_states:
2837 if initial_state in pdf_codes.keys():
2838 if subproc_group:
2839 if abs(pdgtopdf[initial_state]) <= 7:
2840 pdf_lines = pdf_lines + \
2841 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
2842 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
2843 (pdf_codes[initial_state],
2844 i + 1, ibeam, pdgtopdf[initial_state],
2845 ibeam, ibeam)
2846 else:
2847
2848 pdf_lines = pdf_lines + \
2849                             ("c setting parton flavours other than quark, gluon, photon to 0d0\n" + \
2850 "%s%d=0d0\n") % \
2851 (pdf_codes[initial_state],i + 1)
2852 else:
2853 if abs(pdgtopdf[initial_state]) <= 7:
2854 pdf_lines = pdf_lines + \
2855 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
2856 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
2857 (pdf_codes[initial_state],
2858 i + 1, ibeam, pdgtopdf[initial_state],
2859 ibeam, ibeam)
2860 else:
2861
2862 pdf_lines = pdf_lines + \
2863                             ("c setting parton flavours other than quark, gluon, photon to 0d0\n" + \
2864 "%s%d=0d0\n") % \
2865 (pdf_codes[initial_state],i + 1)
2866
2867 pdf_lines = pdf_lines + "ENDIF\n"
2868
2869
2870 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
2871 for proc in processes:
2872 process_line = proc.base_string()
2873 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
2874 pdf_lines = pdf_lines + "\nPD(IPROC) = "
2875 for ibeam in [1, 2]:
2876 initial_state = proc.get_initial_pdg(ibeam)
2877 if initial_state in pdf_codes.keys():
2878 pdf_lines = pdf_lines + "%s%d*" % \
2879 (pdf_codes[initial_state], ibeam)
2880 else:
2881 pdf_lines = pdf_lines + "1d0*"
2882
2883 pdf_lines = pdf_lines[:-1] + "\n"
2884
2885
2886 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
2887
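    # Illustrative sketch (hypothetical, non-mirrored, no subprocess groups):
    # for an incoming u quark (pdg 2, pdf code 'u') on beam 1, the PDF call
    # emitted above is
    #
    #   u1=PDG2PDF(ABS(LPP(1)),2*LP,XBK(1),DSQRT(Q2FACT(1)))
    #
    # while flavours without a PDF (|pdf id| > 7) are simply set to 0d0.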
2888
2889
2891 """Return the color matrix definition lines for the given color_matrix. Split
2892 rows in chunks of size n."""
2893
2894 if not color_matrix:
2895 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
2896 else:
2897 ret_list = []
2898 my_cs = color.ColorString()
2899 for index, denominator in \
2900 enumerate(color_matrix.get_line_denominators()):
2901
2902 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
2903
2904 num_list = color_matrix.get_line_numerators(index, denominator)
2905 for k in xrange(0, len(num_list), n):
2906 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
2907 (index + 1, k + 1, min(k + n, len(num_list)),
2908 ','.join(["%5r" % i for i in num_list[k:k + n]])))
2909
2910 return ret_list
2911
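    # Illustrative sketch (hypothetical 2x2 color matrix with common
    # denominator 3 and a chunk size n large enough for a full row):
    #
    #   DATA Denom(1)/3/
    #   DATA (CF(i,  1),i=  1,  2) /   16,   -2/
    #   DATA Denom(2)/3/
    #   DATA (CF(i,  2),i=  1,  2) /   -2,   16/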
2912
2913
2914
2917 """Write the maxamps.inc file for MG4."""
2918
2919 file = " integer maxamps, maxflow, maxproc, maxsproc\n"
2920 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \
2921 (maxamps, maxflows)
2922 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \
2923 (maxproc, maxsproc)
2924
2925
2926 writer.writelines(file)
2927
2928 return True
2929
2930
2931
2932
2934
2935 """Write the ncombs.inc file for MadEvent."""
2936
2937
2938 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2939
2940
2941 file = " integer n_max_cl\n"
2942 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
2943
2944
2945 writer.writelines(file)
2946
2947 return True
2948
2949
2950
2951
2953 """Write a dummy config_subproc.inc file for MadEvent"""
2954
2955 lines = []
2956
2957 for iconfig in range(len(s_and_t_channels)):
2958 lines.append("DATA CONFSUB(1,%d)/1/" % \
2959 (iconfig + 1))
2960
2961
2962 writer.writelines(lines)
2963
2964 return True
2965
2966
2967
2968
2970 """Write the get_color.f file for MadEvent, which returns color
2971 for all particles used in the matrix element."""
2972
2973 try:
2974 matrix_elements=matrix_element.real_processes[0].matrix_element
2975 except IndexError:
2976 matrix_elements=[matrix_element.born_matrix_element]
2977
2978 if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
2979 matrix_elements = [matrix_elements]
2980
2981 model = matrix_elements[0].get('processes')[0].get('model')
2982
2983
2984
2985
2986 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
2987 for wf in d.get('wavefunctions')],[]) \
2988 for d in me.get('diagrams')],[]) \
2989 for me in [real_proc.matrix_element]],[])\
2990 for real_proc in matrix_element.real_processes],[]))
2991
2992 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
2993 for wf in d.get('wavefunctions')],[]) \
2994 for d in matrix_element.born_matrix_element.get('diagrams')],[])))
2995
2996
2997 leg_ids = set(sum([sum([sum([[l.get('id') for l in \
2998 p.get_legs_with_decays()] for p in \
2999 me.get('processes')], []) for me in \
3000 [real_proc.matrix_element]], []) for real_proc in \
3001 matrix_element.real_processes],[]))
3002
3003 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
3004 p.get_legs_with_decays()] for p in \
3005 matrix_element.born_matrix_element.get('processes')], [])))
3006 particle_ids = sorted(list(wf_ids.union(leg_ids)))
3007
3008 lines = """function get_color(ipdg)
3009 implicit none
3010 integer get_color, ipdg
3011
3012 if(ipdg.eq.%d)then
3013 get_color=%d
3014 return
3015 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())
3016
3017 for part_id in particle_ids[1:]:
3018 lines += """else if(ipdg.eq.%d)then
3019 get_color=%d
3020 return
3021 """ % (part_id, model.get_particle(part_id).get_color())
3022
3023
3024 lines += """else if(ipdg.eq.%d)then
3025 c This is dummy particle used in multiparticle vertices
3026 get_color=2
3027 return
3028 """ % model.get_first_non_pdg()
3029 lines += """else
3030 write(*,*)'Error: No color given for pdg ',ipdg
3031 get_color=0
3032 return
3033 endif
3034 end
3035 """
3036
3037
3038 writer.writelines(lines)
3039
3040 return True
3041
3042
3043
3044
3045
3046     def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3047 """Write the props.inc file for MadEvent. Needs input from
3048 write_configs_file. With respect to the parent routine, it has some
3049 more specific formats that allow the props.inc file to be read by the
3050 link program"""
3051
3052 lines = []
3053
3054 particle_dict = matrix_element.get('processes')[0].get('model').\
3055 get('particle_dict')
3056
3057 for iconf, configs in enumerate(s_and_t_channels):
3058 for vertex in configs[0] + configs[1][:-1]:
3059 leg = vertex.get('legs')[-1]
3060 if leg.get('id') not in particle_dict:
3061
3062 mass = 'zero'
3063 width = 'zero'
3064 pow_part = 0
3065 else:
3066 particle = particle_dict[leg.get('id')]
3067
3068 if particle.get('mass').lower() == 'zero':
3069 mass = particle.get('mass')
3070 else:
3071 mass = "abs(%s)" % particle.get('mass')
3072
3073 if particle.get('width').lower() == 'zero':
3074 width = particle.get('width')
3075 else:
3076 width = "abs(%s)" % particle.get('width')
3077
3078 pow_part = 1 + int(particle.is_boson())
3079
3080 lines.append("pmass(%3d,%4d) = %s" % \
3081 (leg.get('number'), iconf + 1, mass))
3082 lines.append("pwidth(%3d,%4d) = %s" % \
3083 (leg.get('number'), iconf + 1, width))
3084 lines.append("pow(%3d,%4d) = %d" % \
3085 (leg.get('number'), iconf + 1, pow_part))
3086
3087
3088 writer.writelines(lines)
3089
3090 return True
3091
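    # Illustrative sketch (hypothetical s-channel massive boson as leg 3 of
    # configuration 1, with mass/width parameters named MV/WV):
    #
    #   pmass(  3,   1) = abs(MV)
    #   pwidth(  3,   1) = abs(WV)
    #   pow(  3,   1) = 2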
3092
3093
3094
3095
3097 """Append this subprocess to the subproc.mg file for MG4"""
3098
3099
3100 writer.write(subprocdir + "\n")
3101
3102 return True
3103
3104
3105
3106
3107
3108
3109
3110
3113 """Class to take care of exporting a set of matrix elements to
3114 Fortran (v4) format."""
3115
3116
3119
3120
3121
3122
3123
3125 """create the directory run_name as a copy of the MadEvent
3126 Template, and clean the directory
3127 For now it is just the same as copy_v4template, but it will be modified
3128 """
3129 mgme_dir = self.mgme_dir
3130 dir_path = self.dir_path
3131 clean =self.opt['clean']
3132
3133
3134 if not os.path.isdir(dir_path):
3135 if not mgme_dir:
3136 raise MadGraph5Error, \
3137 "No valid MG_ME path given for MG4 run directory creation."
3138 logger.info('initialize a new directory: %s' % \
3139 os.path.basename(dir_path))
3140 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
3141
3142 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),
3143 dir_path)
3144
3145 for card in ['plot_card']:
3146 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
3147 try:
3148 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
3149 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
3150 except IOError:
3151 logger.warning("Failed to copy " + card + ".dat to default")
3152
3153 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
3154 if not mgme_dir:
3155 raise MadGraph5Error, \
3156 "No valid MG_ME path given for MG4 run directory creation."
3157 try:
3158 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
3159 except IOError:
3160 MG5_version = misc.get_pkg_info()
3161 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
3162 "5." + MG5_version['version'])
3163
3164
3165 if clean:
3166 logger.info('remove old information in %s' % os.path.basename(dir_path))
3167 if os.environ.has_key('MADGRAPH_BASE'):
3168 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
3169 '--web'], cwd=dir_path)
3170 else:
3171 try:
3172 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
3173 cwd=dir_path)
3174 except Exception, why:
3175 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
3176 % (os.path.basename(dir_path),why))
3177
3178 MG_version = misc.get_pkg_info()
3179 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
3180 MG_version['version'])
3181
3182
3183 self.link_CutTools(dir_path)
3184
3185 link_tir_libs=[]
3186 tir_libs=[]
3187 tir_include=[]
3188 for tir in self.all_tir:
3189 tir_dir="%s_dir"%tir
3190 libpath=getattr(self,tir_dir)
3191 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
3192 libpath,"lib%s.a"%tir,tir_name=tir)
3193 setattr(self,tir_dir,libpath)
3194 if libpath != "":
3195 if tir in ['pjfry','ninja','golem', 'samurai','collier']:
3196
3197
3198 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
3199 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
3200
3201 if tir in ['ninja']:
3202 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
3203 for ext in ['a','dylib','so']):
3204 raise MadGraph5Error(
3205                         "The OneLOop library 'libavh_olo.(a|dylib|so)' could not be found in path '%s'. Please place a symlink to it there."%libpath)
3206 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
3207 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
3208
3209 if tir in ['golem','samurai','ninja','collier']:
3210 trg_path = pjoin(os.path.dirname(libpath),'include')
3211 if os.path.isdir(trg_path):
3212 to_include = misc.find_includes_path(trg_path,
3213 self.include_names[tir])
3214 else:
3215 to_include = None
3216
3217 if to_include is None and tir=='collier':
3218 to_include = misc.find_includes_path(
3219 pjoin(libpath,'modules'),self.include_names[tir])
3220 if to_include is None:
3221 logger.error(
3222 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+
3223 'Generation carries on but you will need to edit the include path by hand in the makefiles.')
3224 to_include = '<Not_found_define_it_yourself>'
3225 tir_include.append('-I %s'%to_include)
3226 else:
3227 link_tir_libs.append('-l%s'%tir)
3228 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)
3229
3230 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
3231 cwd = os.getcwd()
3232 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3233 try:
3234 os.chdir(dirpath)
3235 except os.error:
3236 logger.error('Could not cd to directory %s' % dirpath)
3237 return 0
3238 filename = 'makefile_loop'
3239 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
3240 link_tir_libs,tir_libs,tir_include=tir_include)
3241 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
3242 dirpath = os.path.join(self.dir_path, 'Source')
3243 try:
3244 os.chdir(dirpath)
3245 except os.error:
3246 logger.error('Could not cd to directory %s' % dirpath)
3247 return 0
3248 filename = 'make_opts'
3249 calls = self.write_make_opts(writers.MakefileWriter(filename),
3250 link_tir_libs,tir_libs)
3251
3252 os.chdir(cwd)
3253
3254 cwd = os.getcwd()
3255 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3256 try:
3257 os.chdir(dirpath)
3258 except os.error:
3259 logger.error('Could not cd to directory %s' % dirpath)
3260 return 0
3261
3262
3263 cpfiles= ["SubProcesses/MadLoopParamReader.f",
3264 "Cards/MadLoopParams.dat",
3265 "SubProcesses/MadLoopParams.inc"]
3266
3267 for file in cpfiles:
3268 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
3269 os.path.join(self.dir_path, file))
3270
3271 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
3272 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
3273
3274
3275
3276 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
3277 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
3278 'Cards', 'MadLoopParams.dat'))
3279
3280 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
3281 "MadLoopParams.dat"))
3282
3283
3284 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
3285 "SubProcesses","MadLoopCommons.inc")).read()
3286 writer = writers.FortranWriter(os.path.join(self.dir_path,
3287 "SubProcesses","MadLoopCommons.f"))
3288 writer.writelines(MadLoopCommon%{
3289 'print_banner_commands':self.MadLoop_banner},
3290 context={'collier_available':self.tir_available_dict['collier']})
3291 writer.close()
3292
3293
3294 model_path = self.dir_path + '/Source/MODEL/'
3295
3296 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
3297 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
3298 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
3299 ln(model_path + '/mp_coupl_same_name.inc', \
3300 self.dir_path + '/SubProcesses')
3301
3302
3303 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
3304 writers.FortranWriter('cts_mpc.h'),)
3305
3306 self.copy_python_files()
3307
3308
3309
3310 self.write_pdf_opendata()
3311
3312
3313
3314 os.chdir(cwd)
3315
3317 """writes the V**** directory inside the P**** directories specified in
3318 dir_name"""
3319
3320 cwd = os.getcwd()
3321
3322 matrix_element = loop_matrix_element
3323
3324
3325 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
3326 try:
3327 os.mkdir(dirpath)
3328 except os.error as error:
3329 logger.warning(error.strerror + " " + dirpath)
3330
3331
3332 name = "V%s" % matrix_element.get('processes')[0].shell_string()
3333 dirpath = os.path.join(dir_name, name)
3334
3335 try:
3336 os.mkdir(dirpath)
3337 except os.error as error:
3338 logger.warning(error.strerror + " " + dirpath)
3339
3340 try:
3341 os.chdir(dirpath)
3342 except os.error:
3343 logger.error('Could not cd to directory %s' % dirpath)
3344 return 0
3345
3346 logger.info('Creating files in directory %s' % name)
3347
3348
3349 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3350
3351 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
3352
3353
3354 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
3355 abspath=False, cwd=None)
3356
3357
3358 filename = 'born_matrix.f'
3359 calls = self.write_bornmatrix(
3360 writers.FortranWriter(filename),
3361 matrix_element,
3362 fortran_model)
3363
3364 filename = 'nexternal.inc'
3365 self.write_nexternal_file(writers.FortranWriter(filename),
3366 nexternal, ninitial)
3367
3368 filename = 'pmass.inc'
3369 self.write_pmass_file(writers.FortranWriter(filename),
3370 matrix_element)
3371
3372 filename = 'ngraphs.inc'
3373 self.write_ngraphs_file(writers.FortranWriter(filename),
3374 len(matrix_element.get_all_amplitudes()))
3375
3376 filename = "loop_matrix.ps"
3377 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
3378 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
3379 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
3380 filename,
3381 model=matrix_element.get('processes')[0].get('model'),
3382 amplitude='')
3383 logger.info("Drawing loop Feynman diagrams for " + \
3384 matrix_element.get('processes')[0].nice_string(\
3385 print_weighted=False))
3386 plot.draw()
3387
3388 filename = "born_matrix.ps"
3389 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
3390 get('born_diagrams'),
3391 filename,
3392 model=matrix_element.get('processes')[0].\
3393 get('model'),
3394 amplitude='')
3395 logger.info("Generating born Feynman diagrams for " + \
3396 matrix_element.get('processes')[0].nice_string(\
3397 print_weighted=False))
3398 plot.draw()
3399
3400
3401
3402
3403 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
3404
3405 open('unique_id.inc','w').write(
3406 """ integer UNIQUE_ID
3407 parameter(UNIQUE_ID=1)""")
3408
3409 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
3410 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
3411 'MadLoopParams.inc','MadLoopCommons.f']
3412
3413 for file in linkfiles:
3414 ln('../../%s' % file)
3415
3416 os.system("ln -s ../../makefile_loop makefile")
3417
3418
3419 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
3420 pjoin('..','MadLoop5_resources'))
3421
3422 linkfiles = ['mpmodule.mod']
3423
3424 for file in linkfiles:
3425 ln('../../../lib/%s' % file)
3426
3427 linkfiles = ['coef_specs.inc']
3428
3429 for file in linkfiles:
3430 ln('../../../Source/DHELAS/%s' % file)
3431
3432
3433 os.chdir(cwd)
3434
3435 if not calls:
3436 calls = 0
3437 return calls
3438
3439
3440
3441
3442
3444 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the
3445 non-optimized mode"""
3446 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc')
3447
3448 replace_dict = {}
3449 replace_dict['max_lwf_size'] = 4
3450 replace_dict['vertex_max_coefs'] = max(\
3451 [q_polynomial.get_number_of_coefs_for_rank(n)
3452 for n in max_loop_vertex_ranks])
3453 IncWriter=writers.FortranWriter(filename,'w')
3454 IncWriter.writelines("""INTEGER MAXLWFSIZE
3455 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d)
3456 INTEGER VERTEXMAXCOEFS
3457 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\
3458 % replace_dict)
3459 IncWriter.close()
3460
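    # Illustrative sketch of the generated include: MAXLWFSIZE is hard-coded
    # to 4 above, while VERTEXMAXCOEFS takes whatever value
    # q_polynomial.get_number_of_coefs_for_rank returns for the highest
    # loop-vertex rank, e.g.
    #
    #   INTEGER MAXLWFSIZE
    #   PARAMETER (MAXLWFSIZE=4)
    #   INTEGER VERTEXMAXCOEFS
    #   PARAMETER (VERTEXMAXCOEFS=<value for the maximal rank>)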