15 """Methods and classes to export matrix elements to fks format."""
16
17 from __future__ import absolute_import
18 from __future__ import print_function
19 from distutils import dir_util
20 import glob
21 import logging
22 import os
23 import re
24 import shutil
25 import subprocess
26 import string
27 import copy
28 import platform
29
30 import madgraph.core.color_algebra as color
31 import madgraph.core.helas_objects as helas_objects
32 import madgraph.core.base_objects as base_objects
33 import madgraph.fks.fks_helas_objects as fks_helas_objects
34 import madgraph.fks.fks_base as fks
35 import madgraph.fks.fks_common as fks_common
36 import madgraph.iolibs.drawing_eps as draw
37 import madgraph.iolibs.gen_infohtml as gen_infohtml
38 import madgraph.iolibs.files as files
39 import madgraph.various.misc as misc
40 import madgraph.iolibs.file_writers as writers
41 import madgraph.iolibs.template_files as template_files
42 import madgraph.iolibs.ufo_expression_parsers as parsers
43 import madgraph.iolibs.export_v4 as export_v4
44 import madgraph.loop.loop_exporters as loop_exporters
45 import madgraph.various.q_polynomial as q_polynomial
46 import madgraph.various.banner as banner_mod
47
48 import aloha.create_aloha as create_aloha
49
50 import models.write_param_card as write_param_card
51 import models.check_param_card as check_param_card
52 from madgraph import MadGraph5Error, MG5DIR, InvalidCmd
53 from madgraph.iolibs.files import cp, ln, mv
54 from six.moves import range
55
56 pjoin = os.path.join
57
58 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
59 logger = logging.getLogger('madgraph.export_fks')
60
61
63 Pdir = args[0]
64 old_pos = args[1]
65 dir_path = args[2]
66
67 devnull = os.open(os.devnull, os.O_RDWR)
68
69 os.chdir(Pdir)
70 subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
71 stdout = devnull)
72 os.chdir(os.path.pardir)
73
74
75
76
77
78 class ProcessExporterFortranFKS(loop_exporters.LoopProcessExporterFortranSA):
79 """Class to take care of exporting a set of matrix elements to
80 Fortran (v4) format."""
81
82
83
84
85 def copy_fkstemplate(self):  # name/signature inferred (cf. copy_v4template in the docstring)
86 """Create the directory run_name as a copy of the MadEvent
87 Template and clean it.
88 For now this is essentially the same as copy_v4template, but it will be modified.
89 """
90
91 mgme_dir = self.mgme_dir
92 dir_path = self.dir_path
93 clean =self.opt['clean']
94
95
96 if not os.path.isdir(dir_path):
97 if not mgme_dir:
98 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
99 logger.info('initialize a new directory: %s' % \
100 os.path.basename(dir_path))
101 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
102
103 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)
104
105 for card in ['plot_card']:
106 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
107 try:
108 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
109 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
110 except IOError:
111 logger.warning("Failed to copy " + card + ".dat to default")
112
113 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
114 if not mgme_dir:
115 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
116 try:
117 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
118 except IOError:
119 MG5_version = misc.get_pkg_info()
120 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
121 "5." + MG5_version['version'])
122
123
124 if clean:
125 logger.info('remove old information in %s' % os.path.basename(dir_path))
126 if 'MADGRAPH_BASE' in os.environ:
127 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
128 '--web'],cwd=dir_path)
129 else:
130 try:
131 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
132 cwd=dir_path)
133 except Exception as why:
134 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
135 % (os.path.basename(dir_path),why))
136
137 MG_version = misc.get_pkg_info()
138 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
139 MG_version['version'])
140
141
142 self.link_CutTools(dir_path)
143
144 link_tir_libs=[]
145 tir_libs=[]
146 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
147 dirpath = os.path.join(self.dir_path, 'SubProcesses')
148 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop')
149 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
150 link_tir_libs,tir_libs)
151 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
152 filename = pjoin(self.dir_path, 'Source','make_opts')
153 calls = self.write_make_opts(writers.MakefileWriter(filename),
154 link_tir_libs,tir_libs)
155
156
157 for card in ['FO_analyse_card', 'shower_card']:
158 try:
159 shutil.copy(pjoin(self.dir_path, 'Cards',
160 card + '.dat'),
161 pjoin(self.dir_path, 'Cards',
162 card + '_default.dat'))
163 except IOError:
164 logger.warning("Failed to copy " + card + ".dat to default")
165
166 cwd = os.getcwd()
167 dirpath = os.path.join(self.dir_path, 'SubProcesses')
168 try:
169 os.chdir(dirpath)
170 except os.error:
171 logger.error('Could not cd to directory %s' % dirpath)
172 return 0
173
174
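# Copy the MadLoop steering files (parameter reader, include file and default card) from the StandAlone loop template into the process directory.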
175 cpfiles= ["SubProcesses/MadLoopParamReader.f",
176 "Cards/MadLoopParams.dat",
177 "SubProcesses/MadLoopParams.inc"]
178
179 for file in cpfiles:
180 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
181 os.path.join(self.dir_path, file))
182
183 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
184 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
185
186 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
187 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
188 'Cards', 'MadLoopParams.dat'))
189
190 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
191 "MadLoopParams.dat"))
192
193
194 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
195 "SubProcesses","MadLoopCommons.inc")).read()
196 writer = writers.FortranWriter(os.path.join(self.dir_path,
197 "SubProcesses","MadLoopCommons.f"))
198 writer.writelines(MadLoopCommon%{
199 'print_banner_commands':self.MadLoop_banner},
200 context={'collier_available':False})
201 writer.close()
202
203
204 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
205 writers.FortranWriter('cts_mpc.h'))
206
207
208
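# Reset NHelForMCoverHels to -1 in the local copy of FKS_params.dat.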
209 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat')
210 FKS_card_file = open(FKS_card_path,'r')
211 FKS_card = FKS_card_file.read()
212 FKS_card_file.close()
213 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
214 "#NHelForMCoverHels\n-1", FKS_card)
215 FKS_card_file = open(FKS_card_path,'w')
216 FKS_card_file.write(FKS_card)
217 FKS_card_file.close()
218
219
220 os.chdir(cwd)
221
222 self.copy_python_files()
223
224
225 self.write_pdf_opendata()
226
227
228
229
230
231 def write_makefile_TIR(self, writer, link_tir_libs, tir_libs, tir_include=[]):
232 """ Create the file makefile_loop which links to the TIR libraries."""
233
234 file = open(os.path.join(self.mgme_dir,'Template','NLO',
235 'SubProcesses','makefile_loop.inc')).read()
236 replace_dict={}
237 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
238 replace_dict['tir_libs']=' '.join(tir_libs)
239 replace_dict['dotf']='%.f'
240 replace_dict['doto']='%.o'
241 replace_dict['tir_include']=' '.join(tir_include)
242 file=file%replace_dict
243 if writer:
244 writer.writelines(file)
245 else:
246 return file
247
248
249 def write_make_opts(self, writer, link_tir_libs, tir_libs):
250 """ Create the file make_opts which links to the TIR libraries."""
251 file = open(os.path.join(self.mgme_dir,'Template','NLO',
252 'Source','make_opts.inc')).read()
253 replace_dict={}
254 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
255 replace_dict['tir_libs']=' '.join(tir_libs)
256 replace_dict['dotf']='%.f'
257 replace_dict['doto']='%.o'
258 file=file%replace_dict
259 if writer:
260 writer.writelines(file)
261 else:
262 return file
263
264
265
266
267 def copy_python_files(self):
268 """copy python files required for the Template"""
269
270 files_to_copy = [ \
271 pjoin('interface','amcatnlo_run_interface.py'),
272 pjoin('interface','extended_cmd.py'),
273 pjoin('interface','common_run_interface.py'),
274 pjoin('interface','coloring_logging.py'),
275 pjoin('various','misc.py'),
276 pjoin('various','shower_card.py'),
277 pjoin('various','FO_analyse_card.py'),
278 pjoin('various','histograms.py'),
279 pjoin('various','banner.py'),
280 pjoin('various','cluster.py'),
281 pjoin('various','systematics.py'),
282 pjoin('various','lhe_parser.py'),
283 pjoin('madevent','sum_html.py'),
284 pjoin('madevent','gen_crossxhtml.py'),
285 pjoin('iolibs','files.py'),
286 pjoin('iolibs','save_load_object.py'),
287 pjoin('iolibs','file_writers.py'),
288 pjoin('..','models','check_param_card.py'),
289 pjoin('__init__.py')
290 ]
291 cp(_file_path+'/interface/.mg5_logging.conf',
292 self.dir_path+'/bin/internal/me5_logging.conf')
293
294 for cp_file in files_to_copy:
295 cp(pjoin(_file_path,cp_file),
296 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
297
298 def convert_model(self, model, wanted_lorentz = [],
299 wanted_couplings = []):
300
301 super(ProcessExporterFortranFKS,self).convert_model(model,
302 wanted_lorentz, wanted_couplings)
303
304 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
305 try:
306 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
307 except OSError as error:
308 pass
309 model_path = model.get('modelpath')
310 shutil.copytree(model_path,
311 pjoin(self.dir_path,'bin','internal','ufomodel'),
312 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
313 if hasattr(model, 'restrict_card'):
314 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
315 'restrict_default.dat')
316 if isinstance(model.restrict_card, check_param_card.ParamCard):
317 model.restrict_card.write(out_path)
318 else:
319 files.cp(model.restrict_card, out_path)
320
321
322
323
324
325
326 def write_maxparticles_file(self, writer, maxparticles):
327 """Write the maxparticles.inc file for MadEvent"""
328
329 lines = "integer max_particles, max_branch\n"
330 lines += "parameter (max_particles=%d) \n" % maxparticles
331 lines += "parameter (max_branch=max_particles-1)"
332
333
334 writer.writelines(lines)
335
336 return True
337
338
339
340
341
342 def write_maxconfigs_file(self, writer, maxconfigs):
343 """Write the maxconfigs.inc file for MadEvent"""
344
345 lines = "integer lmaxconfigs\n"
346 lines += "parameter (lmaxconfigs=%d)" % maxconfigs
347
348
349 writer.writelines(lines)
350
351 return True
352
353
354
355
356
357 def write_procdef_mg5(self, file_pos, modelname, process_str):
358 """ Write an equivalent of the MG4 proc_card so that all the MadEvent4
359 Perl scripts keep working properly for a pure MG5 run."""
360
361 proc_card_template = template_files.mg4_proc_card.mg4_template
362 process_template = template_files.mg4_proc_card.process_template
363 process_text = ''
364 coupling = ''
365 new_process_content = []
366
367
368
369 process_str = process_str.replace(' =', '=')
370 process_str = process_str.replace('= ', '=')
371 process_str = process_str.replace(',',' , ')
372
373 for info in process_str.split():
374 if '=' in info:
375 coupling += info + '\n'
376 else:
377 new_process_content.append(info)
378
379
380 process_str = ' '.join(new_process_content)
381
382
383 process_text += process_template.substitute({'process': process_str, \
384 'coupling': coupling})
385
386 text = proc_card_template.substitute({'process': process_text,
387 'model': modelname,
388 'multiparticle':''})
389 ff = open(file_pos, 'w')
390 ff.write(text)
391 ff.close()
392
393
394
395
396
397 def write_init_map(self, file_pos, initial_states):  # name/signature inferred from the body below
398 """ Write an initial-state process map. Each possible PDF
399 combination gets a unique identifier."""
400
401 text=''
402 for i,e in enumerate(initial_states):
403 text=text+str(i+1)+' '+str(len(e))
404 for t in e:
405 if len(t) ==1:
406 t.append(0)
407 text=text+' '
408 try:
409 for p in t:
410 if p == None : p = 0
411 text=text+' '+str(p)
412 except TypeError:
413 text=text+' '+str(t)
414 text=text+'\n'
415
416 ff = open(file_pos, 'w')
417 ff.write(text)
418 ff.close()
419
420 def get_ME_identifier(self, matrix_element, *args, **opts):  # name/signature inferred
421 """ A function returning a string uniquely identifying the matrix
422 element given in argument so that it can be used as a prefix to all
423 MadLoop5 subroutines and common blocks related to it. This allows
424 several processes to be compiled into one library, as requested by the
425 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design
426 necessitates that there is no process prefix."""
427
428 return ''
429
430
431
432
434 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the
435 non-optimized mode"""
436 raise fks_common.FKSProcessError("write_coef_specs should be called only in the loop-optimized mode")
437
438
439
440
441
442 def generate_directories_fks(self, matrix_element, fortran_model, me_number,
443 me_ntot, path=os.getcwd(),OLP='MadLoop'):
444 """Generate the Pxxxxx_i directories for a subprocess in MadFKS,
445 including the necessary matrix.f and various helper files"""
446 proc = matrix_element.born_matrix_element['processes'][0]
447
448 if not self.model:
449 self.model = matrix_element.get('processes')[0].get('model')
450
451 cwd = os.getcwd()
452 try:
453 os.chdir(path)
454 except OSError as error:
455 error_msg = "The directory %s should exist in order to be able " % path + \
456 "to \"export\" in it. If you see this error message by " + \
457 "typing the command \"export\", please consider using " + \
458 "the command \"output\" instead. "
459 raise MadGraph5Error(error_msg)
460
461 calls = 0
462
463 self.fksdirs = []
464
465 borndir = "P%s" % \
466 (matrix_element.get('processes')[0].shell_string())
467 os.mkdir(borndir)
468 os.chdir(borndir)
469 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot))
470
471
472 self.generate_born_fks_files(matrix_element,
473 fortran_model, me_number, path)
474
475
476
477 if OLP=='NJET':
478 filename = 'OLE_order.lh'
479 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP)
480
481 if matrix_element.virt_matrix_element:
482 calls += self.generate_virt_directory( \
483 matrix_element.virt_matrix_element, \
484 fortran_model, \
485 os.path.join(path, borndir))
486
487
488
489 self.write_real_matrix_elements(matrix_element, fortran_model)
490
491 self.write_pdf_calls(matrix_element, fortran_model)
492
493 filename = 'nFKSconfigs.inc'
494 self.write_nfksconfigs_file(writers.FortranWriter(filename),
495 matrix_element,
496 fortran_model)
497
498 filename = 'iproc.dat'
499 self.write_iproc_file(writers.FortranWriter(filename),
500 me_number)
501
502 filename = 'fks_info.inc'
503 self.write_fks_info_file(writers.FortranWriter(filename),
504 matrix_element,
505 fortran_model)
506
507 filename = 'leshouche_info.dat'
508 nfksconfs,maxproc,maxflow,nexternal=\
509 self.write_leshouche_info_file(filename,matrix_element)
510
511
512
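# No FKS configurations found (no real-emission processes): fall back to the born matrix element so that the include files written below still get sensible array dimensions.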
513 if nfksconfs == maxproc == maxflow == 0:
514 nfksconfs = 1
515 (dummylines, maxproc, maxflow) = self.get_leshouche_lines(
516 matrix_element.born_matrix_element, 1)
517
518 filename = 'leshouche_decl.inc'
519 self.write_leshouche_info_declarations(
520 writers.FortranWriter(filename),
521 nfksconfs,maxproc,maxflow,nexternal,
522 fortran_model)
523 filename = 'genps.inc'
524 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes()
525 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis')))
526 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\
527 ncolor,maxflow,fortran_model)
528
529 filename = 'configs_and_props_info.dat'
530 nconfigs,max_leg_number=self.write_configs_and_props_info_file(
531 filename,
532 matrix_element)
533
534 filename = 'configs_and_props_decl.inc'
535 self.write_configs_and_props_info_declarations(
536 writers.FortranWriter(filename),
537 nconfigs,max_leg_number,nfksconfs,
538 fortran_model)
539
540 filename = 'real_from_born_configs.inc'
541 self.write_real_from_born_configs(
542 writers.FortranWriter(filename),
543 matrix_element,
544 fortran_model)
545
546 filename = 'ngraphs.inc'
547 self.write_ngraphs_file(writers.FortranWriter(filename),
548 nconfigs)
549
550
551 filename = 'real_me_chooser.f'
552 self.write_real_me_wrapper(writers.FortranWriter(filename),
553 matrix_element,
554 fortran_model)
555
556 filename = 'parton_lum_chooser.f'
557 self.write_pdf_wrapper(writers.FortranWriter(filename),
558 matrix_element,
559 fortran_model)
560
561 filename = 'get_color.f'
562 self.write_colors_file(writers.FortranWriter(filename),
563 matrix_element)
564
565 filename = 'nexternal.inc'
566 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
567 self.write_nexternal_file(writers.FortranWriter(filename),
568 nexternal, ninitial)
569 self.proc_characteristic['ninitial'] = ninitial
570 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)
571
572 filename = 'pmass.inc'
573 try:
574 self.write_pmass_file(writers.FortranWriter(filename),
575 matrix_element.real_processes[0].matrix_element)
576 except IndexError:
577 self.write_pmass_file(writers.FortranWriter(filename),
578 matrix_element.born_matrix_element)
579
580
581 self.draw_feynman_diagrams(matrix_element)
582
583 linkfiles = ['BinothLHADummy.f',
584 'check_poles.f',
585 'MCmasses_HERWIG6.inc',
586 'MCmasses_HERWIGPP.inc',
587 'MCmasses_PYTHIA6Q.inc',
588 'MCmasses_PYTHIA6PT.inc',
589 'MCmasses_PYTHIA8.inc',
590 'add_write_info.f',
591 'coupl.inc',
592 'cuts.f',
593 'FKS_params.dat',
594 'initial_states_map.dat',
595 'OLE_order.olc',
596 'FKSParams.inc',
597 'FKSParamReader.f',
598 'cuts.inc',
599 'unlops.inc',
600 'pythia_unlops.f',
601 'driver_mintMC.f',
602 'driver_mintFO.f',
603 'appl_interface.cc',
604 'appl_interface_dummy.f',
605 'appl_common.inc',
606 'reweight_appl.inc',
607 'fastjetfortran_madfks_core.cc',
608 'fastjetfortran_madfks_full.cc',
609 'fjcore.cc',
610 'fastjet_wrapper.f',
611 'fjcore.hh',
612 'fks_Sij.f',
613 'fks_powers.inc',
614 'fks_singular.f',
615 'veto_xsec.f',
616 'veto_xsec.inc',
617 'weight_lines.f',
618 'fks_inc_chooser.f',
619 'leshouche_inc_chooser.f',
620 'configs_and_props_inc_chooser.f',
621 'genps_fks.f',
622 'boostwdir2.f',
623 'madfks_mcatnlo.inc',
624 'open_output_files.f',
625 'open_output_files_dummy.f',
626 'HwU_dummy.f',
627 'madfks_plot.f',
628 'analysis_dummy.f',
629 'analysis_lhe.f',
630 'mint-integrator2.f',
631 'MC_integer.f',
632 'mint.inc',
633 'montecarlocounter.f',
634 'q_es.inc',
635 'recluster.cc',
636 'Boosts.h',
637 'reweight_xsec.f',
638 'reweight_xsec_events.f',
639 'reweight_xsec_events_pdf_dummy.f',
640 'iproc_map.f',
641 'run.inc',
642 'run_card.inc',
643 'setcuts.f',
644 'setscales.f',
645 'test_soft_col_limits.f',
646 'symmetry_fks_v3.f',
647 'vegas2.for',
648 'write_ajob.f',
649 'handling_lhe_events.f',
650 'write_event.f',
651 'fill_MC_mshell.f',
652 'maxparticles.inc',
653 'message.inc',
654 'initcluster.f',
655 'cluster.inc',
656 'cluster.f',
657 'reweight.f',
658 'randinit',
659 'sudakov.inc',
660 'maxconfigs.inc',
661 'timing_variables.inc']
662
663 for file in linkfiles:
664 ln('../' + file , '.')
665 os.system("ln -s ../../Cards/param_card.dat .")
666
667
668 os.system("ln -s ../makefile_fks_dir ./makefile")
669 if matrix_element.virt_matrix_element:
670 os.system("ln -s ../BinothLHA.f ./BinothLHA.f")
671 elif OLP!='MadLoop':
672 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f")
673 else:
674 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f")
675
676
677 os.chdir(os.path.pardir)
678
679 filename = 'subproc.mg'
680 files.append_to_file(filename,
681 self.write_subproc,
682 borndir)
683
684 os.chdir(cwd)
685
686 gen_infohtml.make_info_html_nlo(self.dir_path)
687
688
689 return calls
690
691
692
693
694 def create_run_card(self, processes, history):
695 """Create the run_card (and its default copy) in the Cards directory."""
696
697 run_card = banner_mod.RunCardNLO()
698
699 run_card.create_default_for_process(self.proc_characteristic,
700 history,
701 processes)
702
703 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
704 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
705
706
717
718 def finalize(self, matrix_elements, history, mg5options, flaglist):
719 """Finalize FKS directory by creating jpeg diagrams, html
720 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if
721 necessary."""
722
723 devnull = os.open(os.devnull, os.O_RDWR)
724 try:
725 res = misc.call([mg5options['lhapdf'], '--version'], \
726 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
727 except Exception:
728 res = 1
729 if res != 0:
730 logger.info('The value for lhapdf in the current configuration does not ' + \
731 'correspond to a valid executable.\nPlease set it correctly either in ' + \
732 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \
733 'and regenerate the process. \nTo avoid regeneration, edit the ' + \
734 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \
735 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n')
736
737 compiler_dict = {'fortran': mg5options['fortran_compiler'],
738 'cpp': mg5options['cpp_compiler'],
739 'f2py': mg5options['f2py_compiler']}
740
741 if 'nojpeg' in flaglist:
742 makejpg = False
743 else:
744 makejpg = True
745 output_dependencies = mg5options['output_dependencies']
746
747
748 self.proc_characteristic['grouped_matrix'] = False
749 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']
750
751 self.create_proc_charac()
752
753 self.create_run_card(matrix_elements.get_processes(), history)
754
755
756
757
758
759
760
761
762 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f')
763 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc')
764 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model)
765
766
767
768 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc')
769 self.write_maxconfigs_file(writers.FortranWriter(filename),
770 matrix_elements.get_max_configs())
771
772
773 filename = os.path.join(self.dir_path,'Source','maxparticles.inc')
774 self.write_maxparticles_file(writers.FortranWriter(filename),
775 matrix_elements.get_max_particles())
776
777
778 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses'))
779
780
781 fcompiler_chosen = self.set_fortran_compiler(compiler_dict)
782 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp'])
783
784 old_pos = os.getcwd()
785 os.chdir(os.path.join(self.dir_path, 'SubProcesses'))
786 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \
787 proc[0] == 'P']
788
789 devnull = os.open(os.devnull, os.O_RDWR)
790
791 if makejpg:
792 logger.info("Generate jpeg diagrams")
793 for Pdir in P_dir_list:
794 os.chdir(Pdir)
795 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
796 stdout = devnull)
797 os.chdir(os.path.pardir)
798
799 logger.info("Generate web pages")
800
801
802 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
803 stdout = devnull)
804
805 os.chdir(os.path.pardir)
806
807
808
809
810
811
812
813
814
815
816 if os.path.isdir('Cards'):
817 output_file = os.path.join('Cards', 'proc_card_mg5.dat')
818 history.write(output_file)
819
820
821 for card in ['run_card', 'FO_analyse_card', 'shower_card']:
822 try:
823 shutil.copy(pjoin(self.dir_path, 'Cards',
824 card + '.dat'),
825 pjoin(self.dir_path, 'Cards',
826 card + '_default.dat'))
827 except IOError:
828 logger.warning("Failed to copy " + card + ".dat to default")
829
830
831 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
832 stdout = devnull)
833
834
835 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')):
836 if os.path.exists('amcatnlo.tar.gz'):
837 os.remove('amcatnlo.tar.gz')
838 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')],
839 stdout = devnull)
840
841 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
842 stdout = devnull)
843
844
845 os.chdir(old_pos)
846
847
848
849 base_compiler= ['FC=g77','FC=gfortran']
850
851 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP')
852 if output_dependencies == 'external':
853
854 if (not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \
855 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a'))) and \
856 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP','fail')):
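# If the user has not forced a Fortran compiler via the FC environment variable, patch StdHEP's make_opts to use the compiler selected for this output.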
857 if 'FC' not in os.environ or not os.environ['FC']:
858 path = os.path.join(StdHep_path, 'src', 'make_opts')
859 text = open(path).read()
860 for base in base_compiler:
861 text = text.replace(base,'FC=%s' % fcompiler_chosen)
862 open(path, 'w').writelines(text)
863 logger.info('Compiling StdHEP. This has to be done only once.')
864 try:
865 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP'))
866 except Exception as error:
867 logger.debug(str(error))
868 logger.warning("StdHep failed to compile. This prevents running NLO+PS with PY6 and Herwig6")
869 logger.info("details on the compilation error are available on %s", pjoin(MG5DIR, 'vendor', 'StdHEP','fail'))
870 logger.info("if you want to retry the compilation automatically, you have to remove that file first")
871 with open(pjoin(MG5DIR, 'vendor', 'StdHEP','fail'),'w') as fsock:
872 fsock.write(str(error))
873 else:
874 logger.info('Done.')
875 if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')):
876
877 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \
878 pjoin(self.dir_path, 'MCatNLO', 'lib'))
879 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \
880 pjoin(self.dir_path, 'MCatNLO', 'lib'))
881
882 elif output_dependencies == 'internal':
883 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP')
884 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True)
885
886 linkfiles = ['libstdhep.a', 'libFmcfio.a']
887 for file in linkfiles:
888 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file),
889 os.path.join(self.dir_path, 'MCatNLO', 'lib'))
890 if 'FC' not in os.environ or not os.environ['FC']:
891 path = pjoin(StdHEP_internal_path, 'src', 'make_opts')
892 text = open(path).read()
893 for base in base_compiler:
894 text = text.replace(base,'FC=%s' % fcompiler_chosen)
895 open(path, 'w').writelines(text)
896
897 misc.compile(['clean'],cwd = StdHEP_internal_path)
898
899 elif output_dependencies == 'environment_paths':
900
901
902 libStdHep = misc.which_lib('libstdhep.a')
903 libFmcfio = misc.which_lib('libFmcfio.a')
904 if not libStdHep is None and not libFmcfio is None:
905 logger.info('MG5_aMC is using StdHep installation found at %s.'%\
906 os.path.dirname(libStdHep))
907 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
908 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
909 else:
910 raise InvalidCmd("Could not find the location of the files"+\
911 " libstdhep.a and libFmcfio.a in your environment paths.")
912
913 else:
914 raise MadGraph5Error('output_dependencies option %s not recognized'\
915 %output_dependencies)
916
917
918 if 'madanalysis5_path' in self.opt and not \
919 self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
920
921 processes = sum([me.get('processes') if not isinstance(me, str) else [] \
922 for me in matrix_elements.get('matrix_elements')],[])
923
924
925
926 if len(processes)==0:
927 processes = self.born_processes
928 if len(processes)==0:
929 logger.warning(
930 """MG5aMC could not provide MadAnalysis5 with the list of generated processes.
931 As a result, the default card will not be tailored to the generated process.
932 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""")
933
934
935 self.create_default_madanalysis5_cards(
936 history, self.proc_defs, [processes,]*len(self.proc_defs),
937 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
938 levels =['hadron'])
939
940 def write_real_from_born_configs(self, writer, matrix_element, fortran_model):
941 """Writes the real_from_born_configs.inc file that contains
942 the mapping from a given born configuration (as used
943 e.g. in the multi-channel phase-space integration) to the
944 corresponding real-emission diagram, i.e. the real-emission
945 diagram in which the combined parton ij is split into i_fks and
946 j_fks."""
947 lines=[]
948 lines2=[]
949 max_links=0
950 born_me=matrix_element.born_matrix_element
951 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
952 iFKS=iFKS+1
953 links=conf['fks_info']['rb_links']
954 max_links=max(max_links,len(links))
955 for i,diags in enumerate(links):
956 if not i == diags['born_conf']:
957 print(links)
958 raise MadGraph5Error("born_conf should be canonically ordered")
959 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
960 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
961 % (iFKS,len(links),real_configs))
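# 'real_conf' is stored 0-based in rb_links (cf. the born_conf check above); the +1 converts it to the 1-based diagram numbering used in the Fortran DATA statement.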
962
963
964
965 if not matrix_element.get_fks_info_list():
966
967 base_diagrams = born_me.get('base_amplitude').get('diagrams')
968 minvert = min([max([len(vert.get('legs')) for vert in \
969 diag.get('vertices')]) for diag in base_diagrams])
970
971 for idiag, diag in enumerate(base_diagrams):
972 if any([len(vert.get('legs')) > minvert for vert in
973 diag.get('vertices')]):
974
975 continue
976 max_links = max_links + 1
977
978 real_configs=', '.join(['%d' % i for i in range(1, max_links+1)])
979 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
980 % (1,max_links,real_configs))
981
982 lines2.append("integer irfbc")
983 lines2.append("integer real_from_born_conf(%d,%d)" \
984 % (max_links, max(len(matrix_element.get_fks_info_list()),1)))
985
986 writer.writelines(lines2+lines)
987
988
989
990
991
992
993 def write_get_mass_width_file(self, writer, makeinc, model):
994 """Write the get_mass_width_fcts.f file for MG4.
995 Also update the makeinc.inc file
996 """
997 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero']
998 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero']
999
1000 iflines_mass = ''
1001 iflines_width = ''
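# Build Fortran if/else-if chains that map a PDG id to abs(mass) and abs(width), e.g. "if (id.eq.6) then" followed by "get_mass_from_id=abs(MT)".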
1002
1003 for i, part in enumerate(mass_particles):
1004 if i == 0:
1005 ifstring = 'if'
1006 else:
1007 ifstring = 'else if'
1008 if part['self_antipart']:
1009 iflines_mass += '%s (id.eq.%d) then\n' % \
1010 (ifstring, part.get_pdg_code())
1011 else:
1012 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1013 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1014 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass')
1015
1016 for i, part in enumerate(width_particles):
1017 if i == 0:
1018 ifstring = 'if'
1019 else:
1020 ifstring = 'else if'
1021 if part['self_antipart']:
1022 iflines_width += '%s (id.eq.%d) then\n' % \
1023 (ifstring, part.get_pdg_code())
1024 else:
1025 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1026 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1027 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width')
1028
1029
1030 if len(mass_particles)==0:
1031 iflines_mass = 'if (.True.) then\n'
1032
1033 if len(width_particles)==0:
1034 iflines_width = 'if (.True.) then\n'
1035
1036 replace_dict = {'iflines_mass' : iflines_mass,
1037 'iflines_width' : iflines_width}
1038
1039 file = open(os.path.join(_file_path, \
1040 'iolibs/template_files/get_mass_width_fcts.inc')).read()
1041 file = file % replace_dict
1042
1043
1044 writer.writelines(file)
1045
1046
1047 makeinc_content = open(makeinc).read()
1048 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ')
1049 open(makeinc, 'w').write(makeinc_content)
1050
1051 return
1052
1053
1054 def write_configs_and_props_info_declarations(self, writer, max_iconfig, max_leg_number, nfksconfs, fortran_model):
1055 """writes the declarations for the variables relevant for configs_and_props
1056 """
1057 lines = []
1058 lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
1059 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
1060 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
1061 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
1062 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1063 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1064 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1065 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1066 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1067 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1068
1069 writer.writelines(lines)
1070
1071
1072 def write_configs_and_props_info_file(self, filename, matrix_element):
1073 """writes the configs_and_props_info.dat file that contains
1074 all the (real-emission) configurations (IFOREST) as well as
1075 the masses and widths of intermediate particles"""
1076 lines = []
1077 lines.append("# C -> MAPCONFIG_D")
1078 lines.append("# F/D -> IFOREST_D")
1079 lines.append("# S -> SPROP_D")
1080 lines.append("# T -> TPRID_D")
1081 lines.append("# M -> PMASS_D/PWIDTH_D")
1082 lines.append("# P -> POW_D")
1083 lines2 = []
1084 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1085
1086 max_iconfig=0
1087 max_leg_number=0
1088
1089
1090
1091
1092 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
1093 iFKS=iFKS+1
1094 iconfig = 0
1095 s_and_t_channels = []
1096 mapconfigs = []
1097 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
1098 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
1099 model = fks_matrix_element.get('base_amplitude').get('process').get('model')
1100 minvert = min([max([len(vert.get('legs')) for vert in \
1101 diag.get('vertices')]) for diag in base_diagrams])
1102
1103 lines.append("# ")
1104 lines.append("# nFKSprocess %d" % iFKS)
1105 for idiag, diag in enumerate(base_diagrams):
1106 if any([len(vert.get('legs')) > minvert for vert in
1107 diag.get('vertices')]):
1108
1109 continue
1110 iconfig = iconfig + 1
1111 helas_diag = fks_matrix_element.get('diagrams')[idiag]
1112 mapconfigs.append(helas_diag.get('number'))
1113 lines.append("# Diagram %d for nFKSprocess %d" % \
1114 (helas_diag.get('number'),iFKS))
1115
1116 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1117 helas_diag.get('number')))
1118
1119
1120
1121 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1122 get_s_and_t_channels(ninitial, model, 990)
1123
1124 s_and_t_channels.append([schannels, tchannels])
1125
1126
1127 allchannels = schannels
1128 if len(tchannels) > 1:
1129
1130 allchannels = schannels + tchannels
1131
1132 for vert in allchannels:
1133 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1134 last_leg = vert.get('legs')[-1]
1135 lines.append("F %4d %4d %4d %4d" % \
1136 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
1137 for d in daughters:
1138 lines.append("D %4d" % d)
1139 if vert in schannels:
1140 lines.append("S %4d %4d %4d %10d" % \
1141 (iFKS,last_leg.get('number'), iconfig,
1142 last_leg.get('id')))
1143 elif vert in tchannels[:-1]:
1144 lines.append("T %4d %4d %4d %10d" % \
1145 (iFKS,last_leg.get('number'), iconfig,
1146 abs(last_leg.get('id'))))
1147
1148
1149 max_leg_number = min(max_leg_number,last_leg.get('number'))
1150 max_iconfig = max(max_iconfig,iconfig)
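# Internal legs carry negative numbers, so the deepest branch is tracked with min(); the declarations file later flips the sign (max_branch_used = -max_leg_number).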
1151
1152
1153 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1154 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1155
1156
1157 lines2.append("# ")
1158 particle_dict = fks_matrix_element.get('processes')[0].get('model').\
1159 get('particle_dict')
1160
1161 for iconf, configs in enumerate(s_and_t_channels):
1162 for vertex in configs[0] + configs[1][:-1]:
1163 leg = vertex.get('legs')[-1]
1164 if leg.get('id') not in particle_dict:
1165
1166 pow_part = 0
1167 else:
1168 particle = particle_dict[leg.get('id')]
1169
1170 pow_part = 1 + int(particle.is_boson())
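# Propagator "power" entry written as POW_D: 1 for fermions, 2 for bosons.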
1171
1172 lines2.append("M %4d %4d %4d %10d " % \
1173 (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
1174 lines2.append("P %4d %4d %4d %4d " % \
1175 (iFKS,leg.get('number'), iconf + 1, pow_part))
1176
1177
1178
1179
1180 if not matrix_element.get_fks_info_list():
1181 born_me = matrix_element.born_matrix_element
1182
1183
1184
1185
1186 bornproc = born_me.get('processes')[0]
1187 colors = [l.get('color') for l in bornproc.get('legs')]
1188
1189 fks_i = len(colors)
1190
1191
1192 fks_j=1
1193 for cpos, col in enumerate(colors):
1194 if col != 1:
1195 fks_j = cpos+1
1196 fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos]
1197
1198
1199 if fks_j > ninitial:
1200 iFKS=1
1201 iconfig = 0
1202 s_and_t_channels = []
1203 mapconfigs = []
1204 base_diagrams = born_me.get('base_amplitude').get('diagrams')
1205 model = born_me.get('base_amplitude').get('process').get('model')
1206 minvert = min([max([len(vert.get('legs')) for vert in \
1207 diag.get('vertices')]) for diag in base_diagrams])
1208
1209 lines.append("# ")
1210 lines.append("# nFKSprocess %d" % iFKS)
1211 for idiag, diag in enumerate(base_diagrams):
1212 if any([len(vert.get('legs')) > minvert for vert in
1213 diag.get('vertices')]):
1214
1215 continue
1216 iconfig = iconfig + 1
1217 helas_diag = born_me.get('diagrams')[idiag]
1218 mapconfigs.append(helas_diag.get('number'))
1219 lines.append("# Diagram %d for nFKSprocess %d" % \
1220 (helas_diag.get('number'),iFKS))
1221
1222 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1223 helas_diag.get('number')))
1224
1225
1226
1227 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1228 get_s_and_t_channels(ninitial, model, 990)
1229
1230 s_and_t_channels.append([schannels, tchannels])
1231
1232
1233 lines.append("F %4d %4d %4d %4d" % \
1234 (iFKS,-1,iconfig,2))
1235
1236 lines.append("D %4d" % nexternal)
1237 lines.append("D %4d" % fks_j)
1238 lines.append("S %4d %4d %4d %10d" % \
1239 (iFKS,-1, iconfig,fks_j_id))
1240
1241
1242
1243
1244 allchannels = schannels
1245 if len(tchannels) > 1:
1246
1247 allchannels = schannels + tchannels
1248
1249 for vert in allchannels:
1250 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1251 last_leg = vert.get('legs')[-1]
1252 lines.append("F %4d %4d %4d %4d" % \
1253 (iFKS,last_leg.get('number')-1, iconfig, len(daughters)))
1254
1255
1256 for i_dau in range(len(daughters)):
1257 if daughters[i_dau] < 0:
1258 daughters[i_dau] += -1
1259
1260 if fks_j in daughters:
1261 daughters[daughters.index(fks_j)] = -1
1262 for d in daughters:
1263 lines.append("D %4d" % d)
1264 if vert in schannels:
1265 lines.append("S %4d %4d %4d %10d" % \
1266 (iFKS,last_leg.get('number')-1, iconfig,
1267 last_leg.get('id')))
1268 elif vert in tchannels[:-1]:
1269 lines.append("T %4d %4d %4d %10d" % \
1270 (iFKS,last_leg.get('number')-1, iconfig,
1271 abs(last_leg.get('id'))))
1272
1273
1274 max_leg_number = min(max_leg_number,last_leg.get('number')-1)
1275 max_iconfig = max(max_iconfig,iconfig)
1276
1277
1278 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1279 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1280
1281
1282 lines2.append("# ")
1283 particle_dict = born_me.get('processes')[0].get('model').\
1284 get('particle_dict')
1285
1286 for iconf, configs in enumerate(s_and_t_channels):
1287 lines2.append("M %4d %4d %4d %10d " % \
1288 (iFKS,-1, iconf + 1, fks_j_id))
1289 pow_part = 1 + int(particle_dict[fks_j_id].is_boson())
1290 lines2.append("P %4d %4d %4d %4d " % \
1291 (iFKS,-1, iconf + 1, pow_part))
1292 for vertex in configs[0] + configs[1][:-1]:
1293 leg = vertex.get('legs')[-1]
1294 if leg.get('id') not in particle_dict:
1295
1296 pow_part = 0
1297 else:
1298 particle = particle_dict[leg.get('id')]
1299
1300 pow_part = 1 + int(particle.is_boson())
1301
1302 lines2.append("M %4d %4d %4d %10d " % \
1303 (iFKS,leg.get('number')-1, iconf + 1, leg.get('id')))
1304 lines2.append("P %4d %4d %4d %4d " % \
1305 (iFKS,leg.get('number')-1, iconf + 1, pow_part))
1306
1307
1308 open(filename,'w').write('\n'.join(lines+lines2))
1309
1310 return max_iconfig, max_leg_number
1311
1312
1313 def write_leshouche_info_declarations(self, writer, nfksconfs, maxproc,
1314 maxflow, nexternal, fortran_model):
1315 """writes the declarations for the variables relevant for leshouche_info
1316 """
1317 lines = []
1318 lines.append('integer maxproc_used, maxflow_used')
1319 lines.append('parameter (maxproc_used = %d)' % maxproc)
1320 lines.append('parameter (maxflow_used = %d)' % maxflow)
1321 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
1322 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
1323 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
1324 lines.append('integer niprocs_d(%d)' % (nfksconfs))
1325
1326 writer.writelines(lines)
1327
1328
1329 def write_genps(self, writer, maxproc, ngraphs, ncolor, maxflow, fortran_model):
1330 """writes the genps.inc file
1331 """
1332 lines = []
1333 lines.append("include 'maxparticles.inc'")
1334 lines.append("include 'maxconfigs.inc'")
1335 lines.append("integer maxproc,ngraphs,ncolor,maxflow")
1336 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \
1337 (maxproc,ngraphs,ncolor,maxflow))
1338 writer.writelines(lines)
1339
1340
1341 def write_leshouche_info_file(self, filename, matrix_element):
1342 """writes the leshouche_info.dat file which contains
1343 the LHA information for all the real-emission processes
1344 """
1345 lines = []
1346 lines.append("# I -> IDUP_D")
1347 lines.append("# M -> MOTHUP_D")
1348 lines.append("# C -> ICOLUP_D")
1349 nfksconfs = len(matrix_element.get_fks_info_list())
1350 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1351
1352 maxproc = 0
1353 maxflow = 0
1354 for i, conf in enumerate(matrix_element.get_fks_info_list()):
1355
1356 (newlines, nprocs, nflows) = self.get_leshouche_lines(
1357 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
1358 lines.extend(newlines)
1359 maxproc = max(maxproc, nprocs)
1360 maxflow = max(maxflow, nflows)
1361
1362
1363 if not matrix_element.get_fks_info_list():
1364 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1)
1365 lines.extend(newlines)
1366
1367
1368 open(filename,'w').write('\n'.join(lines))
1369
1370 return nfksconfs, maxproc, maxflow, nexternal
1371
1372
1373 def write_pdf_wrapper(self, writer, matrix_element, fortran_model):
1374 """writes the wrapper which allows one to choose among the parton luminosities of the different FKS configurations"""
1375
1376 file = \
1377 """double precision function dlum()
1378 implicit none
1379 integer nfksprocess
1380 common/c_nfksprocess/nfksprocess
1381 """
1382 if matrix_element.real_processes:
1383 for n, info in enumerate(matrix_element.get_fks_info_list()):
1384 file += \
1385 """if (nfksprocess.eq.%(n)d) then
1386 call dlum_%(n_me)d(dlum)
1387 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1388 file += \
1389 """
1390 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
1391 stop
1392 endif
1393 return
1394 end
1395 """
1396 else:
1397 file+= \
1398 """call dlum_0(dlum)
1399 return
1400 end
1401 """
1402
1403
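# The generated dlum() dispatches on the nFKSprocess common block and calls the parton-luminosity routine of the selected FKS configuration (dlum_0 when there are no real-emission processes).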
1404 writer.writelines(file)
1405 return 0
1406
1407
1408 def write_real_me_wrapper(self, writer, matrix_element, fortran_model):
1409 """writes the wrapper which allows one to choose among the different real matrix elements"""
1410
1411 file = \
1412 """subroutine smatrix_real(p, wgt)
1413 implicit none
1414 include 'nexternal.inc'
1415 double precision p(0:3, nexternal)
1416 double precision wgt
1417 integer nfksprocess
1418 common/c_nfksprocess/nfksprocess
1419 """
1420 for n, info in enumerate(matrix_element.get_fks_info_list()):
1421 file += \
1422 """if (nfksprocess.eq.%(n)d) then
1423 call smatrix_%(n_me)d(p, wgt)
1424 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1425
1426 if matrix_element.real_processes:
1427 file += \
1428 """
1429 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
1430 stop
1431 endif
1432 return
1433 end
1434 """
1435 else:
1436 file += \
1437 """
1438 wgt=0d0
1439 return
1440 end
1441 """
1442
1443 writer.writelines(file)
1444 return 0
1445
1446
1447 def draw_feynman_diagrams(self, matrix_element):
1448 """Create the ps files containing the feynman diagrams for the born process,
1449 as well as for all the real emission processes"""
1450
1451 filename = 'born.ps'
1452 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
1453 get('base_amplitude').get('diagrams'),
1454 filename,
1455 model=matrix_element.born_matrix_element.\
1456 get('processes')[0].get('model'),
1457 amplitude=True, diagram_type='born')
1458 plot.draw()
1459
1460 for n, fksreal in enumerate(matrix_element.real_processes):
1461 filename = 'matrix_%d.ps' % (n + 1)
1462 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
1463 get('base_amplitude').get('diagrams'),
1464 filename,
1465 model=fksreal.matrix_element.\
1466 get('processes')[0].get('model'),
1467 amplitude=True, diagram_type='real')
1468 plot.draw()
1469
1470
1471 def write_real_matrix_elements(self, matrix_element, fortran_model):
1472 """writes the matrix_i.f files which contain the real matrix elements"""
1473
1474
1475
1476 for n, fksreal in enumerate(matrix_element.real_processes):
1477 filename = 'matrix_%d.f' % (n + 1)
1478 self.write_matrix_element_fks(writers.FortranWriter(filename),
1479 fksreal.matrix_element, n + 1,
1480 fortran_model)
1481
1482 def write_pdf_calls(self, matrix_element, fortran_model):
1483 """writes the parton_lum_i.f files which contain the parton luminosities.
1484 If no real emission exists, write the one for the born"""
1485
1486 if matrix_element.real_processes:
1487 for n, fksreal in enumerate(matrix_element.real_processes):
1488 filename = 'parton_lum_%d.f' % (n + 1)
1489 self.write_pdf_file(writers.FortranWriter(filename),
1490 fksreal.matrix_element, n + 1,
1491 fortran_model)
1492 else:
1493 filename = 'parton_lum_0.f'
1494 self.write_pdf_file(writers.FortranWriter(filename),
1495 matrix_element.born_matrix_element, 0,
1496 fortran_model)
1497
1498
1499 def generate_born_fks_files(self, matrix_element, fortran_model, me_number, path):
1500 """generates the files related to the born amplitude which will
1501 be needed by the P* directories"""
1502 pathdir = os.getcwd()
1503
1504 filename = 'born.f'
1505 calls_born, ncolor_born = \
1506 self.write_born_fks(writers.FortranWriter(filename),\
1507 matrix_element,
1508 fortran_model)
1509
1510 filename = 'born_hel.f'
1511 self.write_born_hel(writers.FortranWriter(filename),\
1512 matrix_element,
1513 fortran_model)
1514
1515
1516 filename = 'born_conf.inc'
1517 nconfigs, mapconfigs, s_and_t_channels = \
1518 self.write_configs_file(
1519 writers.FortranWriter(filename),
1520 matrix_element.born_matrix_element,
1521 fortran_model)
1522
1523 filename = 'born_props.inc'
1524 self.write_props_file(writers.FortranWriter(filename),
1525 matrix_element.born_matrix_element,
1526 fortran_model,
1527 s_and_t_channels)
1528
1529 filename = 'born_decayBW.inc'
1530 self.write_decayBW_file(writers.FortranWriter(filename),
1531 s_and_t_channels)
1532
1533 filename = 'born_leshouche.inc'
1534 nflows = self.write_leshouche_file(writers.FortranWriter(filename),
1535 matrix_element.born_matrix_element,
1536 fortran_model)
1537
1538 filename = 'born_nhel.inc'
1539 self.write_born_nhel_file(writers.FortranWriter(filename),
1540 matrix_element.born_matrix_element, nflows,
1541 fortran_model,
1542 ncolor_born)
1543
1544 filename = 'born_ngraphs.inc'
1545 self.write_ngraphs_file(writers.FortranWriter(filename),
1546 matrix_element.born_matrix_element.get_number_of_amplitudes())
1547
1548 filename = 'ncombs.inc'
1549 self.write_ncombs_file(writers.FortranWriter(filename),
1550 matrix_element.born_matrix_element,
1551 fortran_model)
1552
1553 filename = 'born_maxamps.inc'
1554 maxamps = len(matrix_element.get('diagrams'))
1555 maxflows = ncolor_born
1556 self.write_maxamps_file(writers.FortranWriter(filename),
1557 maxamps,
1558 maxflows,
1559 max([len(matrix_element.get('processes')) for me in \
1560 matrix_element.born_matrix_element]),1)
1561
1562 filename = 'config_subproc_map.inc'
1563 self.write_config_subproc_map_file(writers.FortranWriter(filename),
1564 s_and_t_channels)
1565
1566 filename = 'coloramps.inc'
1567 self.write_coloramps_file(writers.FortranWriter(filename),
1568 mapconfigs,
1569 matrix_element.born_matrix_element,
1570 fortran_model)
1571
1572
1573 filename = ['sborn_sf.f', 'sborn_sf_dum.f']
1574 for i, links in enumerate([matrix_element.color_links, []]):
1575 self.write_sborn_sf(writers.FortranWriter(filename[i]),
1576 links,
1577 fortran_model)
1578 self.color_link_files = []
1579 for i in range(len(matrix_element.color_links)):
1580 filename = 'b_sf_%3.3d.f' % (i + 1)
1581 self.color_link_files.append(filename)
1582 self.write_b_sf_fks(writers.FortranWriter(filename),
1583 matrix_element, i,
1584 fortran_model)
1585
1586
1587 def generate_virtuals_from_OLP(self, process_list, export_path, OLP):  # name/signature inferred
1588 """Generates the library for computing the loop matrix elements
1589 necessary for this process using the OLP specified."""
1590
1591
1592 virtual_path = pjoin(export_path,'OLP_virtuals')
1593 if not os.path.exists(virtual_path):
1594 os.makedirs(virtual_path)
1595 filename = os.path.join(virtual_path,'OLE_order.lh')
1596 self.write_lh_order(filename, process_list, OLP)
1597
1598 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\
1599 'Please check the virt_generation.log file in %s.'\
1600 %str(pjoin(virtual_path,'virt_generation.log'))
1601
1602
1603 if OLP=='GoSam':
1604 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1605 'GoSam','makevirt'),pjoin(virtual_path,'makevirt'))
1606 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1607 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc'))
1608 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path)
1609
1610 logger.info('Generating the loop matrix elements with %s...'%OLP)
1611 virt_generation_log = \
1612 open(pjoin(virtual_path,'virt_generation.log'), 'w')
1613 retcode = subprocess.call(['./makevirt'],cwd=virtual_path,
1614 stdout=virt_generation_log, stderr=virt_generation_log)
1615 virt_generation_log.close()
1616
1617 possible_other_extensions = ['so','dylib']
1618 shared_lib_ext='so'
1619 for ext in possible_other_extensions:
1620 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib',
1621 'libgolem_olp.'+ext)):
1622 shared_lib_ext = ext
1623
1624
1625 files_to_check = ['olp_module.mod',str(pjoin('lib',
1626 'libgolem_olp.'+shared_lib_ext))]
1627 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
1628 'Virtuals',f)) for f in files_to_check]):
1629 raise fks_common.FKSProcessError(fail_msg)
1630
1631 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext),
1632 pjoin(export_path,'lib'))
1633
1634
1635 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read()
1636 make_opts=open(pjoin(export_path,'Source','make_opts'),'w')
1637 if OLP=='GoSam':
1638 if platform.system().lower()=='darwin':
1639
1640
1641 make_opts_content=make_opts_content.replace('libOLP=',
1642 'libOLP=-Wl,-lgolem_olp')
1643 else:
1644
1645
1646
1647
1648
1649
1650
1651 make_opts_content=make_opts_content.replace('libOLP=',
1652 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp')
1653
1654
1655 make_opts.write(make_opts_content)
1656 make_opts.close()
1657
1658
1659
1660
1661
1662 proc_to_label = self.parse_contract_file(
1663 pjoin(virtual_path,'OLE_order.olc'))
1664
1665 self.write_BinothLHA_inc(process_list,proc_to_label,\
1666 pjoin(export_path,'SubProcesses'))
1667
1668
1669 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1670
1671 def write_BinothLHA_inc(self, processes, proc_to_label, SubProcPath):
1672 """ Write the file Binoth_proc.inc in each SubProcess directory so as
1673 to provide the right process_label to use in the OLP call to get the
1674 loop matrix element evaluation. proc_to_label is a dictionary in the
1675 format returned by the function parse_contract_file."""
1676
1677 for proc in processes:
1678 name = "P%s"%proc.shell_string()
1679 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \
1680 not leg.get('state')]),
1681 tuple([leg.get('id') for leg in proc.get('legs') if \
1682 leg.get('state')]))
1683 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w')
1684 try:
1685 incFile.write(
1686 """ INTEGER PROC_LABEL
1687 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs]))
1688 except KeyError:
1689 raise fks_common.FKSProcessError('Could not find the target'+\
1690 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\
1691 ' the proc_to_label argument in write_BinothLHA_inc.')
1692 incFile.close()
1693
1694 def parse_contract_file(self, contract_file_path):
1695 """ Parses the BLHA contract file, makes sure all parameters could be
1696 understood by the OLP and returns a mapping of the processes (characterized
1697 by the pdg's of the initial and final state particles) to their process
1698 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}.
1699 """
1700
1701 proc_def_to_label = {}
1702
1703 if not os.path.exists(contract_file_path):
1704 raise fks_common.FKSProcessError('Could not find the contract file'+\
1705 ' OLE_order.olc in %s.'%str(contract_file_path))
1706
1707 comment_re=re.compile(r"^\s*#")
1708 proc_def_re=re.compile(
1709 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+
1710 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
1711 line_OK_re=re.compile(r"^.*\|\s*OK")
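# A process-definition line looks like (illustrative): "21 21 -> 6 -6 | 1 3", i.e. initial-state pdgs -> final-state pdgs | proc_class proc_label; every other non-comment line must contain "| OK".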
1712 for line in open(contract_file_path):
1713
1714 if not comment_re.match(line) is None:
1715 continue
1716
1717 proc_def = proc_def_re.match(line)
1718 if not proc_def is None:
1719 if int(proc_def.group('proc_class'))!=1:
1720 raise fks_common.FKSProcessError(
1721 'aMCatNLO can only handle loop processes generated by the OLP which have a '+\
1722 'process class attribute equal to 1. Found %s instead in: \n%s'\
1723 %(proc_def.group('proc_class'),line))
1724 in_pdgs=tuple([int(in_pdg) for in_pdg in \
1725 proc_def.group('in_pdgs').split()])
1726 out_pdgs=tuple([int(out_pdg) for out_pdg in \
1727 proc_def.group('out_pdgs').split()])
1728 proc_def_to_label[(in_pdgs,out_pdgs)]=\
1729 int(proc_def.group('proc_label'))
1730 continue
1731
1732 if line_OK_re.match(line) is None:
1733 raise fks_common.FKSProcessError(
1734 'The OLP could not process the following line: \n%s'%line)
1735
1736 return proc_def_to_label
1737
1738
1739 def generate_virt_directory(self, loop_matrix_element, fortran_model, dir_name):
1740 """writes the V**** directory inside the P**** directories specified in
1741 dir_name"""
1742
1743 cwd = os.getcwd()
1744
1745 matrix_element = loop_matrix_element
1746
1747
1748 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
1749 try:
1750 os.mkdir(dirpath)
1751 except os.error as error:
1752 logger.warning(error.strerror + " " + dirpath)
1753
1754
1755 name = "V%s" % matrix_element.get('processes')[0].shell_string()
1756 dirpath = os.path.join(dir_name, name)
1757
1758 try:
1759 os.mkdir(dirpath)
1760 except os.error as error:
1761 logger.warning(error.strerror + " " + dirpath)
1762
1763 try:
1764 os.chdir(dirpath)
1765 except os.error:
1766 logger.error('Could not cd to directory %s' % dirpath)
1767 return 0
1768
1769 logger.info('Creating files in directory %s' % name)
1770
1771
1772 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1773
1774 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
1775
1776 filename = 'born_matrix.f'
1777 calls = self.write_bornmatrix(
1778 writers.FortranWriter(filename),
1779 matrix_element,
1780 fortran_model)
1781
1782 filename = 'nexternal.inc'
1783 self.write_nexternal_file(writers.FortranWriter(filename),
1784 nexternal, ninitial)
1785
1786 filename = 'pmass.inc'
1787 self.write_pmass_file(writers.FortranWriter(filename),
1788 matrix_element)
1789
1790 filename = 'ngraphs.inc'
1791 self.write_ngraphs_file(writers.FortranWriter(filename),
1792 len(matrix_element.get_all_amplitudes()))
1793
1794 filename = "loop_matrix.ps"
1795 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
1796 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
1797 filename,
1798 model=matrix_element.get('processes')[0].get('model'),
1799 amplitude='')
1800 logger.info("Drawing loop Feynman diagrams for " + \
1801 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1802 plot.draw()
1803
1804 filename = "born_matrix.ps"
1805 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
1806 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\
1807 get('model'),amplitude='')
1808 logger.info("Generating born Feynman diagrams for " + \
1809 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1810 plot.draw()
1811
1812
1813
1814
1815 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
1816 open('unique_id.inc','w').write(
1817 """ integer UNIQUE_ID
1818 parameter(UNIQUE_ID=1)""")
1819
1820 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
1821 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
1822 'MadLoopCommons.f','MadLoopParams.inc']
1823
1824
1825 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
1826 pjoin('..','MadLoop5_resources'))
1827
1828 for file in linkfiles:
1829 ln('../../%s' % file)
1830
1831 os.system("ln -s ../../makefile_loop makefile")
1832
1833 linkfiles = ['mpmodule.mod']
1834
1835 for file in linkfiles:
1836 ln('../../../lib/%s' % file)
1837
1838 linkfiles = ['coef_specs.inc']
1839
1840 for file in linkfiles:
1841 ln('../../../Source/DHELAS/%s' % file)
1842
1843
1844 os.chdir(cwd)
1845
1846 if not calls:
1847 calls = 0
1848 return calls
1849
1850 def get_qed_qcd_orders_from_weighted(self, nexternal, weighted):
1851 """computes the QED/QCD orders from the number of external particles
1852 and the WEIGHTED order"""
1853
1854
1855 QED = weighted - nexternal + 2
1856 QCD = weighted - 2 * QED
1857 return QED, QCD
1858
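# A worked example of the relations above, assuming the usual aMC@NLO
# convention WEIGHTED = QCD + 2*QED together with QED + QCD = nexternal - 2
# for a tree-level process with three-point vertices: a hypothetical
# p p > t t~ h born has nexternal = 5 and WEIGHTED = 4, so
#   QED = 4 - 5 + 2 = 1
#   QCD = 4 - 2*1   = 2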
1859
1860
1861
1862
1863
1864
1866 """Creates the OLE_order.lh file. This function should be edited according
1867 to the OLP in use; for now it is generic."""
1868
1869
1870 if len(process_list)==0:
1871 raise fks_common.FKSProcessError('No matrix elements provided to '+\
1872 'the function write_lh_order.')
1873 return
1874
1875
1876
1877 orders = process_list[0].get('orders')
1878 if 'QED' in list(orders.keys()) and 'QCD' in list(orders.keys()):
1879 QED=orders['QED']
1880 QCD=orders['QCD']
1881 elif 'QED' in list(orders.keys()):
1882 QED=orders['QED']
1883 QCD=0
1884 elif 'QCD' in list(orders.keys()):
1885 QED=0
1886 QCD=orders['QCD']
1887 else:
1888 QED, QCD = self.get_qed_qcd_orders_from_weighted(\
1889 len(process_list[0].get('legs')),
1890 orders['WEIGHTED'])
1891
1892 replace_dict = {}
1893 replace_dict['mesq'] = 'CHaveraged'
1894 replace_dict['corr'] = ' '.join(process_list[0].\
1895 get('perturbation_couplings'))
1896 replace_dict['irreg'] = 'CDR'
1897 replace_dict['aspow'] = QCD
1898 replace_dict['aepow'] = QED
1899 replace_dict['modelfile'] = './param_card.dat'
1900 replace_dict['params'] = 'alpha_s'
1901 proc_lines=[]
1902 for proc in process_list:
1903 proc_lines.append('%s -> %s' % \
1904 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
1905 ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
1906 replace_dict['pdgs'] = '\n'.join(proc_lines)
1907 replace_dict['symfin'] = 'Yes'
1908 content = \
1909 "#OLE_order written by MadGraph5_aMC@NLO\n\
1910 \n\
1911 MatrixElementSquareType %(mesq)s\n\
1912 CorrectionType %(corr)s\n\
1913 IRregularisation %(irreg)s\n\
1914 AlphasPower %(aspow)d\n\
1915 AlphaPower %(aepow)d\n\
1916 NJetSymmetrizeFinal %(symfin)s\n\
1917 ModelFile %(modelfile)s\n\
1918 Parameters %(params)s\n\
1919 \n\
1920 # process\n\
1921 %(pdgs)s\n\
1922 " % replace_dict
1923
1924 file = open(filename, 'w')
1925 file.write(content)
1926 file.close()
1927 return
1928
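# For illustration, the template above produces an OLE_order.lh of roughly
# this shape for a hypothetical u u~ > t t~ process at NLO in QCD (the PDG
# codes and coupling powers depend on the actual process list):
#
#   #OLE_order written by MadGraph5_aMC@NLO
#
#   MatrixElementSquareType CHaveraged
#   CorrectionType QCD
#   IRregularisation CDR
#   AlphasPower 2
#   AlphaPower 0
#   NJetSymmetrizeFinal Yes
#   ModelFile ./param_card.dat
#   Parameters alpha_s
#
#   # process
#   2 -2 -> 6 -6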
1929
1930
1931
1932
1933
1935 """Export a matrix element to a born.f file in MadFKS format"""
1936
1937 matrix_element = fksborn.born_matrix_element
1938
1939 if not matrix_element.get('processes') or \
1940 not matrix_element.get('diagrams'):
1941 return 0
1942
1943 if not isinstance(writer, writers.FortranWriter):
1944 raise writers.FortranWriter.FortranWriterError(\
1945 "writer not FortranWriter")
1946
1947 writers.FortranWriter.downcase = False
1948
1949 replace_dict = {}
1950
1951
1952 info_lines = self.get_mg5_info_lines()
1953 replace_dict['info_lines'] = info_lines
1954
1955
1956 process_lines = self.get_process_info_lines(matrix_element)
1957 replace_dict['process_lines'] = process_lines
1958
1959
1960
1961 ncomb = matrix_element.get_helicity_combinations()
1962 replace_dict['ncomb'] = ncomb
1963
1964
1965 helicity_lines = self.get_helicity_lines(matrix_element)
1966 replace_dict['helicity_lines'] = helicity_lines
1967
1968
1969 ic_line = self.get_ic_line(matrix_element)
1970 replace_dict['ic_line'] = ic_line
1971
1972
1973
1974
1975
1976
1977 ngraphs = matrix_element.get_number_of_amplitudes()
1978 replace_dict['ngraphs'] = ngraphs
1979
1980
1981 nwavefuncs = matrix_element.get_number_of_wavefunctions()
1982 replace_dict['nwavefuncs'] = nwavefuncs
1983
1984
1985 ncolor = max(1, len(matrix_element.get('color_basis')))
1986 replace_dict['ncolor'] = ncolor
1987
1988
1989 color_data_lines = self.get_color_data_lines(matrix_element)
1990 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
1991
1992
1993 helas_calls = fortran_model.get_matrix_element_calls(\
1994 matrix_element)
1995 replace_dict['helas_calls'] = "\n".join(helas_calls)
1996
1997
1998 amp2_lines = self.get_amp2_lines(matrix_element)
1999 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2000
2001
2002 jamp_lines = self.get_JAMP_lines(matrix_element)
2003 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2004
2005
2006 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2007 replace_dict['wavefunctionsize'] = 20
2008 else:
2009 replace_dict['wavefunctionsize'] = 8
2010
2011
2012 ij_lines = self.get_ij_lines(fksborn)
2013 replace_dict['ij_lines'] = '\n'.join(ij_lines)
2014
2015
2016 den_factor_lines = self.get_den_factor_lines(fksborn)
2017 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2018
2019
2020 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1)
2021
2022 file = open(os.path.join(_file_path, \
2023 'iolibs/template_files/born_fks.inc')).read()
2024 file = file % replace_dict
2025
2026
2027 writer.writelines(file)
2028
2029 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2030
2031
2033 """Export a matrix element to a born_hel.f file in MadFKS format"""
2034
2035 matrix_element = fksborn.born_matrix_element
2036
2037 if not matrix_element.get('processes') or \
2038 not matrix_element.get('diagrams'):
2039 return 0
2040
2041 if not isinstance(writer, writers.FortranWriter):
2042 raise writers.FortranWriter.FortranWriterError(\
2043 "writer not FortranWriter")
2044
2045 writers.FortranWriter.downcase = False
2046
2047 replace_dict = {}
2048
2049
2050 info_lines = self.get_mg5_info_lines()
2051 replace_dict['info_lines'] = info_lines
2052
2053
2054 process_lines = self.get_process_info_lines(matrix_element)
2055 replace_dict['process_lines'] = process_lines
2056
2057
2058
2059 ncomb = matrix_element.get_helicity_combinations()
2060 replace_dict['ncomb'] = ncomb
2061
2062
2063 helicity_lines = self.get_helicity_lines(matrix_element)
2064 replace_dict['helicity_lines'] = helicity_lines
2065
2066
2067 ic_line = self.get_ic_line(matrix_element)
2068 replace_dict['ic_line'] = ic_line
2069
2070
2071
2072
2073
2074
2075 ngraphs = matrix_element.get_number_of_amplitudes()
2076 replace_dict['ngraphs'] = ngraphs
2077
2078
2079 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2080 replace_dict['nwavefuncs'] = nwavefuncs
2081
2082
2083 ncolor = max(1, len(matrix_element.get('color_basis')))
2084 replace_dict['ncolor'] = ncolor
2085
2086
2087 color_data_lines = self.get_color_data_lines(matrix_element)
2088 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2089
2090
2091 amp2_lines = self.get_amp2_lines(matrix_element)
2092 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2093
2094
2095 jamp_lines = self.get_JAMP_lines(matrix_element)
2096 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2097
2098
2099 den_factor_lines = self.get_den_factor_lines(fksborn)
2100 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2101
2102
2103 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2104
2105 file = open(os.path.join(_file_path, \
2106 'iolibs/template_files/born_fks_hel.inc')).read()
2107 file = file % replace_dict
2108
2109
2110 writer.writelines(file)
2111
2112 return
2113
2114
2115
2116
2117
2118
2120 """Creates the sborn_sf.f file, containing the calls to the different
2121 color-linked borns"""
2122
2123 replace_dict = {}
2124 nborns = len(color_links)
2125 ifkss = []
2126 iborns = []
2127 mms = []
2128 nns = []
2129 iflines = "\n"
2130
2131
2132 file = """subroutine sborn_sf(p_born,m,n,wgt)
2133 implicit none
2134 include "nexternal.inc"
2135 double precision p_born(0:3,nexternal-1),wgt
2136 double complex wgt1(2)
2137 integer m,n \n"""
2138
2139 if nborns > 0:
2140
2141 for i, c_link in enumerate(color_links):
2142 iborn = i+1
2143
2144 iff = {True : 'if', False : 'elseif'}[i==0]
2145
2146 m, n = c_link['link']
2147
2148 if m != n:
2149 iflines += \
2150 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2151 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
2152 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2153 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2154 else:
2155 iflines += \
2156 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2157 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
2158 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2159 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2160
2161
2162 file += iflines + \
2163 """else
2164 wgt = 0d0
2165 endif
2166
2167 return
2168 end"""
2169 elif nborns == 0:
2170
2171 file+="""
2172 c This is a dummy function because
2173 c this subdir has no soft singularities
2174 wgt = 0d0
2175
2176 return
2177 end"""
2178
2179
2180 writer.writelines(file)
2181
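# As a sketch, two hypothetical color links (1,2) and (1,1) would lead to a
# sborn_sf.f containing an if/elseif chain of the form
#
#   c b_sf_001 links partons 1 and 2
#   if ((m.eq.1 .and. n.eq.2).or.(m.eq.2 .and. n.eq.1)) then
#   call sb_sf_001(p_born,wgt)
#   c b_sf_002 links partons 1 and 1
#   elseif (m.eq.1 .and. n.eq.1) then
#   call sb_sf_002(p_born,wgt)
#   else
#   wgt = 0d0
#   endif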
2182
2183
2184
2185
2186
2188 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format"""
2189
2190 matrix_element = copy.copy(fksborn.born_matrix_element)
2191
2192 if not matrix_element.get('processes') or \
2193 not matrix_element.get('diagrams'):
2194 return 0
2195
2196 if not isinstance(writer, writers.FortranWriter):
2197 raise writers.FortranWriter.FortranWriterError(\
2198 "writer not FortranWriter")
2199
2200 writers.FortranWriter.downcase = False
2201
2202 iborn = i + 1
2203 link = fksborn.color_links[i]
2204
2205 replace_dict = {}
2206
2207 replace_dict['iborn'] = iborn
2208
2209
2210 info_lines = self.get_mg5_info_lines()
2211 replace_dict['info_lines'] = info_lines
2212
2213
2214 process_lines = self.get_process_info_lines(matrix_element)
2215 replace_dict['process_lines'] = process_lines + \
2216 "\nc spectators: %d %d \n" % tuple(link['link'])
2217
2218
2219 ncomb = matrix_element.get_helicity_combinations()
2220 replace_dict['ncomb'] = ncomb
2221
2222
2223 helicity_lines = self.get_helicity_lines(matrix_element)
2224 replace_dict['helicity_lines'] = helicity_lines
2225
2226
2227 ic_line = self.get_ic_line(matrix_element)
2228 replace_dict['ic_line'] = ic_line
2229
2230
2231 den_factor_lines = self.get_den_factor_lines(fksborn)
2232 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2233
2234
2235 ngraphs = matrix_element.get_number_of_amplitudes()
2236 replace_dict['ngraphs'] = ngraphs
2237
2238
2239 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2240 replace_dict['nwavefuncs'] = nwavefuncs
2241
2242
2243 ncolor1 = max(1, len(link['orig_basis']))
2244 replace_dict['ncolor1'] = ncolor1
2245 ncolor2 = max(1, len(link['link_basis']))
2246 replace_dict['ncolor2'] = ncolor2
2247
2248
2249 color_data_lines = self.get_color_data_lines_from_color_matrix(\
2250 link['link_matrix'])
2251 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2252
2253
2254 amp2_lines = self.get_amp2_lines(matrix_element)
2255 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2256
2257
2258 jamp_lines = self.get_JAMP_lines(matrix_element)
2259 new_jamp_lines = []
2260 for line in jamp_lines:
2261 line = line.replace('JAMP', 'JAMP1')
2262 new_jamp_lines.append(line)
2263 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines)
2264
2265 matrix_element.set('color_basis', link['link_basis'] )
2266 jamp_lines = self.get_JAMP_lines(matrix_element)
2267 new_jamp_lines = []
2268 for line in jamp_lines:
2269 line = line.replace( 'JAMP', 'JAMP2')
2270 new_jamp_lines.append(line)
2271 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines)
2272
2273
2274
2275 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2276
2277 file = open(os.path.join(_file_path, \
2278 'iolibs/template_files/b_sf_xxx_fks.inc')).read()
2279 file = file % replace_dict
2280
2281
2282 writer.writelines(file)
2283
2284 return 0, ncolor1
2285
2286
2287
2288
2289
2290
2292 """Write the born_nhel.inc file for MG4."""
2293
2294 ncomb = matrix_element.get_helicity_combinations()
2295 file = " integer max_bhel, max_bcol \n"
2296 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \
2297 (ncomb, nflows)
2298
2299
2300 writer.writelines(file)
2301
2302 return True
2303
2304
2305
2306
2308 """Writes the content of nFKSconfigs.inc, which just gives the
2309 total number of FKS directories as a parameter.
2310 nFKSconfigs is always >=1 (a fake configuration is used for LOonly)"""
2311 replace_dict = {}
2312 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)
2313 content = \
2314 """ INTEGER FKS_CONFIGS
2315 PARAMETER (FKS_CONFIGS=%(nconfs)d)
2316
2317 """ % replace_dict
2318
2319 writer.writelines(content)
2320
2321
2322
2323
2324
2326 """Writes the content of fks_info.inc, which lists the informations on the
2327 possible splittings of the born ME.
2328 nconfs is always >=1 (use a fake configuration for LOonly).
2329 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and
2330 the last colored particle as j_fks."""
2331
2332 replace_dict = {}
2333 fks_info_list = fksborn.get_fks_info_list()
2334 replace_dict['nconfs'] = max(len(fks_info_list), 1)
2335
2336
2337 if len(fks_info_list) > 0:
2338 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \
2339 for info in fks_info_list])
2340 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \
2341 for info in fks_info_list])
2342
2343 col_lines = []
2344 pdg_lines = []
2345 charge_lines = []
2346 fks_j_from_i_lines = []
2347 for i, info in enumerate(fks_info_list):
2348 col_lines.append( \
2349 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2350 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) ))
2351 pdg_lines.append( \
2352 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2353 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
2354 charge_lines.append(\
2355 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\
2356 % (i + 1, ', '.join('%19.15fd0' % charg\
2357 for charg in fksborn.real_processes[info['n_me']-1].charges) ))
2358 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\
2359 i + 1))
2360 else:
2361
2362
2363
2364 bornproc = fksborn.born_matrix_element.get('processes')[0]
2365 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
2366 colors = [l.get('color') for l in bornproc.get('legs')] + [8]
2367 charges = [0.] * len(colors)
2368
2369 fks_i = len(colors)
2370
2371
2372 fks_j=1
2373 for cpos, col in enumerate(colors[:-1]):
2374 if col != 1:
2375 fks_j = cpos+1
2376
2377 fks_i_values = str(fks_i)
2378 fks_j_values = str(fks_j)
2379 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2380 % ', '.join([str(col) for col in colors])]
2381 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2382 % ', '.join([str(pdg) for pdg in pdgs])]
2383 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2384 % ', '.join('%19.15fd0' % charg for charg in charges)]
2385 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \
2386 % (fks_i, fks_j)]
2387
2388
2389 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values
2390 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values
2391 replace_dict['col_lines'] = '\n'.join(col_lines)
2392 replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
2393 replace_dict['charge_lines'] = '\n'.join(charge_lines)
2394 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)
2395
2396 content = \
2397 """ INTEGER IPOS, JPOS
2398 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d)
2399 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL)
2400 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL)
2401 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL)
2402
2403 %(fks_i_line)s
2404 %(fks_j_line)s
2405
2406 %(fks_j_from_i_lines)s
2407
2408 C
2409 C Particle type:
2410 C octet = 8, triplet = 3, singlet = 1
2411 %(col_lines)s
2412
2413 C
2414 C Particle type according to PDG:
2415 C
2416 %(pdg_lines)s
2417
2418 C
2419 C Particle charge:
2420 C charge is set to 0. for QCD corrections, where it is irrelevant
2421 %(charge_lines)s
2422 """ % replace_dict
2423 if not isinstance(writer, writers.FortranWriter):
2424 raise writers.FortranWriter.FortranWriterError(\
2425 "writer not FortranWriter")
2426
2427 writers.FortranWriter.downcase = False
2428
2429 writer.writelines(content)
2430
2431 return True
2432
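# Sketch of the kind of DATA statements written here, for a hypothetical
# u u~ > t t~ born with two FKS configurations (the actual values come from
# the real-emission processes):
#
#   data fks_i_D / 5, 5 /
#   data fks_j_D / 1, 2 /
#   DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / 3, -3, 3, -3, 8 /
#   DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / 2, -2, 6, -6, 21 /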
2433
2434
2435
2436
2437
2439 """Export a matrix element to a matrix.f file in MG4 madevent format"""
2440
2441 if not matrix_element.get('processes') or \
2442 not matrix_element.get('diagrams'):
2443 return 0,0
2444
2445 if not isinstance(writer, writers.FortranWriter):
2446 raise writers.FortranWriter.FortranWriterError(\
2447 "writer not FortranWriter")
2448
2449 writers.FortranWriter.downcase = False
2450
2451 replace_dict = {}
2452 replace_dict['N_me'] = n
2453
2454
2455 info_lines = self.get_mg5_info_lines()
2456 replace_dict['info_lines'] = info_lines
2457
2458
2459 process_lines = self.get_process_info_lines(matrix_element)
2460 replace_dict['process_lines'] = process_lines
2461
2462
2463 ncomb = matrix_element.get_helicity_combinations()
2464 replace_dict['ncomb'] = ncomb
2465
2466
2467 helicity_lines = self.get_helicity_lines(matrix_element)
2468 replace_dict['helicity_lines'] = helicity_lines
2469
2470
2471 ic_line = self.get_ic_line(matrix_element)
2472 replace_dict['ic_line'] = ic_line
2473
2474
2475
2476 den_factor_line = self.get_den_factor_line(matrix_element)
2477 replace_dict['den_factor_line'] = den_factor_line
2478
2479
2480 ngraphs = matrix_element.get_number_of_amplitudes()
2481 replace_dict['ngraphs'] = ngraphs
2482
2483
2484 ncolor = max(1, len(matrix_element.get('color_basis')))
2485 replace_dict['ncolor'] = ncolor
2486
2487
2488 color_data_lines = self.get_color_data_lines(matrix_element)
2489 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2490
2491
2492 helas_calls = fortran_model.get_matrix_element_calls(\
2493 matrix_element)
2494 replace_dict['helas_calls'] = "\n".join(helas_calls)
2495
2496
2497
2498 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2499 replace_dict['nwavefuncs'] = nwavefuncs
2500
2501
2502 amp2_lines = self.get_amp2_lines(matrix_element)
2503 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2504
2505
2506 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2507 replace_dict['wavefunctionsize'] = 20
2508 else:
2509 replace_dict['wavefunctionsize'] = 8
2510
2511
2512 jamp_lines = self.get_JAMP_lines(matrix_element)
2513
2514 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2515
2516 realfile = open(os.path.join(_file_path, \
2517 'iolibs/template_files/realmatrix_fks.inc')).read()
2518
2519 realfile = realfile % replace_dict
2520
2521
2522 writer.writelines(realfile)
2523
2524 return len([call for call in helas_calls if call.find('#') != 0]), ncolor
2525
2526
2527
2528
2529
2531
2532 """Write the auto_dsig.f file for MadFKS, which contains
2533 pdf call information"""
2534
2535 if not matrix_element.get('processes') or \
2536 not matrix_element.get('diagrams'):
2537 return 0
2538
2539 nexternal, ninitial = matrix_element.get_nexternal_ninitial()
2540
2541 if ninitial < 1 or ninitial > 2:
2542 raise writers.FortranWriter.FortranWriterError("""Need ninitial = 1 or 2 to write auto_dsig file""")
2543
2544 replace_dict = {}
2545
2546 replace_dict['N_me'] = n
2547
2548
2549 info_lines = self.get_mg5_info_lines()
2550 replace_dict['info_lines'] = info_lines
2551
2552
2553 process_lines = self.get_process_info_lines(matrix_element)
2554 replace_dict['process_lines'] = process_lines
2555
2556 pdf_vars, pdf_data, pdf_lines = \
2557 self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
2558 replace_dict['pdf_vars'] = pdf_vars
2559 replace_dict['pdf_data'] = pdf_data
2560 replace_dict['pdf_lines'] = pdf_lines
2561
2562 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \
2563 self.get_pdf_lines_mir(matrix_element, ninitial, False, True)
2564 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr
2565
2566 file = open(os.path.join(_file_path, \
2567 'iolibs/template_files/parton_lum_n_fks.inc')).read()
2568 file = file % replace_dict
2569
2570
2571 writer.writelines(file)
2572
2573
2574
2575
2576
2577
2578
2580 """Write the coloramps.inc file for MadEvent"""
2581
2582 lines = []
2583 lines.append( "logical icolamp(%d,%d,1)" % \
2584 (max(len(list(matrix_element.get('color_basis').keys())), 1),
2585 len(mapconfigs)))
2586
2587 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1)
2588
2589
2590 writer.writelines(lines)
2591
2592 return True
2593
2594
2595
2596
2597
2598
2600 """Write the leshouche.inc file for MG4"""
2601
2602
2603 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2604
2605 lines = []
2606 for iproc, proc in enumerate(matrix_element.get('processes')):
2607 legs = proc.get_legs_with_decays()
2608 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \
2609 (iproc + 1, nexternal,
2610 ",".join([str(l.get('id')) for l in legs])))
2611 for i in [1, 2]:
2612 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \
2613 (i, iproc + 1, nexternal,
2614 ",".join([ "%3r" % 0 ] * ninitial + \
2615 [ "%3r" % i ] * (nexternal - ninitial))))
2616
2617
2618
2619 if iproc == 0:
2620
2621 if not matrix_element.get('color_basis'):
2622 for i in [1, 2]:
2623 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \
2624 (i, nexternal,
2625 ",".join([ "%3r" % 0 ] * nexternal)))
2626 color_flow_list = []
2627
2628 else:
2629
2630 repr_dict = {}
2631 for l in legs:
2632 repr_dict[l.get('number')] = \
2633 proc.get('model').get_particle(l.get('id')).get_color()\
2634 * (-1)**(1+l.get('state'))
2635
2636 color_flow_list = \
2637 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2638 ninitial)
2639
2640 for cf_i, color_flow_dict in enumerate(color_flow_list):
2641 for i in [0, 1]:
2642 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \
2643 (i + 1, cf_i + 1, nexternal,
2644 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
2645 for l in legs])))
2646
2647
2648 writer.writelines(lines)
2649
2650 return len(color_flow_list)
2651
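# Sketch of the DATA statements produced for a hypothetical u u~ > t t~
# subprocess (nexternal = 4); the ICOLUP color tags shown are illustrative,
# the real ones come from color_flow_decomposition:
#
#   DATA (IDUP(i,1),i=1,4)/2,-2,6,-6/
#   DATA (MOTHUP(1,i,  1),i=1, 4)/  0,  0,  1,  1/
#   DATA (MOTHUP(2,i,  1),i=1, 4)/  0,  0,  2,  2/
#   DATA (ICOLUP(1,i,  1),i=1, 4)/501,  0,502,  0/
#   DATA (ICOLUP(2,i,  1),i=1, 4)/  0,502,  0,501/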
2652
2653
2654
2655
2656
2658 """Write the configs.inc file for MadEvent"""
2659
2660
2661 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2662 lines = []
2663
2664 iconfig = 0
2665
2666 s_and_t_channels = []
2667 mapconfigs = []
2668
2669 model = matrix_element.get('processes')[0].get('model')
2670
2671
2672 base_diagrams = matrix_element.get('base_amplitude').get('diagrams')
2673 model = matrix_element.get('base_amplitude').get('process').get('model')
2674 minvert = min([max([len(vert.get('legs')) for vert in \
2675 diag.get('vertices')]) for diag in base_diagrams])
2676
2677 for idiag, diag in enumerate(base_diagrams):
2678 if any([len(vert.get('legs')) > minvert for vert in
2679 diag.get('vertices')]):
2680
2681 continue
2682 iconfig = iconfig + 1
2683 helas_diag = matrix_element.get('diagrams')[idiag]
2684 mapconfigs.append(helas_diag.get('number'))
2685 lines.append("# Diagram %d, Amplitude %d" % \
2686 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number']))
2687
2688 lines.append("data mapconfig(%4d)/%4d/" % (iconfig,
2689 helas_diag.get('amplitudes')[0]['number']))
2690
2691
2692
2693 schannels, tchannels = helas_diag.get('amplitudes')[0].\
2694 get_s_and_t_channels(ninitial, model, 990)
2695
2696 s_and_t_channels.append([schannels, tchannels])
2697
2698
2699 allchannels = schannels
2700 if len(tchannels) > 1:
2701
2702 allchannels = schannels + tchannels
2703
2704 for vert in allchannels:
2705 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
2706 last_leg = vert.get('legs')[-1]
2707 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \
2708 (last_leg.get('number'), iconfig, len(daughters),
2709 ",".join(["%3d" % d for d in daughters])))
2710 if vert in schannels:
2711 lines.append("data sprop(%4d,%4d)/%8d/" % \
2712 (last_leg.get('number'), iconfig,
2713 last_leg.get('id')))
2714 elif vert in tchannels[:-1]:
2715 lines.append("data tprid(%4d,%4d)/%8d/" % \
2716 (last_leg.get('number'), iconfig,
2717 abs(last_leg.get('id'))))
2718
2719
2720 lines.append("# Number of configs")
2721 lines.append("data mapconfig(0)/%4d/" % iconfig)
2722
2723
2724 writer.writelines(lines)
2725
2726 return iconfig, mapconfigs, s_and_t_channels
2727
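# For a hypothetical single s-channel configuration (final-state legs 3 and 4
# merging into an internal gluon, leg number -1) the generated configs.inc
# would contain lines like
#
#   # Diagram 1, Amplitude 1
#   data mapconfig(   1)/   1/
#   data (iforest(i, -1,   1),i=1,2)/  3,  4/
#   data sprop(  -1,   1)/      21/
#   # Number of configs
#   data mapconfig(0)/   1/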
2728
2729
2730
2731
2732
2734 """Write the decayBW.inc file for MadEvent"""
2735
2736 lines = []
2737
2738 booldict = {False: ".false.", True: ".false."}
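# both entries of booldict map to ".false.", i.e. the gForceBW flag written
# to decayBW.inc is always false in the MadFKS output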
2739
2740
2741 for iconf, config in enumerate(s_and_t_channels):
2742 schannels = config[0]
2743 for vertex in schannels:
2744
2745
2746 leg = vertex.get('legs')[-1]
2747 lines.append("data gForceBW(%d,%d)/%s/" % \
2748 (leg.get('number'), iconf + 1,
2749 booldict[leg.get('from_group')]))
2750
2751
2752 writer.writelines(lines)
2753
2754 return True
2755
2756
2757
2758
2759
2761 """Write the dname.mg file for MG4"""
2762
2763 line = "DIRNAME=P%s" % \
2764 matrix_element.get('processes')[0].shell_string()
2765
2766
2767 writer.write(line + "\n")
2768
2769 return True
2770
2771
2772
2773
2774
2776 """Write the iproc.dat file for MG4"""
2777
2778 line = "%d" % (me_number + 1)
2779
2780
2781 for line_to_write in writer.write_line(line):
2782 writer.write(line_to_write)
2783 return True
2784
2785
2786
2787
2788
2789
2790
2791
2792
2793
2794
2796 """generate the lines for fks.inc describing initializating the
2797 fks_j_from_i array"""
2798 lines = []
2799 if not me.isfinite:
2800 for ii, js in me.fks_j_from_i.items():
2801 if js:
2802 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2803 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
2804 else:
2805 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2806 % (i, 2, 1, 1, '1'))
2807 lines.append('')
2808
2809 return lines
2810
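# Example of the emitted line for a hypothetical configuration 1 where parton 5
# (i_fks) can be paired with j_fks = 1 or 2; the entry at JPOS = 0 stores the
# number of allowed partners:
#
#   DATA (FKS_J_FROM_I_D(1, 5, JPOS), JPOS = 0, 2) / 2, 1, 2 /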
2811
2812
2813
2814
2816
2817 """Write the leshouche.inc file for MG4"""
2818
2819
2820 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2821
2822 lines = []
2823 for iproc, proc in enumerate(matrix_element.get('processes')):
2824 legs = proc.get_legs_with_decays()
2825 lines.append("I %4d %4d %s" % \
2826 (ime, iproc + 1,
2827 " ".join([str(l.get('id')) for l in legs])))
2828 for i in [1, 2]:
2829 lines.append("M %4d %4d %4d %s" % \
2830 (ime, i, iproc + 1,
2831 " ".join([ "%3d" % 0 ] * ninitial + \
2832 [ "%3d" % i ] * (nexternal - ninitial))))
2833
2834
2835
2836 if iproc == 0:
2837
2838 if not matrix_element.get('color_basis'):
2839 for i in [1, 2]:
2840 lines.append("C %4d %4d 1 %s" % \
2841 (ime, i,
2842 " ".join([ "%3d" % 0 ] * nexternal)))
2843 color_flow_list = []
2844 nflow = 1
2845
2846 else:
2847
2848 repr_dict = {}
2849 for l in legs:
2850 repr_dict[l.get('number')] = \
2851 proc.get('model').get_particle(l.get('id')).get_color()\
2852 * (-1)**(1+l.get('state'))
2853
2854 color_flow_list = \
2855 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2856 ninitial)
2857
2858 for cf_i, color_flow_dict in enumerate(color_flow_list):
2859 for i in [0, 1]:
2860 lines.append("C %4d %4d %4d %s" % \
2861 (ime, i + 1, cf_i + 1,
2862 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2863 for l in legs])))
2864
2865 nflow = len(color_flow_list)
2866
2867 nproc = len(matrix_element.get('processes'))
2868
2869 return lines, nproc, nflow
2870
2871
2873
2874 """As get_leshouche_lines, but for 'fake' real emission processes (LOonly
2875 In this case, write born color structure times ij -> i,j splitting)
2876 """
2877
2878 bornproc = matrix_element.get('processes')[0]
2879 colors = [l.get('color') for l in bornproc.get('legs')]
2880
2881 fks_i = len(colors)
2882
2883
2884 fks_j=1
2885 for cpos, col in enumerate(colors):
2886 if col != 1:
2887 fks_j = cpos+1
2888
2889
2890 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2891 nexternal+=1
2892
2893 lines = []
2894 for iproc, proc in enumerate(matrix_element.get('processes')):
2895
2896 legs = proc.get_legs_with_decays() + \
2897 [fks_common.FKSLeg({'id': -21,
2898 'number': nexternal,
2899 'state': True,
2900 'fks': 'i',
2901 'color': 8,
2902 'charge': 0.,
2903 'massless': True,
2904 'spin': 3,
2905 'is_part': True,
2906 'self_antipart': True})]
2907
2908 lines.append("I %4d %4d %s" % \
2909 (ime, iproc + 1,
2910 " ".join([str(l.get('id')) for l in legs])))
2911 for i in [1, 2]:
2912 lines.append("M %4d %4d %4d %s" % \
2913 (ime, i, iproc + 1,
2914 " ".join([ "%3d" % 0 ] * ninitial + \
2915 [ "%3d" % i ] * (nexternal - ninitial))))
2916
2917
2918
2919 if iproc == 0:
2920
2921 if not matrix_element.get('color_basis'):
2922 for i in [1, 2]:
2923 lines.append("C %4d %4d 1 %s" % \
2924 (ime, i,
2925 " ".join([ "%3d" % 0 ] * nexternal)))
2926 color_flow_list = []
2927 nflow = 1
2928
2929 else:
2930
2931
2932
2933 repr_dict = {}
2934 for l in legs[:-1]:
2935 repr_dict[l.get('number')] = \
2936 proc.get('model').get_particle(l.get('id')).get_color()\
2937 * (-1)**(1+l.get('state'))
2938
2939 color_flow_list = \
2940 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2941 ninitial)
2942
2943 for cf_i, color_flow_dict in enumerate(color_flow_list):
2944
2945
2946 maxicol = max(sum(list(color_flow_dict.values()), []))
2947
2948 if color_flow_dict[fks_j][0] == 0:
2949 anti = True
2950 icol_j = color_flow_dict[fks_j][1]
2951 else:
2952 anti = False
2953 icol_j = color_flow_dict[fks_j][0]
2954
2955 if anti:
2956 color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1])
2957 color_flow_dict[fks_j][1] = maxicol + 1
2958 else:
2959 color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1)
2960 color_flow_dict[fks_j][0] = maxicol + 1
2961
2962 for i in [0, 1]:
2963 lines.append("C %4d %4d %4d %s" % \
2964 (ime, i + 1, cf_i + 1,
2965 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2966 for l in legs])))
2967
2968 nflow = len(color_flow_list)
2969
2970 nproc = len(matrix_element.get('processes'))
2971
2972 return lines, nproc, nflow
2973
2974
2975
2976
2977
2979 """returns the lines with the information on the denominator keeping care
2980 of the identical particle factors in the various real emissions"""
2981
2982 lines = []
2983 info_list = fks_born.get_fks_info_list()
2984 if info_list:
2985
2986 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list))
2987 lines.append('DATA IDEN_VALUES /' + \
2988 ', '.join(['%d' % (
2989 fks_born.born_matrix_element.get_denominator_factor() ) \
2990 for info in info_list]) + '/')
2991 else:
2992
2993 lines.append('INTEGER IDEN_VALUES(1)')
2994 lines.append('DATA IDEN_VALUES / %d /' \
2995 % fks_born.born_matrix_element.get_denominator_factor())
2996
2997 return lines
2998
2999
3000
3001
3002
3004 """returns the lines with the information on the particle number of the born
3005 leg that splits"""
3006 info_list = fks_born.get_fks_info_list()
3007 lines = []
3008 if info_list:
3009
3010
3011 ij_list = [info['fks_info']['ij']if \
3012 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \
3013 else 0 for info in info_list]
3014 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list))
3015 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/')
3016 else:
3017
3018 lines.append('INTEGER IJ_VALUES(1)')
3019 lines.append('DATA IJ_VALUES / 1 /')
3020
3021 return lines
3022
3023
3024 def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
3025 mirror = False):
3026 """Generate the PDF lines for the auto_dsig.f file"""
3027
3028 processes = matrix_element.get('processes')
3029 model = processes[0].get('model')
3030
3031 pdf_definition_lines = ""
3032 pdf_data_lines = ""
3033 pdf_lines = ""
3034
3035 if ninitial == 1:
3036 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
3037 for i, proc in enumerate(processes):
3038 process_line = proc.base_string()
3039 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3040 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
3041 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
3042 else:
3043
3044 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
3045 p in processes]))),
3046 sorted(list(set([p.get_initial_pdg(2) for \
3047 p in processes])))]
3048
3049
3050 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
3051 sum(initial_states,[])])
3052 for key,val in pdf_codes.items():
3053 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')
3054
3055
3056 pdgtopdf = {21: 0, 22: 7}
3057
3058 for pdg in sum(initial_states,[]):
3059 if not pdg in pdgtopdf and not pdg in list(pdgtopdf.values()):
3060 pdgtopdf[pdg] = pdg
3061 elif pdg not in pdgtopdf and pdg in list(pdgtopdf.values()):
3062
3063 pdgtopdf[pdg] = 6000000 + pdg
3064
3065
3066 for i in [0,1]:
3067 pdf_definition_lines += "DOUBLE PRECISION " + \
3068 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3069 for pdg in \
3070 initial_states[i]]) + \
3071 "\n"
3072
3073
3074 for i in [0,1]:
3075 pdf_data_lines += "DATA " + \
3076 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3077 for pdg in initial_states[i]]) + \
3078 "/%d*1D0/" % len(initial_states[i]) + \
3079 "\n"
3080
3081
3082 for i, init_states in enumerate(initial_states):
3083 if not mirror:
3084 ibeam = i + 1
3085 else:
3086 ibeam = 2 - i
3087 if subproc_group:
3088 pdf_lines = pdf_lines + \
3089 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
3090 % (ibeam, ibeam)
3091 else:
3092 pdf_lines = pdf_lines + \
3093 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
3094 % (ibeam, ibeam)
3095
3096 for initial_state in init_states:
3097 if initial_state in list(pdf_codes.keys()):
3098 if subproc_group:
3099 if abs(pdgtopdf[initial_state]) <= 7:
3100 pdf_lines = pdf_lines + \
3101 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
3102 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
3103 (pdf_codes[initial_state],
3104 i + 1, ibeam, pdgtopdf[initial_state],
3105 ibeam, ibeam)
3106 else:
3107
3108 pdf_lines = pdf_lines + \
3109 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3110 "%s%d=0d0\n") % \
3111 (pdf_codes[initial_state],i + 1)
3112 else:
3113 if abs(pdgtopdf[initial_state]) <= 7:
3114 pdf_lines = pdf_lines + \
3115 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
3116 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
3117 (pdf_codes[initial_state],
3118 i + 1, ibeam, pdgtopdf[initial_state],
3119 ibeam, ibeam)
3120 else:
3121
3122 pdf_lines = pdf_lines + \
3123 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3124 "%s%d=0d0\n") % \
3125 (pdf_codes[initial_state],i + 1)
3126
3127 pdf_lines = pdf_lines + "ENDIF\n"
3128
3129
3130 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
3131 for proc in processes:
3132 process_line = proc.base_string()
3133 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3134 pdf_lines = pdf_lines + "\nPD(IPROC) = "
3135 for ibeam in [1, 2]:
3136 initial_state = proc.get_initial_pdg(ibeam)
3137 if initial_state in list(pdf_codes.keys()):
3138 pdf_lines = pdf_lines + "%s%d*" % \
3139 (pdf_codes[initial_state], ibeam)
3140 else:
3141 pdf_lines = pdf_lines + "1d0*"
3142
3143 pdf_lines = pdf_lines[:-1] + "\n"
3144
3145
3146 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
3147
3148
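# Sketch of the Fortran returned for a hypothetical u u~ > t t~ process with
# subproc_group=False and mirror=False ('u1' and 'ux2' are the PDF variables
# this routine would name for beams 1 and 2):
#
#   IF (ABS(LPP(1)) .GE. 1) THEN
#   LP=SIGN(1,LPP(1))
#   u1=PDG2PDF(ABS(LPP(1)),2*LP,XBK(1),DSQRT(Q2FACT(1)))
#   ENDIF
#   IF (ABS(LPP(2)) .GE. 1) THEN
#   LP=SIGN(1,LPP(2))
#   ux2=PDG2PDF(ABS(LPP(2)),-2*LP,XBK(2),DSQRT(Q2FACT(2)))
#   ENDIF
#   PD(0) = 0d0
#   IPROC = 0
#   IPROC=IPROC+1 ! u u~ > t t~
#   PD(IPROC) = u1*ux2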
3149
3151 """Return the color matrix definition lines for the given color_matrix. Split
3152 rows in chunks of size n."""
3153
3154 if not color_matrix:
3155 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
3156 else:
3157 ret_list = []
3158 my_cs = color.ColorString()
3159 for index, denominator in \
3160 enumerate(color_matrix.get_line_denominators()):
3161
3162 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
3163
3164 num_list = color_matrix.get_line_numerators(index, denominator)
3165 for k in range(0, len(num_list), n):
3166 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
3167 (index + 1, k + 1, min(k + n, len(num_list)),
3168 ','.join(["%5r" % int(i) for i in num_list[k:k + n]])))
3169 return ret_list
3170
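# For example, a hypothetical 2x2 color matrix with line denominators 3 and
# numerators (16, -2) / (-2, 16) would be written, for a chunk size n >= 2, as
#
#   DATA Denom(1)/3/
#   DATA (CF(i,  1),i=  1,  2) /   16,   -2/
#   DATA Denom(2)/3/
#   DATA (CF(i,  2),i=  1,  2) /   -2,   16/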
3171
3172
3173
3176 """Write the maxamps.inc file for MG4."""
3177
3178 file = " integer maxamps, maxflow, maxproc, maxsproc\n"
3179 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \
3180 (maxamps, maxflows)
3181 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \
3182 (maxproc, maxsproc)
3183
3184
3185 writer.writelines(file)
3186
3187 return True
3188
3189
3190
3191
3193
3194 """Write the ncombs.inc file for MadEvent."""
3195
3196
3197 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3198
3199
3200 file = " integer n_max_cl\n"
3201 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
3202
3203
3204 writer.writelines(file)
3205
3206 return True
3207
3208
3209
3210
3212 """Write a dummy config_subproc.inc file for MadEvent"""
3213
3214 lines = []
3215
3216 for iconfig in range(len(s_and_t_channels)):
3217 lines.append("DATA CONFSUB(1,%d)/1/" % \
3218 (iconfig + 1))
3219
3220
3221 writer.writelines(lines)
3222
3223 return True
3224
3225
3226
3227
3229 """Write the get_color.f file for MadEvent, which returns color
3230 for all particles used in the matrix element."""
3231
3232 try:
3233 matrix_elements=matrix_element.real_processes[0].matrix_element
3234 except IndexError:
3235 matrix_elements=[matrix_element.born_matrix_element]
3236
3237 if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
3238 matrix_elements = [matrix_elements]
3239
3240 model = matrix_elements[0].get('processes')[0].get('model')
3241
3242
3243
3244
3245 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3246 for wf in d.get('wavefunctions')],[]) \
3247 for d in me.get('diagrams')],[]) \
3248 for me in [real_proc.matrix_element]],[])\
3249 for real_proc in matrix_element.real_processes],[]))
3250
3251 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3252 for wf in d.get('wavefunctions')],[]) \
3253 for d in matrix_element.born_matrix_element.get('diagrams')],[])))
3254
3255
3256 leg_ids = set(sum([sum([sum([[l.get('id') for l in \
3257 p.get_legs_with_decays()] for p in \
3258 me.get('processes')], []) for me in \
3259 [real_proc.matrix_element]], []) for real_proc in \
3260 matrix_element.real_processes],[]))
3261
3262 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
3263 p.get_legs_with_decays()] for p in \
3264 matrix_element.born_matrix_element.get('processes')], [])))
3265 particle_ids = sorted(list(wf_ids.union(leg_ids)))
3266
3267 lines = """function get_color(ipdg)
3268 implicit none
3269 integer get_color, ipdg
3270
3271 if(ipdg.eq.%d)then
3272 get_color=%d
3273 return
3274 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())
3275
3276 for part_id in particle_ids[1:]:
3277 lines += """else if(ipdg.eq.%d)then
3278 get_color=%d
3279 return
3280 """ % (part_id, model.get_particle(part_id).get_color())
3281
3282
3283 lines += """else if(ipdg.eq.%d)then
3284 c This is dummy particle used in multiparticle vertices
3285 get_color=2
3286 return
3287 """ % model.get_first_non_pdg()
3288 lines += """else
3289 write(*,*)'Error: No color given for pdg ',ipdg
3290 get_color=0
3291 return
3292 endif
3293 end
3294 """
3295
3296
3297 writer.writelines(lines)
3298
3299 return True
3300
3301
3302
3303
3304
3305 def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3306 """Write the props.inc file for MadEvent. Needs input from
3307 write_configs_file. With respect to the parent routine, it has some
3308 more specific formats that allow the props.inc file to be read by the
3309 link program"""
3310
3311 lines = []
3312
3313 particle_dict = matrix_element.get('processes')[0].get('model').\
3314 get('particle_dict')
3315
3316 for iconf, configs in enumerate(s_and_t_channels):
3317 for vertex in configs[0] + configs[1][:-1]:
3318 leg = vertex.get('legs')[-1]
3319 if leg.get('id') not in particle_dict:
3320
3321 mass = 'zero'
3322 width = 'zero'
3323 pow_part = 0
3324 else:
3325 particle = particle_dict[leg.get('id')]
3326
3327 if particle.get('mass').lower() == 'zero':
3328 mass = particle.get('mass')
3329 else:
3330 mass = "abs(%s)" % particle.get('mass')
3331
3332 if particle.get('width').lower() == 'zero':
3333 width = particle.get('width')
3334 else:
3335 width = "abs(%s)" % particle.get('width')
3336
3337 pow_part = 1 + int(particle.is_boson())
3338
3339 lines.append("pmass(%3d,%4d) = %s" % \
3340 (leg.get('number'), iconf + 1, mass))
3341 lines.append("pwidth(%3d,%4d) = %s" % \
3342 (leg.get('number'), iconf + 1, width))
3343 lines.append("pow(%3d,%4d) = %d" % \
3344 (leg.get('number'), iconf + 1, pow_part))
3345
3346
3347 writer.writelines(lines)
3348
3349 return True
3350
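# Sketch of the output for a hypothetical configuration with an s-channel top
# quark (leg -1) and a t-channel gluon (leg -2); the parameter names MT, WT
# and ZERO are assumptions taken from a typical model:
#
#   pmass( -1,   1) = abs(MT)
#   pwidth( -1,   1) = abs(WT)
#   pow( -1,   1) = 1
#   pmass( -2,   1) = ZERO
#   pwidth( -2,   1) = ZERO
#   pow( -2,   1) = 2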
3351
3352
3353
3354
3356 """Append this subprocess to the subproc.mg file for MG4"""
3357
3358
3359 writer.write(subprocdir + "\n")
3360
3361 return True
3362
3363
3364
3365
3366
3367
3368
3369
3372 """Class to take care of exporting a set of matrix elements to
3373 Fortran (v4) format."""
3374
3375
3378
3379
3380
3381
3382
3384 """create the directory run_name as a copy of the MadEvent
3385 Template, and clean the directory
3386 For now it is just the same as copy_v4template, but it will be modified
3387 """
3388 mgme_dir = self.mgme_dir
3389 dir_path = self.dir_path
3390 clean =self.opt['clean']
3391
3392
3393 if not os.path.isdir(dir_path):
3394 if not mgme_dir:
3395 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
3396 logger.info('initialize a new directory: %s' % \
3397 os.path.basename(dir_path))
3398 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
3399
3400 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),
3401 dir_path)
3402
3403 for card in ['plot_card']:
3404 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
3405 try:
3406 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
3407 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
3408 except IOError:
3409 logger.warning("Failed to copy " + card + ".dat to default")
3410
3411 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
3412 if not mgme_dir:
3413 raise MadGraph5Error("No valid MG_ME path given for MG4 run directory creation.")
3414 try:
3415 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
3416 except IOError:
3417 MG5_version = misc.get_pkg_info()
3418 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
3419 "5." + MG5_version['version'])
3420
3421
3422 if clean:
3423 logger.info('remove old information in %s' % os.path.basename(dir_path))
3424 if 'MADGRAPH_BASE' in os.environ:
3425 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
3426 '--web'], cwd=dir_path)
3427 else:
3428 try:
3429 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
3430 cwd=dir_path)
3431 except Exception as why:
3432 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
3433 % (os.path.basename(dir_path),why))
3434
3435 MG_version = misc.get_pkg_info()
3436 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
3437 MG_version['version'])
3438
3439
3440 self.link_CutTools(dir_path)
3441
3442 link_tir_libs=[]
3443 tir_libs=[]
3444 tir_include=[]
3445 for tir in self.all_tir:
3446 tir_dir="%s_dir"%tir
3447 libpath=getattr(self,tir_dir)
3448 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
3449 libpath,"lib%s.a"%tir,tir_name=tir)
3450 setattr(self,tir_dir,libpath)
3451 if libpath != "":
3452 if tir in ['pjfry','ninja','golem', 'samurai','collier']:
3453
3454
3455 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
3456 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
3457
3458 if tir in ['ninja']:
3459 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
3460 for ext in ['a','dylib','so']):
3461 raise MadGraph5Error(
3462 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. Please place a symlink to it there."%libpath)
3463 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
3464 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
3465
3466 if tir in ['golem','samurai','ninja','collier']:
3467 trg_path = pjoin(os.path.dirname(libpath),'include')
3468 if os.path.isdir(trg_path):
3469 to_include = misc.find_includes_path(trg_path,
3470 self.include_names[tir])
3471 else:
3472 to_include = None
3473
3474 if to_include is None and tir=='collier':
3475 to_include = misc.find_includes_path(
3476 pjoin(libpath,'modules'),self.include_names[tir])
3477 if to_include is None:
3478 logger.error(
3479 'Could not find the include directory for %s, looking in %s.\n' % (tir, str(trg_path)) +
3480 'Generation carries on, but you will need to edit the include path by hand in the makefiles.')
3481 to_include = '<Not_found_define_it_yourself>'
3482 tir_include.append('-I %s'%to_include)
3483 else:
3484 link_tir_libs.append('-l%s'%tir)
3485 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)
3486
3487 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
3488 cwd = os.getcwd()
3489 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3490 try:
3491 os.chdir(dirpath)
3492 except os.error:
3493 logger.error('Could not cd to directory %s' % dirpath)
3494 return 0
3495 filename = 'makefile_loop'
3496 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
3497 link_tir_libs,tir_libs,tir_include=tir_include)
3498 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
3499 dirpath = os.path.join(self.dir_path, 'Source')
3500 try:
3501 os.chdir(dirpath)
3502 except os.error:
3503 logger.error('Could not cd to directory %s' % dirpath)
3504 return 0
3505 filename = 'make_opts'
3506 calls = self.write_make_opts(writers.MakefileWriter(filename),
3507 link_tir_libs,tir_libs)
3508
3509 os.chdir(cwd)
3510
3511 cwd = os.getcwd()
3512 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3513 try:
3514 os.chdir(dirpath)
3515 except os.error:
3516 logger.error('Could not cd to directory %s' % dirpath)
3517 return 0
3518
3519
3520 cpfiles= ["SubProcesses/MadLoopParamReader.f",
3521 "Cards/MadLoopParams.dat",
3522 "SubProcesses/MadLoopParams.inc"]
3523
3524 for file in cpfiles:
3525 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
3526 os.path.join(self.dir_path, file))
3527
3528 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
3529 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
3530
3531
3532
3533 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
3534 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
3535 'Cards', 'MadLoopParams.dat'))
3536
3537 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
3538 "MadLoopParams.dat"))
3539
3540
3541 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
3542 "SubProcesses","MadLoopCommons.inc")).read()
3543 writer = writers.FortranWriter(os.path.join(self.dir_path,
3544 "SubProcesses","MadLoopCommons.f"))
3545 writer.writelines(MadLoopCommon%{
3546 'print_banner_commands':self.MadLoop_banner},
3547 context={'collier_available':self.tir_available_dict['collier']})
3548 writer.close()
3549
3550
3551 model_path = self.dir_path + '/Source/MODEL/'
3552
3553 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
3554 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
3555 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
3556 ln(model_path + '/mp_coupl_same_name.inc', \
3557 self.dir_path + '/SubProcesses')
3558
3559
3560 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
3561 writers.FortranWriter('cts_mpc.h'),)
3562
3563 self.copy_python_files()
3564
3565
3566
3567 self.write_pdf_opendata()
3568
3569
3570
3571 os.chdir(cwd)
3572
3574 """writes the V**** directory inside the P**** directories specified in
3575 dir_name"""
3576
3577 cwd = os.getcwd()
3578
3579 matrix_element = loop_matrix_element
3580
3581
3582 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
3583 try:
3584 os.mkdir(dirpath)
3585 except os.error as error:
3586 logger.warning(error.strerror + " " + dirpath)
3587
3588
3589 name = "V%s" % matrix_element.get('processes')[0].shell_string()
3590 dirpath = os.path.join(dir_name, name)
3591
3592 try:
3593 os.mkdir(dirpath)
3594 except os.error as error:
3595 logger.warning(error.strerror + " " + dirpath)
3596
3597 try:
3598 os.chdir(dirpath)
3599 except os.error:
3600 logger.error('Could not cd to directory %s' % dirpath)
3601 return 0
3602
3603 logger.info('Creating files in directory %s' % name)
3604
3605
3606 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3607
3608 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
3609
3610
3611 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
3612 abspath=False, cwd=None)
3613
3614
3615 filename = 'born_matrix.f'
3616 calls = self.write_bornmatrix(
3617 writers.FortranWriter(filename),
3618 matrix_element,
3619 fortran_model)
3620
3621 filename = 'nexternal.inc'
3622 self.write_nexternal_file(writers.FortranWriter(filename),
3623 nexternal, ninitial)
3624
3625 filename = 'pmass.inc'
3626 self.write_pmass_file(writers.FortranWriter(filename),
3627 matrix_element)
3628
3629 filename = 'ngraphs.inc'
3630 self.write_ngraphs_file(writers.FortranWriter(filename),
3631 len(matrix_element.get_all_amplitudes()))
3632
3633 filename = "loop_matrix.ps"
3634 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
3635 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
3636 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
3637 filename,
3638 model=matrix_element.get('processes')[0].get('model'),
3639 amplitude='')
3640 logger.info("Drawing loop Feynman diagrams for " + \
3641 matrix_element.get('processes')[0].nice_string(\
3642 print_weighted=False))
3643 plot.draw()
3644
3645 filename = "born_matrix.ps"
3646 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
3647 get('born_diagrams'),
3648 filename,
3649 model=matrix_element.get('processes')[0].\
3650 get('model'),
3651 amplitude='')
3652 logger.info("Generating born Feynman diagrams for " + \
3653 matrix_element.get('processes')[0].nice_string(\
3654 print_weighted=False))
3655 plot.draw()
3656
3657
3658
3659
3660 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
3661
3662 open('unique_id.inc','w').write(
3663 """ integer UNIQUE_ID
3664 parameter(UNIQUE_ID=1)""")
3665
3666 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
3667 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
3668 'MadLoopParams.inc','MadLoopCommons.f']
3669
3670 for file in linkfiles:
3671 ln('../../%s' % file)
3672
3673 os.system("ln -s ../../makefile_loop makefile")
3674
3675
3676 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
3677 pjoin('..','MadLoop5_resources'))
3678
3679 linkfiles = ['mpmodule.mod']
3680
3681 for file in linkfiles:
3682 ln('../../../lib/%s' % file)
3683
3684 linkfiles = ['coef_specs.inc']
3685
3686 for file in linkfiles:
3687 ln('../../../Source/DHELAS/%s' % file)
3688
3689
3690 os.chdir(cwd)
3691
3692 if not calls:
3693 calls = 0
3694 return calls
3695
3696
3697
3698
3699
3701 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the
3702 non-optimized mode"""
3703 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc')
3704
3705 replace_dict = {}
3706 replace_dict['max_lwf_size'] = 4
3707 replace_dict['vertex_max_coefs'] = max(\
3708 [q_polynomial.get_number_of_coefs_for_rank(n)
3709 for n in max_loop_vertex_ranks])
3710 IncWriter=writers.FortranWriter(filename,'w')
3711 IncWriter.writelines("""INTEGER MAXLWFSIZE
3712 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d)
3713 INTEGER VERTEXMAXCOEFS
3714 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\
3715 % replace_dict)
3716 IncWriter.close()
3717