15 """Methods and classes to export matrix elements to fks format."""
16
17 from distutils import dir_util
18 import glob
19 import logging
20 import os
21 import re
22 import shutil
23 import subprocess
24 import string
25 import copy
26 import platform
27
28 import madgraph.core.color_algebra as color
29 import madgraph.core.helas_objects as helas_objects
30 import madgraph.core.base_objects as base_objects
31 import madgraph.fks.fks_helas_objects as fks_helas_objects
32 import madgraph.fks.fks_base as fks
33 import madgraph.fks.fks_common as fks_common
34 import madgraph.iolibs.drawing_eps as draw
35 import madgraph.iolibs.gen_infohtml as gen_infohtml
36 import madgraph.iolibs.files as files
37 import madgraph.various.misc as misc
38 import madgraph.iolibs.file_writers as writers
39 import madgraph.iolibs.template_files as template_files
40 import madgraph.iolibs.ufo_expression_parsers as parsers
41 import madgraph.iolibs.export_v4 as export_v4
42 import madgraph.loop.loop_exporters as loop_exporters
43 import madgraph.various.q_polynomial as q_polynomial
44 import madgraph.various.banner as banner_mod
45
46 import aloha.create_aloha as create_aloha
47
48 import models.write_param_card as write_param_card
49 import models.check_param_card as check_param_card
50 from madgraph import MadGraph5Error, MG5DIR, InvalidCmd
51 from madgraph.iolibs.files import cp, ln, mv
52
53 pjoin = os.path.join
54
55 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
56 logger = logging.getLogger('madgraph.export_fks')
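# Helper used when generating the jpeg diagrams of a single P* subprocess
# directory: args is the tuple (Pdir, old_pos, dir_path); the internal
# gen_jpeg-pl script is run inside Pdir and the current directory is then
# restored to the parent one.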
57
58
60 Pdir = args[0]
61 old_pos = args[1]
62 dir_path = args[2]
63
64 devnull = os.open(os.devnull, os.O_RDWR)
65
66 os.chdir(Pdir)
67 subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
68 stdout = devnull)
69 os.chdir(os.path.pardir)
70
71
72
73
74
76 """Class to take care of exporting a set of matrix elements to
77 Fortran (v4) format."""
78
79
80
81
83 """create the directory run_name as a copy of the MadEvent
84 Template, and clean the directory
85 For now it is just the same as copy_v4template, but it will be modified
86 """
87
88 mgme_dir = self.mgme_dir
89 dir_path = self.dir_path
90 clean =self.opt['clean']
91
92
93 if not os.path.isdir(dir_path):
94 if not mgme_dir:
95 raise MadGraph5Error, \
96 "No valid MG_ME path given for MG4 run directory creation."
97 logger.info('initialize a new directory: %s' % \
98 os.path.basename(dir_path))
99 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
100
101 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)
102
103 for card in ['plot_card']:
104 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
105 try:
106 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
107 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
108 except IOError:
109 logger.warning("Failed to move " + card + ".dat to default")
110
111 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
112 if not mgme_dir:
113 raise MadGraph5Error, \
114 "No valid MG_ME path given for MG4 run directory creation."
115 try:
116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
117 except IOError:
118 MG5_version = misc.get_pkg_info()
119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
120 "5." + MG5_version['version'])
121
122
123 if clean:
124 logger.info('remove old information in %s' % os.path.basename(dir_path))
125 if os.environ.has_key('MADGRAPH_BASE'):
126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
127 '--web'],cwd=dir_path)
128 else:
129 try:
130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
131 cwd=dir_path)
132 except Exception, why:
133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
134 % (os.path.basename(dir_path),why))
135
136 MG_version = misc.get_pkg_info()
137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
138 MG_version['version'])
139
140
141 self.link_CutTools(dir_path)
142
143 link_tir_libs=[]
144 tir_libs=[]
145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
146 dirpath = os.path.join(self.dir_path, 'SubProcesses')
147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop')
148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
149 link_tir_libs,tir_libs)
150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
151 filename = pjoin(self.dir_path, 'Source','make_opts')
152 calls = self.write_make_opts(writers.MakefileWriter(filename),
153 link_tir_libs,tir_libs)
154
155
156 for card in ['FO_analyse_card', 'shower_card']:
157 try:
158 shutil.copy(pjoin(self.dir_path, 'Cards',
159 card + '.dat'),
160 pjoin(self.dir_path, 'Cards',
161 card + '_default.dat'))
162 except IOError:
163 logger.warning("Failed to copy " + card + ".dat to default")
164
165 cwd = os.getcwd()
166 dirpath = os.path.join(self.dir_path, 'SubProcesses')
167 try:
168 os.chdir(dirpath)
169 except os.error:
170 logger.error('Could not cd to directory %s' % dirpath)
171 return 0
172
173
174 cpfiles= ["SubProcesses/MadLoopParamReader.f",
175 "Cards/MadLoopParams.dat",
176 "SubProcesses/MadLoopParams.inc"]
177
178 for file in cpfiles:
179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
180 os.path.join(self.dir_path, file))
181
182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
184
185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
187 'Cards', 'MadLoopParams.dat'))
188
189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
190 "MadLoopParams.dat"))
191
192
193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
194 "SubProcesses","MadLoopCommons.inc")).read()
195 writer = writers.FortranWriter(os.path.join(self.dir_path,
196 "SubProcesses","MadLoopCommons.f"))
197 writer.writelines(MadLoopCommon%{
198 'print_banner_commands':self.MadLoop_banner},
199 context={'collier_available':False})
200 writer.close()
201
202
203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
204 writers.FortranWriter('cts_mpc.h'))
205
206
207
208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat')
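# Force the NHelForMCoverHels entry of the copied FKS_params.dat card to -1
# and write the card back in place.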
209 FKS_card_file = open(FKS_card_path,'r')
210 FKS_card = FKS_card_file.read()
211 FKS_card_file.close()
212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
213 "#NHelForMCoverHels\n-1", FKS_card)
214 FKS_card_file = open(FKS_card_path,'w')
215 FKS_card_file.write(FKS_card)
216 FKS_card_file.close()
217
218
219 os.chdir(cwd)
220
221 self.copy_python_files()
222
223
224 self.write_pdf_opendata()
225
226
227
228
229
231 """ Create the file makefile_loop which links to the TIR libraries."""
232
233 file = open(os.path.join(self.mgme_dir,'Template','NLO',
234 'SubProcesses','makefile_loop.inc')).read()
235 replace_dict={}
236 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
237 replace_dict['tir_libs']=' '.join(tir_libs)
238 replace_dict['dotf']='%.f'
239 replace_dict['doto']='%.o'
240 replace_dict['tir_include']=' '.join(tir_include)
241 file=file%replace_dict
242 if writer:
243 writer.writelines(file)
244 else:
245 return file
246
247
249 """ Create the file make_opts which links to the TIR libraries."""
250 file = open(os.path.join(self.mgme_dir,'Template','NLO',
251 'Source','make_opts.inc')).read()
252 replace_dict={}
253 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
254 replace_dict['tir_libs']=' '.join(tir_libs)
255 replace_dict['dotf']='%.f'
256 replace_dict['doto']='%.o'
257 file=file%replace_dict
258 if writer:
259 writer.writelines(file)
260 else:
261 return file
262
263
264
265
267 """copy python files required for the Template"""
268
269 files_to_copy = [ \
270 pjoin('interface','amcatnlo_run_interface.py'),
271 pjoin('interface','extended_cmd.py'),
272 pjoin('interface','common_run_interface.py'),
273 pjoin('interface','coloring_logging.py'),
274 pjoin('various','misc.py'),
275 pjoin('various','shower_card.py'),
276 pjoin('various','FO_analyse_card.py'),
277 pjoin('various','histograms.py'),
278 pjoin('various','banner.py'),
279 pjoin('various','cluster.py'),
280 pjoin('various','systematics.py'),
281 pjoin('various','lhe_parser.py'),
282 pjoin('madevent','sum_html.py'),
283 pjoin('madevent','gen_crossxhtml.py'),
284 pjoin('iolibs','files.py'),
285 pjoin('iolibs','save_load_object.py'),
286 pjoin('iolibs','file_writers.py'),
287 pjoin('..','models','check_param_card.py'),
288 pjoin('__init__.py')
289 ]
290 cp(_file_path+'/interface/.mg5_logging.conf',
291 self.dir_path+'/bin/internal/me5_logging.conf')
292
293 for cp_file in files_to_copy:
294 cp(pjoin(_file_path,cp_file),
295 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
296
297 def convert_model(self, model, wanted_lorentz = [],
298 wanted_couplings = []):
299
300 super(ProcessExporterFortranFKS,self).convert_model(model,
301 wanted_lorentz, wanted_couplings)
302
303 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
304 try:
305 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
306 except OSError as error:
307 pass
308 model_path = model.get('modelpath')
309 shutil.copytree(model_path,
310 pjoin(self.dir_path,'bin','internal','ufomodel'),
311 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
312 if hasattr(model, 'restrict_card'):
313 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
314 'restrict_default.dat')
315 if isinstance(model.restrict_card, check_param_card.ParamCard):
316 model.restrict_card.write(out_path)
317 else:
318 files.cp(model.restrict_card, out_path)
319
320
321
322
323
324
325 def write_maxparticles_file(self, writer, maxparticles):
326 """Write the maxparticles.inc file for MadEvent"""
327
328 lines = "integer max_particles, max_branch\n"
329 lines += "parameter (max_particles=%d) \n" % maxparticles
330 lines += "parameter (max_branch=max_particles-1)"
331
332
333 writer.writelines(lines)
334
335 return True
336
337
338
339
340
342 """Write the maxconfigs.inc file for MadEvent"""
343
344 lines = "integer lmaxconfigs\n"
345 lines += "parameter (lmaxconfigs=%d)" % maxconfigs
346
347
348 writer.writelines(lines)
349
350 return True
351
352
353
354
355
357 """ write an equivalent of the MG4 proc_card in order that all the Madevent
358 Perl script of MadEvent4 are still working properly for pure MG5 run."""
359
360 proc_card_template = template_files.mg4_proc_card.mg4_template
361 process_template = template_files.mg4_proc_card.process_template
362 process_text = ''
363 coupling = ''
364 new_process_content = []
365
366
367
368 process_str = process_str.replace(' =', '=')
369 process_str = process_str.replace('= ', '=')
370 process_str = process_str.replace(',',' , ')
371
372 for info in process_str.split():
373 if '=' in info:
374 coupling += info + '\n'
375 else:
376 new_process_content.append(info)
377
378
379 process_str = ' '.join(new_process_content)
380
381
382 process_text += process_template.substitute({'process': process_str, \
383 'coupling': coupling})
384
385 text = proc_card_template.substitute({'process': process_text,
386 'model': modelname,
387 'multiparticle':''})
388 ff = open(file_pos, 'w')
389 ff.write(text)
390 ff.close()
391
392
393
394
395
397 """ Write an initial state process map. Each possible PDF
398 combination gets an unique identifier."""
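# Each line of the map is "<id> <n> <pdg pair(s)>"; e.g. for the illustrative
# input initial_states=[[(21,21)], [(2,-2),(4,-4)]] the file would contain
# "1 2  21 21  0" and "2 2  2 -2  4 -4" (a single pair is padded with 0).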
399
400 text=''
401 for i,e in enumerate(initial_states):
402 if len(e) ==1:
403 e.append(0)
404 text=text+str(i+1)+' '+str(len(e))
405 for t in e:
406 text=text+' '
407 try:
408 for p in t:
409 text=text+' '+str(p)
410 except TypeError:
411 text=text+' '+str(t)
412 text=text+'\n'
413
414 ff = open(file_pos, 'w')
415 ff.write(text)
416 ff.close()
417
419 """ A function returning a string uniquely identifying the matrix
420 element given in argument so that it can be used as a prefix to all
421 MadLoop5 subroutines and common blocks related to it. This allows
422 to compile several processes into one library as requested by the
423 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design
424 necessitates that there is no process prefix."""
425
426 return ''
427
428
429
430
432 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the
433 non-optimized mode"""
434 raise fks_common.FKSProcessError(), \
435 "write_coef_specs should be called only in the loop-optimized mode"
436
437
438
439
440
441 def generate_directories_fks(self, matrix_element, fortran_model, me_number,
442 me_ntot, path=os.getcwd(),OLP='MadLoop'):
443 """Generate the Pxxxxx_i directories for a subprocess in MadFKS,
444 including the necessary matrix.f and various helper files"""
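# The P* directory is filled in the following order: the born files, the
# optional V* virtual directory, the real-emission matrix elements and PDF
# calls, the FKS bookkeeping include files (nFKSconfigs.inc, fks_info.inc,
# leshouche_info.dat, configs_and_props_info.dat, ...), the diagram drawings,
# and finally the symbolic links to the files listed in 'linkfiles' below.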
445 proc = matrix_element.born_matrix_element['processes'][0]
446
447 if not self.model:
448 self.model = matrix_element.get('processes')[0].get('model')
449
450 cwd = os.getcwd()
451 try:
452 os.chdir(path)
453 except OSError, error:
454 error_msg = "The directory %s should exist in order to be able " % path + \
455 "to \"export\" in it. If you see this error message by " + \
456 "typing the command \"export\" please consider to use " + \
457 "instead the command \"output\". "
458 raise MadGraph5Error, error_msg
459
460 calls = 0
461
462 self.fksdirs = []
463
464 borndir = "P%s" % \
465 (matrix_element.get('processes')[0].shell_string())
466 os.mkdir(borndir)
467 os.chdir(borndir)
468 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot))
469
470
471 self.generate_born_fks_files(matrix_element,
472 fortran_model, me_number, path)
473
474
475
476 if OLP=='NJET':
477 filename = 'OLE_order.lh'
478 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP)
479
480 if matrix_element.virt_matrix_element:
481 calls += self.generate_virt_directory( \
482 matrix_element.virt_matrix_element, \
483 fortran_model, \
484 os.path.join(path, borndir))
485
486
487
488 self.write_real_matrix_elements(matrix_element, fortran_model)
489
490 self.write_pdf_calls(matrix_element, fortran_model)
491
492 filename = 'nFKSconfigs.inc'
493 self.write_nfksconfigs_file(writers.FortranWriter(filename),
494 matrix_element,
495 fortran_model)
496
497 filename = 'iproc.dat'
498 self.write_iproc_file(writers.FortranWriter(filename),
499 me_number)
500
501 filename = 'fks_info.inc'
502 self.write_fks_info_file(writers.FortranWriter(filename),
503 matrix_element,
504 fortran_model)
505
506 filename = 'leshouche_info.dat'
507 nfksconfs,maxproc,maxflow,nexternal=\
508 self.write_leshouche_info_file(filename,matrix_element)
509
510
511
512 if nfksconfs == maxproc == maxflow == 0:
513 nfksconfs = 1
514 (dummylines, maxproc, maxflow) = self.get_leshouche_lines(
515 matrix_element.born_matrix_element, 1)
516
517 filename = 'leshouche_decl.inc'
518 self.write_leshouche_info_declarations(
519 writers.FortranWriter(filename),
520 nfksconfs,maxproc,maxflow,nexternal,
521 fortran_model)
522 filename = 'genps.inc'
523 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes()
524 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis')))
525 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\
526 ncolor,maxflow,fortran_model)
527
528 filename = 'configs_and_props_info.dat'
529 nconfigs,max_leg_number=self.write_configs_and_props_info_file(
530 filename,
531 matrix_element)
532
533 filename = 'configs_and_props_decl.inc'
534 self.write_configs_and_props_info_declarations(
535 writers.FortranWriter(filename),
536 nconfigs,max_leg_number,nfksconfs,
537 fortran_model)
538
539 filename = 'real_from_born_configs.inc'
540 self.write_real_from_born_configs(
541 writers.FortranWriter(filename),
542 matrix_element,
543 fortran_model)
544
545 filename = 'ngraphs.inc'
546 self.write_ngraphs_file(writers.FortranWriter(filename),
547 nconfigs)
548
549
550 filename = 'real_me_chooser.f'
551 self.write_real_me_wrapper(writers.FortranWriter(filename),
552 matrix_element,
553 fortran_model)
554
555 filename = 'parton_lum_chooser.f'
556 self.write_pdf_wrapper(writers.FortranWriter(filename),
557 matrix_element,
558 fortran_model)
559
560 filename = 'get_color.f'
561 self.write_colors_file(writers.FortranWriter(filename),
562 matrix_element)
563
564 filename = 'nexternal.inc'
565 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
566 self.write_nexternal_file(writers.FortranWriter(filename),
567 nexternal, ninitial)
568 self.proc_characteristic['ninitial'] = ninitial
569 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)
570
571 filename = 'pmass.inc'
572 try:
573 self.write_pmass_file(writers.FortranWriter(filename),
574 matrix_element.real_processes[0].matrix_element)
575 except IndexError:
576 self.write_pmass_file(writers.FortranWriter(filename),
577 matrix_element.born_matrix_element)
578
579
580 self.draw_feynman_diagrams(matrix_element)
581
582 linkfiles = ['BinothLHADummy.f',
583 'check_poles.f',
584 'MCmasses_HERWIG6.inc',
585 'MCmasses_HERWIGPP.inc',
586 'MCmasses_PYTHIA6Q.inc',
587 'MCmasses_PYTHIA6PT.inc',
588 'MCmasses_PYTHIA8.inc',
589 'add_write_info.f',
590 'coupl.inc',
591 'cuts.f',
592 'FKS_params.dat',
593 'initial_states_map.dat',
594 'OLE_order.olc',
595 'FKSParams.inc',
596 'FKSParamReader.f',
597 'cuts.inc',
598 'unlops.inc',
599 'pythia_unlops.f',
600 'driver_mintMC.f',
601 'driver_mintFO.f',
602 'appl_interface.cc',
603 'appl_interface_dummy.f',
604 'appl_common.inc',
605 'reweight_appl.inc',
606 'fastjetfortran_madfks_core.cc',
607 'fastjetfortran_madfks_full.cc',
608 'fjcore.cc',
609 'fastjet_wrapper.f',
610 'fjcore.hh',
611 'fks_Sij.f',
612 'fks_powers.inc',
613 'fks_singular.f',
614 'veto_xsec.f',
615 'veto_xsec.inc',
616 'weight_lines.f',
617 'fks_inc_chooser.f',
618 'leshouche_inc_chooser.f',
619 'configs_and_props_inc_chooser.f',
620 'genps_fks.f',
621 'boostwdir2.f',
622 'madfks_mcatnlo.inc',
623 'open_output_files.f',
624 'open_output_files_dummy.f',
625 'HwU_dummy.f',
626 'madfks_plot.f',
627 'analysis_dummy.f',
628 'analysis_lhe.f',
629 'mint-integrator2.f',
630 'MC_integer.f',
631 'mint.inc',
632 'montecarlocounter.f',
633 'q_es.inc',
634 'recluster.cc',
635 'Boosts.h',
636 'reweight_xsec.f',
637 'reweight_xsec_events.f',
638 'reweight_xsec_events_pdf_dummy.f',
639 'iproc_map.f',
640 'run.inc',
641 'run_card.inc',
642 'setcuts.f',
643 'setscales.f',
644 'test_soft_col_limits.f',
645 'symmetry_fks_v3.f',
646 'vegas2.for',
647 'write_ajob.f',
648 'handling_lhe_events.f',
649 'write_event.f',
650 'fill_MC_mshell.f',
651 'maxparticles.inc',
652 'message.inc',
653 'initcluster.f',
654 'cluster.inc',
655 'cluster.f',
656 'reweight.f',
657 'randinit',
658 'sudakov.inc',
659 'maxconfigs.inc',
660 'timing_variables.inc']
661
662 for file in linkfiles:
663 ln('../' + file , '.')
664 os.system("ln -s ../../Cards/param_card.dat .")
665
666
667 os.system("ln -s ../makefile_fks_dir ./makefile")
668 if matrix_element.virt_matrix_element:
669 os.system("ln -s ../BinothLHA.f ./BinothLHA.f")
670 elif OLP!='MadLoop':
671 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f")
672 else:
673 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f")
674
675
676 os.chdir(os.path.pardir)
677
678 filename = 'subproc.mg'
679 files.append_to_file(filename,
680 self.write_subproc,
681 borndir)
682
683 os.chdir(cwd)
684
685 gen_infohtml.make_info_html_nlo(self.dir_path)
686
687
688 return calls
689
690
691
692
694 """ """
695
696 run_card = banner_mod.RunCardNLO()
697
698 run_card.create_default_for_process(self.proc_characteristic,
699 history,
700 processes)
701
702 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
703 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
704
705
716
717 def finalize(self, matrix_elements, history, mg5options, flaglist):
718 """Finalize FKS directory by creating jpeg diagrams, html
719 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if
720 necessary."""
721
722 devnull = os.open(os.devnull, os.O_RDWR)
723 try:
724 res = misc.call([mg5options['lhapdf'], '--version'], \
725 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
726 except Exception:
727 res = 1
728 if res != 0:
729 logger.info('The value for lhapdf in the current configuration does not ' + \
730 'correspond to a valid executable.\nPlease set it correctly either in ' + \
731 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \
732 'and regenerate the process. \nTo avoid regeneration, edit the ' + \
733 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \
734 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n')
735
736 compiler_dict = {'fortran': mg5options['fortran_compiler'],
737 'cpp': mg5options['cpp_compiler'],
738 'f2py': mg5options['f2py_compiler']}
739
740 if 'nojpeg' in flaglist:
741 makejpg = False
742 else:
743 makejpg = True
744 output_dependencies = mg5options['output_dependencies']
745
746
747 self.proc_characteristic['grouped_matrix'] = False
748 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']
749
750 self.create_proc_charac()
751
752 self.create_run_card(matrix_elements.get_processes(), history)
753
754
755
756
757
758
759
760
761 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f')
762 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc')
763 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model)
764
765
766
767 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc')
768 self.write_maxconfigs_file(writers.FortranWriter(filename),
769 matrix_elements.get_max_configs())
770
771
772 filename = os.path.join(self.dir_path,'Source','maxparticles.inc')
773 self.write_maxparticles_file(writers.FortranWriter(filename),
774 matrix_elements.get_max_particles())
775
776
777 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses'))
778
779
780 fcompiler_chosen = self.set_fortran_compiler(compiler_dict)
781 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp'])
782
783 old_pos = os.getcwd()
784 os.chdir(os.path.join(self.dir_path, 'SubProcesses'))
785 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \
786 proc[0] == 'P']
787
788 devnull = os.open(os.devnull, os.O_RDWR)
789
790 if makejpg:
791 logger.info("Generate jpeg diagrams")
792 for Pdir in P_dir_list:
793 os.chdir(Pdir)
794 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
795 stdout = devnull)
796 os.chdir(os.path.pardir)
797
798 logger.info("Generate web pages")
799
800
801 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
802 stdout = devnull)
803
804 os.chdir(os.path.pardir)
805
806
807
808
809
810
811
812
813
814
815 if os.path.isdir('Cards'):
816 output_file = os.path.join('Cards', 'proc_card_mg5.dat')
817 history.write(output_file)
818
819
820 for card in ['run_card', 'FO_analyse_card', 'shower_card']:
821 try:
822 shutil.copy(pjoin(self.dir_path, 'Cards',
823 card + '.dat'),
824 pjoin(self.dir_path, 'Cards',
825 card + '_default.dat'))
826 except IOError:
827 logger.warning("Failed to copy " + card + ".dat to default")
828
829
830 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
831 stdout = devnull)
832
833
834 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')):
835 if os.path.exists('amcatnlo.tar.gz'):
836 os.remove('amcatnlo.tar.gz')
837 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')],
838 stdout = devnull)
839
840 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
841 stdout = devnull)
842
843
844 os.chdir(old_pos)
845
846
847
848 base_compiler= ['FC=g77','FC=gfortran']
849
850 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP')
851
852 if output_dependencies == 'external':
853
854 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \
855 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')):
856 if 'FC' not in os.environ or not os.environ['FC']:
857 path = os.path.join(StdHep_path, 'src', 'make_opts')
858 text = open(path).read()
859 for base in base_compiler:
860 text = text.replace(base,'FC=%s' % fcompiler_chosen)
861 open(path, 'w').writelines(text)
862
863 logger.info('Compiling StdHEP. This has to be done only once.')
864 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP'))
865 logger.info('Done.')
866
867 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \
868 pjoin(self.dir_path, 'MCatNLO', 'lib'))
869 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \
870 pjoin(self.dir_path, 'MCatNLO', 'lib'))
871
872 elif output_dependencies == 'internal':
873 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP')
874 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True)
875
876 linkfiles = ['libstdhep.a', 'libFmcfio.a']
877 for file in linkfiles:
878 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file),
879 os.path.join(self.dir_path, 'MCatNLO', 'lib'))
880 if 'FC' not in os.environ or not os.environ['FC']:
881 path = pjoin(StdHEP_internal_path, 'src', 'make_opts')
882 text = open(path).read()
883 for base in base_compiler:
884 text = text.replace(base,'FC=%s' % fcompiler_chosen)
885 open(path, 'w').writelines(text)
886
887 misc.compile(['clean'],cwd = StdHEP_internal_path)
888
889 elif output_dependencies == 'environment_paths':
890
891
892 libStdHep = misc.which_lib('libstdhep.a')
893 libFmcfio = misc.which_lib('libFmcfio.a')
894 if not libStdHep is None and not libFmcfio is None:
895 logger.info('MG5_aMC is using StdHep installation found at %s.'%\
896 os.path.dirname(libStdHep))
897 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
898 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
899 else:
900 raise InvalidCmd("Could not find the location of the files"+\
901 " libstdhep.a and libFmcfio.a in you environment paths.")
902
903 else:
904 raise MadGraph5Error, 'output_dependencies option %s not recognized'\
905 %output_dependencies
906
907
908 if 'madanalysis5_path' in self.opt and not \
909 self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
910
911 processes = sum([me.get('processes') if not isinstance(me, str) else [] \
912 for me in matrix_elements.get('matrix_elements')],[])
913
914
915
916 if len(processes)==0:
917 processes = self.born_processes
918 if len(processes)==0:
919 logger.warning(
920 """MG5aMC could not provide to Madanalysis5 the list of processes generated.
921 As a result, the default card will not be tailored to the process generated.
922 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""")
923
924
925 self.create_default_madanalysis5_cards(
926 history, self.proc_defs, [processes,]*len(self.proc_defs),
927 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
928 levels =['hadron'])
929
931 """Writes the real_from_born_configs.inc file that contains
932 the mapping to go for a given born configuration (that is used
933 e.g. in the multi-channel phase-space integration to the
934 corresponding real-emission diagram, i.e. the real emission
935 diagram in which the combined ij is split in i_fks and
936 j_fks."""
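# Each FKS configuration gets one DATA statement; an illustrative line is
#   data (real_from_born_conf(irfbc,1),irfbc=1,3) /2, 5, 7/
# meaning that born configurations 1..3 map onto real-emission configurations
# 2, 5 and 7 for nFKSprocess 1.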
937 lines=[]
938 lines2=[]
939 max_links=0
940 born_me=matrix_element.born_matrix_element
941 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
942 iFKS=iFKS+1
943 links=conf['fks_info']['rb_links']
944 max_links=max(max_links,len(links))
945 for i,diags in enumerate(links):
946 if not i == diags['born_conf']:
947 print links
948 raise MadGraph5Error, "born_conf should be canonically ordered"
949 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
950 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
951 % (iFKS,len(links),real_configs))
952
953
954
955 if not matrix_element.get_fks_info_list():
956
957 base_diagrams = born_me.get('base_amplitude').get('diagrams')
958 minvert = min([max([len(vert.get('legs')) for vert in \
959 diag.get('vertices')]) for diag in base_diagrams])
960
961 for idiag, diag in enumerate(base_diagrams):
962 if any([len(vert.get('legs')) > minvert for vert in
963 diag.get('vertices')]):
964
965 continue
966 max_links = max_links + 1
967
968 real_configs=', '.join(['%d' % i for i in range(1, max_links+1)])
969 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
970 % (1,max_links,real_configs))
971
972 lines2.append("integer irfbc")
973 lines2.append("integer real_from_born_conf(%d,%d)" \
974 % (max_links, max(len(matrix_element.get_fks_info_list()),1)))
975
976 writer.writelines(lines2+lines)
977
978
979
980
981
982
984 """Write the get_mass_width_file.f file for MG4.
985 Also update the makeinc.inc file
986 """
987 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero']
988 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero']
989
990 iflines_mass = ''
991 iflines_width = ''
992
993 for i, part in enumerate(mass_particles):
994 if i == 0:
995 ifstring = 'if'
996 else:
997 ifstring = 'else if'
998 if part['self_antipart']:
999 iflines_mass += '%s (id.eq.%d) then\n' % \
1000 (ifstring, part.get_pdg_code())
1001 else:
1002 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1003 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1004 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass')
1005
1006 for i, part in enumerate(width_particles):
1007 if i == 0:
1008 ifstring = 'if'
1009 else:
1010 ifstring = 'else if'
1011 if part['self_antipart']:
1012 iflines_width += '%s (id.eq.%d) then\n' % \
1013 (ifstring, part.get_pdg_code())
1014 else:
1015 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1016 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1017 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width')
1018
1019
1020 if len(mass_particles)==0:
1021 iflines_mass = 'if (.True.) then\n'
1022
1023 if len(width_particles)==0:
1024 iflines_width = 'if (.True.) then\n'
1025
1026 replace_dict = {'iflines_mass' : iflines_mass,
1027 'iflines_width' : iflines_width}
1028
1029 file = open(os.path.join(_file_path, \
1030 'iolibs/template_files/get_mass_width_fcts.inc')).read()
1031 file = file % replace_dict
1032
1033
1034 writer.writelines(file)
1035
1036
1037 makeinc_content = open(makeinc).read()
1038 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ')
1039 open(makeinc, 'w').write(makeinc_content)
1040
1041 return
1042
1043
1045 """writes the declarations for the variables relevant for configs_and_props
1046 """
1047 lines = []
1048 lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
1049 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
1050 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
1051 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
1052 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1053 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1054 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1055 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1056 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1057 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1058
1059 writer.writelines(lines)
1060
1061
1063 """writes the configs_and_props_info.inc file that cointains
1064 all the (real-emission) configurations (IFOREST) as well as
1065 the masses and widths of intermediate particles"""
1066 lines = []
1067 lines.append("# C -> MAPCONFIG_D")
1068 lines.append("# F/D -> IFOREST_D")
1069 lines.append("# S -> SPROP_D")
1070 lines.append("# T -> TPRID_D")
1071 lines.append("# M -> PMASS_D/PWIDTH_D")
1072 lines.append("# P -> POW_D")
1073 lines2 = []
1074 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1075
1076 max_iconfig=0
1077 max_leg_number=0
1078
1079
1080
1081
1082 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
1083 iFKS=iFKS+1
1084 iconfig = 0
1085 s_and_t_channels = []
1086 mapconfigs = []
1087 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
1088 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
1089 model = fks_matrix_element.get('base_amplitude').get('process').get('model')
1090 minvert = min([max([len(vert.get('legs')) for vert in \
1091 diag.get('vertices')]) for diag in base_diagrams])
1092
1093 lines.append("# ")
1094 lines.append("# nFKSprocess %d" % iFKS)
1095 for idiag, diag in enumerate(base_diagrams):
1096 if any([len(vert.get('legs')) > minvert for vert in
1097 diag.get('vertices')]):
1098
1099 continue
1100 iconfig = iconfig + 1
1101 helas_diag = fks_matrix_element.get('diagrams')[idiag]
1102 mapconfigs.append(helas_diag.get('number'))
1103 lines.append("# Diagram %d for nFKSprocess %d" % \
1104 (helas_diag.get('number'),iFKS))
1105
1106 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1107 helas_diag.get('number')))
1108
1109
1110
1111 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1112 get_s_and_t_channels(ninitial, model, 990)
1113
1114 s_and_t_channels.append([schannels, tchannels])
1115
1116
1117 allchannels = schannels
1118 if len(tchannels) > 1:
1119
1120 allchannels = schannels + tchannels
1121
1122 for vert in allchannels:
1123 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1124 last_leg = vert.get('legs')[-1]
1125 lines.append("F %4d %4d %4d %4d" % \
1126 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
1127 for d in daughters:
1128 lines.append("D %4d" % d)
1129 if vert in schannels:
1130 lines.append("S %4d %4d %4d %10d" % \
1131 (iFKS,last_leg.get('number'), iconfig,
1132 last_leg.get('id')))
1133 elif vert in tchannels[:-1]:
1134 lines.append("T %4d %4d %4d %10d" % \
1135 (iFKS,last_leg.get('number'), iconfig,
1136 abs(last_leg.get('id'))))
1137
1138
1139 max_leg_number = min(max_leg_number,last_leg.get('number'))
1140 max_iconfig = max(max_iconfig,iconfig)
1141
1142
1143 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1144 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1145
1146
1147 lines2.append("# ")
1148 particle_dict = fks_matrix_element.get('processes')[0].get('model').\
1149 get('particle_dict')
1150
1151 for iconf, configs in enumerate(s_and_t_channels):
1152 for vertex in configs[0] + configs[1][:-1]:
1153 leg = vertex.get('legs')[-1]
1154 if leg.get('id') not in particle_dict:
1155
1156 pow_part = 0
1157 else:
1158 particle = particle_dict[leg.get('id')]
1159
1160 pow_part = 1 + int(particle.is_boson())
1161
1162 lines2.append("M %4d %4d %4d %10d " % \
1163 (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
1164 lines2.append("P %4d %4d %4d %4d " % \
1165 (iFKS,leg.get('number'), iconf + 1, pow_part))
1166
1167
1168
1169
1170 if not matrix_element.get_fks_info_list():
1171 born_me = matrix_element.born_matrix_element
1172
1173
1174
1175
1176 bornproc = born_me.get('processes')[0]
1177 colors = [l.get('color') for l in bornproc.get('legs')]
1178
1179 fks_i = len(colors)
1180
1181
1182 fks_j=1
1183 for cpos, col in enumerate(colors):
1184 if col != 1:
1185 fks_j = cpos+1
1186 fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos]
1187
1188
1189 if fks_j > ninitial:
1190 iFKS=1
1191 iconfig = 0
1192 s_and_t_channels = []
1193 mapconfigs = []
1194 base_diagrams = born_me.get('base_amplitude').get('diagrams')
1195 model = born_me.get('base_amplitude').get('process').get('model')
1196 minvert = min([max([len(vert.get('legs')) for vert in \
1197 diag.get('vertices')]) for diag in base_diagrams])
1198
1199 lines.append("# ")
1200 lines.append("# nFKSprocess %d" % iFKS)
1201 for idiag, diag in enumerate(base_diagrams):
1202 if any([len(vert.get('legs')) > minvert for vert in
1203 diag.get('vertices')]):
1204
1205 continue
1206 iconfig = iconfig + 1
1207 helas_diag = born_me.get('diagrams')[idiag]
1208 mapconfigs.append(helas_diag.get('number'))
1209 lines.append("# Diagram %d for nFKSprocess %d" % \
1210 (helas_diag.get('number'),iFKS))
1211
1212 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1213 helas_diag.get('number')))
1214
1215
1216
1217 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1218 get_s_and_t_channels(ninitial, model, 990)
1219
1220 s_and_t_channels.append([schannels, tchannels])
1221
1222
1223 lines.append("F %4d %4d %4d %4d" % \
1224 (iFKS,-1,iconfig,2))
1225
1226 lines.append("D %4d" % nexternal)
1227 lines.append("D %4d" % fks_j)
1228 lines.append("S %4d %4d %4d %10d" % \
1229 (iFKS,-1, iconfig,fks_j_id))
1230
1231
1232
1233
1234 allchannels = schannels
1235 if len(tchannels) > 1:
1236
1237 allchannels = schannels + tchannels
1238
1239 for vert in allchannels:
1240 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1241 last_leg = vert.get('legs')[-1]
1242 lines.append("F %4d %4d %4d %4d" % \
1243 (iFKS,last_leg.get('number')-1, iconfig, len(daughters)))
1244
1245
1246 for i_dau in range(len(daughters)):
1247 if daughters[i_dau] < 0:
1248 daughters[i_dau] += -1
1249
1250 if fks_j in daughters:
1251 daughters[daughters.index(fks_j)] = -1
1252 for d in daughters:
1253 lines.append("D %4d" % d)
1254 if vert in schannels:
1255 lines.append("S %4d %4d %4d %10d" % \
1256 (iFKS,last_leg.get('number')-1, iconfig,
1257 last_leg.get('id')))
1258 elif vert in tchannels[:-1]:
1259 lines.append("T %4d %4d %4d %10d" % \
1260 (iFKS,last_leg.get('number')-1, iconfig,
1261 abs(last_leg.get('id'))))
1262
1263
1264 max_leg_number = min(max_leg_number,last_leg.get('number')-1)
1265 max_iconfig = max(max_iconfig,iconfig)
1266
1267
1268 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1269 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1270
1271
1272 lines2.append("# ")
1273 particle_dict = born_me.get('processes')[0].get('model').\
1274 get('particle_dict')
1275
1276 for iconf, configs in enumerate(s_and_t_channels):
1277 lines2.append("M %4d %4d %4d %10d " % \
1278 (iFKS,-1, iconf + 1, fks_j_id))
1279 pow_part = 1 + int(particle_dict[fks_j_id].is_boson())
1280 lines2.append("P %4d %4d %4d %4d " % \
1281 (iFKS,-1, iconf + 1, pow_part))
1282 for vertex in configs[0] + configs[1][:-1]:
1283 leg = vertex.get('legs')[-1]
1284 if leg.get('id') not in particle_dict:
1285
1286 pow_part = 0
1287 else:
1288 particle = particle_dict[leg.get('id')]
1289
1290 pow_part = 1 + int(particle.is_boson())
1291
1292 lines2.append("M %4d %4d %4d %10d " % \
1293 (iFKS,leg.get('number')-1, iconf + 1, leg.get('id')))
1294 lines2.append("P %4d %4d %4d %4d " % \
1295 (iFKS,leg.get('number')-1, iconf + 1, pow_part))
1296
1297
1298 open(filename,'w').write('\n'.join(lines+lines2))
1299
1300 return max_iconfig, max_leg_number
1301
1302
1305 """writes the declarations for the variables relevant for leshouche_info
1306 """
1307 lines = []
1308 lines.append('integer maxproc_used, maxflow_used')
1309 lines.append('parameter (maxproc_used = %d)' % maxproc)
1310 lines.append('parameter (maxflow_used = %d)' % maxflow)
1311 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
1312 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
1313 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
1314 lines.append('integer niprocs_d(%d)' % (nfksconfs))
1315
1316 writer.writelines(lines)
1317
1318
1319 def write_genps(self, writer, maxproc,ngraphs,ncolor,maxflow, fortran_model):
1320 """writes the genps.inc file
1321 """
1322 lines = []
1323 lines.append("include 'maxparticles.inc'")
1324 lines.append("include 'maxconfigs.inc'")
1325 lines.append("integer maxproc,ngraphs,ncolor,maxflow")
1326 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \
1327 (maxproc,ngraphs,ncolor,maxflow))
1328 writer.writelines(lines)
1329
1330
1332 """writes the leshouche_info.inc file which contains
1333 the LHA informations for all the real emission processes
1334 """
1335 lines = []
1336 lines.append("# I -> IDUP_D")
1337 lines.append("# M -> MOTHUP_D")
1338 lines.append("# C -> ICOLUP_D")
1339 nfksconfs = len(matrix_element.get_fks_info_list())
1340 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1341
1342 maxproc = 0
1343 maxflow = 0
1344 for i, conf in enumerate(matrix_element.get_fks_info_list()):
1345
1346 (newlines, nprocs, nflows) = self.get_leshouche_lines(
1347 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
1348 lines.extend(newlines)
1349 maxproc = max(maxproc, nprocs)
1350 maxflow = max(maxflow, nflows)
1351
1352
1353 if not matrix_element.get_fks_info_list():
1354 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1)
1355 lines.extend(newlines)
1356
1357
1358 open(filename,'w').write('\n'.join(lines))
1359
1360 return nfksconfs, maxproc, maxflow, nexternal
1361
1362
1364 """writes the wrapper which allows to chose among the different real matrix elements"""
1365
1366 file = \
1367 """double precision function dlum()
1368 implicit none
1369 integer nfksprocess
1370 common/c_nfksprocess/nfksprocess
1371 """
1372 if matrix_element.real_processes:
1373 for n, info in enumerate(matrix_element.get_fks_info_list()):
1374 file += \
1375 """if (nfksprocess.eq.%(n)d) then
1376 call dlum_%(n_me)d(dlum)
1377 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1378 file += \
1379 """
1380 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
1381 stop
1382 endif
1383 return
1384 end
1385 """
1386 else:
1387 file+= \
1388 """call dlum_0(dlum)
1389 return
1390 end
1391 """
1392
1393
1394 writer.writelines(file)
1395 return 0
1396
1397
1399 """writes the wrapper which allows to chose among the different real matrix elements"""
1400
1401 file = \
1402 """subroutine smatrix_real(p, wgt)
1403 implicit none
1404 include 'nexternal.inc'
1405 double precision p(0:3, nexternal)
1406 double precision wgt
1407 integer nfksprocess
1408 common/c_nfksprocess/nfksprocess
1409 """
1410 for n, info in enumerate(matrix_element.get_fks_info_list()):
1411 file += \
1412 """if (nfksprocess.eq.%(n)d) then
1413 call smatrix_%(n_me)d(p, wgt)
1414 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1415
1416 if matrix_element.real_processes:
1417 file += \
1418 """
1419 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
1420 stop
1421 endif
1422 return
1423 end
1424 """
1425 else:
1426 file += \
1427 """
1428 wgt=0d0
1429 return
1430 end
1431 """
1432
1433 writer.writelines(file)
1434 return 0
1435
1436
1438 """Create the ps files containing the feynman diagrams for the born process,
1439 as well as for all the real emission processes"""
1440
1441 filename = 'born.ps'
1442 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
1443 get('base_amplitude').get('diagrams'),
1444 filename,
1445 model=matrix_element.born_matrix_element.\
1446 get('processes')[0].get('model'),
1447 amplitude=True, diagram_type='born')
1448 plot.draw()
1449
1450 for n, fksreal in enumerate(matrix_element.real_processes):
1451 filename = 'matrix_%d.ps' % (n + 1)
1452 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
1453 get('base_amplitude').get('diagrams'),
1454 filename,
1455 model=fksreal.matrix_element.\
1456 get('processes')[0].get('model'),
1457 amplitude=True, diagram_type='real')
1458 plot.draw()
1459
1460
1462 """writes the matrix_i.f files which contain the real matrix elements"""
1463
1464
1465
1466 for n, fksreal in enumerate(matrix_element.real_processes):
1467 filename = 'matrix_%d.f' % (n + 1)
1468 self.write_matrix_element_fks(writers.FortranWriter(filename),
1469 fksreal.matrix_element, n + 1,
1470 fortran_model)
1471
1473 """writes the parton_lum_i.f files which contain the real matrix elements.
1474 If no real emission existst, write the one for the born"""
1475
1476 if matrix_element.real_processes:
1477 for n, fksreal in enumerate(matrix_element.real_processes):
1478 filename = 'parton_lum_%d.f' % (n + 1)
1479 self.write_pdf_file(writers.FortranWriter(filename),
1480 fksreal.matrix_element, n + 1,
1481 fortran_model)
1482 else:
1483 filename = 'parton_lum_0.f'
1484 self.write_pdf_file(writers.FortranWriter(filename),
1485 matrix_element.born_matrix_element, 0,
1486 fortran_model)
1487
1488
1490 """generates the files needed for the born amplitude in the P* directory, which will
1491 be needed by the P* directories"""
1492 pathdir = os.getcwd()
1493
1494 filename = 'born.f'
1495 calls_born, ncolor_born = \
1496 self.write_born_fks(writers.FortranWriter(filename),\
1497 matrix_element,
1498 fortran_model)
1499
1500 filename = 'born_hel.f'
1501 self.write_born_hel(writers.FortranWriter(filename),\
1502 matrix_element,
1503 fortran_model)
1504
1505
1506 filename = 'born_conf.inc'
1507 nconfigs, mapconfigs, s_and_t_channels = \
1508 self.write_configs_file(
1509 writers.FortranWriter(filename),
1510 matrix_element.born_matrix_element,
1511 fortran_model)
1512
1513 filename = 'born_props.inc'
1514 self.write_props_file(writers.FortranWriter(filename),
1515 matrix_element.born_matrix_element,
1516 fortran_model,
1517 s_and_t_channels)
1518
1519 filename = 'born_decayBW.inc'
1520 self.write_decayBW_file(writers.FortranWriter(filename),
1521 s_and_t_channels)
1522
1523 filename = 'born_leshouche.inc'
1524 nflows = self.write_leshouche_file(writers.FortranWriter(filename),
1525 matrix_element.born_matrix_element,
1526 fortran_model)
1527
1528 filename = 'born_nhel.inc'
1529 self.write_born_nhel_file(writers.FortranWriter(filename),
1530 matrix_element.born_matrix_element, nflows,
1531 fortran_model,
1532 ncolor_born)
1533
1534 filename = 'born_ngraphs.inc'
1535 self.write_ngraphs_file(writers.FortranWriter(filename),
1536 matrix_element.born_matrix_element.get_number_of_amplitudes())
1537
1538 filename = 'ncombs.inc'
1539 self.write_ncombs_file(writers.FortranWriter(filename),
1540 matrix_element.born_matrix_element,
1541 fortran_model)
1542
1543 filename = 'born_maxamps.inc'
1544 maxamps = len(matrix_element.get('diagrams'))
1545 maxflows = ncolor_born
1546 self.write_maxamps_file(writers.FortranWriter(filename),
1547 maxamps,
1548 maxflows,
1549 max([len(matrix_element.get('processes')) for me in \
1550 matrix_element.born_matrix_element]),1)
1551
1552 filename = 'config_subproc_map.inc'
1553 self.write_config_subproc_map_file(writers.FortranWriter(filename),
1554 s_and_t_channels)
1555
1556 filename = 'coloramps.inc'
1557 self.write_coloramps_file(writers.FortranWriter(filename),
1558 mapconfigs,
1559 matrix_element.born_matrix_element,
1560 fortran_model)
1561
1562
1563 filename = ['sborn_sf.f', 'sborn_sf_dum.f']
1564 for i, links in enumerate([matrix_element.color_links, []]):
1565 self.write_sborn_sf(writers.FortranWriter(filename[i]),
1566 links,
1567 fortran_model)
1568 self.color_link_files = []
1569 for i in range(len(matrix_element.color_links)):
1570 filename = 'b_sf_%3.3d.f' % (i + 1)
1571 self.color_link_files.append(filename)
1572 self.write_b_sf_fks(writers.FortranWriter(filename),
1573 matrix_element, i,
1574 fortran_model)
1575
1576
1578 """Generates the library for computing the loop matrix elements
1579 necessary for this process using the OLP specified."""
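# Steps performed below: create the OLP_virtuals directory and its
# OLE_order.lh order file, run the OLP (the GoSam 'makevirt' script when
# OLP=='GoSam'), link the produced libgolem_olp shared library into lib/,
# adjust Source/make_opts so the library is linked, and finally parse the
# OLE_order.olc contract file to write the Binoth_proc.inc files.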
1580
1581
1582 virtual_path = pjoin(export_path,'OLP_virtuals')
1583 if not os.path.exists(virtual_path):
1584 os.makedirs(virtual_path)
1585 filename = os.path.join(virtual_path,'OLE_order.lh')
1586 self.write_lh_order(filename, process_list, OLP)
1587
1588 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\
1589 'Please check the virt_generation.log file in %s.'\
1590 %str(pjoin(virtual_path,'virt_generation.log'))
1591
1592
1593 if OLP=='GoSam':
1594 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1595 'GoSam','makevirt'),pjoin(virtual_path,'makevirt'))
1596 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1597 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc'))
1598 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path)
1599
1600 logger.info('Generating the loop matrix elements with %s...'%OLP)
1601 virt_generation_log = \
1602 open(pjoin(virtual_path,'virt_generation.log'), 'w')
1603 retcode = subprocess.call(['./makevirt'],cwd=virtual_path,
1604 stdout=virt_generation_log, stderr=virt_generation_log)
1605 virt_generation_log.close()
1606
1607 possible_other_extensions = ['so','dylib']
1608 shared_lib_ext='so'
1609 for ext in possible_other_extensions:
1610 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib',
1611 'libgolem_olp.'+ext)):
1612 shared_lib_ext = ext
1613
1614
1615 files_to_check = ['olp_module.mod',str(pjoin('lib',
1616 'libgolem_olp.'+shared_lib_ext))]
1617 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
1618 'Virtuals',f)) for f in files_to_check]):
1619 raise fks_common.FKSProcessError(fail_msg)
1620
1621 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext),
1622 pjoin(export_path,'lib'))
1623
1624
1625 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read()
1626 make_opts=open(pjoin(export_path,'Source','make_opts'),'w')
1627 if OLP=='GoSam':
1628 if platform.system().lower()=='darwin':
1629
1630
1631 make_opts_content=make_opts_content.replace('libOLP=',
1632 'libOLP=-Wl,-lgolem_olp')
1633 else:
1634
1635
1636
1637
1638
1639
1640
1641 make_opts_content=make_opts_content.replace('libOLP=',
1642 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp')
1643
1644
1645 make_opts.write(make_opts_content)
1646 make_opts.close()
1647
1648
1649
1650
1651
1652 proc_to_label = self.parse_contract_file(
1653 pjoin(virtual_path,'OLE_order.olc'))
1654
1655 self.write_BinothLHA_inc(process_list,proc_to_label,\
1656 pjoin(export_path,'SubProcesses'))
1657
1658
1659 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1660
1662 """ Write the file Binoth_proc.inc in each SubProcess directory so as
1663 to provide the right process_label to use in the OLP call to get the
1664 loop matrix element evaluation. The proc_to_label is the dictionary of
1665 the format of the one returned by the function parse_contract_file."""
1666
1667 for proc in processes:
1668 name = "P%s"%proc.shell_string()
1669 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \
1670 not leg.get('state')]),
1671 tuple([leg.get('id') for leg in proc.get('legs') if \
1672 leg.get('state')]))
1673 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w')
1674 try:
1675 incFile.write(
1676 """ INTEGER PROC_LABEL
1677 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs]))
1678 except KeyError:
1679 raise fks_common.FKSProcessError('Could not find the target'+\
1680 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\
1681 ' the proc_to_label argument in write_BinothLHA_inc.')
1682 incFile.close()
1683
1685 """ Parses the BLHA contract file, make sure all parameters could be
1686 understood by the OLP and return a mapping of the processes (characterized
1687 by the pdg's of the initial and final state particles) to their process
1688 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}.
1689 """
1690
1691 proc_def_to_label = {}
1692
1693 if not os.path.exists(contract_file_path):
1694 raise fks_common.FKSProcessError('Could not find the contract file'+\
1695 ' OLE_order.olc in %s.'%str(contract_file_path))
1696
1697 comment_re=re.compile(r"^\s*#")
1698 proc_def_re=re.compile(
1699 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+
1700 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
1701 line_OK_re=re.compile(r"^.*\|\s*OK")
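# An accepted process-definition line in the contract file looks like (an
# illustrative example):
#   21 21 -> 6 -6 | 1 23
# i.e. "in_pdgs -> out_pdgs | proc_class proc_label", which is stored as
# {((21, 21), (6, -6)): 23}; any other non-comment line must contain '| OK'.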
1702 for line in file(contract_file_path):
1703
1704 if not comment_re.match(line) is None:
1705 continue
1706
1707 proc_def = proc_def_re.match(line)
1708 if not proc_def is None:
1709 if int(proc_def.group('proc_class'))!=1:
1710 raise fks_common.FKSProcessError(
1711 'aMCatNLO can only handle loop processes generated by the OLP with a '+\
1712 'process class attribute equal to 1. Found %s instead in: \n%s'\
1713 %(proc_def.group('proc_class'),line))
1714 in_pdgs=tuple([int(in_pdg) for in_pdg in \
1715 proc_def.group('in_pdgs').split()])
1716 out_pdgs=tuple([int(out_pdg) for out_pdg in \
1717 proc_def.group('out_pdgs').split()])
1718 proc_def_to_label[(in_pdgs,out_pdgs)]=\
1719 int(proc_def.group('proc_label'))
1720 continue
1721
1722 if line_OK_re.match(line) is None:
1723 raise fks_common.FKSProcessError(
1724 'The OLP could not process the following line: \n%s'%line)
1725
1726 return proc_def_to_label
1727
1728
1730 """writes the V**** directory inside the P**** directories specified in
1731 dir_name"""
1732
1733 cwd = os.getcwd()
1734
1735 matrix_element = loop_matrix_element
1736
1737
1738 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
1739 try:
1740 os.mkdir(dirpath)
1741 except os.error as error:
1742 logger.warning(error.strerror + " " + dirpath)
1743
1744
1745 name = "V%s" % matrix_element.get('processes')[0].shell_string()
1746 dirpath = os.path.join(dir_name, name)
1747
1748 try:
1749 os.mkdir(dirpath)
1750 except os.error as error:
1751 logger.warning(error.strerror + " " + dirpath)
1752
1753 try:
1754 os.chdir(dirpath)
1755 except os.error:
1756 logger.error('Could not cd to directory %s' % dirpath)
1757 return 0
1758
1759 logger.info('Creating files in directory %s' % name)
1760
1761
1762 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1763
1764 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
1765
1766 filename = 'born_matrix.f'
1767 calls = self.write_bornmatrix(
1768 writers.FortranWriter(filename),
1769 matrix_element,
1770 fortran_model)
1771
1772 filename = 'nexternal.inc'
1773 self.write_nexternal_file(writers.FortranWriter(filename),
1774 nexternal, ninitial)
1775
1776 filename = 'pmass.inc'
1777 self.write_pmass_file(writers.FortranWriter(filename),
1778 matrix_element)
1779
1780 filename = 'ngraphs.inc'
1781 self.write_ngraphs_file(writers.FortranWriter(filename),
1782 len(matrix_element.get_all_amplitudes()))
1783
1784 filename = "loop_matrix.ps"
1785 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
1786 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
1787 filename,
1788 model=matrix_element.get('processes')[0].get('model'),
1789 amplitude='')
1790 logger.info("Drawing loop Feynman diagrams for " + \
1791 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1792 plot.draw()
1793
1794 filename = "born_matrix.ps"
1795 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
1796 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\
1797 get('model'),amplitude='')
1798 logger.info("Generating born Feynman diagrams for " + \
1799 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1800 plot.draw()
1801
1802
1803
1804
1805 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
1806 open('unique_id.inc','w').write(
1807 """ integer UNIQUE_ID
1808 parameter(UNIQUE_ID=1)""")
1809
1810 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
1811 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
1812 'MadLoopCommons.f','MadLoopParams.inc']
1813
1814
1815 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
1816 pjoin('..','MadLoop5_resources'))
1817
1818 for file in linkfiles:
1819 ln('../../%s' % file)
1820
1821 os.system("ln -s ../../makefile_loop makefile")
1822
1823 linkfiles = ['mpmodule.mod']
1824
1825 for file in linkfiles:
1826 ln('../../../lib/%s' % file)
1827
1828 linkfiles = ['coef_specs.inc']
1829
1830 for file in linkfiles:
1831 ln('../../../Source/DHELAS/%s' % file)
1832
1833
1834 os.chdir(cwd)
1835
1836 if not calls:
1837 calls = 0
1838 return calls
1839
1841 """computes the QED/QCD orders from the knowledge of the n of ext particles
1842 and of the weighted orders"""
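# Illustration of the formula below: for a 2 -> 2 pure-QCD born process with
# nexternal = 4 and WEIGHTED = 2 one gets QED = 2 - 4 + 2 = 0 and
# QCD = 2 - 2*0 = 2; the WEIGHTED order counts QED couplings with weight 2.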
1843
1844
1845 QED = weighted - nexternal + 2
1846 QCD = weighted - 2 * QED
1847 return QED, QCD
1848
1849
1850
1851
1852
1853
1854
1856 """Creates the OLE_order.lh file. This function should be edited according
1857 to the OLP which is used. For now it is generic."""
1858
1859
1860 if len(process_list)==0:
1861 raise fks_common.FKSProcessError('No matrix elements provided to '+\
1862 'the function write_lh_order.')
1863 return
1864
1865
1866
1867 orders = process_list[0].get('orders')
1868 if 'QED' in orders.keys() and 'QCD' in orders.keys():
1869 QED=orders['QED']
1870 QCD=orders['QCD']
1871 elif 'QED' in orders.keys():
1872 QED=orders['QED']
1873 QCD=0
1874 elif 'QCD' in orders.keys():
1875 QED=0
1876 QCD=orders['QCD']
1877 else:
1878 QED, QCD = self.get_qed_qcd_orders_from_weighted(\
1879 len(process_list[0].get('legs')),
1880 orders['WEIGHTED'])
1881
1882 replace_dict = {}
1883 replace_dict['mesq'] = 'CHaveraged'
1884 replace_dict['corr'] = ' '.join(process_list[0].\
1885 get('perturbation_couplings'))
1886 replace_dict['irreg'] = 'CDR'
1887 replace_dict['aspow'] = QCD
1888 replace_dict['aepow'] = QED
1889 replace_dict['modelfile'] = './param_card.dat'
1890 replace_dict['params'] = 'alpha_s'
1891 proc_lines=[]
1892 for proc in process_list:
1893 proc_lines.append('%s -> %s' % \
1894 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
1895 ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
1896 replace_dict['pdgs'] = '\n'.join(proc_lines)
1897 replace_dict['symfin'] = 'Yes'
1898 content = \
1899 "#OLE_order written by MadGraph5_aMC@NLO\n\
1900 \n\
1901 MatrixElementSquareType %(mesq)s\n\
1902 CorrectionType %(corr)s\n\
1903 IRregularisation %(irreg)s\n\
1904 AlphasPower %(aspow)d\n\
1905 AlphaPower %(aepow)d\n\
1906 NJetSymmetrizeFinal %(symfin)s\n\
1907 ModelFile %(modelfile)s\n\
1908 Parameters %(params)s\n\
1909 \n\
1910 # process\n\
1911 %(pdgs)s\n\
1912 " % replace_dict
1913
1914 file = open(filename, 'w')
1915 file.write(content)
1916 file.close()
1917 return
1918
1919
1920
1921
1922
1923
1925 """Export a matrix element to a born.f file in MadFKS format"""
1926
1927 matrix_element = fksborn.born_matrix_element
1928
1929 if not matrix_element.get('processes') or \
1930 not matrix_element.get('diagrams'):
1931 return 0
1932
1933 if not isinstance(writer, writers.FortranWriter):
1934 raise writers.FortranWriter.FortranWriterError(\
1935 "writer not FortranWriter")
1936
1937 writers.FortranWriter.downcase = False
1938
1939 replace_dict = {}
1940
1941
1942 info_lines = self.get_mg5_info_lines()
1943 replace_dict['info_lines'] = info_lines
1944
1945
1946 process_lines = self.get_process_info_lines(matrix_element)
1947 replace_dict['process_lines'] = process_lines
1948
1949
1950
1951 ncomb = matrix_element.get_helicity_combinations()
1952 replace_dict['ncomb'] = ncomb
1953
1954
1955 helicity_lines = self.get_helicity_lines(matrix_element)
1956 replace_dict['helicity_lines'] = helicity_lines
1957
1958
1959 ic_line = self.get_ic_line(matrix_element)
1960 replace_dict['ic_line'] = ic_line
1961
1962
1963
1964
1965
1966
1967 ngraphs = matrix_element.get_number_of_amplitudes()
1968 replace_dict['ngraphs'] = ngraphs
1969
1970
1971 nwavefuncs = matrix_element.get_number_of_wavefunctions()
1972 replace_dict['nwavefuncs'] = nwavefuncs
1973
1974
1975 ncolor = max(1, len(matrix_element.get('color_basis')))
1976 replace_dict['ncolor'] = ncolor
1977
1978
1979 color_data_lines = self.get_color_data_lines(matrix_element)
1980 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
1981
1982
1983 helas_calls = fortran_model.get_matrix_element_calls(\
1984 matrix_element)
1985 replace_dict['helas_calls'] = "\n".join(helas_calls)
1986
1987
1988 amp2_lines = self.get_amp2_lines(matrix_element)
1989 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
1990
1991
1992 jamp_lines = self.get_JAMP_lines(matrix_element)
1993 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
1994
1995
1996 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
1997 replace_dict['wavefunctionsize'] = 20
1998 else:
1999 replace_dict['wavefunctionsize'] = 8
2000
2001
2002 ij_lines = self.get_ij_lines(fksborn)
2003 replace_dict['ij_lines'] = '\n'.join(ij_lines)
2004
2005
2006 den_factor_lines = self.get_den_factor_lines(fksborn)
2007 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2008
2009
2010 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1)
2011
2012 file = open(os.path.join(_file_path, \
2013 'iolibs/template_files/born_fks.inc')).read()
2014 file = file % replace_dict
2015
2016
2017 writer.writelines(file)
2018
2019 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2020
2021
2023 """Export a matrix element to a born_hel.f file in MadFKS format"""
2024
2025 matrix_element = fksborn.born_matrix_element
2026
2027 if not matrix_element.get('processes') or \
2028 not matrix_element.get('diagrams'):
2029 return 0
2030
2031 if not isinstance(writer, writers.FortranWriter):
2032 raise writers.FortranWriter.FortranWriterError(\
2033 "writer not FortranWriter")
2034
2035 writers.FortranWriter.downcase = False
2036
2037 replace_dict = {}
2038
2039
2040 info_lines = self.get_mg5_info_lines()
2041 replace_dict['info_lines'] = info_lines
2042
2043
2044 process_lines = self.get_process_info_lines(matrix_element)
2045 replace_dict['process_lines'] = process_lines
2046
2047
2048
2049 ncomb = matrix_element.get_helicity_combinations()
2050 replace_dict['ncomb'] = ncomb
2051
2052
2053 helicity_lines = self.get_helicity_lines(matrix_element)
2054 replace_dict['helicity_lines'] = helicity_lines
2055
2056
2057 ic_line = self.get_ic_line(matrix_element)
2058 replace_dict['ic_line'] = ic_line
2059
2060
2061
2062
2063
2064
2065 ngraphs = matrix_element.get_number_of_amplitudes()
2066 replace_dict['ngraphs'] = ngraphs
2067
2068
2069 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2070 replace_dict['nwavefuncs'] = nwavefuncs
2071
2072
2073 ncolor = max(1, len(matrix_element.get('color_basis')))
2074 replace_dict['ncolor'] = ncolor
2075
2076
2077 color_data_lines = self.get_color_data_lines(matrix_element)
2078 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2079
2080
2081 amp2_lines = self.get_amp2_lines(matrix_element)
2082 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2083
2084
2085 jamp_lines = self.get_JAMP_lines(matrix_element)
2086 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2087
2088
2089 den_factor_lines = self.get_den_factor_lines(fksborn)
2090 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2091
2092
2093 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2094
2095 file = open(os.path.join(_file_path, \
2096 'iolibs/template_files/born_fks_hel.inc')).read()
2097 file = file % replace_dict
2098
2099
2100 writer.writelines(file)
2101
2102 return
2103
2104
2105
2106
2107
2108
2110 """Creates the sborn_sf.f file, containing the calls to the different
2111 color linked borns"""
2112
2113 replace_dict = {}
2114 nborns = len(color_links)
2115 ifkss = []
2116 iborns = []
2117 mms = []
2118 nns = []
2119 iflines = "\n"
2120
2121
2122 file = """subroutine sborn_sf(p_born,m,n,wgt)
2123 implicit none
2124 include "nexternal.inc"
2125 double precision p_born(0:3,nexternal-1),wgt
2126 double complex wgt1(2)
2127 integer m,n \n"""
2128
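# Build an if/elseif chain over the pairs of color-linked partons (m,n): each
# branch calls the corresponding sb_sf_XXX(p_born,wgt) subroutine, and any pair
# that matches no color link returns wgt = 0d0.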
2129 if nborns > 0:
2130
2131 for i, c_link in enumerate(color_links):
2132 iborn = i+1
2133
2134 iff = {True : 'if', False : 'elseif'}[i==0]
2135
2136 m, n = c_link['link']
2137
2138 if m != n:
2139 iflines += \
2140 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2141 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
2142 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2143 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2144 else:
2145 iflines += \
2146 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2147 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
2148 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2149 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2150
2151
2152 file += iflines + \
2153 """else
2154 wgt = 0d0
2155 endif
2156
2157 return
2158 end"""
2159 elif nborns == 0:
2160
2161 file+="""
2162 c This is a dummy function because
2163 c this subdir has no soft singularities
2164 wgt = 0d0
2165
2166 return
2167 end"""
2168
2169
2170 writer.writelines(file)
2171
2172
2173
2174
2175
2176
2178 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format"""
2179
2180 matrix_element = copy.copy(fksborn.born_matrix_element)
2181
2182 if not matrix_element.get('processes') or \
2183 not matrix_element.get('diagrams'):
2184 return 0
2185
2186 if not isinstance(writer, writers.FortranWriter):
2187 raise writers.FortranWriter.FortranWriterError(\
2188 "writer not FortranWriter")
2189
2190 writers.FortranWriter.downcase = False
2191
2192 iborn = i + 1
2193 link = fksborn.color_links[i]
2194
2195 replace_dict = {}
2196
2197 replace_dict['iborn'] = iborn
2198
2199
2200 info_lines = self.get_mg5_info_lines()
2201 replace_dict['info_lines'] = info_lines
2202
2203
2204 process_lines = self.get_process_info_lines(matrix_element)
2205 replace_dict['process_lines'] = process_lines + \
2206 "\nc spectators: %d %d \n" % tuple(link['link'])
2207
2208
2209 ncomb = matrix_element.get_helicity_combinations()
2210 replace_dict['ncomb'] = ncomb
2211
2212
2213 helicity_lines = self.get_helicity_lines(matrix_element)
2214 replace_dict['helicity_lines'] = helicity_lines
2215
2216
2217 ic_line = self.get_ic_line(matrix_element)
2218 replace_dict['ic_line'] = ic_line
2219
2220
2221 den_factor_lines = self.get_den_factor_lines(fksborn)
2222 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2223
2224
2225 ngraphs = matrix_element.get_number_of_amplitudes()
2226 replace_dict['ngraphs'] = ngraphs
2227
2228
2229 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2230 replace_dict['nwavefuncs'] = nwavefuncs
2231
2232
2233 ncolor1 = max(1, len(link['orig_basis']))
2234 replace_dict['ncolor1'] = ncolor1
2235 ncolor2 = max(1, len(link['link_basis']))
2236 replace_dict['ncolor2'] = ncolor2
2237
2238
2239 color_data_lines = self.get_color_data_lines_from_color_matrix(\
2240 link['link_matrix'])
2241 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2242
2243
2244 amp2_lines = self.get_amp2_lines(matrix_element)
2245 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2246
2247
2248 jamp_lines = self.get_JAMP_lines(matrix_element)
2249 new_jamp_lines = []
2250 for line in jamp_lines:
2251 line = string.replace(line, 'JAMP', 'JAMP1')
2252 new_jamp_lines.append(line)
2253 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines)
2254
2255 matrix_element.set('color_basis', link['link_basis'] )
2256 jamp_lines = self.get_JAMP_lines(matrix_element)
2257 new_jamp_lines = []
2258 for line in jamp_lines:
2259 line = string.replace(line, 'JAMP', 'JAMP2')
2260 new_jamp_lines.append(line)
2261 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines)
2262
2263
2264
2265 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2266
2267 file = open(os.path.join(_file_path, \
2268 'iolibs/template_files/b_sf_xxx_fks.inc')).read()
2269 file = file % replace_dict
2270
2271
2272 writer.writelines(file)
2273
2274 return 0 , ncolor1
2275
2276
2277
2278
2279
2280
2282 """Write the born_nhel.inc file for MG4."""
2283
2284 ncomb = matrix_element.get_helicity_combinations()
2285 file = " integer max_bhel, max_bcol \n"
2286 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \
2287 (ncomb, nflows)
2288
2289
2290 writer.writelines(file)
2291
2292 return True
2293
2294
2295
2296
2298 """Writes the content of nFKSconfigs.inc, which just gives the
2299 total number of FKS directories as a parameter.
2300 nFKSconfigs is always >=1 (use a fake configuration for LOonly)"""
2301 replace_dict = {}
2302 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)
2303 content = \
2304 """ INTEGER FKS_CONFIGS
2305 PARAMETER (FKS_CONFIGS=%(nconfs)d)
2306
2307 """ % replace_dict
2308
2309 writer.writelines(content)
2310
2311
2312
2313
2314
2316 """Writes the content of fks_info.inc, which lists the informations on the
2317 possible splittings of the born ME.
2318 nconfs is always >=1 (use a fake configuration for LOonly).
2319 The fake configuration uses an 'antigluon' (id -21, color=8) as i_fks and
2320 the last colored particle as j_fks."""
2321
2322 replace_dict = {}
2323 fks_info_list = fksborn.get_fks_info_list()
2324 replace_dict['nconfs'] = max(len(fks_info_list), 1)
2325
2326
2327 if len(fks_info_list) > 0:
2328 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \
2329 for info in fks_info_list])
2330 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \
2331 for info in fks_info_list])
2332
2333 col_lines = []
2334 pdg_lines = []
2335 charge_lines = []
2336 fks_j_from_i_lines = []
2337 for i, info in enumerate(fks_info_list):
2338 col_lines.append( \
2339 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2340 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) ))
2341 pdg_lines.append( \
2342 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2343 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
2344 charge_lines.append(\
2345 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\
2346 % (i + 1, ', '.join('%19.15fd0' % charg\
2347 for charg in fksborn.real_processes[info['n_me']-1].charges) ))
2348 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\
2349 i + 1))
2350 else:
2351
2352
2353
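# No real-emission configurations (LOonly case): build a single fake configuration,
# appending an 'antigluon' (id -21, color octet) as i_fks and using the last
# colored born leg as j_fks.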
2354 bornproc = fksborn.born_matrix_element.get('processes')[0]
2355 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
2356 colors = [l.get('color') for l in bornproc.get('legs')] + [8]
2357 charges = [0.] * len(colors)
2358
2359 fks_i = len(colors)
2360
2361
2362 fks_j=1
2363 for cpos, col in enumerate(colors[:-1]):
2364 if col != 1:
2365 fks_j = cpos+1
2366
2367 fks_i_values = str(fks_i)
2368 fks_j_values = str(fks_j)
2369 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2370 % ', '.join([str(col) for col in colors])]
2371 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2372 % ', '.join([str(pdg) for pdg in pdgs])]
2373 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2374 % ', '.join('%19.15fd0' % charg for charg in charges)]
2375 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \
2376 % (fks_i, fks_j)]
2377
2378
2379 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values
2380 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values
2381 replace_dict['col_lines'] = '\n'.join(col_lines)
2382 replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
2383 replace_dict['charge_lines'] = '\n'.join(charge_lines)
2384 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)
2385
2386 content = \
2387 """ INTEGER IPOS, JPOS
2388 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d)
2389 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL)
2390 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL)
2391 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL)
2392
2393 %(fks_i_line)s
2394 %(fks_j_line)s
2395
2396 %(fks_j_from_i_lines)s
2397
2398 C
2399 C Particle type:
2400 C octet = 8, triplet = 3, singlet = 1
2401 %(col_lines)s
2402
2403 C
2404 C Particle type according to PDG:
2405 C
2406 %(pdg_lines)s
2407
2408 C
2409 C Particle charge:
2410 C the charge is set to 0. when only QCD corrections are computed, where it is irrelevant
2411 %(charge_lines)s
2412 """ % replace_dict
2413 if not isinstance(writer, writers.FortranWriter):
2414 raise writers.FortranWriter.FortranWriterError(\
2415 "writer not FortranWriter")
2416
2417 writers.FortranWriter.downcase = False
2418
2419 writer.writelines(content)
2420
2421 return True
2422
2423
2424
2425
2426
2427
2429 """Export a matrix element to a matrix.f file in MG4 madevent format"""
2430
2431 if not matrix_element.get('processes') or \
2432 not matrix_element.get('diagrams'):
2433 return 0,0
2434
2435 if not isinstance(writer, writers.FortranWriter):
2436 raise writers.FortranWriter.FortranWriterError(\
2437 "writer not FortranWriter")
2438
2439 writers.FortranWriter.downcase = False
2440
2441 replace_dict = {}
2442 replace_dict['N_me'] = n
2443
2444
2445 info_lines = self.get_mg5_info_lines()
2446 replace_dict['info_lines'] = info_lines
2447
2448
2449 process_lines = self.get_process_info_lines(matrix_element)
2450 replace_dict['process_lines'] = process_lines
2451
2452
2453 ncomb = matrix_element.get_helicity_combinations()
2454 replace_dict['ncomb'] = ncomb
2455
2456
2457 helicity_lines = self.get_helicity_lines(matrix_element)
2458 replace_dict['helicity_lines'] = helicity_lines
2459
2460
2461 ic_line = self.get_ic_line(matrix_element)
2462 replace_dict['ic_line'] = ic_line
2463
2464
2465
2466 den_factor_line = self.get_den_factor_line(matrix_element)
2467 replace_dict['den_factor_line'] = den_factor_line
2468
2469
2470 ngraphs = matrix_element.get_number_of_amplitudes()
2471 replace_dict['ngraphs'] = ngraphs
2472
2473
2474 ncolor = max(1, len(matrix_element.get('color_basis')))
2475 replace_dict['ncolor'] = ncolor
2476
2477
2478 color_data_lines = self.get_color_data_lines(matrix_element)
2479 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2480
2481
2482 helas_calls = fortran_model.get_matrix_element_calls(\
2483 matrix_element)
2484 replace_dict['helas_calls'] = "\n".join(helas_calls)
2485
2486
2487
2488 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2489 replace_dict['nwavefuncs'] = nwavefuncs
2490
2491
2492 amp2_lines = self.get_amp2_lines(matrix_element)
2493 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2494
2495
2496 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2497 replace_dict['wavefunctionsize'] = 20
2498 else:
2499 replace_dict['wavefunctionsize'] = 8
2500
2501
2502 jamp_lines = self.get_JAMP_lines(matrix_element)
2503
2504 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2505
2506 realfile = open(os.path.join(_file_path, \
2507 'iolibs/template_files/realmatrix_fks.inc')).read()
2508
2509 realfile = realfile % replace_dict
2510
2511
2512 writer.writelines(realfile)
2513
2514 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2515
2516
2517
2518
2519
2521
2522 """Write the auto_dsig.f file for MadFKS, which contains
2523 pdf call information"""
2524
2525 if not matrix_element.get('processes') or \
2526 not matrix_element.get('diagrams'):
2527 return 0
2528
2529 nexternal, ninitial = matrix_element.get_nexternal_ninitial()
2530
2531 if ninitial < 1 or ninitial > 2:
2532 raise writers.FortranWriter.FortranWriterError, \
2533 """Need ninitial = 1 or 2 to write auto_dsig file"""
2534
2535 replace_dict = {}
2536
2537 replace_dict['N_me'] = n
2538
2539
2540 info_lines = self.get_mg5_info_lines()
2541 replace_dict['info_lines'] = info_lines
2542
2543
2544 process_lines = self.get_process_info_lines(matrix_element)
2545 replace_dict['process_lines'] = process_lines
2546
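# PDF declaration/data/call lines for the direct beam assignment and, just below,
# the call lines for the mirrored (beam-swapped) assignment.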
2547 pdf_vars, pdf_data, pdf_lines = \
2548 self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
2549 replace_dict['pdf_vars'] = pdf_vars
2550 replace_dict['pdf_data'] = pdf_data
2551 replace_dict['pdf_lines'] = pdf_lines
2552
2553 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \
2554 self.get_pdf_lines_mir(matrix_element, ninitial, False, True)
2555 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr
2556
2557 file = open(os.path.join(_file_path, \
2558 'iolibs/template_files/parton_lum_n_fks.inc')).read()
2559 file = file % replace_dict
2560
2561
2562 writer.writelines(file)
2563
2564
2565
2566
2567
2568
2569
2571 """Write the coloramps.inc file for MadEvent"""
2572
2573 lines = []
2574 lines.append( "logical icolamp(%d,%d,1)" % \
2575 (max(len(matrix_element.get('color_basis').keys()), 1),
2576 len(mapconfigs)))
2577
2578 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1)
2579
2580
2581 writer.writelines(lines)
2582
2583 return True
2584
2585
2586
2587
2588
2589
2591 """Write the leshouche.inc file for MG4"""
2592
2593
2594 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2595
2596 lines = []
2597 for iproc, proc in enumerate(matrix_element.get('processes')):
2598 legs = proc.get_legs_with_decays()
2599 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \
2600 (iproc + 1, nexternal,
2601 ",".join([str(l.get('id')) for l in legs])))
2602 for i in [1, 2]:
2603 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \
2604 (i, iproc + 1, nexternal,
2605 ",".join([ "%3r" % 0 ] * ninitial + \
2606 [ "%3r" % i ] * (nexternal - ninitial))))
2607
2608
2609
2610 if iproc == 0:
2611
2612 if not matrix_element.get('color_basis'):
2613 for i in [1, 2]:
2614 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \
2615 (i, nexternal,
2616 ",".join([ "%3r" % 0 ] * nexternal)))
2617 color_flow_list = []
2618
2619 else:
2620
2621 repr_dict = {}
2622 for l in legs:
2623 repr_dict[l.get('number')] = \
2624 proc.get('model').get_particle(l.get('id')).get_color()\
2625 * (-1)**(1+l.get('state'))
2626
2627 color_flow_list = \
2628 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2629 ninitial)
2630
2631 for cf_i, color_flow_dict in enumerate(color_flow_list):
2632 for i in [0, 1]:
2633 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \
2634 (i + 1, cf_i + 1, nexternal,
2635 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
2636 for l in legs])))
2637
2638
2639 writer.writelines(lines)
2640
2641 return len(color_flow_list)
2642
2643
2644
2645
2646
2647
2649 """Write the configs.inc file for MadEvent"""
2650
2651
2652 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2653 lines = []
2654
2655 iconfig = 0
2656
2657 s_and_t_channels = []
2658 mapconfigs = []
2659
2660 model = matrix_element.get('processes')[0].get('model')
2661
2662
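# Determine the smallest maximal vertex multiplicity among the diagrams; any
# diagram containing a larger multi-particle vertex is skipped when building
# the integration channels.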
2663 base_diagrams = matrix_element.get('base_amplitude').get('diagrams')
2664 model = matrix_element.get('base_amplitude').get('process').get('model')
2665 minvert = min([max([len(vert.get('legs')) for vert in \
2666 diag.get('vertices')]) for diag in base_diagrams])
2667
2668 for idiag, diag in enumerate(base_diagrams):
2669 if any([len(vert.get('legs')) > minvert for vert in
2670 diag.get('vertices')]):
2671
2672 continue
2673 iconfig = iconfig + 1
2674 helas_diag = matrix_element.get('diagrams')[idiag]
2675 mapconfigs.append(helas_diag.get('number'))
2676 lines.append("# Diagram %d, Amplitude %d" % \
2677 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number']))
2678
2679 lines.append("data mapconfig(%4d)/%4d/" % (iconfig,
2680 helas_diag.get('amplitudes')[0]['number']))
2681
2682
2683
2684 schannels, tchannels = helas_diag.get('amplitudes')[0].\
2685 get_s_and_t_channels(ninitial, model, 990)
2686
2687 s_and_t_channels.append([schannels, tchannels])
2688
2689
2690 allchannels = schannels
2691 if len(tchannels) > 1:
2692
2693 allchannels = schannels + tchannels
2694
2695 for vert in allchannels:
2696 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
2697 last_leg = vert.get('legs')[-1]
2698 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \
2699 (last_leg.get('number'), iconfig, len(daughters),
2700 ",".join(["%3d" % d for d in daughters])))
2701 if vert in schannels:
2702 lines.append("data sprop(%4d,%4d)/%8d/" % \
2703 (last_leg.get('number'), iconfig,
2704 last_leg.get('id')))
2705 elif vert in tchannels[:-1]:
2706 lines.append("data tprid(%4d,%4d)/%8d/" % \
2707 (last_leg.get('number'), iconfig,
2708 abs(last_leg.get('id'))))
2709
2710
2711 lines.append("# Number of configs")
2712 lines.append("data mapconfig(0)/%4d/" % iconfig)
2713
2714
2715 writer.writelines(lines)
2716
2717 return iconfig, mapconfigs, s_and_t_channels
2718
2719
2720
2721
2722
2723
2725 """Write the decayBW.inc file for MadEvent"""
2726
2727 lines = []
2728
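# Note that both entries map to .false.: the forced Breit-Wigner treatment
# (gForceBW) is never switched on here.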
2729 booldict = {False: ".false.", True: ".false."}
2730
2731
2732 for iconf, config in enumerate(s_and_t_channels):
2733 schannels = config[0]
2734 for vertex in schannels:
2735
2736
2737 leg = vertex.get('legs')[-1]
2738 lines.append("data gForceBW(%d,%d)/%s/" % \
2739 (leg.get('number'), iconf + 1,
2740 booldict[leg.get('from_group')]))
2741
2742
2743 writer.writelines(lines)
2744
2745 return True
2746
2747
2748
2749
2750
2752 """Write the dname.mg file for MG4"""
2753
2754 line = "DIRNAME=P%s" % \
2755 matrix_element.get('processes')[0].shell_string()
2756
2757
2758 writer.write(line + "\n")
2759
2760 return True
2761
2762
2763
2764
2765
2767 """Write the iproc.dat file for MG4"""
2768
2769 line = "%d" % (me_number + 1)
2770
2771
2772 for line_to_write in writer.write_line(line):
2773 writer.write(line_to_write)
2774 return True
2775
2776
2777
2778
2779
2780
2781
2782
2783
2784
2785
2787 """generate the lines for fks.inc describing initializating the
2788 fks_j_from_i array"""
2789 lines = []
2790 if not me.isfinite:
2791 for ii, js in me.fks_j_from_i.items():
2792 if js:
2793 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2794 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
2795 else:
2796 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2797 % (i, 2, 1, 1, '1'))
2798 lines.append('')
2799
2800 return lines
2801
2802
2803
2804
2805
2807
2808 """Write the leshouche.inc file for MG4"""
2809
2810
2811 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2812
2813 lines = []
2814 for iproc, proc in enumerate(matrix_element.get('processes')):
2815 legs = proc.get_legs_with_decays()
2816 lines.append("I %4d %4d %s" % \
2817 (ime, iproc + 1,
2818 " ".join([str(l.get('id')) for l in legs])))
2819 for i in [1, 2]:
2820 lines.append("M %4d %4d %4d %s" % \
2821 (ime, i, iproc + 1,
2822 " ".join([ "%3d" % 0 ] * ninitial + \
2823 [ "%3d" % i ] * (nexternal - ninitial))))
2824
2825
2826
2827 if iproc == 0:
2828
2829 if not matrix_element.get('color_basis'):
2830 for i in [1, 2]:
2831 lines.append("C %4d %4d 1 %s" % \
2832 (ime, i,
2833 " ".join([ "%3d" % 0 ] * nexternal)))
2834 color_flow_list = []
2835 nflow = 1
2836
2837 else:
2838
2839 repr_dict = {}
2840 for l in legs:
2841 repr_dict[l.get('number')] = \
2842 proc.get('model').get_particle(l.get('id')).get_color()\
2843 * (-1)**(1+l.get('state'))
2844
2845 color_flow_list = \
2846 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2847 ninitial)
2848
2849 for cf_i, color_flow_dict in enumerate(color_flow_list):
2850 for i in [0, 1]:
2851 lines.append("C %4d %4d %4d %s" % \
2852 (ime, i + 1, cf_i + 1,
2853 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2854 for l in legs])))
2855
2856 nflow = len(color_flow_list)
2857
2858 nproc = len(matrix_element.get('processes'))
2859
2860 return lines, nproc, nflow
2861
2862
2864
2865 """As get_leshouche_lines, but for 'fake' real emission processes (LOonly
2866 In this case, write born color structure times ij -> i,j splitting)
2867 """
2868
2869 bornproc = matrix_element.get('processes')[0]
2870 colors = [l.get('color') for l in bornproc.get('legs')]
2871
2872 fks_i = len(colors)
2873
2874
2875 fks_j=1
2876 for cpos, col in enumerate(colors):
2877 if col != 1:
2878 fks_j = cpos+1
2879
2880
2881 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2882 nexternal+=1
2883
2884 lines = []
2885 for iproc, proc in enumerate(matrix_element.get('processes')):
2886
2887 legs = proc.get_legs_with_decays() + \
2888 [fks_common.FKSLeg({'id': -21,
2889 'number': nexternal,
2890 'state': True,
2891 'fks': 'i',
2892 'color': 8,
2893 'charge': 0.,
2894 'massless': True,
2895 'spin': 3,
2896 'is_part': True,
2897 'self_antipart': True})]
2898
2899 lines.append("I %4d %4d %s" % \
2900 (ime, iproc + 1,
2901 " ".join([str(l.get('id')) for l in legs])))
2902 for i in [1, 2]:
2903 lines.append("M %4d %4d %4d %s" % \
2904 (ime, i, iproc + 1,
2905 " ".join([ "%3d" % 0 ] * ninitial + \
2906 [ "%3d" % i ] * (nexternal - ninitial))))
2907
2908
2909
2910 if iproc == 0:
2911
2912 if not matrix_element.get('color_basis'):
2913 for i in [1, 2]:
2914 lines.append("C %4d %4d 1 %s" % \
2915 (ime, i,
2916 " ".join([ "%3d" % 0 ] * nexternal)))
2917 color_flow_list = []
2918 nflow = 1
2919
2920 else:
2921
2922
2923
2924 repr_dict = {}
2925 for l in legs[:-1]:
2926 repr_dict[l.get('number')] = \
2927 proc.get('model').get_particle(l.get('id')).get_color()\
2928 * (-1)**(1+l.get('state'))
2929
2930 color_flow_list = \
2931 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2932 ninitial)
2933
2934 for cf_i, color_flow_dict in enumerate(color_flow_list):
2935
2936
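# Insert the extra antigluon on the color line of j_fks: the new leg inherits
# j_fks's existing (anti)color index together with a fresh index maxicol+1,
# which replaces the old one on j_fks, on the color or anticolor side depending
# on which index of j_fks is filled.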
2937 maxicol = max(sum(color_flow_dict.values(), []))
2938
2939 if color_flow_dict[fks_j][0] == 0:
2940 anti = True
2941 icol_j = color_flow_dict[fks_j][1]
2942 else:
2943 anti = False
2944 icol_j = color_flow_dict[fks_j][0]
2945
2946 if anti:
2947 color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1])
2948 color_flow_dict[fks_j][1] = maxicol + 1
2949 else:
2950 color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1)
2951 color_flow_dict[fks_j][0] = maxicol + 1
2952
2953 for i in [0, 1]:
2954 lines.append("C %4d %4d %4d %s" % \
2955 (ime, i + 1, cf_i + 1,
2956 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2957 for l in legs])))
2958
2959 nflow = len(color_flow_list)
2960
2961 nproc = len(matrix_element.get('processes'))
2962
2963 return lines, nproc, nflow
2964
2965
2966
2967
2968
2970 """returns the lines with the information on the denominator keeping care
2971 of the identical particle factors in the various real emissions"""
2972
2973 lines = []
2974 info_list = fks_born.get_fks_info_list()
2975 if info_list:
2976
2977 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list))
2978 lines.append('DATA IDEN_VALUES /' + \
2979 ', '.join(['%d' % (
2980 fks_born.born_matrix_element.get_denominator_factor() ) \
2981 for info in info_list]) + '/')
2982 else:
2983
2984 lines.append('INTEGER IDEN_VALUES(1)')
2985 lines.append('DATA IDEN_VALUES / %d /' \
2986 % fks_born.born_matrix_element.get_denominator_factor())
2987
2988 return lines
2989
2990
2991
2992
2993
2995 """returns the lines with the information on the particle number of the born
2996 that splits"""
2997 info_list = fks_born.get_fks_info_list()
2998 lines = []
2999 if info_list:
3000
3001
3002 ij_list = [info['fks_info']['ij'] if \
3003 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \
3004 else 0 for info in info_list]
3005 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list))
3006 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/')
3007 else:
3008
3009 lines.append('INTEGER IJ_VALUES(1)')
3010 lines.append('DATA IJ_VALUES / 1 /')
3011
3012 return lines
3013
3014
3015 - def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
3016 mirror = False):
3017 """Generate the PDF lines for the auto_dsig.f file"""
3018
3019 processes = matrix_element.get('processes')
3020 model = processes[0].get('model')
3021
3022 pdf_definition_lines = ""
3023 pdf_data_lines = ""
3024 pdf_lines = ""
3025
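# For decay processes (a single initial-state particle) there is no PDF
# convolution: each contribution simply enters with weight 1d0.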
3026 if ninitial == 1:
3027 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
3028 for i, proc in enumerate(processes):
3029 process_line = proc.base_string()
3030 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3031 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
3032 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
3033 else:
3034
3035 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
3036 p in processes]))),
3037 sorted(list(set([p.get_initial_pdg(2) for \
3038 p in processes])))]
3039
3040
3041 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
3042 sum(initial_states,[])])
3043 for key,val in pdf_codes.items():
3044 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')
3045
3046
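# Map PDG codes onto the flavour convention expected by PDG2PDF: the gluon (21)
# becomes 0 and the photon (22) becomes 7; other flavours keep their PDG code,
# and clashes with already-used values are moved out of the way by a 6000000 offset.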
3047 pdgtopdf = {21: 0, 22: 7}
3048
3049 for pdg in sum(initial_states,[]):
3050 if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
3051 pdgtopdf[pdg] = pdg
3052 elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
3053
3054 pdgtopdf[pdg] = 6000000 + pdg
3055
3056
3057 for i in [0,1]:
3058 pdf_definition_lines += "DOUBLE PRECISION " + \
3059 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3060 for pdg in \
3061 initial_states[i]]) + \
3062 "\n"
3063
3064
3065 for i in [0,1]:
3066 pdf_data_lines += "DATA " + \
3067 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3068 for pdg in initial_states[i]]) + \
3069 "/%d*1D0/" % len(initial_states[i]) + \
3070 "\n"
3071
3072
3073 for i, init_states in enumerate(initial_states):
3074 if not mirror:
3075 ibeam = i + 1
3076 else:
3077 ibeam = 2 - i
3078 if subproc_group:
3079 pdf_lines = pdf_lines + \
3080 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
3081 % (ibeam, ibeam)
3082 else:
3083 pdf_lines = pdf_lines + \
3084 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
3085 % (ibeam, ibeam)
3086
3087 for initial_state in init_states:
3088 if initial_state in pdf_codes.keys():
3089 if subproc_group:
3090 if abs(pdgtopdf[initial_state]) <= 7:
3091 pdf_lines = pdf_lines + \
3092 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
3093 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
3094 (pdf_codes[initial_state],
3095 i + 1, ibeam, pdgtopdf[initial_state],
3096 ibeam, ibeam)
3097 else:
3098
3099 pdf_lines = pdf_lines + \
3100 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3101 "%s%d=0d0\n") % \
3102 (pdf_codes[initial_state],i + 1)
3103 else:
3104 if abs(pdgtopdf[initial_state]) <= 7:
3105 pdf_lines = pdf_lines + \
3106 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
3107 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
3108 (pdf_codes[initial_state],
3109 i + 1, ibeam, pdgtopdf[initial_state],
3110 ibeam, ibeam)
3111 else:
3112
3113 pdf_lines = pdf_lines + \
3114 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3115 "%s%d=0d0\n") % \
3116 (pdf_codes[initial_state],i + 1)
3117
3118 pdf_lines = pdf_lines + "ENDIF\n"
3119
3120
3121 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
3122 for proc in processes:
3123 process_line = proc.base_string()
3124 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3125 pdf_lines = pdf_lines + "\nPD(IPROC) = "
3126 for ibeam in [1, 2]:
3127 initial_state = proc.get_initial_pdg(ibeam)
3128 if initial_state in pdf_codes.keys():
3129 pdf_lines = pdf_lines + "%s%d*" % \
3130 (pdf_codes[initial_state], ibeam)
3131 else:
3132 pdf_lines = pdf_lines + "1d0*"
3133
3134 pdf_lines = pdf_lines[:-1] + "\n"
3135
3136
3137 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
3138
3139
3140
3142 """Return the color matrix definition lines for the given color_matrix. Split
3143 rows in chunks of size n."""
3144
3145 if not color_matrix:
3146 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
3147 else:
3148 ret_list = []
3149 my_cs = color.ColorString()
3150 for index, denominator in \
3151 enumerate(color_matrix.get_line_denominators()):
3152
3153 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
3154
3155 num_list = color_matrix.get_line_numerators(index, denominator)
3156 for k in xrange(0, len(num_list), n):
3157 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
3158 (index + 1, k + 1, min(k + n, len(num_list)),
3159 ','.join(["%5r" % i for i in num_list[k:k + n]])))
3160
3161 return ret_list
3162
3163
3164
3165
3168 """Write the maxamps.inc file for MG4."""
3169
3170 file = " integer maxamps, maxflow, maxproc, maxsproc\n"
3171 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \
3172 (maxamps, maxflows)
3173 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \
3174 (maxproc, maxsproc)
3175
3176
3177 writer.writelines(file)
3178
3179 return True
3180
3181
3182
3183
3185
3186 """Write the ncombs.inc file for MadEvent."""
3187
3188
3189 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3190
3191
3192 file = " integer n_max_cl\n"
3193 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
3194
3195
3196 writer.writelines(file)
3197
3198 return True
3199
3200
3201
3202
3204 """Write a dummy config_subproc.inc file for MadEvent"""
3205
3206 lines = []
3207
3208 for iconfig in range(len(s_and_t_channels)):
3209 lines.append("DATA CONFSUB(1,%d)/1/" % \
3210 (iconfig + 1))
3211
3212
3213 writer.writelines(lines)
3214
3215 return True
3216
3217
3218
3219
3221 """Write the get_color.f file for MadEvent, which returns color
3222 for all particles used in the matrix element."""
3223
3224 try:
3225 matrix_elements=matrix_element.real_processes[0].matrix_element
3226 except IndexError:
3227 matrix_elements=[matrix_element.born_matrix_element]
3228
3229 if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
3230 matrix_elements = [matrix_elements]
3231
3232 model = matrix_elements[0].get('processes')[0].get('model')
3233
3234
3235
3236
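# Collect the PDG codes of every wavefunction and external leg appearing in the
# real-emission and born matrix elements, so that get_color covers them all.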
3237 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3238 for wf in d.get('wavefunctions')],[]) \
3239 for d in me.get('diagrams')],[]) \
3240 for me in [real_proc.matrix_element]],[])\
3241 for real_proc in matrix_element.real_processes],[]))
3242
3243 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3244 for wf in d.get('wavefunctions')],[]) \
3245 for d in matrix_element.born_matrix_element.get('diagrams')],[])))
3246
3247
3248 leg_ids = set(sum([sum([sum([[l.get('id') for l in \
3249 p.get_legs_with_decays()] for p in \
3250 me.get('processes')], []) for me in \
3251 [real_proc.matrix_element]], []) for real_proc in \
3252 matrix_element.real_processes],[]))
3253
3254 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
3255 p.get_legs_with_decays()] for p in \
3256 matrix_element.born_matrix_element.get('processes')], [])))
3257 particle_ids = sorted(list(wf_ids.union(leg_ids)))
3258
3259 lines = """function get_color(ipdg)
3260 implicit none
3261 integer get_color, ipdg
3262
3263 if(ipdg.eq.%d)then
3264 get_color=%d
3265 return
3266 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())
3267
3268 for part_id in particle_ids[1:]:
3269 lines += """else if(ipdg.eq.%d)then
3270 get_color=%d
3271 return
3272 """ % (part_id, model.get_particle(part_id).get_color())
3273
3274
3275 lines += """else if(ipdg.eq.%d)then
3276 c This is dummy particle used in multiparticle vertices
3277 get_color=2
3278 return
3279 """ % model.get_first_non_pdg()
3280 lines += """else
3281 write(*,*)'Error: No color given for pdg ',ipdg
3282 get_color=0
3283 return
3284 endif
3285 end
3286 """
3287
3288
3289 writer.writelines(lines)
3290
3291 return True
3292
3293
3294
3295
3296
3297 - def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3298 """Write the props.inc file for MadEvent. Needs input from
3299 write_configs_file. With respect to the parent routine, it has some
3300 more specific formats that allow the props.inc file to be read by the
3301 link program"""
3302
3303 lines = []
3304
3305 particle_dict = matrix_element.get('processes')[0].get('model').\
3306 get('particle_dict')
3307
3308 for iconf, configs in enumerate(s_and_t_channels):
3309 for vertex in configs[0] + configs[1][:-1]:
3310 leg = vertex.get('legs')[-1]
3311 if leg.get('id') not in particle_dict:
3312
3313 mass = 'zero'
3314 width = 'zero'
3315 pow_part = 0
3316 else:
3317 particle = particle_dict[leg.get('id')]
3318
3319 if particle.get('mass').lower() == 'zero':
3320 mass = particle.get('mass')
3321 else:
3322 mass = "abs(%s)" % particle.get('mass')
3323
3324 if particle.get('width').lower() == 'zero':
3325 width = particle.get('width')
3326 else:
3327 width = "abs(%s)" % particle.get('width')
3328
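# Propagator power: 1 for fermions, 2 for bosons.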
3329 pow_part = 1 + int(particle.is_boson())
3330
3331 lines.append("pmass(%3d,%4d) = %s" % \
3332 (leg.get('number'), iconf + 1, mass))
3333 lines.append("pwidth(%3d,%4d) = %s" % \
3334 (leg.get('number'), iconf + 1, width))
3335 lines.append("pow(%3d,%4d) = %d" % \
3336 (leg.get('number'), iconf + 1, pow_part))
3337
3338
3339 writer.writelines(lines)
3340
3341 return True
3342
3343
3344
3345
3346
3348 """Append this subprocess to the subproc.mg file for MG4"""
3349
3350
3351 writer.write(subprocdir + "\n")
3352
3353 return True
3354
3355
3356
3357
3358
3359
3360
3361
3364 """Class to take care of exporting a set of matrix elements to
3365 Fortran (v4) format."""
3366
3367
3370
3371
3372
3373
3374
3376 """create the directory run_name as a copy of the MadEvent
3377 Template, and clean the directory
3378 For now it is just the same as copy_v4template, but it will be modified
3379 """
3380 mgme_dir = self.mgme_dir
3381 dir_path = self.dir_path
3382 clean =self.opt['clean']
3383
3384
3385 if not os.path.isdir(dir_path):
3386 if not mgme_dir:
3387 raise MadGraph5Error, \
3388 "No valid MG_ME path given for MG4 run directory creation."
3389 logger.info('initialize a new directory: %s' % \
3390 os.path.basename(dir_path))
3391 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
3392
3393 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),
3394 dir_path)
3395
3396 for card in ['plot_card']:
3397 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
3398 try:
3399 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
3400 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
3401 except IOError:
3402 logger.warning("Failed to copy " + card + ".dat to default")
3403
3404 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
3405 if not mgme_dir:
3406 raise MadGraph5Error, \
3407 "No valid MG_ME path given for MG4 run directory creation."
3408 try:
3409 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
3410 except IOError:
3411 MG5_version = misc.get_pkg_info()
3412 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
3413 "5." + MG5_version['version'])
3414
3415
3416 if clean:
3417 logger.info('remove old information in %s' % os.path.basename(dir_path))
3418 if os.environ.has_key('MADGRAPH_BASE'):
3419 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
3420 '--web'], cwd=dir_path)
3421 else:
3422 try:
3423 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
3424 cwd=dir_path)
3425 except Exception, why:
3426 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
3427 % (os.path.basename(dir_path),why))
3428
3429 MG_version = misc.get_pkg_info()
3430 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
3431 MG_version['version'])
3432
3433
3434 self.link_CutTools(dir_path)
3435
3436 link_tir_libs=[]
3437 tir_libs=[]
3438 tir_include=[]
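# Link the tensor-integral-reduction (TIR) libraries that are available and
# collect the corresponding linker flags, library paths and include directories,
# used below when writing makefile_loop and make_opts.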
3439 for tir in self.all_tir:
3440 tir_dir="%s_dir"%tir
3441 libpath=getattr(self,tir_dir)
3442 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
3443 libpath,"lib%s.a"%tir,tir_name=tir)
3444 setattr(self,tir_dir,libpath)
3445 if libpath != "":
3446 if tir in ['pjfry','ninja','golem', 'samurai','collier']:
3447
3448
3449 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
3450 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
3451
3452 if tir in ['ninja']:
3453 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
3454 for ext in ['a','dylib','so']):
3455 raise MadGraph5Error(
3456 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. Please place a symlink to it there."%libpath)
3457 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
3458 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
3459
3460 if tir in ['golem','samurai','ninja','collier']:
3461 trg_path = pjoin(os.path.dirname(libpath),'include')
3462 if os.path.isdir(trg_path):
3463 to_include = misc.find_includes_path(trg_path,
3464 self.include_names[tir])
3465 else:
3466 to_include = None
3467
3468 if to_include is None and tir=='collier':
3469 to_include = misc.find_includes_path(
3470 pjoin(libpath,'modules'),self.include_names[tir])
3471 if to_include is None:
3472 logger.error(
3473 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+
3474 'Generation carries on but you will need to edit the include path by hand in the makefiles.')
3475 to_include = '<Not_found_define_it_yourself>'
3476 tir_include.append('-I %s'%to_include)
3477 else:
3478 link_tir_libs.append('-l%s'%tir)
3479 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)
3480
3481 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
3482 cwd = os.getcwd()
3483 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3484 try:
3485 os.chdir(dirpath)
3486 except os.error:
3487 logger.error('Could not cd to directory %s' % dirpath)
3488 return 0
3489 filename = 'makefile_loop'
3490 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
3491 link_tir_libs,tir_libs,tir_include=tir_include)
3492 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
3493 dirpath = os.path.join(self.dir_path, 'Source')
3494 try:
3495 os.chdir(dirpath)
3496 except os.error:
3497 logger.error('Could not cd to directory %s' % dirpath)
3498 return 0
3499 filename = 'make_opts'
3500 calls = self.write_make_opts(writers.MakefileWriter(filename),
3501 link_tir_libs,tir_libs)
3502
3503 os.chdir(cwd)
3504
3505 cwd = os.getcwd()
3506 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3507 try:
3508 os.chdir(dirpath)
3509 except os.error:
3510 logger.error('Could not cd to directory %s' % dirpath)
3511 return 0
3512
3513
3514 cpfiles= ["SubProcesses/MadLoopParamReader.f",
3515 "Cards/MadLoopParams.dat",
3516 "SubProcesses/MadLoopParams.inc"]
3517
3518 for file in cpfiles:
3519 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
3520 os.path.join(self.dir_path, file))
3521
3522 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
3523 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
3524
3525
3526
3527 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
3528 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
3529 'Cards', 'MadLoopParams.dat'))
3530
3531 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
3532 "MadLoopParams.dat"))
3533
3534
3535 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
3536 "SubProcesses","MadLoopCommons.inc")).read()
3537 writer = writers.FortranWriter(os.path.join(self.dir_path,
3538 "SubProcesses","MadLoopCommons.f"))
3539 writer.writelines(MadLoopCommon%{
3540 'print_banner_commands':self.MadLoop_banner},
3541 context={'collier_available':self.tir_available_dict['collier']})
3542 writer.close()
3543
3544
3545 model_path = self.dir_path + '/Source/MODEL/'
3546
3547 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
3548 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
3549 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
3550 ln(model_path + '/mp_coupl_same_name.inc', \
3551 self.dir_path + '/SubProcesses')
3552
3553
3554 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
3555 writers.FortranWriter('cts_mpc.h'),)
3556
3557 self.copy_python_files()
3558
3559
3560
3561 self.write_pdf_opendata()
3562
3563
3564
3565 os.chdir(cwd)
3566
3568 """writes the V**** directory inside the P**** directories specified in
3569 dir_name"""
3570
3571 cwd = os.getcwd()
3572
3573 matrix_element = loop_matrix_element
3574
3575
3576 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
3577 try:
3578 os.mkdir(dirpath)
3579 except os.error as error:
3580 logger.warning(error.strerror + " " + dirpath)
3581
3582
3583 name = "V%s" % matrix_element.get('processes')[0].shell_string()
3584 dirpath = os.path.join(dir_name, name)
3585
3586 try:
3587 os.mkdir(dirpath)
3588 except os.error as error:
3589 logger.warning(error.strerror + " " + dirpath)
3590
3591 try:
3592 os.chdir(dirpath)
3593 except os.error:
3594 logger.error('Could not cd to directory %s' % dirpath)
3595 return 0
3596
3597 logger.info('Creating files in directory %s' % name)
3598
3599
3600 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3601
3602 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
3603
3604
3605 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
3606 abspath=False, cwd=None)
3607
3608
3609 filename = 'born_matrix.f'
3610 calls = self.write_bornmatrix(
3611 writers.FortranWriter(filename),
3612 matrix_element,
3613 fortran_model)
3614
3615 filename = 'nexternal.inc'
3616 self.write_nexternal_file(writers.FortranWriter(filename),
3617 nexternal, ninitial)
3618
3619 filename = 'pmass.inc'
3620 self.write_pmass_file(writers.FortranWriter(filename),
3621 matrix_element)
3622
3623 filename = 'ngraphs.inc'
3624 self.write_ngraphs_file(writers.FortranWriter(filename),
3625 len(matrix_element.get_all_amplitudes()))
3626
3627 filename = "loop_matrix.ps"
3628 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
3629 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
3630 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
3631 filename,
3632 model=matrix_element.get('processes')[0].get('model'),
3633 amplitude='')
3634 logger.info("Drawing loop Feynman diagrams for " + \
3635 matrix_element.get('processes')[0].nice_string(\
3636 print_weighted=False))
3637 plot.draw()
3638
3639 filename = "born_matrix.ps"
3640 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
3641 get('born_diagrams'),
3642 filename,
3643 model=matrix_element.get('processes')[0].\
3644 get('model'),
3645 amplitude='')
3646 logger.info("Generating born Feynman diagrams for " + \
3647 matrix_element.get('processes')[0].nice_string(\
3648 print_weighted=False))
3649 plot.draw()
3650
3651
3652
3653
3654 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
3655
3656 open('unique_id.inc','w').write(
3657 """ integer UNIQUE_ID
3658 parameter(UNIQUE_ID=1)""")
3659
3660 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
3661 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
3662 'MadLoopParams.inc','MadLoopCommons.f']
3663
3664 for file in linkfiles:
3665 ln('../../%s' % file)
3666
3667 os.system("ln -s ../../makefile_loop makefile")
3668
3669
3670 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
3671 pjoin('..','MadLoop5_resources'))
3672
3673 linkfiles = ['mpmodule.mod']
3674
3675 for file in linkfiles:
3676 ln('../../../lib/%s' % file)
3677
3678 linkfiles = ['coef_specs.inc']
3679
3680 for file in linkfiles:
3681 ln('../../../Source/DHELAS/%s' % file)
3682
3683
3684 os.chdir(cwd)
3685
3686 if not calls:
3687 calls = 0
3688 return calls
3689
3690
3691
3692
3693
3695 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the
3696 non-optimized mode"""
3697 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc')
3698
3699 replace_dict = {}
3700 replace_dict['max_lwf_size'] = 4
3701 replace_dict['vertex_max_coefs'] = max(\
3702 [q_polynomial.get_number_of_coefs_for_rank(n)
3703 for n in max_loop_vertex_ranks])
3704 IncWriter=writers.FortranWriter(filename,'w')
3705 IncWriter.writelines("""INTEGER MAXLWFSIZE
3706 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d)
3707 INTEGER VERTEXMAXCOEFS
3708 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\
3709 % replace_dict)
3710 IncWriter.close()
3711