1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 """Methods and classes to export matrix elements to fks format."""
16
17 from distutils import dir_util
18 import glob
19 import logging
20 import os
21 import re
22 import shutil
23 import subprocess
24 import string
25 import copy
26 import platform
27
28 import madgraph.core.color_algebra as color
29 import madgraph.core.helas_objects as helas_objects
30 import madgraph.core.base_objects as base_objects
31 import madgraph.fks.fks_helas_objects as fks_helas_objects
32 import madgraph.fks.fks_base as fks
33 import madgraph.fks.fks_common as fks_common
34 import madgraph.iolibs.drawing_eps as draw
35 import madgraph.iolibs.gen_infohtml as gen_infohtml
36 import madgraph.iolibs.files as files
37 import madgraph.various.misc as misc
38 import madgraph.iolibs.file_writers as writers
39 import madgraph.iolibs.template_files as template_files
40 import madgraph.iolibs.ufo_expression_parsers as parsers
41 import madgraph.iolibs.export_v4 as export_v4
42 import madgraph.loop.loop_exporters as loop_exporters
43 import madgraph.various.q_polynomial as q_polynomial
44 import madgraph.various.banner as banner_mod
45
46 import aloha.create_aloha as create_aloha
47
48 import models.write_param_card as write_param_card
49 import models.check_param_card as check_param_card
50 from madgraph import MadGraph5Error, MG5DIR, InvalidCmd
51 from madgraph.iolibs.files import cp, ln, mv
52
53 pjoin = os.path.join
54
55 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
56 logger = logging.getLogger('madgraph.export_fks')
57
58
60 Pdir = args[0]
61 old_pos = args[1]
62 dir_path = args[2]
63
64 devnull = os.open(os.devnull, os.O_RDWR)
65
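# Build the jpeg diagrams for this P* directory by running the gen_jpeg-pl
# helper script from inside it, silencing its output, then step back up.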
66 os.chdir(Pdir)
67 subprocess.call([os.path.join(old_pos, dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
68 stdout = devnull)
69 os.chdir(os.path.pardir)
70
71
72
73
74
76 """Class to take care of exporting a set of matrix elements to
77 Fortran (v4) format."""
78
79
80
81
83 """create the directory run_name as a copy of the MadEvent
84 Template, and clean the directory
85 For now it is just the same as copy_v4template, but it will be modified
86 """
87
88 mgme_dir = self.mgme_dir
89 dir_path = self.dir_path
90 clean = self.opt['clean']
91
92
93 if not os.path.isdir(dir_path):
94 if not mgme_dir:
95 raise MadGraph5Error, \
96 "No valid MG_ME path given for MG4 run directory creation."
97 logger.info('initialize a new directory: %s' % \
98 os.path.basename(dir_path))
99 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
100
101 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),dir_path)
102
103 for card in ['plot_card']:
104 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
105 try:
106 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
107 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
108 except IOError:
109 logger.warning("Failed to move " + card + ".dat to default")
110
111 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
112 if not mgme_dir:
113 raise MadGraph5Error, \
114 "No valid MG_ME path given for MG4 run directory creation."
115 try:
116 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
117 except IOError:
118 MG5_version = misc.get_pkg_info()
119 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
120 "5." + MG5_version['version'])
121
122
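# If requested, wipe whatever is left over from a previous generation in the
# freshly copied Template (the clean_template script takes care of it).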
123 if clean:
124 logger.info('remove old information in %s' % os.path.basename(dir_path))
125 if 'MADGRAPH_BASE' in os.environ:
126 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
127 '--web'],cwd=dir_path)
128 else:
129 try:
130 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
131 cwd=dir_path)
132 except Exception, why:
133 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
134 % (os.path.basename(dir_path),why))
135
136 MG_version = misc.get_pkg_info()
137 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
138 MG_version['version'])
139
140
141 self.link_CutTools(dir_path)
142
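# Regenerate makefile_loop and make_opts from their .inc templates; no
# tensor-integral-reduction (TIR) library is linked here, so both lists are empty.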
143 link_tir_libs=[]
144 tir_libs=[]
145 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
146 dirpath = os.path.join(self.dir_path, 'SubProcesses')
147 filename = pjoin(self.dir_path, 'SubProcesses','makefile_loop')
148 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
149 link_tir_libs,tir_libs)
150 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
151 filename = pjoin(self.dir_path, 'Source','make_opts')
152 calls = self.write_make_opts(writers.MakefileWriter(filename),
153 link_tir_libs,tir_libs)
154
155
156 for card in ['FO_analyse_card', 'shower_card']:
157 try:
158 shutil.copy(pjoin(self.dir_path, 'Cards',
159 card + '.dat'),
160 pjoin(self.dir_path, 'Cards',
161 card + '_default.dat'))
162 except IOError:
163 logger.warning("Failed to copy " + card + ".dat to default")
164
165 cwd = os.getcwd()
166 dirpath = os.path.join(self.dir_path, 'SubProcesses')
167 try:
168 os.chdir(dirpath)
169 except os.error:
170 logger.error('Could not cd to directory %s' % dirpath)
171 return 0
172
173
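# Copy the MadLoop steering files from the StandAlone template and keep a
# default copy of MadLoopParams.dat in Cards/ next to the editable one.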
174 cpfiles= ["SubProcesses/MadLoopParamReader.f",
175 "Cards/MadLoopParams.dat",
176 "SubProcesses/MadLoopParams.inc"]
177
178 for file in cpfiles:
179 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
180 os.path.join(self.dir_path, file))
181
182 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
183 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
184
185 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
186 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
187 'Cards', 'MadLoopParams.dat'))
188
189 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
190 "MadLoopParams.dat"))
191
192
193 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
194 "SubProcesses","MadLoopCommons.inc")).read()
195 writer = writers.FortranWriter(os.path.join(self.dir_path,
196 "SubProcesses","MadLoopCommons.f"))
197 writer.writelines(MadLoopCommon%{
198 'print_banner_commands':self.MadLoop_banner},
199 context={'collier_available':False})
200 writer.close()
201
202
203 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
204 writers.FortranWriter('cts_mpc.h'))
205
206
207
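# Make sure NHelForMCoverHels is set to -1 in the local copy of FKS_params.dat.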
208 FKS_card_path = pjoin(self.dir_path,'Cards','FKS_params.dat')
209 FKS_card_file = open(FKS_card_path,'r')
210 FKS_card = FKS_card_file.read()
211 FKS_card_file.close()
212 FKS_card = re.sub(r"#NHelForMCoverHels\n-?\d+",
213 "#NHelForMCoverHels\n-1", FKS_card)
214 FKS_card_file = open(FKS_card_path,'w')
215 FKS_card_file.write(FKS_card)
216 FKS_card_file.close()
217
218
219 os.chdir(cwd)
220
221 self.copy_python_files()
222
223
224 self.write_pdf_opendata()
225
226
227
228
229
231 """ Create the file makefile_loop which links to the TIR libraries."""
232
233 file = open(os.path.join(self.mgme_dir,'Template','NLO',
234 'SubProcesses','makefile_loop.inc')).read()
235 replace_dict={}
236 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
237 replace_dict['tir_libs']=' '.join(tir_libs)
238 replace_dict['dotf']='%.f'
239 replace_dict['doto']='%.o'
240 replace_dict['tir_include']=' '.join(tir_include)
241 file=file%replace_dict
242 if writer:
243 writer.writelines(file)
244 else:
245 return file
246
247
249 """ Create the file make_opts which links to the TIR libraries."""
250 file = open(os.path.join(self.mgme_dir,'Template','NLO',
251 'Source','make_opts.inc')).read()
252 replace_dict={}
253 replace_dict['link_tir_libs']=' '.join(link_tir_libs)
254 replace_dict['tir_libs']=' '.join(tir_libs)
255 replace_dict['dotf']='%.f'
256 replace_dict['doto']='%.o'
257 file=file%replace_dict
258 if writer:
259 writer.writelines(file)
260 else:
261 return file
262
263
264
265
267 """copy python files required for the Template"""
268
269 files_to_copy = [ \
270 pjoin('interface','amcatnlo_run_interface.py'),
271 pjoin('interface','extended_cmd.py'),
272 pjoin('interface','common_run_interface.py'),
273 pjoin('interface','coloring_logging.py'),
274 pjoin('various','misc.py'),
275 pjoin('various','shower_card.py'),
276 pjoin('various','FO_analyse_card.py'),
277 pjoin('various','histograms.py'),
278 pjoin('various','banner.py'),
279 pjoin('various','cluster.py'),
280 pjoin('various','systematics.py'),
281 pjoin('various','lhe_parser.py'),
282 pjoin('madevent','sum_html.py'),
283 pjoin('madevent','gen_crossxhtml.py'),
284 pjoin('iolibs','files.py'),
285 pjoin('iolibs','save_load_object.py'),
286 pjoin('iolibs','file_writers.py'),
287 pjoin('..','models','check_param_card.py'),
288 pjoin('__init__.py')
289 ]
290 cp(_file_path+'/interface/.mg5_logging.conf',
291 self.dir_path+'/bin/internal/me5_logging.conf')
292
293 for cp_file in files_to_copy:
294 cp(pjoin(_file_path,cp_file),
295 pjoin(self.dir_path,'bin','internal',os.path.basename(cp_file)))
296
297 def convert_model(self, model, wanted_lorentz = [],
298 wanted_couplings = []):
299
300 super(ProcessExporterFortranFKS,self).convert_model(model,
301 wanted_lorentz, wanted_couplings)
302
303 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
304 try:
305 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
306 except OSError as error:
307 pass
308 model_path = model.get('modelpath')
309 shutil.copytree(model_path,
310 pjoin(self.dir_path,'bin','internal','ufomodel'),
311 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
312 if hasattr(model, 'restrict_card'):
313 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
314 'restrict_default.dat')
315 if isinstance(model.restrict_card, check_param_card.ParamCard):
316 model.restrict_card.write(out_path)
317 else:
318 files.cp(model.restrict_card, out_path)
319
320
321
322
323
324
325 def write_maxparticles_file(self, writer, maxparticles):
326 """Write the maxparticles.inc file for MadEvent"""
327
328 lines = "integer max_particles, max_branch\n"
329 lines += "parameter (max_particles=%d) \n" % maxparticles
330 lines += "parameter (max_branch=max_particles-1)"
331
332
333 writer.writelines(lines)
334
335 return True
336
337
338
339
340
342 """Write the maxconfigs.inc file for MadEvent"""
343
344 lines = "integer lmaxconfigs\n"
345 lines += "parameter (lmaxconfigs=%d)" % maxconfigs
346
347
348 writer.writelines(lines)
349
350 return True
351
352
353
354
355
357 """ write an equivalent of the MG4 proc_card in order that all the Madevent
358 Perl script of MadEvent4 are still working properly for pure MG5 run."""
359
360 proc_card_template = template_files.mg4_proc_card.mg4_template
361 process_template = template_files.mg4_proc_card.process_template
362 process_text = ''
363 coupling = ''
364 new_process_content = []
365
366
367
368 process_str = process_str.replace(' =', '=')
369 process_str = process_str.replace('= ', '=')
370 process_str = process_str.replace(',',' , ')
371
372 for info in process_str.split():
373 if '=' in info:
374 coupling += info + '\n'
375 else:
376 new_process_content.append(info)
377
378
379 process_str = ' '.join(new_process_content)
380
381
382 process_text += process_template.substitute({'process': process_str, \
383 'coupling': coupling})
384
385 text = proc_card_template.substitute({'process': process_text,
386 'model': modelname,
387 'multiparticle':''})
388 ff = open(file_pos, 'w')
389 ff.write(text)
390 ff.close()
391
392
393
394
395
397 """ Write an initial state process map. Each possible PDF
398 combination gets a unique identifier."""
399
400 text=''
401 for i,e in enumerate(initial_states):
402 text=text+str(i+1)+' '+str(len(e))
403 for t in e:
404 if len(t) ==1:
405 t.append(0)
406 text=text+' '
407 try:
408 for p in t:
409 if p is None: p = 0
410 text=text+' '+str(p)
411 except TypeError:
412 text=text+' '+str(t)
413 text=text+'\n'
414
415 ff = open(file_pos, 'w')
416 ff.write(text)
417 ff.close()
418
420 """ A function returning a string uniquely identifying the matrix
421 element given in argument so that it can be used as a prefix to all
422 MadLoop5 subroutines and common blocks related to it. This allows
423 to compile several processes into one library as requested by the
424 BLHA (Binoth LesHouches Accord) guidelines. The MadFKS design
425 necessitates that there is no process prefix."""
426
427 return ''
428
429
430
431
433 """writes the coef_specs.inc in the DHELAS folder. Should not be called in the
434 non-optimized mode"""
435 raise fks_common.FKSProcessError(
436 "write_coef_specs should be called only in the loop-optimized mode")
437
438
439
440
441
442 def generate_directories_fks(self, matrix_element, fortran_model, me_number,
443 me_ntot, path=os.getcwd(),OLP='MadLoop'):
444 """Generate the Pxxxxx_i directories for a subprocess in MadFKS,
445 including the necessary matrix.f and various helper files"""
446 proc = matrix_element.born_matrix_element['processes'][0]
447
448 if not self.model:
449 self.model = matrix_element.get('processes')[0].get('model')
450
451 cwd = os.getcwd()
452 try:
453 os.chdir(path)
454 except OSError, error:
455 error_msg = "The directory %s should exist in order to be able " % path + \
456 "to \"export\" in it. If you see this error message by " + \
457 "typing the command \"export\" please consider to use " + \
458 "instead the command \"output\". "
459 raise MadGraph5Error, error_msg
460
461 calls = 0
462
463 self.fksdirs = []
464
465 borndir = "P%s" % \
466 (matrix_element.get('processes')[0].shell_string())
467 os.mkdir(borndir)
468 os.chdir(borndir)
469 logger.info('Writing files in %s (%d / %d)' % (borndir, me_number + 1, me_ntot))
470
471
472 self.generate_born_fks_files(matrix_element,
473 fortran_model, me_number, path)
474
475
476
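# When an external OLP such as NJET provides the virtuals, write the BLHA
# order file it needs for this born process.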
477 if OLP=='NJET':
478 filename = 'OLE_order.lh'
479 self.write_lh_order(filename, [matrix_element.born_matrix_element.get('processes')[0]], OLP)
480
481 if matrix_element.virt_matrix_element:
482 calls += self.generate_virt_directory( \
483 matrix_element.virt_matrix_element, \
484 fortran_model, \
485 os.path.join(path, borndir))
486
487
488
489 self.write_real_matrix_elements(matrix_element, fortran_model)
490
491 self.write_pdf_calls(matrix_element, fortran_model)
492
493 filename = 'nFKSconfigs.inc'
494 self.write_nfksconfigs_file(writers.FortranWriter(filename),
495 matrix_element,
496 fortran_model)
497
498 filename = 'iproc.dat'
499 self.write_iproc_file(writers.FortranWriter(filename),
500 me_number)
501
502 filename = 'fks_info.inc'
503 self.write_fks_info_file(writers.FortranWriter(filename),
504 matrix_element,
505 fortran_model)
506
507 filename = 'leshouche_info.dat'
508 nfksconfs,maxproc,maxflow,nexternal=\
509 self.write_leshouche_info_file(filename,matrix_element)
510
511
512
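# If there is no real-emission process, fall back to the leshouche
# information of the born matrix element so the include files stay well defined.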
513 if nfksconfs == maxproc == maxflow == 0:
514 nfksconfs = 1
515 (dummylines, maxproc, maxflow) = self.get_leshouche_lines(
516 matrix_element.born_matrix_element, 1)
517
518 filename = 'leshouche_decl.inc'
519 self.write_leshouche_info_declarations(
520 writers.FortranWriter(filename),
521 nfksconfs,maxproc,maxflow,nexternal,
522 fortran_model)
523 filename = 'genps.inc'
524 ngraphs = matrix_element.born_matrix_element.get_number_of_amplitudes()
525 ncolor = max(1,len(matrix_element.born_matrix_element.get('color_basis')))
526 self.write_genps(writers.FortranWriter(filename),maxproc,ngraphs,\
527 ncolor,maxflow,fortran_model)
528
529 filename = 'configs_and_props_info.dat'
530 nconfigs,max_leg_number=self.write_configs_and_props_info_file(
531 filename,
532 matrix_element)
533
534 filename = 'configs_and_props_decl.inc'
535 self.write_configs_and_props_info_declarations(
536 writers.FortranWriter(filename),
537 nconfigs,max_leg_number,nfksconfs,
538 fortran_model)
539
540 filename = 'real_from_born_configs.inc'
541 self.write_real_from_born_configs(
542 writers.FortranWriter(filename),
543 matrix_element,
544 fortran_model)
545
546 filename = 'ngraphs.inc'
547 self.write_ngraphs_file(writers.FortranWriter(filename),
548 nconfigs)
549
550
551 filename = 'real_me_chooser.f'
552 self.write_real_me_wrapper(writers.FortranWriter(filename),
553 matrix_element,
554 fortran_model)
555
556 filename = 'parton_lum_chooser.f'
557 self.write_pdf_wrapper(writers.FortranWriter(filename),
558 matrix_element,
559 fortran_model)
560
561 filename = 'get_color.f'
562 self.write_colors_file(writers.FortranWriter(filename),
563 matrix_element)
564
565 filename = 'nexternal.inc'
566 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
567 self.write_nexternal_file(writers.FortranWriter(filename),
568 nexternal, ninitial)
569 self.proc_characteristic['ninitial'] = ninitial
570 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)
571
572 filename = 'pmass.inc'
573 try:
574 self.write_pmass_file(writers.FortranWriter(filename),
575 matrix_element.real_processes[0].matrix_element)
576 except IndexError:
577 self.write_pmass_file(writers.FortranWriter(filename),
578 matrix_element.born_matrix_element)
579
580
581 self.draw_feynman_diagrams(matrix_element)
582
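# Link the process-independent files from the parent SubProcesses directory
# into this P* directory.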
583 linkfiles = ['BinothLHADummy.f',
584 'check_poles.f',
585 'MCmasses_HERWIG6.inc',
586 'MCmasses_HERWIGPP.inc',
587 'MCmasses_PYTHIA6Q.inc',
588 'MCmasses_PYTHIA6PT.inc',
589 'MCmasses_PYTHIA8.inc',
590 'add_write_info.f',
591 'coupl.inc',
592 'cuts.f',
593 'FKS_params.dat',
594 'initial_states_map.dat',
595 'OLE_order.olc',
596 'FKSParams.inc',
597 'FKSParamReader.f',
598 'cuts.inc',
599 'unlops.inc',
600 'pythia_unlops.f',
601 'driver_mintMC.f',
602 'driver_mintFO.f',
603 'appl_interface.cc',
604 'appl_interface_dummy.f',
605 'appl_common.inc',
606 'reweight_appl.inc',
607 'fastjetfortran_madfks_core.cc',
608 'fastjetfortran_madfks_full.cc',
609 'fjcore.cc',
610 'fastjet_wrapper.f',
611 'fjcore.hh',
612 'fks_Sij.f',
613 'fks_powers.inc',
614 'fks_singular.f',
615 'veto_xsec.f',
616 'veto_xsec.inc',
617 'weight_lines.f',
618 'fks_inc_chooser.f',
619 'leshouche_inc_chooser.f',
620 'configs_and_props_inc_chooser.f',
621 'genps_fks.f',
622 'boostwdir2.f',
623 'madfks_mcatnlo.inc',
624 'open_output_files.f',
625 'open_output_files_dummy.f',
626 'HwU_dummy.f',
627 'madfks_plot.f',
628 'analysis_dummy.f',
629 'analysis_lhe.f',
630 'mint-integrator2.f',
631 'MC_integer.f',
632 'mint.inc',
633 'montecarlocounter.f',
634 'q_es.inc',
635 'recluster.cc',
636 'Boosts.h',
637 'reweight_xsec.f',
638 'reweight_xsec_events.f',
639 'reweight_xsec_events_pdf_dummy.f',
640 'iproc_map.f',
641 'run.inc',
642 'run_card.inc',
643 'setcuts.f',
644 'setscales.f',
645 'test_soft_col_limits.f',
646 'symmetry_fks_v3.f',
647 'vegas2.for',
648 'write_ajob.f',
649 'handling_lhe_events.f',
650 'write_event.f',
651 'fill_MC_mshell.f',
652 'maxparticles.inc',
653 'message.inc',
654 'initcluster.f',
655 'cluster.inc',
656 'cluster.f',
657 'reweight.f',
658 'randinit',
659 'sudakov.inc',
660 'maxconfigs.inc',
661 'timing_variables.inc']
662
663 for file in linkfiles:
664 ln('../' + file , '.')
665 os.system("ln -s ../../Cards/param_card.dat .")
666
667
668 os.system("ln -s ../makefile_fks_dir ./makefile")
669 if matrix_element.virt_matrix_element:
670 os.system("ln -s ../BinothLHA.f ./BinothLHA.f")
671 elif OLP!='MadLoop':
672 os.system("ln -s ../BinothLHA_OLP.f ./BinothLHA.f")
673 else:
674 os.system("ln -s ../BinothLHA_user.f ./BinothLHA.f")
675
676
677 os.chdir(os.path.pardir)
678
679 filename = 'subproc.mg'
680 files.append_to_file(filename,
681 self.write_subproc,
682 borndir)
683
684 os.chdir(cwd)
685
686 gen_infohtml.make_info_html_nlo(self.dir_path)
687
688
689 return calls
690
691
692
693
695 """ """
696
697 run_card = banner_mod.RunCardNLO()
698
699 run_card.create_default_for_process(self.proc_characteristic,
700 history,
701 processes)
702
703 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
704 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
705
706
717
718 def finalize(self, matrix_elements, history, mg5options, flaglist):
719 """Finalize FKS directory by creating jpeg diagrams, html
720 pages,proc_card_mg5.dat and madevent.tar.gz and create the MA5 card if
721 necessary."""
722
723 devnull = os.open(os.devnull, os.O_RDWR)
724 try:
725 res = misc.call([mg5options['lhapdf'], '--version'], \
726 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
727 except Exception:
728 res = 1
729 if res != 0:
730 logger.info('The value for lhapdf in the current configuration does not ' + \
731 'correspond to a valid executable.\nPlease set it correctly either in ' + \
732 'input/mg5_configuration or with "set lhapdf /path/to/lhapdf-config" ' + \
733 'and regenerate the process. \nTo avoid regeneration, edit the ' + \
734 ('%s/Cards/amcatnlo_configuration.txt file.\n' % self.dir_path ) + \
735 'Note that you can still compile and run aMC@NLO with the built-in PDFs\n')
736
737 compiler_dict = {'fortran': mg5options['fortran_compiler'],
738 'cpp': mg5options['cpp_compiler'],
739 'f2py': mg5options['f2py_compiler']}
740
741 if 'nojpeg' in flaglist:
742 makejpg = False
743 else:
744 makejpg = True
745 output_dependencies = mg5options['output_dependencies']
746
747
748 self.proc_characteristic['grouped_matrix'] = False
749 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme']
750
751 self.create_proc_charac()
752
753 self.create_run_card(matrix_elements.get_processes(), history)
754
755
756
757
758
759
760
761
762 filename = os.path.join(self.dir_path,'Source','MODEL','get_mass_width_fcts.f')
763 makeinc = os.path.join(self.dir_path,'Source','MODEL','makeinc.inc')
764 self.write_get_mass_width_file(writers.FortranWriter(filename), makeinc, self.model)
765
766
767
768 filename = os.path.join(self.dir_path,'Source','maxconfigs.inc')
769 self.write_maxconfigs_file(writers.FortranWriter(filename),
770 matrix_elements.get_max_configs())
771
772
773 filename = os.path.join(self.dir_path,'Source','maxparticles.inc')
774 self.write_maxparticles_file(writers.FortranWriter(filename),
775 matrix_elements.get_max_particles())
776
777
778 os.system('touch %s/done' % os.path.join(self.dir_path,'SubProcesses'))
779
780
781 fcompiler_chosen = self.set_fortran_compiler(compiler_dict)
782 ccompiler_chosen = self.set_cpp_compiler(compiler_dict['cpp'])
783
784 old_pos = os.getcwd()
785 os.chdir(os.path.join(self.dir_path, 'SubProcesses'))
786 P_dir_list = [proc for proc in os.listdir('.') if os.path.isdir(proc) and \
787 proc[0] == 'P']
788
789 devnull = os.open(os.devnull, os.O_RDWR)
790
791 if makejpg:
792 logger.info("Generate jpeg diagrams")
793 for Pdir in P_dir_list:
794 os.chdir(Pdir)
795 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')],
796 stdout = devnull)
797 os.chdir(os.path.pardir)
798
799 logger.info("Generate web pages")
800
801
802 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \
803 stdout = devnull)
804
805 os.chdir(os.path.pardir)
806
807
808
809
810
811
812
813
814
815
816 if os.path.isdir('Cards'):
817 output_file = os.path.join('Cards', 'proc_card_mg5.dat')
818 history.write(output_file)
819
820
821 for card in ['run_card', 'FO_analyse_card', 'shower_card']:
822 try:
823 shutil.copy(pjoin(self.dir_path, 'Cards',
824 card + '.dat'),
825 pjoin(self.dir_path, 'Cards',
826 card + '_default.dat'))
827 except IOError:
828 logger.warning("Failed to copy " + card + ".dat to default")
829
830
831 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
832 stdout = devnull)
833
834
835 if os.path.exists(pjoin('SubProcesses', 'subproc.mg')):
836 if os.path.exists('amcatnlo.tar.gz'):
837 os.remove('amcatnlo.tar.gz')
838 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'make_amcatnlo_tar')],
839 stdout = devnull)
840
841 subprocess.call([os.path.join(old_pos, self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')],
842 stdout = devnull)
843
844
845 os.chdir(old_pos)
846
847
848
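# Make the StdHEP libraries (used for NLO+PS runs with PY6/HW6) available in
# MCatNLO/lib: compile the copy shipped with MG5_aMC, embed its sources in the
# output, or link libraries found in the environment, depending on output_dependencies.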
849 base_compiler= ['FC=g77','FC=gfortran']
850
851 StdHep_path = pjoin(MG5DIR, 'vendor', 'StdHEP')
852 if output_dependencies == 'external':
853
854 if not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libstdhep.a')) or \
855 not os.path.exists(pjoin(MG5DIR, 'vendor', 'StdHEP', 'lib', 'libFmcfio.a')):
856 if 'FC' not in os.environ or not os.environ['FC']:
857 path = os.path.join(StdHep_path, 'src', 'make_opts')
858 text = open(path).read()
859 for base in base_compiler:
860 text = text.replace(base,'FC=%s' % fcompiler_chosen)
861 open(path, 'w').writelines(text)
862
863 logger.info('Compiling StdHEP. This has to be done only once.')
864 try:
865 misc.compile(cwd = pjoin(MG5DIR, 'vendor', 'StdHEP'))
866 except Exception as error:
867 logger.debug(str(error))
868 logger.warning("StdHep failed to compiled. This forbids to run NLO+PS with PY6 and Herwig6")
869 logger.info("details on the compilation error are available if the code is run with --debug flag")
870 else:
871 logger.info('Done.')
872 if os.path.exists(pjoin(StdHep_path, 'lib', 'libstdhep.a')):
873
874 files.ln(pjoin(StdHep_path, 'lib', 'libstdhep.a'), \
875 pjoin(self.dir_path, 'MCatNLO', 'lib'))
876 files.ln(pjoin(StdHep_path, 'lib', 'libFmcfio.a'), \
877 pjoin(self.dir_path, 'MCatNLO', 'lib'))
878
879 elif output_dependencies == 'internal':
880 StdHEP_internal_path = pjoin(self.dir_path,'Source','StdHEP')
881 shutil.copytree(StdHep_path,StdHEP_internal_path, symlinks=True)
882
883 linkfiles = ['libstdhep.a', 'libFmcfio.a']
884 for file in linkfiles:
885 ln(pjoin(os.path.pardir,os.path.pardir,'Source','StdHEP','lib',file),
886 os.path.join(self.dir_path, 'MCatNLO', 'lib'))
887 if 'FC' not in os.environ or not os.environ['FC']:
888 path = pjoin(StdHEP_internal_path, 'src', 'make_opts')
889 text = open(path).read()
890 for base in base_compiler:
891 text = text.replace(base,'FC=%s' % fcompiler_chosen)
892 open(path, 'w').writelines(text)
893
894 misc.compile(['clean'],cwd = StdHEP_internal_path)
895
896 elif output_dependencies == 'environment_paths':
897
898
899 libStdHep = misc.which_lib('libstdhep.a')
900 libFmcfio = misc.which_lib('libFmcfio.a')
901 if not libStdHep is None and not libFmcfio is None:
902 logger.info('MG5_aMC is using StdHep installation found at %s.'%\
903 os.path.dirname(libStdHep))
904 ln(pjoin(libStdHep),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
905 ln(pjoin(libFmcfio),pjoin(self.dir_path, 'MCatNLO', 'lib'),abspath=True)
906 else:
907 raise InvalidCmd("Could not find the location of the files"+\
908 " libstdhep.a and libFmcfio.a in you environment paths.")
909
910 else:
911 raise MadGraph5Error, 'output_dependencies option %s not recognized'\
912 %output_dependencies
913
914
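# If a MadAnalysis5 path is configured, write default analysis cards tailored
# to the processes that were generated.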
915 if 'madanalysis5_path' in self.opt and not \
916 self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
917
918 processes = sum([me.get('processes') if not isinstance(me, str) else [] \
919 for me in matrix_elements.get('matrix_elements')],[])
920
921
922
923 if len(processes)==0:
924 processes = self.born_processes
925 if len(processes)==0:
926 logger.warning(
927 """MG5aMC could not provide to Madanalysis5 the list of processes generated.
928 As a result, the default card will not be tailored to the process generated.
929 This typically happens when using the 'low_mem_multicore_nlo_generation' NLO generation mode.""")
930
931
932 self.create_default_madanalysis5_cards(
933 history, self.proc_defs, [processes,]*len(self.proc_defs),
934 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'),
935 levels =['hadron'])
936
938 """Writes the real_from_born_configs.inc file that contains
939 the mapping to go for a given born configuration (that is used
940 e.g. in the multi-channel phase-space integration to the
941 corresponding real-emission diagram, i.e. the real emission
942 diagram in which the combined ij is split in i_fks and
943 j_fks."""
944 lines=[]
945 lines2=[]
946 max_links=0
947 born_me=matrix_element.born_matrix_element
948 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
949 iFKS=iFKS+1
950 links=conf['fks_info']['rb_links']
951 max_links=max(max_links,len(links))
952 for i,diags in enumerate(links):
953 if not i == diags['born_conf']:
954 print links
955 raise MadGraph5Error, "born_conf should be canonically ordered"
956 real_configs=', '.join(['%d' % int(diags['real_conf']+1) for diags in links])
957 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
958 % (iFKS,len(links),real_configs))
959
960
961
962 if not matrix_element.get_fks_info_list():
963
964 base_diagrams = born_me.get('base_amplitude').get('diagrams')
965 minvert = min([max([len(vert.get('legs')) for vert in \
966 diag.get('vertices')]) for diag in base_diagrams])
967
968 for idiag, diag in enumerate(base_diagrams):
969 if any([len(vert.get('legs')) > minvert for vert in
970 diag.get('vertices')]):
971
972 continue
973 max_links = max_links + 1
974
975 real_configs=', '.join(['%d' % i for i in range(1, max_links+1)])
976 lines.append("data (real_from_born_conf(irfbc,%d),irfbc=1,%d) /%s/" \
977 % (1,max_links,real_configs))
978
979 lines2.append("integer irfbc")
980 lines2.append("integer real_from_born_conf(%d,%d)" \
981 % (max_links, max(len(matrix_element.get_fks_info_list()),1)))
982
983 writer.writelines(lines2+lines)
984
985
986
987
988
989
991 """Write the get_mass_width_file.f file for MG4.
992 Also update the makeinc.inc file
993 """
994 mass_particles = [p for p in model['particles'] if p['mass'].lower() != 'zero']
995 width_particles = [p for p in model['particles'] if p['width'].lower() != 'zero']
996
997 iflines_mass = ''
998 iflines_width = ''
999
1000 for i, part in enumerate(mass_particles):
1001 if i == 0:
1002 ifstring = 'if'
1003 else:
1004 ifstring = 'else if'
1005 if part['self_antipart']:
1006 iflines_mass += '%s (id.eq.%d) then\n' % \
1007 (ifstring, part.get_pdg_code())
1008 else:
1009 iflines_mass += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1010 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1011 iflines_mass += 'get_mass_from_id=abs(%s)\n' % part.get('mass')
1012
1013 for i, part in enumerate(width_particles):
1014 if i == 0:
1015 ifstring = 'if'
1016 else:
1017 ifstring = 'else if'
1018 if part['self_antipart']:
1019 iflines_width += '%s (id.eq.%d) then\n' % \
1020 (ifstring, part.get_pdg_code())
1021 else:
1022 iflines_width += '%s (id.eq.%d.or.id.eq.%d) then\n' % \
1023 (ifstring, part.get_pdg_code(), part.get_anti_pdg_code())
1024 iflines_width += 'get_width_from_id=abs(%s)\n' % part.get('width')
1025
1026
1027 if len(mass_particles)==0:
1028 iflines_mass = 'if (.True.) then\n'
1029
1030 if len(width_particles)==0:
1031 iflines_width = 'if (.True.) then\n'
1032
1033 replace_dict = {'iflines_mass' : iflines_mass,
1034 'iflines_width' : iflines_width}
1035
1036 file = open(os.path.join(_file_path, \
1037 'iolibs/template_files/get_mass_width_fcts.inc')).read()
1038 file = file % replace_dict
1039
1040
1041 writer.writelines(file)
1042
1043
1044 makeinc_content = open(makeinc).read()
1045 makeinc_content = makeinc_content.replace('MODEL = ', 'MODEL = get_mass_width_fcts.o ')
1046 open(makeinc, 'w').write(makeinc_content)
1047
1048 return
1049
1050
1052 """writes the declarations for the variables relevant for configs_and_props
1053 """
1054 lines = []
1055 lines.append("integer ifr,lmaxconfigs_used,max_branch_used")
1056 lines.append("parameter (lmaxconfigs_used=%4d)" % max_iconfig)
1057 lines.append("parameter (max_branch_used =%4d)" % -max_leg_number)
1058 lines.append("integer mapconfig_d(%3d,0:lmaxconfigs_used)" % nfksconfs)
1059 lines.append("integer iforest_d(%3d,2,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1060 lines.append("integer sprop_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1061 lines.append("integer tprid_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1062 lines.append("double precision pmass_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1063 lines.append("double precision pwidth_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1064 lines.append("integer pow_d(%3d,-max_branch_used:-1,lmaxconfigs_used)" % nfksconfs)
1065
1066 writer.writelines(lines)
1067
1068
1070 """writes the configs_and_props_info.inc file that cointains
1071 all the (real-emission) configurations (IFOREST) as well as
1072 the masses and widths of intermediate particles"""
1073 lines = []
1074 lines.append("# C -> MAPCONFIG_D")
1075 lines.append("# F/D -> IFOREST_D")
1076 lines.append("# S -> SPROP_D")
1077 lines.append("# T -> TPRID_D")
1078 lines.append("# M -> PMASS_D/PWIDTH_D")
1079 lines.append("# P -> POW_D")
1080 lines2 = []
1081 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1082
1083 max_iconfig=0
1084 max_leg_number=0
1085
1086
1087
1088
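# Loop over the FKS configurations: for each real-emission matrix element dump
# its diagram topologies (s- and t-channels), the intermediate masses/widths
# and the propagator powers.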
1089 for iFKS, conf in enumerate(matrix_element.get_fks_info_list()):
1090 iFKS=iFKS+1
1091 iconfig = 0
1092 s_and_t_channels = []
1093 mapconfigs = []
1094 fks_matrix_element=matrix_element.real_processes[conf['n_me'] - 1].matrix_element
1095 base_diagrams = fks_matrix_element.get('base_amplitude').get('diagrams')
1096 model = fks_matrix_element.get('base_amplitude').get('process').get('model')
1097 minvert = min([max([len(vert.get('legs')) for vert in \
1098 diag.get('vertices')]) for diag in base_diagrams])
1099
1100 lines.append("# ")
1101 lines.append("# nFKSprocess %d" % iFKS)
1102 for idiag, diag in enumerate(base_diagrams):
1103 if any([len(vert.get('legs')) > minvert for vert in
1104 diag.get('vertices')]):
1105
1106 continue
1107 iconfig = iconfig + 1
1108 helas_diag = fks_matrix_element.get('diagrams')[idiag]
1109 mapconfigs.append(helas_diag.get('number'))
1110 lines.append("# Diagram %d for nFKSprocess %d" % \
1111 (helas_diag.get('number'),iFKS))
1112
1113 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1114 helas_diag.get('number')))
1115
1116
1117
1118 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1119 get_s_and_t_channels(ninitial, model, 990)
1120
1121 s_and_t_channels.append([schannels, tchannels])
1122
1123
1124 allchannels = schannels
1125 if len(tchannels) > 1:
1126
1127 allchannels = schannels + tchannels
1128
1129 for vert in allchannels:
1130 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1131 last_leg = vert.get('legs')[-1]
1132 lines.append("F %4d %4d %4d %4d" % \
1133 (iFKS,last_leg.get('number'), iconfig, len(daughters)))
1134 for d in daughters:
1135 lines.append("D %4d" % d)
1136 if vert in schannels:
1137 lines.append("S %4d %4d %4d %10d" % \
1138 (iFKS,last_leg.get('number'), iconfig,
1139 last_leg.get('id')))
1140 elif vert in tchannels[:-1]:
1141 lines.append("T %4d %4d %4d %10d" % \
1142 (iFKS,last_leg.get('number'), iconfig,
1143 abs(last_leg.get('id'))))
1144
1145
1146 max_leg_number = min(max_leg_number,last_leg.get('number'))
1147 max_iconfig = max(max_iconfig,iconfig)
1148
1149
1150 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1151 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1152
1153
1154 lines2.append("# ")
1155 particle_dict = fks_matrix_element.get('processes')[0].get('model').\
1156 get('particle_dict')
1157
1158 for iconf, configs in enumerate(s_and_t_channels):
1159 for vertex in configs[0] + configs[1][:-1]:
1160 leg = vertex.get('legs')[-1]
1161 if leg.get('id') not in particle_dict:
1162
1163 pow_part = 0
1164 else:
1165 particle = particle_dict[leg.get('id')]
1166
1167 pow_part = 1 + int(particle.is_boson())
1168
1169 lines2.append("M %4d %4d %4d %10d " % \
1170 (iFKS,leg.get('number'), iconf + 1, leg.get('id')))
1171 lines2.append("P %4d %4d %4d %4d " % \
1172 (iFKS,leg.get('number'), iconf + 1, pow_part))
1173
1174
1175
1176
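# Special case with no real emissions: build a single configuration directly
# from the born diagrams, using a colored final-state leg as fks_j and the
# extra (last) parton as fks_i.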
1177 if not matrix_element.get_fks_info_list():
1178 born_me = matrix_element.born_matrix_element
1179
1180
1181
1182
1183 bornproc = born_me.get('processes')[0]
1184 colors = [l.get('color') for l in bornproc.get('legs')]
1185
1186 fks_i = len(colors)
1187
1188
1189 fks_j=1
1190 for cpos, col in enumerate(colors):
1191 if col != 1:
1192 fks_j = cpos+1
1193 fks_j_id = [l.get('id') for l in bornproc.get('legs')][cpos]
1194
1195
1196 if fks_j > ninitial:
1197 iFKS=1
1198 iconfig = 0
1199 s_and_t_channels = []
1200 mapconfigs = []
1201 base_diagrams = born_me.get('base_amplitude').get('diagrams')
1202 model = born_me.get('base_amplitude').get('process').get('model')
1203 minvert = min([max([len(vert.get('legs')) for vert in \
1204 diag.get('vertices')]) for diag in base_diagrams])
1205
1206 lines.append("# ")
1207 lines.append("# nFKSprocess %d" % iFKS)
1208 for idiag, diag in enumerate(base_diagrams):
1209 if any([len(vert.get('legs')) > minvert for vert in
1210 diag.get('vertices')]):
1211
1212 continue
1213 iconfig = iconfig + 1
1214 helas_diag = born_me.get('diagrams')[idiag]
1215 mapconfigs.append(helas_diag.get('number'))
1216 lines.append("# Diagram %d for nFKSprocess %d" % \
1217 (helas_diag.get('number'),iFKS))
1218
1219 lines.append("C %4d %4d %4d " % (iFKS,iconfig,
1220 helas_diag.get('number')))
1221
1222
1223
1224 schannels, tchannels = helas_diag.get('amplitudes')[0].\
1225 get_s_and_t_channels(ninitial, model, 990)
1226
1227 s_and_t_channels.append([schannels, tchannels])
1228
1229
1230 lines.append("F %4d %4d %4d %4d" % \
1231 (iFKS,-1,iconfig,2))
1232
1233 lines.append("D %4d" % nexternal)
1234 lines.append("D %4d" % fks_j)
1235 lines.append("S %4d %4d %4d %10d" % \
1236 (iFKS,-1, iconfig,fks_j_id))
1237
1238
1239
1240
1241 allchannels = schannels
1242 if len(tchannels) > 1:
1243
1244 allchannels = schannels + tchannels
1245
1246 for vert in allchannels:
1247 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
1248 last_leg = vert.get('legs')[-1]
1249 lines.append("F %4d %4d %4d %4d" % \
1250 (iFKS,last_leg.get('number')-1, iconfig, len(daughters)))
1251
1252
1253 for i_dau in range(len(daughters)):
1254 if daughters[i_dau] < 0:
1255 daughters[i_dau] += -1
1256
1257 if fks_j in daughters:
1258 daughters[daughters.index(fks_j)] = -1
1259 for d in daughters:
1260 lines.append("D %4d" % d)
1261 if vert in schannels:
1262 lines.append("S %4d %4d %4d %10d" % \
1263 (iFKS,last_leg.get('number')-1, iconfig,
1264 last_leg.get('id')))
1265 elif vert in tchannels[:-1]:
1266 lines.append("T %4d %4d %4d %10d" % \
1267 (iFKS,last_leg.get('number')-1, iconfig,
1268 abs(last_leg.get('id'))))
1269
1270
1271 max_leg_number = min(max_leg_number,last_leg.get('number')-1)
1272 max_iconfig = max(max_iconfig,iconfig)
1273
1274
1275 lines.append("# Number of configs for nFKSprocess %d" % iFKS)
1276 lines.append("C %4d %4d %4d" % (iFKS,0,iconfig))
1277
1278
1279 lines2.append("# ")
1280 particle_dict = born_me.get('processes')[0].get('model').\
1281 get('particle_dict')
1282
1283 for iconf, configs in enumerate(s_and_t_channels):
1284 lines2.append("M %4d %4d %4d %10d " % \
1285 (iFKS,-1, iconf + 1, fks_j_id))
1286 pow_part = 1 + int(particle_dict[fks_j_id].is_boson())
1287 lines2.append("P %4d %4d %4d %4d " % \
1288 (iFKS,-1, iconf + 1, pow_part))
1289 for vertex in configs[0] + configs[1][:-1]:
1290 leg = vertex.get('legs')[-1]
1291 if leg.get('id') not in particle_dict:
1292
1293 pow_part = 0
1294 else:
1295 particle = particle_dict[leg.get('id')]
1296
1297 pow_part = 1 + int(particle.is_boson())
1298
1299 lines2.append("M %4d %4d %4d %10d " % \
1300 (iFKS,leg.get('number')-1, iconf + 1, leg.get('id')))
1301 lines2.append("P %4d %4d %4d %4d " % \
1302 (iFKS,leg.get('number')-1, iconf + 1, pow_part))
1303
1304
1305 open(filename,'w').write('\n'.join(lines+lines2))
1306
1307 return max_iconfig, max_leg_number
1308
1309
1312 """writes the declarations for the variables relevant for leshouche_info
1313 """
1314 lines = []
1315 lines.append('integer maxproc_used, maxflow_used')
1316 lines.append('parameter (maxproc_used = %d)' % maxproc)
1317 lines.append('parameter (maxflow_used = %d)' % maxflow)
1318 lines.append('integer idup_d(%d,%d,maxproc_used)' % (nfksconfs, nexternal))
1319 lines.append('integer mothup_d(%d,%d,%d,maxproc_used)' % (nfksconfs, 2, nexternal))
1320 lines.append('integer icolup_d(%d,%d,%d,maxflow_used)' % (nfksconfs, 2, nexternal))
1321 lines.append('integer niprocs_d(%d)' % (nfksconfs))
1322
1323 writer.writelines(lines)
1324
1325
1326 def write_genps(self, writer, maxproc, ngraphs, ncolor, maxflow, fortran_model):
1327 """writes the genps.inc file
1328 """
1329 lines = []
1330 lines.append("include 'maxparticles.inc'")
1331 lines.append("include 'maxconfigs.inc'")
1332 lines.append("integer maxproc,ngraphs,ncolor,maxflow")
1333 lines.append("parameter (maxproc=%d,ngraphs=%d,ncolor=%d,maxflow=%d)" % \
1334 (maxproc,ngraphs,ncolor,maxflow))
1335 writer.writelines(lines)
1336
1337
1339 """writes the leshouche_info.inc file which contains
1340 the LHA informations for all the real emission processes
1341 """
1342 lines = []
1343 lines.append("# I -> IDUP_D")
1344 lines.append("# M -> MOTHUP_D")
1345 lines.append("# C -> ICOLUP_D")
1346 nfksconfs = len(matrix_element.get_fks_info_list())
1347 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1348
1349 maxproc = 0
1350 maxflow = 0
1351 for i, conf in enumerate(matrix_element.get_fks_info_list()):
1352
1353 (newlines, nprocs, nflows) = self.get_leshouche_lines(
1354 matrix_element.real_processes[conf['n_me'] - 1].matrix_element, i + 1)
1355 lines.extend(newlines)
1356 maxproc = max(maxproc, nprocs)
1357 maxflow = max(maxflow, nflows)
1358
1359
1360 if not matrix_element.get_fks_info_list():
1361 (newlines, nprocs, nflows) = self.get_leshouche_lines_dummy(matrix_element.born_matrix_element, 1)
1362 lines.extend(newlines)
1363
1364
1365 open(filename,'w').write('\n'.join(lines))
1366
1367 return nfksconfs, maxproc, maxflow, nexternal
1368
1369
1371 """writes the wrapper which allows to chose among the different real matrix elements"""
1372
1373 file = \
1374 """double precision function dlum()
1375 implicit none
1376 integer nfksprocess
1377 common/c_nfksprocess/nfksprocess
1378 """
1379 if matrix_element.real_processes:
1380 for n, info in enumerate(matrix_element.get_fks_info_list()):
1381 file += \
1382 """if (nfksprocess.eq.%(n)d) then
1383 call dlum_%(n_me)d(dlum)
1384 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1385 file += \
1386 """
1387 write(*,*) 'ERROR: invalid n in dlum :', nfksprocess
1388 stop
1389 endif
1390 return
1391 end
1392 """
1393 else:
1394 file+= \
1395 """call dlum_0(dlum)
1396 return
1397 end
1398 """
1399
1400
1401 writer.writelines(file)
1402 return 0
1403
1404
1406 """writes the wrapper which allows to chose among the different real matrix elements"""
1407
1408 file = \
1409 """subroutine smatrix_real(p, wgt)
1410 implicit none
1411 include 'nexternal.inc'
1412 double precision p(0:3, nexternal)
1413 double precision wgt
1414 integer nfksprocess
1415 common/c_nfksprocess/nfksprocess
1416 """
1417 for n, info in enumerate(matrix_element.get_fks_info_list()):
1418 file += \
1419 """if (nfksprocess.eq.%(n)d) then
1420 call smatrix_%(n_me)d(p, wgt)
1421 else""" % {'n': n + 1, 'n_me' : info['n_me']}
1422
1423 if matrix_element.real_processes:
1424 file += \
1425 """
1426 write(*,*) 'ERROR: invalid n in real_matrix :', nfksprocess
1427 stop
1428 endif
1429 return
1430 end
1431 """
1432 else:
1433 file += \
1434 """
1435 wgt=0d0
1436 return
1437 end
1438 """
1439
1440 writer.writelines(file)
1441 return 0
1442
1443
1445 """Create the ps files containing the feynman diagrams for the born process,
1446 as well as for all the real emission processes"""
1447
1448 filename = 'born.ps'
1449 plot = draw.MultiEpsDiagramDrawer(matrix_element.born_matrix_element.\
1450 get('base_amplitude').get('diagrams'),
1451 filename,
1452 model=matrix_element.born_matrix_element.\
1453 get('processes')[0].get('model'),
1454 amplitude=True, diagram_type='born')
1455 plot.draw()
1456
1457 for n, fksreal in enumerate(matrix_element.real_processes):
1458 filename = 'matrix_%d.ps' % (n + 1)
1459 plot = draw.MultiEpsDiagramDrawer(fksreal.matrix_element.\
1460 get('base_amplitude').get('diagrams'),
1461 filename,
1462 model=fksreal.matrix_element.\
1463 get('processes')[0].get('model'),
1464 amplitude=True, diagram_type='real')
1465 plot.draw()
1466
1467
1469 """writes the matrix_i.f files which contain the real matrix elements"""
1470
1471
1472
1473 for n, fksreal in enumerate(matrix_element.real_processes):
1474 filename = 'matrix_%d.f' % (n + 1)
1475 self.write_matrix_element_fks(writers.FortranWriter(filename),
1476 fksreal.matrix_element, n + 1,
1477 fortran_model)
1478
1480 """writes the parton_lum_i.f files which contain the real matrix elements.
1481 If no real emission existst, write the one for the born"""
1482
1483 if matrix_element.real_processes:
1484 for n, fksreal in enumerate(matrix_element.real_processes):
1485 filename = 'parton_lum_%d.f' % (n + 1)
1486 self.write_pdf_file(writers.FortranWriter(filename),
1487 fksreal.matrix_element, n + 1,
1488 fortran_model)
1489 else:
1490 filename = 'parton_lum_0.f'
1491 self.write_pdf_file(writers.FortranWriter(filename),
1492 matrix_element.born_matrix_element, 0,
1493 fortran_model)
1494
1495
1497 """generates the files needed for the born amplitude in the P* directory, which will
1498 be needed by the P* directories"""
1499 pathdir = os.getcwd()
1500
1501 filename = 'born.f'
1502 calls_born, ncolor_born = \
1503 self.write_born_fks(writers.FortranWriter(filename),\
1504 matrix_element,
1505 fortran_model)
1506
1507 filename = 'born_hel.f'
1508 self.write_born_hel(writers.FortranWriter(filename),\
1509 matrix_element,
1510 fortran_model)
1511
1512
1513 filename = 'born_conf.inc'
1514 nconfigs, mapconfigs, s_and_t_channels = \
1515 self.write_configs_file(
1516 writers.FortranWriter(filename),
1517 matrix_element.born_matrix_element,
1518 fortran_model)
1519
1520 filename = 'born_props.inc'
1521 self.write_props_file(writers.FortranWriter(filename),
1522 matrix_element.born_matrix_element,
1523 fortran_model,
1524 s_and_t_channels)
1525
1526 filename = 'born_decayBW.inc'
1527 self.write_decayBW_file(writers.FortranWriter(filename),
1528 s_and_t_channels)
1529
1530 filename = 'born_leshouche.inc'
1531 nflows = self.write_leshouche_file(writers.FortranWriter(filename),
1532 matrix_element.born_matrix_element,
1533 fortran_model)
1534
1535 filename = 'born_nhel.inc'
1536 self.write_born_nhel_file(writers.FortranWriter(filename),
1537 matrix_element.born_matrix_element, nflows,
1538 fortran_model,
1539 ncolor_born)
1540
1541 filename = 'born_ngraphs.inc'
1542 self.write_ngraphs_file(writers.FortranWriter(filename),
1543 matrix_element.born_matrix_element.get_number_of_amplitudes())
1544
1545 filename = 'ncombs.inc'
1546 self.write_ncombs_file(writers.FortranWriter(filename),
1547 matrix_element.born_matrix_element,
1548 fortran_model)
1549
1550 filename = 'born_maxamps.inc'
1551 maxamps = len(matrix_element.get('diagrams'))
1552 maxflows = ncolor_born
1553 self.write_maxamps_file(writers.FortranWriter(filename),
1554 maxamps,
1555 maxflows,
1556 max([len(matrix_element.get('processes')) for me in \
1557 matrix_element.born_matrix_element]),1)
1558
1559 filename = 'config_subproc_map.inc'
1560 self.write_config_subproc_map_file(writers.FortranWriter(filename),
1561 s_and_t_channels)
1562
1563 filename = 'coloramps.inc'
1564 self.write_coloramps_file(writers.FortranWriter(filename),
1565 mapconfigs,
1566 matrix_element.born_matrix_element,
1567 fortran_model)
1568
1569
1570 filename = ['sborn_sf.f', 'sborn_sf_dum.f']
1571 for i, links in enumerate([matrix_element.color_links, []]):
1572 self.write_sborn_sf(writers.FortranWriter(filename[i]),
1573 links,
1574 fortran_model)
1575 self.color_link_files = []
1576 for i in range(len(matrix_element.color_links)):
1577 filename = 'b_sf_%3.3d.f' % (i + 1)
1578 self.color_link_files.append(filename)
1579 self.write_b_sf_fks(writers.FortranWriter(filename),
1580 matrix_element, i,
1581 fortran_model)
1582
1583
1585 """Generates the library for computing the loop matrix elements
1586 necessary for this process using the OLP specified."""
1587
1588
1589 virtual_path = pjoin(export_path,'OLP_virtuals')
1590 if not os.path.exists(virtual_path):
1591 os.makedirs(virtual_path)
1592 filename = os.path.join(virtual_path,'OLE_order.lh')
1593 self.write_lh_order(filename, process_list, OLP)
1594
1595 fail_msg='Generation of the virtuals with %s failed.\n'%OLP+\
1596 'Please check the virt_generation.log file in %s.'\
1597 %str(pjoin(virtual_path,'virt_generation.log'))
1598
1599
1600 if OLP=='GoSam':
1601 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1602 'GoSam','makevirt'),pjoin(virtual_path,'makevirt'))
1603 cp(pjoin(self.mgme_dir,'Template','loop_material','OLP_specifics',
1604 'GoSam','gosam.rc'),pjoin(virtual_path,'gosam.rc'))
1605 ln(pjoin(export_path,'Cards','param_card.dat'),virtual_path)
1606
1607 logger.info('Generating the loop matrix elements with %s...'%OLP)
1608 virt_generation_log = \
1609 open(pjoin(virtual_path,'virt_generation.log'), 'w')
1610 retcode = subprocess.call(['./makevirt'],cwd=virtual_path,
1611 stdout=virt_generation_log, stderr=virt_generation_log)
1612 virt_generation_log.close()
1613
1614 possible_other_extensions = ['so','dylib']
1615 shared_lib_ext='so'
1616 for ext in possible_other_extensions:
1617 if os.path.isfile(pjoin(virtual_path,'Virtuals','lib',
1618 'libgolem_olp.'+ext)):
1619 shared_lib_ext = ext
1620
1621
1622 files_to_check = ['olp_module.mod',str(pjoin('lib',
1623 'libgolem_olp.'+shared_lib_ext))]
1624 if retcode != 0 or any([not os.path.exists(pjoin(virtual_path,
1625 'Virtuals',f)) for f in files_to_check]):
1626 raise fks_common.FKSProcessError(fail_msg)
1627
1628 ln(pjoin(virtual_path,'Virtuals','lib','libgolem_olp.'+shared_lib_ext),
1629 pjoin(export_path,'lib'))
1630
1631
1632 make_opts_content=open(pjoin(export_path,'Source','make_opts')).read()
1633 make_opts=open(pjoin(export_path,'Source','make_opts'),'w')
1634 if OLP=='GoSam':
1635 if platform.system().lower()=='darwin':
1636
1637
1638 make_opts_content=make_opts_content.replace('libOLP=',
1639 'libOLP=-Wl,-lgolem_olp')
1640 else:
1641
1642
1643
1644
1645
1646
1647
1648 make_opts_content=make_opts_content.replace('libOLP=',
1649 'libOLP=-Wl,-rpath='+str(pjoin(export_path,'lib'))+' -lgolem_olp')
1650
1651
1652 make_opts.write(make_opts_content)
1653 make_opts.close()
1654
1655
1656
1657
1658
1659 proc_to_label = self.parse_contract_file(
1660 pjoin(virtual_path,'OLE_order.olc'))
1661
1662 self.write_BinothLHA_inc(process_list,proc_to_label,\
1663 pjoin(export_path,'SubProcesses'))
1664
1665
1666 ln(pjoin(virtual_path,'OLE_order.olc'),pjoin(export_path,'SubProcesses'))
1667
1669 """ Write the file Binoth_proc.inc in each SubProcess directory so as
1670 to provide the right process_label to use in the OLP call to get the
1671 loop matrix element evaluation. proc_to_label is a dictionary in the
1672 format returned by the function parse_contract_file."""
1673
1674 for proc in processes:
1675 name = "P%s"%proc.shell_string()
1676 proc_pdgs=(tuple([leg.get('id') for leg in proc.get('legs') if \
1677 not leg.get('state')]),
1678 tuple([leg.get('id') for leg in proc.get('legs') if \
1679 leg.get('state')]))
1680 incFile = open(pjoin(SubProcPath, name,'Binoth_proc.inc'),'w')
1681 try:
1682 incFile.write(
1683 """ INTEGER PROC_LABEL
1684 PARAMETER (PROC_LABEL=%d)"""%(proc_to_label[proc_pdgs]))
1685 except KeyError:
1686 raise fks_common.FKSProcessError('Could not find the target'+\
1687 ' process %s > %s in '%(str(proc_pdgs[0]),str(proc_pdgs[1]))+\
1688 ' the proc_to_label argument in write_BinothLHA_inc.')
1689 incFile.close()
1690
1692 """ Parses the BLHA contract file, make sure all parameters could be
1693 understood by the OLP and return a mapping of the processes (characterized
1694 by the pdg's of the initial and final state particles) to their process
1695 label. The format of the mapping is {((in_pdgs),(out_pdgs)):proc_label}.
1696 """
1697
1698 proc_def_to_label = {}
1699
1700 if not os.path.exists(contract_file_path):
1701 raise fks_common.FKSProcessError('Could not find the contract file'+\
1702 ' OLE_order.olc in %s.'%str(contract_file_path))
1703
1704 comment_re=re.compile(r"^\s*#")
1705 proc_def_re=re.compile(
1706 r"^(?P<in_pdgs>(\s*-?\d+\s*)+)->(?P<out_pdgs>(\s*-?\d+\s*)+)\|"+
1707 r"\s*(?P<proc_class>\d+)\s*(?P<proc_label>\d+)\s*$")
1708 line_OK_re=re.compile(r"^.*\|\s*OK")
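# Process lines are expected to look like e.g. '21 21 -> 6 -6 | 1 23' (a
# hypothetical example): incoming and outgoing PDG codes around '->', then the
# process class and the process label after the '|'.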
1709 for line in file(contract_file_path):
1710
1711 if not comment_re.match(line) is None:
1712 continue
1713
1714 proc_def = proc_def_re.match(line)
1715 if not proc_def is None:
1716 if int(proc_def.group('proc_class'))!=1:
1717 raise fks_common.FKSProcessError(
1718 'aMCatNLO can only handle loop processes generated by the OLP which have the'+\
1719 ' process class attribute equal to 1. Found %s instead in: \n%s'\
1720 %(proc_def.group('proc_class'),line))
1721 in_pdgs=tuple([int(in_pdg) for in_pdg in \
1722 proc_def.group('in_pdgs').split()])
1723 out_pdgs=tuple([int(out_pdg) for out_pdg in \
1724 proc_def.group('out_pdgs').split()])
1725 proc_def_to_label[(in_pdgs,out_pdgs)]=\
1726 int(proc_def.group('proc_label'))
1727 continue
1728
1729 if line_OK_re.match(line) is None:
1730 raise fks_common.FKSProcessError(
1731 'The OLP could not process the following line: \n%s'%line)
1732
1733 return proc_def_to_label
1734
1735
1737 """writes the V**** directory inside the P**** directories specified in
1738 dir_name"""
1739
1740 cwd = os.getcwd()
1741
1742 matrix_element = loop_matrix_element
1743
1744
1745 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
1746 try:
1747 os.mkdir(dirpath)
1748 except os.error as error:
1749 logger.warning(error.strerror + " " + dirpath)
1750
1751
1752 name = "V%s" % matrix_element.get('processes')[0].shell_string()
1753 dirpath = os.path.join(dir_name, name)
1754
1755 try:
1756 os.mkdir(dirpath)
1757 except os.error as error:
1758 logger.warning(error.strerror + " " + dirpath)
1759
1760 try:
1761 os.chdir(dirpath)
1762 except os.error:
1763 logger.error('Could not cd to directory %s' % dirpath)
1764 return 0
1765
1766 logger.info('Creating files in directory %s' % name)
1767
1768
1769 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
1770
1771 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
1772
1773 filename = 'born_matrix.f'
1774 calls = self.write_bornmatrix(
1775 writers.FortranWriter(filename),
1776 matrix_element,
1777 fortran_model)
1778
1779 filename = 'nexternal.inc'
1780 self.write_nexternal_file(writers.FortranWriter(filename),
1781 nexternal, ninitial)
1782
1783 filename = 'pmass.inc'
1784 self.write_pmass_file(writers.FortranWriter(filename),
1785 matrix_element)
1786
1787 filename = 'ngraphs.inc'
1788 self.write_ngraphs_file(writers.FortranWriter(filename),
1789 len(matrix_element.get_all_amplitudes()))
1790
1791 filename = "loop_matrix.ps"
1792 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
1793 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
1794 filename,
1795 model=matrix_element.get('processes')[0].get('model'),
1796 amplitude='')
1797 logger.info("Drawing loop Feynman diagrams for " + \
1798 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1799 plot.draw()
1800
1801 filename = "born_matrix.ps"
1802 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
1803 get('born_diagrams'),filename,model=matrix_element.get('processes')[0].\
1804 get('model'),amplitude='')
1805 logger.info("Generating born Feynman diagrams for " + \
1806 matrix_element.get('processes')[0].nice_string(print_weighted=False))
1807 plot.draw()
1808
1809
1810
1811
1812 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
1813 open('unique_id.inc','w').write(
1814 """ integer UNIQUE_ID
1815 parameter(UNIQUE_ID=1)""")
1816
1817 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
1818 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
1819 'MadLoopCommons.f','MadLoopParams.inc']
1820
1821
1822 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
1823 pjoin('..','MadLoop5_resources'))
1824
1825 for file in linkfiles:
1826 ln('../../%s' % file)
1827
1828 os.system("ln -s ../../makefile_loop makefile")
1829
1830 linkfiles = ['mpmodule.mod']
1831
1832 for file in linkfiles:
1833 ln('../../../lib/%s' % file)
1834
1835 linkfiles = ['coef_specs.inc']
1836
1837 for file in linkfiles:
1838 ln('../../../Source/DHELAS/%s' % file)
1839
1840
1841 os.chdir(cwd)
1842
1843 if not calls:
1844 calls = 0
1845 return calls
1846
1848 """computes the QED/QCD orders from the knowledge of the n of ext particles
1849 and of the weighted orders"""
1850
1851
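# Sketch of the reasoning (assuming a tree-level process with only 3-point
# vertices): such an amplitude with nexternal legs has nexternal-2 vertices,
# so QED + QCD = nexternal - 2, while WEIGHTED counts QED vertices twice,
# i.e. WEIGHTED = 2*QED + QCD. Solving these two relations gives the lines below.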
1852 QED = weighted - nexternal + 2
1853 QCD = weighted - 2 * QED
1854 return QED, QCD
1855
1856
1857
1858
1859
1860
1861
1863 """Creates the OLE_order.lh file. This function should be edited according
1864 to the OLP which is used. For now it is generic."""
1865
1866
1867 if len(process_list)==0:
1868 raise fks_common.FKSProcessError('No matrix elements provided to '+\
1869 'the function write_lh_order.')
1870 return
1871
1872
1873
1874 orders = process_list[0].get('orders')
1875 if 'QED' in orders.keys() and 'QCD' in orders.keys():
1876 QED=orders['QED']
1877 QCD=orders['QCD']
1878 elif 'QED' in orders.keys():
1879 QED=orders['QED']
1880 QCD=0
1881 elif 'QCD' in orders.keys():
1882 QED=0
1883 QCD=orders['QCD']
1884 else:
1885 QED, QCD = self.get_qed_qcd_orders_from_weighted(\
1886 len(process_list[0].get('legs')),
1887 orders['WEIGHTED'])
1888
1889 replace_dict = {}
1890 replace_dict['mesq'] = 'CHaveraged'
1891 replace_dict['corr'] = ' '.join(process_list[0].\
1892 get('perturbation_couplings'))
1893 replace_dict['irreg'] = 'CDR'
1894 replace_dict['aspow'] = QCD
1895 replace_dict['aepow'] = QED
1896 replace_dict['modelfile'] = './param_card.dat'
1897 replace_dict['params'] = 'alpha_s'
1898 proc_lines=[]
1899 for proc in process_list:
1900 proc_lines.append('%s -> %s' % \
1901 (' '.join(str(l['id']) for l in proc['legs'] if not l['state']),
1902 ' '.join(str(l['id']) for l in proc['legs'] if l['state'])))
1903 replace_dict['pdgs'] = '\n'.join(proc_lines)
1904 replace_dict['symfin'] = 'Yes'
1905 content = \
1906 "#OLE_order written by MadGraph5_aMC@NLO\n\
1907 \n\
1908 MatrixElementSquareType %(mesq)s\n\
1909 CorrectionType %(corr)s\n\
1910 IRregularisation %(irreg)s\n\
1911 AlphasPower %(aspow)d\n\
1912 AlphaPower %(aepow)d\n\
1913 NJetSymmetrizeFinal %(symfin)s\n\
1914 ModelFile %(modelfile)s\n\
1915 Parameters %(params)s\n\
1916 \n\
1917 # process\n\
1918 %(pdgs)s\n\
1919 " % replace_dict
1920
1921 file = open(filename, 'w')
1922 file.write(content)
1923 file.close()
1924 return
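# Minimal sketch (with hypothetical toy legs) of the '# process' block format
# written above: initial-state PDG codes, then '->', then final-state codes,
# one line per process in process_list.
toy_legs = [{'id': 2, 'state': False}, {'id': -2, 'state': False},
            {'id': 23, 'state': True}, {'id': 21, 'state': True}]
proc_toy_line = '%s -> %s' % (
    ' '.join(str(l['id']) for l in toy_legs if not l['state']),
    ' '.join(str(l['id']) for l in toy_legs if l['state']))
# proc_toy_line == '2 -2 -> 23 21'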
1925
1926
1927
1928
1929
1930
1932 """Export a matrix element to a born.f file in MadFKS format"""
1933
1934 matrix_element = fksborn.born_matrix_element
1935
1936 if not matrix_element.get('processes') or \
1937 not matrix_element.get('diagrams'):
1938 return 0
1939
1940 if not isinstance(writer, writers.FortranWriter):
1941 raise writers.FortranWriter.FortranWriterError(\
1942 "writer not FortranWriter")
1943
1944 writers.FortranWriter.downcase = False
1945
1946 replace_dict = {}
1947
1948
1949 info_lines = self.get_mg5_info_lines()
1950 replace_dict['info_lines'] = info_lines
1951
1952
1953 process_lines = self.get_process_info_lines(matrix_element)
1954 replace_dict['process_lines'] = process_lines
1955
1956
1957
1958 ncomb = matrix_element.get_helicity_combinations()
1959 replace_dict['ncomb'] = ncomb
1960
1961
1962 helicity_lines = self.get_helicity_lines(matrix_element)
1963 replace_dict['helicity_lines'] = helicity_lines
1964
1965
1966 ic_line = self.get_ic_line(matrix_element)
1967 replace_dict['ic_line'] = ic_line
1968
1969
1970
1971
1972
1973
1974 ngraphs = matrix_element.get_number_of_amplitudes()
1975 replace_dict['ngraphs'] = ngraphs
1976
1977
1978 nwavefuncs = matrix_element.get_number_of_wavefunctions()
1979 replace_dict['nwavefuncs'] = nwavefuncs
1980
1981
1982 ncolor = max(1, len(matrix_element.get('color_basis')))
1983 replace_dict['ncolor'] = ncolor
1984
1985
1986 color_data_lines = self.get_color_data_lines(matrix_element)
1987 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
1988
1989
1990 helas_calls = fortran_model.get_matrix_element_calls(\
1991 matrix_element)
1992 replace_dict['helas_calls'] = "\n".join(helas_calls)
1993
1994
1995 amp2_lines = self.get_amp2_lines(matrix_element)
1996 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
1997
1998
1999 jamp_lines = self.get_JAMP_lines(matrix_element)
2000 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2001
2002
2003 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2004 replace_dict['wavefunctionsize'] = 20
2005 else:
2006 replace_dict['wavefunctionsize'] = 8
2007
2008
2009 ij_lines = self.get_ij_lines(fksborn)
2010 replace_dict['ij_lines'] = '\n'.join(ij_lines)
2011
2012
2013 den_factor_lines = self.get_den_factor_lines(fksborn)
2014 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2015
2016
2017 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()),1)
2018
2019 file = open(os.path.join(_file_path, \
2020 'iolibs/template_files/born_fks.inc')).read()
2021 file = file % replace_dict
2022
2023
2024 writer.writelines(file)
2025
2026 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2027
2028
2030 """Export a matrix element to a born_hel.f file in MadFKS format"""
2031
2032 matrix_element = fksborn.born_matrix_element
2033
2034 if not matrix_element.get('processes') or \
2035 not matrix_element.get('diagrams'):
2036 return 0
2037
2038 if not isinstance(writer, writers.FortranWriter):
2039 raise writers.FortranWriter.FortranWriterError(\
2040 "writer not FortranWriter")
2041
2042 writers.FortranWriter.downcase = False
2043
2044 replace_dict = {}
2045
2046
2047 info_lines = self.get_mg5_info_lines()
2048 replace_dict['info_lines'] = info_lines
2049
2050
2051 process_lines = self.get_process_info_lines(matrix_element)
2052 replace_dict['process_lines'] = process_lines
2053
2054
2055
2056 ncomb = matrix_element.get_helicity_combinations()
2057 replace_dict['ncomb'] = ncomb
2058
2059
2060 helicity_lines = self.get_helicity_lines(matrix_element)
2061 replace_dict['helicity_lines'] = helicity_lines
2062
2063
2064 ic_line = self.get_ic_line(matrix_element)
2065 replace_dict['ic_line'] = ic_line
2066
2067
2068
2069
2070
2071
2072 ngraphs = matrix_element.get_number_of_amplitudes()
2073 replace_dict['ngraphs'] = ngraphs
2074
2075
2076 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2077 replace_dict['nwavefuncs'] = nwavefuncs
2078
2079
2080 ncolor = max(1, len(matrix_element.get('color_basis')))
2081 replace_dict['ncolor'] = ncolor
2082
2083
2084 color_data_lines = self.get_color_data_lines(matrix_element)
2085 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2086
2087
2088 amp2_lines = self.get_amp2_lines(matrix_element)
2089 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2090
2091
2092 jamp_lines = self.get_JAMP_lines(matrix_element)
2093 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2094
2095
2096 den_factor_lines = self.get_den_factor_lines(fksborn)
2097 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2098
2099
2100 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2101
2102 file = open(os.path.join(_file_path, \
2103 'iolibs/template_files/born_fks_hel.inc')).read()
2104 file = file % replace_dict
2105
2106
2107 writer.writelines(file)
2108
2109 return
2110
2111
2112
2113
2114
2115
2117 """Creates the sborn_sf.f file, containing the calls to the different
2118 color linked borns"""
2119
2120 replace_dict = {}
2121 nborns = len(color_links)
2122 ifkss = []
2123 iborns = []
2124 mms = []
2125 nns = []
2126 iflines = "\n"
2127
2128
2129 file = """subroutine sborn_sf(p_born,m,n,wgt)
2130 implicit none
2131 include "nexternal.inc"
2132 double precision p_born(0:3,nexternal-1),wgt
2133 double complex wgt1(2)
2134 integer m,n \n"""
2135
2136 if nborns > 0:
2137
2138 for i, c_link in enumerate(color_links):
2139 iborn = i+1
2140
2141 iff = {True : 'if', False : 'elseif'}[i==0]
2142
2143 m, n = c_link['link']
2144
2145 if m != n:
2146 iflines += \
2147 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2148 %(iff)s ((m.eq.%(m)d .and. n.eq.%(n)d).or.(m.eq.%(n)d .and. n.eq.%(m)d)) then \n\
2149 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2150 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2151 else:
2152 iflines += \
2153 "c b_sf_%(iborn)3.3d links partons %(m)d and %(n)d \n\
2154 %(iff)s (m.eq.%(m)d .and. n.eq.%(n)d) then \n\
2155 call sb_sf_%(iborn)3.3d(p_born,wgt)\n\n" \
2156 %{'m':m, 'n': n, 'iff': iff, 'iborn': iborn}
2157
2158
2159 file += iflines + \
2160 """else
2161 wgt = 0d0
2162 endif
2163
2164 return
2165 end"""
2166 elif nborns == 0:
2167
2168 file+="""
2169 c This is a dummy function because
2170 c this subdir has no soft singularities
2171 wgt = 0d0
2172
2173 return
2174 end"""
2175
2176
2177 writer.writelines(file)
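# Illustrative only: the '%(iborn)3.3d' format used in the branches above
# zero-pads the color-link index to three digits, so the generated calls read
# e.g. 'call sb_sf_001(p_born,wgt)' for the first link.
assert 'sb_sf_%(iborn)3.3d' % {'iborn': 1} == 'sb_sf_001'
assert 'sb_sf_%(iborn)3.3d' % {'iborn': 12} == 'sb_sf_012'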
2178
2179
2180
2181
2182
2183
2185 """Create the b_sf_xxx.f file for the soft linked born in MadFKS format"""
2186
2187 matrix_element = copy.copy(fksborn.born_matrix_element)
2188
2189 if not matrix_element.get('processes') or \
2190 not matrix_element.get('diagrams'):
2191 return 0
2192
2193 if not isinstance(writer, writers.FortranWriter):
2194 raise writers.FortranWriter.FortranWriterError(\
2195 "writer not FortranWriter")
2196
2197 writers.FortranWriter.downcase = False
2198
2199 iborn = i + 1
2200 link = fksborn.color_links[i]
2201
2202 replace_dict = {}
2203
2204 replace_dict['iborn'] = iborn
2205
2206
2207 info_lines = self.get_mg5_info_lines()
2208 replace_dict['info_lines'] = info_lines
2209
2210
2211 process_lines = self.get_process_info_lines(matrix_element)
2212 replace_dict['process_lines'] = process_lines + \
2213 "\nc spectators: %d %d \n" % tuple(link['link'])
2214
2215
2216 ncomb = matrix_element.get_helicity_combinations()
2217 replace_dict['ncomb'] = ncomb
2218
2219
2220 helicity_lines = self.get_helicity_lines(matrix_element)
2221 replace_dict['helicity_lines'] = helicity_lines
2222
2223
2224 ic_line = self.get_ic_line(matrix_element)
2225 replace_dict['ic_line'] = ic_line
2226
2227
2228 den_factor_lines = self.get_den_factor_lines(fksborn)
2229 replace_dict['den_factor_lines'] = '\n'.join(den_factor_lines)
2230
2231
2232 ngraphs = matrix_element.get_number_of_amplitudes()
2233 replace_dict['ngraphs'] = ngraphs
2234
2235
2236 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2237 replace_dict['nwavefuncs'] = nwavefuncs
2238
2239
2240 ncolor1 = max(1, len(link['orig_basis']))
2241 replace_dict['ncolor1'] = ncolor1
2242 ncolor2 = max(1, len(link['link_basis']))
2243 replace_dict['ncolor2'] = ncolor2
2244
2245
2246 color_data_lines = self.get_color_data_lines_from_color_matrix(\
2247 link['link_matrix'])
2248 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2249
2250
2251 amp2_lines = self.get_amp2_lines(matrix_element)
2252 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2253
2254
2255 jamp_lines = self.get_JAMP_lines(matrix_element)
2256 new_jamp_lines = []
2257 for line in jamp_lines:
2258 line = string.replace(line, 'JAMP', 'JAMP1')
2259 new_jamp_lines.append(line)
2260 replace_dict['jamp1_lines'] = '\n'.join(new_jamp_lines)
2261
2262 matrix_element.set('color_basis', link['link_basis'] )
2263 jamp_lines = self.get_JAMP_lines(matrix_element)
2264 new_jamp_lines = []
2265 for line in jamp_lines:
2266 line = string.replace(line, 'JAMP', 'JAMP2')
2267 new_jamp_lines.append(line)
2268 replace_dict['jamp2_lines'] = '\n'.join(new_jamp_lines)
2269
2270
2271
2272 replace_dict['nconfs'] = len(fksborn.get_fks_info_list())
2273
2274 file = open(os.path.join(_file_path, \
2275 'iolibs/template_files/b_sf_xxx_fks.inc')).read()
2276 file = file % replace_dict
2277
2278
2279 writer.writelines(file)
2280
2281 return 0, ncolor1
2282
2283
2284
2285
2286
2287
2289 """Write the born_nhel.inc file for MG4."""
2290
2291 ncomb = matrix_element.get_helicity_combinations()
2292 file = " integer max_bhel, max_bcol \n"
2293 file = file + "parameter (max_bhel=%d)\nparameter(max_bcol=%d)" % \
2294 (ncomb, nflows)
2295
2296
2297 writer.writelines(file)
2298
2299 return True
2300
2301
2302
2303
2305 """Writes the content of nFKSconfigs.inc, which just gives the
2306 total FKS dirs as a parameter.
2307 nFKSconfigs is always >=1 (use a fake configuration for LOonly)"""
2308 replace_dict = {}
2309 replace_dict['nconfs'] = max(len(fksborn.get_fks_info_list()), 1)
2310 content = \
2311 """ INTEGER FKS_CONFIGS
2312 PARAMETER (FKS_CONFIGS=%(nconfs)d)
2313
2314 """ % replace_dict
2315
2316 writer.writelines(content)
2317
2318
2319
2320
2321
2323 """Writes the content of fks_info.inc, which lists the informations on the
2324 possible splittings of the born ME.
2325 nconfs is always >=1 (use a fake configuration for LOonly).
2326 The fake configuration use an 'antigluon' (id -21, color=8) as i_fks and
2327 the last colored particle as j_fks."""
2328
2329 replace_dict = {}
2330 fks_info_list = fksborn.get_fks_info_list()
2331 replace_dict['nconfs'] = max(len(fks_info_list), 1)
2332
2333
2334 if len(fks_info_list) > 0:
2335 fks_i_values = ', '.join(['%d' % info['fks_info']['i'] \
2336 for info in fks_info_list])
2337 fks_j_values = ', '.join(['%d' % info['fks_info']['j'] \
2338 for info in fks_info_list])
2339
2340 col_lines = []
2341 pdg_lines = []
2342 charge_lines = []
2343 fks_j_from_i_lines = []
2344 for i, info in enumerate(fks_info_list):
2345 col_lines.append( \
2346 'DATA (PARTICLE_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2347 % (i + 1, ', '.join('%d' % col for col in fksborn.real_processes[info['n_me']-1].colors) ))
2348 pdg_lines.append( \
2349 'DATA (PDG_TYPE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2350 % (i + 1, ', '.join('%d' % pdg for pdg in info['pdgs'])))
2351 charge_lines.append(\
2352 'DATA (PARTICLE_CHARGE_D(%d, IPOS), IPOS=1, NEXTERNAL) / %s /'\
2353 % (i + 1, ', '.join('%19.15fd0' % charg\
2354 for charg in fksborn.real_processes[info['n_me']-1].charges) ))
2355 fks_j_from_i_lines.extend(self.get_fks_j_from_i_lines(fksborn.real_processes[info['n_me']-1],\
2356 i + 1))
2357 else:
2358
2359
2360
2361 bornproc = fksborn.born_matrix_element.get('processes')[0]
2362 pdgs = [l.get('id') for l in bornproc.get('legs')] + [-21]
2363 colors = [l.get('color') for l in bornproc.get('legs')] + [8]
2364 charges = [0.] * len(colors)
2365
2366 fks_i = len(colors)
2367
2368
2369 fks_j=1
2370 for cpos, col in enumerate(colors[:-1]):
2371 if col != 1:
2372 fks_j = cpos+1
2373
2374 fks_i_values = str(fks_i)
2375 fks_j_values = str(fks_j)
2376 col_lines = ['DATA (PARTICLE_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2377 % ', '.join([str(col) for col in colors])]
2378 pdg_lines = ['DATA (PDG_TYPE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2379 % ', '.join([str(pdg) for pdg in pdgs])]
2380 charge_lines = ['DATA (PARTICLE_CHARGE_D(1, IPOS), IPOS=1, NEXTERNAL) / %s /' \
2381 % ', '.join('%19.15fd0' % charg for charg in charges)]
2382 fks_j_from_i_lines = ['DATA (FKS_J_FROM_I_D(1, %d, JPOS), JPOS = 0, 1) / 1, %d /' \
2383 % (fks_i, fks_j)]
2384
2385
2386 replace_dict['fks_i_line'] = "data fks_i_D / %s /" % fks_i_values
2387 replace_dict['fks_j_line'] = "data fks_j_D / %s /" % fks_j_values
2388 replace_dict['col_lines'] = '\n'.join(col_lines)
2389 replace_dict['pdg_lines'] = '\n'.join(pdg_lines)
2390 replace_dict['charge_lines'] = '\n'.join(charge_lines)
2391 replace_dict['fks_j_from_i_lines'] = '\n'.join(fks_j_from_i_lines)
2392
2393 content = \
2394 """ INTEGER IPOS, JPOS
2395 INTEGER FKS_I_D(%(nconfs)d), FKS_J_D(%(nconfs)d)
2396 INTEGER FKS_J_FROM_I_D(%(nconfs)d, NEXTERNAL, 0:NEXTERNAL)
2397 INTEGER PARTICLE_TYPE_D(%(nconfs)d, NEXTERNAL), PDG_TYPE_D(%(nconfs)d, NEXTERNAL)
2398 REAL*8 PARTICLE_CHARGE_D(%(nconfs)d, NEXTERNAL)
2399
2400 %(fks_i_line)s
2401 %(fks_j_line)s
2402
2403 %(fks_j_from_i_lines)s
2404
2405 C
2406 C Particle type:
2407 C octet = 8, triplet = 3, singlet = 1
2408 %(col_lines)s
2409
2410 C
2411 C Particle type according to PDG:
2412 C
2413 %(pdg_lines)s
2414
2415 C
2416 C Particle charge:
2417 C charge is set to 0. for QCD corrections, where it is irrelevant
2418 %(charge_lines)s
2419 """ % replace_dict
2420 if not isinstance(writer, writers.FortranWriter):
2421 raise writers.FortranWriter.FortranWriterError(\
2422 "writer not FortranWriter")
2423
2424 writers.FortranWriter.downcase = False
2425
2426 writer.writelines(content)
2427
2428 return True
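# A minimal sketch (illustrative helper, not used by the exporter) of the fake
# LOonly configuration built in the 'else' branch above: an 'antigluon'
# (id -21, color octet) is appended as i_fks and the last colored born leg is
# taken as j_fks.
def _fake_fks_pair(born_colors):
    colors = list(born_colors) + [8]      # append the antigluon octet
    fks_i = len(colors)                   # the antigluon is the last particle
    fks_j = 1
    for cpos, col in enumerate(colors[:-1]):
        if col != 1:                      # the last colored leg wins
            fks_j = cpos + 1
    return fks_i, fks_j
# e.g. for u u~ > z (colors 3, -3, 1) this returns (4, 2)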
2429
2430
2431
2432
2433
2434
2436 """Export a matrix element to a matrix.f file in MG4 madevent format"""
2437
2438 if not matrix_element.get('processes') or \
2439 not matrix_element.get('diagrams'):
2440 return 0,0
2441
2442 if not isinstance(writer, writers.FortranWriter):
2443 raise writers.FortranWriter.FortranWriterError(\
2444 "writer not FortranWriter")
2445
2446 writers.FortranWriter.downcase = False
2447
2448 replace_dict = {}
2449 replace_dict['N_me'] = n
2450
2451
2452 info_lines = self.get_mg5_info_lines()
2453 replace_dict['info_lines'] = info_lines
2454
2455
2456 process_lines = self.get_process_info_lines(matrix_element)
2457 replace_dict['process_lines'] = process_lines
2458
2459
2460 ncomb = matrix_element.get_helicity_combinations()
2461 replace_dict['ncomb'] = ncomb
2462
2463
2464 helicity_lines = self.get_helicity_lines(matrix_element)
2465 replace_dict['helicity_lines'] = helicity_lines
2466
2467
2468 ic_line = self.get_ic_line(matrix_element)
2469 replace_dict['ic_line'] = ic_line
2470
2471
2472
2473 den_factor_line = self.get_den_factor_line(matrix_element)
2474 replace_dict['den_factor_line'] = den_factor_line
2475
2476
2477 ngraphs = matrix_element.get_number_of_amplitudes()
2478 replace_dict['ngraphs'] = ngraphs
2479
2480
2481 ncolor = max(1, len(matrix_element.get('color_basis')))
2482 replace_dict['ncolor'] = ncolor
2483
2484
2485 color_data_lines = self.get_color_data_lines(matrix_element)
2486 replace_dict['color_data_lines'] = "\n".join(color_data_lines)
2487
2488
2489 helas_calls = fortran_model.get_matrix_element_calls(\
2490 matrix_element)
2491 replace_dict['helas_calls'] = "\n".join(helas_calls)
2492
2493
2494
2495 nwavefuncs = matrix_element.get_number_of_wavefunctions()
2496 replace_dict['nwavefuncs'] = nwavefuncs
2497
2498
2499 amp2_lines = self.get_amp2_lines(matrix_element)
2500 replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
2501
2502
2503 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
2504 replace_dict['wavefunctionsize'] = 20
2505 else:
2506 replace_dict['wavefunctionsize'] = 8
2507
2508
2509 jamp_lines = self.get_JAMP_lines(matrix_element)
2510
2511 replace_dict['jamp_lines'] = '\n'.join(jamp_lines)
2512
2513 realfile = open(os.path.join(_file_path, \
2514 'iolibs/template_files/realmatrix_fks.inc')).read()
2515
2516 realfile = realfile % replace_dict
2517
2518
2519 writer.writelines(realfile)
2520
2521 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
2522
2523
2524
2525
2526
2528
2529 """Write the auto_dsig.f file for MadFKS, which contains
2530 pdf call information"""
2531
2532 if not matrix_element.get('processes') or \
2533 not matrix_element.get('diagrams'):
2534 return 0
2535
2536 nexternal, ninitial = matrix_element.get_nexternal_ninitial()
2537
2538 if ninitial < 1 or ninitial > 2:
2539 raise writers.FortranWriter.FortranWriterError, \
2540 """Need ninitial = 1 or 2 to write auto_dsig file"""
2541
2542 replace_dict = {}
2543
2544 replace_dict['N_me'] = n
2545
2546
2547 info_lines = self.get_mg5_info_lines()
2548 replace_dict['info_lines'] = info_lines
2549
2550
2551 process_lines = self.get_process_info_lines(matrix_element)
2552 replace_dict['process_lines'] = process_lines
2553
2554 pdf_vars, pdf_data, pdf_lines = \
2555 self.get_pdf_lines_mir(matrix_element, ninitial, False, False)
2556 replace_dict['pdf_vars'] = pdf_vars
2557 replace_dict['pdf_data'] = pdf_data
2558 replace_dict['pdf_lines'] = pdf_lines
2559
2560 pdf_vars_mirr, pdf_data_mirr, pdf_lines_mirr = \
2561 self.get_pdf_lines_mir(matrix_element, ninitial, False, True)
2562 replace_dict['pdf_lines_mirr'] = pdf_lines_mirr
2563
2564 file = open(os.path.join(_file_path, \
2565 'iolibs/template_files/parton_lum_n_fks.inc')).read()
2566 file = file % replace_dict
2567
2568
2569 writer.writelines(file)
2570
2571
2572
2573
2574
2575
2576
2578 """Write the coloramps.inc file for MadEvent"""
2579
2580 lines = []
2581 lines.append( "logical icolamp(%d,%d,1)" % \
2582 (max(len(matrix_element.get('color_basis').keys()), 1),
2583 len(mapconfigs)))
2584
2585 lines += self.get_icolamp_lines(mapconfigs, matrix_element, 1)
2586
2587
2588 writer.writelines(lines)
2589
2590 return True
2591
2592
2593
2594
2595
2596
2598 """Write the leshouche.inc file for MG4"""
2599
2600
2601 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2602
2603 lines = []
2604 for iproc, proc in enumerate(matrix_element.get('processes')):
2605 legs = proc.get_legs_with_decays()
2606 lines.append("DATA (IDUP(i,%d),i=1,%d)/%s/" % \
2607 (iproc + 1, nexternal,
2608 ",".join([str(l.get('id')) for l in legs])))
2609 for i in [1, 2]:
2610 lines.append("DATA (MOTHUP(%d,i,%3r),i=1,%2r)/%s/" % \
2611 (i, iproc + 1, nexternal,
2612 ",".join([ "%3r" % 0 ] * ninitial + \
2613 [ "%3r" % i ] * (nexternal - ninitial))))
2614
2615
2616
2617 if iproc == 0:
2618
2619 if not matrix_element.get('color_basis'):
2620 for i in [1, 2]:
2621 lines.append("DATA (ICOLUP(%d,i, 1),i=1,%2r)/%s/" % \
2622 (i, nexternal,
2623 ",".join([ "%3r" % 0 ] * nexternal)))
2624 color_flow_list = []
2625
2626 else:
2627
2628 repr_dict = {}
2629 for l in legs:
2630 repr_dict[l.get('number')] = \
2631 proc.get('model').get_particle(l.get('id')).get_color()\
2632 * (-1)**(1+l.get('state'))
2633
2634 color_flow_list = \
2635 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2636 ninitial)
2637
2638 for cf_i, color_flow_dict in enumerate(color_flow_list):
2639 for i in [0, 1]:
2640 lines.append("DATA (ICOLUP(%d,i,%3r),i=1,%2r)/%s/" % \
2641 (i + 1, cf_i + 1, nexternal,
2642 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
2643 for l in legs])))
2644
2645
2646 writer.writelines(lines)
2647
2648 return len(color_flow_list)
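# Sketch (hypothetical 2 -> 2 process, PDGs 2 -2 -> 23 21) of the DATA layout
# produced above; initial legs get mother 0 while final-state legs point to
# the two incoming particles, and one ICOLUP pair follows per color flow.
example_idup = "DATA (IDUP(i,%d),i=1,%d)/%s/" % (1, 4, ",".join(['2', '-2', '23', '21']))
# example_idup == 'DATA (IDUP(i,1),i=1,4)/2,-2,23,21/'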
2649
2650
2651
2652
2653
2654
2656 """Write the configs.inc file for MadEvent"""
2657
2658
2659 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2660 lines = []
2661
2662 iconfig = 0
2663
2664 s_and_t_channels = []
2665 mapconfigs = []
2666
2667 model = matrix_element.get('processes')[0].get('model')
2668
2669
2670 base_diagrams = matrix_element.get('base_amplitude').get('diagrams')
2671 model = matrix_element.get('base_amplitude').get('process').get('model')
2672 minvert = min([max([len(vert.get('legs')) for vert in \
2673 diag.get('vertices')]) for diag in base_diagrams])
2674
2675 for idiag, diag in enumerate(base_diagrams):
2676 if any([len(vert.get('legs')) > minvert for vert in
2677 diag.get('vertices')]):
2678
2679 continue
2680 iconfig = iconfig + 1
2681 helas_diag = matrix_element.get('diagrams')[idiag]
2682 mapconfigs.append(helas_diag.get('number'))
2683 lines.append("# Diagram %d, Amplitude %d" % \
2684 (helas_diag.get('number'),helas_diag.get('amplitudes')[0]['number']))
2685
2686 lines.append("data mapconfig(%4d)/%4d/" % (iconfig,
2687 helas_diag.get('amplitudes')[0]['number']))
2688
2689
2690
2691 schannels, tchannels = helas_diag.get('amplitudes')[0].\
2692 get_s_and_t_channels(ninitial, model, 990)
2693
2694 s_and_t_channels.append([schannels, tchannels])
2695
2696
2697 allchannels = schannels
2698 if len(tchannels) > 1:
2699
2700 allchannels = schannels + tchannels
2701
2702 for vert in allchannels:
2703 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
2704 last_leg = vert.get('legs')[-1]
2705 lines.append("data (iforest(i,%3d,%4d),i=1,%d)/%s/" % \
2706 (last_leg.get('number'), iconfig, len(daughters),
2707 ",".join(["%3d" % d for d in daughters])))
2708 if vert in schannels:
2709 lines.append("data sprop(%4d,%4d)/%8d/" % \
2710 (last_leg.get('number'), iconfig,
2711 last_leg.get('id')))
2712 elif vert in tchannels[:-1]:
2713 lines.append("data tprid(%4d,%4d)/%8d/" % \
2714 (last_leg.get('number'), iconfig,
2715 abs(last_leg.get('id'))))
2716
2717
2718 lines.append("# Number of configs")
2719 lines.append("data mapconfig(0)/%4d/" % iconfig)
2720
2721
2722 writer.writelines(lines)
2723
2724 return iconfig, mapconfigs, s_and_t_channels
2725
2726
2727
2728
2729
2730
2732 """Write the decayBW.inc file for MadEvent"""
2733
2734 lines = []
2735
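# Note: both keys map to ".false." below, so gForceBW is written as disabled
# for every s-channel resonance in this (FKS) decayBW.inc.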
2736 booldict = {False: ".false.", True: ".false."}
2737
2738
2739 for iconf, config in enumerate(s_and_t_channels):
2740 schannels = config[0]
2741 for vertex in schannels:
2742
2743
2744 leg = vertex.get('legs')[-1]
2745 lines.append("data gForceBW(%d,%d)/%s/" % \
2746 (leg.get('number'), iconf + 1,
2747 booldict[leg.get('from_group')]))
2748
2749
2750 writer.writelines(lines)
2751
2752 return True
2753
2754
2755
2756
2757
2759 """Write the dname.mg file for MG4"""
2760
2761 line = "DIRNAME=P%s" % \
2762 matrix_element.get('processes')[0].shell_string()
2763
2764
2765 writer.write(line + "\n")
2766
2767 return True
2768
2769
2770
2771
2772
2774 """Write the iproc.dat file for MG4"""
2775
2776 line = "%d" % (me_number + 1)
2777
2778
2779 for line_to_write in writer.write_line(line):
2780 writer.write(line_to_write)
2781 return True
2782
2783
2784
2785
2786
2787
2788
2789
2790
2791
2792
2794 """generate the lines for fks.inc describing initializating the
2795 fks_j_from_i array"""
2796 lines = []
2797 if not me.isfinite:
2798 for ii, js in me.fks_j_from_i.items():
2799 if js:
2800 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2801 % (i, ii, len(js), len(js), ', '.join(["%d" % j for j in js])))
2802 else:
2803 lines.append('DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
2804 % (i, 2, 1, 1, '1'))
2805 lines.append('')
2806
2807 return lines
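# Illustrative only: for a hypothetical real-emission ME where i_fks = 5 can
# recoil against j_fks in (1, 2, 4), configuration 3 would produce the line
# below; element 0 of the array stores the multiplicity, the rest the j's.
toy_js = [1, 2, 4]
fks_toy_line = 'DATA (FKS_J_FROM_I_D(%d, %d, JPOS), JPOS = 0, %d) / %d, %s /' \
    % (3, 5, len(toy_js), len(toy_js), ', '.join(["%d" % j for j in toy_js]))
# fks_toy_line == 'DATA (FKS_J_FROM_I_D(3, 5, JPOS), JPOS = 0, 3) / 3, 1, 2, 4 /'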
2808
2809
2810
2811
2812
2814
2815 """Write the leshouche.inc file for MG4"""
2816
2817
2818 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2819
2820 lines = []
2821 for iproc, proc in enumerate(matrix_element.get('processes')):
2822 legs = proc.get_legs_with_decays()
2823 lines.append("I %4d %4d %s" % \
2824 (ime, iproc + 1,
2825 " ".join([str(l.get('id')) for l in legs])))
2826 for i in [1, 2]:
2827 lines.append("M %4d %4d %4d %s" % \
2828 (ime, i, iproc + 1,
2829 " ".join([ "%3d" % 0 ] * ninitial + \
2830 [ "%3d" % i ] * (nexternal - ninitial))))
2831
2832
2833
2834 if iproc == 0:
2835
2836 if not matrix_element.get('color_basis'):
2837 for i in [1, 2]:
2838 lines.append("C %4d %4d 1 %s" % \
2839 (ime, i,
2840 " ".join([ "%3d" % 0 ] * nexternal)))
2841 color_flow_list = []
2842 nflow = 1
2843
2844 else:
2845
2846 repr_dict = {}
2847 for l in legs:
2848 repr_dict[l.get('number')] = \
2849 proc.get('model').get_particle(l.get('id')).get_color()\
2850 * (-1)**(1+l.get('state'))
2851
2852 color_flow_list = \
2853 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2854 ninitial)
2855
2856 for cf_i, color_flow_dict in enumerate(color_flow_list):
2857 for i in [0, 1]:
2858 lines.append("C %4d %4d %4d %s" % \
2859 (ime, i + 1, cf_i + 1,
2860 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2861 for l in legs])))
2862
2863 nflow = len(color_flow_list)
2864
2865 nproc = len(matrix_element.get('processes'))
2866
2867 return lines, nproc, nflow
2868
2869
2871
2872 """As get_leshouche_lines, but for 'fake' real emission processes (LOonly
2873 In this case, write born color structure times ij -> i,j splitting)
2874 """
2875
2876 bornproc = matrix_element.get('processes')[0]
2877 colors = [l.get('color') for l in bornproc.get('legs')]
2878
2879 fks_i = len(colors)
2880
2881
2882 fks_j=1
2883 for cpos, col in enumerate(colors):
2884 if col != 1:
2885 fks_j = cpos+1
2886
2887
2888 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
2889 nexternal+=1
2890
2891 lines = []
2892 for iproc, proc in enumerate(matrix_element.get('processes')):
2893
2894 legs = proc.get_legs_with_decays() + \
2895 [fks_common.FKSLeg({'id': -21,
2896 'number': nexternal,
2897 'state': True,
2898 'fks': 'i',
2899 'color': 8,
2900 'charge': 0.,
2901 'massless': True,
2902 'spin': 3,
2903 'is_part': True,
2904 'self_antipart': True})]
2905
2906 lines.append("I %4d %4d %s" % \
2907 (ime, iproc + 1,
2908 " ".join([str(l.get('id')) for l in legs])))
2909 for i in [1, 2]:
2910 lines.append("M %4d %4d %4d %s" % \
2911 (ime, i, iproc + 1,
2912 " ".join([ "%3d" % 0 ] * ninitial + \
2913 [ "%3d" % i ] * (nexternal - ninitial))))
2914
2915
2916
2917 if iproc == 0:
2918
2919 if not matrix_element.get('color_basis'):
2920 for i in [1, 2]:
2921 lines.append("C %4d %4d 1 %s" % \
2922 (ime, i,
2923 " ".join([ "%3d" % 0 ] * nexternal)))
2924 color_flow_list = []
2925 nflow = 1
2926
2927 else:
2928
2929
2930
2931 repr_dict = {}
2932 for l in legs[:-1]:
2933 repr_dict[l.get('number')] = \
2934 proc.get('model').get_particle(l.get('id')).get_color()\
2935 * (-1)**(1+l.get('state'))
2936
2937 color_flow_list = \
2938 matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
2939 ninitial)
2940
2941 for cf_i, color_flow_dict in enumerate(color_flow_list):
2942
2943
2944 maxicol = max(sum(color_flow_dict.values(), []))
2945
2946 if color_flow_dict[fks_j][0] == 0:
2947 anti = True
2948 icol_j = color_flow_dict[fks_j][1]
2949 else:
2950 anti = False
2951 icol_j = color_flow_dict[fks_j][0]
2952
2953 if anti:
2954 color_flow_dict[nexternal] = (maxicol + 1, color_flow_dict[fks_j][1])
2955 color_flow_dict[fks_j][1] = maxicol + 1
2956 else:
2957 color_flow_dict[nexternal] = (color_flow_dict[fks_j][0], maxicol + 1)
2958 color_flow_dict[fks_j][0] = maxicol + 1
2959
2960 for i in [0, 1]:
2961 lines.append("C %4d %4d %4d %s" % \
2962 (ime, i + 1, cf_i + 1,
2963 " ".join(["%3d" % color_flow_dict[l.get('number')][i] \
2964 for l in legs])))
2965
2966 nflow = len(color_flow_list)
2967
2968 nproc = len(matrix_element.get('processes'))
2969
2970 return lines, nproc, nflow
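# Minimal re-implementation (illustrative only) of the color-flow surgery
# above: the extra 'antigluon' reuses fks_j's color (or anticolor) index and
# a brand-new index maxicol+1 closes the pair.
def _attach_antigluon_flow(flow_j, maxicol):
    flow_j = list(flow_j)
    if flow_j[0] == 0:                    # j carries only anticolor
        extra = (maxicol + 1, flow_j[1])
        flow_j[1] = maxicol + 1
    else:                                 # j carries color
        extra = (flow_j[0], maxicol + 1)
        flow_j[0] = maxicol + 1
    return flow_j, extra
# _attach_antigluon_flow([501, 0], 503) returns ([504, 0], (501, 504))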
2971
2972
2973
2974
2975
2977 """returns the lines with the information on the denominator keeping care
2978 of the identical particle factors in the various real emissions"""
2979
2980 lines = []
2981 info_list = fks_born.get_fks_info_list()
2982 if info_list:
2983
2984 lines.append('INTEGER IDEN_VALUES(%d)' % len(info_list))
2985 lines.append('DATA IDEN_VALUES /' + \
2986 ', '.join(['%d' % (
2987 fks_born.born_matrix_element.get_denominator_factor() ) \
2988 for info in info_list]) + '/')
2989 else:
2990
2991 lines.append('INTEGER IDEN_VALUES(1)')
2992 lines.append('DATA IDEN_VALUES / %d /' \
2993 % fks_born.born_matrix_element.get_denominator_factor())
2994
2995 return lines
2996
2997
2998
2999
3000
3002 """returns the lines with the information on the particle number of the born
3003 that splits"""
3004 info_list = fks_born.get_fks_info_list()
3005 lines = []
3006 if info_list:
3007
3008
3009 ij_list = [info['fks_info']['ij'] if \
3010 fks_born.born_matrix_element['processes'][0]['legs'][info['fks_info']['ij']-1]['massless'] \
3011 else 0 for info in info_list]
3012 lines.append('INTEGER IJ_VALUES(%d)' % len(info_list))
3013 lines.append('DATA IJ_VALUES /' + ', '.join(['%d' % ij for ij in ij_list]) + '/')
3014 else:
3015
3016 lines.append('INTEGER IJ_VALUES(1)')
3017 lines.append('DATA IJ_VALUES / 1 /')
3018
3019 return lines
3020
3021
3022 def get_pdf_lines_mir(self, matrix_element, ninitial, subproc_group = False,\
3023 mirror = False):
3024 """Generate the PDF lines for the auto_dsig.f file"""
3025
3026 processes = matrix_element.get('processes')
3027 model = processes[0].get('model')
3028
3029 pdf_definition_lines = ""
3030 pdf_data_lines = ""
3031 pdf_lines = ""
3032
3033 if ninitial == 1:
3034 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
3035 for i, proc in enumerate(processes):
3036 process_line = proc.base_string()
3037 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3038 pdf_lines = pdf_lines + "\nPD(IPROC) = 1d0\n"
3039 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
3040 else:
3041
3042 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
3043 p in processes]))),
3044 sorted(list(set([p.get_initial_pdg(2) for \
3045 p in processes])))]
3046
3047
3048 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
3049 sum(initial_states,[])])
3050 for key,val in pdf_codes.items():
3051 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')
3052
3053
3054 pdgtopdf = {21: 0, 22: 7}
3055
3056 for pdg in sum(initial_states,[]):
3057 if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
3058 pdgtopdf[pdg] = pdg
3059 elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
3060
3061 pdgtopdf[pdg] = 6000000 + pdg
3062
3063
3064 for i in [0,1]:
3065 pdf_definition_lines += "DOUBLE PRECISION " + \
3066 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3067 for pdg in \
3068 initial_states[i]]) + \
3069 "\n"
3070
3071
3072 for i in [0,1]:
3073 pdf_data_lines += "DATA " + \
3074 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
3075 for pdg in initial_states[i]]) + \
3076 "/%d*1D0/" % len(initial_states[i]) + \
3077 "\n"
3078
3079
3080 for i, init_states in enumerate(initial_states):
3081 if not mirror:
3082 ibeam = i + 1
3083 else:
3084 ibeam = 2 - i
3085 if subproc_group:
3086 pdf_lines = pdf_lines + \
3087 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
3088 % (ibeam, ibeam)
3089 else:
3090 pdf_lines = pdf_lines + \
3091 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
3092 % (ibeam, ibeam)
3093
3094 for initial_state in init_states:
3095 if initial_state in pdf_codes.keys():
3096 if subproc_group:
3097 if abs(pdgtopdf[initial_state]) <= 7:
3098 pdf_lines = pdf_lines + \
3099 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \
3100 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
3101 (pdf_codes[initial_state],
3102 i + 1, ibeam, pdgtopdf[initial_state],
3103 ibeam, ibeam)
3104 else:
3105
3106 pdf_lines = pdf_lines + \
3107 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3108 "%s%d=0d0\n") % \
3109 (pdf_codes[initial_state],i + 1)
3110 else:
3111 if abs(pdgtopdf[initial_state]) <= 7:
3112 pdf_lines = pdf_lines + \
3113 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \
3114 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
3115 (pdf_codes[initial_state],
3116 i + 1, ibeam, pdgtopdf[initial_state],
3117 ibeam, ibeam)
3118 else:
3119
3120 pdf_lines = pdf_lines + \
3121 ("c settings other partons flavours outside quark, gluon, photon to 0d0\n" + \
3122 "%s%d=0d0\n") % \
3123 (pdf_codes[initial_state],i + 1)
3124
3125 pdf_lines = pdf_lines + "ENDIF\n"
3126
3127
3128 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
3129 for proc in processes:
3130 process_line = proc.base_string()
3131 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
3132 pdf_lines = pdf_lines + "\nPD(IPROC) = "
3133 for ibeam in [1, 2]:
3134 initial_state = proc.get_initial_pdg(ibeam)
3135 if initial_state in pdf_codes.keys():
3136 pdf_lines = pdf_lines + "%s%d*" % \
3137 (pdf_codes[initial_state], ibeam)
3138 else:
3139 pdf_lines = pdf_lines + "1d0*"
3140
3141 pdf_lines = pdf_lines[:-1] + "\n"
3142
3143
3144 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
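# Illustrative only: the PDF variables declared above are the particle names
# with '~', '+' and '-' mangled into valid Fortran identifiers, suffixed by
# the beam index, e.g. d~ on beam 2 -> dx2, e+ on beam 1 -> ep1.
def _pdf_variable(name, ibeam):
    return '%s%d' % (name.replace('~', 'x').replace('+', 'p').replace('-', 'm'),
                     ibeam)
# _pdf_variable('d~', 2) == 'dx2'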
3145
3146
3147
3149 """Return the color matrix definition lines for the given color_matrix. Split
3150 rows in chunks of size n."""
3151
3152 if not color_matrix:
3153 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
3154 else:
3155 ret_list = []
3156 my_cs = color.ColorString()
3157 for index, denominator in \
3158 enumerate(color_matrix.get_line_denominators()):
3159
3160 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
3161
3162 num_list = color_matrix.get_line_numerators(index, denominator)
3163 for k in xrange(0, len(num_list), n):
3164 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
3165 (index + 1, k + 1, min(k + n, len(num_list)),
3166 ','.join(["%5r" % i for i in num_list[k:k + n]])))
3167
3168 return ret_list
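# Sketch of the chunking above (hypothetical 6-entry color-matrix row, n = 4):
# each row is split into blocks of n numerators, one DATA statement per block.
toy_row = [6, -2, -2, 6, -2, 6]
toy_blocks = [(k + 1, min(k + 4, len(toy_row)), toy_row[k:k + 4])
              for k in range(0, len(toy_row), 4)]
# toy_blocks == [(1, 4, [6, -2, -2, 6]), (5, 6, [-2, 6])]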
3169
3170
3171
3172
3175 """Write the maxamps.inc file for MG4."""
3176
3177 file = " integer maxamps, maxflow, maxproc, maxsproc\n"
3178 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \
3179 (maxamps, maxflows)
3180 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \
3181 (maxproc, maxsproc)
3182
3183
3184 writer.writelines(file)
3185
3186 return True
3187
3188
3189
3190
3192
3193 """Write the ncombs.inc file for MadEvent."""
3194
3195
3196 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3197
3198
3199 file = " integer n_max_cl\n"
3200 file = file + "parameter (n_max_cl=%d)" % (2 ** (nexternal+1))
3201
3202
3203 writer.writelines(file)
3204
3205 return True
3206
3207
3208
3209
3211 """Write a dummy config_subproc.inc file for MadEvent"""
3212
3213 lines = []
3214
3215 for iconfig in range(len(s_and_t_channels)):
3216 lines.append("DATA CONFSUB(1,%d)/1/" % \
3217 (iconfig + 1))
3218
3219
3220 writer.writelines(lines)
3221
3222 return True
3223
3224
3225
3226
3228 """Write the get_color.f file for MadEvent, which returns color
3229 for all particles used in the matrix element."""
3230
3231 try:
3232 matrix_elements=matrix_element.real_processes[0].matrix_element
3233 except IndexError:
3234 matrix_elements=[matrix_element.born_matrix_element]
3235
3236 if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
3237 matrix_elements = [matrix_elements]
3238
3239 model = matrix_elements[0].get('processes')[0].get('model')
3240
3241
3242
3243
3244 wf_ids = set(sum([sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3245 for wf in d.get('wavefunctions')],[]) \
3246 for d in me.get('diagrams')],[]) \
3247 for me in [real_proc.matrix_element]],[])\
3248 for real_proc in matrix_element.real_processes],[]))
3249
3250 wf_ids = wf_ids.union(set(sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
3251 for wf in d.get('wavefunctions')],[]) \
3252 for d in matrix_element.born_matrix_element.get('diagrams')],[])))
3253
3254
3255 leg_ids = set(sum([sum([sum([[l.get('id') for l in \
3256 p.get_legs_with_decays()] for p in \
3257 me.get('processes')], []) for me in \
3258 [real_proc.matrix_element]], []) for real_proc in \
3259 matrix_element.real_processes],[]))
3260
3261 leg_ids = leg_ids.union(set(sum([[l.get('id') for l in \
3262 p.get_legs_with_decays()] for p in \
3263 matrix_element.born_matrix_element.get('processes')], [])))
3264 particle_ids = sorted(list(wf_ids.union(leg_ids)))
3265
3266 lines = """function get_color(ipdg)
3267 implicit none
3268 integer get_color, ipdg
3269
3270 if(ipdg.eq.%d)then
3271 get_color=%d
3272 return
3273 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())
3274
3275 for part_id in particle_ids[1:]:
3276 lines += """else if(ipdg.eq.%d)then
3277 get_color=%d
3278 return
3279 """ % (part_id, model.get_particle(part_id).get_color())
3280
3281
3282 lines += """else if(ipdg.eq.%d)then
3283 c This is a dummy particle used in multiparticle vertices
3284 get_color=2
3285 return
3286 """ % model.get_first_non_pdg()
3287 lines += """else
3288 write(*,*)'Error: No color given for pdg ',ipdg
3289 get_color=0
3290 return
3291 endif
3292 end
3293 """
3294
3295
3296 writer.writelines(lines)
3297
3298 return True
3299
3300
3301
3302
3303
3304 def write_props_file(self, writer, matrix_element, fortran_model, s_and_t_channels):
3305 """Write the props.inc file for MadEvent. Needs input from
3306 write_configs_file. With respect to the parent routine, it has some
3307 more specific formats that allow the props.inc file to be read by the
3308 link program"""
3309
3310 lines = []
3311
3312 particle_dict = matrix_element.get('processes')[0].get('model').\
3313 get('particle_dict')
3314
3315 for iconf, configs in enumerate(s_and_t_channels):
3316 for vertex in configs[0] + configs[1][:-1]:
3317 leg = vertex.get('legs')[-1]
3318 if leg.get('id') not in particle_dict:
3319
3320 mass = 'zero'
3321 width = 'zero'
3322 pow_part = 0
3323 else:
3324 particle = particle_dict[leg.get('id')]
3325
3326 if particle.get('mass').lower() == 'zero':
3327 mass = particle.get('mass')
3328 else:
3329 mass = "abs(%s)" % particle.get('mass')
3330
3331 if particle.get('width').lower() == 'zero':
3332 width = particle.get('width')
3333 else:
3334 width = "abs(%s)" % particle.get('width')
3335
3336 pow_part = 1 + int(particle.is_boson())
3337
3338 lines.append("pmass(%3d,%4d) = %s" % \
3339 (leg.get('number'), iconf + 1, mass))
3340 lines.append("pwidth(%3d,%4d) = %s" % \
3341 (leg.get('number'), iconf + 1, width))
3342 lines.append("pow(%3d,%4d) = %d" % \
3343 (leg.get('number'), iconf + 1, pow_part))
3344
3345
3346 writer.writelines(lines)
3347
3348 return True
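# Minimal sketch (hypothetical parameter names) of the props.inc entries
# written above: massless legs keep the literal 'zero', massive ones are
# wrapped in abs(), and the propagator power is 1 for fermions, 2 for bosons.
def _prop_entry(mass, width, is_boson):
    wrap = lambda x: x if x.lower() == 'zero' else 'abs(%s)' % x
    return wrap(mass), wrap(width), 1 + int(is_boson)
# _prop_entry('MT', 'WT', False) == ('abs(MT)', 'abs(WT)', 1)
# _prop_entry('zero', 'zero', True) == ('zero', 'zero', 2)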
3349
3350
3351
3352
3353
3355 """Append this subprocess to the subproc.mg file for MG4"""
3356
3357
3358 writer.write(subprocdir + "\n")
3359
3360 return True
3361
3362
3363
3364
3365
3366
3367
3368
3371 """Class to take care of exporting a set of matrix elements to
3372 Fortran (v4) format."""
3373
3374
3377
3378
3379
3380
3381
3383 """create the directory run_name as a copy of the MadEvent
3384 Template, and clean the directory
3385 For now it is just the same as copy_v4template, but it will be modified
3386 """
3387 mgme_dir = self.mgme_dir
3388 dir_path = self.dir_path
3389 clean =self.opt['clean']
3390
3391
3392 if not os.path.isdir(dir_path):
3393 if not mgme_dir:
3394 raise MadGraph5Error, \
3395 "No valid MG_ME path given for MG4 run directory creation."
3396 logger.info('initialize a new directory: %s' % \
3397 os.path.basename(dir_path))
3398 shutil.copytree(os.path.join(mgme_dir, 'Template', 'NLO'), dir_path, True)
3399
3400 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template', 'Common'),
3401 dir_path)
3402
3403 for card in ['plot_card']:
3404 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')):
3405 try:
3406 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'),
3407 pjoin(self.dir_path, 'Cards', card + '_default.dat'))
3408 except IOError:
3409 logger.warning("Failed to copy " + card + ".dat to default")
3410
3411 elif not os.path.isfile(os.path.join(dir_path, 'TemplateVersion.txt')):
3412 if not mgme_dir:
3413 raise MadGraph5Error, \
3414 "No valid MG_ME path given for MG4 run directory creation."
3415 try:
3416 shutil.copy(os.path.join(mgme_dir, 'MGMEVersion.txt'), dir_path)
3417 except IOError:
3418 MG5_version = misc.get_pkg_info()
3419 open(os.path.join(dir_path, 'MGMEVersion.txt'), 'w').write( \
3420 "5." + MG5_version['version'])
3421
3422
3423 if clean:
3424 logger.info('remove old information in %s' % os.path.basename(dir_path))
3425 if os.environ.has_key('MADGRAPH_BASE'):
3426 subprocess.call([os.path.join('bin', 'internal', 'clean_template'),
3427 '--web'], cwd=dir_path)
3428 else:
3429 try:
3430 subprocess.call([os.path.join('bin', 'internal', 'clean_template')], \
3431 cwd=dir_path)
3432 except Exception, why:
3433 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \
3434 % (os.path.basename(dir_path),why))
3435
3436 MG_version = misc.get_pkg_info()
3437 open(os.path.join(dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write(
3438 MG_version['version'])
3439
3440
3441 self.link_CutTools(dir_path)
3442
3443 link_tir_libs=[]
3444 tir_libs=[]
3445 tir_include=[]
3446 for tir in self.all_tir:
3447 tir_dir="%s_dir"%tir
3448 libpath=getattr(self,tir_dir)
3449 libpath = self.link_TIR(os.path.join(self.dir_path, 'lib'),
3450 libpath,"lib%s.a"%tir,tir_name=tir)
3451 setattr(self,tir_dir,libpath)
3452 if libpath != "":
3453 if tir in ['pjfry','ninja','golem', 'samurai','collier']:
3454
3455
3456 link_tir_libs.append('-L%s/ -l%s'%(libpath,tir))
3457 tir_libs.append('%s/lib%s.$(libext)'%(libpath,tir))
3458
3459 if tir in ['ninja']:
3460 if not any(os.path.isfile(pjoin(libpath,'libavh_olo.%s'%ext))
3461 for ext in ['a','dylib','so']):
3462 raise MadGraph5Error(
3463 "The OneLOop library 'libavh_olo.(a|dylib|so)' could no be found in path '%s'. Please place a symlink to it there."%libpath)
3464 link_tir_libs.append('-L%s/ -l%s'%(libpath,'avh_olo'))
3465 tir_libs.append('%s/lib%s.$(libext)'%(libpath,'avh_olo'))
3466
3467 if tir in ['golem','samurai','ninja','collier']:
3468 trg_path = pjoin(os.path.dirname(libpath),'include')
3469 if os.path.isdir(trg_path):
3470 to_include = misc.find_includes_path(trg_path,
3471 self.include_names[tir])
3472 else:
3473 to_include = None
3474
3475 if to_include is None and tir=='collier':
3476 to_include = misc.find_includes_path(
3477 pjoin(libpath,'modules'),self.include_names[tir])
3478 if to_include is None:
3479 logger.error(
3480 'Could not find the include directory for %s, looking in %s.\n' % (tir ,str(trg_path))+
3481 'Generation carries on but you will need to edit the include path by hand in the makefiles.')
3482 to_include = '<Not_found_define_it_yourself>'
3483 tir_include.append('-I %s'%to_include)
3484 else:
3485 link_tir_libs.append('-l%s'%tir)
3486 tir_libs.append('$(LIBDIR)lib%s.$(libext)'%tir)
3487
3488 os.remove(os.path.join(self.dir_path,'SubProcesses','makefile_loop.inc'))
3489 cwd = os.getcwd()
3490 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3491 try:
3492 os.chdir(dirpath)
3493 except os.error:
3494 logger.error('Could not cd to directory %s' % dirpath)
3495 return 0
3496 filename = 'makefile_loop'
3497 calls = self.write_makefile_TIR(writers.MakefileWriter(filename),
3498 link_tir_libs,tir_libs,tir_include=tir_include)
3499 os.remove(os.path.join(self.dir_path,'Source','make_opts.inc'))
3500 dirpath = os.path.join(self.dir_path, 'Source')
3501 try:
3502 os.chdir(dirpath)
3503 except os.error:
3504 logger.error('Could not cd to directory %s' % dirpath)
3505 return 0
3506 filename = 'make_opts'
3507 calls = self.write_make_opts(writers.MakefileWriter(filename),
3508 link_tir_libs,tir_libs)
3509
3510 os.chdir(cwd)
3511
3512 cwd = os.getcwd()
3513 dirpath = os.path.join(self.dir_path, 'SubProcesses')
3514 try:
3515 os.chdir(dirpath)
3516 except os.error:
3517 logger.error('Could not cd to directory %s' % dirpath)
3518 return 0
3519
3520
3521 cpfiles= ["SubProcesses/MadLoopParamReader.f",
3522 "Cards/MadLoopParams.dat",
3523 "SubProcesses/MadLoopParams.inc"]
3524
3525 for file in cpfiles:
3526 shutil.copy(os.path.join(self.loop_dir,'StandAlone/', file),
3527 os.path.join(self.dir_path, file))
3528
3529 shutil.copy(pjoin(self.dir_path, 'Cards','MadLoopParams.dat'),
3530 pjoin(self.dir_path, 'Cards','MadLoopParams_default.dat'))
3531
3532
3533
3534 if os.path.exists(pjoin(self.dir_path, 'Cards', 'MadLoopParams.dat')):
3535 self.MadLoopparam = banner_mod.MadLoopParam(pjoin(self.dir_path,
3536 'Cards', 'MadLoopParams.dat'))
3537
3538 self.MadLoopparam.write(pjoin(self.dir_path,"SubProcesses",
3539 "MadLoopParams.dat"))
3540
3541
3542 MadLoopCommon = open(os.path.join(self.loop_dir,'StandAlone',
3543 "SubProcesses","MadLoopCommons.inc")).read()
3544 writer = writers.FortranWriter(os.path.join(self.dir_path,
3545 "SubProcesses","MadLoopCommons.f"))
3546 writer.writelines(MadLoopCommon%{
3547 'print_banner_commands':self.MadLoop_banner},
3548 context={'collier_available':self.tir_available_dict['collier']})
3549 writer.close()
3550
3551
3552 model_path = self.dir_path + '/Source/MODEL/'
3553
3554 if os.path.isfile(os.path.join(model_path,'mp_coupl.inc')):
3555 ln(model_path + '/mp_coupl.inc', self.dir_path + '/SubProcesses')
3556 if os.path.isfile(os.path.join(model_path,'mp_coupl_same_name.inc')):
3557 ln(model_path + '/mp_coupl_same_name.inc', \
3558 self.dir_path + '/SubProcesses')
3559
3560
3561 self.write_mp_files(writers.FortranWriter('cts_mprec.h'),\
3562 writers.FortranWriter('cts_mpc.h'),)
3563
3564 self.copy_python_files()
3565
3566
3567
3568 self.write_pdf_opendata()
3569
3570
3571
3572 os.chdir(cwd)
3573
3575 """writes the V**** directory inside the P**** directories specified in
3576 dir_name"""
3577
3578 cwd = os.getcwd()
3579
3580 matrix_element = loop_matrix_element
3581
3582
3583 dirpath = os.path.join(dir_name, 'MadLoop5_resources')
3584 try:
3585 os.mkdir(dirpath)
3586 except os.error as error:
3587 logger.warning(error.strerror + " " + dirpath)
3588
3589
3590 name = "V%s" % matrix_element.get('processes')[0].shell_string()
3591 dirpath = os.path.join(dir_name, name)
3592
3593 try:
3594 os.mkdir(dirpath)
3595 except os.error as error:
3596 logger.warning(error.strerror + " " + dirpath)
3597
3598 try:
3599 os.chdir(dirpath)
3600 except os.error:
3601 logger.error('Could not cd to directory %s' % dirpath)
3602 return 0
3603
3604 logger.info('Creating files in directory %s' % name)
3605
3606
3607 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
3608
3609 calls=self.write_loop_matrix_element_v4(None,matrix_element,fortran_model)
3610
3611
3612 ln(pjoin(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc'),
3613 abspath=False, cwd=None)
3614
3615
3616 filename = 'born_matrix.f'
3617 calls = self.write_bornmatrix(
3618 writers.FortranWriter(filename),
3619 matrix_element,
3620 fortran_model)
3621
3622 filename = 'nexternal.inc'
3623 self.write_nexternal_file(writers.FortranWriter(filename),
3624 nexternal, ninitial)
3625
3626 filename = 'pmass.inc'
3627 self.write_pmass_file(writers.FortranWriter(filename),
3628 matrix_element)
3629
3630 filename = 'ngraphs.inc'
3631 self.write_ngraphs_file(writers.FortranWriter(filename),
3632 len(matrix_element.get_all_amplitudes()))
3633
3634 filename = "loop_matrix.ps"
3635 writers.FortranWriter(filename).writelines("""C Post-helas generation loop-drawing is not ready yet.""")
3636 plot = draw.MultiEpsDiagramDrawer(base_objects.DiagramList(
3637 matrix_element.get('base_amplitude').get('loop_diagrams')[:1000]),
3638 filename,
3639 model=matrix_element.get('processes')[0].get('model'),
3640 amplitude='')
3641 logger.info("Drawing loop Feynman diagrams for " + \
3642 matrix_element.get('processes')[0].nice_string(\
3643 print_weighted=False))
3644 plot.draw()
3645
3646 filename = "born_matrix.ps"
3647 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
3648 get('born_diagrams'),
3649 filename,
3650 model=matrix_element.get('processes')[0].\
3651 get('model'),
3652 amplitude='')
3653 logger.info("Generating born Feynman diagrams for " + \
3654 matrix_element.get('processes')[0].nice_string(\
3655 print_weighted=False))
3656 plot.draw()
3657
3658
3659
3660
3661 self.write_global_specs(matrix_element, output_path=pjoin(dirpath,'global_specs.inc'))
3662
3663 open('unique_id.inc','w').write(
3664 """ integer UNIQUE_ID
3665 parameter(UNIQUE_ID=1)""")
3666
3667 linkfiles = ['coupl.inc', 'mp_coupl.inc', 'mp_coupl_same_name.inc',
3668 'cts_mprec.h', 'cts_mpc.h', 'MadLoopParamReader.f',
3669 'MadLoopParams.inc','MadLoopCommons.f']
3670
3671 for file in linkfiles:
3672 ln('../../%s' % file)
3673
3674 os.system("ln -s ../../makefile_loop makefile")
3675
3676
3677 ln(pjoin(os.path.pardir,os.path.pardir,'MadLoopParams.dat'),
3678 pjoin('..','MadLoop5_resources'))
3679
3680 linkfiles = ['mpmodule.mod']
3681
3682 for file in linkfiles:
3683 ln('../../../lib/%s' % file)
3684
3685 linkfiles = ['coef_specs.inc']
3686
3687 for file in linkfiles:
3688 ln('../../../Source/DHELAS/%s' % file)
3689
3690
3691 os.chdir(cwd)
3692
3693 if not calls:
3694 calls = 0
3695 return calls
3696
3697
3698
3699
3700
3702 """ writes the coef_specs.inc in the DHELAS folder. Should not be called in the
3703 non-optimized mode"""
3704 filename = os.path.join(self.dir_path, 'Source', 'DHELAS', 'coef_specs.inc')
3705
3706 replace_dict = {}
3707 replace_dict['max_lwf_size'] = 4
3708 replace_dict['vertex_max_coefs'] = max(\
3709 [q_polynomial.get_number_of_coefs_for_rank(n)
3710 for n in max_loop_vertex_ranks])
3711 IncWriter=writers.FortranWriter(filename,'w')
3712 IncWriter.writelines("""INTEGER MAXLWFSIZE
3713 PARAMETER (MAXLWFSIZE=%(max_lwf_size)d)
3714 INTEGER VERTEXMAXCOEFS
3715 PARAMETER (VERTEXMAXCOEFS=%(vertex_max_coefs)d)"""\
3716 % replace_dict)
3717 IncWriter.close()
3718