16 """Methods and classes to export models and matrix elements to Pythia 8
17 and C++ Standalone format."""
18
19 import fractions
20 import glob
21 import itertools
22 import logging
23 from math import fmod
24 import os
25 import re
26 import shutil
27 import subprocess
28
29 import madgraph.core.base_objects as base_objects
30 import madgraph.core.color_algebra as color
31 import madgraph.core.helas_objects as helas_objects
32 import madgraph.iolibs.drawing_eps as draw
33 import madgraph.iolibs.files as files
34 import madgraph.iolibs.helas_call_writers as helas_call_writers
35 import madgraph.iolibs.file_writers as writers
36 import madgraph.iolibs.template_files as template_files
37 import madgraph.iolibs.ufo_expression_parsers as parsers
38 from madgraph import MadGraph5Error, InvalidCmd, MG5DIR
39 from madgraph.iolibs.files import cp, ln, mv
40
41 import madgraph.various.misc as misc
42
43 import aloha.create_aloha as create_aloha
44 import aloha.aloha_writers as aloha_writers
45
46 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
47 logger = logging.getLogger('madgraph.export_pythia8')
48 pjoin = os.path.join
56 """Prepare export_dir as standalone_cpp directory, including:
57 src (for RAMBO, model and ALOHA files + makefile)
58 lib (with compiled libraries from src)
59 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories)
60 """
61
62 cwd = os.getcwd()
63
64 try:
65 os.mkdir(dirpath)
66 except os.error as error:
67 logger.warning(error.strerror + " " + dirpath)
68
69 try:
70 os.chdir(dirpath)
71 except os.error:
72 logger.error('Could not cd to directory %s' % dirpath)
73 return 0
74
75 logger.info('Creating subdirectories in directory %s' % dirpath)
76
77 try:
78 os.mkdir('src')
79 except os.error as error:
80 logger.warning(error.strerror + " " + dirpath)
81
82 try:
83 os.mkdir('lib')
84 except os.error as error:
85 logger.warning(error.strerror + " " + dirpath)
86
87 try:
88 os.mkdir('Cards')
89 except os.error as error:
90 logger.warning(error.strerror + " " + dirpath)
91
92 try:
93 os.mkdir('SubProcesses')
94 except os.error as error:
95 logger.warning(error.strerror + " " + dirpath)
96
97
98 open(os.path.join("Cards","param_card.dat"), 'w').write(\
99 model.write_param_card())
100
101 src_files = ['rambo.h', 'rambo.cc', 'read_slha.h', 'read_slha.cc']
102
103
104 for f in src_files:
105 cp(_file_path + 'iolibs/template_files/' + f, 'src')
106
107
108 makefile = read_template_file('Makefile_sa_cpp_src') % \
109 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
110 open(os.path.join('src', 'Makefile'), 'w').write(makefile)
111
112
113 cp(_file_path + 'iolibs/template_files/check_sa.cpp', 'SubProcesses')
114
115
116 makefile = read_template_file('Makefile_sa_cpp_sp') % \
117 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
118 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
119
120
121 os.chdir(cwd)
122
130
131 """Generate the Pxxxxx directory for a subprocess in C++ standalone,
132 including the necessary .h and .cc files"""
133
134 cwd = os.getcwd()
135
136 if format == 'standalone_cpp':
137 process_exporter_cpp = ProcessExporterCPP(matrix_element,
138 cpp_helas_call_writer)
139 elif format == 'matchbox_cpp':
140 process_exporter_cpp = ProcessExporterMatchbox(matrix_element,
141 cpp_helas_call_writer)
142 else:
        raise Exception('Unrecognized format %s' % format)
144
145
146 dirpath = os.path.join(path, \
147 "P%d_%s" % (process_exporter_cpp.process_number,
148 process_exporter_cpp.process_name))
149 try:
150 os.mkdir(dirpath)
151 except os.error as error:
152 logger.warning(error.strerror + " " + dirpath)
153
154 try:
155 os.chdir(dirpath)
156 except os.error:
157 logger.error('Could not cd to directory %s' % dirpath)
158 return 0
159
160 logger.info('Creating files in directory %s' % dirpath)
161
162 process_exporter_cpp.path = dirpath
163
164 process_exporter_cpp.generate_process_files()
165
166 linkfiles = ['check_sa.cpp', 'Makefile']
167
168
169 for file in linkfiles:
170 ln('../%s' % file)
171
172
173 os.chdir(cwd)
174
175 return
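# Illustrative example of the naming convention above: a matrix element for
# "g g > t t~" in the "sm" model with process number 0 ends up in
# SubProcesses/P0_Sigma_sm_gg_ttx/, which holds CPPProcess.h and CPPProcess.cc
# plus symlinks to ../check_sa.cpp and ../Makefile.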
176
178 """Make the model library in a C++ standalone directory"""
179
180 source_dir = os.path.join(dir_path, "src")
181
182 logger.info("Running make for src")
183 misc.compile(cwd=source_dir)
184
189 """Class to take care of exporting a set of matrix elements to
190 C++ format."""
191
192
193 process_dir = '.'
194 include_dir = '.'
195 process_template_h = 'cpp_process_h.inc'
196 process_template_cc = 'cpp_process_cc.inc'
197 process_class_template = 'cpp_process_class.inc'
198 process_definition_template = 'cpp_process_function_definitions.inc'
199 process_wavefunction_template = 'cpp_process_wavefunctions.inc'
200 process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc'
201 single_process_template = 'cpp_process_matrix.inc'
202
205
    def __init__(self, matrix_elements, cpp_helas_call_writer, process_string = "",
                 process_number = 0, path = os.getcwd()):
208 """Initiate with matrix elements, helas call writer, process
209 string, path. Generate the process .h and .cc files."""
210
211 if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
212 self.matrix_elements = matrix_elements.get('matrix_elements')
213 elif isinstance(matrix_elements, helas_objects.HelasMatrixElement):
214 self.matrix_elements = \
215 helas_objects.HelasMatrixElementList([matrix_elements])
216 elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList):
217 self.matrix_elements = matrix_elements
218 else:
            raise base_objects.PhysicsObject.PhysicsObjectError(
                  "Wrong object type for matrix_elements")
221
222 if not self.matrix_elements:
223 raise MadGraph5Error("No matrix elements to export")
224
225 self.model = self.matrix_elements[0].get('processes')[0].get('model')
226 self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name'))
227
228 self.processes = sum([me.get('processes') for \
229 me in self.matrix_elements], [])
230 self.processes.extend(sum([me.get_mirror_processes() for \
231 me in self.matrix_elements], []))
232
233 self.nprocesses = len(self.matrix_elements)
234 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
235 self.nprocesses = 2*len(self.matrix_elements)
236
237 if process_string:
238 self.process_string = process_string
239 else:
240 self.process_string = self.processes[0].base_string()
241
242 if process_number:
243 self.process_number = process_number
244 else:
245 self.process_number = self.processes[0].get('id')
246
247 self.process_name = self.get_process_name()
248 self.process_class = "CPPProcess"
249
250 self.path = path
251 self.helas_call_writer = cpp_helas_call_writer
252
253 if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter):
            raise self.ProcessExporterCPPError(
                  "helas_call_writer not CPPUFOHelasCallWriter")
256
257 self.nexternal, self.ninitial = \
258 self.matrix_elements[0].get_nexternal_ninitial()
259 self.nfinal = self.nexternal - self.ninitial
260
261
262
263
264 self.single_helicities = True
265
266 hel_matrix = self.get_helicity_matrix(self.matrix_elements[0])
267
268 for me in self.matrix_elements[1:]:
269 if self.get_helicity_matrix(me) != hel_matrix:
270 self.single_helicities = False
271
272 if self.single_helicities:
273
274
275
276
277 self.wavefunctions = []
278 wf_number = 0
279
280 for me in self.matrix_elements:
281 for iwf, wf in enumerate(me.get_all_wavefunctions()):
282 try:
283 old_wf = \
284 self.wavefunctions[self.wavefunctions.index(wf)]
285 wf.set('number', old_wf.get('number'))
286 except ValueError:
287 wf_number += 1
288 wf.set('number', wf_number)
289 self.wavefunctions.append(wf)
290
291
292 self.amplitudes = helas_objects.HelasAmplitudeList()
293 amp_number = 0
294 for me in self.matrix_elements:
295 for iamp, amp in enumerate(me.get_all_amplitudes()):
296 try:
297 old_amp = \
298 self.amplitudes[self.amplitudes.index(amp)]
299 amp.set('number', old_amp.get('number'))
300 except ValueError:
301 amp_number += 1
302 amp.set('number', amp_number)
303 self.amplitudes.append(amp)
304 diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes})
305 self.amplitudes = helas_objects.HelasMatrixElement({\
306 'diagrams': helas_objects.HelasDiagramList([diagram])})
307
308
309
311 """Generate the .h and .cc files needed for C++, for the
312 processes described by multi_matrix_element"""
313
314
315 if not os.path.isdir(os.path.join(self.path, self.include_dir)):
316 os.makedirs(os.path.join(self.path, self.include_dir))
317 filename = os.path.join(self.path, self.include_dir,
318 '%s.h' % self.process_class)
319 self.write_process_h_file(writers.CPPWriter(filename))
320
321 if not os.path.isdir(os.path.join(self.path, self.process_dir)):
322 os.makedirs(os.path.join(self.path, self.process_dir))
323 filename = os.path.join(self.path, self.process_dir,
324 '%s.cc' % self.process_class)
325 self.write_process_cc_file(writers.CPPWriter(filename))
326
327 logger.info('Created files %(process)s.h and %(process)s.cc in' % \
328 {'process': self.process_class} + \
329 ' directory %(dir)s' % {'dir': os.path.split(filename)[0]})
330
331
333
334 replace_dict = {}
335
336
337 return replace_dict
338
339
340
369
370
371
372
374 """Write the class member definition (.cc) file for the process
375 described by matrix_element"""
376
377 if not isinstance(writer, writers.CPPWriter):
378 raise writers.CPPWriter.CPPWriterError(\
379 "writer not CPPWriter")
380
381 replace_dict = self.get_default_converter()
382
383
384 info_lines = get_mg5_info_lines()
385 replace_dict['info_lines'] = info_lines
386
387
388 replace_dict['process_file_name'] = self.process_name
389
390
391 replace_dict['model_name'] = self.model_name
392
393
394
395 process_function_definitions = \
396 self.get_process_function_definitions()
397 replace_dict['process_function_definitions'] = \
398 process_function_definitions
399
400 file = read_template_file(self.process_template_cc) % replace_dict
401
402
403 writer.writelines(file)
404
405
406
407
409 """The complete class definition for the process"""
410
411 replace_dict = {}
412
413
414 replace_dict['model_name'] = self.model_name
415
416
417 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
418 self.matrix_elements])
419
420 replace_dict['process_lines'] = process_lines
421
422
423 replace_dict['nfinal'] = self.nfinal
424
425
426 replace_dict['ninitial'] = self.ninitial
427
428
429 replace_dict['process_class_name'] = self.process_name
430
431
432 process_definition = "%s (%s)" % (self.process_string,
433 self.model_name)
434 replace_dict['process_definition'] = process_definition
435
436 process = self.processes[0]
437
438 replace_dict['process_code'] = self.process_number
439 replace_dict['nexternal'] = self.nexternal
440 replace_dict['nprocesses'] = self.nprocesses
441
442
443 color_amplitudes = self.matrix_elements[0].get_color_amplitudes()
444
445 replace_dict['ncolor'] = len(color_amplitudes)
446
447 if self.single_helicities:
448 replace_dict['all_sigma_kin_definitions'] = \
449 """// Calculate wavefunctions
450 void calculate_wavefunctions(const int perm[], const int hel[]);
451 static const int nwavefuncs = %d;
452 std::complex<double> w[nwavefuncs][18];
453 static const int namplitudes = %d;
454 std::complex<double> amp[namplitudes];""" % \
455 (len(self.wavefunctions),
456 len(self.amplitudes.get_all_amplitudes()))
457 replace_dict['all_matrix_definitions'] = \
458 "\n".join(["double matrix_%s();" % \
459 me.get('processes')[0].shell_string().\
460 replace("0_", "") \
461 for me in self.matrix_elements])
462
463 else:
464 replace_dict['all_sigma_kin_definitions'] = \
465 "\n".join(["void sigmaKin_%s();" % \
466 me.get('processes')[0].shell_string().\
467 replace("0_", "") \
468 for me in self.matrix_elements])
469 replace_dict['all_matrix_definitions'] = \
470 "\n".join(["double matrix_%s(const int hel[]);" % \
471 me.get('processes')[0].shell_string().\
472 replace("0_", "") \
473 for me in self.matrix_elements])
474
475
476 file = read_template_file(self.process_class_template) % replace_dict
477
478 return file
479
481 """The complete Pythia 8 class definition for the process"""
482
483 replace_dict = {}
484
485
486 replace_dict['model_name'] = self.model_name
487
488
489 replace_dict['process_lines'] = \
490 "\n".join([self.get_process_info_lines(me) for \
491 me in self.matrix_elements])
492
493
494 replace_dict['process_class_name'] = self.process_name
495
496 color_amplitudes = [me.get_color_amplitudes() for me in \
497 self.matrix_elements]
498
499 replace_dict['initProc_lines'] = \
500 self.get_initProc_lines(self.matrix_elements[0],
501 color_amplitudes)
502 replace_dict['reset_jamp_lines'] = \
503 self.get_reset_jamp_lines(color_amplitudes)
504 replace_dict['sigmaKin_lines'] = \
505 self.get_sigmaKin_lines(color_amplitudes)
506 replace_dict['sigmaHat_lines'] = \
507 self.get_sigmaHat_lines()
508
509 replace_dict['all_sigmaKin'] = \
510 self.get_all_sigmaKin_lines(color_amplitudes,
511 'CPPProcess')
512
513 file = read_template_file(self.process_definition_template) %\
514 replace_dict
515
516 return file
517
519 """Return process file name for the process in matrix_element"""
520
521 process_string = self.process_string
522
523
524 proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*(.*)$")
525 proc_number_re = proc_number_pattern.match(process_string)
526 proc_number = 0
527 if proc_number_re:
528 proc_number = int(proc_number_re.group(2))
529 process_string = proc_number_re.group(1) + \
530 proc_number_re.group(3)
531
532
533 order_pattern = re.compile("^(.+)\s+(\w+)\s*=\s*(\d+)\s*$")
534 order_re = order_pattern.match(process_string)
535 while order_re:
536 process_string = order_re.group(1)
537 order_re = order_pattern.match(process_string)
538
539 process_string = process_string.replace(' ', '')
540 process_string = process_string.replace('>', '_')
541 process_string = process_string.replace('+', 'p')
542 process_string = process_string.replace('-', 'm')
543 process_string = process_string.replace('~', 'x')
544 process_string = process_string.replace('/', '_no_')
545 process_string = process_string.replace('$', '_nos_')
546 process_string = process_string.replace('|', '_or_')
547 if proc_number != 0:
548 process_string = "%d_%s" % (proc_number, process_string)
549
550 process_string = "Sigma_%s_%s" % (self.model_name,
551 process_string)
552 return process_string
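    # Illustrative example of the mangling above: for model "sm" the process
    # string "g g > t t~" becomes "gg_ttx" ('>' -> '_', '~' -> 'x', spaces and
    # coupling orders dropped) and is returned as "Sigma_sm_gg_ttx"; with a
    # "@2" tag in the string the result would be "Sigma_sm_2_gg_ttx".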
553
555 """Return info lines describing the processes for this matrix element"""
556
557 return"\n".join([ "# " + process.nice_string().replace('\n', '\n# * ') \
558 for process in matrix_element.get('processes')])
559
560
562 """Get initProc_lines for function definition for Pythia 8 .cc file"""
563
564 initProc_lines = []
565
566 initProc_lines.append("// Set external particle masses for this matrix element")
567
568 for part in matrix_element.get_external_wavefunctions():
569 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
570 for i, colamp in enumerate(color_amplitudes):
571 initProc_lines.append("jamp2[%d] = new double[%d];" % \
572 (i, len(colamp)))
573
574 return "\n".join(initProc_lines)
575
577 """Get lines to reset jamps"""
578
579 ret_lines = ""
580 for icol, col_amp in enumerate(color_amplitudes):
581 ret_lines+= """for(int i=0;i < %(ncolor)d; i++)
582 jamp2[%(proc_number)d][i]=0.;\n""" % \
583 {"ncolor": len(col_amp), "proc_number": icol}
584 return ret_lines
585
586
610
611
613 """Get sigmaKin_lines for function definition for Pythia 8 .cc file"""
614
615
616 if self.single_helicities:
617 replace_dict = {}
618
619
620 replace_dict['ncomb'] = \
621 self.matrix_elements[0].get_helicity_combinations()
622
623
624 replace_dict['process_class_name'] = self.process_name
625
626
627 replace_dict['id1'] = self.processes[0].get('legs')[0].get('id')
628 replace_dict['id2'] = self.processes[0].get('legs')[1].get('id')
629
630
631 replace_dict['helicity_matrix'] = \
632 self.get_helicity_matrix(self.matrix_elements[0])
633
634
635 den_factors = [str(me.get_denominator_factor()) for me in \
636 self.matrix_elements]
637 if self.nprocesses != len(self.matrix_elements):
638 den_factors.extend(den_factors)
639 replace_dict['den_factors'] = ",".join(den_factors)
640 replace_dict['get_matrix_t_lines'] = "\n".join(
641 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
642 {"iproc": i, "proc_name": \
643 me.get('processes')[0].shell_string().replace("0_", "")} \
644 for i, me in enumerate(self.matrix_elements)])
645
646
647 mirror_matrix_lines = ""
648
649 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
650 mirror_matrix_lines += \
651 """ // Mirror initial state momenta for mirror process
652 perm[0]=1;
653 perm[1]=0;
654 // Calculate wavefunctions
655 calculate_wavefunctions(perm, helicities[ihel]);
656 // Mirror back
657 perm[0]=0;
658 perm[1]=1;
659 // Calculate matrix elements
660 """
661
662 mirror_matrix_lines += "\n".join(
663 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
664 {"iproc": i + len(self.matrix_elements), "proc_name": \
665 me.get('processes')[0].shell_string().replace("0_", "")} \
666 for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')])
667
668 replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines
669
670
671 file = \
672 read_template_file(\
673 self.process_sigmaKin_function_template) %\
674 replace_dict
675
676 return file
677
678 else:
679 ret_lines = "// Call the individual sigmaKin for each process\n"
680 return ret_lines + \
681 "\n".join(["sigmaKin_%s();" % \
682 me.get('processes')[0].shell_string().\
683 replace("0_", "") for \
684 me in self.matrix_elements])
685
687 """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file"""
688
689 ret_lines = []
690 if self.single_helicities:
691 ret_lines.append(\
692 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
693 class_name)
694 ret_lines.append("// Calculate wavefunctions for all processes")
695 ret_lines.append(self.get_calculate_wavefunctions(\
696 self.wavefunctions, self.amplitudes))
697 ret_lines.append("}")
698 else:
699 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
700 for i, me in enumerate(self.matrix_elements)])
701 ret_lines.extend([self.get_matrix_single_process(i, me,
702 color_amplitudes[i],
703 class_name) \
704 for i, me in enumerate(self.matrix_elements)])
705 return "\n".join(ret_lines)
706
707
709 """Write sigmaKin for each process"""
710
711
712
713 replace_dict = {}
714
715
716 replace_dict['proc_name'] = \
717 matrix_element.get('processes')[0].shell_string().replace("0_", "")
718
719
720 replace_dict['process_class_name'] = self.process_name
721
722
723 replace_dict['proc_number'] = i
724
725
726 replace_dict['ncomb'] = matrix_element.get_helicity_combinations()
727
728
729 replace_dict['helicity_matrix'] = \
730 self.get_helicity_matrix(matrix_element)
731
732 replace_dict['den_factor'] = matrix_element.get_denominator_factor()
733
734 file = \
735 read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\
736 replace_dict
737
738 return file
739
742 """Write matrix() for each process"""
743
744
745
746 replace_dict = {}
747
748
749 replace_dict['proc_name'] = \
750 matrix_element.get('processes')[0].shell_string().replace("0_", "")
751
752
753
754 if self.single_helicities:
755 replace_dict['matrix_args'] = ""
756 replace_dict['all_wavefunction_calls'] = "int i, j;"
757 else:
758 replace_dict['matrix_args'] = "const int hel[]"
759 wavefunctions = matrix_element.get_all_wavefunctions()
760 replace_dict['all_wavefunction_calls'] = \
761 """const int nwavefuncs = %d;
762 std::complex<double> w[nwavefuncs][18];
763 """ % len(wavefunctions)+ \
764 self.get_calculate_wavefunctions(wavefunctions, [])
765
766
767 replace_dict['process_class_name'] = class_name
768
769
770 replace_dict['proc_number'] = i
771
772
773 replace_dict['ncolor'] = len(color_amplitudes)
774
775 replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()
776
777
778 replace_dict['color_matrix_lines'] = \
779 self.get_color_matrix_lines(matrix_element)
780
781
782 replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes)
783
784
785
786 replace_dict = self.get_class_specific_definition_matrix(replace_dict, matrix_element)
787
788 file = read_template_file(self.single_process_template) % \
789 replace_dict
790
791 return file
792
794 """place to add some specific hack to a given exporter.
795 Please always use Super in that case"""
796
797 return converter
798
800 """Get sigmaHat_lines for function definition for Pythia 8 .cc file"""
801
802
803 beams = set([(process.get('legs')[0].get('id'),
804 process.get('legs')[1].get('id')) \
805 for process in self.processes])
806
807 res_lines = []
808
809
810
811 res_lines.append("// Select between the different processes")
812 for ibeam, beam_parts in enumerate(beams):
813
814 if ibeam == 0:
815 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
816 else:
817 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
818
819
820 beam_processes = [(i, me) for (i, me) in \
821 enumerate(self.matrix_elements) if beam_parts in \
822 [(process.get('legs')[0].get('id'),
823 process.get('legs')[1].get('id')) \
824 for process in me.get('processes')]]
825
826
827 beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \
828 enumerate(self.matrix_elements) if beam_parts in \
829 [(process.get('legs')[0].get('id'),
830 process.get('legs')[1].get('id')) \
831 for process in me.get_mirror_processes()]])
832
833
834 res_lines.append("// Add matrix elements for processes with beams %s" % \
835 repr(beam_parts))
836 res_lines.append("return %s;" % \
837 ("+".join(["matrix_element[%i]*%i" % \
838 (i, len([proc for proc in \
839 me.get('processes') if beam_parts == \
840 (proc.get('legs')[0].get('id'),
841 proc.get('legs')[1].get('id')) or \
842 me.get('has_mirror_process') and \
843 beam_parts == \
844 (proc.get('legs')[1].get('id'),
845 proc.get('legs')[0].get('id'))])) \
846 for (i, me) in beam_processes]).\
847 replace('*1', '')))
848 res_lines.append("}")
849
850
851 res_lines.append("else {")
852 res_lines.append("// Return 0 if not correct initial state assignment")
853 res_lines.append(" return 0.;}")
854
855 return "\n".join(res_lines)
856
857
859 """Return the Helicity matrix definition lines for this matrix element"""
860
        helicity_line = "static const int helicities[ncomb][nexternal] = {"
862 helicity_line_list = []
863
864 for helicities in matrix_element.get_helicity_matrix(allow_reverse=False):
865 helicity_line_list.append("{"+",".join(['%d'] * len(helicities)) % \
866 tuple(helicities) + "}")
867
868 return helicity_line + ",".join(helicity_line_list) + "};"
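    # The helicities line generated above has the form (illustrative, for four
    # spin-1/2 external particles and hence ncomb = 16; emitted as a single
    # line in the real output):
    #   static const int helicities[ncomb][nexternal] =
    #   {{-1,-1,-1,-1},{-1,-1,-1,1},...,{1,1,1,1}};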
869
871 """Return the denominator factor line for this matrix element"""
872
873 return "const int denominator = %d;" % \
874 matrix_element.get_denominator_factor()
875
877 """Return the color matrix definition lines for this matrix element. Split
878 rows in chunks of size n."""
879
880 if not matrix_element.get('color_matrix'):
881 return "\n".join(["static const double denom[1] = {1.};",
882 "static const double cf[1][1] = {1.};"])
883 else:
884 color_denominators = matrix_element.get('color_matrix').\
885 get_line_denominators()
886 denom_string = "static const double denom[ncolor] = {%s};" % \
887 ",".join(["%i" % denom for denom in color_denominators])
888
889 matrix_strings = []
890 my_cs = color.ColorString()
891 for index, denominator in enumerate(color_denominators):
892
893 num_list = matrix_element.get('color_matrix').\
894 get_line_numerators(index, denominator)
895
896 matrix_strings.append("{%s}" % \
897 ",".join(["%d" % i for i in num_list]))
898 matrix_string = "static const double cf[ncolor][ncolor] = {" + \
899 ",".join(matrix_strings) + "};"
900 return "\n".join([denom_string, matrix_string])
901
902
903
904
905
906
908 """Return the jamp = sum(fermionfactor * amp[i]) lines"""
909
910 res_list = []
911
912 for i, coeff_list in enumerate(color_amplitudes):
913
914 res = "jamp[%i]=" % i
915
916
917
918 list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list]
919 common_factor = False
920 diff_fracs = list(set(list_fracs))
921 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
922 common_factor = True
923 global_factor = diff_fracs[0]
924 res = res + '%s(' % coeff(1, global_factor, False, 0)
925
926 for (coefficient, amp_number) in coeff_list:
927 if common_factor:
928 res = res + "%samp[%d]" % (coeff(coefficient[0],
929 coefficient[1] / abs(coefficient[1]),
930 coefficient[2],
931 coefficient[3]),
932 amp_number - 1)
933 else:
934 res = res + "%samp[%d]" % (coeff(coefficient[0],
935 coefficient[1],
936 coefficient[2],
937 coefficient[3]),
938 amp_number - 1)
939
940 if common_factor:
941 res = res + ')'
942
943 res += ';'
944
945 res_list.append(res)
946
947 return "\n".join(res_list)
948
949 @staticmethod
956
957
958
959
def generate_process_files_pythia8(multi_matrix_element, cpp_helas_call_writer,
                                   process_string = "",
                                   process_number = 0, path = os.getcwd(),
                                   version='8.2'):
964
965 """Generate the .h and .cc files needed for Pythia 8, for the
966 processes described by multi_matrix_element"""
967
968 process_exporter_pythia8 = ProcessExporterPythia8(multi_matrix_element,
969 cpp_helas_call_writer,
970 process_string,
971 process_number,
972 path,
973 version=version)
974
975
976 model = process_exporter_pythia8.model
977 model_name = process_exporter_pythia8.model_name
978 process_exporter_pythia8.process_dir = \
979 'Processes_%(model)s' % {'model': \
980 model_name}
981 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir
982 process_exporter_pythia8.generate_process_files()
983 return process_exporter_pythia8
984
987 """Class to take care of exporting a set of matrix elements to
988 Matchbox format."""
989
990
991 process_class_template = 'matchbox_class.inc'
992 single_process_template = 'matchbox_matrix.inc'
993 process_definition_template = 'matchbox_function_definitions.inc'
994
996 """Get initProc_lines for function definition for Pythia 8 .cc file"""
997
998 initProc_lines = []
999
1000 initProc_lines.append("// Set external particle masses for this matrix element")
1001
1002 for part in matrix_element.get_external_wavefunctions():
1003 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
1004 return "\n".join(initProc_lines)
1005
1006
1017
1019 """Get sigmaKin_process for all subprocesses for MAtchbox .cc file"""
1020
1021 ret_lines = []
1022 if self.single_helicities:
1023 ret_lines.append(\
1024 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
1025 class_name)
1026 ret_lines.append("// Calculate wavefunctions for all processes")
1027 ret_lines.append(self.get_calculate_wavefunctions(\
1028 self.wavefunctions, self.amplitudes))
1029 ret_lines.append(self.get_jamp_lines(color_amplitudes[0]))
1030 ret_lines.append("}")
1031 else:
1032 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
1033 for i, me in enumerate(self.matrix_elements)])
1034 ret_lines.extend([self.get_matrix_single_process(i, me,
1035 color_amplitudes[i],
1036 class_name) \
1037 for i, me in enumerate(self.matrix_elements)])
1038 return "\n".join(ret_lines)
1039
1040
1042 """Return the color matrix definition lines for this matrix element. Split
1043 rows in chunks of size n."""
1044
1045 if not matrix_element.get('color_matrix'):
1046 return "\n".join(["static const double res[1][1] = {-1.};"])
1047
1048
1049 color_denominators = matrix_element.get('color_matrix').\
1050 get_line_denominators()
1051 matrix_strings = []
1052 my_cs = color.ColorString()
1053
1054 for i_color in xrange(len(color_denominators)):
1055
1056 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
1057 t_str=repr(my_cs)
1058 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
1059
1060
1061 all_matches = t_match.findall(t_str)
1062 tmp_color = []
1063 for match in all_matches:
1064 ctype, arg = match[0], [m.strip() for m in match[1].split(',')]
1065 if ctype not in ['T', 'Tr']:
                    raise self.ProcessExporterCPPError('Color structure not handled by Matchbox')
1067 tmp_color.append(arg)
1068
1069 nb_index = sum(len(o) for o in tmp_color)
1070 max_len = nb_index + (nb_index//2) -1
1071
1072 curr_color = tmp_color[0]
1073 for tcolor in tmp_color[1:]:
1074 curr_color += ['0'] + tcolor
1075 curr_color += ['0'] * (max_len- len(curr_color))
1076
1077 matrix_strings.append('{%s}' % ','.join(curr_color))
1078
1079 matrix_string = 'static const double res[%s][%s] = {%s};' % \
1080 (len(color_denominators), max_len, ",".join(matrix_strings))
1081
1082 return matrix_string
1083
1089 """Class to take care of exporting a set of matrix elements to
1090 Pythia 8 format."""
1091
1092
1093 process_template_h = 'pythia8_process_h.inc'
1094 process_template_cc = 'pythia8_process_cc.inc'
1095 process_class_template = 'pythia8_process_class.inc'
1096 process_definition_template = 'pythia8_process_function_definitions.inc'
1097 process_wavefunction_template = 'pythia8_process_wavefunctions.inc'
1098 process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc'
1099
1101 """Set process class name"""
1102
1103 if 'version' in opts:
1104 self.version = opts['version']
1105 del opts['version']
1106 else:
1107 self.version='8.2'
1108 super(ProcessExporterPythia8, self).__init__(*args, **opts)
1109
1110
1111 for me in self.matrix_elements:
1112 if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]:
1113 nex,nin = me.get_nexternal_ninitial()
                raise InvalidCmd(
                    "Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" %
                    (nin, nex-nin))
1117
1118 self.process_class = self.process_name
1119
1120
1121
1123
1124 replace_dict = {}
1125
1126 replace_dict['model_name'] = self.model_name
        if self.version == "8.2":
1128 replace_dict['include_prefix'] = 'Pythia8/'
1129 else:
1130 replace_dict['include_prefix'] = ''
1131
1132 replace_dict['version'] = self.version
1133
1134 return replace_dict
1135
1136
1137
1139 """The complete Pythia 8 class definition for the process"""
1140
1141 replace_dict = self.get_default_converter()
1142
1143
1144
1145 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
1146 self.matrix_elements])
1147
1148 replace_dict['process_lines'] = process_lines
1149
1150
1151 replace_dict['nfinal'] = self.nfinal
1152
1153
1154 replace_dict['process_class_name'] = self.process_name
1155
1156
1157 process_definition = "%s (%s)" % (self.process_string,
1158 self.model_name)
1159 replace_dict['process_definition'] = process_definition
1160
1161 process = self.processes[0]
1162 replace_dict['process_code'] = 10000 + \
1163 100*process.get('id') + \
1164 self.process_number
1165
1166 replace_dict['inFlux'] = self.get_process_influx()
1167
1168 replace_dict['id_masses'] = self.get_id_masses(process)
1169 replace_dict['resonances'] = self.get_resonance_lines()
1170
1171 replace_dict['nexternal'] = self.nexternal
1172 replace_dict['nprocesses'] = self.nprocesses
1173
1174 if self.single_helicities:
1175 replace_dict['all_sigma_kin_definitions'] = \
1176 """// Calculate wavefunctions
1177 void calculate_wavefunctions(const int perm[], const int hel[]);
1178 static const int nwavefuncs = %d;
1179 std::complex<double> w[nwavefuncs][18];
1180 static const int namplitudes = %d;
1181 std::complex<double> amp[namplitudes];""" % \
1182 (len(self.wavefunctions),
1183 len(self.amplitudes.get_all_amplitudes()))
1184 replace_dict['all_matrix_definitions'] = \
1185 "\n".join(["double matrix_%s();" % \
1186 me.get('processes')[0].shell_string().\
1187 replace("0_", "") \
1188 for me in self.matrix_elements])
1189
1190 else:
1191 replace_dict['all_sigma_kin_definitions'] = \
1192 "\n".join(["void sigmaKin_%s();" % \
1193 me.get('processes')[0].shell_string().\
1194 replace("0_", "") \
1195 for me in self.matrix_elements])
1196 replace_dict['all_matrix_definitions'] = \
1197 "\n".join(["double matrix_%s(const int hel[]);" % \
1198 me.get('processes')[0].shell_string().\
1199 replace("0_", "") \
1200 for me in self.matrix_elements])
1201
1202
1203 file = read_template_file('pythia8_process_class.inc') % replace_dict
1204
1205 return file
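    # Note on the Pythia process code computed above: a process entered with
    # "@1" (so both process.get('id') and self.process_number are 1) is
    # registered with code 10000 + 100*1 + 1 = 10101 (illustrative).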
1206
1208 """The complete Pythia 8 class definition for the process"""
1209
1210
1211 replace_dict = self.get_default_converter()
1212
1213
1214 replace_dict['process_lines'] = \
1215 "\n".join([self.get_process_info_lines(me) for \
1216 me in self.matrix_elements])
1217
1218
1219 replace_dict['process_class_name'] = self.process_name
1220
1221 color_amplitudes = [me.get_color_amplitudes() for me in \
1222 self.matrix_elements]
1223
1224 replace_dict['initProc_lines'] = \
1225 self.get_initProc_lines(color_amplitudes)
1226 replace_dict['reset_jamp_lines'] = \
1227 self.get_reset_jamp_lines(color_amplitudes)
1228 replace_dict['sigmaKin_lines'] = \
1229 self.get_sigmaKin_lines(color_amplitudes)
1230 replace_dict['sigmaHat_lines'] = \
1231 self.get_sigmaHat_lines()
1232
1233 replace_dict['setIdColAcol_lines'] = \
1234 self.get_setIdColAcol_lines(color_amplitudes)
1235
1236 replace_dict['weightDecay_lines'] = \
1237 self.get_weightDecay_lines()
1238
1239 replace_dict['all_sigmaKin'] = \
1240 self.get_all_sigmaKin_lines(color_amplitudes,
1241 self.process_name)
1242
1243 file = read_template_file('pythia8_process_function_definitions.inc') %\
1244 replace_dict
1245
1246 return file
1247
1249 """Return process file name for the process in matrix_element"""
1250
1251
1252
1253 beams = set([tuple(sorted([process.get('legs')[0].get('id'),
1254 process.get('legs')[1].get('id')])) \
1255 for process in self.processes])
1256
1257
1258 antiquarks = range(-1, -6, -1)
1259 quarks = range(1,6)
1260 antileptons = range(-11, -17, -1)
1261 leptons = range(11, 17, 1)
1262 allquarks = antiquarks + quarks
1263 antifermions = antiquarks + antileptons
1264 fermions = quarks + leptons
1265 allfermions = allquarks + antileptons + leptons
1266 downfermions = range(-2, -5, -2) + range(-1, -5, -2) + \
1267 range(-12, -17, -2) + range(-11, -17, -2)
1268 upfermions = range(1, 5, 2) + range(2, 5, 2) + \
1269 range(11, 17, 2) + range(12, 17, 2)
1270
1271
1272
1273 set_tuples = [(set([(21, 21)]), "gg"),
1274 (set(list(itertools.product(allquarks, [21]))), "qg"),
1275 (set(zip(antiquarks, quarks)), "qqbarSame"),
1276 (set(list(itertools.product(allquarks,
1277 allquarks))), "qq"),
1278 (set(zip(antifermions, fermions)),"ffbarSame"),
1279 (set(zip(downfermions, upfermions)),"ffbarChg"),
1280 (set(list(itertools.product(allfermions,
1281 allfermions))), "ff"),
1282 (set(list(itertools.product(allfermions, [22]))), "fgm"),
1283 (set([(21, 22)]), "ggm"),
1284 (set([(22, 22)]), "gmgm")]
1285
1286 for set_tuple in set_tuples:
1287 if beams.issubset(set_tuple[0]):
1288 return set_tuple[1]
1289
1290 raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\
1291 repr(beams))
1292
1293 return
1294
1296 """Return the lines which define the ids for the final state particles,
1297 for the Pythia phase space"""
1298
1299 if self.nfinal == 1:
1300 return ""
1301
1302 mass_strings = []
1303 for i in range(2, len(process.get_legs_with_decays())):
1304 if self.model.get_particle(process.get_legs_with_decays()[i].get('id')).\
1305 get('mass') not in ['zero', 'ZERO']:
1306 mass_strings.append("int id%dMass() const {return %d;}" % \
1307 (i + 1, abs(process.get_legs_with_decays()[i].get('id'))))
1308
1309 return "\n".join(mass_strings)
1310
1312 """Return the lines which define the ids for intermediate resonances
1313 for the Pythia phase space"""
1314
1315 if self.nfinal == 1:
1316 return "virtual int resonanceA() const {return %d;}" % \
1317 abs(self.processes[0].get('legs')[2].get('id'))
1318
1319 res_strings = []
1320 res_letters = ['A', 'B']
1321
1322 sids, singleres, schannel = self.get_resonances()
1323
1324 for i, sid in enumerate(sids[:2]):
1325 res_strings.append("virtual int resonance%s() const {return %d;}"\
1326 % (res_letters[i], sid))
1327
1328 if schannel:
1329 res_strings.append("virtual bool isSChannel() const {return true;}")
1330
1331 if singleres != 0:
1332 res_strings.append("virtual int idSChannel() const {return %d;}" \
1333 % singleres)
1334
1335 return "\n".join(res_strings)
1336
1338 """Return the PIDs for any resonances in 2->2 and 2->3 processes."""
1339
1340 model = self.matrix_elements[0].get('processes')[0].get('model')
1341 new_pdg = model.get_first_non_pdg()
1342
1343 diagrams = sum([me.get('diagrams') for me in self.matrix_elements], [])
1344 resonances = []
1345 no_t_channels = True
1346 final_s_channels = []
1347 for diagram in diagrams:
1348 schannels, tchannels = diagram.get('amplitudes')[0].\
1349 get_s_and_t_channels(self.ninitial, model,
1350 new_pdg)
1351 for schannel in schannels:
1352 sid = schannel.get('legs')[-1].get('id')
1353 part = self.model.get_particle(sid)
1354 if part:
1355 width = self.model.get_particle(sid).get('width')
1356 if width.lower() != 'zero':
1357
1358 resonances.append(abs(sid))
1359 else:
1360 sid = 0
1361 if len(tchannels) == 1 and schannel == schannels[-1]:
1362 final_s_channels.append(abs(sid))
1363
1364 if len(tchannels) > 1:
1365
1366 no_t_channels = False
1367
1368 resonance_set = set(resonances)
1369 final_s_set = set(final_s_channels)
1370
1371 singleres = 0
1372
1373 if len(final_s_channels) == len(diagrams) and len(final_s_set) == 1 \
1374 and final_s_channels[0] != 0:
1375 singleres = final_s_channels[0]
1376
1377 resonance_set = list(set([pid for pid in resonance_set]))
1378
1379
1380
1381 schannel = no_t_channels and \
1382 not any(['QCD' in d.calculate_orders() for d in diagrams])
1383
1384 return resonance_set, singleres, schannel
1385
1387 """Get initProc_lines for function definition for Pythia 8 .cc file"""
1388
1389 initProc_lines = []
1390
1391 initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau")
1392
1393 if not self.model.get_particle(4) or \
1394 self.model.get_particle(4).get('mass').lower() == 'zero':
1395 cMassiveME = "0."
1396 else:
1397 cMassiveME = "particleDataPtr->m0(4)"
1398 initProc_lines.append("mcME = %s;" % cMassiveME)
1399 if not self.model.get_particle(5) or \
1400 self.model.get_particle(5).get('mass').lower() == 'zero':
1401 bMassiveME = "0."
1402 else:
1403 bMassiveME = "particleDataPtr->m0(5)"
1404 initProc_lines.append("mbME = %s;" % bMassiveME)
1405 if not self.model.get_particle(13) or \
1406 self.model.get_particle(13).get('mass').lower() == 'zero':
1407 muMassiveME = "0."
1408 else:
1409 muMassiveME = "particleDataPtr->m0(13)"
1410 initProc_lines.append("mmuME = %s;" % muMassiveME)
1411 if not self.model.get_particle(15) or \
1412 self.model.get_particle(15).get('mass').lower() == 'zero':
1413 tauMassiveME = "0."
1414 else:
1415 tauMassiveME = "particleDataPtr->m0(15)"
1416 initProc_lines.append("mtauME = %s;" % tauMassiveME)
1417
1418 for i, me in enumerate(self.matrix_elements):
1419 initProc_lines.append("jamp2[%d] = new double[%d];" % \
1420 (i, len(color_amplitudes[i])))
1421
1422 return "\n".join(initProc_lines)
1423
1425 """Generate lines to set final-state id and color info for process"""
1426
1427 res_lines = []
1428
1429
1430 beams = set([(process.get('legs')[0].get('id'),
1431 process.get('legs')[1].get('id')) \
1432 for process in self.processes])
1433
1434
1435 for ibeam, beam_parts in enumerate(beams):
1436 if ibeam == 0:
1437 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
1438 else:
1439 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
1440
1441 beam_processes = [(i, me) for (i, me) in \
1442 enumerate(self.matrix_elements) if beam_parts in \
1443 [(process.get('legs')[0].get('id'),
1444 process.get('legs')[1].get('id')) \
1445 for process in me.get('processes')]]
1446
1447 beam_mirror_processes = []
1448 if beam_parts[0] != beam_parts[1]:
1449 beam_mirror_processes = [(i, me) for (i, me) in \
1450 enumerate(self.matrix_elements) if beam_parts in \
1451 [(process.get('legs')[1].get('id'),
1452 process.get('legs')[0].get('id')) \
1453 for process in me.get('processes')]]
1454
1455 final_id_list = []
1456 final_mirror_id_list = []
1457 for (i, me) in beam_processes:
1458 final_id_list.extend([tuple([l.get('id') for l in \
1459 proc.get_legs_with_decays() if l.get('state')]) \
1460 for proc in me.get('processes') \
1461 if beam_parts == \
1462 (proc.get('legs')[0].get('id'),
1463 proc.get('legs')[1].get('id'))])
1464 for (i, me) in beam_mirror_processes:
1465 final_mirror_id_list.extend([tuple([l.get('id') for l in \
1466 proc.get_legs_with_decays() if l.get('state')]) \
1467 for proc in me.get_mirror_processes() \
1468 if beam_parts == \
1469 (proc.get('legs')[0].get('id'),
1470 proc.get('legs')[1].get('id'))])
1471 final_id_list = set(final_id_list)
1472 final_mirror_id_list = set(final_mirror_id_list)
1473
1474 if final_id_list and final_mirror_id_list or \
1475 not final_id_list and not final_mirror_id_list:
                raise self.ProcessExporterCPPError(
                    "Missing processes, or both process and mirror process")
1478
1479
1480 ncombs = len(final_id_list)+len(final_mirror_id_list)
1481
1482 res_lines.append("// Pick one of the flavor combinations %s" % \
1483 ", ".join([repr(ids) for ids in final_id_list]))
1484
1485 me_weight = []
1486 for final_ids in final_id_list:
1487 items = [(i, len([ p for p in me.get('processes') \
1488 if [l.get('id') for l in \
1489 p.get_legs_with_decays()] == \
1490 list(beam_parts) + list(final_ids)])) \
1491 for (i, me) in beam_processes]
1492 me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\
1493 (i, l) in items if l > 0]).\
1494 replace('*1', ''))
1495 if any([l>1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError(
                        "More than one process with identical "
                        "external particles is not supported")
1499
1500 for final_ids in final_mirror_id_list:
1501 items = [(i, len([ p for p in me.get_mirror_processes() \
1502 if [l.get('id') for l in p.get_legs_with_decays()] == \
1503 list(beam_parts) + list(final_ids)])) \
1504 for (i, me) in beam_mirror_processes]
1505 me_weight.append("+".join(["matrix_element[%i]*%i" % \
1506 (i+len(self.matrix_elements), l) for\
1507 (i, l) in items if l > 0]).\
1508 replace('*1', ''))
1509 if any([l>1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError(
                        "More than one process with identical "
                        "external particles is not supported")
1513
1514 if final_id_list:
1515 res_lines.append("int flavors[%d][%d] = {%s};" % \
1516 (ncombs, self.nfinal,
1517 ",".join(["{" + ",".join([str(id) for id \
1518 in ids]) + "}" for ids \
1519 in final_id_list])))
1520 elif final_mirror_id_list:
1521 res_lines.append("int flavors[%d][%d] = {%s};" % \
1522 (ncombs, self.nfinal,
1523 ",".join(["{" + ",".join([str(id) for id \
1524 in ids]) + "}" for ids \
1525 in final_mirror_id_list])))
1526 res_lines.append("vector<double> probs;")
1527 res_lines.append("double sum = %s;" % "+".join(me_weight))
1528 for me in me_weight:
1529 res_lines.append("probs.push_back(%s/sum);" % me)
1530 res_lines.append("int choice = rndmPtr->pick(probs);")
1531 for i in range(self.nfinal):
1532 res_lines.append("id%d = flavors[choice][%d];" % (i+3, i))
1533
1534 res_lines.append("}")
1535
1536 res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \
1537 range(1, self.nexternal + 1)]))
1538
1539
1540
1541
1542
1543
1544
1545
1546
1547
1548 res_lines.append("// Pick color flow")
1549
1550 res_lines.append("int ncolor[%d] = {%s};" % \
1551 (len(color_amplitudes),
1552 ",".join([str(len(colamp)) for colamp in \
1553 color_amplitudes])))
1554
1555
1556 for ime, me in enumerate(self.matrix_elements):
1557
1558 res_lines.append("if((%s)){" % \
1559 ")||(".join(["&&".join(["id%d == %d" % \
1560 (i+1, l.get('id')) for (i, l) in \
1561 enumerate(p.get_legs_with_decays())])\
1562 for p in me.get('processes')]))
1563 if ime > 0:
1564 res_lines[-1] = "else " + res_lines[-1]
1565
1566 proc = me.get('processes')[0]
1567 if not me.get('color_basis'):
1568
1569 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1570 else:
1571
1572 repr_dict = {}
1573 legs = proc.get_legs_with_decays()
1574 for l in legs:
1575 repr_dict[l.get('number')] = \
1576 proc.get('model').get_particle(l.get('id')).get_color()
1577
1578 color_flow_list = \
1579 me.get('color_basis').color_flow_decomposition(\
1580 repr_dict, self.ninitial)
1581
1582 ncolor = len(me.get('color_basis'))
1583 res_lines.append("""vector<double> probs;
1584 double sum = %s;
1585 for(int i=0;i<ncolor[%i];i++)
1586 probs.push_back(jamp2[%i][i]/sum);
1587 int ic = rndmPtr->pick(probs);""" % \
1588 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1589 in range(ncolor)]), ime, ime))
1590
1591 color_flows = []
1592 for color_flow_dict in color_flow_list:
1593 color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \
1594 for (l,i) in itertools.product(legs, [0,1])])
1595
1596
1597 res_lines.append("static int colors[%d][%d] = {%s};" % \
1598 (ncolor, 2 * self.nexternal,
1599 ",".join(["{" + ",".join([str(id) for id \
1600 in flows]) + "}" for flows \
1601 in color_flows])))
1602
1603 res_lines.append("setColAcol(%s);" % \
1604 ",".join(["colors[ic][%d]" % i for i in \
1605 range(2 * self.nexternal)]))
1606 res_lines.append('}')
1607
1608
1609 for ime, me in enumerate(self.matrix_elements):
1610 if not me.get('has_mirror_process'):
1611 continue
1612 res_lines.append("else if((%s)){" % \
1613 ")||(".join(["&&".join(["id%d == %d" % \
1614 (i+1, l.get('id')) for (i, l) in \
1615 enumerate(p.get_legs_with_decays())])\
1616 for p in me.get_mirror_processes()]))
1617
1618 proc = me.get('processes')[0]
1619 if not me.get('color_basis'):
1620
1621 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1622 else:
1623
1624 repr_dict = {}
1625 legs = proc.get_legs_with_decays()
1626 legs[0:2] = [legs[1],legs[0]]
1627 for l in legs:
1628 repr_dict[l.get('number')] = \
1629 proc.get('model').get_particle(l.get('id')).get_color()
1630
1631 color_flow_list = \
1632 me.get('color_basis').color_flow_decomposition(\
1633 repr_dict, self.ninitial)
1634
1635 ncolor = len(me.get('color_basis'))
1636 res_lines.append("""vector<double> probs;
1637 double sum = %s;
1638 for(int i=0;i<ncolor[%i];i++)
1639 probs.push_back(jamp2[%i][i]/sum);
1640 int ic = rndmPtr->pick(probs);""" % \
1641 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1642 in range(ncolor)]), ime, ime))
1643
1644 color_flows = []
1645 for color_flow_dict in color_flow_list:
1646 color_flows.append([color_flow_dict[l.get('number')][i] % 500 \
1647 for (l,i) in itertools.product(legs, [0,1])])
1648
1649
1650 res_lines.append("static int colors[%d][%d] = {%s};" % \
1651 (ncolor, 2 * self.nexternal,
1652 ",".join(["{" + ",".join([str(id) for id \
1653 in flows]) + "}" for flows \
1654 in color_flows])))
1655
1656 res_lines.append("setColAcol(%s);" % \
1657 ",".join(["colors[ic][%d]" % i for i in \
1658 range(2 * self.nexternal)]))
1659 res_lines.append('}')
1660
1661 return "\n".join(res_lines)
1662
1663
1665 """Get weightDecay_lines for function definition for Pythia 8 .cc file"""
1666
1667 weightDecay_lines = "// Just use isotropic decay (default)\n"
1668 weightDecay_lines += "return 1.;"
1669
1670 return weightDecay_lines
1671
1672
1673
1674
1676 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
1677
1678 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h'))\
1679 and not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia8', 'Pythia.h')):
1680 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir)
1681
1682
1683 model_builder = UFOModelConverterPythia8(model, pythia_dir, replace_dict=self.get_default_converter())
1684 model_builder.cc_file_dir = "Processes_" + model_builder.model_name
1685 model_builder.include_dir = model_builder.cc_file_dir
1686
1687 model_builder.write_files()
1688
1689 model_builder.write_makefile()
1690
1691 model_builder.write_param_card()
1692 return model_builder.model_name, model_builder.cc_file_dir
1693
1699 """Open a template file and return the contents."""
1700 try:
1701 return open(os.path.join(_file_path, \
1702 'iolibs', 'template_files', 'pythia8',
1703 filename)).read()
    except IOError:
1705 return open(os.path.join(_file_path, \
1706 'iolibs', 'template_files',
1707 filename)).read()
1708
1710 """Return info lines for MG5, suitable to place at beginning of
1711 Fortran files"""
1712
1713 info = misc.get_pkg_info()
1714 info_lines = ""
1715 if info and info.has_key('version') and info.has_key('date'):
1716 info_lines = "# MadGraph5_aMC@NLO v. %s, %s\n" % \
1717 (info['version'], info['date'])
1718 info_lines = info_lines + \
1719 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1720 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1721 else:
1722 info_lines = "# MadGraph5_aMC@NLO\n" + \
1723 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1724 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1725
1726 return info_lines
1727
def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1729 """Returns a nicely formatted string for the coefficients in JAMP lines"""
1730
1731 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power
1732
1733 if total_coeff == 1:
1734 if is_imaginary:
1735 return '+std::complex<double>(0,1)*'
1736 else:
1737 return '+'
1738 elif total_coeff == -1:
1739 if is_imaginary:
1740 return '-std::complex<double>(0,1)*'
1741 else:
1742 return '-'
1743
1744 res_str = '%+i.' % total_coeff.numerator
1745
1746 if total_coeff.denominator != 1:
1747
1748 res_str = res_str + '/%i.' % total_coeff.denominator
1749
1750 if is_imaginary:
1751 res_str = res_str + '*std::complex<double>(0,1)'
1752
1753 return res_str + '*'
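# A few hand-traced examples of the formatting above (illustrative, not run as
# doctests):
#   coeff(1, fractions.Fraction(1, 2), False, 0)  returns '+1./2.*'
#   coeff(-1, fractions.Fraction(1, 1), True, 0)  returns '-std::complex<double>(0,1)*'
#   coeff(1, fractions.Fraction(1, 3), False, 1)  returns '+'   (1/3 * Nc = 1 for Nc = 3)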
1754
1755
1756
1757
1758
def convert_model_to_cpp(model, output_dir, wanted_lorentz = [],
                         wanted_couplings = []):
1761 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
1762
1763
1764 model_builder = UFOModelConverterCPP(model,
1765 os.path.join(output_dir, 'src'),
1766 wanted_lorentz,
1767 wanted_couplings)
1768 model_builder.write_files()
1769
1775 """ A converter of the UFO-MG5 Model to the C++ format """
1776
1777
1778 output_name = 'C++ Standalone'
1779 namespace = 'MG5'
1780
1781
1782 type_dict = {"real": "double",
1783 "complex": "std::complex<double>"}
1784
1785
1786 compiler_option_re = re.compile('^#\w')
1787 namespace_re = re.compile('^using namespace')
1788
1789 slha_to_depend = {('SMINPUTS', (3,)): ('aS',),
1790 ('SMINPUTS', (1,)): ('aEM',)}
1791
1792
1793 include_dir = '.'
1794 cc_file_dir = '.'
1795 param_template_h = 'cpp_model_parameters_h.inc'
1796 param_template_cc = 'cpp_model_parameters_cc.inc'
1797 aloha_template_h = 'cpp_hel_amps_h.inc'
1798 aloha_template_cc = 'cpp_hel_amps_cc.inc'
1799
1800 copy_include_files = []
1801 copy_cc_files = []
1802
    def __init__(self, model, output_path, wanted_lorentz = [],
                 wanted_couplings = [], replace_dict={}):
1805 """ initialization of the objects """
1806
1807 self.model = model
1808 self.model_name = ProcessExporterCPP.get_model_name(model['name'])
1809
1810 self.dir_path = output_path
1811 self.default_replace_dict = dict(replace_dict)
1812
1813 self.wanted_lorentz = wanted_lorentz
1814
1815
1816
1817
1818 self.coups_dep = {}
1819 self.coups_indep = []
1820 self.params_dep = []
1821 self.params_indep = []
1822 self.p_to_cpp = parsers.UFOExpressionParserCPP()
1823
1824
1825 self.prepare_parameters()
1826 self.prepare_couplings(wanted_couplings)
1827
1836
1837
1838
1840 """Extract the parameters from the model, and store them in
1841 the two lists params_indep and params_dep"""
1842
1843
1844 keys = self.model['parameters'].keys()
1845 keys.sort(key=len)
1846 params_ext = []
1847 for key in keys:
1848 if key == ('external',):
1849 params_ext += [p for p in self.model['parameters'][key] if p.name]
1850 elif 'aS' in key:
1851 for p in self.model['parameters'][key]:
1852 self.params_dep.append(base_objects.ModelVariable(p.name,
1853 p.name + " = " + \
1854 self.p_to_cpp.parse(p.expr) + ";",
1855 p.type,
1856 p.depend))
1857 else:
1858 for p in self.model['parameters'][key]:
1859 if p.name == 'ZERO':
1860 continue
1861 self.params_indep.append(base_objects.ModelVariable(p.name,
1862 p.name + " = " + \
1863 self.p_to_cpp.parse(p.expr) + ";",
1864 p.type,
1865 p.depend))
1866
1867
1868 while params_ext:
1869 param = params_ext.pop(0)
1870
1871 expression = ""
1872 assert param.value.imag == 0
1873 if len(param.lhacode) == 1:
1874 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \
1875 (param.name, param.lhablock.lower(),
1876 param.lhacode[0], param.value.real)
1877 elif len(param.lhacode) == 2:
1878 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \
1879 (param.lhacode[0], param.lhacode[1])
1880 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \
1881 % (param.name, param.lhablock.lower(), param.value.real)
1882 else:
1883 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices")
1884 self.params_indep.insert(0,
1885 base_objects.ModelVariable(param.name,
1886 expression,
1887 'real'))
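    # Each external SLHA parameter thus ends up as a C++ assignment such as
    # (illustrative, for the SM inverse electromagnetic coupling):
    #   aEWM1 = slha.get_block_entry("sminputs", 1, 1.325070e+02);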
1888
1890 """Extract the couplings from the model, and store them in
1891 the two lists coups_indep and coups_dep"""
1892
1893
1894 keys = self.model['couplings'].keys()
1895 keys.sort(key=len)
1896 for key, coup_list in self.model['couplings'].items():
1897 if "aS" in key:
1898 for c in coup_list:
1899 if not wanted_couplings or c.name in wanted_couplings:
1900 self.coups_dep[c.name] = base_objects.ModelVariable(\
1901 c.name,
1902 c.expr,
1903 c.type,
1904 c.depend)
1905 else:
1906 for c in coup_list:
1907 if not wanted_couplings or c.name in wanted_couplings:
1908 self.coups_indep.append(base_objects.ModelVariable(\
1909 c.name,
1910 c.expr,
1911 c.type,
1912 c.depend))
1913
1914
1915 for coup in self.coups_dep.values() + self.coups_indep:
1916 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
1917
1918
1919
1921 """Generate the parameters_model.h and parameters_model.cc
1922 files, which have the parameters and couplings for the model."""
1923
1924 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1925 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1926 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1927 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1928
1929 parameter_h_file = os.path.join(self.dir_path, self.include_dir,
1930 'Parameters_%s.h' % self.model_name)
1931 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1932 'Parameters_%s.cc' % self.model_name)
1933
1934 file_h, file_cc = self.generate_parameters_class_files()
1935
1936
1937 writers.CPPWriter(parameter_h_file).writelines(file_h)
1938 writers.CPPWriter(parameter_cc_file).writelines(file_cc)
1939
1940
1941 for copy_file in self.copy_include_files:
1942 shutil.copy(os.path.join(_file_path, 'iolibs',
1943 'template_files',copy_file),
1944 os.path.join(self.dir_path, self.include_dir))
1945
1946 for copy_file in self.copy_cc_files:
1947 shutil.copy(os.path.join(_file_path, 'iolibs',
1948 'template_files',copy_file),
1949 os.path.join(self.dir_path, self.cc_file_dir))
1950
1951 logger.info("Created files %s and %s in directory" \
1952 % (os.path.split(parameter_h_file)[-1],
1953 os.path.split(parameter_cc_file)[-1]))
1954 logger.info("%s and %s" % \
1955 (os.path.split(parameter_h_file)[0],
1956 os.path.split(parameter_cc_file)[0]))
1957
1959 """Create the content of the Parameters_model.h and .cc files"""
1960
1961 replace_dict = self.default_replace_dict
1962
1963 replace_dict['info_lines'] = get_mg5_info_lines()
1964 replace_dict['model_name'] = self.model_name
1965
1966 replace_dict['independent_parameters'] = \
1967 "// Model parameters independent of aS\n" + \
1968 self.write_parameters(self.params_indep)
1969 replace_dict['independent_couplings'] = \
1970 "// Model parameters dependent on aS\n" + \
1971 self.write_parameters(self.params_dep)
1972 replace_dict['dependent_parameters'] = \
1973 "// Model couplings independent of aS\n" + \
1974 self.write_parameters(self.coups_indep)
1975 replace_dict['dependent_couplings'] = \
1976 "// Model couplings dependent on aS\n" + \
1977 self.write_parameters(self.coups_dep.values())
1978
1979 replace_dict['set_independent_parameters'] = \
1980 self.write_set_parameters(self.params_indep)
1981 replace_dict['set_independent_couplings'] = \
1982 self.write_set_parameters(self.coups_indep)
1983 replace_dict['set_dependent_parameters'] = \
1984 self.write_set_parameters(self.params_dep)
1985 replace_dict['set_dependent_couplings'] = \
1986 self.write_set_parameters(self.coups_dep.values())
1987
1988 replace_dict['print_independent_parameters'] = \
1989 self.write_print_parameters(self.params_indep)
1990 replace_dict['print_independent_couplings'] = \
1991 self.write_print_parameters(self.coups_indep)
1992 replace_dict['print_dependent_parameters'] = \
1993 self.write_print_parameters(self.params_dep)
1994 replace_dict['print_dependent_couplings'] = \
1995 self.write_print_parameters(self.coups_dep.values())
1996
1997 if 'include_prefix' not in replace_dict:
1998 replace_dict['include_prefix'] = ''
1999
2000
2001 file_h = read_template_file(self.param_template_h) % \
2002 replace_dict
2003 file_cc = read_template_file(self.param_template_cc) % \
2004 replace_dict
2005
2006 return file_h, file_cc
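# Illustrative sketch (not part of the original file) of the substitution
# mechanism used above: the .inc templates are plain text with %(key)s
# placeholders that are filled from replace_dict via Python's % operator. The
# template string below is a made-up miniature, not one of the real templates.
template = "// %(info_lines)s\nnamespace Parameters_%(model_name)s_ns {}\n"
print(template % {'info_lines': 'generated by MadGraph5_aMC@NLO',
                  'model_name': 'sm'})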
2007
2009 """Write out the definitions of parameters"""
2010
2011
2012 type_param_dict = {}
2013
2014 for param in params:
2015 type_param_dict[param.type] = \
2016 type_param_dict.setdefault(param.type, []) + [param.name]
2017
2018
2019
2020 res_strings = []
2021 for key in type_param_dict:
2022 res_strings.append("%s %s;" % (self.type_dict[key],
2023 ",".join(type_param_dict[key])))
2024
2025 return "\n".join(res_strings)
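# Illustrative sketch (not part of the original file) of the declarations that
# the loop above produces: one C++ declaration per parameter type. The
# parameter names and the type mapping are hypothetical stand-ins for
# self.type_dict.
type_dict = {'real': 'double', 'complex': 'std::complex<double>'}
type_param_dict = {'real': ['mdl_MZ', 'mdl_MW'], 'complex': ['GC_1', 'GC_2']}
lines = ["%s %s;" % (type_dict[key], ",".join(names))
         for key, names in type_param_dict.items()]
# -> ['double mdl_MZ,mdl_MW;', 'std::complex<double> GC_1,GC_2;']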
2026
2028 """Write out the lines of independent parameters"""
2029
2030
2031
2032 res_strings = []
2033 for param in params:
2034 res_strings.append("%s" % param.expr)
2035
2036
2037
2038 for particle in self.model.get('particles'):
2039 if particle.is_fermion() and particle.get('self_antipart') and \
2040 particle.get('width').lower() != 'zero':
2041 res_strings.append("if (%s < 0)" % particle.get('mass'))
2042 res_strings.append("%(width)s = -abs(%(width)s);" % \
2043 {"width": particle.get('width')})
2044
2045 return "\n".join(res_strings)
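# Illustrative sketch (not part of the original file) of the extra C++ lines
# appended above for a self-conjugate fermion with a non-zero width: if the
# mass parameter is negative, the width is forced negative as well. The
# parameter names mdl_Mneu1 and mdl_Wneu1 are hypothetical.
mass, width = "mdl_Mneu1", "mdl_Wneu1"
extra = ["if (%s < 0)" % mass,
         "%(width)s = -abs(%(width)s);" % {"width": width}]
# -> ['if (mdl_Mneu1 < 0)', 'mdl_Wneu1 = -abs(mdl_Wneu1);']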
2046
2048 """Write out the lines of independent parameters"""
2049
2050
2051
2052 res_strings = []
2053 for param in params:
2054 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name))
2055
2056 return "\n".join(res_strings)
2057
2058
2059
2061 """Generate the hel_amps_model.h and hel_amps_model.cc files, which
2062 have the complete set of generalized Helas routines for the model"""
2063
2064 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
2065 os.makedirs(os.path.join(self.dir_path, self.include_dir))
2066 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
2067 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
2068
2069 model_h_file = os.path.join(self.dir_path, self.include_dir,
2070 'HelAmps_%s.h' % self.model_name)
2071 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
2072 'HelAmps_%s.cc' % self.model_name)
2073
2074 replace_dict = {}
2075
2076 replace_dict['output_name'] = self.output_name
2077 replace_dict['info_lines'] = get_mg5_info_lines()
2078 replace_dict['namespace'] = self.namespace
2079 replace_dict['model_name'] = self.model_name
2080
2081
2082
2083 template_h_files = self.read_aloha_template_files(ext = 'h')
2084 template_cc_files = self.read_aloha_template_files(ext = 'cc')
2085
2086 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name'))
2087 aloha_model.add_Lorentz_object(self.model.get('lorentz'))
2088
2089 if self.wanted_lorentz:
2090 aloha_model.compute_subset(self.wanted_lorentz)
2091 else:
2092 aloha_model.compute_all(save=False, custom_propa=True)
2093
2094 for abstracthelas in dict(aloha_model).values():
2095 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP',
2096 mode='no_include')
2097
2098 template_h_files.append(h_rout)
2099 template_cc_files.append(cc_rout)
2100
2101
2102
2103
2104
2105
2106
2107
2108
2109 replace_dict['function_declarations'] = '\n'.join(template_h_files)
2110 replace_dict['function_definitions'] = '\n'.join(template_cc_files)
2111
2112 file_h = read_template_file(self.aloha_template_h) % replace_dict
2113 file_cc = read_template_file(self.aloha_template_cc) % replace_dict
2114
2115
2116 writers.CPPWriter(model_h_file).writelines(file_h)
2117 writers.CPPWriter(model_cc_file).writelines(file_cc)
2118
2119 logger.info("Created files %s and %s in directory" \
2120 % (os.path.split(model_h_file)[-1],
2121 os.path.split(model_cc_file)[-1]))
2122 logger.info("%s and %s" % \
2123 (os.path.split(model_h_file)[0],
2124 os.path.split(model_cc_file)[0]))
2125
2126
2128 """Read all ALOHA template files with extension ext, strip them of
2129 compiler options and namespace options, and return in a list"""
2130
2131 template_files = []
2132 for filename in misc.glob('*.%s' % ext, pjoin(MG5DIR, 'aloha','template_files')):
2133 template_file_string = ""
2134 with open(filename, 'r') as template_file:
2135 for line in template_file:
2136 line = self.clean_line(line)
2137 if not line:
2138 continue
2139 template_file_string += line.strip() + '\n'
2142 template_files.append(template_file_string)
2143
2144 return template_files
2145
2146
2147
2148
2149
2150
2151
2152
2153
2154
2155
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2169 """Strip a line of compiler options and namespace options."""
2170
2171 if self.compiler_option_re.match(line) or self.namespace_re.match(line):
2172 return ""
2173
2174 return line
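# Illustrative sketch (not part of the original file) of the filtering done by
# clean_line() above: preprocessor lines and "using namespace" lines are
# dropped, everything else passes through. The two regular expressions are
# hypothetical stand-ins for the compiler_option_re and namespace_re
# attributes referenced above.
import re
compiler_option_re = re.compile(r'^#\w')
namespace_re = re.compile(r'^using namespace')
def clean(line):
    if compiler_option_re.match(line) or namespace_re.match(line):
        return ""
    return line
assert clean("#include <cmath>\n") == ""
assert clean("using namespace std;\n") == ""
assert clean("void FFV1_0();\n") == "void FFV1_0();\n"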
2175
2176
2177
2178
2179 def generate_example_file_pythia8(path,
2180 model_path,
2181 process_names,
2182 exporter,
2183 main_file_name = "",
2184 example_dir = "examples",
2185 version="8.2"):
2186 """Generate the main_model_name.cc file and Makefile in the examples dir"""
2187
2188 filepath = os.path.join(path, example_dir)
2189 if not os.path.isdir(filepath):
2190 os.makedirs(filepath)
2191
2192 replace_dict = {}
2193
2194
2195 info_lines = get_mg5_info_lines()
2196 replace_dict['info_lines'] = info_lines
2197
2198
2199 replace_dict['model_name'] = exporter.model_name
2200
2201
2202 replace_dict['include_lines'] = \
2203 "\n".join(["#include \"%s.h\"" % proc_name \
2204 for proc_name in process_names])
2205
2206
2207 replace_dict['sigma_pointer_lines'] = \
2208 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \
2209 for proc_name in process_names])
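# Illustrative sketch (not part of the original file) of the two strings built
# above, for two hypothetical process class names:
process_names = ["Sigma_sm_gg_ttx", "Sigma_sm_uux_ttx"]
include_lines = "\n".join("#include \"%s.h\"" % p for p in process_names)
sigma_pointer_lines = "\n".join("pythia.setSigmaPtr(new %s());" % p
                                for p in process_names)
# include_lines:
#   #include "Sigma_sm_gg_ttx.h"
#   #include "Sigma_sm_uux_ttx.h"
# sigma_pointer_lines:
#   pythia.setSigmaPtr(new Sigma_sm_gg_ttx());
#   pythia.setSigmaPtr(new Sigma_sm_uux_ttx());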
2210
2211
2212 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path,
2213 "param_card_%s.dat" % \
2214 exporter.model_name)
2215
2216
2217 if version == "8.2":
2218 template_path = 'pythia8.2_main_example_cc.inc'
2219 makefile_path = 'pythia8.2_main_makefile.inc'
2220 replace_dict['include_prefix'] = 'Pythia8/'
2221 else:
2222 template_path = 'pythia8_main_example_cc.inc'
2223 makefile_path = 'pythia8_main_makefile.inc'
2224 replace_dict['include_prefix'] = ''
2225
2226
2227 file = read_template_file(template_path) % \
2228 replace_dict
2229
2230 if not main_file_name:
2231 num = 1
2232 while os.path.exists(os.path.join(filepath,
2233 'main_%s_%i.cc' % (exporter.model_name, num))) or \
2234 os.path.exists(os.path.join(filepath,
2235 'main_%s_%i' % (exporter.model_name, num))):
2236 num += 1
2237 main_file_name = str(num)
2238
2239 main_file = 'main_%s_%s' % (exporter.model_name,
2240 main_file_name)
2241
2242 main_filename = os.path.join(filepath, main_file + '.cc')
2243
2244
2245 writers.CPPWriter(main_filename).writelines(file)
2246
2247 replace_dict = {}
2248
2249
2250 replace_dict['info_lines'] = get_mg5_info_lines()
2251
2252 replace_dict['main_file'] = main_file
2253
2254 replace_dict['process_dir'] = model_path
2255
2256 replace_dict['include_dir'] = exporter.include_dir
2257
2258
2259 file = read_template_file(makefile_path) % replace_dict
2260
2261 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \
2262 (exporter.model_name, main_file_name))
2263
2264
2265 open(make_filename, 'w').write(file)
2266
2267 logger.info("Created files %s and %s in directory %s" \
2268 % (os.path.split(main_filename)[-1],
2269 os.path.split(make_filename)[-1],
2270 os.path.split(make_filename)[0]))
2271 return main_file, make_filename
2272
2282 """ A converter of the UFO-MG5 Model to the Pythia 8 format """
2283
2284
2285 output_name = 'Pythia 8'
2286 namespace = 'Pythia8'
2287
2288
2289 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(pow(pd->m0(23),2))',
2290 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(pow(pd->m0(23),2))*pow(pd->m0(23),2)/(sqrt(2.)*pow(pd->m0(24),2)*(pow(pd->m0(23),2)-pow(pd->m0(24),2)))',
2291 ('SMINPUTS', (3,)): 'alpS',
2292 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)',
2293 }
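# Illustrative sketch (not part of the original file) of how the table above is
# used when external SLHA parameters are translated below: the mapped Pythia 8
# expression simply becomes the right-hand side of the generated assignment.
# The parameter name "aS" is a hypothetical example.
slha_to_expr = {('SMINPUTS', (3,)): 'alpS'}
key, name = ('SMINPUTS', (3,)), 'aS'
assignment = name + ' = ' + slha_to_expr[key] + ';'
assert assignment == 'aS = alpS;'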
2294
2295
2296 param_template_h = 'pythia8_model_parameters_h.inc'
2297 param_template_cc = 'pythia8_model_parameters_cc.inc'
2298
2300 """Extract the model parameters from Pythia 8, and store them in
2301 the two lists params_indep and params_dep"""
2302
2303
2304 keys = sorted(self.model['parameters'].keys(), key=len)
2306 params_ext = []
2307 for key in keys:
2308 if key == ('external',):
2309 params_ext += [p for p in self.model['parameters'][key] if p.name]
2310 elif 'aS' in key:
2311 for p in self.model['parameters'][key]:
2312 self.params_dep.append(base_objects.ModelVariable(p.name,
2313 p.name + " = " + \
2314 self.p_to_cpp.parse(p.expr) + ';',
2315 p.type,
2316 p.depend))
2317 else:
2318 for p in self.model['parameters'][key]:
2319 self.params_indep.append(base_objects.ModelVariable(p.name,
2320 p.name + " = " + \
2321 self.p_to_cpp.parse(p.expr) + ';',
2322 p.type,
2323 p.depend))
2324
2325
2326
2327
2328 while params_ext:
2329 param = params_ext.pop(0)
2330 key = (param.lhablock, tuple(param.lhacode))
2331 if 'aS' in self.slha_to_depend.setdefault(key, ()):
2332
2333 self.params_dep.insert(0,
2334 base_objects.ModelVariable(param.name,
2335 param.name + ' = ' + \
2336 self.slha_to_expr[key] + ';',
2337 'real'))
2338 else:
2339 try:
2340
2341 self.params_indep.insert(0,
2342 base_objects.ModelVariable(param.name,
2343 param.name + ' = ' + \
2344 self.slha_to_expr[key] + ';',
2345 'real'))
2346 except KeyError:
2347
2348
2349 if param.lhablock == 'YUKAWA':
2350 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \
2351 % param.lhacode[0]
2352 if param.lhablock == 'MASS':
2353 self.slha_to_expr[key] = 'pd->m0(%i)' \
2354 % param.lhacode[0]
2355 if param.lhablock == 'DECAY':
2356 self.slha_to_expr[key] = \
2357 'pd->mWidth(%i)' % param.lhacode[0]
2358 if key in self.slha_to_expr:
2359 self.params_indep.insert(0,\
2360 base_objects.ModelVariable(param.name,
2361 param.name + "=" + self.slha_to_expr[key] \
2362 + ';',
2363 'real'))
2364 else:
2365
2366 if len(param.lhacode) == 1:
2367 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \
2368 (param.lhablock.lower(),
2369 param.lhacode[0],
2370 param.name) + \
2371 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2372 + "%s = %e;}") % (param.name, param.value.real,
2373 param.name, param.value.real)
2374 elif len(param.lhacode) == 2:
2375 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \
2376 (param.lhablock.lower(),
2377 param.lhacode[0],
2378 param.lhacode[1],
2379 param.name) + \
2380 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2381 + "%s = %e;}") % (param.name, param.value.real,
2382 param.name, param.value.real)
2383 elif len(param.lhacode) == 3:
2384 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \
2385 (param.lhablock.lower(),
2386 param.lhacode[0],
2387 param.lhacode[1],
2388 param.lhacode[2],
2389 param.name) + \
2390 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2391 + "%s = %e;}") % (param.name, param.value.real,
2392 param.name, param.value.real)
2393 else:
2394 raise MadGraph5Error("Only SLHA blocks with 1, 2 or 3 indices are supported")
2395 self.params_indep.insert(0,
2396 base_objects.ModelVariable(param.name,
2397 expression,
2398 'real'))
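# Illustrative sketch (not part of the original file) of the C++ guard code
# generated above for a 1-index SLHA entry: the value is read from the SLHA
# card via slhaPtr->getEntry, with a warning and a fallback to the default
# value if the entry is missing. Block, index, name and default are
# hypothetical.
block, index, name, default = "frblock", 1, "mdl_cG", 0.0
cpp = ("if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % (block, index, name)
       + "cout << \"Warning, setting %s to %e\" << endl;\n" % (name, default)
       + "%s = %e;}" % (name, default))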
2399
2401 """Generate the Makefile, which creates library files."""
2402
2403 makefilename = os.path.join(self.dir_path, self.cc_file_dir,
2404 'Makefile')
2405
2406 replace_dict = {}
2407
2408 replace_dict['info_lines'] = get_mg5_info_lines()
2409 replace_dict['model'] = self.model_name
2410
2411 if self.default_replace_dict['version'] == "8.2":
2412 path = 'pythia8.2_makefile.inc'
2413 else:
2414 path = 'pythia8_makefile.inc'
2415 makefile = read_template_file(path) % replace_dict
2416
2417
2418 open(makefilename, 'w').write(makefile)
2419
2420 logger.info("Created %s in directory %s" \
2421 % (os.path.split(makefilename)[-1],
2422 os.path.split(makefilename)[0]))
2423
2425 """Generate the param_card for the model."""
2426
2427 paramcardname = os.path.join(self.dir_path, self.cc_file_dir,
2428 'param_card_%s.dat' % self.model_name)
2429
2430 open(paramcardname, 'w').write(\
2431 self.model.write_param_card())
2432
2433 logger.info("Created %s in directory %s" \
2434 % (os.path.split(paramcardname)[-1],
2435 os.path.split(paramcardname)[0]))
2436