16 """Methods and classes to export models and matrix elements to Pythia 8
17 and C++ Standalone format."""
18
19 import fractions
20 import glob
21 import itertools
22 import logging
23 from math import fmod
24 import os
25 import re
26 import shutil
27 import subprocess
28
29 import madgraph.core.base_objects as base_objects
30 import madgraph.core.color_algebra as color
31 import madgraph.core.helas_objects as helas_objects
32 import madgraph.iolibs.drawing_eps as draw
33 import madgraph.iolibs.files as files
34 import madgraph.iolibs.helas_call_writers as helas_call_writers
35 import madgraph.iolibs.file_writers as writers
36 import madgraph.iolibs.template_files as template_files
37 import madgraph.iolibs.ufo_expression_parsers as parsers
38 from madgraph import MadGraph5Error, InvalidCmd, MG5DIR
39 from madgraph.iolibs.files import cp, ln, mv
40
41 import madgraph.various.misc as misc
42
43 import aloha.create_aloha as create_aloha
44 import aloha.aloha_writers as aloha_writers
45
46 _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
47 logger = logging.getLogger('madgraph.export_pythia8')
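

# Typical export flow (an illustrative sketch, not invoked from this module;
# 'model', 'matrix_elements' and 'cpp_helas_call_writer' are assumed to be
# prepared by the caller, e.g. a loaded UFO model, a HelasMultiProcess and a
# helas_call_writers.CPPUFOHelasCallWriter):
#
#     model_name, process_dir = convert_model_to_pythia8(model, pythia_dir)
#     exporter = generate_process_files_pythia8(matrix_elements,
#                                               cpp_helas_call_writer)
#     generate_example_file_pythia8(pythia_dir, process_dir,
#                                   [exporter.process_name], exporter)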
def setup_cpp_standalone_dir(dirpath, model):
    """Prepare export_dir as standalone_cpp directory, including:
57 src (for RAMBO, model and ALOHA files + makefile)
58 lib (with compiled libraries from src)
59 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories)
60 """
61
62 cwd = os.getcwd()
63
64 try:
65 os.mkdir(dirpath)
66 except os.error as error:
67 logger.warning(error.strerror + " " + dirpath)
68
69 try:
70 os.chdir(dirpath)
71 except os.error:
72 logger.error('Could not cd to directory %s' % dirpath)
73 return 0
74
75 logger.info('Creating subdirectories in directory %s' % dirpath)
76
77 try:
78 os.mkdir('src')
79 except os.error as error:
80 logger.warning(error.strerror + " " + dirpath)
81
82 try:
83 os.mkdir('lib')
84 except os.error as error:
85 logger.warning(error.strerror + " " + dirpath)
86
87 try:
88 os.mkdir('Cards')
89 except os.error as error:
90 logger.warning(error.strerror + " " + dirpath)
91
92 try:
93 os.mkdir('SubProcesses')
94 except os.error as error:
95 logger.warning(error.strerror + " " + dirpath)
96
97
98 open(os.path.join("Cards","param_card.dat"), 'w').write(\
99 model.write_param_card())
100
101 src_files = ['rambo.h', 'rambo.cc', 'read_slha.h', 'read_slha.cc']
102
103
104 for f in src_files:
105 cp(_file_path + 'iolibs/template_files/' + f, 'src')
106
107
108 makefile = read_template_file('Makefile_sa_cpp_src') % \
109 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
110 open(os.path.join('src', 'Makefile'), 'w').write(makefile)
111
112
113 cp(_file_path + 'iolibs/template_files/check_sa.cpp', 'SubProcesses')
114
115
116 makefile = read_template_file('Makefile_sa_cpp_sp') % \
117 {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
118 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
119
120
121 os.chdir(cwd)
122
129
def generate_subprocess_directory_standalone_cpp(matrix_element,
                                                 cpp_helas_call_writer,
                                                 path=os.getcwd()):
    """Generate the Pxxxxx directory for a subprocess in C++ standalone,
    including the necessary .h and .cc files"""
132
133 cwd = os.getcwd()
134
135
136 process_exporter_cpp = ProcessExporterCPP(matrix_element,
137 cpp_helas_call_writer)
138
139
140 dirpath = os.path.join(path, \
141 "P%d_%s" % (process_exporter_cpp.process_number,
142 process_exporter_cpp.process_name))
143 try:
144 os.mkdir(dirpath)
145 except os.error as error:
146 logger.warning(error.strerror + " " + dirpath)
147
148 try:
149 os.chdir(dirpath)
150 except os.error:
151 logger.error('Could not cd to directory %s' % dirpath)
152 return 0
153
154 logger.info('Creating files in directory %s' % dirpath)
155
156 process_exporter_cpp.path = dirpath
157
158 process_exporter_cpp.generate_process_files()
159
160 linkfiles = ['check_sa.cpp', 'Makefile']
161
162
163 for file in linkfiles:
164 ln('../%s' % file)
165
166
167 os.chdir(cwd)
168
169 return
170
def make_model_cpp(dir_path):
    """Make the model library in a C++ standalone directory"""
173
174 source_dir = os.path.join(dir_path, "src")
175
176 logger.info("Running make for src")
177 misc.compile(cwd=source_dir)
178
class ProcessExporterCPP(object):
    """Class to take care of exporting a set of matrix elements to
    C++ format."""
185
186
187 process_dir = '.'
188 include_dir = '.'
189 process_template_h = 'cpp_process_h.inc'
190 process_template_cc = 'cpp_process_cc.inc'
191 process_class_template = 'cpp_process_class.inc'
192 process_definition_template = 'cpp_process_function_definitions.inc'
193 process_wavefunction_template = 'cpp_process_wavefunctions.inc'
194 process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc'

    class ProcessExporterCPPError(MadGraph5Error):
        pass

    def __init__(self, matrix_elements, cpp_helas_call_writer, process_string="",
                 process_number=0, path=os.getcwd()):
201 """Initiate with matrix elements, helas call writer, process
202 string, path. Generate the process .h and .cc files."""
203
204 if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
205 self.matrix_elements = matrix_elements.get('matrix_elements')
206 elif isinstance(matrix_elements, helas_objects.HelasMatrixElement):
207 self.matrix_elements = \
208 helas_objects.HelasMatrixElementList([matrix_elements])
209 elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList):
210 self.matrix_elements = matrix_elements
211 else:
            raise base_objects.PhysicsObject.PhysicsObjectError(
                "Wrong object type for matrix_elements")
214
215 if not self.matrix_elements:
216 raise MadGraph5Error("No matrix elements to export")
217
218 self.model = self.matrix_elements[0].get('processes')[0].get('model')
219 self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name'))
220
221 self.processes = sum([me.get('processes') for \
222 me in self.matrix_elements], [])
223 self.processes.extend(sum([me.get_mirror_processes() for \
224 me in self.matrix_elements], []))
225
226 self.nprocesses = len(self.matrix_elements)
227 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
228 self.nprocesses = 2*len(self.matrix_elements)
229
230 if process_string:
231 self.process_string = process_string
232 else:
233 self.process_string = self.processes[0].base_string()
234
235 if process_number:
236 self.process_number = process_number
237 else:
238 self.process_number = self.processes[0].get('id')
239
240 self.process_name = self.get_process_name()
241 self.process_class = "CPPProcess"
242
243 self.path = path
244 self.helas_call_writer = cpp_helas_call_writer
245
246 if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter):
            raise self.ProcessExporterCPPError(
                "helas_call_writer not CPPUFOHelasCallWriter")
249
250 self.nexternal, self.ninitial = \
251 self.matrix_elements[0].get_nexternal_ninitial()
252 self.nfinal = self.nexternal - self.ninitial
253
254
255
256
257 self.single_helicities = True
258
259 hel_matrix = self.get_helicity_matrix(self.matrix_elements[0])
260
261 for me in self.matrix_elements[1:]:
262 if self.get_helicity_matrix(me) != hel_matrix:
263 self.single_helicities = False
264
265 if self.single_helicities:
266
267
268
269
270 self.wavefunctions = []
271 wf_number = 0
272
273 for me in self.matrix_elements:
274 for iwf, wf in enumerate(me.get_all_wavefunctions()):
275 try:
276 old_wf = \
277 self.wavefunctions[self.wavefunctions.index(wf)]
278 wf.set('number', old_wf.get('number'))
279 except ValueError:
280 wf_number += 1
281 wf.set('number', wf_number)
282 self.wavefunctions.append(wf)
283
284
285 self.amplitudes = helas_objects.HelasAmplitudeList()
286 amp_number = 0
287 for me in self.matrix_elements:
288 for iamp, amp in enumerate(me.get_all_amplitudes()):
289 try:
290 old_amp = \
291 self.amplitudes[self.amplitudes.index(amp)]
292 amp.set('number', old_amp.get('number'))
293 except ValueError:
294 amp_number += 1
295 amp.set('number', amp_number)
296 self.amplitudes.append(amp)
297 diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes})
298 self.amplitudes = helas_objects.HelasMatrixElement({\
299 'diagrams': helas_objects.HelasDiagramList([diagram])})
300
301
302
    def generate_process_files(self):
        """Generate the .h and .cc files needed for C++, for the
        processes described by multi_matrix_element"""
306
307
308 if not os.path.isdir(os.path.join(self.path, self.include_dir)):
309 os.makedirs(os.path.join(self.path, self.include_dir))
310 filename = os.path.join(self.path, self.include_dir,
311 '%s.h' % self.process_class)
312 self.write_process_h_file(writers.CPPWriter(filename))
313
314 if not os.path.isdir(os.path.join(self.path, self.process_dir)):
315 os.makedirs(os.path.join(self.path, self.process_dir))
316 filename = os.path.join(self.path, self.process_dir,
317 '%s.cc' % self.process_class)
318 self.write_process_cc_file(writers.CPPWriter(filename))
319
320 logger.info('Created files %(process)s.h and %(process)s.cc in' % \
321 {'process': self.process_class} + \
322 ' directory %(dir)s' % {'dir': os.path.split(filename)[0]})
323
324
325
326
327
328
    def write_process_h_file(self, writer):
        """Write the class definition (.h) file for the process"""
331
332 if not isinstance(writer, writers.CPPWriter):
333 raise writers.CPPWriter.CPPWriterError(\
334 "writer not CPPWriter")
335
336 replace_dict = {}
337
338
339 info_lines = get_mg5_info_lines()
340 replace_dict['info_lines'] = info_lines
341
342
343 replace_dict['model_name'] = \
344 self.model_name
345
346
347 replace_dict['process_file_name'] = self.process_name
348
349
350 process_class_definitions = self.get_process_class_definitions()
351 replace_dict['process_class_definitions'] = process_class_definitions
352
353 file = read_template_file(self.process_template_h) % replace_dict
354
355
356 writer.writelines(file)
357
358
359
360
    def write_process_cc_file(self, writer):
        """Write the class member definition (.cc) file for the process
        described by matrix_element"""
364
365 if not isinstance(writer, writers.CPPWriter):
366 raise writers.CPPWriter.CPPWriterError(\
367 "writer not CPPWriter")
368
369 replace_dict = {}
370
371
372 info_lines = get_mg5_info_lines()
373 replace_dict['info_lines'] = info_lines
374
375
376 replace_dict['process_file_name'] = self.process_name
377
378
379 replace_dict['model_name'] = self.model_name
380
381
382
383 process_function_definitions = \
384 self.get_process_function_definitions()
385 replace_dict['process_function_definitions'] = \
386 process_function_definitions
387
388 file = read_template_file(self.process_template_cc) % replace_dict
389
390
391 writer.writelines(file)
392
393
394
395
    def get_process_class_definitions(self):
        """The complete class definition for the process"""
398
399 replace_dict = {}
400
401
402 replace_dict['model_name'] = self.model_name
403
404
405 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
406 self.matrix_elements])
407
408 replace_dict['process_lines'] = process_lines
409
410
411 replace_dict['nfinal'] = self.nfinal
412
413
414 replace_dict['ninitial'] = self.ninitial
415
416
417 replace_dict['process_class_name'] = self.process_name
418
419
420 process_definition = "%s (%s)" % (self.process_string,
421 self.model_name)
422 replace_dict['process_definition'] = process_definition
423
424 process = self.processes[0]
425
426 replace_dict['process_code'] = self.process_number
427 replace_dict['nexternal'] = self.nexternal
428 replace_dict['nprocesses'] = self.nprocesses
429
430 if self.single_helicities:
431 replace_dict['all_sigma_kin_definitions'] = \
432 """// Calculate wavefunctions
433 void calculate_wavefunctions(const int perm[], const int hel[]);
434 static const int nwavefuncs = %d;
435 std::complex<double> w[nwavefuncs][18];
436 static const int namplitudes = %d;
437 std::complex<double> amp[namplitudes];""" % \
438 (len(self.wavefunctions),
439 len(self.amplitudes.get_all_amplitudes()))
440 replace_dict['all_matrix_definitions'] = \
441 "\n".join(["double matrix_%s();" % \
442 me.get('processes')[0].shell_string().\
443 replace("0_", "") \
444 for me in self.matrix_elements])
445
446 else:
447 replace_dict['all_sigma_kin_definitions'] = \
448 "\n".join(["void sigmaKin_%s();" % \
449 me.get('processes')[0].shell_string().\
450 replace("0_", "") \
451 for me in self.matrix_elements])
452 replace_dict['all_matrix_definitions'] = \
453 "\n".join(["double matrix_%s(const int hel[]);" % \
454 me.get('processes')[0].shell_string().\
455 replace("0_", "") \
456 for me in self.matrix_elements])
457
458
459 file = read_template_file(self.process_class_template) % replace_dict
460
461 return file
462
    def get_process_function_definitions(self):
        """The complete member function definitions for the process"""
465
466 replace_dict = {}
467
468
469 replace_dict['model_name'] = self.model_name
470
471
472 replace_dict['process_lines'] = \
473 "\n".join([self.get_process_info_lines(me) for \
474 me in self.matrix_elements])
475
476
477 replace_dict['process_class_name'] = self.process_name
478
479 color_amplitudes = [me.get_color_amplitudes() for me in \
480 self.matrix_elements]
481
482 replace_dict['initProc_lines'] = \
483 self.get_initProc_lines(self.matrix_elements[0],
484 color_amplitudes)
485 replace_dict['reset_jamp_lines'] = \
486 self.get_reset_jamp_lines(color_amplitudes)
487 replace_dict['sigmaKin_lines'] = \
488 self.get_sigmaKin_lines(color_amplitudes)
489 replace_dict['sigmaHat_lines'] = \
490 self.get_sigmaHat_lines()
491
492 replace_dict['all_sigmaKin'] = \
493 self.get_all_sigmaKin_lines(color_amplitudes,
494 'CPPProcess')
495
496 file = read_template_file(self.process_definition_template) %\
497 replace_dict
498
499 return file
500
    def get_process_name(self):
        """Return the process class/file name for this set of processes"""
503
504 process_string = self.process_string
505
506
507 proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*(.*)$")
508 proc_number_re = proc_number_pattern.match(process_string)
509 proc_number = 0
510 if proc_number_re:
511 proc_number = int(proc_number_re.group(2))
512 process_string = proc_number_re.group(1) + \
513 proc_number_re.group(3)
514
515
516 order_pattern = re.compile("^(.+)\s+(\w+)\s*=\s*(\d+)\s*$")
517 order_re = order_pattern.match(process_string)
518 while order_re:
519 process_string = order_re.group(1)
520 order_re = order_pattern.match(process_string)
521
522 process_string = process_string.replace(' ', '')
523 process_string = process_string.replace('>', '_')
524 process_string = process_string.replace('+', 'p')
525 process_string = process_string.replace('-', 'm')
526 process_string = process_string.replace('~', 'x')
527 process_string = process_string.replace('/', '_no_')
528 process_string = process_string.replace('$', '_nos_')
529 process_string = process_string.replace('|', '_or_')
530 if proc_number != 0:
531 process_string = "%d_%s" % (proc_number, process_string)
532
533 process_string = "Sigma_%s_%s" % (self.model_name,
534 process_string)
535 return process_string
536
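    # A worked example of the mangling in get_process_name (hypothetical
    # input; 'sm' stands in for self.model_name):
    #
    #     self.process_string = "g g > t t~ QED=0 @2"
    #     self.get_process_name()   # -> 'Sigma_sm_2_gg_ttx'
    #
    # i.e. the process number and coupling orders are stripped, '>' becomes
    # '_', '~' becomes 'x', and the result is prefixed with the model name.
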
    def get_process_info_lines(self, matrix_element):
        """Return info lines describing the processes for this matrix element"""
539
        return "\n".join(["# " + process.nice_string().replace('\n', '\n# * ')
                          for process in matrix_element.get('processes')])
542
543
    def get_initProc_lines(self, matrix_element, color_amplitudes):
        """Get initProc_lines for function definition for Pythia 8 .cc file"""
546
547 initProc_lines = []
548
549 initProc_lines.append("// Set external particle masses for this matrix element")
550
551 for part in matrix_element.get_external_wavefunctions():
552 initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass'))
553 for i, colamp in enumerate(color_amplitudes):
554 initProc_lines.append("jamp2[%d] = new double[%d];" % \
555 (i, len(colamp)))
556
557 return "\n".join(initProc_lines)
558
    def get_reset_jamp_lines(self, color_amplitudes):
        """Get lines to reset jamps"""
561
562 ret_lines = ""
563 for icol, col_amp in enumerate(color_amplitudes):
564 ret_lines+= """for(int i=0;i < %(ncolor)d; i++)
565 jamp2[%(proc_number)d][i]=0.;\n""" % \
566 {"ncolor": len(col_amp), "proc_number": icol}
567 return ret_lines
568
569
593
594
    def get_sigmaKin_lines(self, color_amplitudes):
        """Get sigmaKin_lines for function definition for Pythia 8 .cc file"""
597
598
599 if self.single_helicities:
600 replace_dict = {}
601
602
603 replace_dict['ncomb'] = \
604 self.matrix_elements[0].get_helicity_combinations()
605
606
607 replace_dict['process_class_name'] = self.process_name
608
609
610 replace_dict['id1'] = self.processes[0].get('legs')[0].get('id')
611 replace_dict['id2'] = self.processes[0].get('legs')[1].get('id')
612
613
614 replace_dict['helicity_matrix'] = \
615 self.get_helicity_matrix(self.matrix_elements[0])
616
617
618 den_factors = [str(me.get_denominator_factor()) for me in \
619 self.matrix_elements]
620 if self.nprocesses != len(self.matrix_elements):
621 den_factors.extend(den_factors)
622 replace_dict['den_factors'] = ",".join(den_factors)
623 replace_dict['get_matrix_t_lines'] = "\n".join(
624 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
625 {"iproc": i, "proc_name": \
626 me.get('processes')[0].shell_string().replace("0_", "")} \
627 for i, me in enumerate(self.matrix_elements)])
628
629
630 mirror_matrix_lines = ""
631
632 if any([m.get('has_mirror_process') for m in self.matrix_elements]):
633 mirror_matrix_lines += \
634 """ // Mirror initial state momenta for mirror process
635 perm[0]=1;
636 perm[1]=0;
637 // Calculate wavefunctions
638 calculate_wavefunctions(perm, helicities[ihel]);
639 // Mirror back
640 perm[0]=0;
641 perm[1]=1;
642 // Calculate matrix elements
643 """
644
645 mirror_matrix_lines += "\n".join(
646 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \
647 {"iproc": i + len(self.matrix_elements), "proc_name": \
648 me.get('processes')[0].shell_string().replace("0_", "")} \
649 for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')])
650
651 replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines
652
653
654 file = \
655 read_template_file(\
656 self.process_sigmaKin_function_template) %\
657 replace_dict
658
659 return file
660
661 else:
662 ret_lines = "// Call the individual sigmaKin for each process\n"
663 return ret_lines + \
664 "\n".join(["sigmaKin_%s();" % \
665 me.get('processes')[0].shell_string().\
666 replace("0_", "") for \
667 me in self.matrix_elements])
668
    def get_all_sigmaKin_lines(self, color_amplitudes, class_name):
        """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file"""
671
672 ret_lines = []
673 if self.single_helicities:
674 ret_lines.append(\
675 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \
676 class_name)
677 ret_lines.append("// Calculate wavefunctions for all processes")
678 ret_lines.append(self.get_calculate_wavefunctions(\
679 self.wavefunctions, self.amplitudes))
680 ret_lines.append("}")
681 else:
682 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \
683 for i, me in enumerate(self.matrix_elements)])
684 ret_lines.extend([self.get_matrix_single_process(i, me,
685 color_amplitudes[i],
686 class_name) \
687 for i, me in enumerate(self.matrix_elements)])
688 return "\n".join(ret_lines)
689
690
    def get_sigmaKin_single_process(self, i, matrix_element):
        """Write sigmaKin for each process"""
693
694
695
696 replace_dict = {}
697
698
699 replace_dict['proc_name'] = \
700 matrix_element.get('processes')[0].shell_string().replace("0_", "")
701
702
703 replace_dict['process_class_name'] = self.process_name
704
705
706 replace_dict['proc_number'] = i
707
708
709 replace_dict['ncomb'] = matrix_element.get_helicity_combinations()
710
711
712 replace_dict['helicity_matrix'] = \
713 self.get_helicity_matrix(matrix_element)
714
715 replace_dict['den_factor'] = matrix_element.get_denominator_factor()
716
717 file = \
718 read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\
719 replace_dict
720
721 return file
722
    def get_matrix_single_process(self, i, matrix_element, color_amplitudes,
                                  class_name):
        """Write matrix() for each process"""
726
727
728
729 replace_dict = {}
730
731
732 replace_dict['proc_name'] = \
733 matrix_element.get('processes')[0].shell_string().replace("0_", "")
734
735
736
737 if self.single_helicities:
738 replace_dict['matrix_args'] = ""
739 replace_dict['all_wavefunction_calls'] = "int i, j;"
740 else:
741 replace_dict['matrix_args'] = "const int hel[]"
742 wavefunctions = matrix_element.get_all_wavefunctions()
743 replace_dict['all_wavefunction_calls'] = \
744 """const int nwavefuncs = %d;
745 std::complex<double> w[nwavefuncs][18];
746 """ % len(wavefunctions)+ \
747 self.get_calculate_wavefunctions(wavefunctions, [])
748
749
750 replace_dict['process_class_name'] = class_name
751
752
753 replace_dict['proc_number'] = i
754
755
756 replace_dict['ncolor'] = len(color_amplitudes)
757
758 replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes()
759
760
761 replace_dict['color_matrix_lines'] = \
762 self.get_color_matrix_lines(matrix_element)
763
764 replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes)
765
766 file = read_template_file('cpp_process_matrix.inc') % \
767 replace_dict
768
769 return file
770
771
    def get_sigmaHat_lines(self):
        """Get sigmaHat_lines for function definition for Pythia 8 .cc file"""
774
775
776 beams = set([(process.get('legs')[0].get('id'),
777 process.get('legs')[1].get('id')) \
778 for process in self.processes])
779
780 res_lines = []
781
782
783
784 res_lines.append("// Select between the different processes")
785 for ibeam, beam_parts in enumerate(beams):
786
787 if ibeam == 0:
788 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
789 else:
790 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
791
792
793 beam_processes = [(i, me) for (i, me) in \
794 enumerate(self.matrix_elements) if beam_parts in \
795 [(process.get('legs')[0].get('id'),
796 process.get('legs')[1].get('id')) \
797 for process in me.get('processes')]]
798
799
800 beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \
801 enumerate(self.matrix_elements) if beam_parts in \
802 [(process.get('legs')[0].get('id'),
803 process.get('legs')[1].get('id')) \
804 for process in me.get_mirror_processes()]])
805
806
807 res_lines.append("// Add matrix elements for processes with beams %s" % \
808 repr(beam_parts))
809 res_lines.append("return %s;" % \
810 ("+".join(["matrix_element[%i]*%i" % \
811 (i, len([proc for proc in \
812 me.get('processes') if beam_parts == \
813 (proc.get('legs')[0].get('id'),
814 proc.get('legs')[1].get('id')) or \
815 me.get('has_mirror_process') and \
816 beam_parts == \
817 (proc.get('legs')[1].get('id'),
818 proc.get('legs')[0].get('id'))])) \
819 for (i, me) in beam_processes]).\
820 replace('*1', '')))
821 res_lines.append("}")
822
823
824 res_lines.append("else {")
825 res_lines.append("// Return 0 if not correct initial state assignment")
826 res_lines.append(" return 0.;}")
827
828 return "\n".join(res_lines)
829
830
    def get_helicity_matrix(self, matrix_element):
        """Return the Helicity matrix definition lines for this matrix element"""
833
        helicity_line = "static const int helicities[ncomb][nexternal] = {"
835 helicity_line_list = []
836
837 for helicities in matrix_element.get_helicity_matrix():
838 helicity_line_list.append("{"+",".join(['%d'] * len(helicities)) % \
839 tuple(helicities) + "}")
840
841 return helicity_line + ",".join(helicity_line_list) + "};"
842
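    # For a 2 -> 2 process with four external legs the string built above
    # expands to C++ of the form (helicity values depend on the process and
    # are shown only as an illustration):
    #
    #     static const int helicities[ncomb][nexternal] = {{-1,-1,-1,-1},...,{1,1,1,1}};
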
    def get_den_factor_line(self, matrix_element):
        """Return the denominator factor line for this matrix element"""
845
846 return "const int denominator = %d;" % \
847 matrix_element.get_denominator_factor()
848
    def get_color_matrix_lines(self, matrix_element):
        """Return the color matrix definition lines for this matrix element."""
852
853 if not matrix_element.get('color_matrix'):
854 return "\n".join(["static const double denom[1] = {1.};",
855 "static const double cf[1][1] = {1.};"])
856 else:
857 color_denominators = matrix_element.get('color_matrix').\
858 get_line_denominators()
859 denom_string = "static const double denom[ncolor] = {%s};" % \
860 ",".join(["%i" % denom for denom in color_denominators])
861
862 matrix_strings = []
863 my_cs = color.ColorString()
864 for index, denominator in enumerate(color_denominators):
865
866 num_list = matrix_element.get('color_matrix').\
867 get_line_numerators(index, denominator)
868
869 matrix_strings.append("{%s}" % \
870 ",".join(["%d" % i for i in num_list]))
871 matrix_string = "static const double cf[ncolor][ncolor] = {" + \
872 ",".join(matrix_strings) + "};"
873 return "\n".join([denom_string, matrix_string])
874
    def get_jamp_lines(self, color_amplitudes):
        """Return the jamp = sum(fermionfactor * amp[i]) lines"""
877
878 res_list = []
879
880 for i, coeff_list in enumerate(color_amplitudes):
881
882 res = "jamp[%i]=" % i
883
884
885
886 list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list]
887 common_factor = False
888 diff_fracs = list(set(list_fracs))
889 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
890 common_factor = True
891 global_factor = diff_fracs[0]
892 res = res + '%s(' % coeff(1, global_factor, False, 0)
893
894 for (coefficient, amp_number) in coeff_list:
895 if common_factor:
896 res = res + "%samp[%d]" % (coeff(coefficient[0],
897 coefficient[1] / abs(coefficient[1]),
898 coefficient[2],
899 coefficient[3]),
900 amp_number - 1)
901 else:
902 res = res + "%samp[%d]" % (coeff(coefficient[0],
903 coefficient[1],
904 coefficient[2],
905 coefficient[3]),
906 amp_number - 1)
907
908 if common_factor:
909 res = res + ')'
910
911 res += ';'
912
913 res_list.append(res)
914
915 return "\n".join(res_list)
916
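    # A typical line produced by get_jamp_lines (illustrative only; the signs,
    # factors and amplitude numbers depend on the color decomposition):
    #
    #     jamp[0]=-amp[0]+std::complex<double>(0,1)*amp[1];
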
    @staticmethod
    def get_model_name(name):
        """Return a C++/file-name safe version of the model name."""
        # Assumed reconstruction of the collapsed original: map characters
        # that are not valid in C++ identifiers to safe replacements.
        name = name.replace('-', '_')
        name = name.replace('+', '_plus_')
        return name

925
926
927
def generate_process_files_pythia8(multi_matrix_element, cpp_helas_call_writer,
                                   process_string="",
                                   process_number=0, path=os.getcwd()):
931
932 """Generate the .h and .cc files needed for Pythia 8, for the
933 processes described by multi_matrix_element"""
934
935 process_exporter_pythia8 = ProcessExporterPythia8(multi_matrix_element,
936 cpp_helas_call_writer,
937 process_string,
938 process_number,
939 path)
940
941
942 model = process_exporter_pythia8.model
943 model_name = process_exporter_pythia8.model_name
944 process_exporter_pythia8.process_dir = \
945 'Processes_%(model)s' % {'model': \
946 model_name}
947 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir
948 process_exporter_pythia8.generate_process_files()
949 return process_exporter_pythia8
950
class ProcessExporterPythia8(ProcessExporterCPP):
    """Class to take care of exporting a set of matrix elements to
    Pythia 8 format."""
957
958
959 process_template_h = 'pythia8_process_h.inc'
960 process_template_cc = 'pythia8_process_cc.inc'
961 process_class_template = 'pythia8_process_class.inc'
962 process_definition_template = 'pythia8_process_function_definitions.inc'
963 process_wavefunction_template = 'pythia8_process_wavefunctions.inc'
964 process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc'
965
    def __init__(self, *args, **opts):
        """Set process class name"""
968
969 super(ProcessExporterPythia8, self).__init__(*args, **opts)
970
971
972 for me in self.matrix_elements:
973 if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]:
974 nex,nin = me.get_nexternal_ninitial()
                raise InvalidCmd(
                    "Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" %
                    (nin, nex - nin))
978
979 self.process_class = self.process_name
980
981
982
983
984
985
    def get_process_class_definitions(self):
        """The complete Pythia 8 class definition for the process"""
988
989 replace_dict = {}
990
991
992 replace_dict['model_name'] = self.model_name
993
994
995 process_lines = "\n".join([self.get_process_info_lines(me) for me in \
996 self.matrix_elements])
997
998 replace_dict['process_lines'] = process_lines
999
1000
1001 replace_dict['nfinal'] = self.nfinal
1002
1003
1004 replace_dict['process_class_name'] = self.process_name
1005
1006
1007 process_definition = "%s (%s)" % (self.process_string,
1008 self.model_name)
1009 replace_dict['process_definition'] = process_definition
1010
1011 process = self.processes[0]
1012 replace_dict['process_code'] = 10000 + \
1013 100*process.get('id') + \
1014 self.process_number
1015
1016 replace_dict['inFlux'] = self.get_process_influx()
1017
1018 replace_dict['id_masses'] = self.get_id_masses(process)
1019 replace_dict['resonances'] = self.get_resonance_lines()
1020
1021 replace_dict['nexternal'] = self.nexternal
1022 replace_dict['nprocesses'] = self.nprocesses
1023
1024 if self.single_helicities:
1025 replace_dict['all_sigma_kin_definitions'] = \
1026 """// Calculate wavefunctions
1027 void calculate_wavefunctions(const int perm[], const int hel[]);
1028 static const int nwavefuncs = %d;
1029 std::complex<double> w[nwavefuncs][18];
1030 static const int namplitudes = %d;
1031 std::complex<double> amp[namplitudes];""" % \
1032 (len(self.wavefunctions),
1033 len(self.amplitudes.get_all_amplitudes()))
1034 replace_dict['all_matrix_definitions'] = \
1035 "\n".join(["double matrix_%s();" % \
1036 me.get('processes')[0].shell_string().\
1037 replace("0_", "") \
1038 for me in self.matrix_elements])
1039
1040 else:
1041 replace_dict['all_sigma_kin_definitions'] = \
1042 "\n".join(["void sigmaKin_%s();" % \
1043 me.get('processes')[0].shell_string().\
1044 replace("0_", "") \
1045 for me in self.matrix_elements])
1046 replace_dict['all_matrix_definitions'] = \
1047 "\n".join(["double matrix_%s(const int hel[]);" % \
1048 me.get('processes')[0].shell_string().\
1049 replace("0_", "") \
1050 for me in self.matrix_elements])
1051
1052
1053 file = read_template_file('pythia8_process_class.inc') % replace_dict
1054
1055 return file
1056
    def get_process_function_definitions(self):
        """The complete Pythia 8 member function definitions for the process"""
1059
1060 replace_dict = {}
1061
1062
1063 replace_dict['model_name'] = self.model_name
1064
1065
1066 replace_dict['process_lines'] = \
1067 "\n".join([self.get_process_info_lines(me) for \
1068 me in self.matrix_elements])
1069
1070
1071 replace_dict['process_class_name'] = self.process_name
1072
1073 color_amplitudes = [me.get_color_amplitudes() for me in \
1074 self.matrix_elements]
1075
1076 replace_dict['initProc_lines'] = \
1077 self.get_initProc_lines(color_amplitudes)
1078 replace_dict['reset_jamp_lines'] = \
1079 self.get_reset_jamp_lines(color_amplitudes)
1080 replace_dict['sigmaKin_lines'] = \
1081 self.get_sigmaKin_lines(color_amplitudes)
1082 replace_dict['sigmaHat_lines'] = \
1083 self.get_sigmaHat_lines()
1084
1085 replace_dict['setIdColAcol_lines'] = \
1086 self.get_setIdColAcol_lines(color_amplitudes)
1087
1088 replace_dict['weightDecay_lines'] = \
1089 self.get_weightDecay_lines()
1090
1091 replace_dict['all_sigmaKin'] = \
1092 self.get_all_sigmaKin_lines(color_amplitudes,
1093 self.process_name)
1094
1095 file = read_template_file('pythia8_process_function_definitions.inc') %\
1096 replace_dict
1097
1098 return file
1099
    def get_process_influx(self):
        """Return the Pythia 8 inFlux string for the beams of this process"""
1102
1103
1104
1105 beams = set([tuple(sorted([process.get('legs')[0].get('id'),
1106 process.get('legs')[1].get('id')])) \
1107 for process in self.processes])
1108
1109
1110 antiquarks = range(-1, -6, -1)
1111 quarks = range(1,6)
1112 antileptons = range(-11, -17, -1)
1113 leptons = range(11, 17, 1)
1114 allquarks = antiquarks + quarks
1115 antifermions = antiquarks + antileptons
1116 fermions = quarks + leptons
1117 allfermions = allquarks + antileptons + leptons
1118 downfermions = range(-2, -5, -2) + range(-1, -5, -2) + \
1119 range(-12, -17, -2) + range(-11, -17, -2)
1120 upfermions = range(1, 5, 2) + range(2, 5, 2) + \
1121 range(11, 17, 2) + range(12, 17, 2)
1122
1123
1124
1125 set_tuples = [(set([(21, 21)]), "gg"),
1126 (set(list(itertools.product(allquarks, [21]))), "qg"),
1127 (set(zip(antiquarks, quarks)), "qqbarSame"),
1128 (set(list(itertools.product(allquarks,
1129 allquarks))), "qq"),
1130 (set(zip(antifermions, fermions)),"ffbarSame"),
1131 (set(zip(downfermions, upfermions)),"ffbarChg"),
1132 (set(list(itertools.product(allfermions,
1133 allfermions))), "ff"),
1134 (set(list(itertools.product(allfermions, [22]))), "fgm"),
1135 (set([(21, 22)]), "ggm"),
1136 (set([(22, 22)]), "gmgm")]
1137
1138 for set_tuple in set_tuples:
1139 if beams.issubset(set_tuple[0]):
1140 return set_tuple[1]
1141
1142 raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\
1143 repr(beams))
1144
1145 return
1146
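    # Examples of the beam -> inFlux mapping above: g g > t t~ has beams
    # {(21, 21)} and maps to "gg"; u u~ > z maps to "qqbarSame"; and
    # e+ e- > mu+ mu- maps to "ffbarSame" (the processes are shown only to
    # illustrate the sets defined above).
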
    def get_id_masses(self, process):
        """Return the lines which define the ids for the final state particles,
        for the Pythia phase space"""
1150
1151 if self.nfinal == 1:
1152 return ""
1153
1154 mass_strings = []
1155 for i in range(2, len(process.get_legs_with_decays())):
1156 if self.model.get_particle(process.get_legs_with_decays()[i].get('id')).\
1157 get('mass') not in ['zero', 'ZERO']:
1158 mass_strings.append("int id%dMass() const {return %d;}" % \
1159 (i + 1, abs(process.get_legs_with_decays()[i].get('id'))))
1160
1161 return "\n".join(mass_strings)
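
    # For a hypothetical g g > t t~ process the loop above emits one line per
    # massive final-state leg, e.g.
    #
    #     int id3Mass() const {return 6;}
    #     int id4Mass() const {return 6;}
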
1162
    def get_resonance_lines(self):
        """Return the lines which define the ids for intermediate resonances
        for the Pythia phase space"""
1166
1167 if self.nfinal == 1:
1168 return "virtual int resonanceA() const {return %d;}" % \
1169 abs(self.processes[0].get('legs')[2].get('id'))
1170
1171 res_strings = []
1172 res_letters = ['A', 'B']
1173
1174 sids, singleres, schannel = self.get_resonances()
1175
1176 for i, sid in enumerate(sids[:2]):
1177 res_strings.append("virtual int resonance%s() const {return %d;}"\
1178 % (res_letters[i], sid))
1179
1180 if schannel:
1181 res_strings.append("virtual bool isSChannel() const {return true;}")
1182
1183 if singleres != 0:
1184 res_strings.append("virtual int idSChannel() const {return %d;}" \
1185 % singleres)
1186
1187 return "\n".join(res_strings)
1188
    def get_resonances(self):
        """Return the PIDs for any resonances in 2->2 and 2->3 processes."""
1191
1192 model = self.matrix_elements[0].get('processes')[0].get('model')
1193 new_pdg = model.get_first_non_pdg()
1194
1195 diagrams = sum([me.get('diagrams') for me in self.matrix_elements], [])
1196 resonances = []
1197 no_t_channels = True
1198 final_s_channels = []
1199 for diagram in diagrams:
1200 schannels, tchannels = diagram.get('amplitudes')[0].\
1201 get_s_and_t_channels(self.ninitial, model,
1202 new_pdg)
1203 for schannel in schannels:
1204 sid = schannel.get('legs')[-1].get('id')
1205 part = self.model.get_particle(sid)
1206 if part:
1207 width = self.model.get_particle(sid).get('width')
1208 if width.lower() != 'zero':
1209
1210 resonances.append(abs(sid))
1211 else:
1212 sid = 0
1213 if len(tchannels) == 1 and schannel == schannels[-1]:
1214 final_s_channels.append(abs(sid))
1215
1216 if len(tchannels) > 1:
1217
1218 no_t_channels = False
1219
1220 resonance_set = set(resonances)
1221 final_s_set = set(final_s_channels)
1222
1223 singleres = 0
1224
1225 if len(final_s_channels) == len(diagrams) and len(final_s_set) == 1 \
1226 and final_s_channels[0] != 0:
1227 singleres = final_s_channels[0]
1228
1229 resonance_set = list(set([pid for pid in resonance_set]))
1230
1231
1232
1233 schannel = no_t_channels and \
1234 not any(['QCD' in d.calculate_orders() for d in diagrams])
1235
1236 return resonance_set, singleres, schannel
1237
    def get_initProc_lines(self, color_amplitudes):
        """Get initProc_lines for function definition for Pythia 8 .cc file"""
1240
1241 initProc_lines = []
1242
1243 initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau")
1244
1245 if not self.model.get_particle(4) or \
1246 self.model.get_particle(4).get('mass').lower() == 'zero':
1247 cMassiveME = "0."
1248 else:
1249 cMassiveME = "particleDataPtr->m0(4)"
1250 initProc_lines.append("mcME = %s;" % cMassiveME)
1251 if not self.model.get_particle(5) or \
1252 self.model.get_particle(5).get('mass').lower() == 'zero':
1253 bMassiveME = "0."
1254 else:
1255 bMassiveME = "particleDataPtr->m0(5)"
1256 initProc_lines.append("mbME = %s;" % bMassiveME)
1257 if not self.model.get_particle(13) or \
1258 self.model.get_particle(13).get('mass').lower() == 'zero':
1259 muMassiveME = "0."
1260 else:
1261 muMassiveME = "particleDataPtr->m0(13)"
1262 initProc_lines.append("mmuME = %s;" % muMassiveME)
1263 if not self.model.get_particle(15) or \
1264 self.model.get_particle(15).get('mass').lower() == 'zero':
1265 tauMassiveME = "0."
1266 else:
1267 tauMassiveME = "particleDataPtr->m0(15)"
1268 initProc_lines.append("mtauME = %s;" % tauMassiveME)
1269
1270 for i, me in enumerate(self.matrix_elements):
1271 initProc_lines.append("jamp2[%d] = new double[%d];" % \
1272 (i, len(color_amplitudes[i])))
1273
1274 return "\n".join(initProc_lines)
1275
    def get_setIdColAcol_lines(self, color_amplitudes):
        """Generate lines to set final-state id and color info for process"""
1278
1279 res_lines = []
1280
1281
1282 beams = set([(process.get('legs')[0].get('id'),
1283 process.get('legs')[1].get('id')) \
1284 for process in self.processes])
1285
1286
1287 for ibeam, beam_parts in enumerate(beams):
1288 if ibeam == 0:
1289 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts)
1290 else:
1291 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts)
1292
1293 beam_processes = [(i, me) for (i, me) in \
1294 enumerate(self.matrix_elements) if beam_parts in \
1295 [(process.get('legs')[0].get('id'),
1296 process.get('legs')[1].get('id')) \
1297 for process in me.get('processes')]]
1298
1299 beam_mirror_processes = []
1300 if beam_parts[0] != beam_parts[1]:
1301 beam_mirror_processes = [(i, me) for (i, me) in \
1302 enumerate(self.matrix_elements) if beam_parts in \
1303 [(process.get('legs')[1].get('id'),
1304 process.get('legs')[0].get('id')) \
1305 for process in me.get('processes')]]
1306
1307 final_id_list = []
1308 final_mirror_id_list = []
1309 for (i, me) in beam_processes:
1310 final_id_list.extend([tuple([l.get('id') for l in \
1311 proc.get_legs_with_decays() if l.get('state')]) \
1312 for proc in me.get('processes') \
1313 if beam_parts == \
1314 (proc.get('legs')[0].get('id'),
1315 proc.get('legs')[1].get('id'))])
1316 for (i, me) in beam_mirror_processes:
1317 final_mirror_id_list.extend([tuple([l.get('id') for l in \
1318 proc.get_legs_with_decays() if l.get('state')]) \
1319 for proc in me.get_mirror_processes() \
1320 if beam_parts == \
1321 (proc.get('legs')[0].get('id'),
1322 proc.get('legs')[1].get('id'))])
1323 final_id_list = set(final_id_list)
1324 final_mirror_id_list = set(final_mirror_id_list)
1325
1326 if final_id_list and final_mirror_id_list or \
1327 not final_id_list and not final_mirror_id_list:
                raise self.ProcessExporterCPPError(
                    "Missing processes, or both process and mirror process")
1330
1331
1332 ncombs = len(final_id_list)+len(final_mirror_id_list)
1333
1334 res_lines.append("// Pick one of the flavor combinations %s" % \
1335 ", ".join([repr(ids) for ids in final_id_list]))
1336
1337 me_weight = []
1338 for final_ids in final_id_list:
1339 items = [(i, len([ p for p in me.get('processes') \
1340 if [l.get('id') for l in \
1341 p.get_legs_with_decays()] == \
1342 list(beam_parts) + list(final_ids)])) \
1343 for (i, me) in beam_processes]
1344 me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\
1345 (i, l) in items if l > 0]).\
1346 replace('*1', ''))
1347 if any([l>1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError(
                        "More than one process with identical " +
                        "external particles is not supported")
1351
1352 for final_ids in final_mirror_id_list:
1353 items = [(i, len([ p for p in me.get_mirror_processes() \
1354 if [l.get('id') for l in p.get_legs_with_decays()] == \
1355 list(beam_parts) + list(final_ids)])) \
1356 for (i, me) in beam_mirror_processes]
1357 me_weight.append("+".join(["matrix_element[%i]*%i" % \
1358 (i+len(self.matrix_elements), l) for\
1359 (i, l) in items if l > 0]).\
1360 replace('*1', ''))
1361 if any([l>1 for (i, l) in items]):
                    raise self.ProcessExporterCPPError(
                        "More than one process with identical " +
                        "external particles is not supported")
1365
1366 if final_id_list:
1367 res_lines.append("int flavors[%d][%d] = {%s};" % \
1368 (ncombs, self.nfinal,
1369 ",".join(["{" + ",".join([str(id) for id \
1370 in ids]) + "}" for ids \
1371 in final_id_list])))
1372 elif final_mirror_id_list:
1373 res_lines.append("int flavors[%d][%d] = {%s};" % \
1374 (ncombs, self.nfinal,
1375 ",".join(["{" + ",".join([str(id) for id \
1376 in ids]) + "}" for ids \
1377 in final_mirror_id_list])))
1378 res_lines.append("vector<double> probs;")
1379 res_lines.append("double sum = %s;" % "+".join(me_weight))
1380 for me in me_weight:
1381 res_lines.append("probs.push_back(%s/sum);" % me)
1382 res_lines.append("int choice = rndmPtr->pick(probs);")
1383 for i in range(self.nfinal):
1384 res_lines.append("id%d = flavors[choice][%d];" % (i+3, i))
1385
1386 res_lines.append("}")
1387
1388 res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \
1389 range(1, self.nexternal + 1)]))
1390
1391
1392
1393
1394
1395
1396
1397
1398
1399
1400 res_lines.append("// Pick color flow")
1401
1402 res_lines.append("int ncolor[%d] = {%s};" % \
1403 (len(color_amplitudes),
1404 ",".join([str(len(colamp)) for colamp in \
1405 color_amplitudes])))
1406
1407
1408 for ime, me in enumerate(self.matrix_elements):
1409
1410 res_lines.append("if(%s){" % \
1411 "||".join(["&&".join(["id%d == %d" % \
1412 (i+1, l.get('id')) for (i, l) in \
1413 enumerate(p.get_legs_with_decays())])\
1414 for p in me.get('processes')]))
1415 if ime > 0:
1416 res_lines[-1] = "else " + res_lines[-1]
1417
1418 proc = me.get('processes')[0]
1419 if not me.get('color_basis'):
1420
1421 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1422 else:
1423
1424 repr_dict = {}
1425 legs = proc.get_legs_with_decays()
1426 for l in legs:
1427 repr_dict[l.get('number')] = \
1428 proc.get('model').get_particle(l.get('id')).get_color()
1429
1430 color_flow_list = \
1431 me.get('color_basis').color_flow_decomposition(\
1432 repr_dict, self.ninitial)
1433
1434 ncolor = len(me.get('color_basis'))
1435 res_lines.append("""vector<double> probs;
1436 double sum = %s;
1437 for(int i=0;i<ncolor[%i];i++)
1438 probs.push_back(jamp2[%i][i]/sum);
1439 int ic = rndmPtr->pick(probs);""" % \
1440 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1441 in range(ncolor)]), ime, ime))
1442
1443 color_flows = []
1444 for color_flow_dict in color_flow_list:
1445 color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \
1446 for (l,i) in itertools.product(legs, [0,1])])
1447
1448
1449 res_lines.append("static int colors[%d][%d] = {%s};" % \
1450 (ncolor, 2 * self.nexternal,
1451 ",".join(["{" + ",".join([str(id) for id \
1452 in flows]) + "}" for flows \
1453 in color_flows])))
1454
1455 res_lines.append("setColAcol(%s);" % \
1456 ",".join(["colors[ic][%d]" % i for i in \
1457 range(2 * self.nexternal)]))
1458 res_lines.append('}')
1459
1460
1461 for ime, me in enumerate(self.matrix_elements):
1462 if not me.get('has_mirror_process'):
1463 continue
1464 res_lines.append("else if(%s){" % \
1465 "||".join(["&&".join(["id%d == %d" % \
1466 (i+1, l.get('id')) for (i, l) in \
1467 enumerate(p.get_legs_with_decays())])\
1468 for p in me.get_mirror_processes()]))
1469
1470 proc = me.get('processes')[0]
1471 if not me.get('color_basis'):
1472
1473 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal))
1474 else:
1475
1476 repr_dict = {}
1477 legs = proc.get_legs_with_decays()
1478 legs[0:2] = [legs[1],legs[0]]
1479 for l in legs:
1480 repr_dict[l.get('number')] = \
1481 proc.get('model').get_particle(l.get('id')).get_color()
1482
1483 color_flow_list = \
1484 me.get('color_basis').color_flow_decomposition(\
1485 repr_dict, self.ninitial)
1486
1487 ncolor = len(me.get('color_basis'))
1488 res_lines.append("""vector<double> probs;
1489 double sum = %s;
1490 for(int i=0;i<ncolor[%i];i++)
1491 probs.push_back(jamp2[%i][i]/sum);
1492 int ic = rndmPtr->pick(probs);""" % \
1493 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \
1494 in range(ncolor)]), ime, ime))
1495
1496 color_flows = []
1497 for color_flow_dict in color_flow_list:
1498 color_flows.append([color_flow_dict[l.get('number')][i] % 500 \
1499 for (l,i) in itertools.product(legs, [0,1])])
1500
1501
1502 res_lines.append("static int colors[%d][%d] = {%s};" % \
1503 (ncolor, 2 * self.nexternal,
1504 ",".join(["{" + ",".join([str(id) for id \
1505 in flows]) + "}" for flows \
1506 in color_flows])))
1507
1508 res_lines.append("setColAcol(%s);" % \
1509 ",".join(["colors[ic][%d]" % i for i in \
1510 range(2 * self.nexternal)]))
1511 res_lines.append('}')
1512
1513 return "\n".join(res_lines)
1514
1515
    def get_weightDecay_lines(self):
        """Get weightDecay_lines for function definition for Pythia 8 .cc file"""
1518
1519 weightDecay_lines = "// Just use isotropic decay (default)\n"
1520 weightDecay_lines += "return 1.;"
1521
1522 return weightDecay_lines
1523
def read_template_file(filename):
    """Open a template file and return the contents."""
1530
1531 return open(os.path.join(_file_path, \
1532 'iolibs', 'template_files',
1533 filename)).read()
1534
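# The templates read above contain %(key)s placeholders which callers fill
# with the % operator, as done throughout this module, e.g.:
#
#     makefile = read_template_file('Makefile_sa_cpp_sp') % \
#                {'model': ProcessExporterCPP.get_model_name(model.get('name'))}
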
def get_mg5_info_lines():
    """Return info lines for MG5, suitable to place at the beginning of
    the generated files"""
1538
1539 info = misc.get_pkg_info()
1540 info_lines = ""
    if info and 'version' in info and 'date' in info:
1542 info_lines = "# MadGraph5_aMC@NLO v. %s, %s\n" % \
1543 (info['version'], info['date'])
1544 info_lines = info_lines + \
1545 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1546 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1547 else:
1548 info_lines = "# MadGraph5_aMC@NLO\n" + \
1549 "# By the MadGraph5_aMC@NLO Development Team\n" + \
1550 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch"
1551
1552 return info_lines
1553
def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1555 """Returns a nicely formatted string for the coefficients in JAMP lines"""
1556
1557 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power
1558
1559 if total_coeff == 1:
1560 if is_imaginary:
1561 return '+std::complex<double>(0,1)*'
1562 else:
1563 return '+'
1564 elif total_coeff == -1:
1565 if is_imaginary:
1566 return '-std::complex<double>(0,1)*'
1567 else:
1568 return '-'
1569
1570 res_str = '%+i.' % total_coeff.numerator
1571
1572 if total_coeff.denominator != 1:
1573
1574 res_str = res_str + '/%i.' % total_coeff.denominator
1575
1576 if is_imaginary:
1577 res_str = res_str + '*std::complex<double>(0,1)'
1578
1579 return res_str + '*'
1580
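# A few concrete return values of coeff(), following the rules above:
#
#     coeff(1, fractions.Fraction(1, 1), False, 0)  ->  '+'
#     coeff(1, fractions.Fraction(1, 2), False, 0)  ->  '+1./2.*'
#     coeff(1, fractions.Fraction(1, 3), True, 0)   ->  '+1./3.*std::complex<double>(0,1)*'
#
# so get_jamp_lines above can append an amplitude index, giving e.g.
# '+1./2.*amp[3]'.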
1581
1582
1583
1584
def convert_model_to_cpp(model, output_dir, wanted_lorentz=[],
                         wanted_couplings=[]):
1587 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
1588
1589
1590 model_builder = UFOModelConverterCPP(model,
1591 os.path.join(output_dir, 'src'),
1592 wanted_lorentz,
1593 wanted_couplings)
1594 model_builder.write_files()
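
# Minimal usage sketch of convert_model_to_cpp (the output directory is an
# arbitrary example path; wanted_lorentz and wanted_couplings restrict the
# ALOHA routines and couplings that are written):
#
#     convert_model_to_cpp(model, '/tmp/standalone_cpp_output')
#
# which is expected to place Parameters_<model_name>.h/.cc and
# HelAmps_<model_name>.h/.cc under '/tmp/standalone_cpp_output/src'.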
1595
class UFOModelConverterCPP(object):
    """ A converter of the UFO-MG5 Model to the C++ format """
1602
1603
1604 output_name = 'C++ Standalone'
1605 namespace = 'MG5'
1606
1607
1608 type_dict = {"real": "double",
1609 "complex": "std::complex<double>"}
1610
1611
1612 compiler_option_re = re.compile('^#\w')
1613 namespace_re = re.compile('^using namespace')
1614
1615 slha_to_depend = {('SMINPUTS', (3,)): ('aS',),
1616 ('SMINPUTS', (1,)): ('aEM',)}
1617
1618
1619 include_dir = '.'
1620 cc_file_dir = '.'
1621 param_template_h = 'cpp_model_parameters_h.inc'
1622 param_template_cc = 'cpp_model_parameters_cc.inc'
1623 aloha_template_h = 'cpp_hel_amps_h.inc'
1624 aloha_template_cc = 'cpp_hel_amps_cc.inc'
1625
1626 copy_include_files = []
1627 copy_cc_files = []
1628
    def __init__(self, model, output_path, wanted_lorentz=[],
                 wanted_couplings=[]):
1631 """ initialization of the objects """
1632
1633 self.model = model
1634 self.model_name = ProcessExporterCPP.get_model_name(model['name'])
1635
1636 self.dir_path = output_path
1637
1638
1639 self.wanted_lorentz = wanted_lorentz
1640
1641
1642
1643
1644 self.coups_dep = {}
1645 self.coups_indep = []
1646 self.params_dep = []
1647 self.params_indep = []
1648 self.p_to_cpp = parsers.UFOExpressionParserCPP()
1649
1650
1651 self.prepare_parameters()
1652 self.prepare_couplings(wanted_couplings)
1653
    def write_files(self):
        """Create all files for the model."""
        # Assumed composition of the collapsed original: write the ALOHA
        # helicity amplitude routines and the parameter/coupling class files
        # prepared above.
        self.write_aloha_routines()
        self.write_parameter_class_files()

1664
    def prepare_parameters(self):
        """Extract the parameters from the model, and store them in
        the two lists params_indep and params_dep"""
1668
1669
1670 keys = self.model['parameters'].keys()
1671 keys.sort(key=len)
1672 params_ext = []
1673 for key in keys:
1674 if key == ('external',):
1675 params_ext += [p for p in self.model['parameters'][key] if p.name]
1676 elif 'aS' in key:
1677 for p in self.model['parameters'][key]:
1678 self.params_dep.append(base_objects.ModelVariable(p.name,
1679 p.name + " = " + \
1680 self.p_to_cpp.parse(p.expr) + ";",
1681 p.type,
1682 p.depend))
1683 else:
1684 for p in self.model['parameters'][key]:
1685 if p.name == 'ZERO':
1686 continue
1687 self.params_indep.append(base_objects.ModelVariable(p.name,
1688 p.name + " = " + \
1689 self.p_to_cpp.parse(p.expr) + ";",
1690 p.type,
1691 p.depend))
1692
1693
1694 while params_ext:
1695 param = params_ext.pop(0)
1696
1697 expression = ""
1698 assert param.value.imag == 0
1699 if len(param.lhacode) == 1:
1700 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \
1701 (param.name, param.lhablock.lower(),
1702 param.lhacode[0], param.value.real)
1703 elif len(param.lhacode) == 2:
1704 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \
1705 (param.lhacode[0], param.lhacode[1])
1706 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \
1707 % (param.name, param.lhablock.lower(), param.value.real)
1708 else:
1709 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices")
1710 self.params_indep.insert(0,
1711 base_objects.ModelVariable(param.name,
1712 expression,
1713 'real'))
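
    # Example of a generated C++ initialization line for an external
    # parameter with one SLHA index (block, code and value are illustrative):
    #
    #     aS = slha.get_block_entry("sminputs", 3, 1.184000e-01);
    #
    # parameters with two indices first fill the 'indices' vector and then
    # call the two-index form of get_block_entry.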
1714
    def prepare_couplings(self, wanted_couplings=[]):
        """Extract the couplings from the model, and store them in
        the two lists coups_indep and coups_dep"""
1718
1719
1720 keys = self.model['couplings'].keys()
1721 keys.sort(key=len)
1722 for key, coup_list in self.model['couplings'].items():
1723 if "aS" in key:
1724 for c in coup_list:
1725 if not wanted_couplings or c.name in wanted_couplings:
1726 self.coups_dep[c.name] = base_objects.ModelVariable(\
1727 c.name,
1728 c.expr,
1729 c.type,
1730 c.depend)
1731 else:
1732 for c in coup_list:
1733 if not wanted_couplings or c.name in wanted_couplings:
1734 self.coups_indep.append(base_objects.ModelVariable(\
1735 c.name,
1736 c.expr,
1737 c.type,
1738 c.depend))
1739
1740
1741 for coup in self.coups_dep.values() + self.coups_indep:
1742 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
1743
1744
1745
    def write_parameter_class_files(self):
        """Generate the parameters_model.h and parameters_model.cc
        files, which have the parameters and couplings for the model."""
1749
1750 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1751 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1752 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1753 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1754
1755 parameter_h_file = os.path.join(self.dir_path, self.include_dir,
1756 'Parameters_%s.h' % self.model_name)
1757 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1758 'Parameters_%s.cc' % self.model_name)
1759
1760 file_h, file_cc = self.generate_parameters_class_files()
1761
1762
1763 writers.CPPWriter(parameter_h_file).writelines(file_h)
1764 writers.CPPWriter(parameter_cc_file).writelines(file_cc)
1765
1766
1767 for copy_file in self.copy_include_files:
1768 shutil.copy(os.path.join(_file_path, 'iolibs',
1769 'template_files',copy_file),
1770 os.path.join(self.dir_path, self.include_dir))
1771
1772 for copy_file in self.copy_cc_files:
1773 shutil.copy(os.path.join(_file_path, 'iolibs',
1774 'template_files',copy_file),
1775 os.path.join(self.dir_path, self.cc_file_dir))
1776
1777 logger.info("Created files %s and %s in directory" \
1778 % (os.path.split(parameter_h_file)[-1],
1779 os.path.split(parameter_cc_file)[-1]))
1780 logger.info("%s and %s" % \
1781 (os.path.split(parameter_h_file)[0],
1782 os.path.split(parameter_cc_file)[0]))
1783
    def generate_parameters_class_files(self):
        """Create the content of the Parameters_model.h and .cc files"""
1786
1787 replace_dict = {}
1788
1789 replace_dict['info_lines'] = get_mg5_info_lines()
1790 replace_dict['model_name'] = self.model_name
1791
1792 replace_dict['independent_parameters'] = \
1793 "// Model parameters independent of aS\n" + \
1794 self.write_parameters(self.params_indep)
1795 replace_dict['independent_couplings'] = \
1796 "// Model parameters dependent on aS\n" + \
1797 self.write_parameters(self.params_dep)
1798 replace_dict['dependent_parameters'] = \
1799 "// Model couplings independent of aS\n" + \
1800 self.write_parameters(self.coups_indep)
1801 replace_dict['dependent_couplings'] = \
1802 "// Model couplings dependent on aS\n" + \
1803 self.write_parameters(self.coups_dep.values())
1804
1805 replace_dict['set_independent_parameters'] = \
1806 self.write_set_parameters(self.params_indep)
1807 replace_dict['set_independent_couplings'] = \
1808 self.write_set_parameters(self.coups_indep)
1809 replace_dict['set_dependent_parameters'] = \
1810 self.write_set_parameters(self.params_dep)
1811 replace_dict['set_dependent_couplings'] = \
1812 self.write_set_parameters(self.coups_dep.values())
1813
1814 replace_dict['print_independent_parameters'] = \
1815 self.write_print_parameters(self.params_indep)
1816 replace_dict['print_independent_couplings'] = \
1817 self.write_print_parameters(self.coups_indep)
1818 replace_dict['print_dependent_parameters'] = \
1819 self.write_print_parameters(self.params_dep)
1820 replace_dict['print_dependent_couplings'] = \
1821 self.write_print_parameters(self.coups_dep.values())
1822
1823 file_h = read_template_file(self.param_template_h) % \
1824 replace_dict
1825 file_cc = read_template_file(self.param_template_cc) % \
1826 replace_dict
1827
1828 return file_h, file_cc
1829
    def write_parameters(self, params):
        """Write out the definitions of parameters"""
1832
1833
1834 type_param_dict = {}
1835
1836 for param in params:
1837 type_param_dict[param.type] = \
1838 type_param_dict.setdefault(param.type, []) + [param.name]
1839
1840
1841
1842 res_strings = []
1843 for key in type_param_dict:
1844 res_strings.append("%s %s;" % (self.type_dict[key],
1845 ",".join(type_param_dict[key])))
1846
1847 return "\n".join(res_strings)
1848
    def write_set_parameters(self, params):
        """Write out the lines which set the parameter values"""
1851
1852
1853
1854 res_strings = []
1855 for param in params:
1856 res_strings.append("%s" % param.expr)
1857
1858
1859
1860 for particle in self.model.get('particles'):
1861 if particle.is_fermion() and particle.get('self_antipart') and \
1862 particle.get('width').lower() != 'zero':
1863 res_strings.append("if (%s < 0)" % particle.get('mass'))
1864 res_strings.append("%(width)s = -abs(%(width)s);" % \
1865 {"width": particle.get('width')})
1866
1867 return "\n".join(res_strings)
1868
    def write_print_parameters(self, params):
        """Write out the lines which print the parameter values"""
1871
1872
1873
1874 res_strings = []
1875 for param in params:
1876 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name))
1877
1878 return "\n".join(res_strings)
1879
1880
1881
    def write_aloha_routines(self):
        """Generate the hel_amps_model.h and hel_amps_model.cc files, which
        have the complete set of generalized Helas routines for the model"""
1885
1886 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)):
1887 os.makedirs(os.path.join(self.dir_path, self.include_dir))
1888 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)):
1889 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir))
1890
1891 model_h_file = os.path.join(self.dir_path, self.include_dir,
1892 'HelAmps_%s.h' % self.model_name)
1893 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir,
1894 'HelAmps_%s.cc' % self.model_name)
1895
1896 replace_dict = {}
1897
1898 replace_dict['output_name'] = self.output_name
1899 replace_dict['info_lines'] = get_mg5_info_lines()
1900 replace_dict['namespace'] = self.namespace
1901 replace_dict['model_name'] = self.model_name
1902
1903
1904
1905 template_h_files = self.read_aloha_template_files(ext = 'h')
1906 template_cc_files = self.read_aloha_template_files(ext = 'cc')
1907
1908 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name'))
1909 aloha_model.add_Lorentz_object(self.model.get('lorentz'))
1910
1911 if self.wanted_lorentz:
1912 aloha_model.compute_subset(self.wanted_lorentz)
1913 else:
1914 aloha_model.compute_all(save=False, custom_propa=True)
1915
1916 for abstracthelas in dict(aloha_model).values():
1917 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP',
1918 mode='no_include')
1919
1920 template_h_files.append(h_rout)
1921 template_cc_files.append(cc_rout)
1922
1923
1924
1925
1926
1927
1928
1929
1930
1931 replace_dict['function_declarations'] = '\n'.join(template_h_files)
1932 replace_dict['function_definitions'] = '\n'.join(template_cc_files)
1933
1934 file_h = read_template_file(self.aloha_template_h) % replace_dict
1935 file_cc = read_template_file(self.aloha_template_cc) % replace_dict
1936
1937
1938 writers.CPPWriter(model_h_file).writelines(file_h)
1939 writers.CPPWriter(model_cc_file).writelines(file_cc)
1940
1941 logger.info("Created files %s and %s in directory" \
1942 % (os.path.split(model_h_file)[-1],
1943 os.path.split(model_cc_file)[-1]))
1944 logger.info("%s and %s" % \
1945 (os.path.split(model_h_file)[0],
1946 os.path.split(model_cc_file)[0]))
1947
1948
    def read_aloha_template_files(self, ext):
        """Read all ALOHA template files with extension ext, strip them of
        compiler options and namespace options, and return in a list"""
1952
1953 template_files = []
1954 for filename in glob.glob(os.path.join(MG5DIR, 'aloha',
1955 'template_files', '*.%s' % ext)):
1956 file = open(filename, 'r')
1957 template_file_string = ""
1958 # Read the template line by line, dropping compiler and namespace lines
1959 for line in file:
1960 line = self.clean_line(line)
1961 if not line:
1962 continue
1963 template_file_string += line.strip() + '\n'
1964 file.close()
1965 template_files.append(template_file_string)
1966
1967 return template_files
1968
1969
1970
1971
1972
1973
1974
1975
1976
1977
1978
1979
1980
1981
1982
1983
1984
1985
1986
1987
1988
1989
1990
1992 """Strip a line of compiler options and namespace options."""
1993
1994 if self.compiler_option_re.match(line) or self.namespace_re.match(line):
1995 return ""
1996
1997 return line
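# Sketch (compiler_option_re and namespace_re are class attributes defined outside
# this excerpt): clean_line is intended to drop template lines such as preprocessor
# directives ("#include ...", "#ifndef ...") and namespace statements
# ("using namespace std;"), so the routines can be re-emitted inside the exporter's
# own header and namespace.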
1998
1999
2000
2001
2002 def generate_example_file_pythia8(path,
2003 model_path,
2004 process_names,
2005 exporter,
2006 main_file_name = "",
2007 example_dir = "examples"):
2008 """Generate the main_model_name.cc file and Makefile in the examples dir"""
2009
2010 filepath = os.path.join(path, example_dir)
2011 if not os.path.isdir(filepath):
2012 os.makedirs(filepath)
2013
2014 replace_dict = {}
2015
2016
2017 info_lines = get_mg5_info_lines()
2018 replace_dict['info_lines'] = info_lines
2019
2020
2021 replace_dict['model_name'] = exporter.model_name
2022
2023
2024 replace_dict['include_lines'] = \
2025 "\n".join(["#include \"%s.h\"" % proc_name \
2026 for proc_name in process_names])
2027
2028
2029 replace_dict['sigma_pointer_lines'] = \
2030 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \
2031 for proc_name in process_names])
2032
2033
2034 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path,
2035 "param_card_%s.dat" % \
2036 exporter.model_name)
2037
2038
2039 file = read_template_file('pythia8_main_example_cc.inc') % \
2040 replace_dict
2041
2042 if not main_file_name:
2043 num = 1
2044 while os.path.exists(os.path.join(filepath,
2045 'main_%s_%i' % (exporter.model_name, num))):
2046 num += 1
2047 main_file_name = str(num)
2048
2049 main_file = 'main_%s_%s' % (exporter.model_name,
2050 main_file_name)
2051
2052 main_filename = os.path.join(filepath, main_file + '.cc')
2053
2054
2055 writers.CPPWriter(main_filename).writelines(file)
2056
2057 replace_dict = {}
2058
2059
2060 replace_dict['info_lines'] = get_mg5_info_lines()
2061
2062 replace_dict['main_file'] = main_file
2063
2064 replace_dict['process_dir'] = model_path
2065
2066 replace_dict['include_dir'] = exporter.include_dir
2067
2068
2069 file = read_template_file('pythia8_main_makefile.inc') % \
2070 replace_dict
2071
2072 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \
2073 (exporter.model_name, main_file_name))
2074
2075
2076 open(make_filename, 'w').write(file)
2077
2078 logger.info("Created files %s and %s in directory %s" \
2079 % (os.path.split(main_filename)[-1],
2080 os.path.split(make_filename)[-1],
2081 os.path.split(make_filename)[0]))
2082 return main_file, make_filename
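# Illustrative call with hypothetical names: after exporting processes for the "sm"
# model one could generate the example main program and Makefile with
#   main_file, makefile = generate_example_file_pythia8(
#       pythia_dir, 'Processes_sm', ['Sigma_sm_gg_ttx'], exporter)
# which writes examples/main_sm_<n>.cc and examples/Makefile_sm_<n> under the path
# given as first argument.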
2083
2091 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)"""
2092
2093 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h')):
2094 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir)
2095
2096
2097 model_builder = UFOModelConverterPythia8(model, pythia_dir)
2098 model_builder.cc_file_dir = "Processes_" + model_builder.model_name
2099 model_builder.include_dir = model_builder.cc_file_dir
2100
2101 model_builder.write_files()
2102
2103 model_builder.write_makefile()
2104
2105 model_builder.write_param_card()
2106 return model_builder.model_name, model_builder.cc_file_dir
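# Illustrative usage, assuming the reconstructed def above: converting a loaded UFO
# model for an existing Pythia 8 installation would look like
#   model_name, process_dir = convert_model_to_pythia8(model, '/path/to/pythia8')
# after which <pythia_dir>/Processes_<model_name>/ holds the generated model files,
# a Makefile and param_card_<model_name>.dat.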
2107
2113 """ A converter of the UFO-MG5 Model to the Pythia 8 format """
2114
2115
2116 output_name = 'Pythia 8'
2117 namespace = 'Pythia8'
2118
2119
2120 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(pow(pd->m0(23),2))',
2121 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(pow(pd->m0(23),2))*pow(pd->m0(23),2)/(sqrt(2.)*pow(pd->m0(24),2)*(pow(pd->m0(23),2)-pow(pd->m0(24),2)))',
2122 ('SMINPUTS', (3,)): 'alpS',
2123 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)',
2124 }
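# Example of the mapping above, assuming standard SM UFO parameter names: the
# external parameter aEWM1 (block SMINPUTS, entry 1) is not read from the SLHA card
# but recomputed from Pythia 8 itself as
#   aEWM1 = 1./csm->alphaEM(pow(pd->m0(23),2));
# i.e. the inverse electromagnetic coupling evaluated at the Z mass (PDG id 23).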
2125
2126
2127 param_template_h = 'pythia8_model_parameters_h.inc'
2128 param_template_cc = 'pythia8_model_parameters_cc.inc'
2129
2131 """Extract the model parameters from Pythia 8, and store them in
2132 the two lists params_indep and params_dep"""
2133
2134
2135 keys = sorted(self.model['parameters'].keys(), key=len)
2137 params_ext = []
2138 for key in keys:
2139 if key == ('external',):
2140 params_ext += [p for p in self.model['parameters'][key] if p.name]
2141 elif 'aS' in key:
2142 for p in self.model['parameters'][key]:
2143 self.params_dep.append(base_objects.ModelVariable(p.name,
2144 p.name + " = " + \
2145 self.p_to_cpp.parse(p.expr) + ';',
2146 p.type,
2147 p.depend))
2148 else:
2149 for p in self.model['parameters'][key]:
2150 self.params_indep.append(base_objects.ModelVariable(p.name,
2151 p.name + " = " + \
2152 self.p_to_cpp.parse(p.expr) + ';',
2153 p.type,
2154 p.depend))
2155
2156
2157
2158
2159 while params_ext:
2160 param = params_ext.pop(0)
2161 key = (param.lhablock, tuple(param.lhacode))
2162 if 'aS' in self.slha_to_depend.setdefault(key, ()):
2163
2164 self.params_dep.insert(0,
2165 base_objects.ModelVariable(param.name,
2166 param.name + ' = ' + \
2167 self.slha_to_expr[key] + ';',
2168 'real'))
2169 else:
2170 try:
2171
2172 self.params_indep.insert(0,
2173 base_objects.ModelVariable(param.name,
2174 param.name + ' = ' + \
2175 self.slha_to_expr[key] + ';',
2176 'real'))
2177 except KeyError:
2178
2179
2180 if param.lhablock == 'YUKAWA':
2181 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \
2182 % param.lhacode[0]
2183 if param.lhablock == 'MASS':
2184 self.slha_to_expr[key] = 'pd->m0(%i)' \
2185 % param.lhacode[0]
2186 if param.lhablock == 'DECAY':
2187 self.slha_to_expr[key] = \
2188 'pd->mWidth(%i)' % param.lhacode[0]
2189 if key in self.slha_to_expr:
2190 self.params_indep.insert(0,\
2191 base_objects.ModelVariable(param.name,
2192 param.name + "=" + self.slha_to_expr[key] \
2193 + ';',
2194 'real'))
2195 else:
2196
2197 if len(param.lhacode) == 1:
2198 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \
2199 (param.lhablock.lower(),
2200 param.lhacode[0],
2201 param.name) + \
2202 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2203 + "%s = %e;}") % (param.name, param.value.real,
2204 param.name, param.value.real)
2205 elif len(param.lhacode) == 2:
2206 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \
2207 (param.lhablock.lower(),
2208 param.lhacode[0],
2209 param.lhacode[1],
2210 param.name) + \
2211 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2212 + "%s = %e;}") % (param.name, param.value.real,
2213 param.name, param.value.real)
2214 elif len(param.lhacode) == 3:
2215 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \
2216 (param.lhablock.lower(),
2217 param.lhacode[0],
2218 param.lhacode[1],
2219 param.lhacode[2],
2220 param.name) + \
2221 ("cout << \"Warning, setting %s to %e\" << endl;\n" \
2222 + "%s = %e;}") % (param.name, param.value.real,
2223 param.name, param.value.real)
2224 else:
2225 raise MadGraph5Error("Only support for SLHA blocks with 1, 2 or 3 indices")
2226 self.params_indep.insert(0,
2227 base_objects.ModelVariable(param.name,
2228 expression,
2229 'real'))
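# Illustrative output for the fallback branch above (mdl_lam, block "frblock" and
# lhacode (1, 2) are hypothetical): the generated C++ reads
#   if(!slhaPtr->getEntry<double>("frblock", 1, 2, mdl_lam)){
#   cout << "Warning, setting mdl_lam to 1.000000e-01" << endl;
#   mdl_lam = 1.000000e-01;}
# so the SLHA card is tried first and the UFO default value (here 0.1) is used only
# if the entry is missing.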
2230
2232 """Generate the Makefile, which creates library files."""
2233
2234 makefilename = os.path.join(self.dir_path, self.cc_file_dir,
2235 'Makefile')
2236
2237 replace_dict = {}
2238
2239 replace_dict['info_lines'] = get_mg5_info_lines()
2240 replace_dict['model'] = self.model_name
2241
2242 makefile = read_template_file('pythia8_makefile.inc') % replace_dict
2243
2244
2245 open(makefilename, 'w').write(makefile)
2246
2247 logger.info("Created %s in directory %s" \
2248 % (os.path.split(makefilename)[-1],
2249 os.path.split(makefilename)[0]))
2250
2252 """Generate the param_card for the model."""
2253
2254 paramcardname = os.path.join(self.dir_path, self.cc_file_dir,
2255 'param_card_%s.dat' % self.model_name)
2256
2257 open(paramcardname, 'w').write(\
2258 self.model.write_param_card())
2259
2260 logger.info("Created %s in directory %s" \
2261 % (os.path.split(paramcardname)[-1],
2262 os.path.split(paramcardname)[0]))
2263