
Source Code for Module madgraph.iolibs.export_cpp

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15   
  16  """Methods and classes to export models and matrix elements to Pythia 8 
  17  and C++ Standalone format.""" 
  18   
  19  from __future__ import absolute_import 
  20  import fractions 
  21  import glob 
  22  import itertools 
  23  import logging 
  24  from math import fmod 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29   
  30  import madgraph.core.base_objects as base_objects 
  31  import madgraph.core.color_algebra as color 
  32  import madgraph.core.helas_objects as helas_objects 
  33  import madgraph.iolibs.drawing_eps as draw 
  34  import madgraph.iolibs.files as files 
  35  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  36  import madgraph.iolibs.file_writers as writers 
  37  import madgraph.iolibs.template_files as template_files 
  38  import madgraph.iolibs.ufo_expression_parsers as parsers 
  39  import madgraph.various.banner as banner_mod 
  40  from madgraph import MadGraph5Error, InvalidCmd, MG5DIR 
  41  from madgraph.iolibs.files import cp, ln, mv 
  42   
  43  from madgraph.iolibs.export_v4 import VirtualExporter 
  44  import madgraph.various.misc as misc 
  45   
  46  import aloha.create_aloha as create_aloha 
  47  import aloha.aloha_writers as aloha_writers 
  48  from six.moves import range 
  49  from six.moves import zip 
  50   
  51  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  52  logger = logging.getLogger('madgraph.export_pythia8') 
  53  pjoin = os.path.join 
  54   
  55   
  56  def make_model_cpp(dir_path): 
  57      """Make the model library in a C++ standalone directory""" 
  58   
  59      source_dir = os.path.join(dir_path, "src") 
  60      # Run standalone 
  61      logger.info("Running make for src") 
  62      misc.compile(cwd=source_dir) 
  63   
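# Editor's usage sketch (not part of the module; the path below is illustrative):
# once a standalone C++ directory has been written out, its model library is
# compiled by pointing make_model_cpp at that directory.
#
#     make_model_cpp('/tmp/MyStandaloneCPP')   # runs make in /tmp/MyStandaloneCPP/src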
  64   
  65  class OneProcessExporterCPP(object): 
  66      """Class to take care of exporting a set of matrix elements to 
  67      C++ format.""" 
  68   
  69      # Static variables (for inheritance) 
  70      process_dir = '.' 
  71      include_dir = '.' 
  72      template_path = os.path.join(_file_path, 'iolibs', 'template_files') 
  73      __template_path = os.path.join(_file_path, 'iolibs', 'template_files') 
  74      process_template_h = 'cpp_process_h.inc' 
  75      process_template_cc = 'cpp_process_cc.inc' 
  76      process_class_template = 'cpp_process_class.inc' 
  77      process_definition_template = 'cpp_process_function_definitions.inc' 
  78      process_wavefunction_template = 'cpp_process_wavefunctions.inc' 
  79      process_sigmaKin_function_template = 'cpp_process_sigmaKin_function.inc' 
  80      single_process_template = 'cpp_process_matrix.inc' 
  81   
  82      class ProcessExporterCPPError(Exception): 
  83          pass 
  84   
85 - def __init__(self, matrix_elements, cpp_helas_call_writer, process_string = "", 86 process_number = 0, path = os.getcwd()):
87 """Initiate with matrix elements, helas call writer, process 88 string, path. Generate the process .h and .cc files.""" 89 90 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 91 self.matrix_elements = matrix_elements.get('matrix_elements') 92 elif isinstance(matrix_elements, helas_objects.HelasMatrixElement): 93 self.matrix_elements = \ 94 helas_objects.HelasMatrixElementList([matrix_elements]) 95 elif isinstance(matrix_elements, helas_objects.HelasMatrixElementList): 96 self.matrix_elements = matrix_elements 97 else: 98 raise base_objects.PhysicsObject.PhysicsObjectError("Wrong object type for matrix_elements: %s" % type(matrix_elements)) 99 100 if not self.matrix_elements: 101 raise MadGraph5Error("No matrix elements to export") 102 103 self.model = self.matrix_elements[0].get('processes')[0].get('model') 104 self.model_name = ProcessExporterCPP.get_model_name(self.model.get('name')) 105 106 self.processes = sum([me.get('processes') for \ 107 me in self.matrix_elements], []) 108 self.processes.extend(sum([me.get_mirror_processes() for \ 109 me in self.matrix_elements], [])) 110 111 self.nprocesses = len(self.matrix_elements) 112 if any([m.get('has_mirror_process') for m in self.matrix_elements]): 113 self.nprocesses = 2*len(self.matrix_elements) 114 115 if process_string: 116 self.process_string = process_string 117 else: 118 self.process_string = self.processes[0].base_string() 119 120 if process_number: 121 self.process_number = process_number 122 else: 123 self.process_number = self.processes[0].get('id') 124 125 self.process_name = self.get_process_name() 126 self.process_class = "CPPProcess" 127 128 self.path = path 129 self.helas_call_writer = cpp_helas_call_writer 130 131 if not isinstance(self.helas_call_writer, helas_call_writers.CPPUFOHelasCallWriter): 132 raise self.ProcessExporterCPPError("helas_call_writer not CPPUFOHelasCallWriter") 133 134 self.nexternal, self.ninitial = \ 135 self.matrix_elements[0].get_nexternal_ninitial() 136 self.nfinal = self.nexternal - self.ninitial 137 138 # Check if we can use the same helicities for all matrix 139 # elements 140 141 self.single_helicities = True 142 143 hel_matrix = self.get_helicity_matrix(self.matrix_elements[0]) 144 145 for me in self.matrix_elements[1:]: 146 if self.get_helicity_matrix(me) != hel_matrix: 147 self.single_helicities = False 148 149 if self.single_helicities: 150 # If all processes have the same helicity structure, this 151 # allows us to reuse the same wavefunctions for the 152 # different processes 153 154 self.wavefunctions = [] 155 wf_number = 0 156 157 for me in self.matrix_elements: 158 for iwf, wf in enumerate(me.get_all_wavefunctions()): 159 try: 160 old_wf = \ 161 self.wavefunctions[self.wavefunctions.index(wf)] 162 wf.set('number', old_wf.get('number')) 163 except ValueError: 164 wf_number += 1 165 wf.set('number', wf_number) 166 self.wavefunctions.append(wf) 167 168 # Also combine amplitudes 169 self.amplitudes = helas_objects.HelasAmplitudeList() 170 amp_number = 0 171 for me in self.matrix_elements: 172 for iamp, amp in enumerate(me.get_all_amplitudes()): 173 try: 174 old_amp = \ 175 self.amplitudes[self.amplitudes.index(amp)] 176 amp.set('number', old_amp.get('number')) 177 except ValueError: 178 amp_number += 1 179 amp.set('number', amp_number) 180 self.amplitudes.append(amp) 181 diagram = helas_objects.HelasDiagram({'amplitudes': self.amplitudes}) 182 self.amplitudes = helas_objects.HelasMatrixElement({\ 183 'diagrams': helas_objects.HelasDiagramList([diagram])})
 184   
 185      #=============================================================================== 
 186      # Global helper methods 
 187      #=============================================================================== 
 188      @classmethod 
 189      def read_template_file(cls, filename, classpath=False): 
 190          """Open a template file and return the contents.""" 
 191   
 192          if isinstance(filename, tuple): 
 193              file_path = filename[0] 
 194              filename = filename[1] 
 195          elif isinstance(filename, str): 
 196              if classpath: 
 197                  file_path = cls.__template_path 
 198              else: 
 199                  file_path = cls.template_path 
 200          else: 
 201              raise MadGraph5Error('Argument should be string or tuple.') 
 202   
 203          return open(os.path.join(file_path, filename)).read() 
 204   
 205   
 206   
 207   
 208   
 209   
 210      # Methods for generation of process files for C++ 
 211      def generate_process_files(self): 
 212          """Generate the .h and .cc files needed for C++, for the 
 213          processes described by multi_matrix_element""" 
 214   
 215          # Create the files 
 216          if not os.path.isdir(os.path.join(self.path, self.include_dir)): 
 217              os.makedirs(os.path.join(self.path, self.include_dir)) 
 218          filename = os.path.join(self.path, self.include_dir, 
 219                                  '%s.h' % self.process_class) 
 220          self.write_process_h_file(writers.CPPWriter(filename)) 
 221   
 222          if not os.path.isdir(os.path.join(self.path, self.process_dir)): 
 223              os.makedirs(os.path.join(self.path, self.process_dir)) 
 224          filename = os.path.join(self.path, self.process_dir, 
 225                                  '%s.cc' % self.process_class) 
 226          self.write_process_cc_file(writers.CPPWriter(filename)) 
 227   
 228          logger.info('Created files %(process)s.h and %(process)s.cc in' % \ 
 229                      {'process': self.process_class} + \ 
 230                      ' directory %(dir)s' % {'dir': os.path.split(filename)[0]}) 
 231   
 232   
 233      def get_default_converter(self): 
 234   
 235          replace_dict = {} 
 236   
 237   
 238          return replace_dict 
 239   
 240      #=========================================================================== 
 241      # write_process_h_file 
 242      #=========================================================================== 
243 - def write_process_h_file(self, writer):
244 """Write the class definition (.h) file for the process""" 245 246 if writer and not isinstance(writer, writers.CPPWriter): 247 raise writers.CPPWriter.CPPWriterError(\ 248 "writer not CPPWriter") 249 250 replace_dict = self.get_default_converter() 251 252 # Extract version number and date from VERSION file 253 info_lines = get_mg5_info_lines() 254 replace_dict['info_lines'] = info_lines 255 256 # Extract model name 257 replace_dict['model_name'] = \ 258 self.model_name 259 260 # Extract process file name 261 replace_dict['process_file_name'] = self.process_name 262 263 # Extract class definitions 264 process_class_definitions = self.get_process_class_definitions() 265 replace_dict['process_class_definitions'] = process_class_definitions 266 267 if writer: 268 file = self.read_template_file(self.process_template_h) % replace_dict 269 # Write the file 270 writer.writelines(file) 271 else: 272 return replace_dict
273 #=========================================================================== 274 # write_process_cc_file 275 #===========================================================================
276 - def write_process_cc_file(self, writer):
277 """Write the class member definition (.cc) file for the process 278 described by matrix_element""" 279 280 if writer: 281 if not isinstance(writer, writers.CPPWriter): 282 raise writers.CPPWriter.CPPWriterError(\ 283 "writer not CPPWriter") 284 285 replace_dict = self.get_default_converter() 286 287 # Extract version number and date from VERSION file 288 info_lines = get_mg5_info_lines() 289 replace_dict['info_lines'] = info_lines 290 291 # Extract process file name 292 replace_dict['process_file_name'] = self.process_name 293 294 # Extract model name 295 replace_dict['model_name'] = self.model_name 296 297 298 # Extract class function definitions 299 process_function_definitions = \ 300 self.get_process_function_definitions() 301 replace_dict['process_function_definitions'] = \ 302 process_function_definitions 303 304 if writer: 305 file = self.read_template_file(self.process_template_cc) % replace_dict 306 # Write the file 307 writer.writelines(file) 308 else: 309 return replace_dict
310 311 #=========================================================================== 312 # Process export helper functions 313 #===========================================================================
314 - def get_process_class_definitions(self, write=True):
315 """The complete class definition for the process""" 316 317 replace_dict = {} 318 319 # Extract model name 320 replace_dict['model_name'] = self.model_name 321 322 # Extract process info lines for all processes 323 process_lines = "\n".join([self.get_process_info_lines(me) for me in \ 324 self.matrix_elements]) 325 326 replace_dict['process_lines'] = process_lines 327 328 # Extract number of external particles 329 replace_dict['nfinal'] = self.nfinal 330 331 # Extract number of external particles 332 replace_dict['ninitial'] = self.ninitial 333 334 # Extract process class name (for the moment same as file name) 335 replace_dict['process_class_name'] = self.process_name 336 337 # Extract process definition 338 process_definition = "%s (%s)" % (self.process_string, 339 self.model_name) 340 replace_dict['process_definition'] = process_definition 341 342 process = self.processes[0] 343 344 replace_dict['process_code'] = self.process_number 345 replace_dict['nexternal'] = self.nexternal 346 replace_dict['nprocesses'] = self.nprocesses 347 348 349 color_amplitudes = self.matrix_elements[0].get_color_amplitudes() 350 # Number of color flows 351 replace_dict['ncolor'] = len(color_amplitudes) 352 353 if self.single_helicities: 354 replace_dict['all_sigma_kin_definitions'] = \ 355 """// Calculate wavefunctions 356 void calculate_wavefunctions(const int perm[], const int hel[]); 357 static const int nwavefuncs = %d; 358 std::complex<double> w[nwavefuncs][18]; 359 static const int namplitudes = %d; 360 std::complex<double> amp[namplitudes];""" % \ 361 (len(self.wavefunctions), 362 len(self.amplitudes.get_all_amplitudes())) 363 replace_dict['all_matrix_definitions'] = \ 364 "\n".join(["double matrix_%s();" % \ 365 me.get('processes')[0].shell_string().\ 366 replace("0_", "") \ 367 for me in self.matrix_elements]) 368 369 else: 370 replace_dict['all_sigma_kin_definitions'] = \ 371 "\n".join(["void sigmaKin_%s();" % \ 372 me.get('processes')[0].shell_string().\ 373 replace("0_", "") \ 374 for me in self.matrix_elements]) 375 replace_dict['all_matrix_definitions'] = \ 376 "\n".join(["double matrix_%s(const int hel[]);" % \ 377 me.get('processes')[0].shell_string().\ 378 replace("0_", "") \ 379 for me in self.matrix_elements]) 380 381 if write: 382 file = self.read_template_file(self.process_class_template) % replace_dict 383 return file 384 else: 385 return replace_dict
 386   
 387      def get_process_function_definitions(self): 
 388          """The complete Pythia 8 class definition for the process""" 
 389   
 390          replace_dict = {} 
 391   
 392          # Extract model name 
 393          replace_dict['model_name'] = self.model_name 
 394   
 395          # Extract process info lines 
 396          replace_dict['process_lines'] = \ 
 397              "\n".join([self.get_process_info_lines(me) for \ 
 398                         me in self.matrix_elements]) 
 399   
 400          # Extract process class name (for the moment same as file name) 
 401          replace_dict['process_class_name'] = self.process_name 
 402   
 403          color_amplitudes = [me.get_color_amplitudes() for me in \ 
 404                              self.matrix_elements] 
 405   
 406          replace_dict['initProc_lines'] = \ 
 407              self.get_initProc_lines(self.matrix_elements[0], 
 408                                      color_amplitudes) 
 409          replace_dict['reset_jamp_lines'] = \ 
 410              self.get_reset_jamp_lines(color_amplitudes) 
 411          replace_dict['sigmaKin_lines'] = \ 
 412              self.get_sigmaKin_lines(color_amplitudes) 
 413          replace_dict['sigmaHat_lines'] = \ 
 414              self.get_sigmaHat_lines() 
 415   
 416          replace_dict['all_sigmaKin'] = \ 
 417              self.get_all_sigmaKin_lines(color_amplitudes, 
 418                                          'CPPProcess') 
 419   
 420          file = self.read_template_file(self.process_definition_template) %\ 
 421                 replace_dict 
 422   
 423          return file 
 424   
 425      def get_process_name(self): 
 426          """Return process file name for the process in matrix_element""" 
 427   
 428          process_string = self.process_string 
 429   
 430          # Extract process number 
 431          proc_number_pattern = re.compile("^(.+)@\s*(\d+)\s*(.*)$") 
 432          proc_number_re = proc_number_pattern.match(process_string) 
 433          proc_number = 0 
 434          if proc_number_re: 
 435              proc_number = int(proc_number_re.group(2)) 
 436              process_string = proc_number_re.group(1) + \ 
 437                               proc_number_re.group(3) 
 438   
 439          # Remove order information 
 440          order_pattern = re.compile("^(.+)\s+(\w+)\s*=\s*(\d+)\s*$") 
 441          order_re = order_pattern.match(process_string) 
 442          while order_re: 
 443              process_string = order_re.group(1) 
 444              order_re = order_pattern.match(process_string) 
 445   
 446          process_string = process_string.replace(' ', '') 
 447          process_string = process_string.replace('>', '_') 
 448          process_string = process_string.replace('+', 'p') 
 449          process_string = process_string.replace('-', 'm') 
 450          process_string = process_string.replace('~', 'x') 
 451          process_string = process_string.replace('/', '_no_') 
 452          process_string = process_string.replace('$', '_nos_') 
 453          process_string = process_string.replace('|', '_or_') 
 454          if proc_number != 0: 
 455              process_string = "%d_%s" % (proc_number, process_string) 
 456   
 457          process_string = "Sigma_%s_%s" % (self.model_name, 
 458                                            process_string) 
 459          return process_string 
 460   
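    # Editor's worked example (sketch, not part of the module): given the process
    # string "u u~ > d d~ @2 QED=0", the rules above strip "@2" and the "QED=0"
    # coupling order, then map '>' -> '_' and '~' -> 'x', so for a model named 'sm'
    # the returned name is "Sigma_sm_2_uux_ddx".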
 461      def get_process_info_lines(self, matrix_element): 
 462          """Return info lines describing the processes for this matrix element""" 
 463   
 464          return "\n".join(["# " + process.nice_string().replace('\n', '\n# * ') \ 
 465                            for process in matrix_element.get('processes')]) 
 466   
 467   
 468      def get_initProc_lines(self, matrix_element, color_amplitudes): 
 469          """Get initProc_lines for function definition for Pythia 8 .cc file""" 
 470   
 471          initProc_lines = [] 
 472   
 473          initProc_lines.append("// Set external particle masses for this matrix element") 
 474   
 475          for part in matrix_element.get_external_wavefunctions(): 
 476              initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass')) 
 477          for i, colamp in enumerate(color_amplitudes): 
 478              initProc_lines.append("jamp2[%d] = new double[%d];" % \ 
 479                                    (i, len(colamp))) 
 480   
 481          return "\n".join(initProc_lines) 
 482   
 483      def get_reset_jamp_lines(self, color_amplitudes): 
 484          """Get lines to reset jamps""" 
 485   
 486          ret_lines = "" 
 487          for icol, col_amp in enumerate(color_amplitudes): 
 488              ret_lines += """for(int i=0;i < %(ncolor)d; i++) 
 489              jamp2[%(proc_number)d][i]=0.;\n""" % \ 
 490              {"ncolor": len(col_amp), "proc_number": icol} 
 491          return ret_lines 
 492   
 493   
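    # Editor's example (sketch, not part of the module): for two color flows in
    # subprocess 0 and one in subprocess 1, the generated C++ reads (up to whitespace):
    #
    #     for(int i=0;i < 2; i++)
    #       jamp2[0][i]=0.;
    #     for(int i=0;i < 1; i++)
    #       jamp2[1][i]=0.;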
494 - def get_calculate_wavefunctions(self, wavefunctions, amplitudes, write=True):
495 """Return the lines for optimized calculation of the 496 wavefunctions for all subprocesses""" 497 498 replace_dict = {} 499 500 replace_dict['nwavefuncs'] = len(wavefunctions) 501 502 #ensure no recycling of wavefunction ! incompatible with some output 503 for me in self.matrix_elements: 504 me.restore_original_wavefunctions() 505 506 replace_dict['wavefunction_calls'] = "\n".join(\ 507 self.helas_call_writer.get_wavefunction_calls(\ 508 helas_objects.HelasWavefunctionList(wavefunctions))) 509 510 replace_dict['amplitude_calls'] = "\n".join(\ 511 self.helas_call_writer.get_amplitude_calls(amplitudes)) 512 513 if write: 514 file = self.read_template_file(self.process_wavefunction_template) % \ 515 replace_dict 516 return file 517 else: 518 return replace_dict
519 520
521 - def get_sigmaKin_lines(self, color_amplitudes, write=True):
522 """Get sigmaKin_lines for function definition for Pythia 8 .cc file""" 523 524 525 if self.single_helicities: 526 replace_dict = {} 527 528 # Number of helicity combinations 529 replace_dict['ncomb'] = \ 530 self.matrix_elements[0].get_helicity_combinations() 531 532 # Process name 533 replace_dict['process_class_name'] = self.process_name 534 535 # Particle ids for the call to setupForME 536 replace_dict['id1'] = self.processes[0].get('legs')[0].get('id') 537 replace_dict['id2'] = self.processes[0].get('legs')[1].get('id') 538 539 # Extract helicity matrix 540 replace_dict['helicity_matrix'] = \ 541 self.get_helicity_matrix(self.matrix_elements[0]) 542 543 # Extract denominator 544 den_factors = [str(me.get_denominator_factor()) for me in \ 545 self.matrix_elements] 546 if self.nprocesses != len(self.matrix_elements): 547 den_factors.extend(den_factors) 548 replace_dict['den_factors'] = ",".join(den_factors) 549 replace_dict['get_matrix_t_lines'] = "\n".join( 550 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \ 551 {"iproc": i, "proc_name": \ 552 me.get('processes')[0].shell_string().replace("0_", "")} \ 553 for i, me in enumerate(self.matrix_elements)]) 554 555 # Generate lines for mirror matrix element calculation 556 mirror_matrix_lines = "" 557 558 if any([m.get('has_mirror_process') for m in self.matrix_elements]): 559 mirror_matrix_lines += \ 560 """ // Mirror initial state momenta for mirror process 561 perm[0]=1; 562 perm[1]=0; 563 // Calculate wavefunctions 564 calculate_wavefunctions(perm, helicities[ihel]); 565 // Mirror back 566 perm[0]=0; 567 perm[1]=1; 568 // Calculate matrix elements 569 """ 570 571 mirror_matrix_lines += "\n".join( 572 ["t[%(iproc)d]=matrix_%(proc_name)s();" % \ 573 {"iproc": i + len(self.matrix_elements), "proc_name": \ 574 me.get('processes')[0].shell_string().replace("0_", "")} \ 575 for i, me in enumerate(self.matrix_elements) if me.get('has_mirror_process')]) 576 577 replace_dict['get_mirror_matrix_lines'] = mirror_matrix_lines 578 579 if write: 580 file = \ 581 self.read_template_file(\ 582 self.process_sigmaKin_function_template) %\ 583 replace_dict 584 return file 585 else: 586 return replace_dict 587 else: 588 ret_lines = "// Call the individual sigmaKin for each process\n" 589 ret_lines = ret_lines + \ 590 "\n".join(["sigmaKin_%s();" % \ 591 me.get('processes')[0].shell_string().\ 592 replace("0_", "") for \ 593 me in self.matrix_elements]) 594 if write: 595 return ret_lines 596 else: 597 replace_dict['get_mirror_matrix_lines'] = ret_lines 598 return replace_dict
599
600 - def get_all_sigmaKin_lines(self, color_amplitudes, class_name):
601 """Get sigmaKin_process for all subprocesses for Pythia 8 .cc file""" 602 603 ret_lines = [] 604 if self.single_helicities: 605 ret_lines.append(\ 606 "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \ 607 class_name) 608 ret_lines.append("// Calculate wavefunctions for all processes") 609 ret_lines.append(self.get_calculate_wavefunctions(\ 610 self.wavefunctions, self.amplitudes)) 611 ret_lines.append("}") 612 else: 613 ret_lines.extend([self.get_sigmaKin_single_process(i, me) \ 614 for i, me in enumerate(self.matrix_elements)]) 615 ret_lines.extend([self.get_matrix_single_process(i, me, 616 color_amplitudes[i], 617 class_name) \ 618 for i, me in enumerate(self.matrix_elements)]) 619 return "\n".join(ret_lines)
620 621
622 - def get_sigmaKin_single_process(self, i, matrix_element, write=True):
623 """Write sigmaKin for each process""" 624 625 # Write sigmaKin for the process 626 627 replace_dict = {} 628 629 # Process name 630 replace_dict['proc_name'] = \ 631 matrix_element.get('processes')[0].shell_string().replace("0_", "") 632 633 # Process name 634 replace_dict['process_class_name'] = self.process_name 635 636 # Process number 637 replace_dict['proc_number'] = i 638 639 # Number of helicity combinations 640 replace_dict['ncomb'] = matrix_element.get_helicity_combinations() 641 642 # Extract helicity matrix 643 replace_dict['helicity_matrix'] = \ 644 self.get_helicity_matrix(matrix_element) 645 # Extract denominator 646 replace_dict['den_factor'] = matrix_element.get_denominator_factor() 647 648 if write: 649 file = \ 650 self.read_template_file('cpp_process_sigmaKin_subproc_function.inc') %\ 651 replace_dict 652 return file 653 else: 654 return replace_dict
655
656 - def get_matrix_single_process(self, i, matrix_element, color_amplitudes, 657 class_name, write=True):
658 """Write matrix() for each process""" 659 660 # Write matrix() for the process 661 662 replace_dict = {} 663 664 # Process name 665 replace_dict['proc_name'] = \ 666 matrix_element.get('processes')[0].shell_string().replace("0_", "") 667 668 669 # Wavefunction and amplitude calls 670 if self.single_helicities: 671 replace_dict['matrix_args'] = "" 672 replace_dict['all_wavefunction_calls'] = "int i, j;" 673 else: 674 replace_dict['matrix_args'] = "const int hel[]" 675 wavefunctions = matrix_element.get_all_wavefunctions() 676 replace_dict['all_wavefunction_calls'] = \ 677 """const int nwavefuncs = %d; 678 std::complex<double> w[nwavefuncs][18]; 679 """ % len(wavefunctions)+ \ 680 self.get_calculate_wavefunctions(wavefunctions, []) 681 682 # Process name 683 replace_dict['process_class_name'] = class_name 684 685 # Process number 686 replace_dict['proc_number'] = i 687 688 # Number of color flows 689 replace_dict['ncolor'] = len(color_amplitudes) 690 691 replace_dict['ngraphs'] = matrix_element.get_number_of_amplitudes() 692 693 # Extract color matrix 694 replace_dict['color_matrix_lines'] = \ 695 self.get_color_matrix_lines(matrix_element) 696 697 698 replace_dict['jamp_lines'] = self.get_jamp_lines(color_amplitudes) 699 700 701 #specific exporter hack 702 replace_dict = self.get_class_specific_definition_matrix(replace_dict, matrix_element) 703 704 if write: 705 file = self.read_template_file(self.single_process_template) % \ 706 replace_dict 707 return file 708 else: 709 return replace_dict
710
 711      def get_class_specific_definition_matrix(self, converter, matrix_element): 
 712          """Hook allowing a given exporter to add specific tweaks to the 
 713          replacement dictionary. Subclasses should always call super().""" 
 714   
 715          return converter 
 716   
717 - def get_sigmaHat_lines(self):
718 """Get sigmaHat_lines for function definition for Pythia 8 .cc file""" 719 720 # Create a set with the pairs of incoming partons 721 beams = set([(process.get('legs')[0].get('id'), 722 process.get('legs')[1].get('id')) \ 723 for process in self.processes]) 724 beams = sorted(list(beams)) 725 res_lines = [] 726 727 # Write a selection routine for the different processes with 728 # the same beam particles 729 res_lines.append("// Select between the different processes") 730 for ibeam, beam_parts in enumerate(beams): 731 732 if ibeam == 0: 733 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts) 734 else: 735 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts) 736 737 # Pick out all processes with this beam pair 738 beam_processes = [(i, me) for (i, me) in \ 739 enumerate(self.matrix_elements) if beam_parts in \ 740 [(process.get('legs')[0].get('id'), 741 process.get('legs')[1].get('id')) \ 742 for process in me.get('processes')]] 743 744 # Add mirror processes, 745 beam_processes.extend([(len(self.matrix_elements) + i, me) for (i, me) in \ 746 enumerate(self.matrix_elements) if beam_parts in \ 747 [(process.get('legs')[0].get('id'), 748 process.get('legs')[1].get('id')) \ 749 for process in me.get_mirror_processes()]]) 750 751 # Now add matrix elements for the processes with the right factors 752 res_lines.append("// Add matrix elements for processes with beams %s" % \ 753 repr(beam_parts)) 754 res_lines.append("return %s;" % \ 755 ("+".join(["matrix_element[%i]*%i" % \ 756 (i, len([proc for proc in \ 757 me.get('processes') if beam_parts == \ 758 (proc.get('legs')[0].get('id'), 759 proc.get('legs')[1].get('id')) or \ 760 me.get('has_mirror_process') and \ 761 beam_parts == \ 762 (proc.get('legs')[1].get('id'), 763 proc.get('legs')[0].get('id'))])) \ 764 for (i, me) in beam_processes]).\ 765 replace('*1', ''))) 766 res_lines.append("}") 767 768 769 res_lines.append("else {") 770 res_lines.append("// Return 0 if not correct initial state assignment") 771 res_lines.append(" return 0.;}") 772 773 return "\n".join(res_lines)
774 775
 776      def get_helicity_matrix(self, matrix_element): 
 777          """Return the Helicity matrix definition lines for this matrix element""" 
 778   
 779          helicity_line = "static const int helicities[ncomb][nexternal] = {" 
 780          helicity_line_list = [] 
 781   
 782          for helicities in matrix_element.get_helicity_matrix(allow_reverse=False): 
 783              helicity_line_list.append("{" + ",".join(['%d'] * len(helicities)) % \ 
 784                                        tuple(helicities) + "}") 
 785   
 786          return helicity_line + ",".join(helicity_line_list) + "};" 
 787   
 788      def get_den_factor_line(self, matrix_element): 
 789          """Return the denominator factor line for this matrix element""" 
 790   
 791          return "const int denominator = %d;" % \ 
 792              matrix_element.get_denominator_factor() 
 793   
794 - def get_color_matrix_lines(self, matrix_element):
795 """Return the color matrix definition lines for this matrix element. Split 796 rows in chunks of size n.""" 797 798 if not matrix_element.get('color_matrix'): 799 return "\n".join(["static const double denom[1] = {1.};", 800 "static const double cf[1][1] = {1.};"]) 801 else: 802 color_denominators = matrix_element.get('color_matrix').\ 803 get_line_denominators() 804 denom_string = "static const double denom[ncolor] = {%s};" % \ 805 ",".join(["%i" % denom for denom in color_denominators]) 806 807 matrix_strings = [] 808 my_cs = color.ColorString() 809 for index, denominator in enumerate(color_denominators): 810 # Then write the numerators for the matrix elements 811 num_list = matrix_element.get('color_matrix').\ 812 get_line_numerators(index, denominator) 813 814 matrix_strings.append("{%s}" % \ 815 ",".join(["%d" % i for i in num_list])) 816 matrix_string = "static const double cf[ncolor][ncolor] = {" + \ 817 ",".join(matrix_strings) + "};" 818 return "\n".join([denom_string, matrix_string])
819 820 821 822 823 824
825 - def get_jamp_lines(self, color_amplitudes):
826 """Return the jamp = sum(fermionfactor * amp[i]) lines""" 827 828 res_list = [] 829 830 for i, coeff_list in enumerate(color_amplitudes): 831 832 res = "jamp[%i]=" % i 833 834 # Optimization: if all contributions to that color basis element have 835 # the same coefficient (up to a sign), put it in front 836 list_fracs = [abs(coefficient[0][1]) for coefficient in coeff_list] 837 common_factor = False 838 diff_fracs = list(set(list_fracs)) 839 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 840 common_factor = True 841 global_factor = diff_fracs[0] 842 res = res + '%s(' % coeff(1, global_factor, False, 0) 843 844 for (coefficient, amp_number) in coeff_list: 845 if common_factor: 846 res = res + "%samp[%d]" % (coeff(coefficient[0], 847 coefficient[1] / abs(coefficient[1]), 848 coefficient[2], 849 coefficient[3]), 850 amp_number - 1) 851 else: 852 res = res + "%samp[%d]" % (coeff(coefficient[0], 853 coefficient[1], 854 coefficient[2], 855 coefficient[3]), 856 amp_number - 1) 857 858 if common_factor: 859 res = res + ')' 860 861 res += ';' 862 863 res_list.append(res) 864 865 return "\n".join(res_list)
866
 867   
 868  class OneProcessExporterMatchbox(OneProcessExporterCPP): 
 869      """Class to take care of exporting a set of matrix elements to 
 870      Matchbox format.""" 
 871   
 872      # Static variables (for inheritance) 
 873      process_class_template = 'matchbox_class.inc' 
 874      single_process_template = 'matchbox_matrix.inc' 
 875      process_definition_template = 'matchbox_function_definitions.inc' 
 876   
 877      def get_initProc_lines(self, matrix_element, color_amplitudes): 
 878          """Get initProc_lines for function definition for Pythia 8 .cc file""" 
 879   
 880          initProc_lines = [] 
 881   
 882          initProc_lines.append("// Set external particle masses for this matrix element") 
 883   
 884          for part in matrix_element.get_external_wavefunctions(): 
 885              initProc_lines.append("mME.push_back(pars->%s);" % part.get('mass')) 
 886          return "\n".join(initProc_lines) 
 887   
 888   
 889      def get_class_specific_definition_matrix(self, converter, matrix_element): 
 890          """Add the Matchbox color string lines (T(...) structures) to the 
 891          replacement dictionary.""" 
 892          converter = super(OneProcessExporterMatchbox, self).get_class_specific_definition_matrix(converter, matrix_element) 
 893   
 894          # T(....) 
 895          converter['color_sting_lines'] = \ 
 896              self.get_color_string_lines(matrix_element) 
 897   
 898          return converter 
 899   
 900      def get_all_sigmaKin_lines(self, color_amplitudes, class_name): 
 901          """Get sigmaKin_process for all subprocesses for Matchbox .cc file""" 
 902   
 903          ret_lines = [] 
 904          if self.single_helicities: 
 905              ret_lines.append(\ 
 906                  "void %s::calculate_wavefunctions(const int perm[], const int hel[]){" % \ 
 907                  class_name) 
 908              ret_lines.append("// Calculate wavefunctions for all processes") 
 909              ret_lines.append(self.get_calculate_wavefunctions(\ 
 910                  self.wavefunctions, self.amplitudes)) 
 911              ret_lines.append(self.get_jamp_lines(color_amplitudes[0])) 
 912              ret_lines.append("}") 
 913          else: 
 914              ret_lines.extend([self.get_sigmaKin_single_process(i, me) \ 
 915                                for i, me in enumerate(self.matrix_elements)]) 
 916              ret_lines.extend([self.get_matrix_single_process(i, me, 
 917                                                               color_amplitudes[i], 
 918                                                               class_name) \ 
 919                                for i, me in enumerate(self.matrix_elements)]) 
 920          return "\n".join(ret_lines) 
 921   
 922   
923 - def get_color_string_lines(self, matrix_element):
924 """Return the color matrix definition lines for this matrix element. Split 925 rows in chunks of size n.""" 926 927 if not matrix_element.get('color_matrix'): 928 return "\n".join(["static const double res[1][1] = {-1.};"]) 929 930 #start the real work 931 color_denominators = matrix_element.get('color_matrix').\ 932 get_line_denominators() 933 matrix_strings = [] 934 my_cs = color.ColorString() 935 936 for i_color in range(len(color_denominators)): 937 # Then write the numerators for the matrix elements 938 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 939 t_str=repr(my_cs) 940 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 941 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 942 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 943 all_matches = t_match.findall(t_str) 944 tmp_color = [] 945 for match in all_matches: 946 ctype, arg = match[0], [m.strip() for m in match[1].split(',')] 947 if ctype not in ['T', 'Tr']: 948 raise self.ProcessExporterCPPError('Color Structure not handle by Matchbox') 949 tmp_color.append(arg) 950 #compute the maximal size of the vector 951 nb_index = sum(len(o) for o in tmp_color) 952 max_len = nb_index + (nb_index//2) -1 953 #create the list with the 0 separator 954 curr_color = tmp_color[0] 955 for tcolor in tmp_color[1:]: 956 curr_color += ['0'] + tcolor 957 curr_color += ['0'] * (max_len- len(curr_color)) 958 #format the output 959 matrix_strings.append('{%s}' % ','.join(curr_color)) 960 961 matrix_string = 'static const double res[%s][%s] = {%s};' % \ 962 (len(color_denominators), max_len, ",".join(matrix_strings)) 963 964 return matrix_string
965
966 967 #=============================================================================== 968 # ProcessExporterPythia8 969 #=============================================================================== 970 -class OneProcessExporterPythia8(OneProcessExporterCPP):
971 """Class to take care of exporting a set of matrix elements to 972 Pythia 8 format.""" 973 974 # Static variables (for inheritance) 975 process_template_h = 'pythia8_process_h.inc' 976 process_template_cc = 'pythia8_process_cc.inc' 977 process_class_template = 'pythia8_process_class.inc' 978 process_definition_template = 'pythia8_process_function_definitions.inc' 979 process_wavefunction_template = 'pythia8_process_wavefunctions.inc' 980 process_sigmaKin_function_template = 'pythia8_process_sigmaKin_function.inc' 981 template_path = os.path.join(_file_path, 'iolibs', 'template_files', 'pythia8') 982 983
984 - def __init__(self, *args, **opts):
985 """Set process class name""" 986 987 if 'version' in opts: 988 self.version = opts['version'] 989 del opts['version'] 990 else: 991 self.version='8.2' 992 super(OneProcessExporterPythia8, self).__init__(*args, **opts) 993 994 # Check if any processes are not 2->1,2,3 995 for me in self.matrix_elements: 996 if me.get_nexternal_ninitial() not in [(3,2),(4,2),(5,2)]: 997 nex,nin = me.get_nexternal_ninitial() 998 raise InvalidCmd("Pythia 8 can only handle 2->1,2,3 processes, not %d->%d" % \ 999 (nin,nex-nin)) 1000 1001 self.process_class = self.process_name
1002 1003 # Methods for generation of process files for Pythia 8 1004
1005 - def get_default_converter(self):
1006 1007 replace_dict = {} 1008 # Extract model name 1009 replace_dict['model_name'] = self.model_name 1010 if self.version =="8.2": 1011 replace_dict['include_prefix'] = 'Pythia8/' 1012 else: 1013 replace_dict['include_prefix'] = '' 1014 1015 replace_dict['version'] = self.version 1016 1017 return replace_dict
1018 #=========================================================================== 1019 # Process export helper functions 1020 #===========================================================================
1021 - def get_process_class_definitions(self, write=True):
1022 """The complete Pythia 8 class definition for the process""" 1023 1024 replace_dict = self.get_default_converter() 1025 1026 1027 # Extract process info lines for all processes 1028 process_lines = "\n".join([self.get_process_info_lines(me) for me in \ 1029 self.matrix_elements]) 1030 1031 replace_dict['process_lines'] = process_lines 1032 1033 # Extract number of external particles 1034 replace_dict['nfinal'] = self.nfinal 1035 1036 # Extract process class name (for the moment same as file name) 1037 replace_dict['process_class_name'] = self.process_name 1038 1039 # Extract process definition 1040 process_definition = "%s (%s)" % (self.process_string, 1041 self.model_name) 1042 replace_dict['process_definition'] = process_definition 1043 1044 process = self.processes[0] 1045 replace_dict['process_code'] = 10000 + \ 1046 100*process.get('id') + \ 1047 self.process_number 1048 1049 replace_dict['inFlux'] = self.get_process_influx() 1050 1051 replace_dict['id_masses'] = self.get_id_masses(process) 1052 replace_dict['resonances'] = self.get_resonance_lines() 1053 1054 replace_dict['nexternal'] = self.nexternal 1055 replace_dict['nprocesses'] = self.nprocesses 1056 1057 if self.single_helicities: 1058 replace_dict['all_sigma_kin_definitions'] = \ 1059 """// Calculate wavefunctions 1060 void calculate_wavefunctions(const int perm[], const int hel[]); 1061 static const int nwavefuncs = %d; 1062 std::complex<double> w[nwavefuncs][18]; 1063 static const int namplitudes = %d; 1064 std::complex<double> amp[namplitudes];""" % \ 1065 (len(self.wavefunctions), 1066 len(self.amplitudes.get_all_amplitudes())) 1067 replace_dict['all_matrix_definitions'] = \ 1068 "\n".join(["double matrix_%s();" % \ 1069 me.get('processes')[0].shell_string().\ 1070 replace("0_", "") \ 1071 for me in self.matrix_elements]) 1072 1073 else: 1074 replace_dict['all_sigma_kin_definitions'] = \ 1075 "\n".join(["void sigmaKin_%s();" % \ 1076 me.get('processes')[0].shell_string().\ 1077 replace("0_", "") \ 1078 for me in self.matrix_elements]) 1079 replace_dict['all_matrix_definitions'] = \ 1080 "\n".join(["double matrix_%s(const int hel[]);" % \ 1081 me.get('processes')[0].shell_string().\ 1082 replace("0_", "") \ 1083 for me in self.matrix_elements]) 1084 1085 if write: 1086 file = self.read_template_file('pythia8_process_class.inc') % replace_dict 1087 return file 1088 else: 1089 return replace_dict
1090
1091 - def get_process_function_definitions(self, write=True):
1092 """The complete Pythia 8 class definition for the process""" 1093 1094 1095 replace_dict = self.get_default_converter() 1096 1097 # Extract process info lines 1098 replace_dict['process_lines'] = \ 1099 "\n".join([self.get_process_info_lines(me) for \ 1100 me in self.matrix_elements]) 1101 1102 # Extract process class name (for the moment same as file name) 1103 replace_dict['process_class_name'] = self.process_name 1104 1105 color_amplitudes = [me.get_color_amplitudes() for me in \ 1106 self.matrix_elements] 1107 1108 replace_dict['initProc_lines'] = \ 1109 self.get_initProc_lines(color_amplitudes) 1110 replace_dict['reset_jamp_lines'] = \ 1111 self.get_reset_jamp_lines(color_amplitudes) 1112 replace_dict['sigmaKin_lines'] = \ 1113 self.get_sigmaKin_lines(color_amplitudes) 1114 replace_dict['sigmaHat_lines'] = \ 1115 self.get_sigmaHat_lines() 1116 1117 replace_dict['setIdColAcol_lines'] = \ 1118 self.get_setIdColAcol_lines(color_amplitudes) 1119 1120 replace_dict['weightDecay_lines'] = \ 1121 self.get_weightDecay_lines() 1122 1123 replace_dict['all_sigmaKin'] = \ 1124 self.get_all_sigmaKin_lines(color_amplitudes, 1125 self.process_name) 1126 if write: 1127 file = self.read_template_file('pythia8_process_function_definitions.inc') %\ 1128 replace_dict 1129 return file 1130 else: 1131 return replace_dict
1132
1133 - def get_process_influx(self):
1134 """Return process file name for the process in matrix_element""" 1135 1136 # Create a set with the pairs of incoming partons in definite order, 1137 # e.g., g g >... u d > ... d~ u > ... gives ([21,21], [1,2], [-2,1]) 1138 beams = set([tuple(sorted([process.get('legs')[0].get('id'), 1139 process.get('legs')[1].get('id')])) \ 1140 for process in self.processes]) 1141 1142 # Define a number of useful sets 1143 antiquarks = list(range(-1, -6, -1)) 1144 quarks = list(range(1,6)) 1145 antileptons = list(range(-11, -17, -1)) 1146 leptons = list(range(11, 17, 1)) 1147 allquarks = antiquarks + quarks 1148 antifermions = antiquarks + antileptons 1149 fermions = quarks + leptons 1150 allfermions = allquarks + antileptons + leptons 1151 downfermions = list(range(-2, -5, -2)) + list(range(-1, -5, -2)) + \ 1152 list(range(-12, -17, -2)) + list(range(-11, -17, -2)) 1153 upfermions = list(range(1, 5, 2)) + list(range(2, 5, 2)) + \ 1154 list(range(11, 17, 2)) + list(range(12, 17, 2)) 1155 1156 # The following gives a list from flavor combinations to "inFlux" values 1157 # allowed by Pythia8, see Pythia 8 document SemiInternalProcesses.html 1158 set_tuples = [(set([(21, 21)]), "gg"), 1159 (set(list(itertools.product(allquarks, [21]))), "qg"), 1160 (set(zip(antiquarks, quarks)), "qqbarSame"), 1161 (set(list(itertools.product(allquarks, 1162 allquarks))), "qq"), 1163 (set(zip(antifermions, fermions)),"ffbarSame"), 1164 (set(zip(downfermions, upfermions)),"ffbarChg"), 1165 (set(list(itertools.product(allfermions, 1166 allfermions))), "ff"), 1167 (set(list(itertools.product(allfermions, [22]))), "fgm"), 1168 (set([(21, 22)]), "ggm"), 1169 (set([(22, 22)]), "gmgm")] 1170 1171 for set_tuple in set_tuples: 1172 if beams.issubset(set_tuple[0]): 1173 return set_tuple[1] 1174 1175 raise InvalidCmd('Pythia 8 cannot handle incoming flavors %s' %\ 1176 repr(beams)) 1177 1178 return
1179 1180 #=============================================================================== 1181 # Global helper methods 1182 #=============================================================================== 1183 @classmethod
1184      def read_template_file(cls, filename): 
1185          """Open a template file and return the contents.""" 
1186   
1187          try: 
1188              return super(OneProcessExporterPythia8, cls).read_template_file(filename) 
1189          except: 
1190              return super(OneProcessExporterPythia8, cls).read_template_file(filename, classpath=True) 
1191   
1192   
1193 - def get_id_masses(self, process):
1194 """Return the lines which define the ids for the final state particles, 1195 for the Pythia phase space""" 1196 1197 if self.nfinal == 1: 1198 return "" 1199 1200 mass_strings = [] 1201 for i in range(2, len(process.get_legs_with_decays())): 1202 if self.model.get_particle(process.get_legs_with_decays()[i].get('id')).\ 1203 get('mass') not in ['zero', 'ZERO']: 1204 mass_strings.append("int id%dMass() const {return %d;}" % \ 1205 (i + 1, abs(process.get_legs_with_decays()[i].get('id')))) 1206 1207 return "\n".join(mass_strings)
1208
1209 - def get_resonance_lines(self):
1210 """Return the lines which define the ids for intermediate resonances 1211 for the Pythia phase space""" 1212 1213 if self.nfinal == 1: 1214 return "virtual int resonanceA() const {return %d;}" % \ 1215 abs(self.processes[0].get('legs')[2].get('id')) 1216 1217 res_strings = [] 1218 res_letters = ['A', 'B'] 1219 1220 sids, singleres, schannel = self.get_resonances() 1221 1222 for i, sid in enumerate(sids[:2]): 1223 res_strings.append("virtual int resonance%s() const {return %d;}"\ 1224 % (res_letters[i], sid)) 1225 1226 if schannel: 1227 res_strings.append("virtual bool isSChannel() const {return true;}") 1228 1229 if singleres != 0: 1230 res_strings.append("virtual int idSChannel() const {return %d;}" \ 1231 % singleres) 1232 1233 return "\n".join(res_strings)
1234
1235 - def get_resonances(self):
1236 """Return the PIDs for any resonances in 2->2 and 2->3 processes.""" 1237 1238 model = self.matrix_elements[0].get('processes')[0].get('model') 1239 new_pdg = model.get_first_non_pdg() 1240 # Get a list of all resonant s-channel contributions 1241 diagrams = sum([me.get('diagrams') for me in self.matrix_elements], []) 1242 resonances = [] 1243 no_t_channels = True 1244 final_s_channels = [] 1245 for diagram in diagrams: 1246 schannels, tchannels = diagram.get('amplitudes')[0].\ 1247 get_s_and_t_channels(self.ninitial, model, 1248 new_pdg) 1249 for schannel in schannels: 1250 sid = schannel.get('legs')[-1].get('id') 1251 part = self.model.get_particle(sid) 1252 if part: 1253 width = self.model.get_particle(sid).get('width') 1254 if width.lower() != 'zero': 1255 # Only care about absolute value of resonance PIDs: 1256 resonances.append(abs(sid)) 1257 else: 1258 sid = 0 1259 if len(tchannels) == 1 and schannel == schannels[-1]: 1260 final_s_channels.append(abs(sid)) 1261 1262 if len(tchannels) > 1: 1263 # There are t-channel diagrams 1264 no_t_channels = False 1265 1266 resonance_set = set(resonances) 1267 final_s_set = set(final_s_channels) 1268 1269 singleres = 0 1270 # singleres is set if all diagrams have the same final resonance 1271 if len(final_s_channels) == len(diagrams) and len(final_s_set) == 1 \ 1272 and final_s_channels[0] != 0: 1273 singleres = final_s_channels[0] 1274 1275 resonance_set = list(set([pid for pid in resonance_set])) 1276 1277 # schannel is True if all diagrams are pure s-channel and there are 1278 # no QCD vertices 1279 schannel = no_t_channels and \ 1280 not any(['QCD' in d.calculate_orders() for d in diagrams]) 1281 1282 return resonance_set, singleres, schannel
1283
1284 - def get_initProc_lines(self, color_amplitudes):
1285 """Get initProc_lines for function definition for Pythia 8 .cc file""" 1286 1287 initProc_lines = [] 1288 1289 initProc_lines.append("// Set massive/massless matrix elements for c/b/mu/tau") 1290 # Add lines to set c/b/tau/mu kinematics massive/massless 1291 if not self.model.get_particle(4) or \ 1292 self.model.get_particle(4).get('mass').lower() == 'zero': 1293 cMassiveME = "0." 1294 else: 1295 cMassiveME = "particleDataPtr->m0(4)" 1296 initProc_lines.append("mcME = %s;" % cMassiveME) 1297 if not self.model.get_particle(5) or \ 1298 self.model.get_particle(5).get('mass').lower() == 'zero': 1299 bMassiveME = "0." 1300 else: 1301 bMassiveME = "particleDataPtr->m0(5)" 1302 initProc_lines.append("mbME = %s;" % bMassiveME) 1303 if not self.model.get_particle(13) or \ 1304 self.model.get_particle(13).get('mass').lower() == 'zero': 1305 muMassiveME = "0." 1306 else: 1307 muMassiveME = "particleDataPtr->m0(13)" 1308 initProc_lines.append("mmuME = %s;" % muMassiveME) 1309 if not self.model.get_particle(15) or \ 1310 self.model.get_particle(15).get('mass').lower() == 'zero': 1311 tauMassiveME = "0." 1312 else: 1313 tauMassiveME = "particleDataPtr->m0(15)" 1314 initProc_lines.append("mtauME = %s;" % tauMassiveME) 1315 1316 for i, me in enumerate(self.matrix_elements): 1317 initProc_lines.append("jamp2[%d] = new double[%d];" % \ 1318 (i, len(color_amplitudes[i]))) 1319 1320 return "\n".join(initProc_lines)
1321
1322 - def get_setIdColAcol_lines(self, color_amplitudes):
1323 """Generate lines to set final-state id and color info for process""" 1324 1325 res_lines = [] 1326 1327 # Create a set with the pairs of incoming partons 1328 beams = set([(process.get('legs')[0].get('id'), 1329 process.get('legs')[1].get('id')) \ 1330 for process in self.processes]) 1331 beams = sorted(list(beams)) 1332 # Now write a selection routine for final state ids 1333 for ibeam, beam_parts in enumerate(beams): 1334 if ibeam == 0: 1335 res_lines.append("if(id1 == %d && id2 == %d){" % beam_parts) 1336 else: 1337 res_lines.append("else if(id1 == %d && id2 == %d){" % beam_parts) 1338 # Pick out all processes with this beam pair 1339 beam_processes = [(i, me) for (i, me) in \ 1340 enumerate(self.matrix_elements) if beam_parts in \ 1341 [(process.get('legs')[0].get('id'), 1342 process.get('legs')[1].get('id')) \ 1343 for process in me.get('processes')]] 1344 # Pick out all mirror processes for this beam pair 1345 beam_mirror_processes = [] 1346 if beam_parts[0] != beam_parts[1]: 1347 beam_mirror_processes = [(i, me) for (i, me) in \ 1348 enumerate(self.matrix_elements) if beam_parts in \ 1349 [(process.get('legs')[1].get('id'), 1350 process.get('legs')[0].get('id')) \ 1351 for process in me.get('processes')]] 1352 1353 final_id_list = [] 1354 final_mirror_id_list = [] 1355 for (i, me) in beam_processes: 1356 final_id_list.extend([tuple([l.get('id') for l in \ 1357 proc.get_legs_with_decays() if l.get('state')]) \ 1358 for proc in me.get('processes') \ 1359 if beam_parts == \ 1360 (proc.get('legs')[0].get('id'), 1361 proc.get('legs')[1].get('id'))]) 1362 for (i, me) in beam_mirror_processes: 1363 final_mirror_id_list.extend([tuple([l.get('id') for l in \ 1364 proc.get_legs_with_decays() if l.get('state')]) \ 1365 for proc in me.get_mirror_processes() \ 1366 if beam_parts == \ 1367 (proc.get('legs')[0].get('id'), 1368 proc.get('legs')[1].get('id'))]) 1369 final_id_list = set(final_id_list) 1370 final_mirror_id_list = set(final_mirror_id_list) 1371 1372 if final_id_list and final_mirror_id_list or \ 1373 not final_id_list and not final_mirror_id_list: 1374 raise self.ProcessExporterCPPError("Missing processes, or both process and mirror process") 1375 1376 1377 ncombs = len(final_id_list)+len(final_mirror_id_list) 1378 1379 res_lines.append("// Pick one of the flavor combinations %s" % \ 1380 ", ".join([repr(ids) for ids in final_id_list])) 1381 1382 me_weight = [] 1383 for final_ids in final_id_list: 1384 items = [(i, len([ p for p in me.get('processes') \ 1385 if [l.get('id') for l in \ 1386 p.get_legs_with_decays()] == \ 1387 list(beam_parts) + list(final_ids)])) \ 1388 for (i, me) in beam_processes] 1389 me_weight.append("+".join(["matrix_element[%i]*%i" % (i, l) for\ 1390 (i, l) in items if l > 0]).\ 1391 replace('*1', '')) 1392 if any([l>1 for (i, l) in items]): 1393 raise self.ProcessExporterCPPError("More than one process with identical " + \ 1394 "external particles is not supported") 1395 1396 for final_ids in final_mirror_id_list: 1397 items = [(i, len([ p for p in me.get_mirror_processes() \ 1398 if [l.get('id') for l in p.get_legs_with_decays()] == \ 1399 list(beam_parts) + list(final_ids)])) \ 1400 for (i, me) in beam_mirror_processes] 1401 me_weight.append("+".join(["matrix_element[%i]*%i" % \ 1402 (i+len(self.matrix_elements), l) for\ 1403 (i, l) in items if l > 0]).\ 1404 replace('*1', '')) 1405 if any([l>1 for (i, l) in items]): 1406 raise self.ProcessExporterCPPError("More than one process with identical " + \ 1407 "external particles is not supported") 1408 1409 
if final_id_list: 1410 res_lines.append("int flavors[%d][%d] = {%s};" % \ 1411 (ncombs, self.nfinal, 1412 ",".join(["{" + ",".join([str(id) for id \ 1413 in ids]) + "}" for ids \ 1414 in final_id_list]))) 1415 elif final_mirror_id_list: 1416 res_lines.append("int flavors[%d][%d] = {%s};" % \ 1417 (ncombs, self.nfinal, 1418 ",".join(["{" + ",".join([str(id) for id \ 1419 in ids]) + "}" for ids \ 1420 in final_mirror_id_list]))) 1421 res_lines.append("vector<double> probs;") 1422 res_lines.append("double sum = %s;" % "+".join(me_weight)) 1423 for me in me_weight: 1424 res_lines.append("probs.push_back(%s/sum);" % me) 1425 res_lines.append("int choice = rndmPtr->pick(probs);") 1426 for i in range(self.nfinal): 1427 res_lines.append("id%d = flavors[choice][%d];" % (i+3, i)) 1428 1429 res_lines.append("}") 1430 1431 res_lines.append("setId(%s);" % ",".join(["id%d" % i for i in \ 1432 range(1, self.nexternal + 1)])) 1433 1434 # Now write a selection routine for color flows 1435 1436 # We need separate selection for each flavor combination, 1437 # since the different processes might have different color 1438 # structures. 1439 1440 # Here goes the color connections corresponding to the JAMPs 1441 # Only one output, for the first subproc! 1442 1443 res_lines.append("// Pick color flow") 1444 1445 res_lines.append("int ncolor[%d] = {%s};" % \ 1446 (len(color_amplitudes), 1447 ",".join([str(len(colamp)) for colamp in \ 1448 color_amplitudes]))) 1449 1450 1451 for ime, me in enumerate(self.matrix_elements): 1452 1453 res_lines.append("if((%s)){" % \ 1454 ")||(".join(["&&".join(["id%d == %d" % \ 1455 (i+1, l.get('id')) for (i, l) in \ 1456 enumerate(p.get_legs_with_decays())])\ 1457 for p in me.get('processes')])) 1458 if ime > 0: 1459 res_lines[-1] = "else " + res_lines[-1] 1460 1461 proc = me.get('processes')[0] 1462 if not me.get('color_basis'): 1463 # If no color basis, just output trivial color flow 1464 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal)) 1465 else: 1466 # Else, build a color representation dictionnary 1467 repr_dict = {} 1468 legs = proc.get_legs_with_decays() 1469 for l in legs: 1470 repr_dict[l.get('number')] = \ 1471 proc.get('model').get_particle(l.get('id')).get_color() 1472 # Get the list of color flows 1473 color_flow_list = \ 1474 me.get('color_basis').color_flow_decomposition(\ 1475 repr_dict, self.ninitial) 1476 # Select a color flow 1477 ncolor = len(me.get('color_basis')) 1478 res_lines.append("""vector<double> probs; 1479 double sum = %s; 1480 for(int i=0;i<ncolor[%i];i++) 1481 probs.push_back(jamp2[%i][i]/sum); 1482 int ic = rndmPtr->pick(probs);""" % \ 1483 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \ 1484 in range(ncolor)]), ime, ime)) 1485 1486 color_flows = [] 1487 for color_flow_dict in color_flow_list: 1488 color_flows.append([int(fmod(color_flow_dict[l.get('number')][i], 500)) \ 1489 for (l,i) in itertools.product(legs, [0,1])]) 1490 1491 # Write out colors for the selected color flow 1492 res_lines.append("static int colors[%d][%d] = {%s};" % \ 1493 (ncolor, 2 * self.nexternal, 1494 ",".join(["{" + ",".join([str(id) for id \ 1495 in flows]) + "}" for flows \ 1496 in color_flows]))) 1497 1498 res_lines.append("setColAcol(%s);" % \ 1499 ",".join(["colors[ic][%d]" % i for i in \ 1500 range(2 * self.nexternal)])) 1501 res_lines.append('}') 1502 1503 # Same thing but for mirror processes 1504 for ime, me in enumerate(self.matrix_elements): 1505 if not me.get('has_mirror_process'): 1506 continue 1507 res_lines.append("else if((%s)){" % \ 1508 
")||(".join(["&&".join(["id%d == %d" % \ 1509 (i+1, l.get('id')) for (i, l) in \ 1510 enumerate(p.get_legs_with_decays())])\ 1511 for p in me.get_mirror_processes()])) 1512 1513 proc = me.get('processes')[0] 1514 if not me.get('color_basis'): 1515 # If no color basis, just output trivial color flow 1516 res_lines.append("setColAcol(%s);" % ",".join(["0"]*2*self.nfinal)) 1517 else: 1518 # Else, build a color representation dictionnary 1519 repr_dict = {} 1520 legs = proc.get_legs_with_decays() 1521 legs[0:2] = [legs[1],legs[0]] 1522 for l in legs: 1523 repr_dict[l.get('number')] = \ 1524 proc.get('model').get_particle(l.get('id')).get_color() 1525 # Get the list of color flows 1526 color_flow_list = \ 1527 me.get('color_basis').color_flow_decomposition(\ 1528 repr_dict, self.ninitial) 1529 # Select a color flow 1530 ncolor = len(me.get('color_basis')) 1531 res_lines.append("""vector<double> probs; 1532 double sum = %s; 1533 for(int i=0;i<ncolor[%i];i++) 1534 probs.push_back(jamp2[%i][i]/sum); 1535 int ic = rndmPtr->pick(probs);""" % \ 1536 ("+".join(["jamp2[%d][%d]" % (ime, i) for i \ 1537 in range(ncolor)]), ime, ime)) 1538 1539 color_flows = [] 1540 for color_flow_dict in color_flow_list: 1541 color_flows.append([color_flow_dict[l.get('number')][i] % 500 \ 1542 for (l,i) in itertools.product(legs, [0,1])]) 1543 1544 # Write out colors for the selected color flow 1545 res_lines.append("static int colors[%d][%d] = {%s};" % \ 1546 (ncolor, 2 * self.nexternal, 1547 ",".join(["{" + ",".join([str(id) for id \ 1548 in flows]) + "}" for flows \ 1549 in color_flows]))) 1550 1551 res_lines.append("setColAcol(%s);" % \ 1552 ",".join(["colors[ic][%d]" % i for i in \ 1553 range(2 * self.nexternal)])) 1554 res_lines.append('}') 1555 1556 return "\n".join(res_lines)
1557 1558
1559      def get_weightDecay_lines(self): 
1560          """Get weightDecay_lines for function definition for Pythia 8 .cc file""" 
1561   
1562          weightDecay_lines = "// Just use isotropic decay (default)\n" 
1563          weightDecay_lines += "return 1.;" 
1564   
1565          return weightDecay_lines 
1566   
1567      #=============================================================================== 
1568      # Routines to export/output UFO models in Pythia8 format 
1569      #=============================================================================== 
1570 - def convert_model_to_pythia8(self, model, pythia_dir):
1571 """Create a full valid Pythia 8 model from an MG5 model (coming from UFO)""" 1572 1573 if not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia.h'))\ 1574 and not os.path.isfile(os.path.join(pythia_dir, 'include', 'Pythia8', 'Pythia.h')): 1575 logger.warning('Directory %s is not a valid Pythia 8 main dir.' % pythia_dir) 1576 1577 # create the model parameter files 1578 model_builder = UFOModelConverterPythia8(model, pythia_dir, replace_dict=self.get_default_converter()) 1579 model_builder.cc_file_dir = "Processes_" + model_builder.model_name 1580 model_builder.include_dir = model_builder.cc_file_dir 1581 1582 model_builder.write_files() 1583 # Write makefile 1584 model_builder.write_makefile() 1585 # Write param_card 1586 model_builder.write_param_card() 1587 return model_builder.model_name, model_builder.cc_file_dir
1588
1589 1590 #=============================================================================== 1591 # ProcessExporterCPP 1592 #=============================================================================== 1593 -class ProcessExporterCPP(VirtualExporter):
1594 """Class to take care of exporting a set of matrix elements to 1595 Fortran (v4) format.""" 1596 1597 grouped_mode = False 1598 exporter = 'cpp' 1599 1600 default_opt = {'clean': False, 'complex_mass':False, 1601 'export_format':'madevent', 'mp': False, 1602 'v5_model': True 1603 } 1604 1605 oneprocessclass = OneProcessExporterCPP 1606 s= _file_path + 'iolibs/template_files/' 1607 from_template = {'src': [s+'rambo.h', s+'rambo.cc', s+'read_slha.h', s+'read_slha.cc'], 1608 'SubProcesses': [s+'check_sa.cpp']} 1609 to_link_in_P = ['check_sa.cpp', 'Makefile'] 1610 template_src_make = pjoin(_file_path, 'iolibs', 'template_files','Makefile_sa_cpp_src') 1611 template_Sub_make = template_src_make 1612 1613 1614
1615 - def __init__(self, dir_path = "", opt=None):
1616 """Initiate the ProcessExporterFortran with directory information""" 1617 self.mgme_dir = MG5DIR 1618 self.dir_path = dir_path 1619 self.model = None 1620 1621 self.opt = dict(self.default_opt) 1622 if opt: 1623 self.opt.update(opt) 1624 1625 #place holder to pass information to the run_interface 1626 self.proc_characteristic = banner_mod.ProcCharacteristic()
1627
1628 - def copy_template(self, model):
1629 """Prepare export_dir as standalone_cpp directory, including: 1630 src (for RAMBO, model and ALOHA files + makefile) 1631 lib (with compiled libraries from src) 1632 SubProcesses (with check_sa.cpp + makefile and Pxxxxx directories) 1633 """ 1634 1635 try: 1636 os.mkdir(self.dir_path) 1637 except os.error as error: 1638 logger.warning(error.strerror + " " + self.dir_path) 1639 1640 with misc.chdir(self.dir_path): 1641 logger.info('Creating subdirectories in directory %s' % self.dir_path) 1642 1643 for d in ['src', 'lib', 'Cards', 'SubProcesses']: 1644 try: 1645 os.mkdir(d) 1646 except os.error as error: 1647 logger.warning(error.strerror + " " + self.dir_path) 1648 1649 # Write param_card 1650 open(os.path.join("Cards","param_card.dat"), 'w').write(\ 1651 model.write_param_card()) 1652 1653 1654 # Copy the needed src files 1655 for key in self.from_template: 1656 for f in self.from_template[key]: 1657 cp(f, key) 1658 1659 # Copy src Makefile 1660 makefile = self.read_template_file('Makefile_sa_cpp_src') % \ 1661 {'model': self.get_model_name(model.get('name'))} 1662 open(os.path.join('src', 'Makefile'), 'w').write(makefile) 1663 1664 # Copy SubProcesses Makefile 1665 makefile = self.read_template_file('Makefile_sa_cpp_sp') % \ 1666 {'model': self.get_model_name(model.get('name'))} 1667 open(os.path.join('SubProcesses', 'Makefile'), 'w').write(makefile)
1668 1669 #=========================================================================== 1670 # Helper functions 1671 #===========================================================================
1672 - def modify_grouping(self, matrix_element):
1673 """allow to modify the grouping (if grouping is in place) 1674 return two value: 1675 - True/False if the matrix_element was modified 1676 - the new(or old) matrix element""" 1677 1678 return False, matrix_element
1679 1680 1681
1682 - def convert_model(self, model, wanted_lorentz = [], 1683 wanted_couplings = []):
1684 # create the model parameter files 1685 model_builder = UFOModelConverterCPP(model, 1686 os.path.join(self.dir_path, 'src'), 1687 wanted_lorentz, 1688 wanted_couplings) 1689 model_builder.write_files()
1690
1691 - def compile_model(self):
1692 make_model_cpp(self.dir_path)
1693 1694 @classmethod
1695 - def read_template_file(cls, *args, **opts):
1696 """Open a template file and return the contents.""" 1697 1698 return cls.oneprocessclass.read_template_file(*args, **opts)
1699 1700 #=============================================================================== 1701 # generate_subprocess_directory 1702 #===============================================================================
1703 - def generate_subprocess_directory(self, matrix_element, cpp_helas_call_writer, 1704 proc_number=None):
1705 """Generate the Pxxxxx directory for a subprocess in C++ standalone, 1706 including the necessary .h and .cc files""" 1707 1708 1709 process_exporter_cpp = self.oneprocessclass(matrix_element,cpp_helas_call_writer) 1710 1711 1712 # Create the directory PN_xx_xxxxx in the specified path 1713 dirpath = pjoin(self.dir_path, 'SubProcesses', "P%d_%s" % (process_exporter_cpp.process_number, 1714 process_exporter_cpp.process_name)) 1715 try: 1716 os.mkdir(dirpath) 1717 except os.error as error: 1718 logger.warning(error.strerror + " " + dirpath) 1719 1720 with misc.chdir(dirpath): 1721 logger.info('Creating files in directory %s' % dirpath) 1722 process_exporter_cpp.path = dirpath 1723 # Create the process .h and .cc files 1724 process_exporter_cpp.generate_process_files() 1725 for file in self.to_link_in_P: 1726 ln('../%s' % file) 1727 return
1728 1729 @staticmethod
1730 - def get_model_name(name):
1731 """Replace - with _, + with _plus_ in a model name.""" 1732 1733 name = name.replace('-', '_') 1734 name = name.replace('+', '_plus_') 1735 return name
1736
1737 - def finalize(self, *args, **opts):
1738 """ """ 1739 self.compile_model() 1740 pass
1741
1742 -class ProcessExporterMatchbox(ProcessExporterCPP):
1743 oneprocessclass = OneProcessExporterMatchbox
1744
1745 -class ProcessExporterPythia8(ProcessExporterCPP):
1746 oneprocessclass = OneProcessExporterPythia8 1747 grouped_mode = 'madevent' 1748 1749 #=============================================================================== 1750 # generate_process_files_pythia8 1751 #===============================================================================
1752 - def generate_process_directory(self, multi_matrix_element, cpp_helas_call_writer, 1753 process_string = "", 1754 process_number = 0, 1755 version='8.2'):
1756 1757 """Generate the .h and .cc files needed for Pythia 8, for the 1758 processes described by multi_matrix_element""" 1759 1760 process_exporter_pythia8 = OneProcessExporterPythia8(multi_matrix_element, 1761 cpp_helas_call_writer, 1762 process_string, 1763 process_number, 1764 self.dir_path, 1765 version=version) 1766 1767 # Set process directory 1768 model = process_exporter_pythia8.model 1769 model_name = process_exporter_pythia8.model_name 1770 process_exporter_pythia8.process_dir = \ 1771 'Processes_%(model)s' % {'model': \ 1772 model_name} 1773 process_exporter_pythia8.include_dir = process_exporter_pythia8.process_dir 1774 process_exporter_pythia8.generate_process_files() 1775 return process_exporter_pythia8
1776 1777 #=============================================================================== 1778 # generate_example_file_pythia8 1779 #=============================================================================== 1780 @staticmethod
1781 - def generate_example_file_pythia8(path, 1782 model_path, 1783 process_names, 1784 exporter, 1785 main_file_name = "", 1786 example_dir = "examples", 1787 version="8.2"):
1788 """Generate the main_model_name.cc file and Makefile in the examples dir""" 1789 1790 filepath = os.path.join(path, example_dir) 1791 if not os.path.isdir(filepath): 1792 os.makedirs(filepath) 1793 1794 replace_dict = {} 1795 1796 # Extract version number and date from VERSION file 1797 info_lines = get_mg5_info_lines() 1798 replace_dict['info_lines'] = info_lines 1799 1800 # Extract model name 1801 replace_dict['model_name'] = exporter.model_name 1802 1803 # Extract include line 1804 replace_dict['include_lines'] = \ 1805 "\n".join(["#include \"%s.h\"" % proc_name \ 1806 for proc_name in process_names]) 1807 1808 # Extract setSigmaPtr line 1809 replace_dict['sigma_pointer_lines'] = \ 1810 "\n".join(["pythia.setSigmaPtr(new %s());" % proc_name \ 1811 for proc_name in process_names]) 1812 1813 # Extract param_card path 1814 replace_dict['param_card'] = os.path.join(os.path.pardir,model_path, 1815 "param_card_%s.dat" % \ 1816 exporter.model_name) 1817 1818 # Create the example main file 1819 if version =="8.2": 1820 template_path = 'pythia8.2_main_example_cc.inc' 1821 makefile_path = 'pythia8.2_main_makefile.inc' 1822 replace_dict['include_prefix'] = 'Pythia8/' 1823 else: 1824 template_path = 'pythia8_main_example_cc.inc' 1825 makefile_path = 'pythia8_main_makefile.inc' 1826 replace_dict['include_prefix'] = '' 1827 1828 1829 file = ProcessExporterPythia8.read_template_file(template_path) % \ 1830 replace_dict 1831 1832 if not main_file_name: 1833 num = 1 1834 while os.path.exists(os.path.join(filepath, 1835 'main_%s_%i.cc' % (exporter.model_name, num))) or \ 1836 os.path.exists(os.path.join(filepath, 1837 'main_%s_%i' % (exporter.model_name, num))): 1838 num += 1 1839 main_file_name = str(num) 1840 1841 main_file = 'main_%s_%s' % (exporter.model_name, 1842 main_file_name) 1843 1844 main_filename = os.path.join(filepath, main_file + '.cc') 1845 1846 # Write the file 1847 writers.CPPWriter(main_filename).writelines(file) 1848 1849 replace_dict = {} 1850 1851 # Extract version number and date from VERSION file 1852 replace_dict['info_lines'] = get_mg5_info_lines() 1853 1854 replace_dict['main_file'] = main_file 1855 1856 replace_dict['process_dir'] = model_path 1857 1858 replace_dict['include_dir'] = exporter.include_dir 1859 1860 # Create the makefile 1861 file = ProcessExporterPythia8.read_template_file(makefile_path) % replace_dict 1862 1863 make_filename = os.path.join(filepath, 'Makefile_%s_%s' % \ 1864 (exporter.model_name, main_file_name)) 1865 1866 # Write the file 1867 open(make_filename, 'w').write(file) 1868 1869 logger.info("Created files %s and %s in directory %s" \ 1870 % (os.path.split(main_filename)[-1], 1871 os.path.split(make_filename)[-1], 1872 os.path.split(make_filename)[0])) 1873 return main_file, make_filename
1874
1875 - def convert_model(self,*args,**opts):
1876 pass
1877 - def finalize(self, *args, **opts):
1878 pass
1879
1880 -def get_mg5_info_lines():
1881 """Return info lines for MG5, suitable to place at beginning of 1882 Fortran files""" 1883 1884 info = misc.get_pkg_info() 1885 info_lines = "" 1886 if info and 'version' in info and 'date' in info: 1887 info_lines = "# MadGraph5_aMC@NLO v. %s, %s\n" % \ 1888 (info['version'], info['date']) 1889 info_lines = info_lines + \ 1890 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 1891 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 1892 else: 1893 info_lines = "# MadGraph5_aMC@NLO\n" + \ 1894 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 1895 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 1896 1897 return info_lines
1898
1899 -def coeff(ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1900 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1901 1902 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1903 1904 if total_coeff == 1: 1905 if is_imaginary: 1906 return '+std::complex<double>(0,1)*' 1907 else: 1908 return '+' 1909 elif total_coeff == -1: 1910 if is_imaginary: 1911 return '-std::complex<double>(0,1)*' 1912 else: 1913 return '-' 1914 1915 res_str = '%+i.' % total_coeff.numerator 1916 1917 if total_coeff.denominator != 1: 1918 # Check if total_coeff is an integer 1919 res_str = res_str + '/%i.' % total_coeff.denominator 1920 1921 if is_imaginary: 1922 res_str = res_str + '*std::complex<double>(0,1)' 1923 1924 return res_str + '*'
1925
1926 #=============================================================================== 1927 # UFOModelConverterCPP 1928 #=============================================================================== 1929 1930 -class UFOModelConverterCPP(object):
1931 """ A converter of the UFO-MG5 Model to the C++ format """ 1932 1933 # Static variables (for inheritance) 1934 output_name = 'C++ Standalone' 1935 namespace = 'MG5' 1936 1937 # Dictionary from Python type to C++ type 1938 type_dict = {"real": "double", 1939 "complex": "std::complex<double>"} 1940 1941 # Regular expressions for cleaning of lines from Aloha files 1942 compiler_option_re = re.compile('^#\w') 1943 namespace_re = re.compile('^using namespace') 1944 1945 slha_to_depend = {('SMINPUTS', (3,)): ('aS',), 1946 ('SMINPUTS', (1,)): ('aEM',)} 1947 1948 # Template files to use 1949 include_dir = '.' 1950 cc_file_dir = '.' 1951 param_template_h = 'cpp_model_parameters_h.inc' 1952 param_template_cc = 'cpp_model_parameters_cc.inc' 1953 aloha_template_h = 'cpp_hel_amps_h.inc' 1954 aloha_template_cc = 'cpp_hel_amps_cc.inc' 1955 1956 copy_include_files = [] 1957 copy_cc_files = [] 1958
1959 - def __init__(self, model, output_path, wanted_lorentz = [], 1960 wanted_couplings = [], replace_dict={}):
1961 """ initialization of the objects """ 1962 1963 self.model = model 1964 self.model_name = ProcessExporterCPP.get_model_name(model['name']) 1965 1966 self.dir_path = output_path 1967 self.default_replace_dict = dict(replace_dict) 1968 # List of needed ALOHA routines 1969 self.wanted_lorentz = wanted_lorentz 1970 1971 # For dependent couplings, only want to update the ones 1972 # actually used in each process. For other couplings and 1973 # parameters, just need a list of all. 1974 self.coups_dep = {} # name -> base_objects.ModelVariable 1975 self.coups_indep = [] # base_objects.ModelVariable 1976 self.params_dep = [] # base_objects.ModelVariable 1977 self.params_indep = [] # base_objects.ModelVariable 1978 self.p_to_cpp = parsers.UFOExpressionParserCPP() 1979 1980 # Prepare parameters and couplings for writeout in C++ 1981 self.prepare_parameters() 1982 self.prepare_couplings(wanted_couplings)
1983
1984 - def write_files(self):
1985 """Create all necessary files""" 1986 1987 # Write Helas Routines 1988 self.write_aloha_routines() 1989 1990 # Write parameter (and coupling) class files 1991 self.write_parameter_class_files()
1992 1993 # Routines for preparing parameters and couplings from the model 1994
1995 - def prepare_parameters(self):
1996 """Extract the parameters from the model, and store them in 1997 the two lists params_indep and params_dep""" 1998 1999 # Keep only dependences on alphaS, to save time in execution 2000 keys = list(self.model['parameters'].keys()) 2001 keys.sort(key=len) 2002 params_ext = [] 2003 for key in keys: 2004 if key == ('external',): 2005 params_ext += [p for p in self.model['parameters'][key] if p.name] 2006 elif 'aS' in key: 2007 for p in self.model['parameters'][key]: 2008 self.params_dep.append(base_objects.ModelVariable(p.name, 2009 p.name + " = " + \ 2010 self.p_to_cpp.parse(p.expr) + ";", 2011 p.type, 2012 p.depend)) 2013 else: 2014 for p in self.model['parameters'][key]: 2015 if p.name == 'ZERO': 2016 continue 2017 self.params_indep.append(base_objects.ModelVariable(p.name, 2018 p.name + " = " + \ 2019 self.p_to_cpp.parse(p.expr) + ";", 2020 p.type, 2021 p.depend)) 2022 2023 # For external parameters, want to read off the SLHA block code 2024 while params_ext: 2025 param = params_ext.pop(0) 2026 # Read value from the slha variable 2027 expression = "" 2028 assert param.value.imag == 0 2029 if len(param.lhacode) == 1: 2030 expression = "%s = slha.get_block_entry(\"%s\", %d, %e);" % \ 2031 (param.name, param.lhablock.lower(), 2032 param.lhacode[0], param.value.real) 2033 elif len(param.lhacode) == 2: 2034 expression = "indices[0] = %d;\nindices[1] = %d;\n" % \ 2035 (param.lhacode[0], param.lhacode[1]) 2036 expression += "%s = slha.get_block_entry(\"%s\", indices, %e);" \ 2037 % (param.name, param.lhablock.lower(), param.value.real) 2038 else: 2039 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices") 2040 self.params_indep.insert(0, 2041 base_objects.ModelVariable(param.name, 2042 expression, 2043 'real'))
2044
2045 - def prepare_couplings(self, wanted_couplings = []):
2046 """Extract the couplings from the model, and store them in 2047 the two lists coups_indep and coups_dep""" 2048 2049 # Keep only dependences on alphaS, to save time in execution 2050 keys = list(self.model['couplings'].keys()) 2051 keys.sort(key=len) 2052 for key, coup_list in self.model['couplings'].items(): 2053 if "aS" in key: 2054 for c in coup_list: 2055 if not wanted_couplings or c.name in wanted_couplings: 2056 self.coups_dep[c.name] = base_objects.ModelVariable(\ 2057 c.name, 2058 c.expr, 2059 c.type, 2060 c.depend) 2061 else: 2062 for c in coup_list: 2063 if not wanted_couplings or c.name in wanted_couplings: 2064 self.coups_indep.append(base_objects.ModelVariable(\ 2065 c.name, 2066 c.expr, 2067 c.type, 2068 c.depend)) 2069 2070 # Convert coupling expressions from Python to C++ 2071 for coup in list(self.coups_dep.values()) + self.coups_indep: 2072 coup.expr = coup.name + " = " + self.p_to_cpp.parse(coup.expr) + ";"
2073  
2074     # Routines for writing the parameter files 
2075  
2076 -  def write_parameter_class_files(self):
2077 """Generate the parameters_model.h and parameters_model.cc 2078 files, which have the parameters and couplings for the model.""" 2079 2080 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)): 2081 os.makedirs(os.path.join(self.dir_path, self.include_dir)) 2082 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)): 2083 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir)) 2084 2085 parameter_h_file = os.path.join(self.dir_path, self.include_dir, 2086 'Parameters_%s.h' % self.model_name) 2087 parameter_cc_file = os.path.join(self.dir_path, self.cc_file_dir, 2088 'Parameters_%s.cc' % self.model_name) 2089 2090 file_h, file_cc = self.generate_parameters_class_files() 2091 2092 # Write the files 2093 writers.CPPWriter(parameter_h_file).writelines(file_h) 2094 writers.CPPWriter(parameter_cc_file).writelines(file_cc) 2095 2096 # Copy additional needed files 2097 for copy_file in self.copy_include_files: 2098 shutil.copy(os.path.join(_file_path, 'iolibs', 2099 'template_files',copy_file), 2100 os.path.join(self.dir_path, self.include_dir)) 2101 # Copy additional needed files 2102 for copy_file in self.copy_cc_files: 2103 shutil.copy(os.path.join(_file_path, 'iolibs', 2104 'template_files',copy_file), 2105 os.path.join(self.dir_path, self.cc_file_dir)) 2106 2107 logger.info("Created files %s and %s in directory" \ 2108 % (os.path.split(parameter_h_file)[-1], 2109 os.path.split(parameter_cc_file)[-1])) 2110 logger.info("%s and %s" % \ 2111 (os.path.split(parameter_h_file)[0], 2112 os.path.split(parameter_cc_file)[0]))
2113  
2114 -  def generate_parameters_class_files(self):
2115 """Create the content of the Parameters_model.h and .cc files""" 2116 2117 replace_dict = self.default_replace_dict 2118 2119 replace_dict['info_lines'] = get_mg5_info_lines() 2120 replace_dict['model_name'] = self.model_name 2121 2122 replace_dict['independent_parameters'] = \ 2123 "// Model parameters independent of aS\n" + \ 2124 self.write_parameters(self.params_indep) 2125 replace_dict['independent_couplings'] = \ 2126 "// Model parameters dependent on aS\n" + \ 2127 self.write_parameters(self.params_dep) 2128 replace_dict['dependent_parameters'] = \ 2129 "// Model couplings independent of aS\n" + \ 2130 self.write_parameters(self.coups_indep) 2131 replace_dict['dependent_couplings'] = \ 2132 "// Model couplings dependent on aS\n" + \ 2133 self.write_parameters(list(self.coups_dep.values())) 2134 2135 replace_dict['set_independent_parameters'] = \ 2136 self.write_set_parameters(self.params_indep) 2137 replace_dict['set_independent_couplings'] = \ 2138 self.write_set_parameters(self.coups_indep) 2139 replace_dict['set_dependent_parameters'] = \ 2140 self.write_set_parameters(self.params_dep) 2141 replace_dict['set_dependent_couplings'] = \ 2142 self.write_set_parameters(list(self.coups_dep.values())) 2143 2144 replace_dict['print_independent_parameters'] = \ 2145 self.write_print_parameters(self.params_indep) 2146 replace_dict['print_independent_couplings'] = \ 2147 self.write_print_parameters(self.coups_indep) 2148 replace_dict['print_dependent_parameters'] = \ 2149 self.write_print_parameters(self.params_dep) 2150 replace_dict['print_dependent_couplings'] = \ 2151 self.write_print_parameters(list(self.coups_dep.values())) 2152 2153 if 'include_prefix' not in replace_dict: 2154 replace_dict['include_prefix'] = '' 2155 2156 2157 file_h = self.read_template_file(self.param_template_h) % \ 2158 replace_dict 2159 file_cc = self.read_template_file(self.param_template_cc) % \ 2160 replace_dict 2161 2162 return file_h, file_cc
2163
2164 - def write_parameters(self, params):
2165 """Write out the definitions of parameters""" 2166 2167 # Create a dictionary from parameter type to list of parameter names 2168 type_param_dict = {} 2169 2170 for param in params: 2171 type_param_dict[param.type] = \ 2172 type_param_dict.setdefault(param.type, []) + [param.name] 2173 2174 # For each parameter type, write out the definition string 2175 # type parameters; 2176 res_strings = [] 2177 for key in type_param_dict: 2178 res_strings.append("%s %s;" % (self.type_dict[key], 2179 ",".join(type_param_dict[key]))) 2180 2181 return "\n".join(res_strings)
2182
2183 - def write_set_parameters(self, params):
2184 """Write out the lines of independent parameters""" 2185 2186 # For each parameter, write name = expr; 2187 2188 res_strings = [] 2189 for param in params: 2190 res_strings.append("%s" % param.expr) 2191 2192 # Correct width sign for Majorana particles (where the width 2193 # and mass need to have the same sign) 2194 for particle in self.model.get('particles'): 2195 if particle.is_fermion() and particle.get('self_antipart') and \ 2196 particle.get('width').lower() != 'zero': 2197 res_strings.append("if (%s < 0)" % particle.get('mass')) 2198 res_strings.append("%(width)s = -abs(%(width)s);" % \ 2199 {"width": particle.get('width')}) 2200 2201 return "\n".join(res_strings)
2202
2203 - def write_print_parameters(self, params):
2204 """Write out the lines of independent parameters""" 2205 2206 # For each parameter, write name = expr; 2207 2208 res_strings = [] 2209 for param in params: 2210 res_strings.append("cout << setw(20) << \"%s \" << \"= \" << setiosflags(ios::scientific) << setw(10) << %s << endl;" % (param.name, param.name)) 2211 2212 return "\n".join(res_strings)
2213 2214 # Routines for writing the ALOHA files 2215
2216 - def write_aloha_routines(self):
2217 """Generate the hel_amps_model.h and hel_amps_model.cc files, which 2218 have the complete set of generalized Helas routines for the model""" 2219 2220 if not os.path.isdir(os.path.join(self.dir_path, self.include_dir)): 2221 os.makedirs(os.path.join(self.dir_path, self.include_dir)) 2222 if not os.path.isdir(os.path.join(self.dir_path, self.cc_file_dir)): 2223 os.makedirs(os.path.join(self.dir_path, self.cc_file_dir)) 2224 2225 model_h_file = os.path.join(self.dir_path, self.include_dir, 2226 'HelAmps_%s.h' % self.model_name) 2227 model_cc_file = os.path.join(self.dir_path, self.cc_file_dir, 2228 'HelAmps_%s.cc' % self.model_name) 2229 2230 replace_dict = {} 2231 2232 replace_dict['output_name'] = self.output_name 2233 replace_dict['info_lines'] = get_mg5_info_lines() 2234 replace_dict['namespace'] = self.namespace 2235 replace_dict['model_name'] = self.model_name 2236 2237 # Read in the template .h and .cc files, stripped of compiler 2238 # commands and namespaces 2239 template_h_files = self.read_aloha_template_files(ext = 'h') 2240 template_cc_files = self.read_aloha_template_files(ext = 'cc') 2241 2242 aloha_model = create_aloha.AbstractALOHAModel(self.model.get('name')) 2243 aloha_model.add_Lorentz_object(self.model.get('lorentz')) 2244 2245 if self.wanted_lorentz: 2246 aloha_model.compute_subset(self.wanted_lorentz) 2247 else: 2248 aloha_model.compute_all(save=False, custom_propa=True) 2249 2250 for abstracthelas in dict(aloha_model).values(): 2251 h_rout, cc_rout = abstracthelas.write(output_dir=None, language='CPP', 2252 mode='no_include') 2253 2254 template_h_files.append(h_rout) 2255 template_cc_files.append(cc_rout) 2256 2257 #aloha_writer = aloha_writers.ALOHAWriterForCPP(abstracthelas, 2258 # self.dir_path) 2259 #header = aloha_writer.define_header() 2260 #template_h_files.append(self.write_function_declaration(\ 2261 # aloha_writer, header)) 2262 #template_cc_files.append(self.write_function_definition(\ 2263 # aloha_writer, header)) 2264 2265 replace_dict['function_declarations'] = '\n'.join(template_h_files) 2266 replace_dict['function_definitions'] = '\n'.join(template_cc_files) 2267 2268 file_h = self.read_template_file(self.aloha_template_h) % replace_dict 2269 file_cc = self.read_template_file(self.aloha_template_cc) % replace_dict 2270 2271 # Write the files 2272 writers.CPPWriter(model_h_file).writelines(file_h) 2273 writers.CPPWriter(model_cc_file).writelines(file_cc) 2274 2275 logger.info("Created files %s and %s in directory" \ 2276 % (os.path.split(model_h_file)[-1], 2277 os.path.split(model_cc_file)[-1])) 2278 logger.info("%s and %s" % \ 2279 (os.path.split(model_h_file)[0], 2280 os.path.split(model_cc_file)[0]))
2281 2282
2283 - def read_aloha_template_files(self, ext):
2284 """Read all ALOHA template files with extension ext, strip them of 2285 compiler options and namespace options, and return in a list""" 2286 2287 template_files = [] 2288 for filename in misc.glob('*.%s' % ext, pjoin(MG5DIR, 'aloha','template_files')): 2289 file = open(filename, 'r') 2290 template_file_string = "" 2291 while file: 2292 line = file.readline() 2293 if len(line) == 0: break 2294 line = self.clean_line(line) 2295 if not line: 2296 continue 2297 template_file_string += line.strip() + '\n' 2298 template_files.append(template_file_string) 2299 2300 return template_files
2301 2302 # def write_function_declaration(self, aloha_writer, header): 2303 # """Write the function declaration for the ALOHA routine""" 2304 # 2305 # ret_lines = [] 2306 # for line in aloha_writer.write_h(header).split('\n'): 2307 # if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2308 # # Strip out compiler flags and namespaces 2309 # continue 2310 # ret_lines.append(line) 2311 # return "\n".join(ret_lines) 2312 # 2313 # def write_function_definition(self, aloha_writer, header): 2314 # """Write the function definition for the ALOHA routine""" 2315 # 2316 # ret_lines = [] 2317 # for line in aloha_writer.write_cc(header).split('\n'): 2318 # if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2319 # # Strip out compiler flags and namespaces 2320 # continue 2321 # ret_lines.append(line) 2322 # return "\n".join(ret_lines) 2323
2324 - def clean_line(self, line):
2325 """Strip a line of compiler options and namespace options.""" 2326 2327 if self.compiler_option_re.match(line) or self.namespace_re.match(line): 2328 return "" 2329 2330 return line
2331 2332 #=============================================================================== 2333 # Global helper methods 2334 #=============================================================================== 2335 @classmethod
2336 - def read_template_file(cls, filename, classpath=False):
2337 """Open a template file and return the contents.""" 2338 2339 return OneProcessExporterCPP.read_template_file(filename, classpath)
2340
2341 2342 #=============================================================================== 2343 # UFOModelConverterPythia8 2344 #=============================================================================== 2345 2346 -class UFOModelConverterPythia8(UFOModelConverterCPP):
2347 """ A converter of the UFO-MG5 Model to the Pythia 8 format """ 2348 2349 # Static variables (for inheritance) 2350 output_name = 'Pythia 8' 2351 namespace = 'Pythia8' 2352 2353 # Dictionaries for expression of MG5 SM parameters into Pythia 8 2354 slha_to_expr = {('SMINPUTS', (1,)): '1./csm->alphaEM(((pd->m0(23))*(pd->m0(23))))', 2355 ('SMINPUTS', (2,)): 'M_PI*csm->alphaEM(((pd->m0(23))*(pd->m0(23))))*((pd->m0(23))*(pd->m0(23)))/(sqrt(2.)*((pd->m0(24))*(pd->m0(24)))*(((pd->m0(23))*(pd->m0(23)))-((pd->m0(24))*(pd->m0(24)))))', 2356 ('SMINPUTS', (3,)): 'alpS', 2357 ('CKMBLOCK', (1,)): 'csm->VCKMgen(1,2)', 2358 } 2359 2360 # Template files to use 2361 param_template_h = 'pythia8_model_parameters_h.inc' 2362 param_template_cc = 'pythia8_model_parameters_cc.inc' 2363 template_paths = os.path.join(_file_path, 'iolibs', 'template_files', 'pythia8') 2364
2365 - def prepare_parameters(self):
2366 """Extract the model parameters from Pythia 8, and store them in 2367 the two lists params_indep and params_dep""" 2368 2369 # Keep only dependences on alphaS, to save time in execution 2370 keys = list(self.model['parameters'].keys()) 2371 keys.sort(key=len) 2372 params_ext = [] 2373 for key in keys: 2374 if key == ('external',): 2375 params_ext += [p for p in self.model['parameters'][key] if p.name] 2376 elif 'aS' in key: 2377 for p in self.model['parameters'][key]: 2378 self.params_dep.append(base_objects.ModelVariable(p.name, 2379 p.name + " = " + \ 2380 self.p_to_cpp.parse(p.expr) + ';', 2381 p.type, 2382 p.depend)) 2383 else: 2384 for p in self.model['parameters'][key]: 2385 self.params_indep.append(base_objects.ModelVariable(p.name, 2386 p.name + " = " + \ 2387 self.p_to_cpp.parse(p.expr) + ';', 2388 p.type, 2389 p.depend)) 2390 2391 # For external parameters, want to use the internal Pythia 2392 # parameters for SM params and masses and widths. For other 2393 # parameters, want to read off the SLHA block code 2394 while params_ext: 2395 param = params_ext.pop(0) 2396 key = (param.lhablock, tuple(param.lhacode)) 2397 if 'aS' in self.slha_to_depend.setdefault(key, ()): 2398 # This value needs to be set event by event 2399 self.params_dep.insert(0, 2400 base_objects.ModelVariable(param.name, 2401 param.name + ' = ' + \ 2402 self.slha_to_expr[key] + ';', 2403 'real')) 2404 else: 2405 try: 2406 # This is an SM parameter defined above 2407 self.params_indep.insert(0, 2408 base_objects.ModelVariable(param.name, 2409 param.name + ' = ' + \ 2410 self.slha_to_expr[key] + ';', 2411 'real')) 2412 except Exception: 2413 # For Yukawa couplings, masses and widths, insert 2414 # the Pythia 8 value 2415 if param.lhablock == 'YUKAWA': 2416 self.slha_to_expr[key] = 'pd->mRun(%i, pd->m0(24))' \ 2417 % param.lhacode[0] 2418 if param.lhablock == 'MASS': 2419 self.slha_to_expr[key] = 'pd->m0(%i)' \ 2420 % param.lhacode[0] 2421 if param.lhablock == 'DECAY': 2422 self.slha_to_expr[key] = \ 2423 'pd->mWidth(%i)' % param.lhacode[0] 2424 if key in self.slha_to_expr: 2425 self.params_indep.insert(0,\ 2426 base_objects.ModelVariable(param.name, 2427 param.name + "=" + self.slha_to_expr[key] \ 2428 + ';', 2429 'real')) 2430 else: 2431 # This is a BSM parameter which is read from SLHA 2432 if len(param.lhacode) == 1: 2433 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %s)){\n" % \ 2434 (param.lhablock.lower(), 2435 param.lhacode[0], 2436 param.name) + \ 2437 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2438 + "%s = %e;}") % (param.name, param.value.real, 2439 param.name, param.value.real) 2440 elif len(param.lhacode) == 2: 2441 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %s)){\n" % \ 2442 (param.lhablock.lower(), 2443 param.lhacode[0], 2444 param.lhacode[1], 2445 param.name) + \ 2446 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2447 + "%s = %e;}") % (param.name, param.value.real, 2448 param.name, param.value.real) 2449 elif len(param.lhacode) == 3: 2450 expression = "if(!slhaPtr->getEntry<double>(\"%s\", %d, %d, %d, %s)){\n" % \ 2451 (param.lhablock.lower(), 2452 param.lhacode[0], 2453 param.lhacode[1], 2454 param.lhacode[2], 2455 param.name) + \ 2456 ("cout << \"Warning, setting %s to %e\" << endl;\n" \ 2457 + "%s = %e;}") % (param.name, param.value.real, 2458 param.name, param.value.real) 2459 else: 2460 raise MadGraph5Error("Only support for SLHA blocks with 1 or 2 indices") 2461 self.params_indep.insert(0, 2462 base_objects.ModelVariable(param.name, 2463 
expression, 2464 'real'))
2465
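    # Hedged example of the guarded SLHA read generated above for a BSM
    # parameter with one index (name, block and default value are hypothetical):
    #
    #   if(!slhaPtr->getEntry<double>("frblock", 1, mdl_kappa)){
    #   cout << "Warning, setting mdl_kappa to 1.000000e+00" << endl;
    #   mdl_kappa = 1.000000e+00;}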
2466 - def write_makefile(self):
2467 """Generate the Makefile, which creates library files.""" 2468 2469 makefilename = os.path.join(self.dir_path, self.cc_file_dir, 2470 'Makefile') 2471 2472 replace_dict = {} 2473 2474 replace_dict['info_lines'] = get_mg5_info_lines() 2475 replace_dict['model'] = self.model_name 2476 2477 if self.default_replace_dict['version'] == "8.2": 2478 path = 'pythia8.2_makefile.inc' 2479 else: 2480 path = 'pythia8_makefile.inc' 2481 makefile = self.read_template_file(path) % replace_dict 2482 2483 # Write the files 2484 open(makefilename, 'w').write(makefile) 2485 2486 logger.info("Created %s in directory %s" \ 2487 % (os.path.split(makefilename)[-1], 2488 os.path.split(makefilename)[0]))
2489
2490 - def write_param_card(self):
2491 """Generate the param_card for the model.""" 2492 2493 paramcardname = os.path.join(self.dir_path, self.cc_file_dir, 2494 'param_card_%s.dat' % self.model_name) 2495 # Write out param_card 2496 open(paramcardname, 'w').write(\ 2497 self.model.write_param_card()) 2498 2499 logger.info("Created %s in directory %s" \ 2500 % (os.path.split(paramcardname)[-1], 2501 os.path.split(paramcardname)[0]))
2502 2503 #=============================================================================== 2504 # Global helper methods 2505 #=============================================================================== 2506 @classmethod
2507 - def read_template_file(cls, *args, **opts):
2508 """Open a template file and return the contents.""" 2509 2510 return OneProcessExporterPythia8.read_template_file(*args, **opts)
2511
2512 -def ExportCPPFactory(cmd, group_subprocesses=False, cmd_options={}):
2513 """ Determine which Export class is required. cmd is the command 2514 interface containing all potential usefull information. 2515 """ 2516 2517 opt = dict(cmd.options) 2518 opt['output_options'] = cmd_options 2519 cformat = cmd._export_format 2520 2521 if cformat == 'pythia8': 2522 return ProcessExporterPythia8(cmd._export_dir, opt) 2523 elif cformat == 'standalone_cpp': 2524 return ProcessExporterCPP(cmd._export_dir, opt) 2525 elif cformat == 'matchbox_cpp': 2526 return ProcessExporterMatchbox(cmd._export_dir, opt) 2527 elif cformat == 'plugin': 2528 return cmd._export_plugin(cmd._export_dir, opt)
2529
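# Hedged usage sketch of the factory above. The cmd argument is the MG5 command
# interface; the stand-in class below only mimics the attributes actually read
# by ExportCPPFactory and is not part of MadGraph.
if __name__ == '__main__':
    class _FakeCmd(object):
        options = {}                          # copied into the exporter options
        _export_format = 'standalone_cpp'     # selects ProcessExporterCPP
        _export_dir = '/tmp/my_standalone_cpp_output'

    exporter = ExportCPPFactory(_FakeCmd())
    print(type(exporter).__name__)            # -> 'ProcessExporterCPP'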