
Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30   
  31   
  32  import aloha 
  33   
  34  import madgraph.core.base_objects as base_objects 
  35  import madgraph.core.color_algebra as color 
  36  import madgraph.core.helas_objects as helas_objects 
  37  import madgraph.iolibs.drawing_eps as draw 
  38  import madgraph.iolibs.files as files 
  39  import madgraph.iolibs.group_subprocs as group_subprocs 
  40  import madgraph.iolibs.file_writers as writers 
  41  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  42  import madgraph.iolibs.template_files as template_files 
  43  import madgraph.iolibs.ufo_expression_parsers as parsers 
  44  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  45  import madgraph.interface.common_run_interface as common_run_interface 
  46  import madgraph.various.diagram_symmetry as diagram_symmetry 
  47  import madgraph.various.misc as misc 
  48  import madgraph.various.banner as banner_mod 
  49  import madgraph.various.process_checks as process_checks 
  50  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  51  import aloha.create_aloha as create_aloha 
  52  import models.import_ufo as import_ufo 
  53  import models.write_param_card as param_writer 
  54  import models.check_param_card as check_param_card 
  55   
  56   
  57  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  58  from madgraph.iolibs.files import cp, ln, mv 
  59   
  60  from madgraph import InvalidCmd 
  61   
  62  pjoin = os.path.join 
  63   
  64  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  65  logger = logging.getLogger('madgraph.export_v4') 
  66   
  67  default_compiler= {'fortran': 'gfortran', 
  68                         'f2py': 'f2py', 
  69                         'cpp':'g++'} 
  70  
  71  #===============================================================================
  72  # ProcessExporterFortran
  73  #===============================================================================
  74  class ProcessExporterFortran(object):
  75      """Class to take care of exporting a set of matrix elements to
  76      Fortran (v4) format."""
  77  
  78      default_opt = {'clean': False, 'complex_mass':False,
  79                     'export_format':'madevent', 'mp': False,
  80                     'v5_model': True
  81                     }
  82  
83 - def __init__(self, mgme_dir = "", dir_path = "", opt=None):
84 """Initiate the ProcessExporterFortran with directory information""" 85 self.mgme_dir = mgme_dir 86 self.dir_path = dir_path 87 self.model = None 88 89 self.opt = dict(self.default_opt) 90 if opt: 91 self.opt.update(opt) 92 93 #place holder to pass information to the run_interface 94 self.proc_characteristic = banner_mod.ProcCharacteristic()
95 96 97 #=========================================================================== 98 # process exporter fortran switch between group and not grouped 99 #===========================================================================
100 - def export_processes(self, matrix_elements, fortran_model):
101 """Make the switch between grouped and not grouped output""" 102 103 calls = 0 104 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 105 for (group_number, me_group) in enumerate(matrix_elements): 106 calls = calls + self.generate_subprocess_directory_v4(\ 107 me_group, fortran_model, group_number) 108 else: 109 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 110 calls = calls + self.generate_subprocess_directory_v4(\ 111 me, fortran_model, me_number) 112 113 return calls
114 115 116 117 #=========================================================================== 118 # create the run_card 119 #===========================================================================
120 - def create_run_card(self, matrix_elements, history):
 121          """Create the run_card.dat and run_card_default.dat for this process."""
 122  
 123          run_card = banner_mod.RunCard()
 124  
 125  
 126          default = True
 127          if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
 128              processes = [me.get('processes') for megroup in matrix_elements
 129                           for me in megroup['matrix_elements']]
 130          elif matrix_elements:
 131              processes = [me.get('processes')
 132                           for me in matrix_elements['matrix_elements']]
 133          else:
 134              default = False
 135  
 136          if default:
 137              run_card.create_default_for_process(self.proc_characteristic,
 138                                                  history,
 139                                                  processes)
 140  
 141  
 142          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
 143          run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
144 145 146 #=========================================================================== 147 # copy the Template in a new directory. 148 #===========================================================================
149 - def copy_v4template(self, modelname):
150 """create the directory run_name as a copy of the MadEvent 151 Template, and clean the directory 152 """ 153 154 #First copy the full template tree if dir_path doesn't exit 155 if not os.path.isdir(self.dir_path): 156 assert self.mgme_dir, \ 157 "No valid MG_ME path given for MG4 run directory creation." 158 logger.info('initialize a new directory: %s' % \ 159 os.path.basename(self.dir_path)) 160 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 161 self.dir_path, True) 162 # distutils.dir_util.copy_tree since dir_path already exists 163 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 164 self.dir_path) 165 # Duplicate run_card and plot_card 166 for card in ['plot_card']: 167 try: 168 shutil.copy(pjoin(self.dir_path, 'Cards', 169 card + '.dat'), 170 pjoin(self.dir_path, 'Cards', 171 card + '_default.dat')) 172 except IOError: 173 logger.warning("Failed to copy " + card + ".dat to default") 174 elif os.getcwd() == os.path.realpath(self.dir_path): 175 logger.info('working in local directory: %s' % \ 176 os.path.realpath(self.dir_path)) 177 # distutils.dir_util.copy_tree since dir_path already exists 178 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 179 self.dir_path) 180 # for name in misc.glob('Template/LO/*', self.mgme_dir): 181 # name = os.path.basename(name) 182 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 183 # if os.path.isfile(filename): 184 # files.cp(filename, pjoin(self.dir_path,name)) 185 # elif os.path.isdir(filename): 186 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 187 # distutils.dir_util.copy_tree since dir_path already exists 188 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 189 self.dir_path) 190 # Duplicate run_card and plot_card 191 for card in ['plot_card']: 192 try: 193 shutil.copy(pjoin(self.dir_path, 'Cards', 194 card + '.dat'), 195 pjoin(self.dir_path, 'Cards', 196 card + '_default.dat')) 197 except IOError: 198 logger.warning("Failed to copy " + card + ".dat to default") 199 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 200 assert self.mgme_dir, \ 201 "No valid MG_ME path given for MG4 run directory creation." 202 try: 203 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 204 except IOError: 205 MG5_version = misc.get_pkg_info() 206 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 207 "5." 
+ MG5_version['version']) 208 209 #Ensure that the Template is clean 210 if self.opt['clean']: 211 logger.info('remove old information in %s' % \ 212 os.path.basename(self.dir_path)) 213 if os.environ.has_key('MADGRAPH_BASE'): 214 misc.call([pjoin('bin', 'internal', 'clean_template'), 215 '--web'], cwd=self.dir_path) 216 else: 217 try: 218 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 219 cwd=self.dir_path) 220 except Exception, why: 221 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 222 % (os.path.basename(self.dir_path),why)) 223 224 #Write version info 225 MG_version = misc.get_pkg_info() 226 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 227 MG_version['version']) 228 229 230 # add the makefile in Source directory 231 filename = pjoin(self.dir_path,'Source','makefile') 232 self.write_source_makefile(writers.FileWriter(filename)) 233 234 # add the DiscreteSampler information 235 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 236 pjoin(self.dir_path, 'Source')) 237 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 238 pjoin(self.dir_path, 'Source')) 239 240 # We need to create the correct open_data for the pdf 241 self.write_pdf_opendata()
242 243 244 245 246 #=========================================================================== 247 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 248 #===========================================================================
249 - def write_procdef_mg5(self, file_pos, modelname, process_str):
 250          """Write an equivalent of the MG4 proc_card so that the MadEvent4 Perl
 251          scripts keep working properly for a pure MG5 run."""
 252  
 253          proc_card_template = template_files.mg4_proc_card.mg4_template
 254          process_template = template_files.mg4_proc_card.process_template
 255          process_text = ''
 256          coupling = ''
 257          new_process_content = []
 258  
 259  
 260          # First find the couplings and remove them from process_str.
 261          # But first ensure that couplings are defined without spaces:
 262          process_str = process_str.replace(' =', '=')
 263          process_str = process_str.replace('= ', '=')
 264          process_str = process_str.replace(',',' , ')
 265          # Now loop over the elements and treat all the couplings
 266          for info in process_str.split():
 267              if '=' in info:
 268                  coupling += info + '\n'
 269              else:
 270                  new_process_content.append(info)
 271          # Recombine process_str (i.e. the input process_str without the
 272          # coupling info)
 273          process_str = ' '.join(new_process_content)
 274  
 275          # Format the SubProcess
 276          process_text += process_template.substitute({'process': process_str, \
 277                                                       'coupling': coupling})
 278  
 279          text = proc_card_template.substitute({'process': process_text,
 280                                                'model': modelname,
 281                                                'multiparticle':''})
 282          ff = open(file_pos, 'w')
 283          ff.write(text)
 284          ff.close()
285 286 #=========================================================================== 287 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 288 #===========================================================================
289 - def finalize_v4_directory(self, matrix_elements, history = "", makejpg = False, 290 online = False, compiler=default_compiler):
291 """Function to finalize v4 directory, for inheritance. 292 """ 293 294 self.create_run_card(matrix_elements, history) 295 296 pass
297 298 #=========================================================================== 299 # Create the proc_characteristic file passing information to the run_interface 300 #===========================================================================
301 - def create_proc_charac(self, matrix_elements=None, history= "", **opts):
302 303 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
304 305 #=========================================================================== 306 # write_matrix_element_v4 307 #===========================================================================
308 - def write_matrix_element_v4(self):
309 """Function to write a matrix.f file, for inheritance. 310 """ 311 pass
312 313 #=========================================================================== 314 # write_pdf_opendata 315 #===========================================================================
316 - def write_pdf_opendata(self):
317 """ modify the pdf opendata file, to allow direct access to cluster node 318 repository if configure""" 319 320 if not self.opt["cluster_local_path"]: 321 changer = {"pdf_systemwide": ""} 322 else: 323 to_add = """ 324 tempname='%(path)s'//Tablefile 325 open(IU,file=tempname,status='old',ERR=1) 326 return 327 1 tempname='%(path)s/Pdfdata/'//Tablefile 328 open(IU,file=tempname,status='old',ERR=2) 329 return 330 2 tempname='%(path)s/lhapdf'//Tablefile 331 open(IU,file=tempname,status='old',ERR=3) 332 return 333 3 tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile 334 open(IU,file=tempname,status='old',ERR=4) 335 return 336 4 tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile 337 open(IU,file=tempname,status='old',ERR=5) 338 return 339 """ % {"path" : self.opt["cluster_local_path"]} 340 341 changer = {"pdf_systemwide": to_add} 342 343 ff = open(pjoin(self.dir_path, "Source", "PDF", "opendata.f"),"w") 344 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read() 345 ff.write(template % changer) 346 347 # Do the same for lhapdf set 348 if not self.opt["cluster_local_path"]: 349 changer = {"cluster_specific_path": ""} 350 else: 351 to_add=""" 352 LHAPath='%(path)s/PDFsets' 353 Inquire(File=LHAPath, exist=exists) 354 if(exists)return 355 LHAPath='%(path)s/../lhapdf/pdfsets/6.1/' 356 Inquire(File=LHAPath, exist=exists) 357 if(exists)return 358 LHAPath='%(path)s/../lhapdf/pdfsets/' 359 Inquire(File=LHAPath, exist=exists) 360 if(exists)return 361 LHAPath='./PDFsets' 362 """ % {"path" : self.opt["cluster_local_path"]} 363 changer = {"cluster_specific_path": to_add} 364 365 ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w") 366 template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read() 367 ff.write(template % changer) 368 369 370 return
371 372 373 374 #=========================================================================== 375 # write_maxparticles_file 376 #===========================================================================
377 - def write_maxparticles_file(self, writer, matrix_elements):
378 """Write the maxparticles.inc file for MadEvent""" 379 380 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 381 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 382 matrix_elements.get('matrix_elements')]) 383 else: 384 maxparticles = max([me.get_nexternal_ninitial()[0] \ 385 for me in matrix_elements]) 386 387 lines = "integer max_particles\n" 388 lines += "parameter(max_particles=%d)" % maxparticles 389 390 # Write the file 391 writer.writelines(lines) 392 393 return True
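For illustration only (not part of the module source; 'exporter', 'writer' and 'matrix_elements' below are placeholder names and the value 6 is invented), write_maxparticles_file hands the writer a two-line include file of the following form:

    exporter.write_maxparticles_file(writer, matrix_elements)
    # resulting maxparticles.inc, for a largest process with 6 external particles:
    #   integer max_particles
    #   parameter(max_particles=6)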
394 395 396 #=========================================================================== 397 # export the model 398 #===========================================================================
399 - def export_model_files(self, model_path):
400 """Configure the files/link of the process according to the model""" 401 402 # Import the model 403 for file in os.listdir(model_path): 404 if os.path.isfile(pjoin(model_path, file)): 405 shutil.copy2(pjoin(model_path, file), \ 406 pjoin(self.dir_path, 'Source', 'MODEL'))
407 408 422 429 430 #=========================================================================== 431 # export the helas routine 432 #===========================================================================
433 - def export_helas(self, helas_path):
434 """Configure the files/link of the process according to the model""" 435 436 # Import helas routine 437 for filename in os.listdir(helas_path): 438 filepos = pjoin(helas_path, filename) 439 if os.path.isfile(filepos): 440 if filepos.endswith('Makefile.template'): 441 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 442 elif filepos.endswith('Makefile'): 443 pass 444 else: 445 cp(filepos, self.dir_path + '/Source/DHELAS')
446 # following lines do the same but whithout symbolic link 447 # 448 #def export_helas(mgme_dir, dir_path): 449 # 450 # # Copy the HELAS directory 451 # helas_dir = pjoin(mgme_dir, 'HELAS') 452 # for filename in os.listdir(helas_dir): 453 # if os.path.isfile(pjoin(helas_dir, filename)): 454 # shutil.copy2(pjoin(helas_dir, filename), 455 # pjoin(dir_path, 'Source', 'DHELAS')) 456 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 457 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 458 # 459 460 #=========================================================================== 461 # generate_subprocess_directory_v4 462 #===========================================================================
463 - def generate_subprocess_directory_v4(self, matrix_element, 464 fortran_model, 465 me_number):
466 """Routine to generate a subprocess directory (for inheritance)""" 467 468 pass
469 470 #=========================================================================== 471 # get_source_libraries_list 472 #===========================================================================
473 - def get_source_libraries_list(self):
 474          """Returns the list of libraries to be compiled when compiling the
 475          SOURCE directory. It is different for loop_induced processes and
 476          also depends on the value of the 'output_dependencies' option."""
 477  
 478          return ['$(LIBDIR)libdhelas.$(libext)',
 479                  '$(LIBDIR)libpdf.$(libext)',
 480                  '$(LIBDIR)libmodel.$(libext)',
 481                  '$(LIBDIR)libcernlib.$(libext)']
482 483 #=========================================================================== 484 # write_source_makefile 485 #===========================================================================
486 - def write_source_makefile(self, writer):
 487          """Write the makefile of the Source directory for MG4 (MadEvent)"""
 488  
 489          path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
 490          set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list())
 491          if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
 492              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
 493  MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
 494  param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
 495          else:
 496              model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
 497  param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''
 498          text = open(path).read() % {'libraries': set_of_lib, 'model':model_line}
 499          writer.write(text)
 500  
 501          return True
502 503 #=========================================================================== 504 # write_nexternal_madspin 505 #===========================================================================
506 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
507 """Write the nexternal_prod.inc file for madspin""" 508 509 replace_dict = {} 510 511 replace_dict['nexternal'] = nexternal 512 replace_dict['ninitial'] = ninitial 513 514 file = """ \ 515 integer nexternal_prod 516 parameter (nexternal_prod=%(nexternal)d) 517 integer nincoming_prod 518 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 519 520 # Write the file 521 writer.writelines(file) 522 523 return True
524 525 #=========================================================================== 526 # write_helamp_madspin 527 #===========================================================================
528 - def write_helamp_madspin(self, writer, ncomb):
529 """Write the helamp.inc file for madspin""" 530 531 replace_dict = {} 532 533 replace_dict['ncomb'] = ncomb 534 535 file = """ \ 536 integer ncomb1 537 parameter (ncomb1=%(ncomb)d) 538 double precision helamp(ncomb1) 539 common /to_helamp/helamp """ % replace_dict 540 541 # Write the file 542 writer.writelines(file) 543 544 return True
545 546 547 #=========================================================================== 548 # write_nexternal_file 549 #===========================================================================
550 - def write_nexternal_file(self, writer, nexternal, ninitial):
551 """Write the nexternal.inc file for MG4""" 552 553 replace_dict = {} 554 555 replace_dict['nexternal'] = nexternal 556 replace_dict['ninitial'] = ninitial 557 558 file = """ \ 559 integer nexternal 560 parameter (nexternal=%(nexternal)d) 561 integer nincoming 562 parameter (nincoming=%(ninitial)d)""" % replace_dict 563 564 # Write the file 565 writer.writelines(file) 566 567 return True
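A hedged illustration (invented values; 'exporter' and 'writer' are placeholders, the latter standing for a file writer such as the writers.FileWriter used elsewhere in this module): for a 2 -> 3 process one would call

    exporter.write_nexternal_file(writer, 5, 2)

and nexternal.inc would contain, up to leading whitespace:

    integer nexternal
    parameter (nexternal=5)
    integer nincoming
    parameter (nincoming=2)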
568 569 #=========================================================================== 570 # write_pmass_file 571 #===========================================================================
572 - def write_pmass_file(self, writer, matrix_element):
573 """Write the pmass.inc file for MG4""" 574 575 model = matrix_element.get('processes')[0].get('model') 576 577 lines = [] 578 for wf in matrix_element.get_external_wavefunctions(): 579 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 580 if mass.lower() != "zero": 581 mass = "abs(%s)" % mass 582 583 lines.append("pmass(%d)=%s" % \ 584 (wf.get('number_external'), mass)) 585 586 # Write the file 587 writer.writelines(lines) 588 589 return True
590 591 #=========================================================================== 592 # write_ngraphs_file 593 #===========================================================================
594 - def write_ngraphs_file(self, writer, nconfigs):
595 """Write the ngraphs.inc file for MG4. Needs input from 596 write_configs_file.""" 597 598 file = " integer n_max_cg\n" 599 file = file + "parameter (n_max_cg=%d)" % nconfigs 600 601 # Write the file 602 writer.writelines(file) 603 604 return True
605 606 #=========================================================================== 607 # write_leshouche_file 608 #===========================================================================
609 - def write_leshouche_file(self, writer, matrix_element):
610 """Write the leshouche.inc file for MG4""" 611 612 # Write the file 613 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 614 615 return True
616 617 #=========================================================================== 618 # get_leshouche_lines 619 #===========================================================================
620 - def get_leshouche_lines(self, matrix_element, numproc):
621 """Write the leshouche.inc file for MG4""" 622 623 # Extract number of external particles 624 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 625 626 lines = [] 627 for iproc, proc in enumerate(matrix_element.get('processes')): 628 legs = proc.get_legs_with_decays() 629 lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \ 630 (iproc + 1, numproc+1, nexternal, 631 ",".join([str(l.get('id')) for l in legs]))) 632 if iproc == 0 and numproc == 0: 633 for i in [1, 2]: 634 lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \ 635 (i, nexternal, 636 ",".join([ "%3r" % 0 ] * ninitial + \ 637 [ "%3r" % i ] * (nexternal - ninitial)))) 638 639 # Here goes the color connections corresponding to the JAMPs 640 # Only one output, for the first subproc! 641 if iproc == 0: 642 # If no color basis, just output trivial color flow 643 if not matrix_element.get('color_basis'): 644 for i in [1, 2]: 645 lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \ 646 (i, numproc+1,nexternal, 647 ",".join([ "%3r" % 0 ] * nexternal))) 648 649 else: 650 # First build a color representation dictionnary 651 repr_dict = {} 652 for l in legs: 653 repr_dict[l.get('number')] = \ 654 proc.get('model').get_particle(l.get('id')).get_color()\ 655 * (-1)**(1+l.get('state')) 656 # Get the list of color flows 657 color_flow_list = \ 658 matrix_element.get('color_basis').color_flow_decomposition(repr_dict, 659 ninitial) 660 # And output them properly 661 for cf_i, color_flow_dict in enumerate(color_flow_list): 662 for i in [0, 1]: 663 lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \ 664 (i + 1, cf_i + 1, numproc+1, nexternal, 665 ",".join(["%3r" % color_flow_dict[l.get('number')][i] \ 666 for l in legs]))) 667 668 return lines
669 670 671 672 673 #=========================================================================== 674 # write_maxamps_file 675 #===========================================================================
676 - def write_maxamps_file(self, writer, maxamps, maxflows, 677 maxproc,maxsproc):
678 """Write the maxamps.inc file for MG4.""" 679 680 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 681 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 682 (maxamps, maxflows) 683 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 684 (maxproc, maxsproc) 685 686 # Write the file 687 writer.writelines(file) 688 689 return True
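With invented numbers (maxamps=10, maxflows=2, maxproc=1, maxsproc=1; 'exporter' and 'writer' are placeholders), a call such as exporter.write_maxamps_file(writer, 10, 2, 1, 1) would produce a maxamps.inc of the form:

    integer maxamps, maxflow, maxproc, maxsproc
    parameter (maxamps=10, maxflow=2)
    parameter (maxproc=1, maxsproc=1)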
690 691 #=========================================================================== 692 # write_props_file 693 #===========================================================================
694 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
695 """Write the props.inc file for MadEvent. Needs input from 696 write_configs_file.""" 697 698 lines = [] 699 700 particle_dict = matrix_element.get('processes')[0].get('model').\ 701 get('particle_dict') 702 703 for iconf, configs in enumerate(s_and_t_channels): 704 for vertex in configs[0] + configs[1][:-1]: 705 leg = vertex.get('legs')[-1] 706 if leg.get('id') not in particle_dict: 707 # Fake propagator used in multiparticle vertices 708 mass = 'zero' 709 width = 'zero' 710 pow_part = 0 711 else: 712 particle = particle_dict[leg.get('id')] 713 # Get mass 714 if particle.get('mass').lower() == 'zero': 715 mass = particle.get('mass') 716 else: 717 mass = "abs(%s)" % particle.get('mass') 718 # Get width 719 if particle.get('width').lower() == 'zero': 720 width = particle.get('width') 721 else: 722 width = "abs(%s)" % particle.get('width') 723 724 pow_part = 1 + int(particle.is_boson()) 725 726 lines.append("prmass(%d,%d) = %s" % \ 727 (leg.get('number'), iconf + 1, mass)) 728 lines.append("prwidth(%d,%d) = %s" % \ 729 (leg.get('number'), iconf + 1, width)) 730 lines.append("pow(%d,%d) = %d" % \ 731 (leg.get('number'), iconf + 1, pow_part)) 732 733 # Write the file 734 writer.writelines(lines) 735 736 return True
737 738 739 740 741 742 #=========================================================================== 743 # Routines to output UFO models in MG4 format 744 #=========================================================================== 745
746 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 747 wanted_couplings = []):
748 """ Create a full valid MG4 model from a MG5 model (coming from UFO)""" 749 750 # Make sure aloha is in quadruple precision if needed 751 old_aloha_mp=aloha.mp_precision 752 aloha.mp_precision=self.opt['mp'] 753 754 # create the MODEL 755 write_dir=pjoin(self.dir_path, 'Source', 'MODEL') 756 model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic) 757 model_builder.build(wanted_couplings) 758 759 # Backup the loop mode, because it can be changed in what follows. 760 old_loop_mode = aloha.loop_mode 761 762 # Create the aloha model or use the existing one (for loop exporters 763 # this is useful as the aloha model will be used again in the 764 # LoopHelasMatrixElements generated). We do not save the model generated 765 # here if it didn't exist already because it would be a waste of 766 # memory for tree level applications since aloha is only needed at the 767 # time of creating the aloha fortran subroutines. 768 if hasattr(self, 'aloha_model'): 769 aloha_model = self.aloha_model 770 else: 771 aloha_model = create_aloha.AbstractALOHAModel(model.get('name')) 772 aloha_model.add_Lorentz_object(model.get('lorentz')) 773 774 # Compute the subroutines 775 if wanted_lorentz: 776 aloha_model.compute_subset(wanted_lorentz) 777 else: 778 aloha_model.compute_all(save=False) 779 780 # Write them out 781 write_dir=pjoin(self.dir_path, 'Source', 'DHELAS') 782 aloha_model.write(write_dir, 'Fortran') 783 784 # Revert the original aloha loop mode 785 aloha.loop_mode = old_loop_mode 786 787 #copy Helas Template 788 cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile') 789 if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]): 790 cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f', 791 write_dir+'/aloha_functions.f') 792 aloha_model.loop_mode = False 793 else: 794 cp(MG5DIR + '/aloha/template_files/aloha_functions.f', 795 write_dir+'/aloha_functions.f') 796 create_aloha.write_aloha_file_inc(write_dir, '.f', '.o') 797 798 # Make final link in the Process 799 self.make_model_symbolic_link() 800 801 # Re-establish original aloha mode 802 aloha.mp_precision=old_aloha_mp
803 804 #=========================================================================== 805 # Helper functions 806 #===========================================================================
807 - def get_mg5_info_lines(self):
808 """Return info lines for MG5, suitable to place at beginning of 809 Fortran files""" 810 811 info = misc.get_pkg_info() 812 info_lines = "" 813 if info and info.has_key('version') and info.has_key('date'): 814 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 815 (info['version'], info['date']) 816 info_lines = info_lines + \ 817 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 818 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 819 else: 820 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 821 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 822 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 823 824 return info_lines
825
826 - def get_process_info_lines(self, matrix_element):
 827          """Return info lines describing the processes for this matrix element"""
 828  
 829          return "\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \
 830                             for process in matrix_element.get('processes')])
831 832
833 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
834 """Return the Helicity matrix definition lines for this matrix element""" 835 836 helicity_line_list = [] 837 i = 0 838 for helicities in matrix_element.get_helicity_matrix(): 839 i = i + 1 840 int_list = [i, len(helicities)] 841 int_list.extend(helicities) 842 helicity_line_list.append(\ 843 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 844 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 845 846 return "\n".join(helicity_line_list)
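As an illustration of the emitted format (helicities invented): for a four-particle matrix element whose first helicity combination is (-1, -1, -1, 1), the first line returned would read

    DATA (NHEL(I,   1),I=1,4) /-1,-1,-1, 1/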
847
848 - def get_ic_line(self, matrix_element):
849 """Return the IC definition line coming after helicities, required by 850 switchmom in madevent""" 851 852 nexternal = matrix_element.get_nexternal_ninitial()[0] 853 int_list = range(1, nexternal + 1) 854 855 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 856 ",".join([str(i) for \ 857 i in int_list]))
858
859 - def set_chosen_SO_index(self, process, squared_orders):
860 """ From the squared order constraints set by the user, this function 861 finds what indices of the squared_orders list the user intends to pick. 862 It returns this as a string of comma-separated successive '.true.' or 863 '.false.' for each index.""" 864 865 user_squared_orders = process.get('squared_orders') 866 split_orders = process.get('split_orders') 867 868 if len(user_squared_orders)==0: 869 return ','.join(['.true.']*len(squared_orders)) 870 871 res = [] 872 for sqsos in squared_orders: 873 is_a_match = True 874 for user_sqso, value in user_squared_orders.items(): 875 if (process.get_squared_order_type(user_sqso) =='==' and \ 876 value!=sqsos[split_orders.index(user_sqso)]) or \ 877 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 878 value<sqsos[split_orders.index(user_sqso)]) or \ 879 (process.get_squared_order_type(user_sqso) == '>' and \ 880 value>=sqsos[split_orders.index(user_sqso)]): 881 is_a_match = False 882 break 883 res.append('.true.' if is_a_match else '.false.') 884 885 return ','.join(res)
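A hedged worked example (process content invented, assuming the accessors behave as used above): with split_orders ['QCD','QED'], a user constraint QED<=2 and squared_orders [[4,0],[2,2],[0,4]], the first two combinations satisfy the constraint and the last does not:

    # 'exporter' and 'proc' are placeholders, not objects defined in this listing
    exporter.set_chosen_SO_index(proc, [[4, 0], [2, 2], [0, 4]])
    # -> '.true.,.true.,.false.'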
886
887 - def get_split_orders_lines(self, orders, array_name, n=5):
888 """ Return the split orders definition as defined in the list orders and 889 for the name of the array 'array_name'. Split rows in chunks of size n.""" 890 891 ret_list = [] 892 for index, order in enumerate(orders): 893 for k in xrange(0, len(order), n): 894 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 895 (array_name,index + 1, k + 1, min(k + n, len(order)), 896 ','.join(["%5r" % i for i in order[k:k + n]]))) 897 return ret_list
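A small invented example: get_split_orders_lines([[0, 2], [2, 0]], 'AMPSPLITORDERS') would return

    DATA (AMPSPLITORDERS(  1,i),i=  1,  2) /    0,    2/
    DATA (AMPSPLITORDERS(  2,i),i=  1,  2) /    2,    0/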
898
899 - def format_integer_list(self, list, name, n=5):
900 """ Return an initialization of the python list in argument following 901 the fortran syntax using the data keyword assignment, filling an array 902 of name 'name'. It splits rows in chunks of size n.""" 903 904 ret_list = [] 905 for k in xrange(0, len(list), n): 906 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 907 (name, k + 1, min(k + n, len(list)), 908 ','.join(["%5r" % i for i in list[k:k + n]]))) 909 return ret_list
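For example (invented input): format_integer_list([11, 12, 13, 14, 15, 16, 17], 'FOO') would return the two chunked DATA statements

    DATA (FOO(i),i=  1,  5) /   11,   12,   13,   14,   15/
    DATA (FOO(i),i=  6,  7) /   16,   17/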
910
911 - def get_color_data_lines(self, matrix_element, n=6):
912 """Return the color matrix definition lines for this matrix element. Split 913 rows in chunks of size n.""" 914 915 if not matrix_element.get('color_matrix'): 916 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 917 else: 918 ret_list = [] 919 my_cs = color.ColorString() 920 for index, denominator in \ 921 enumerate(matrix_element.get('color_matrix').\ 922 get_line_denominators()): 923 # First write the common denominator for this color matrix line 924 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 925 # Then write the numerators for the matrix elements 926 num_list = matrix_element.get('color_matrix').\ 927 get_line_numerators(index, denominator) 928 929 for k in xrange(0, len(num_list), n): 930 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 931 (index + 1, k + 1, min(k + n, len(num_list)), 932 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 933 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 934 ret_list.append("C %s" % repr(my_cs)) 935 return ret_list
936 937
938 - def get_den_factor_line(self, matrix_element):
939 """Return the denominator factor line for this matrix element""" 940 941 return "DATA IDEN/%2r/" % \ 942 matrix_element.get_denominator_factor()
943
944 - def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
945 """Return the ICOLAMP matrix, showing which JAMPs contribute to 946 which configs (diagrams).""" 947 948 ret_list = [] 949 950 booldict = {False: ".false.", True: ".true."} 951 952 if not matrix_element.get('color_basis'): 953 # No color, so only one color factor. Simply write a ".true." 954 # for each config (i.e., each diagram with only 3 particle 955 # vertices 956 configs = len(mapconfigs) 957 ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \ 958 (num_matrix_element, configs, 959 ','.join([".true." for i in range(configs)]))) 960 return ret_list 961 962 # There is a color basis - create a list showing which JAMPs have 963 # contributions to which configs 964 965 # Only want to include leading color flows, so find max_Nc 966 color_basis = matrix_element.get('color_basis') 967 968 # We don't want to include the power of Nc's which come from the potential 969 # loop color trace (i.e. in the case of a closed fermion loop for example) 970 # so we subtract it here when computing max_Nc 971 max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in 972 color_basis.values()],[])) 973 974 # Crate dictionary between diagram number and JAMP number 975 diag_jamp = {} 976 for ijamp, col_basis_elem in \ 977 enumerate(sorted(matrix_element.get('color_basis').keys())): 978 for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]: 979 # Only use color flows with Nc == max_Nc. However, notice that 980 # we don't want to include the Nc power coming from the loop 981 # in this counting. 982 if (diag_tuple[4]-diag_tuple[5]) == max_Nc: 983 diag_num = diag_tuple[0] + 1 984 # Add this JAMP number to this diag_num 985 diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \ 986 [ijamp+1] 987 988 colamps = ijamp + 1 989 for iconfig, num_diag in enumerate(mapconfigs): 990 if num_diag == 0: 991 continue 992 993 # List of True or False 994 bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)] 995 # Add line 996 ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \ 997 (iconfig+1, num_matrix_element, colamps, 998 ','.join(["%s" % booldict[b] for b in \ 999 bool_list]))) 1000 1001 return ret_list
1002
1003 - def get_amp2_lines(self, matrix_element, config_map = []):
1004 """Return the amp2(i) = sum(amp for diag(i))^2 lines""" 1005 1006 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 1007 # Get minimum legs in a vertex 1008 vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \ 1009 matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]] 1010 minvert = min(vert_list) if vert_list!=[] else 0 1011 1012 ret_lines = [] 1013 if config_map: 1014 # In this case, we need to sum up all amplitudes that have 1015 # identical topologies, as given by the config_map (which 1016 # gives the topology/config for each of the diagrams 1017 diagrams = matrix_element.get('diagrams') 1018 # Combine the diagrams with identical topologies 1019 config_to_diag_dict = {} 1020 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1021 if config_map[idiag] == 0: 1022 continue 1023 try: 1024 config_to_diag_dict[config_map[idiag]].append(idiag) 1025 except KeyError: 1026 config_to_diag_dict[config_map[idiag]] = [idiag] 1027 # Write out the AMP2s summing squares of amplitudes belonging 1028 # to eiher the same diagram or different diagrams with 1029 # identical propagator properties. Note that we need to use 1030 # AMP2 number corresponding to the first diagram number used 1031 # for that AMP2. 1032 for config in sorted(config_to_diag_dict.keys()): 1033 1034 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \ 1035 {"num": (config_to_diag_dict[config][0] + 1)} 1036 1037 amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \ 1038 sum([diagrams[idiag].get('amplitudes') for \ 1039 idiag in config_to_diag_dict[config]], [])]) 1040 1041 # Not using \sum |M|^2 anymore since this creates troubles 1042 # when ckm is not diagonal due to the JIM mechanism. 1043 if '+' in amp: 1044 line += "(%s)*dconjg(%s)" % (amp, amp) 1045 else: 1046 line += "%s*dconjg(%s)" % (amp, amp) 1047 ret_lines.append(line) 1048 else: 1049 for idiag, diag in enumerate(matrix_element.get('diagrams')): 1050 # Ignore any diagrams with 4-particle vertices. 1051 if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert: 1052 continue 1053 # Now write out the expression for AMP2, meaning the sum of 1054 # squared amplitudes belonging to the same diagram 1055 line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)} 1056 line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \ 1057 {"num": a.get('number')} for a in \ 1058 diag.get('amplitudes')]) 1059 ret_lines.append(line) 1060 1061 return ret_lines
1062  
1063      #===========================================================================
1064      # Returns the data statements initializing the coefficients for the JAMP
1065      # decomposition. It is used when the JAMP initialization is decided to be
1066      # done through big arrays containing the projection coefficients.
1067      #===========================================================================
1068 - def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\ 1069 n=50, Nc_value=3):
1070 """This functions return the lines defining the DATA statement setting 1071 the coefficients building the JAMPS out of the AMPS. Split rows in 1072 bunches of size n. 1073 One can specify the color_basis from which the color amplitudes originates 1074 so that there are commentaries telling what color structure each JAMP 1075 corresponds to.""" 1076 1077 if(not isinstance(color_amplitudes,list) or 1078 not (color_amplitudes and isinstance(color_amplitudes[0],list))): 1079 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs" 1080 1081 res_list = [] 1082 my_cs = color.ColorString() 1083 for index, coeff_list in enumerate(color_amplitudes): 1084 # Create the list of the complete numerical coefficient. 1085 coefs_list=[coefficient[0][0]*coefficient[0][1]*\ 1086 (fractions.Fraction(Nc_value)**coefficient[0][3]) for \ 1087 coefficient in coeff_list] 1088 # Create the list of the numbers of the contributing amplitudes. 1089 # Mutliply by -1 for those which have an imaginary coefficient. 1090 ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \ 1091 for coefficient in coeff_list] 1092 # Find the common denominator. 1093 commondenom=abs(reduce(fractions.gcd, coefs_list).denominator) 1094 num_list=[(coefficient*commondenom).numerator \ 1095 for coefficient in coefs_list] 1096 res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\ 1097 index+1,len(num_list))) 1098 res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\ 1099 index+1,commondenom)) 1100 if color_basis: 1101 my_cs.from_immutable(sorted(color_basis.keys())[index]) 1102 res_list.append("C %s" % repr(my_cs)) 1103 for k in xrange(0, len(num_list), n): 1104 res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \ 1105 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1106 ','.join(["%6r" % i for i in num_list[k:k + n]]))) 1107 res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \ 1108 (tag_letter,index + 1, k + 1, min(k + n, len(num_list)), 1109 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]]))) 1110 pass 1111 return res_list
1112 1113
1114 - def get_JAMP_lines_split_order(self, col_amps, split_order_amps, 1115 split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
1116 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1117 defined as a matrix element or directly as a color_amplitudes dictionary. 1118 The split_order_amps specifies the group of amplitudes sharing the same 1119 amplitude orders which should be put in together in a given set of JAMPS. 1120 The split_order_amps is supposed to have the format of the second output 1121 of the function get_split_orders_mapping function in helas_objects.py. 1122 The split_order_names is optional (it should correspond to the process 1123 'split_orders' attribute) and only present to provide comments in the 1124 JAMP definitions in the code.""" 1125 1126 # Let the user call get_JAMP_lines_split_order directly from a 1127 error_msg="Malformed '%s' argument passed to the "+\ 1128 "get_JAMP_lines_split_order function: %s"%str(split_order_amps) 1129 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1130 color_amplitudes=col_amps.get_color_amplitudes() 1131 elif(isinstance(col_amps,list)): 1132 if(col_amps and isinstance(col_amps[0],list)): 1133 color_amplitudes=col_amps 1134 else: 1135 raise MadGraph5Error, error_msg%'col_amps' 1136 else: 1137 raise MadGraph5Error, error_msg%'col_amps' 1138 1139 # Verify the sanity of the split_order_amps and split_order_names args 1140 if isinstance(split_order_amps,list): 1141 for elem in split_order_amps: 1142 if len(elem)!=2: 1143 raise MadGraph5Error, error_msg%'split_order_amps' 1144 # Check the first element of the two lists to make sure they are 1145 # integers, although in principle they should all be integers. 1146 if not isinstance(elem[0],tuple) or \ 1147 not isinstance(elem[1],tuple) or \ 1148 not isinstance(elem[0][0],int) or \ 1149 not isinstance(elem[1][0],int): 1150 raise MadGraph5Error, error_msg%'split_order_amps' 1151 else: 1152 raise MadGraph5Error, error_msg%'split_order_amps' 1153 1154 if not split_order_names is None: 1155 if isinstance(split_order_names,list): 1156 # Should specify the same number of names as there are elements 1157 # in the key of the split_order_amps. 1158 if len(split_order_names)!=len(split_order_amps[0][0]): 1159 raise MadGraph5Error, error_msg%'split_order_names' 1160 # Check the first element of the list to be a string 1161 if not isinstance(split_order_names[0],str): 1162 raise MadGraph5Error, error_msg%'split_order_names' 1163 else: 1164 raise MadGraph5Error, error_msg%'split_order_names' 1165 1166 # Now scan all contributing orders to be individually computed and 1167 # construct the list of color_amplitudes for JAMP to be constructed 1168 # accordingly. 1169 res_list=[] 1170 for i, amp_order in enumerate(split_order_amps): 1171 col_amps_order = [] 1172 for jamp in color_amplitudes: 1173 col_amps_order.append(filter(lambda col_amp: 1174 col_amp[1] in amp_order[1],jamp)) 1175 if split_order_names: 1176 res_list.append('C JAMPs contributing to orders '+' '.join( 1177 ['%s=%i'%order for order in zip(split_order_names, 1178 amp_order[0])])) 1179 if self.opt['export_format'] in ['madloop_matchbox']: 1180 res_list.extend(self.get_JAMP_lines(col_amps_order, 1181 JAMP_format="JAMP(%s,{0})".format(str(i+1)), 1182 JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1)))) 1183 else: 1184 res_list.extend(self.get_JAMP_lines(col_amps_order, 1185 JAMP_format="JAMP(%s,{0})".format(str(i+1)))) 1186 1187 return res_list
1188 1189
1190 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", 1191 split=-1):
1192 """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps 1193 defined as a matrix element or directly as a color_amplitudes dictionary, 1194 Jamp_formatLC should be define to allow to add LeadingColor computation 1195 (usefull for MatchBox) 1196 The split argument defines how the JAMP lines should be split in order 1197 not to be too long.""" 1198 1199 # Let the user call get_JAMP_lines directly from a MatrixElement or from 1200 # the color amplitudes lists. 1201 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 1202 color_amplitudes=col_amps.get_color_amplitudes() 1203 elif(isinstance(col_amps,list)): 1204 if(col_amps and isinstance(col_amps[0],list)): 1205 color_amplitudes=col_amps 1206 else: 1207 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1208 else: 1209 raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines" 1210 1211 1212 res_list = [] 1213 for i, coeff_list in enumerate(color_amplitudes): 1214 # It might happen that coeff_list is empty if this function was 1215 # called from get_JAMP_lines_split_order (i.e. if some color flow 1216 # does not contribute at all for a given order). 1217 # In this case we simply set it to 0. 1218 if coeff_list==[]: 1219 res_list.append(((JAMP_format+"=0D0") % str(i + 1))) 1220 continue 1221 # Break the JAMP definition into 'n=split' pieces to avoid having 1222 # arbitrarly long lines. 1223 first=True 1224 n = (len(coeff_list)+1 if split<=0 else split) 1225 while coeff_list!=[]: 1226 coefs=coeff_list[:n] 1227 coeff_list=coeff_list[n:] 1228 res = ((JAMP_format+"=") % str(i + 1)) + \ 1229 ((JAMP_format % str(i + 1)) if not first and split>0 else '') 1230 1231 first=False 1232 # Optimization: if all contributions to that color basis element have 1233 # the same coefficient (up to a sign), put it in front 1234 list_fracs = [abs(coefficient[0][1]) for coefficient in coefs] 1235 common_factor = False 1236 diff_fracs = list(set(list_fracs)) 1237 if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1: 1238 common_factor = True 1239 global_factor = diff_fracs[0] 1240 res = res + '%s(' % self.coeff(1, global_factor, False, 0) 1241 1242 # loop for JAMP 1243 for (coefficient, amp_number) in coefs: 1244 if not coefficient: 1245 continue 1246 if common_factor: 1247 res = (res + "%s" + AMP_format) % \ 1248 (self.coeff(coefficient[0], 1249 coefficient[1] / abs(coefficient[1]), 1250 coefficient[2], 1251 coefficient[3]), 1252 str(amp_number)) 1253 else: 1254 res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0], 1255 coefficient[1], 1256 coefficient[2], 1257 coefficient[3]), 1258 str(amp_number)) 1259 1260 if common_factor: 1261 res = res + ')' 1262 1263 res_list.append(res) 1264 1265 return res_list
1266
1267 - def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
1268 """Generate the PDF lines for the auto_dsig.f file""" 1269 1270 processes = matrix_element.get('processes') 1271 model = processes[0].get('model') 1272 1273 pdf_definition_lines = "" 1274 pdf_data_lines = "" 1275 pdf_lines = "" 1276 1277 if ninitial == 1: 1278 pdf_lines = "PD(0) = 0d0\nIPROC = 0\n" 1279 for i, proc in enumerate(processes): 1280 process_line = proc.base_string() 1281 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line 1282 pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n" 1283 pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n" 1284 else: 1285 # Pick out all initial state particles for the two beams 1286 initial_states = [sorted(list(set([p.get_initial_pdg(1) for \ 1287 p in processes]))), 1288 sorted(list(set([p.get_initial_pdg(2) for \ 1289 p in processes])))] 1290 1291 # Prepare all variable names 1292 pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \ 1293 sum(initial_states,[])]) 1294 for key,val in pdf_codes.items(): 1295 pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m') 1296 1297 # Set conversion from PDG code to number used in PDF calls 1298 pdgtopdf = {21: 0, 22: 7} 1299 1300 # Fill in missing entries of pdgtopdf 1301 for pdg in sum(initial_states,[]): 1302 if not pdg in pdgtopdf and not pdg in pdgtopdf.values(): 1303 pdgtopdf[pdg] = pdg 1304 elif pdg not in pdgtopdf and pdg in pdgtopdf.values(): 1305 # If any particle has pdg code 7, we need to use something else 1306 pdgtopdf[pdg] = 6000000 + pdg 1307 1308 # Get PDF variable declarations for all initial states 1309 for i in [0,1]: 1310 pdf_definition_lines += "DOUBLE PRECISION " + \ 1311 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1312 for pdg in \ 1313 initial_states[i]]) + \ 1314 "\n" 1315 1316 # Get PDF data lines for all initial states 1317 for i in [0,1]: 1318 pdf_data_lines += "DATA " + \ 1319 ",".join(["%s%d" % (pdf_codes[pdg],i+1) \ 1320 for pdg in initial_states[i]]) + \ 1321 "/%d*1D0/" % len(initial_states[i]) + \ 1322 "\n" 1323 1324 # Get PDF lines for all different initial states 1325 for i, init_states in enumerate(initial_states): 1326 if subproc_group: 1327 pdf_lines = pdf_lines + \ 1328 "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \ 1329 % (i + 1, i + 1) 1330 else: 1331 pdf_lines = pdf_lines + \ 1332 "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \ 1333 % (i + 1, i + 1) 1334 1335 for initial_state in init_states: 1336 if initial_state in pdf_codes.keys(): 1337 if subproc_group: 1338 pdf_lines = pdf_lines + \ 1339 ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP," + \ 1340 "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \ 1341 (pdf_codes[initial_state], 1342 i + 1, i + 1, pdgtopdf[initial_state], 1343 i + 1, i + 1) 1344 else: 1345 pdf_lines = pdf_lines + \ 1346 ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP," + \ 1347 "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \ 1348 (pdf_codes[initial_state], 1349 i + 1, i + 1, pdgtopdf[initial_state], 1350 i + 1, i + 1) 1351 pdf_lines = pdf_lines + "ENDIF\n" 1352 1353 # Add up PDFs for the different initial state particles 1354 pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n" 1355 for proc in processes: 1356 process_line = proc.base_string() 1357 pdf_lines = pdf_lines + "IPROC=IPROC+1 ! 
" + process_line 1358 pdf_lines = pdf_lines + "\nPD(IPROC)=" 1359 for ibeam in [1, 2]: 1360 initial_state = proc.get_initial_pdg(ibeam) 1361 if initial_state in pdf_codes.keys(): 1362 pdf_lines = pdf_lines + "%s%d*" % \ 1363 (pdf_codes[initial_state], ibeam) 1364 else: 1365 pdf_lines = pdf_lines + "1d0*" 1366 # Remove last "*" from pdf_lines 1367 pdf_lines = pdf_lines[:-1] + "\n" 1368 pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n" 1369 1370 # Remove last line break from the return variables 1371 return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1372 1373 #=========================================================================== 1374 # write_props_file 1375 #===========================================================================
1376 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1377 """Write the props.inc file for MadEvent. Needs input from 1378 write_configs_file.""" 1379 1380 lines = [] 1381 1382 particle_dict = matrix_element.get('processes')[0].get('model').\ 1383 get('particle_dict') 1384 1385 for iconf, configs in enumerate(s_and_t_channels): 1386 for vertex in configs[0] + configs[1][:-1]: 1387 leg = vertex.get('legs')[-1] 1388 if leg.get('id') not in particle_dict: 1389 # Fake propagator used in multiparticle vertices 1390 mass = 'zero' 1391 width = 'zero' 1392 pow_part = 0 1393 else: 1394 particle = particle_dict[leg.get('id')] 1395 # Get mass 1396 if particle.get('mass').lower() == 'zero': 1397 mass = particle.get('mass') 1398 else: 1399 mass = "abs(%s)" % particle.get('mass') 1400 # Get width 1401 if particle.get('width').lower() == 'zero': 1402 width = particle.get('width') 1403 else: 1404 width = "abs(%s)" % particle.get('width') 1405 1406 pow_part = 1 + int(particle.is_boson()) 1407 1408 lines.append("prmass(%d,%d) = %s" % \ 1409 (leg.get('number'), iconf + 1, mass)) 1410 lines.append("prwidth(%d,%d) = %s" % \ 1411 (leg.get('number'), iconf + 1, width)) 1412 lines.append("pow(%d,%d) = %d" % \ 1413 (leg.get('number'), iconf + 1, pow_part)) 1414 1415 # Write the file 1416 writer.writelines(lines) 1417 1418 return True
1419 1420 #=========================================================================== 1421 # write_configs_file 1422 #===========================================================================
1423 - def write_configs_file(self, writer, matrix_element):
1424 """Write the configs.inc file for MadEvent""" 1425 1426 # Extract number of external particles 1427 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1428 1429 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1430 mapconfigs = [c[0] for c in configs] 1431 model = matrix_element.get('processes')[0].get('model') 1432 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1433 [[c[1]] for c in configs], 1434 mapconfigs, 1435 nexternal, ninitial, 1436 model)
1437 1438 #=========================================================================== 1439 # write_configs_file_from_diagrams 1440 #===========================================================================
1441 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 1442 nexternal, ninitial, model):
1443 """Write the actual configs.inc file. 1444 1445 configs is the diagrams corresponding to configs (each 1446 diagrams is a list of corresponding diagrams for all 1447 subprocesses, with None if there is no corresponding diagrams 1448 for a given process). 1449 mapconfigs gives the diagram number for each config. 1450 1451 For s-channels, we need to output one PDG for each subprocess in 1452 the subprocess group, in order to be able to pick the right 1453 one for multiprocesses.""" 1454 1455 lines = [] 1456 1457 s_and_t_channels = [] 1458 1459 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 1460 for config in configs if [d for d in config if d][0].\ 1461 get_vertex_leg_numbers()!=[]] 1462 minvert = min(vert_list) if vert_list!=[] else 0 1463 1464 # Number of subprocesses 1465 nsubprocs = len(configs[0]) 1466 1467 nconfigs = 0 1468 1469 new_pdg = model.get_first_non_pdg() 1470 1471 for iconfig, helas_diags in enumerate(configs): 1472 if any(vert > minvert for vert in [d for d in helas_diags if d]\ 1473 [0].get_vertex_leg_numbers()) : 1474 # Only 3-vertices allowed in configs.inc except for vertices 1475 # which originate from a shrunk loop. 1476 continue 1477 nconfigs += 1 1478 1479 # Need s- and t-channels for all subprocesses, including 1480 # those that don't contribute to this config 1481 empty_verts = [] 1482 stchannels = [] 1483 for h in helas_diags: 1484 if h: 1485 # get_s_and_t_channels gives vertices starting from 1486 # final state external particles and working inwards 1487 stchannels.append(h.get('amplitudes')[0].\ 1488 get_s_and_t_channels(ninitial, model, new_pdg)) 1489 else: 1490 stchannels.append((empty_verts, None)) 1491 1492 # For t-channels, just need the first non-empty one 1493 tchannels = [t for s,t in stchannels if t != None][0] 1494 1495 # For s_and_t_channels (to be used later) use only first config 1496 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 1497 tchannels]) 1498 1499 # Make sure empty_verts is same length as real vertices 1500 if any([s for s,t in stchannels]): 1501 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 1502 1503 # Reorganize s-channel vertices to get a list of all 1504 # subprocesses for each vertex 1505 schannels = zip(*[s for s,t in stchannels]) 1506 else: 1507 schannels = [] 1508 1509 allchannels = schannels 1510 if len(tchannels) > 1: 1511 # Write out tchannels only if there are any non-trivial ones 1512 allchannels = schannels + tchannels 1513 1514 # Write out propagators for s-channel and t-channel vertices 1515 1516 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 1517 # Correspondance between the config and the diagram = amp2 1518 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 1519 mapconfigs[iconfig])) 1520 1521 for verts in allchannels: 1522 if verts in schannels: 1523 vert = [v for v in verts if v][0] 1524 else: 1525 vert = verts 1526 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 1527 last_leg = vert.get('legs')[-1] 1528 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 1529 (last_leg.get('number'), nconfigs, len(daughters), 1530 ",".join([str(d) for d in daughters]))) 1531 if verts in schannels: 1532 pdgs = [] 1533 for v in verts: 1534 if v: 1535 pdgs.append(v.get('legs')[-1].get('id')) 1536 else: 1537 pdgs.append(0) 1538 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1539 (last_leg.get('number'), nconfigs, nsubprocs, 1540 ",".join([str(d) for d in pdgs]))) 1541 lines.append("data tprid(%d,%d)/0/" % \ 1542 (last_leg.get('number'), 
nconfigs)) 1543 elif verts in tchannels[:-1]: 1544 lines.append("data tprid(%d,%d)/%d/" % \ 1545 (last_leg.get('number'), nconfigs, 1546 abs(last_leg.get('id')))) 1547 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 1548 (last_leg.get('number'), nconfigs, nsubprocs, 1549 ",".join(['0'] * nsubprocs))) 1550 1551 # Write out number of configs 1552 lines.append("# Number of configs") 1553 lines.append("data mapconfig(0)/%d/" % nconfigs) 1554 1555 # Write the file 1556 writer.writelines(lines) 1557 1558 return s_and_t_channels
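    # Illustration (not part of the original source): for a single 2 -> 2
    # subprocess whose only config is an s-channel Z (PDG 23) built from
    # final-state legs 3 and 4, the data statements assembled above would
    # read roughly as follows in configs.inc:
    #
    #   # Diagram 1
    #   data mapconfig(1)/1/
    #   data (iforest(i,-1,1),i=1,2)/3,4/
    #   data (sprop(i,-1,1),i=1,1)/23/
    #   data tprid(-1,1)/0/
    #   # Number of configs
    #   data mapconfig(0)/1/
    #
    # Internal (propagator) legs carry negative numbers, and sprop holds one
    # PDG code per subprocess in the group so that multiprocesses can pick
    # the right resonance.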
1559 1560 #=========================================================================== 1561 # Global helper methods 1562 #=========================================================================== 1563
1564 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1565 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1566 1567 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1568 1569 if total_coeff == 1: 1570 if is_imaginary: 1571 return '+imag1*' 1572 else: 1573 return '+' 1574 elif total_coeff == -1: 1575 if is_imaginary: 1576 return '-imag1*' 1577 else: 1578 return '-' 1579 1580 res_str = '%+iD0' % total_coeff.numerator 1581 1582 if total_coeff.denominator != 1: 1583 # Check if total_coeff is an integer 1584 res_str = res_str + '/%iD0' % total_coeff.denominator 1585 1586 if is_imaginary: 1587 res_str = res_str + '*imag1' 1588 1589 return res_str + '*'
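    # Illustration (not part of the original source): assuming an exporter
    # instance 'exp', the coefficient formatting above behaves as follows:
    #
    #   exp.coeff(1, fractions.Fraction(1, 1), False, 0)   ->  '+'
    #   exp.coeff(1, fractions.Fraction(1, 1), True, 0)    ->  '+imag1*'
    #   exp.coeff(-1, fractions.Fraction(1, 3), False, 1)  ->  '-'   (1/3 * Nc cancels)
    #   exp.coeff(2, fractions.Fraction(1, 3), True, 0)    ->  '+2D0/3D0*imag1*'
    #
    # i.e. unit coefficients collapse to a bare sign so the JAMP lines stay
    # short, while everything else is written as a Fortran double precision
    # ratio.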
1590 1591
    def set_fortran_compiler(self, default_compiler, force=False):
        """Set the Fortran compiler based on what is available on the system."""

        # Check for a Fortran compiler
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            logger.warning('No Fortran Compiler detected! Please install one')
            # misc.which may have failed, so try the requested compiler anyway
            f77_compiler = default_compiler['fortran']
        else:
            raise MadGraph5Error, 'No Fortran Compiler detected! Please install one'
        logger.info('Use Fortran compiler ' + f77_compiler)

        # Check for an f2py compiler. 1. Set the default.
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # 2. Try to find the correct one.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}

        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for the Template, but not when running on a cluster
        if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler
1642 1643 # an alias for backward compatibility 1644 set_compiler = set_fortran_compiler 1645 1646
1647 - def set_cpp_compiler(self, default_compiler, force=False):
1648 """Set compiler based on what's available on the system""" 1649 1650 # Check for compiler 1651 if default_compiler and misc.which(default_compiler): 1652 compiler = default_compiler 1653 elif misc.which('g++'): 1654 #check if clang version 1655 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1656 stderr=subprocess.PIPE) 1657 out, _ = p.communicate() 1658 if 'clang' in out and misc.which('clang'): 1659 compiler = 'clang' 1660 else: 1661 compiler = 'g++' 1662 elif misc.which('c++'): 1663 compiler = 'c++' 1664 elif misc.which('clang'): 1665 compiler = 'clang' 1666 elif default_compiler: 1667 logger.warning('No c++ Compiler detected! Please install one') 1668 compiler = default_compiler # maybe misc fail so try with it 1669 else: 1670 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1671 logger.info('Use c++ compiler ' + compiler) 1672 self.replace_make_opt_c_compiler(compiler) 1673 # Replace also for Template but not for cluster 1674 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1675 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1676 1677 return compiler
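    # Illustration (not part of the original source): on macOS 'g++' is often
    # a thin wrapper around clang, which is why the branch above inspects the
    # version banner instead of trusting the executable name. A minimal
    # standalone version of that check, using only the standard library:
    #
    #   import subprocess
    #   p = subprocess.Popen(['g++', '--version'], stdout=subprocess.PIPE,
    #                        stderr=subprocess.PIPE)
    #   out, _ = p.communicate()
    #   is_really_clang = 'clang' in out
    #
    # (In Python 3 'out' would be bytes and need decoding; this module is
    # written for Python 2, where it is a plain str.)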
1678 1679
1680 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1681 """Set FC=compiler in Source/make_opts""" 1682 1683 assert isinstance(compilers, dict) 1684 1685 mod = False #avoid to rewrite the file if not needed 1686 if not root_dir: 1687 root_dir = self.dir_path 1688 1689 compiler= compilers['fortran'] 1690 f2py_compiler = compilers['f2py'] 1691 if not f2py_compiler: 1692 f2py_compiler = 'f2py' 1693 for_update= {'DEFAULT_F_COMPILER':compiler, 1694 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1695 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1696 1697 try: 1698 common_run_interface.CommonRunCmd.update_make_opts_full( 1699 make_opts, for_update) 1700 except IOError: 1701 if root_dir == self.dir_path: 1702 logger.info('Fail to set compiler. Trying to continue anyway.')
1703
    def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
        """Set CXX=compiler in Source/make_opts.
        The compiler is also inspected, in order to set some extra flags
        when it is clang linked against libc++ (on macOS)."""

        is_clang = misc.detect_if_cpp_compiler_is_clang(compiler)
        is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++'

        # List of the variables to set in the make_opts file
        for_update = {'DEFAULT_CPP_COMPILER': compiler,
                      'MACFLAG': '-mmacosx-version-min=10.7' if is_clang and is_lc else '',
                      'STDLIB': '-lc++' if is_lc else '-lstdc++',
                      'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else ''
                      }

        if not root_dir:
            root_dir = self.dir_path
        make_opts = pjoin(root_dir, 'Source', 'make_opts')

        try:
            common_run_interface.CommonRunCmd.update_make_opts_full(
                make_opts, for_update)
        except IOError:
            if root_dir == self.dir_path:
                logger.info('Failed to set compiler. Trying to continue anyway.')

        return
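    # Illustration (not part of the original source): the two typical
    # outcomes of the dictionary built above are
    #
    #   clang with libc++ (recent macOS):
    #     {'DEFAULT_CPP_COMPILER': 'clang', 'MACFLAG': '-mmacosx-version-min=10.7',
    #      'STDLIB': '-lc++', 'STDLIB_FLAG': '-stdlib=libc++'}
    #   g++ with libstdc++ (typical Linux):
    #     {'DEFAULT_CPP_COMPILER': 'g++', 'MACFLAG': '', 'STDLIB': '-lstdc++',
    #      'STDLIB_FLAG': ''}
    #
    # update_make_opts_full then writes these as variables in Source/make_opts.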
1731
1732 #=============================================================================== 1733 # ProcessExporterFortranSA 1734 #=============================================================================== 1735 -class ProcessExporterFortranSA(ProcessExporterFortran):
1736 """Class to take care of exporting a set of matrix elements to 1737 MadGraph v4 StandAlone format.""" 1738 1739 matrix_template = "matrix_standalone_v4.inc" 1740
1741 - def __init__(self, *args, **opts):
1742 """add the format information compare to standard init""" 1743 1744 if 'format' in opts: 1745 self.format = opts['format'] 1746 del opts['format'] 1747 else: 1748 self.format = 'standalone' 1749 ProcessExporterFortran.__init__(self, *args, **opts)
1750
1751 - def copy_v4template(self, modelname):
1752 """Additional actions needed for setup of Template 1753 """ 1754 1755 #First copy the full template tree if dir_path doesn't exit 1756 if os.path.isdir(self.dir_path): 1757 return 1758 1759 logger.info('initialize a new standalone directory: %s' % \ 1760 os.path.basename(self.dir_path)) 1761 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1762 1763 # Create the directory structure 1764 os.mkdir(self.dir_path) 1765 os.mkdir(pjoin(self.dir_path, 'Source')) 1766 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1767 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1768 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1769 os.mkdir(pjoin(self.dir_path, 'bin')) 1770 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1771 os.mkdir(pjoin(self.dir_path, 'lib')) 1772 os.mkdir(pjoin(self.dir_path, 'Cards')) 1773 1774 # Information at top-level 1775 #Write version info 1776 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1777 try: 1778 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1779 except IOError: 1780 MG5_version = misc.get_pkg_info() 1781 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1782 "5." + MG5_version['version']) 1783 1784 1785 # Add file in SubProcesses 1786 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1787 pjoin(self.dir_path, 'SubProcesses', 'makefile')) 1788 1789 if self.format == 'standalone': 1790 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1791 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1792 1793 # Add file in Source 1794 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1795 pjoin(self.dir_path, 'Source')) 1796 # add the makefile 1797 filename = pjoin(self.dir_path,'Source','makefile') 1798 self.write_source_makefile(writers.FileWriter(filename))
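    # Illustration (not part of the original source): the directory structure
    # created above for a standalone output looks like
    #
    #   <dir_path>/
    #       TemplateVersion.txt   MGMEVersion.txt
    #       Source/        (make_opts, makefile, MODEL/, DHELAS/)
    #       SubProcesses/  (makefile, plus check_sa.f for the 'standalone' format)
    #       bin/internal/
    #       lib/
    #       Cards/
    #
    # The process-specific Pxxxxx directories are added later by
    # generate_subprocess_directory_v4.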
1799 1800 #=========================================================================== 1801 # export model files 1802 #===========================================================================
1803 - def export_model_files(self, model_path):
1804 """export the model dependent files for V4 model""" 1805 1806 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1807 # Add the routine update_as_param in v4 model 1808 # This is a function created in the UFO 1809 text=""" 1810 subroutine update_as_param() 1811 call setpara('param_card.dat',.false.) 1812 return 1813 end 1814 """ 1815 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1816 ff.write(text) 1817 ff.close() 1818 1819 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1820 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1821 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1822 fsock.write(text) 1823 fsock.close() 1824 1825 self.make_model_symbolic_link()
1826 1827 #=========================================================================== 1828 # Make the Helas and Model directories for Standalone directory 1829 #===========================================================================
1830 - def make(self):
1831 """Run make in the DHELAS and MODEL directories, to set up 1832 everything for running standalone 1833 """ 1834 1835 source_dir = pjoin(self.dir_path, "Source") 1836 logger.info("Running make for Helas") 1837 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1838 logger.info("Running make for Model") 1839 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1840 1841 #=========================================================================== 1842 # Create proc_card_mg5.dat for Standalone directory 1843 #===========================================================================
    def finalize_v4_directory(self, matrix_elements, history, makejpg = False,
                              online = False, compiler=default_compiler):
        """Finalize the Standalone MG4 directory by generating proc_card_mg5.dat"""

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize_v4_directory(self, matrix_elements,
                                             history, makejpg, online, compiler)
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            # add the module to handle the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()
1868 1869 1870
1871 - def compiler_choice(self, compiler):
1872 """ Different daughter classes might want different compilers. 1873 So this function is meant to be overloaded if desired.""" 1874 1875 self.set_compiler(compiler)
1876 1877 #=========================================================================== 1878 # generate_subprocess_directory_v4 1879 #===========================================================================
1880 - def generate_subprocess_directory_v4(self, matrix_element, 1881 fortran_model):
1882 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 1883 including the necessary matrix.f and nexternal.inc files""" 1884 1885 cwd = os.getcwd() 1886 1887 # Create the directory PN_xx_xxxxx in the specified path 1888 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 1889 "P%s" % matrix_element.get('processes')[0].shell_string()) 1890 1891 if self.opt['sa_symmetry']: 1892 # avoid symmetric output 1893 for i,proc in enumerate(matrix_element.get('processes')): 1894 1895 initial = [] #filled in the next line 1896 final = [l.get('id') for l in proc.get('legs')\ 1897 if l.get('state') or initial.append(l.get('id'))] 1898 decay_finals = proc.get_final_ids_after_decay() 1899 decay_finals.sort() 1900 tag = (tuple(initial), tuple(decay_finals)) 1901 legs = proc.get('legs')[:] 1902 leg0 = proc.get('legs')[0] 1903 leg1 = proc.get('legs')[1] 1904 if not leg1.get('state'): 1905 proc.get('legs')[0] = leg1 1906 proc.get('legs')[1] = leg0 1907 flegs = proc.get('legs')[2:] 1908 for perm in itertools.permutations(flegs): 1909 for i,p in enumerate(perm): 1910 proc.get('legs')[i+2] = p 1911 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 1912 "P%s" % proc.shell_string()) 1913 #restore original order 1914 proc.get('legs')[2:] = legs[2:] 1915 if os.path.exists(dirpath2): 1916 proc.get('legs')[:] = legs 1917 return 0 1918 proc.get('legs')[:] = legs 1919 1920 try: 1921 os.mkdir(dirpath) 1922 except os.error as error: 1923 logger.warning(error.strerror + " " + dirpath) 1924 1925 #try: 1926 # os.chdir(dirpath) 1927 #except os.error: 1928 # logger.error('Could not cd to directory %s' % dirpath) 1929 # return 0 1930 1931 logger.info('Creating files in directory %s' % dirpath) 1932 1933 # Extract number of external particles 1934 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1935 1936 # Create the matrix.f file and the nexternal.inc file 1937 if self.opt['export_format']=='standalone_msP': 1938 filename = pjoin(dirpath, 'matrix_prod.f') 1939 else: 1940 filename = pjoin(dirpath, 'matrix.f') 1941 calls = self.write_matrix_element_v4( 1942 writers.FortranWriter(filename), 1943 matrix_element, 1944 fortran_model) 1945 1946 if self.opt['export_format'] == 'standalone_msP': 1947 filename = pjoin(dirpath,'configs_production.inc') 1948 mapconfigs, s_and_t_channels = self.write_configs_file(\ 1949 writers.FortranWriter(filename), 1950 matrix_element) 1951 1952 filename = pjoin(dirpath,'props_production.inc') 1953 self.write_props_file(writers.FortranWriter(filename), 1954 matrix_element, 1955 s_and_t_channels) 1956 1957 filename = pjoin(dirpath,'nexternal_prod.inc') 1958 self.write_nexternal_madspin(writers.FortranWriter(filename), 1959 nexternal, ninitial) 1960 1961 if self.opt['export_format']=='standalone_msF': 1962 filename = pjoin(dirpath, 'helamp.inc') 1963 ncomb=matrix_element.get_helicity_combinations() 1964 self.write_helamp_madspin(writers.FortranWriter(filename), 1965 ncomb) 1966 1967 filename = pjoin(dirpath, 'nexternal.inc') 1968 self.write_nexternal_file(writers.FortranWriter(filename), 1969 nexternal, ninitial) 1970 1971 filename = pjoin(dirpath, 'pmass.inc') 1972 self.write_pmass_file(writers.FortranWriter(filename), 1973 matrix_element) 1974 1975 filename = pjoin(dirpath, 'ngraphs.inc') 1976 self.write_ngraphs_file(writers.FortranWriter(filename), 1977 len(matrix_element.get_all_amplitudes())) 1978 1979 # Generate diagrams 1980 filename = pjoin(dirpath, "matrix.ps") 1981 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 1982 get('diagrams'), 1983 
filename, 1984 model=matrix_element.get('processes')[0].\ 1985 get('model'), 1986 amplitude=True) 1987 logger.info("Generating Feynman diagrams for " + \ 1988 matrix_element.get('processes')[0].nice_string()) 1989 plot.draw() 1990 1991 linkfiles = ['check_sa.f', 'coupl.inc', 'makefile'] 1992 1993 for file in linkfiles: 1994 ln('../%s' % file, cwd=dirpath) 1995 1996 # Return to original PWD 1997 #os.chdir(cwd) 1998 1999 if not calls: 2000 calls = 0 2001 return calls
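    # Illustration (not part of the original source): for the default
    # 'standalone' format each Pxxxxx directory ends up containing
    #
    #   matrix.f   nexternal.inc   pmass.inc   ngraphs.inc   matrix.ps
    #
    # plus symbolic links to the shared check_sa.f, coupl.inc and makefile one
    # level up. For standalone_msP the matrix file is named matrix_prod.f and
    # configs_production.inc, props_production.inc and nexternal_prod.inc are
    # written in addition; standalone_msF adds helamp.inc.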
2002 2003 2004 #=========================================================================== 2005 # write_source_makefile 2006 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source makefile for the MG4 standalone directory"""

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''
        text = open(path).read() % {'libraries': set_of_lib, 'model':model_line}
        writer.write(text)

        return True
2017 2018 #=========================================================================== 2019 # write_matrix_element_v4 2020 #===========================================================================
2021 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2022 write=True, proc_prefix=''):
2023 """Export a matrix element to a matrix.f file in MG4 standalone format 2024 if write is on False, just return the replace_dict and not write anything.""" 2025 2026 2027 if not matrix_element.get('processes') or \ 2028 not matrix_element.get('diagrams'): 2029 return 0 2030 2031 if not isinstance(writer, writers.FortranWriter): 2032 raise writers.FortranWriter.FortranWriterError(\ 2033 "writer not FortranWriter but %s" % type(writer)) 2034 2035 if not self.opt.has_key('sa_symmetry'): 2036 self.opt['sa_symmetry']=False 2037 2038 # Set lowercase/uppercase Fortran code 2039 writers.FortranWriter.downcase = False 2040 2041 # The proc_id is for MadEvent grouping which is never used in SA. 2042 replace_dict = {'global_variable':'', 'amp2_lines':'', 2043 'proc_prefix':proc_prefix, 'proc_id':''} 2044 2045 # Extract helas calls 2046 helas_calls = fortran_model.get_matrix_element_calls(\ 2047 matrix_element) 2048 2049 replace_dict['helas_calls'] = "\n".join(helas_calls) 2050 2051 # Extract version number and date from VERSION file 2052 info_lines = self.get_mg5_info_lines() 2053 replace_dict['info_lines'] = info_lines 2054 2055 # Extract process info lines 2056 process_lines = self.get_process_info_lines(matrix_element) 2057 replace_dict['process_lines'] = process_lines 2058 2059 # Extract number of external particles 2060 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2061 replace_dict['nexternal'] = nexternal 2062 2063 # Extract ncomb 2064 ncomb = matrix_element.get_helicity_combinations() 2065 replace_dict['ncomb'] = ncomb 2066 2067 # Extract helicity lines 2068 helicity_lines = self.get_helicity_lines(matrix_element) 2069 replace_dict['helicity_lines'] = helicity_lines 2070 2071 # Extract overall denominator 2072 # Averaging initial state color, spin, and identical FS particles 2073 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2074 2075 # Extract ngraphs 2076 ngraphs = matrix_element.get_number_of_amplitudes() 2077 replace_dict['ngraphs'] = ngraphs 2078 2079 # Extract nwavefuncs 2080 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2081 replace_dict['nwavefuncs'] = nwavefuncs 2082 2083 # Extract ncolor 2084 ncolor = max(1, len(matrix_element.get('color_basis'))) 2085 replace_dict['ncolor'] = ncolor 2086 2087 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2088 2089 # Extract color data lines 2090 color_data_lines = self.get_color_data_lines(matrix_element) 2091 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2092 2093 if self.opt['export_format']=='standalone_msP': 2094 # For MadSpin need to return the AMP2 2095 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2096 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2097 replace_dict['global_variable'] = \ 2098 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2099 2100 # JAMP definition, depends on the number of independent split orders 2101 split_orders=matrix_element.get('processes')[0].get('split_orders') 2102 2103 if len(split_orders)==0: 2104 replace_dict['nSplitOrders']='' 2105 # Extract JAMP lines 2106 jamp_lines = self.get_JAMP_lines(matrix_element) 2107 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2108 # set all amplitude order to weight 1 and only one squared order 2109 # contribution which is of course ALL_ORDERS=2. 2110 squared_orders = [(2,),] 2111 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2112 replace_dict['chosen_so_configs'] = '.TRUE.' 
2113 replace_dict['nSqAmpSplitOrders']=1 2114 replace_dict['split_order_str_list']='' 2115 else: 2116 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2117 replace_dict['nAmpSplitOrders']=len(amp_orders) 2118 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2119 replace_dict['nSplitOrders']=len(split_orders) 2120 replace_dict['split_order_str_list']=str(split_orders) 2121 amp_so = self.get_split_orders_lines( 2122 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2123 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2124 replace_dict['ampsplitorders']='\n'.join(amp_so) 2125 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2126 jamp_lines = self.get_JAMP_lines_split_order(\ 2127 matrix_element,amp_orders,split_order_names=split_orders) 2128 2129 # Now setup the array specifying what squared split order is chosen 2130 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2131 matrix_element.get('processes')[0],squared_orders) 2132 2133 # For convenience we also write the driver check_sa_splitOrders.f 2134 # that explicitely writes out the contribution from each squared order. 2135 # The original driver still works and is compiled with 'make' while 2136 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2137 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2138 self.write_check_sa_splitOrders(squared_orders,split_orders, 2139 nexternal,ninitial,proc_prefix,check_sa_writer) 2140 2141 if write: 2142 writers.FortranWriter('nsqso_born.inc').writelines( 2143 """INTEGER NSQSO_BORN 2144 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2145 2146 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2147 2148 matrix_template = self.matrix_template 2149 if self.opt['export_format']=='standalone_msP' : 2150 matrix_template = 'matrix_standalone_msP_v4.inc' 2151 elif self.opt['export_format']=='standalone_msF': 2152 matrix_template = 'matrix_standalone_msF_v4.inc' 2153 elif self.opt['export_format']=='matchbox': 2154 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2155 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2156 2157 if len(split_orders)>0: 2158 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2159 logger.debug("Warning: The export format %s is not "+\ 2160 " available for individual ME evaluation of given coupl. orders."+\ 2161 " Only the total ME will be computed.", self.opt['export_format']) 2162 elif self.opt['export_format'] in ['madloop_matchbox']: 2163 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2164 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2165 else: 2166 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2167 2168 if write: 2169 path = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2170 content = open(path).read() 2171 content = content % replace_dict 2172 # Write the file 2173 writer.writelines(content) 2174 # Add the helper functions. 2175 if len(split_orders)>0: 2176 content = '\n' + open(pjoin(_file_path, \ 2177 'iolibs/template_files/split_orders_helping_functions.inc'))\ 2178 .read()%replace_dict 2179 writer.writelines(content) 2180 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2181 else: 2182 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2183 return replace_dict # for subclass update
2184
2185 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2186 nincoming, proc_prefix, writer):
2187 """ Write out a more advanced version of the check_sa drivers that 2188 individually returns the matrix element for each contributing squared 2189 order.""" 2190 2191 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2192 'template_files', 'check_sa_splitOrders.f')).read() 2193 printout_sq_orders=[] 2194 for i, squared_order in enumerate(squared_orders): 2195 sq_orders=[] 2196 for j, sqo in enumerate(squared_order): 2197 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2198 printout_sq_orders.append(\ 2199 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2200 %(i+1,' '.join(sq_orders),i+1)) 2201 printout_sq_orders='\n'.join(printout_sq_orders) 2202 writer.writelines(check_sa_content%{\ 2203 'printout_sqorders':printout_sq_orders, 2204 'nSplitOrders':len(squared_orders), 2205 'nexternal':nexternal, 2206 'nincoming':nincoming, 2207 'proc_prefix':proc_prefix})
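    # Illustration (not part of the original source): for split orders
    # ['QCD', 'QED'] and a squared order (2, 0), the loop above produces a
    # printout line of the form
    #
    #   write(*,*) '1) Matrix element for (QCD=2 QED=0) = ',MATELEMS(1)
    #
    # which the generated check_sa_born_splitOrders.f driver prints for each
    # contributing squared coupling-order combination.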
2208
2209 2210 -class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """Class to take care of exporting a set of matrix elements for the
    MatchBox code, in the case of Born-only routines."""

    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    # specific template for the Born matrix element
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
2224 - def get_color_string_lines(matrix_element):
2225 """Return the color matrix definition lines for this matrix element. Split 2226 rows in chunks of size n.""" 2227 2228 if not matrix_element.get('color_matrix'): 2229 return "\n".join(["out = 1"]) 2230 2231 #start the real work 2232 color_denominators = matrix_element.get('color_matrix').\ 2233 get_line_denominators() 2234 matrix_strings = [] 2235 my_cs = color.ColorString() 2236 for i_color in xrange(len(color_denominators)): 2237 # Then write the numerators for the matrix elements 2238 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color]) 2239 t_str=repr(my_cs) 2240 t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)") 2241 # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall: 2242 # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')] 2243 all_matches = t_match.findall(t_str) 2244 output = {} 2245 arg=[] 2246 for match in all_matches: 2247 ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')] 2248 if ctype in ['ColorOne' ]: 2249 continue 2250 if ctype not in ['T', 'Tr' ]: 2251 raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype 2252 tmparg += ['0'] 2253 arg +=tmparg 2254 for j, v in enumerate(arg): 2255 output[(i_color,j)] = v 2256 2257 for key in output: 2258 if matrix_strings == []: 2259 #first entry 2260 matrix_strings.append(""" 2261 if (in1.eq.%s.and.in2.eq.%s)then 2262 out = %s 2263 """ % (key[0], key[1], output[key])) 2264 else: 2265 #not first entry 2266 matrix_strings.append(""" 2267 elseif (in1.eq.%s.and.in2.eq.%s)then 2268 out = %s 2269 """ % (key[0], key[1], output[key])) 2270 if len(matrix_strings): 2271 matrix_strings.append(" else \n out = - 1 \n endif") 2272 else: 2273 return "\n out = - 1 \n " 2274 return "\n".join(matrix_strings)
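    # Illustration (not part of the original source): the regular expression
    # used above can be checked directly against the example quoted in the
    # comment:
    #
    #   >>> t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
    #   >>> t_match.findall('1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)')
    #   [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
    #
    # An Epsilon structure would then be rejected with a MadGraph5Error,
    # since only T and Tr color structures are handled by the MatchBox output.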
2275
2276 - def make(self,*args,**opts):
2277 pass
2278
2279 - def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1, 2280 JAMP_formatLC=None):
2281 2282 """Adding leading color part of the colorflow""" 2283 2284 if not JAMP_formatLC: 2285 JAMP_formatLC= "LN%s" % JAMP_format 2286 2287 error_msg="Malformed '%s' argument passed to the get_JAMP_lines" 2288 if(isinstance(col_amps,helas_objects.HelasMatrixElement)): 2289 col_amps=col_amps.get_color_amplitudes() 2290 elif(isinstance(col_amps,list)): 2291 if(col_amps and isinstance(col_amps[0],list)): 2292 col_amps=col_amps 2293 else: 2294 raise MadGraph5Error, error_msg % 'col_amps' 2295 else: 2296 raise MadGraph5Error, error_msg % 'col_amps' 2297 2298 text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps, 2299 JAMP_format=JAMP_format, 2300 AMP_format=AMP_format, 2301 split=-1) 2302 2303 2304 # Filter the col_ampls to generate only those without any 1/NC terms 2305 2306 LC_col_amps = [] 2307 for coeff_list in col_amps: 2308 to_add = [] 2309 for (coefficient, amp_number) in coeff_list: 2310 if coefficient[3]==0: 2311 to_add.append( (coefficient, amp_number) ) 2312 LC_col_amps.append(to_add) 2313 2314 text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps, 2315 JAMP_format=JAMP_formatLC, 2316 AMP_format=AMP_format, 2317 split=-1) 2318 2319 return text
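    # Illustration (not part of the original source), assuming the coefficient
    # tuple is ordered as in self.coeff, i.e. (ff_number, frac, is_imaginary,
    # Nc_power): each color-amplitude entry is a pair (coefficient, amp_number),
    # and the filter above keeps, e.g.,
    #
    #   ((1, fractions.Fraction(1, 3), False, 0), 2)    # Nc_power == 0 -> leading color
    #
    # while dropping
    #
    #   ((1, fractions.Fraction(1, 3), False, -1), 2)   # subleading 1/Nc term
    #
    # so that the LNJAMP(...) lines contain only the leading-color flow.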
2320
2321 2322 2323 2324 #=============================================================================== 2325 # ProcessExporterFortranMW 2326 #=============================================================================== 2327 -class ProcessExporterFortranMW(ProcessExporterFortran):
2328 """Class to take care of exporting a set of matrix elements to 2329 MadGraph v4 - MadWeight format.""" 2330 2331 matrix_file="matrix_standalone_v4.inc" 2332
2333 - def copy_v4template(self, modelname):
2334 """Additional actions needed for setup of Template 2335 """ 2336 2337 super(ProcessExporterFortranMW, self).copy_v4template(modelname) 2338 2339 # Add the MW specific file 2340 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2341 pjoin(self.dir_path, 'Source','MadWeight'), True) 2342 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2343 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2344 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2345 pjoin(self.dir_path, 'Source','setrun.f')) 2346 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2347 pjoin(self.dir_path, 'Source','run.inc')) 2348 # File created from Template (Different in some child class) 2349 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2350 self.write_run_config_file(writers.FortranWriter(filename)) 2351 2352 try: 2353 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2354 stdout = os.open(os.devnull, os.O_RDWR), 2355 stderr = os.open(os.devnull, os.O_RDWR), 2356 cwd=self.dir_path) 2357 except OSError: 2358 # Probably madweight already called 2359 pass 2360 2361 # Copy the different python file in the Template 2362 self.copy_python_file() 2363 # create the appropriate cuts.f 2364 self.get_mw_cuts_version() 2365 2366 # add the makefile in Source directory 2367 filename = os.path.join(self.dir_path,'Source','makefile') 2368 self.write_source_makefile(writers.FortranWriter(filename))
2369 2370 2371 2372 2373 #=========================================================================== 2374 # convert_model_to_mg4 2375 #===========================================================================
2376 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 2377 wanted_couplings = []):
2378 2379 super(ProcessExporterFortranMW,self).convert_model_to_mg4(model, 2380 wanted_lorentz, wanted_couplings) 2381 2382 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2383 try: 2384 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2385 except OSError as error: 2386 pass 2387 model_path = model.get('modelpath') 2388 # This is not safe if there is a '##' or '-' in the path. 2389 shutil.copytree(model_path, 2390 pjoin(self.dir_path,'bin','internal','ufomodel'), 2391 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2392 if hasattr(model, 'restrict_card'): 2393 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2394 'restrict_default.dat') 2395 if isinstance(model.restrict_card, check_param_card.ParamCard): 2396 model.restrict_card.write(out_path) 2397 else: 2398 files.cp(model.restrict_card, out_path)
2399 2400 #=========================================================================== 2401 # generate_subprocess_directory_v4 2402 #===========================================================================
2403 - def copy_python_file(self):
2404 """copy the python file require for the Template""" 2405 2406 # madevent interface 2407 cp(_file_path+'/interface/madweight_interface.py', 2408 self.dir_path+'/bin/internal/madweight_interface.py') 2409 cp(_file_path+'/interface/extended_cmd.py', 2410 self.dir_path+'/bin/internal/extended_cmd.py') 2411 cp(_file_path+'/interface/common_run_interface.py', 2412 self.dir_path+'/bin/internal/common_run_interface.py') 2413 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2414 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2415 #cp(_file_path+'/iolibs/save_load_object.py', 2416 # self.dir_path+'/bin/internal/save_load_object.py') 2417 cp(_file_path+'/iolibs/file_writers.py', 2418 self.dir_path+'/bin/internal/file_writers.py') 2419 #model file 2420 cp(_file_path+'../models/check_param_card.py', 2421 self.dir_path+'/bin/internal/check_param_card.py') 2422 2423 #madevent file 2424 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2425 cp(_file_path+'/various/lhe_parser.py', 2426 self.dir_path+'/bin/internal/lhe_parser.py') 2427 2428 cp(_file_path+'/various/banner.py', 2429 self.dir_path+'/bin/internal/banner.py') 2430 cp(_file_path+'/various/shower_card.py', 2431 self.dir_path+'/bin/internal/shower_card.py') 2432 cp(_file_path+'/various/cluster.py', 2433 self.dir_path+'/bin/internal/cluster.py') 2434 2435 # logging configuration 2436 cp(_file_path+'/interface/.mg5_logging.conf', 2437 self.dir_path+'/bin/internal/me5_logging.conf') 2438 cp(_file_path+'/interface/coloring_logging.py', 2439 self.dir_path+'/bin/internal/coloring_logging.py')
2440 2441 2442 #=========================================================================== 2443 # Change the version of cuts.f to the one compatible with MW 2444 #===========================================================================
2445 - def get_mw_cuts_version(self, outpath=None):
2446 """create the appropriate cuts.f 2447 This is based on the one associated to ME output but: 2448 1) No clustering (=> remove initcluster/setclscales) 2449 2) Adding the definition of cut_bw at the file. 2450 """ 2451 2452 template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f')) 2453 2454 text = StringIO() 2455 #1) remove all dependencies in ickkw >1: 2456 nb_if = 0 2457 for line in template: 2458 if 'if(xqcut.gt.0d0' in line: 2459 nb_if = 1 2460 if nb_if == 0: 2461 text.write(line) 2462 continue 2463 if re.search(r'if\(.*\)\s*then', line): 2464 nb_if += 1 2465 elif 'endif' in line: 2466 nb_if -= 1 2467 2468 #2) add fake cut_bw (have to put the true one later) 2469 text.write(""" 2470 logical function cut_bw(p) 2471 include 'madweight_param.inc' 2472 double precision p(*) 2473 if (bw_cut) then 2474 cut_bw = .true. 2475 else 2476 stop 1 2477 endif 2478 return 2479 end 2480 """) 2481 2482 final = text.getvalue() 2483 #3) remove the call to initcluster: 2484 template = final.replace('call initcluster', '! Remove for MW!call initcluster') 2485 template = template.replace('genps.inc', 'maxparticles.inc') 2486 #Now we can write it 2487 if not outpath: 2488 fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w') 2489 elif isinstance(outpath, str): 2490 fsock = open(outpath, 'w') 2491 else: 2492 fsock = outpath 2493 fsock.write(template)
2494 2495 2496 2497 #=========================================================================== 2498 # Make the Helas and Model directories for Standalone directory 2499 #===========================================================================
2500 - def make(self):
2501 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2502 everything for running madweight 2503 """ 2504 2505 source_dir = os.path.join(self.dir_path, "Source") 2506 logger.info("Running make for Helas") 2507 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2508 logger.info("Running make for Model") 2509 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2510 logger.info("Running make for PDF") 2511 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2512 logger.info("Running make for CERNLIB") 2513 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2514 logger.info("Running make for GENERIC") 2515 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2516 logger.info("Running make for blocks") 2517 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2518 logger.info("Running make for tools") 2519 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2520 2521 #=========================================================================== 2522 # Create proc_card_mg5.dat for MadWeight directory 2523 #===========================================================================
    def finalize_v4_directory(self, matrix_elements, history, makejpg = False,
                              online = False, compiler=default_compiler):
        """Finalize the MadWeight MG4 directory by generating proc_card_mg5.dat"""

        # proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize_v4_directory(self, matrix_elements,
                                             history, makejpg, online, compiler)
2550 2551 2552 #=========================================================================== 2553 # create the run_card for MW 2554 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write the default and current run_card.dat for MadWeight."""

        run_card = banner_mod.RunCard()

        # pass to defaults suited for MadWeight
        run_card["run_tag"] = "\'not_use\'"
        run_card["fixed_ren_scale"] = "T"
        run_card["fixed_fac_scale"] = "T"
        run_card.remove_all_cut()

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'),
                       template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'),
                       python_template=True)
2572 2573 #=========================================================================== 2574 # export model files 2575 #===========================================================================
2576 - def export_model_files(self, model_path):
2577 """export the model dependent files for V4 model""" 2578 2579 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2580 # Add the routine update_as_param in v4 model 2581 # This is a function created in the UFO 2582 text=""" 2583 subroutine update_as_param() 2584 call setpara('param_card.dat',.false.) 2585 return 2586 end 2587 """ 2588 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 2589 ff.write(text) 2590 ff.close() 2591 2592 # Modify setrun.f 2593 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 2594 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 2595 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 2596 fsock.write(text) 2597 fsock.close() 2598 2599 # Modify initialization.f 2600 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 2601 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 2602 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 2603 fsock.write(text) 2604 fsock.close() 2605 2606 2607 self.make_model_symbolic_link()
2608 2609 #=========================================================================== 2610 # generate_subprocess_directory_v4 2611 #===========================================================================
2612 - def generate_subprocess_directory_v4(self, matrix_element, 2613 fortran_model,number):
2614 """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format, 2615 including the necessary matrix.f and nexternal.inc files""" 2616 2617 cwd = os.getcwd() 2618 2619 # Create the directory PN_xx_xxxxx in the specified path 2620 dirpath = os.path.join(self.dir_path, 'SubProcesses', \ 2621 "P%s" % matrix_element.get('processes')[0].shell_string()) 2622 2623 try: 2624 os.mkdir(dirpath) 2625 except os.error as error: 2626 logger.warning(error.strerror + " " + dirpath) 2627 2628 #try: 2629 # os.chdir(dirpath) 2630 #except os.error: 2631 # logger.error('Could not cd to directory %s' % dirpath) 2632 # return 0 2633 2634 logger.info('Creating files in directory %s' % dirpath) 2635 2636 # Extract number of external particles 2637 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2638 2639 # Create the matrix.f file and the nexternal.inc file 2640 filename = pjoin(dirpath,'matrix.f') 2641 calls,ncolor = self.write_matrix_element_v4( 2642 writers.FortranWriter(filename), 2643 matrix_element, 2644 fortran_model) 2645 2646 filename = pjoin(dirpath, 'auto_dsig.f') 2647 self.write_auto_dsig_file(writers.FortranWriter(filename), 2648 matrix_element) 2649 2650 filename = pjoin(dirpath, 'configs.inc') 2651 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2652 writers.FortranWriter(filename), 2653 matrix_element) 2654 2655 filename = pjoin(dirpath, 'nexternal.inc') 2656 self.write_nexternal_file(writers.FortranWriter(filename), 2657 nexternal, ninitial) 2658 2659 filename = pjoin(dirpath, 'leshouche.inc') 2660 self.write_leshouche_file(writers.FortranWriter(filename), 2661 matrix_element) 2662 2663 filename = pjoin(dirpath, 'props.inc') 2664 self.write_props_file(writers.FortranWriter(filename), 2665 matrix_element, 2666 s_and_t_channels) 2667 2668 filename = pjoin(dirpath, 'pmass.inc') 2669 self.write_pmass_file(writers.FortranWriter(filename), 2670 matrix_element) 2671 2672 filename = pjoin(dirpath, 'ngraphs.inc') 2673 self.write_ngraphs_file(writers.FortranWriter(filename), 2674 len(matrix_element.get_all_amplitudes())) 2675 2676 filename = pjoin(dirpath, 'maxamps.inc') 2677 self.write_maxamps_file(writers.FortranWriter(filename), 2678 len(matrix_element.get('diagrams')), 2679 ncolor, 2680 len(matrix_element.get('processes')), 2681 1) 2682 2683 filename = pjoin(dirpath, 'phasespace.inc') 2684 self.write_phasespace_file(writers.FortranWriter(filename), 2685 len(matrix_element.get('diagrams')), 2686 ) 2687 2688 # Generate diagrams 2689 filename = pjoin(dirpath, "matrix.ps") 2690 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2691 get('diagrams'), 2692 filename, 2693 model=matrix_element.get('processes')[0].\ 2694 get('model'), 2695 amplitude='') 2696 logger.info("Generating Feynman diagrams for " + \ 2697 matrix_element.get('processes')[0].nice_string()) 2698 plot.draw() 2699 2700 #import genps.inc and maxconfigs.inc into Subprocesses 2701 ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False) 2702 #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False) 2703 2704 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc'] 2705 2706 for file in linkfiles: 2707 ln('../%s' % file, starting_dir=cwd) 2708 2709 ln('nexternal.inc', '../../Source', log=False, cwd=dirpath) 2710 ln('leshouche.inc', '../../Source', log=False, cwd=dirpath) 2711 ln('maxamps.inc', '../../Source', log=False, cwd=dirpath) 
2712 ln('phasespace.inc', '../', log=True, cwd=dirpath) 2713 # Return to original PWD 2714 #os.chdir(cwd) 2715 2716 if not calls: 2717 calls = 0 2718 return calls
2719 2720 #=========================================================================== 2721 # write_matrix_element_v4 2722 #===========================================================================
2723 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
2724 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 2725 2726 if not matrix_element.get('processes') or \ 2727 not matrix_element.get('diagrams'): 2728 return 0 2729 2730 if not isinstance(writer, writers.FortranWriter): 2731 raise writers.FortranWriter.FortranWriterError(\ 2732 "writer not FortranWriter") 2733 2734 # Set lowercase/uppercase Fortran code 2735 writers.FortranWriter.downcase = False 2736 2737 replace_dict = {} 2738 2739 # Extract version number and date from VERSION file 2740 info_lines = self.get_mg5_info_lines() 2741 replace_dict['info_lines'] = info_lines 2742 2743 # Extract process info lines 2744 process_lines = self.get_process_info_lines(matrix_element) 2745 replace_dict['process_lines'] = process_lines 2746 2747 # Set proc_id 2748 replace_dict['proc_id'] = proc_id 2749 2750 # Extract number of external particles 2751 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2752 replace_dict['nexternal'] = nexternal 2753 2754 # Extract ncomb 2755 ncomb = matrix_element.get_helicity_combinations() 2756 replace_dict['ncomb'] = ncomb 2757 2758 # Extract helicity lines 2759 helicity_lines = self.get_helicity_lines(matrix_element) 2760 replace_dict['helicity_lines'] = helicity_lines 2761 2762 # Extract overall denominator 2763 # Averaging initial state color, spin, and identical FS particles 2764 den_factor_line = self.get_den_factor_line(matrix_element) 2765 replace_dict['den_factor_line'] = den_factor_line 2766 2767 # Extract ngraphs 2768 ngraphs = matrix_element.get_number_of_amplitudes() 2769 replace_dict['ngraphs'] = ngraphs 2770 2771 # Extract nwavefuncs 2772 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2773 replace_dict['nwavefuncs'] = nwavefuncs 2774 2775 # Extract ncolor 2776 ncolor = max(1, len(matrix_element.get('color_basis'))) 2777 replace_dict['ncolor'] = ncolor 2778 2779 # Extract color data lines 2780 color_data_lines = self.get_color_data_lines(matrix_element) 2781 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2782 2783 # Extract helas calls 2784 helas_calls = fortran_model.get_matrix_element_calls(\ 2785 matrix_element) 2786 2787 replace_dict['helas_calls'] = "\n".join(helas_calls) 2788 2789 # Extract JAMP lines 2790 jamp_lines = self.get_JAMP_lines(matrix_element) 2791 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2792 2793 file = open(os.path.join(_file_path, \ 2794 'iolibs/template_files/%s' % self.matrix_file)).read() 2795 file = file % replace_dict 2796 2797 2798 # Write the file 2799 writer.writelines(file) 2800 2801 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor
2802 2803 #=========================================================================== 2804 # write_source_makefile 2805 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source makefile for the MadWeight directory"""

        path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source')
        set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
        text = open(path).read() % {'libraries': set_of_lib}
        writer.write(text)

        return True
2816
    def write_phasespace_file(self, writer, nb_diag):
        """Write the phasespace.inc file for MadWeight, setting max_configs
        to the number of diagrams of the subprocess."""

        template = """ include 'maxparticles.inc'
      integer max_branches
      parameter (max_branches=max_particles-1)
      integer max_configs
      parameter (max_configs=%(nb_diag)s)

c     channel position
      integer config_pos,perm_pos
      common /to_config/config_pos,perm_pos

      """

        writer.write(template % {'nb_diag': nb_diag})
2833 2834 2835 #=========================================================================== 2836 # write_auto_dsig_file 2837 #===========================================================================
2838 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
2839 """Write the auto_dsig.f file for the differential cross section 2840 calculation, includes pdf call information (MadWeight format)""" 2841 2842 if not matrix_element.get('processes') or \ 2843 not matrix_element.get('diagrams'): 2844 return 0 2845 2846 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 2847 2848 if ninitial < 1 or ninitial > 2: 2849 raise writers.FortranWriter.FortranWriterError, \ 2850 """Need ninitial = 1 or 2 to write auto_dsig file""" 2851 2852 replace_dict = {} 2853 2854 # Extract version number and date from VERSION file 2855 info_lines = self.get_mg5_info_lines() 2856 replace_dict['info_lines'] = info_lines 2857 2858 # Extract process info lines 2859 process_lines = self.get_process_info_lines(matrix_element) 2860 replace_dict['process_lines'] = process_lines 2861 2862 # Set proc_id 2863 replace_dict['proc_id'] = proc_id 2864 replace_dict['numproc'] = 1 2865 2866 # Set dsig_line 2867 if ninitial == 1: 2868 # No conversion, since result of decay should be given in GeV 2869 dsig_line = "pd(0)*dsiguu" 2870 else: 2871 # Convert result (in GeV) to pb 2872 dsig_line = "pd(0)*conv*dsiguu" 2873 2874 replace_dict['dsig_line'] = dsig_line 2875 2876 # Extract pdf lines 2877 pdf_vars, pdf_data, pdf_lines = \ 2878 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 2879 replace_dict['pdf_vars'] = pdf_vars 2880 replace_dict['pdf_data'] = pdf_data 2881 replace_dict['pdf_lines'] = pdf_lines 2882 2883 # Lines that differ between subprocess group and regular 2884 if proc_id: 2885 replace_dict['numproc'] = int(proc_id) 2886 replace_dict['passcuts_begin'] = "" 2887 replace_dict['passcuts_end'] = "" 2888 # Set lines for subprocess group version 2889 # Set define_iconfigs_lines 2890 replace_dict['define_subdiag_lines'] = \ 2891 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 2892 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 2893 else: 2894 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 2895 replace_dict['passcuts_end'] = "ENDIF" 2896 replace_dict['define_subdiag_lines'] = "" 2897 2898 file = open(os.path.join(_file_path, \ 2899 'iolibs/template_files/auto_dsig_mw.inc')).read() 2900 2901 file = file % replace_dict 2902 2903 2904 # Write the file 2905 writer.writelines(file)
2906 2907 #=========================================================================== 2908 # write_configs_file 2909 #===========================================================================
2910 - def write_configs_file(self, writer, matrix_element):
2911 """Write the configs.inc file for MadEvent""" 2912 2913 # Extract number of external particles 2914 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2915 2916 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 2917 mapconfigs = [c[0] for c in configs] 2918 model = matrix_element.get('processes')[0].get('model') 2919 return mapconfigs, self.write_configs_file_from_diagrams(writer, 2920 [[c[1]] for c in configs], 2921 mapconfigs, 2922 nexternal, ninitial,matrix_element, model)
2923 2924 #=========================================================================== 2925 # write_run_configs_file 2926 #===========================================================================
2927 - def write_run_config_file(self, writer):
2928 """Write the run_configs.inc file for MadWeight""" 2929 2930 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 2931 text = open(path).read() % {'chanperjob':'5'} 2932 writer.write(text) 2933 return True
2934 2935 #=========================================================================== 2936 # write_configs_file_from_diagrams 2937 #===========================================================================
2938 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 2939 nexternal, ninitial, matrix_element, model):
2940 """Write the actual configs.inc file. 2941 2942 configs is the diagrams corresponding to configs (each 2943 diagrams is a list of corresponding diagrams for all 2944 subprocesses, with None if there is no corresponding diagrams 2945 for a given process). 2946 mapconfigs gives the diagram number for each config. 2947 2948 For s-channels, we need to output one PDG for each subprocess in 2949 the subprocess group, in order to be able to pick the right 2950 one for multiprocesses.""" 2951 2952 lines = [] 2953 2954 particle_dict = matrix_element.get('processes')[0].get('model').\ 2955 get('particle_dict') 2956 2957 s_and_t_channels = [] 2958 2959 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 2960 for config in configs if [d for d in config if d][0].\ 2961 get_vertex_leg_numbers()!=[]] 2962 2963 minvert = min(vert_list) if vert_list!=[] else 0 2964 # Number of subprocesses 2965 nsubprocs = len(configs[0]) 2966 2967 nconfigs = 0 2968 2969 new_pdg = model.get_first_non_pdg() 2970 2971 for iconfig, helas_diags in enumerate(configs): 2972 if any([vert > minvert for vert in 2973 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 2974 # Only 3-vertices allowed in configs.inc 2975 continue 2976 nconfigs += 1 2977 2978 # Need s- and t-channels for all subprocesses, including 2979 # those that don't contribute to this config 2980 empty_verts = [] 2981 stchannels = [] 2982 for h in helas_diags: 2983 if h: 2984 # get_s_and_t_channels gives vertices starting from 2985 # final state external particles and working inwards 2986 stchannels.append(h.get('amplitudes')[0].\ 2987 get_s_and_t_channels(ninitial,model,new_pdg)) 2988 else: 2989 stchannels.append((empty_verts, None)) 2990 2991 # For t-channels, just need the first non-empty one 2992 tchannels = [t for s,t in stchannels if t != None][0] 2993 2994 # For s_and_t_channels (to be used later) use only first config 2995 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 2996 tchannels]) 2997 2998 # Make sure empty_verts is same length as real vertices 2999 if any([s for s,t in stchannels]): 3000 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 3001 3002 # Reorganize s-channel vertices to get a list of all 3003 # subprocesses for each vertex 3004 schannels = zip(*[s for s,t in stchannels]) 3005 else: 3006 schannels = [] 3007 3008 allchannels = schannels 3009 if len(tchannels) > 1: 3010 # Write out tchannels only if there are any non-trivial ones 3011 allchannels = schannels + tchannels 3012 3013 # Write out propagators for s-channel and t-channel vertices 3014 3015 #lines.append("# Diagram %d" % (mapconfigs[iconfig])) 3016 # Correspondance between the config and the diagram = amp2 3017 lines.append("* %d %d " % (nconfigs, 3018 mapconfigs[iconfig])) 3019 3020 for verts in allchannels: 3021 if verts in schannels: 3022 vert = [v for v in verts if v][0] 3023 else: 3024 vert = verts 3025 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 3026 last_leg = vert.get('legs')[-1] 3027 line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1]) 3028 # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 3029 # (last_leg.get('number'), nconfigs, len(daughters), 3030 # ",".join([str(d) for d in daughters]))) 3031 3032 if last_leg.get('id') == 21 and 21 not in particle_dict: 3033 # Fake propagator used in multiparticle vertices 3034 mass = 'zero' 3035 width = 'zero' 3036 pow_part = 0 3037 else: 3038 if (last_leg.get('id')!=7): 3039 particle = particle_dict[last_leg.get('id')] 
3040 # Get mass 3041 mass = particle.get('mass') 3042 # Get width 3043 width = particle.get('width') 3044 else : # fake propagator used in multiparticle vertices 3045 mass= 'zero' 3046 width= 'zero' 3047 3048 line=line+" "+mass+" "+width+" " 3049 3050 if verts in schannels: 3051 pdgs = [] 3052 for v in verts: 3053 if v: 3054 pdgs.append(v.get('legs')[-1].get('id')) 3055 else: 3056 pdgs.append(0) 3057 lines.append(line+" S "+str(last_leg.get('id'))) 3058 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3059 # (last_leg.get('number'), nconfigs, nsubprocs, 3060 # ",".join([str(d) for d in pdgs]))) 3061 # lines.append("data tprid(%d,%d)/0/" % \ 3062 # (last_leg.get('number'), nconfigs)) 3063 elif verts in tchannels[:-1]: 3064 lines.append(line+" T "+str(last_leg.get('id'))) 3065 # lines.append("data tprid(%d,%d)/%d/" % \ 3066 # (last_leg.get('number'), nconfigs, 3067 # abs(last_leg.get('id')))) 3068 # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 3069 # (last_leg.get('number'), nconfigs, nsubprocs, 3070 # ",".join(['0'] * nsubprocs))) 3071 3072 # Write out number of configs 3073 # lines.append("# Number of configs") 3074 # lines.append("data mapconfig(0)/%d/" % nconfigs) 3075 lines.append(" * ") # a line with just a star indicates this is the end of file 3076 # Write the file 3077 writer.writelines(lines) 3078 3079 return s_and_t_channels
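    # Illustration (not part of the original source): unlike the Fortran data
    # statements written by the parent class, MadWeight gets a plain-text
    # listing. For a single config with an s-channel Z built from legs 3 and 4
    # the file would look roughly like
    #
    #   * 1 1
    #   -1 3 4 MZ WZ  S 23
    #    *
    #
    # i.e. a '* <config> <diagram>' header, one line per propagator giving the
    # internal leg number, its two daughters, the mass and width parameter
    # names and an S/T flag with the PDG code, and a line with a lone star to
    # mark the end of the file. (The mass and width names, 'MZ' and 'WZ' here,
    # are whatever the model's particle_dict provides.)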
3080
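# A minimal illustrative sketch (assumed example values, not taken from any real process): for one
# config mapped to diagram 1, containing a single s-channel Z propagator (internal leg -1 built from
# external legs 3 and 4, PDG code 23, mass/width names MZ and WZ), the text block written by the
# routine above would read
#
#   * 1 1 
#   -1 3 4 MZ WZ  S 23
#    * 
#
# The lone star on the last line marks the end of the file, as noted in the code above.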
3081 3082 #=============================================================================== 3083 # ProcessExporterFortranME 3084 #=============================================================================== 3085 -class ProcessExporterFortranME(ProcessExporterFortran):
3086 """Class to take care of exporting a set of matrix elements to 3087 MadEvent format.""" 3088 3089 matrix_file = "matrix_madevent_v4.inc" 3090
3091 - def copy_v4template(self, modelname):
3092 """Additional actions needed for setup of Template 3093 """ 3094 3095 super(ProcessExporterFortranME, self).copy_v4template(modelname) 3096 3097 # File created from Template (Different in some child class) 3098 filename = pjoin(self.dir_path,'Source','run_config.inc') 3099 self.write_run_config_file(writers.FortranWriter(filename)) 3100 3101 # The next file are model dependant (due to SLAH convention) 3102 self.model_name = modelname 3103 # Add the symmetry.f 3104 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3105 self.write_symmetry(writers.FortranWriter(filename)) 3106 # 3107 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3108 self.write_addmothers(writers.FortranWriter(filename)) 3109 # Copy the different python file in the Template 3110 self.copy_python_file()
3111 3112 3113 3114 3115 3116 #=========================================================================== 3117 # copy_python_file 3118 #===========================================================================
3119 - def copy_python_file(self):
3120 """copy the python file require for the Template""" 3121 3122 # madevent interface 3123 cp(_file_path+'/interface/madevent_interface.py', 3124 self.dir_path+'/bin/internal/madevent_interface.py') 3125 cp(_file_path+'/interface/extended_cmd.py', 3126 self.dir_path+'/bin/internal/extended_cmd.py') 3127 cp(_file_path+'/interface/common_run_interface.py', 3128 self.dir_path+'/bin/internal/common_run_interface.py') 3129 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3130 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3131 cp(_file_path+'/iolibs/save_load_object.py', 3132 self.dir_path+'/bin/internal/save_load_object.py') 3133 cp(_file_path+'/iolibs/file_writers.py', 3134 self.dir_path+'/bin/internal/file_writers.py') 3135 #model file 3136 cp(_file_path+'../models/check_param_card.py', 3137 self.dir_path+'/bin/internal/check_param_card.py') 3138 3139 #copy all the file present in madevent directory 3140 for name in os.listdir(pjoin(_file_path, 'madevent')): 3141 if name not in ['__init__.py'] and name.endswith('.py'): 3142 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3143 3144 #madevent file 3145 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3146 cp(_file_path+'/various/lhe_parser.py', 3147 self.dir_path+'/bin/internal/lhe_parser.py') 3148 cp(_file_path+'/various/banner.py', 3149 self.dir_path+'/bin/internal/banner.py') 3150 cp(_file_path+'/various/cluster.py', 3151 self.dir_path+'/bin/internal/cluster.py') 3152 cp(_file_path+'/madevent/combine_runs.py', 3153 self.dir_path+'/bin/internal/combine_runs.py') 3154 # logging configuration 3155 cp(_file_path+'/interface/.mg5_logging.conf', 3156 self.dir_path+'/bin/internal/me5_logging.conf') 3157 cp(_file_path+'/interface/coloring_logging.py', 3158 self.dir_path+'/bin/internal/coloring_logging.py') 3159 # shower card and FO_analyse_card. 3160 # Although not needed, it is imported by banner.py 3161 cp(_file_path+'/various/shower_card.py', 3162 self.dir_path+'/bin/internal/shower_card.py') 3163 cp(_file_path+'/various/FO_analyse_card.py', 3164 self.dir_path+'/bin/internal/FO_analyse_card.py')
3165 3166
3167 - def convert_model_to_mg4(self, model, wanted_lorentz = [], 3168 wanted_couplings = []):
3169 3170 super(ProcessExporterFortranME,self).convert_model_to_mg4(model, 3171 wanted_lorentz, wanted_couplings) 3172 3173 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3174 try: 3175 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3176 except OSError as error: 3177 pass 3178 model_path = model.get('modelpath') 3179 # This is not safe if there is a '##' or '-' in the path. 3180 shutil.copytree(model_path, 3181 pjoin(self.dir_path,'bin','internal','ufomodel'), 3182 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3183 if hasattr(model, 'restrict_card'): 3184 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3185 'restrict_default.dat') 3186 if isinstance(model.restrict_card, check_param_card.ParamCard): 3187 model.restrict_card.write(out_path) 3188 else: 3189 files.cp(model.restrict_card, out_path)
3190 3191 #=========================================================================== 3192 # export model files 3193 #===========================================================================
3194 - def export_model_files(self, model_path):
3195 """export the model dependent files""" 3196 3197 super(ProcessExporterFortranME,self).export_model_files(model_path) 3198 3199 # Add the routine update_as_param in v4 model 3200 # This is a function created in the UFO 3201 text=""" 3202 subroutine update_as_param() 3203 call setpara('param_card.dat',.false.) 3204 return 3205 end 3206 """ 3207 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3208 ff.write(text) 3209 ff.close() 3210 3211 # Add the symmetry.f 3212 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3213 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3214 3215 # Modify setrun.f 3216 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3217 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3218 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3219 fsock.write(text) 3220 fsock.close() 3221 3222 self.make_model_symbolic_link()
3223 3224 3225 #=========================================================================== 3226 # generate_subprocess_directory_v4 3227 #===========================================================================
3228 - def generate_subprocess_directory_v4(self, matrix_element, 3229 fortran_model, 3230 me_number):
3231 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3232 including the necessary matrix.f and various helper files""" 3233 3234 cwd = os.getcwd() 3235 path = pjoin(self.dir_path, 'SubProcesses') 3236 3237 3238 if not self.model: 3239 self.model = matrix_element.get('processes')[0].get('model') 3240 3241 3242 3243 #os.chdir(path) 3244 # Create the directory PN_xx_xxxxx in the specified path 3245 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3246 try: 3247 os.mkdir(pjoin(path,subprocdir)) 3248 except os.error as error: 3249 logger.warning(error.strerror + " " + subprocdir) 3250 3251 #try: 3252 # os.chdir(subprocdir) 3253 #except os.error: 3254 # logger.error('Could not cd to directory %s' % subprocdir) 3255 # return 0 3256 3257 logger.info('Creating files in directory %s' % subprocdir) 3258 Ppath = pjoin(path, subprocdir) 3259 3260 # Extract number of external particles 3261 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3262 3263 # Add the driver.f 3264 ncomb = matrix_element.get_helicity_combinations() 3265 filename = pjoin(Ppath,'driver.f') 3266 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3267 v5=self.opt['v5_model']) 3268 3269 # Create the matrix.f file, auto_dsig.f file and all inc files 3270 filename = pjoin(Ppath, 'matrix.f') 3271 calls, ncolor = \ 3272 self.write_matrix_element_v4(writers.FortranWriter(filename), 3273 matrix_element, fortran_model, subproc_number = me_number) 3274 3275 filename = pjoin(Ppath, 'auto_dsig.f') 3276 self.write_auto_dsig_file(writers.FortranWriter(filename), 3277 matrix_element) 3278 3279 filename = pjoin(Ppath, 'configs.inc') 3280 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3281 writers.FortranWriter(filename), 3282 matrix_element) 3283 3284 filename = pjoin(Ppath, 'config_nqcd.inc') 3285 self.write_config_nqcd_file(writers.FortranWriter(filename), 3286 nqcd_list) 3287 3288 filename = pjoin(Ppath, 'config_subproc_map.inc') 3289 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3290 s_and_t_channels) 3291 3292 filename = pjoin(Ppath, 'coloramps.inc') 3293 self.write_coloramps_file(writers.FortranWriter(filename), 3294 mapconfigs, 3295 matrix_element) 3296 3297 filename = pjoin(Ppath, 'get_color.f') 3298 self.write_colors_file(writers.FortranWriter(filename), 3299 matrix_element) 3300 3301 filename = pjoin(Ppath, 'decayBW.inc') 3302 self.write_decayBW_file(writers.FortranWriter(filename), 3303 s_and_t_channels) 3304 3305 filename = pjoin(Ppath, 'dname.mg') 3306 self.write_dname_file(writers.FileWriter(filename), 3307 "P"+matrix_element.get('processes')[0].shell_string()) 3308 3309 filename = pjoin(Ppath, 'iproc.dat') 3310 self.write_iproc_file(writers.FortranWriter(filename), 3311 me_number) 3312 3313 filename = pjoin(Ppath, 'leshouche.inc') 3314 self.write_leshouche_file(writers.FortranWriter(filename), 3315 matrix_element) 3316 3317 filename = pjoin(Ppath, 'maxamps.inc') 3318 self.write_maxamps_file(writers.FortranWriter(filename), 3319 len(matrix_element.get('diagrams')), 3320 ncolor, 3321 len(matrix_element.get('processes')), 3322 1) 3323 3324 filename = pjoin(Ppath, 'mg.sym') 3325 self.write_mg_sym_file(writers.FortranWriter(filename), 3326 matrix_element) 3327 3328 filename = pjoin(Ppath, 'ncombs.inc') 3329 self.write_ncombs_file(writers.FortranWriter(filename), 3330 nexternal) 3331 3332 filename = pjoin(Ppath, 'nexternal.inc') 3333 self.write_nexternal_file(writers.FortranWriter(filename), 3334 nexternal, ninitial) 
3335 3336 filename = pjoin(Ppath, 'ngraphs.inc') 3337 self.write_ngraphs_file(writers.FortranWriter(filename), 3338 len(mapconfigs)) 3339 3340 3341 filename = pjoin(Ppath, 'pmass.inc') 3342 self.write_pmass_file(writers.FortranWriter(filename), 3343 matrix_element) 3344 3345 filename = pjoin(Ppath, 'props.inc') 3346 self.write_props_file(writers.FortranWriter(filename), 3347 matrix_element, 3348 s_and_t_channels) 3349 3350 # Find config symmetries and permutations 3351 symmetry, perms, ident_perms = \ 3352 diagram_symmetry.find_symmetry(matrix_element) 3353 3354 filename = pjoin(Ppath, 'symswap.inc') 3355 self.write_symswap_file(writers.FortranWriter(filename), 3356 ident_perms) 3357 3358 filename = pjoin(Ppath, 'symfact_orig.dat') 3359 self.write_symfact_file(open(filename, 'w'), symmetry) 3360 3361 # Generate diagrams 3362 filename = pjoin(Ppath, "matrix.ps") 3363 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3364 get('diagrams'), 3365 filename, 3366 model=matrix_element.get('processes')[0].\ 3367 get('model'), 3368 amplitude=True) 3369 logger.info("Generating Feynman diagrams for " + \ 3370 matrix_element.get('processes')[0].nice_string()) 3371 plot.draw() 3372 3373 self.link_files_in_SubProcess(Ppath) 3374 3375 #import nexternal/leshouche in Source 3376 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3377 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3378 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3379 # Return to SubProcesses dir 3380 #os.chdir(os.path.pardir) 3381 3382 # Add subprocess to subproc.mg 3383 filename = pjoin(path, 'subproc.mg') 3384 files.append_to_file(filename, 3385 self.write_subproc, 3386 subprocdir) 3387 3388 # Return to original dir 3389 #os.chdir(cwd) 3390 3391 # Generate info page 3392 gen_infohtml.make_info_html(self.dir_path) 3393 3394 3395 if not calls: 3396 calls = 0 3397 return calls
3398 3434
3435 - def finalize_v4_directory(self, matrix_elements, history, makejpg = False, 3436 online = False, compiler=default_compiler):
3437 """Finalize ME v4 directory by creating jpeg diagrams, html 3438 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3439 3440 # indicate that the output type is not grouped 3441 if not isinstance(self, ProcessExporterFortranMEGroup): 3442 self.proc_characteristic['grouped_matrix'] = False 3443 3444 modelname = self.opt['model'] 3445 if modelname == 'mssm' or modelname.startswith('mssm-'): 3446 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3447 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3448 check_param_card.convert_to_mg5card(param_card, mg5_param) 3449 check_param_card.check_valid_param_card(mg5_param) 3450 3451 # Add the combine_events.f modify param_card path/number of @X 3452 filename = pjoin(self.dir_path,'Source','combine_events.f') 3453 try: 3454 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3455 except AttributeError: 3456 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3457 nb_proc = len(set(nb_proc)) 3458 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3459 # Write maxconfigs.inc based on max of ME's/subprocess groups 3460 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3461 self.write_maxconfigs_file(writers.FortranWriter(filename), 3462 matrix_elements) 3463 3464 # Write maxparticles.inc based on max of ME's/subprocess groups 3465 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3466 self.write_maxparticles_file(writers.FortranWriter(filename), 3467 matrix_elements) 3468 3469 # Touch "done" file 3470 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3471 3472 # Check for compiler 3473 self.set_compiler(compiler) 3474 self.set_cpp_compiler(compiler['cpp']) 3475 3476 3477 old_pos = os.getcwd() 3478 subpath = pjoin(self.dir_path, 'SubProcesses') 3479 3480 P_dir_list = [proc for proc in os.listdir(subpath) 3481 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3482 3483 devnull = os.open(os.devnull, os.O_RDWR) 3484 # Convert the poscript in jpg files (if authorize) 3485 if makejpg: 3486 try: 3487 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3488 except Exception, error: 3489 pass 3490 logger.info("Generate jpeg diagrams") 3491 for Pdir in P_dir_list: 3492 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3493 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3494 3495 logger.info("Generate web pages") 3496 # Create the WebPage using perl script 3497 3498 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3499 stdout = devnull,cwd=pjoin(self.dir_path)) 3500 3501 #os.chdir(os.path.pardir) 3502 3503 obj = gen_infohtml.make_info_html(self.dir_path) 3504 3505 if online: 3506 nb_channel = obj.rep_rule['nb_gen_diag'] 3507 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3508 #add the information to proc_charac 3509 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3510 3511 # Write command history as proc_card_mg5 3512 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3513 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3514 history.write(output_file) 3515 3516 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3517 stdout = devnull) 3518 3519 #crate the proc_characteristic file 3520 self.create_proc_charac(matrix_elements, history) 3521 3522 # create the run_card 3523 ProcessExporterFortran.finalize_v4_directory(self, matrix_elements, 3524 
history, makejpg, online, compiler) 3525 3526 # Run "make" to generate madevent.tar.gz file 3527 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3528 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3529 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3530 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3531 stdout = devnull, cwd=self.dir_path) 3532 3533 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3534 stdout = devnull, cwd=self.dir_path)
3535 3536 3537 3538 3539 3540 3541 #return to the initial dir 3542 #os.chdir(old_pos) 3543 3544 #=========================================================================== 3545 # write_matrix_element_v4 3546 #===========================================================================
3547 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3548 proc_id = "", config_map = [], subproc_number = ""):
3549 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3550 3551 if not matrix_element.get('processes') or \ 3552 not matrix_element.get('diagrams'): 3553 return 0 3554 3555 if not isinstance(writer, writers.FortranWriter): 3556 raise writers.FortranWriter.FortranWriterError(\ 3557 "writer not FortranWriter") 3558 3559 3560 # Set lowercase/uppercase Fortran code 3561 writers.FortranWriter.downcase = False 3562 3563 # The proc prefix is not used for MadEvent output so it can safely be set 3564 # to an empty string. 3565 replace_dict = {'proc_prefix':''} 3566 3567 # Extract helas calls 3568 helas_calls = fortran_model.get_matrix_element_calls(\ 3569 matrix_element) 3570 3571 replace_dict['helas_calls'] = "\n".join(helas_calls) 3572 3573 3574 # Extract version number and date from VERSION file 3575 info_lines = self.get_mg5_info_lines() 3576 replace_dict['info_lines'] = info_lines 3577 3578 # Extract process info lines 3579 process_lines = self.get_process_info_lines(matrix_element) 3580 replace_dict['process_lines'] = process_lines 3581 3582 # Set proc_id 3583 replace_dict['proc_id'] = proc_id 3584 3585 # Extract ncomb 3586 ncomb = matrix_element.get_helicity_combinations() 3587 replace_dict['ncomb'] = ncomb 3588 3589 # Extract helicity lines 3590 helicity_lines = self.get_helicity_lines(matrix_element) 3591 replace_dict['helicity_lines'] = helicity_lines 3592 3593 # Extract IC line 3594 ic_line = self.get_ic_line(matrix_element) 3595 replace_dict['ic_line'] = ic_line 3596 3597 # Extract overall denominator 3598 # Averaging initial state color, spin, and identical FS particles 3599 den_factor_line = self.get_den_factor_line(matrix_element) 3600 replace_dict['den_factor_line'] = den_factor_line 3601 3602 # Extract ngraphs 3603 ngraphs = matrix_element.get_number_of_amplitudes() 3604 replace_dict['ngraphs'] = ngraphs 3605 3606 # Extract ndiags 3607 ndiags = len(matrix_element.get('diagrams')) 3608 replace_dict['ndiags'] = ndiags 3609 3610 # Set define_iconfigs_lines 3611 replace_dict['define_iconfigs_lines'] = \ 3612 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3613 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3614 3615 if proc_id: 3616 # Set lines for subprocess group version 3617 # Set define_iconfigs_lines 3618 replace_dict['define_iconfigs_lines'] += \ 3619 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3620 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3621 # Set set_amp2_line 3622 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3623 proc_id 3624 else: 3625 # Standard running 3626 # Set set_amp2_line 3627 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3628 3629 # Extract nwavefuncs 3630 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3631 replace_dict['nwavefuncs'] = nwavefuncs 3632 3633 # Extract ncolor 3634 ncolor = max(1, len(matrix_element.get('color_basis'))) 3635 replace_dict['ncolor'] = ncolor 3636 3637 # Extract color data lines 3638 color_data_lines = self.get_color_data_lines(matrix_element) 3639 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3640 3641 3642 # Set the size of Wavefunction 3643 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 3644 replace_dict['wavefunctionsize'] = 18 3645 else: 3646 replace_dict['wavefunctionsize'] = 6 3647 3648 # Extract amp2 lines 3649 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 3650 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 3651 3652 # The JAMP definition depends on the splitting order 3653 
split_orders=matrix_element.get('processes')[0].get('split_orders') 3654 if len(split_orders)>0: 3655 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 3656 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 3657 matrix_element.get('processes')[0],squared_orders) 3658 else: 3659 # Consider the output of a dummy order 'ALL_ORDERS' for which we 3660 # set all amplitude order to weight 1 and only one squared order 3661 # contribution which is of course ALL_ORDERS=2. 3662 squared_orders = [(2,),] 3663 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 3664 replace_dict['chosen_so_configs'] = '.TRUE.' 3665 3666 replace_dict['nAmpSplitOrders']=len(amp_orders) 3667 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 3668 replace_dict['split_order_str_list']=str(split_orders) 3669 replace_dict['nSplitOrders']=max(len(split_orders),1) 3670 amp_so = self.get_split_orders_lines( 3671 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 3672 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 3673 replace_dict['ampsplitorders']='\n'.join(amp_so) 3674 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 3675 3676 3677 # Extract JAMP lines 3678 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 3679 jamp_lines = self.get_JAMP_lines_split_order(\ 3680 matrix_element,amp_orders,split_order_names= 3681 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 3682 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3683 3684 file = open(pjoin(_file_path, \ 3685 'iolibs/template_files/%s' % self.matrix_file)).read() 3686 3687 file = file % replace_dict 3688 3689 # Add the split orders helper functions. 3690 file = file + '\n' + open(pjoin(_file_path, \ 3691 'iolibs/template_files/split_orders_helping_functions.inc'))\ 3692 .read()%replace_dict 3693 # Write the file 3694 writer.writelines(file) 3695 3696 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
3697 3698 #=========================================================================== 3699 # write_auto_dsig_file 3700 #===========================================================================
3701 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3702 """Write the auto_dsig.f file for the differential cross section 3703 calculation, includes pdf call information""" 3704 3705 if not matrix_element.get('processes') or \ 3706 not matrix_element.get('diagrams'): 3707 return 0 3708 3709 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3710 self.proc_characteristic['ninitial'] = ninitial 3711 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 3712 3713 if ninitial < 1 or ninitial > 2: 3714 raise writers.FortranWriter.FortranWriterError, \ 3715 """Need ninitial = 1 or 2 to write auto_dsig file""" 3716 3717 replace_dict = {} 3718 3719 # Extract version number and date from VERSION file 3720 info_lines = self.get_mg5_info_lines() 3721 replace_dict['info_lines'] = info_lines 3722 3723 # Extract process info lines 3724 process_lines = self.get_process_info_lines(matrix_element) 3725 replace_dict['process_lines'] = process_lines 3726 3727 # Set proc_id 3728 replace_dict['proc_id'] = proc_id 3729 replace_dict['numproc'] = 1 3730 3731 # Set dsig_line 3732 if ninitial == 1: 3733 # No conversion, since result of decay should be given in GeV 3734 dsig_line = "pd(0)*dsiguu" 3735 else: 3736 # Convert result (in GeV) to pb 3737 dsig_line = "pd(0)*conv*dsiguu" 3738 3739 replace_dict['dsig_line'] = dsig_line 3740 3741 # Extract pdf lines 3742 pdf_vars, pdf_data, pdf_lines = \ 3743 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3744 replace_dict['pdf_vars'] = pdf_vars 3745 replace_dict['pdf_data'] = pdf_data 3746 replace_dict['pdf_lines'] = pdf_lines 3747 3748 # Lines that differ between subprocess group and regular 3749 if proc_id: 3750 replace_dict['numproc'] = int(proc_id) 3751 replace_dict['passcuts_begin'] = "" 3752 replace_dict['passcuts_end'] = "" 3753 # Set lines for subprocess group version 3754 # Set define_iconfigs_lines 3755 replace_dict['define_subdiag_lines'] = \ 3756 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3757 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3758 replace_dict['cutsdone'] = "" 3759 else: 3760 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3761 replace_dict['passcuts_end'] = "ENDIF" 3762 replace_dict['define_subdiag_lines'] = "" 3763 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 3764 3765 if not isinstance(self, ProcessExporterFortranMEGroup): 3766 ncomb=matrix_element.get_helicity_combinations() 3767 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 3768 else: 3769 replace_dict['read_write_good_hel'] = "" 3770 3771 3772 3773 file = open(pjoin(_file_path, \ 3774 'iolibs/template_files/auto_dsig_v4.inc')).read() 3775 file = file % replace_dict 3776 3777 # Write the file 3778 writer.writelines(file, context={'read_write_good_hel':True})
3779 3780 #=========================================================================== 3781 # write_coloramps_file 3782 #===========================================================================
3783 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
3784 """Write the coloramps.inc file for MadEvent""" 3785 3786 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 3787 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 3788 (max(len(matrix_element.get('color_basis').keys()), 1), 3789 len(mapconfigs))) 3790 3791 3792 # Write the file 3793 writer.writelines(lines) 3794 3795 return True
3796 3797 #=========================================================================== 3798 # write_colors_file 3799 #===========================================================================
3800 - def write_colors_file(self, writer, matrix_elements):
3801 """Write the get_color.f file for MadEvent, which returns color 3802 for all particles used in the matrix element.""" 3803 3804 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 3805 matrix_elements = [matrix_elements] 3806 3807 model = matrix_elements[0].get('processes')[0].get('model') 3808 3809 # We need the both particle and antiparticle wf_ids, since the identity 3810 # depends on the direction of the wf. 3811 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 3812 for wf in d.get('wavefunctions')],[]) \ 3813 for d in me.get('diagrams')], []) \ 3814 for me in matrix_elements], [])) 3815 3816 leg_ids = set(sum([sum([sum([[l.get('id'), 3817 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 3818 for l in p.get_legs_with_decays()], []) \ 3819 for p in me.get('processes')], []) \ 3820 for me in matrix_elements], [])) 3821 particle_ids = sorted(list(wf_ids.union(leg_ids))) 3822 3823 lines = """function get_color(ipdg) 3824 implicit none 3825 integer get_color, ipdg 3826 3827 if(ipdg.eq.%d)then 3828 get_color=%d 3829 return 3830 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 3831 3832 for part_id in particle_ids[1:]: 3833 lines += """else if(ipdg.eq.%d)then 3834 get_color=%d 3835 return 3836 """ % (part_id, model.get_particle(part_id).get_color()) 3837 # Dummy particle for multiparticle vertices with pdg given by 3838 # first code not in the model 3839 lines += """else if(ipdg.eq.%d)then 3840 c This is dummy particle used in multiparticle vertices 3841 get_color=2 3842 return 3843 """ % model.get_first_non_pdg() 3844 lines += """else 3845 write(*,*)'Error: No color given for pdg ',ipdg 3846 get_color=0 3847 return 3848 endif 3849 end 3850 """ 3851 3852 # Write the file 3853 writer.writelines(lines) 3854 3855 return True
3856 3857 #=========================================================================== 3858 # write_config_nqcd_file 3859 #===========================================================================
3860 - def write_config_nqcd_file(self, writer, nqcd_list):
3861 """Write the config_nqcd.inc with the number of QCD couplings 3862 for each config""" 3863 3864 lines = [] 3865 for iconf, n in enumerate(nqcd_list): 3866 lines.append("data nqcd(%d)/%d/" % (iconf+1, n)) 3867 3868 # Write the file 3869 writer.writelines(lines) 3870 3871 return True
3872 3873 #=========================================================================== 3874 # write_maxconfigs_file 3875 #===========================================================================
3876 - def write_maxconfigs_file(self, writer, matrix_elements):
3877 """Write the maxconfigs.inc file for MadEvent""" 3878 3879 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 3880 maxconfigs = max([me.get_num_configs() for me in \ 3881 matrix_elements.get('matrix_elements')]) 3882 else: 3883 maxconfigs = max([me.get_num_configs() for me in matrix_elements]) 3884 3885 lines = "integer lmaxconfigs\n" 3886 lines += "parameter(lmaxconfigs=%d)" % maxconfigs 3887 3888 # Write the file 3889 writer.writelines(lines) 3890 3891 return True
3892 3893 #=========================================================================== 3894 # read_write_good_hel 3895 #===========================================================================
3896 - def read_write_good_hel(self, ncomb):
3897 """return the code to read/write the good_hel common_block""" 3898 3899 convert = {'ncomb' : ncomb} 3900 output = """ 3901 subroutine write_good_hel(stream_id) 3902 implicit none 3903 integer stream_id 3904 INTEGER NCOMB 3905 PARAMETER ( NCOMB=%(ncomb)d) 3906 LOGICAL GOODHEL(NCOMB) 3907 INTEGER NTRY 3908 common/BLOCK_GOODHEL/NTRY,GOODHEL 3909 write(stream_id,*) GOODHEL 3910 return 3911 end 3912 3913 3914 subroutine read_good_hel(stream_id) 3915 implicit none 3916 include 'genps.inc' 3917 integer stream_id 3918 INTEGER NCOMB 3919 PARAMETER ( NCOMB=%(ncomb)d) 3920 LOGICAL GOODHEL(NCOMB) 3921 INTEGER NTRY 3922 common/BLOCK_GOODHEL/NTRY,GOODHEL 3923 read(stream_id,*) GOODHEL 3924 NTRY = MAXTRIES + 1 3925 return 3926 end 3927 3928 subroutine init_good_hel() 3929 implicit none 3930 INTEGER NCOMB 3931 PARAMETER ( NCOMB=%(ncomb)d) 3932 LOGICAL GOODHEL(NCOMB) 3933 INTEGER NTRY 3934 INTEGER I 3935 3936 do i=1,NCOMB 3937 GOODHEL(I) = .false. 3938 enddo 3939 NTRY = 0 3940 end 3941 3942 integer function get_maxsproc() 3943 implicit none 3944 get_maxsproc = 1 3945 return 3946 end 3947 3948 """ % convert 3949 3950 return output
3951 3952 #=========================================================================== 3953 # write_config_subproc_map_file 3954 #===========================================================================
3955 - def write_config_subproc_map_file(self, writer, s_and_t_channels):
3956 """Write a dummy config_subproc.inc file for MadEvent""" 3957 3958 lines = [] 3959 3960 for iconfig in range(len(s_and_t_channels)): 3961 lines.append("DATA CONFSUB(1,%d)/1/" % \ 3962 (iconfig + 1)) 3963 3964 # Write the file 3965 writer.writelines(lines) 3966 3967 return True
3968 3969 #=========================================================================== 3970 # write_configs_file 3971 #===========================================================================
3972 - def write_configs_file(self, writer, matrix_element):
3973 """Write the configs.inc file for MadEvent""" 3974 3975 # Extract number of external particles 3976 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3977 3978 model = matrix_element.get('processes')[0].get('model') 3979 configs = [(i+1, d) for (i, d) in \ 3980 enumerate(matrix_element.get('diagrams'))] 3981 mapconfigs = [c[0] for c in configs] 3982 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3983 [[c[1]] for c in configs], 3984 mapconfigs, 3985 nexternal, ninitial, 3986 model)
3987 3988 #=========================================================================== 3989 # write_run_config_file 3990 #===========================================================================
3991 - def write_run_config_file(self, writer):
3992 """Write the run_configs.inc file for MadEvent""" 3993 3994 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 3995 3996 if self.proc_characteristic['loop_induced']: 3997 job_per_chan = 1 3998 else: 3999 job_per_chan = 5 4000 text = open(path).read() % {'chanperjob': job_per_chan} 4001 writer.write(text) 4002 return True
4003 4004 4005 #=========================================================================== 4006 # write_configs_file_from_diagrams 4007 #===========================================================================
4008 - def write_configs_file_from_diagrams(self, writer, configs, mapconfigs, 4009 nexternal, ninitial, model):
4010 """Write the actual configs.inc file. 4011 4012 configs is the diagrams corresponding to configs (each 4013 diagrams is a list of corresponding diagrams for all 4014 subprocesses, with None if there is no corresponding diagrams 4015 for a given process). 4016 mapconfigs gives the diagram number for each config. 4017 4018 For s-channels, we need to output one PDG for each subprocess in 4019 the subprocess group, in order to be able to pick the right 4020 one for multiprocesses.""" 4021 4022 lines = [] 4023 4024 s_and_t_channels = [] 4025 4026 nqcd_list = [] 4027 4028 vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \ 4029 for config in configs if [d for d in config if d][0].\ 4030 get_vertex_leg_numbers()!=[]] 4031 minvert = min(vert_list) if vert_list!=[] else 0 4032 4033 # Number of subprocesses 4034 nsubprocs = len(configs[0]) 4035 4036 nconfigs = 0 4037 4038 new_pdg = model.get_first_non_pdg() 4039 4040 for iconfig, helas_diags in enumerate(configs): 4041 if any([vert > minvert for vert in 4042 [d for d in helas_diags if d][0].get_vertex_leg_numbers()]): 4043 # Only 3-vertices allowed in configs.inc 4044 continue 4045 nconfigs += 1 4046 4047 # Need s- and t-channels for all subprocesses, including 4048 # those that don't contribute to this config 4049 empty_verts = [] 4050 stchannels = [] 4051 for h in helas_diags: 4052 if h: 4053 # get_s_and_t_channels gives vertices starting from 4054 # final state external particles and working inwards 4055 stchannels.append(h.get('amplitudes')[0].\ 4056 get_s_and_t_channels(ninitial, model, 4057 new_pdg)) 4058 else: 4059 stchannels.append((empty_verts, None)) 4060 4061 # For t-channels, just need the first non-empty one 4062 tchannels = [t for s,t in stchannels if t != None][0] 4063 4064 # For s_and_t_channels (to be used later) use only first config 4065 s_and_t_channels.append([[s for s,t in stchannels if t != None][0], 4066 tchannels]) 4067 4068 # Make sure empty_verts is same length as real vertices 4069 if any([s for s,t in stchannels]): 4070 empty_verts[:] = [None]*max([len(s) for s,t in stchannels]) 4071 4072 # Reorganize s-channel vertices to get a list of all 4073 # subprocesses for each vertex 4074 schannels = zip(*[s for s,t in stchannels]) 4075 else: 4076 schannels = [] 4077 4078 allchannels = schannels 4079 if len(tchannels) > 1: 4080 # Write out tchannels only if there are any non-trivial ones 4081 allchannels = schannels + tchannels 4082 4083 # Write out propagators for s-channel and t-channel vertices 4084 4085 lines.append("# Diagram %d" % (mapconfigs[iconfig])) 4086 # Correspondance between the config and the diagram = amp2 4087 lines.append("data mapconfig(%d)/%d/" % (nconfigs, 4088 mapconfigs[iconfig])) 4089 # Number of QCD couplings in this diagram 4090 nqcd = 0 4091 for h in helas_diags: 4092 if h: 4093 try: 4094 nqcd = h.calculate_orders()['QCD'] 4095 except KeyError: 4096 pass 4097 break 4098 else: 4099 continue 4100 4101 nqcd_list.append(nqcd) 4102 4103 for verts in allchannels: 4104 if verts in schannels: 4105 vert = [v for v in verts if v][0] 4106 else: 4107 vert = verts 4108 daughters = [leg.get('number') for leg in vert.get('legs')[:-1]] 4109 last_leg = vert.get('legs')[-1] 4110 lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \ 4111 (last_leg.get('number'), nconfigs, len(daughters), 4112 ",".join([str(d) for d in daughters]))) 4113 if verts in schannels: 4114 pdgs = [] 4115 for v in verts: 4116 if v: 4117 pdgs.append(v.get('legs')[-1].get('id')) 4118 else: 4119 pdgs.append(0) 4120 
lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4121 (last_leg.get('number'), nconfigs, nsubprocs, 4122 ",".join([str(d) for d in pdgs]))) 4123 lines.append("data tprid(%d,%d)/0/" % \ 4124 (last_leg.get('number'), nconfigs)) 4125 elif verts in tchannels[:-1]: 4126 lines.append("data tprid(%d,%d)/%d/" % \ 4127 (last_leg.get('number'), nconfigs, 4128 abs(last_leg.get('id')))) 4129 lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \ 4130 (last_leg.get('number'), nconfigs, nsubprocs, 4131 ",".join(['0'] * nsubprocs))) 4132 4133 # Write out number of configs 4134 lines.append("# Number of configs") 4135 lines.append("data mapconfig(0)/%d/" % nconfigs) 4136 4137 # Write the file 4138 writer.writelines(lines) 4139 4140 return s_and_t_channels, nqcd_list
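# An illustrative sketch, assuming a single subprocess and a single config (config 1 = diagram 1)
# with one s-channel propagator numbered -1, built from external legs 3 and 4 and carrying PDG
# code 23; the routine above would then emit Fortran lines of the form
#
#   # Diagram 1
#   data mapconfig(1)/1/
#   data (iforest(i,-1,1),i=1,2)/3,4/
#   data (sprop(i,-1,1),i=1,1)/23/
#   data tprid(-1,1)/0/
#   # Number of configs
#   data mapconfig(0)/1/
#
# All leg numbers and the PDG code are assumed example values.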
4141 4142 #=========================================================================== 4143 # write_decayBW_file 4144 #===========================================================================
4145 - def write_decayBW_file(self, writer, s_and_t_channels):
4146 """Write the decayBW.inc file for MadEvent""" 4147 4148 lines = [] 4149 4150 booldict = {None: "0", True: "1", False: "2"} 4151 4152 for iconf, config in enumerate(s_and_t_channels): 4153 schannels = config[0] 4154 for vertex in schannels: 4155 # For the resulting leg, pick out whether it comes from 4156 # decay or not, as given by the onshell flag 4157 leg = vertex.get('legs')[-1] 4158 lines.append("data gForceBW(%d,%d)/%s/" % \ 4159 (leg.get('number'), iconf + 1, 4160 booldict[leg.get('onshell')])) 4161 4162 # Write the file 4163 writer.writelines(lines) 4164 4165 return True
4166 4167 #=========================================================================== 4168 # write_dname_file 4169 #===========================================================================
4170 - def write_dname_file(self, writer, dir_name):
4171 """Write the dname.mg file for MG4""" 4172 4173 line = "DIRNAME=%s" % dir_name 4174 4175 # Write the file 4176 writer.write(line + "\n") 4177 4178 return True
4179 4180 #=========================================================================== 4181 # write_driver 4182 #===========================================================================
4183 - def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
4184 """Write the SubProcess/driver.f file for MG4""" 4185 4186 path = pjoin(_file_path,'iolibs','template_files','madevent_driver.f') 4187 4188 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4189 card = 'Source/MODEL/MG5_param.dat' 4190 else: 4191 card = 'param_card.dat' 4192 # Requiring each helicity configuration to be probed by 10 points for 4193 # matrix element before using the resulting grid for MC over helicity 4194 # sampling. 4195 # We multiply this by 2 because each grouped subprocess is called at most 4196 # twice for each IMIRROR. 4197 replace_dict = {'param_card_name':card, 4198 'ncomb':ncomb, 4199 'hel_init_points':n_grouped_proc*10*2} 4200 if not v5: 4201 replace_dict['secondparam']=',.true.' 4202 else: 4203 replace_dict['secondparam']='' 4204 4205 text = open(path).read() % replace_dict 4206 4207 writer.write(text) 4208 4209 return True
4210 4211 #=========================================================================== 4212 # write_addmothers 4213 #===========================================================================
4214 - def write_addmothers(self, writer):
4215 """Write the SubProcess/addmothers.f""" 4216 4217 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4218 4219 text = open(path).read() % {'iconfig': 'diag_number'} 4220 writer.write(text) 4221 4222 return True
4223 4224 4225 #=========================================================================== 4226 # write_combine_events 4227 #===========================================================================
4228 - def write_combine_events(self, writer, nb_proc=100):
4229 """Write the SubProcess/driver.f file for MG4""" 4230 4231 path = pjoin(_file_path,'iolibs','template_files','madevent_combine_events.f') 4232 4233 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4234 card = 'Source/MODEL/MG5_param.dat' 4235 else: 4236 card = 'param_card.dat' 4237 4238 #set maxpup (number of @X in the process card) 4239 4240 text = open(path).read() % {'param_card_name':card, 'maxpup':nb_proc+1} 4241 #the +1 is just a security. This is not needed but I feel(OM) safer with it. 4242 writer.write(text) 4243 4244 return True
4245 4246 4247 #=========================================================================== 4248 # write_symmetry 4249 #===========================================================================
4250 - def write_symmetry(self, writer, v5=True):
4251 """Write the SubProcess/driver.f file for ME""" 4252 4253 path = pjoin(_file_path,'iolibs','template_files','madevent_symmetry.f') 4254 4255 if self.model_name == 'mssm' or self.model_name.startswith('mssm-'): 4256 card = 'Source/MODEL/MG5_param.dat' 4257 else: 4258 card = 'param_card.dat' 4259 text = open(path).read() 4260 4261 if v5: 4262 text = text % {'param_card_name':card, 'setparasecondarg':''} 4263 else: 4264 text = text % {'param_card_name':card, 'setparasecondarg':',.true.'} 4265 writer.write(text) 4266 4267 return True
4268 4269 4270 4271 4272 #=========================================================================== 4273 # write_iproc_file 4274 #===========================================================================
4275 - def write_iproc_file(self, writer, me_number):
4276 """Write the iproc.dat file for MG4""" 4277 line = "%d" % (me_number + 1) 4278 4279 # Write the file 4280 for line_to_write in writer.write_line(line): 4281 writer.write(line_to_write) 4282 return True
4283 4284 #=========================================================================== 4285 # write_mg_sym_file 4286 #===========================================================================
4287 - def write_mg_sym_file(self, writer, matrix_element):
4288 """Write the mg.sym file for MadEvent.""" 4289 4290 lines = [] 4291 4292 # Extract process with all decays included 4293 final_legs = filter(lambda leg: leg.get('state') == True, 4294 matrix_element.get('processes')[0].get_legs_with_decays()) 4295 4296 ninitial = len(filter(lambda leg: leg.get('state') == False, 4297 matrix_element.get('processes')[0].get('legs'))) 4298 4299 identical_indices = {} 4300 4301 # Extract identical particle info 4302 for i, leg in enumerate(final_legs): 4303 if leg.get('id') in identical_indices: 4304 identical_indices[leg.get('id')].append(\ 4305 i + ninitial + 1) 4306 else: 4307 identical_indices[leg.get('id')] = [i + ninitial + 1] 4308 4309 # Remove keys which have only one particle 4310 for key in identical_indices.keys(): 4311 if len(identical_indices[key]) < 2: 4312 del identical_indices[key] 4313 4314 # Write mg.sym file 4315 lines.append(str(len(identical_indices.keys()))) 4316 for key in identical_indices.keys(): 4317 lines.append(str(len(identical_indices[key]))) 4318 for number in identical_indices[key]: 4319 lines.append(str(number)) 4320 4321 # Write the file 4322 writer.writelines(lines) 4323 4324 return True
4325 4326 #=========================================================================== 4327 # write_default_mg_sym_file 4328 #===========================================================================
4329 - def write_default_mg_sym_file(self, writer):
4330 """Write the mg.sym file for MadEvent.""" 4331 4332 lines = "0" 4333 4334 # Write the file 4335 writer.writelines(lines) 4336 4337 return True
4338 4339 #=========================================================================== 4340 # write_ncombs_file 4341 #===========================================================================
4342 - def write_ncombs_file(self, writer, nexternal):
4343 """Write the ncombs.inc file for MadEvent.""" 4344 4345 # ncomb (used for clustering) is 2^nexternal 4346 file = " integer n_max_cl\n" 4347 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4348 4349 # Write the file 4350 writer.writelines(file) 4351 4352 return True
4353 4354 #=========================================================================== 4355 # write_processes_file 4356 #===========================================================================
4357 - def write_processes_file(self, writer, subproc_group):
4358 """Write the processes.dat file with info about the subprocesses 4359 in this group.""" 4360 4361 lines = [] 4362 4363 for ime, me in \ 4364 enumerate(subproc_group.get('matrix_elements')): 4365 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4366 ",".join(p.base_string() for p in \ 4367 me.get('processes')))) 4368 if me.get('has_mirror_process'): 4369 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4370 for proc in mirror_procs: 4371 legs = copy.copy(proc.get('legs_with_decays')) 4372 legs.insert(0, legs.pop(1)) 4373 proc.set("legs_with_decays", legs) 4374 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4375 mirror_procs)) 4376 else: 4377 lines.append("mirror none") 4378 4379 # Write the file 4380 writer.write("\n".join(lines)) 4381 4382 return True
4383 4384 #=========================================================================== 4385 # write_symswap_file 4386 #===========================================================================
4387 - def write_symswap_file(self, writer, ident_perms):
4388 """Write the file symswap.inc for MG4 by comparing diagrams using 4389 the internal matrix element value functionality.""" 4390 4391 lines = [] 4392 4393 # Write out lines for symswap.inc file (used to permute the 4394 # external leg momenta 4395 for iperm, perm in enumerate(ident_perms): 4396 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4397 (iperm+1, ",".join([str(i+1) for i in perm]))) 4398 lines.append("data nsym/%d/" % len(ident_perms)) 4399 4400 # Write the file 4401 writer.writelines(lines) 4402 4403 return True
4404 4405 #=========================================================================== 4406 # write_symfact_file 4407 #===========================================================================
4408 - def write_symfact_file(self, writer, symmetry):
4409 """Write the files symfact.dat for MG4 by comparing diagrams using 4410 the internal matrix element value functionality.""" 4411 4412 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4413 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4414 # Write out lines for symswap.inc file (used to permute the 4415 # external leg momenta 4416 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4417 # Write the file 4418 writer.write('\n'.join(lines)) 4419 writer.write('\n') 4420 4421 return True
4422 4423 #=========================================================================== 4424 # write_symperms_file 4425 #===========================================================================
4426 - def write_symperms_file(self, writer, perms):
4427 """Write the symperms.inc file for subprocess group, used for 4428 symmetric configurations""" 4429 4430 lines = [] 4431 for iperm, perm in enumerate(perms): 4432 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4433 (iperm+1, ",".join([str(i+1) for i in perm]))) 4434 4435 # Write the file 4436 writer.writelines(lines) 4437 4438 return True
4439 4440 #=========================================================================== 4441 # write_subproc 4442 #===========================================================================
4443 - def write_subproc(self, writer, subprocdir):
4444 """Append this subprocess to the subproc.mg file for MG4""" 4445 4446 # Write line to file 4447 writer.write(subprocdir + "\n") 4448 4449 return True
4450
4451 #=============================================================================== 4452 # ProcessExporterFortranMEGroup 4453 #=============================================================================== 4454 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4455 """Class to take care of exporting a set of matrix elements to 4456 MadEvent subprocess group format.""" 4457 4458 matrix_file = "matrix_madevent_group_v4.inc" 4459 4460 #=========================================================================== 4461 # generate_subprocess_directory_v4 4462 #===========================================================================
4463 - def generate_subprocess_directory_v4(self, subproc_group, 4464 fortran_model, 4465 group_number):
4466 """Generate the Pn directory for a subprocess group in MadEvent, 4467 including the necessary matrix_N.f files, configs.inc and various 4468 other helper files""" 4469 4470 assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \ 4471 "subproc_group object not SubProcessGroup" 4472 4473 if not self.model: 4474 self.model = subproc_group.get('matrix_elements')[0].\ 4475 get('processes')[0].get('model') 4476 4477 cwd = os.getcwd() 4478 path = pjoin(self.dir_path, 'SubProcesses') 4479 4480 os.chdir(path) 4481 pathdir = os.getcwd() 4482 4483 # Create the directory PN in the specified path 4484 subprocdir = "P%d_%s" % (subproc_group.get('number'), 4485 subproc_group.get('name')) 4486 try: 4487 os.mkdir(subprocdir) 4488 except os.error as error: 4489 logger.warning(error.strerror + " " + subprocdir) 4490 4491 try: 4492 os.chdir(subprocdir) 4493 except os.error: 4494 logger.error('Could not cd to directory %s' % subprocdir) 4495 return 0 4496 4497 logger.info('Creating files in directory %s' % subprocdir) 4498 4499 # Create the matrix.f files, auto_dsig.f files and all inc files 4500 # for all subprocesses in the group 4501 4502 maxamps = 0 4503 maxflows = 0 4504 tot_calls = 0 4505 4506 matrix_elements = subproc_group.get('matrix_elements') 4507 4508 # Add the driver.f, all grouped ME's must share the same number of 4509 # helicity configuration 4510 ncomb = matrix_elements[0].get_helicity_combinations() 4511 for me in matrix_elements[1:]: 4512 if ncomb!=me.get_helicity_combinations(): 4513 raise MadGraph5Error, "All grouped processes must share the "+\ 4514 "same number of helicity configurations." 4515 4516 filename = 'driver.f' 4517 self.write_driver(writers.FortranWriter(filename),ncomb, 4518 n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model']) 4519 4520 for ime, matrix_element in \ 4521 enumerate(matrix_elements): 4522 filename = 'matrix%d.f' % (ime+1) 4523 calls, ncolor = \ 4524 self.write_matrix_element_v4(writers.FortranWriter(filename), 4525 matrix_element, 4526 fortran_model, 4527 proc_id=str(ime+1), 4528 config_map=subproc_group.get('diagram_maps')[ime], 4529 subproc_number=group_number) 4530 4531 filename = 'auto_dsig%d.f' % (ime+1) 4532 self.write_auto_dsig_file(writers.FortranWriter(filename), 4533 matrix_element, 4534 str(ime+1)) 4535 4536 # Keep track of needed quantities 4537 tot_calls += int(calls) 4538 maxflows = max(maxflows, ncolor) 4539 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 4540 4541 # Draw diagrams 4542 filename = "matrix%d.ps" % (ime+1) 4543 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 4544 get('diagrams'), 4545 filename, 4546 model = \ 4547 matrix_element.get('processes')[0].\ 4548 get('model'), 4549 amplitude=True) 4550 logger.info("Generating Feynman diagrams for " + \ 4551 matrix_element.get('processes')[0].nice_string()) 4552 plot.draw() 4553 4554 # Extract number of external particles 4555 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 4556 4557 # Generate a list of diagrams corresponding to each configuration 4558 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 4559 # If a subprocess has no diagrams for this config, the number is 0 4560 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 4561 4562 filename = 'auto_dsig.f' 4563 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 4564 subproc_group) 4565 4566 filename = 'coloramps.inc' 4567 self.write_coloramps_file(writers.FortranWriter(filename), 4568 subproc_diagrams_for_config, 4569 maxflows, 4570 matrix_elements) 4571 4572 filename = 'get_color.f' 4573 self.write_colors_file(writers.FortranWriter(filename), 4574 matrix_elements) 4575 4576 filename = 'config_subproc_map.inc' 4577 self.write_config_subproc_map_file(writers.FortranWriter(filename), 4578 subproc_diagrams_for_config) 4579 4580 filename = 'configs.inc' 4581 nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 4582 writers.FortranWriter(filename), 4583 subproc_group, 4584 subproc_diagrams_for_config) 4585 4586 filename = 'config_nqcd.inc' 4587 self.write_config_nqcd_file(writers.FortranWriter(filename), 4588 nqcd_list) 4589 4590 filename = 'decayBW.inc' 4591 self.write_decayBW_file(writers.FortranWriter(filename), 4592 s_and_t_channels) 4593 4594 filename = 'dname.mg' 4595 self.write_dname_file(writers.FortranWriter(filename), 4596 subprocdir) 4597 4598 filename = 'iproc.dat' 4599 self.write_iproc_file(writers.FortranWriter(filename), 4600 group_number) 4601 4602 filename = 'leshouche.inc' 4603 self.write_leshouche_file(writers.FortranWriter(filename), 4604 subproc_group) 4605 4606 filename = 'maxamps.inc' 4607 self.write_maxamps_file(writers.FortranWriter(filename), 4608 maxamps, 4609 maxflows, 4610 max([len(me.get('processes')) for me in \ 4611 matrix_elements]), 4612 len(matrix_elements)) 4613 4614 # Note that mg.sym is not relevant for this case 4615 filename = 'mg.sym' 4616 self.write_default_mg_sym_file(writers.FortranWriter(filename)) 4617 4618 filename = 'mirrorprocs.inc' 4619 self.write_mirrorprocs(writers.FortranWriter(filename), 4620 subproc_group) 4621 4622 filename = 'ncombs.inc' 4623 self.write_ncombs_file(writers.FortranWriter(filename), 4624 nexternal) 4625 4626 filename = 'nexternal.inc' 4627 self.write_nexternal_file(writers.FortranWriter(filename), 4628 nexternal, ninitial) 4629 4630 filename = 'ngraphs.inc' 4631 self.write_ngraphs_file(writers.FortranWriter(filename), 4632 nconfigs) 4633 4634 filename = 'pmass.inc' 4635 self.write_pmass_file(writers.FortranWriter(filename), 4636 matrix_element) 4637 4638 filename = 'props.inc' 4639 self.write_props_file(writers.FortranWriter(filename), 4640 matrix_element, 4641 s_and_t_channels) 4642 4643 filename = 'processes.dat' 4644 files.write_to_file(filename, 4645 self.write_processes_file, 4646 subproc_group) 4647 4648 # Find config symmetries and permutations 4649 symmetry, perms, ident_perms = \ 4650 diagram_symmetry.find_symmetry(subproc_group) 4651 4652 filename = 'symswap.inc' 4653 self.write_symswap_file(writers.FortranWriter(filename), 4654 ident_perms) 4655 4656 filename = 'symfact_orig.dat' 4657 self.write_symfact_file(open(filename, 'w'), symmetry) 4658 4659 filename = 'symperms.inc' 4660 self.write_symperms_file(writers.FortranWriter(filename), 4661 perms) 4662 4663 # Generate jpgs -> pass in make_html 4664 #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl')) 4665 4666 self.link_files_in_SubProcess(pjoin(pathdir,subprocdir)) 4667 4668 #import nexternal/leshouch in Source 4669 ln('nexternal.inc', '../../Source', log=False) 4670 ln('leshouche.inc', '../../Source', log=False) 
4671 ln('maxamps.inc', '../../Source', log=False) 4672 4673 # Return to SubProcesses dir 4674 os.chdir(pathdir) 4675 4676 # Add subprocess to subproc.mg 4677 filename = 'subproc.mg' 4678 files.append_to_file(filename, 4679 self.write_subproc, 4680 subprocdir) 4681 4682 # Return to original dir 4683 os.chdir(cwd) 4684 4685 if not tot_calls: 4686 tot_calls = 0 4687 return tot_calls
4688 4689 #=========================================================================== 4690 # write_super_auto_dsig_file 4691 #===========================================================================
4692 - def write_super_auto_dsig_file(self, writer, subproc_group):
4693 """Write the auto_dsig.f file selecting between the subprocesses 4694 in subprocess group mode""" 4695 4696 replace_dict = {} 4697 4698 # Extract version number and date from VERSION file 4699 info_lines = self.get_mg5_info_lines() 4700 replace_dict['info_lines'] = info_lines 4701 4702 matrix_elements = subproc_group.get('matrix_elements') 4703 4704 # Extract process info lines 4705 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 4706 matrix_elements]) 4707 replace_dict['process_lines'] = process_lines 4708 4709 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 4710 replace_dict['nexternal'] = nexternal 4711 4712 replace_dict['nsprocs'] = 2*len(matrix_elements) 4713 4714 # Generate dsig definition line 4715 dsig_def_line = "DOUBLE PRECISION " + \ 4716 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 4717 range(len(matrix_elements))]) 4718 replace_dict["dsig_def_line"] = dsig_def_line 4719 4720 # Generate dsig process lines 4721 call_dsig_proc_lines = [] 4722 for iproc in range(len(matrix_elements)): 4723 call_dsig_proc_lines.append(\ 4724 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 4725 {"num": iproc + 1, 4726 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 4727 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 4728 4729 ncomb=matrix_elements[0].get_helicity_combinations() 4730 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4731 4732 file = open(pjoin(_file_path, \ 4733 'iolibs/template_files/super_auto_dsig_group_v4.inc')).read() 4734 file = file % replace_dict 4735 4736 # Write the file 4737 writer.writelines(file)
4738 4739 #=========================================================================== 4740 # write_mirrorprocs 4741 #===========================================================================
4742 - def write_mirrorprocs(self, writer, subproc_group):
4743 """Write the mirrorprocs.inc file determining which processes have 4744 an initial-state (IS) mirror process in subprocess group mode.""" 4745 4746 lines = [] 4747 bool_dict = {True: '.true.', False: '.false.'} 4748 matrix_elements = subproc_group.get('matrix_elements') 4749 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 4750 (len(matrix_elements), 4751 ",".join([bool_dict[me.get('has_mirror_process')] for \ 4752 me in matrix_elements]))) 4753 # Write the file 4754 writer.writelines(lines)
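For illustration, the single DATA statement produced by write_mirrorprocs can be sketched with plain lists (the flags below are hypothetical, not taken from a real process):

bool_dict = {True: '.true.', False: '.false.'}
has_mirror = [True, False, True]          # hypothetical per-subprocess mirror flags
line = "DATA (MIRRORPROCS(I),I=1,%d)/%s/" % (
    len(has_mirror), ",".join(bool_dict[b] for b in has_mirror))
print(line)   # DATA (MIRRORPROCS(I),I=1,3)/.true.,.false.,.true./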
4755 4756 #=========================================================================== 4757 # write_addmothers 4758 #===========================================================================
4759 - def write_addmothers(self, writer):
4760 """Write the SubProcess/addmothers.f""" 4761 4762 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 4763 4764 text = open(path).read() % {'iconfig': 'lconfig'} 4765 writer.write(text) 4766 4767 return True
4768 4769 4770 #=========================================================================== 4771 # write_coloramps_file 4772 #===========================================================================
4773 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 4774 matrix_elements):
4775 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 4776 4777 # Create a map from subprocess (matrix element) to a list of 4778 # the diagrams corresponding to each config 4779 4780 lines = [] 4781 4782 subproc_to_confdiag = {} 4783 for config in diagrams_for_config: 4784 for subproc, diag in enumerate(config): 4785 try: 4786 subproc_to_confdiag[subproc].append(diag) 4787 except KeyError: 4788 subproc_to_confdiag[subproc] = [diag] 4789 4790 for subproc in sorted(subproc_to_confdiag.keys()): 4791 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 4792 matrix_elements[subproc], 4793 subproc + 1)) 4794 4795 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 4796 (maxflows, 4797 len(diagrams_for_config), 4798 len(matrix_elements))) 4799 4800 # Write the file 4801 writer.writelines(lines) 4802 4803 return True
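The transposition from a per-config list into a per-subprocess map is the core of this routine; a small sketch with made-up diagram numbers (0 meaning "no diagram for this config"):

# hypothetical input: one row per config, one entry per subprocess
diagrams_for_config = [[1, 0], [2, 3], [0, 4]]
subproc_to_confdiag = {}
for config in diagrams_for_config:
    for subproc, diag in enumerate(config):
        subproc_to_confdiag.setdefault(subproc, []).append(diag)
print(subproc_to_confdiag)   # {0: [1, 2, 0], 1: [0, 3, 4]}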
4804 4805 #=========================================================================== 4806 # write_config_subproc_map_file 4807 #===========================================================================
4808 - def write_config_subproc_map_file(self, writer, config_subproc_map):
4809 """Write the config_subproc_map.inc file for subprocess groups""" 4810 4811 lines = [] 4812 # Output only configs that have some corresponding diagrams 4813 iconfig = 0 4814 for config in config_subproc_map: 4815 if set(config) == set([0]): 4816 continue 4817 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 4818 (iconfig + 1, len(config), 4819 ",".join([str(i) for i in config]))) 4820 iconfig += 1 4821 # Write the file 4822 writer.writelines(lines) 4823 4824 return True
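A short sketch of the same filtering and DATA-line construction, using a hypothetical config_subproc_map in which the second config has no diagrams at all and is therefore skipped:

config_subproc_map = [[1, 0], [0, 0], [2, 3]]   # hypothetical
lines = []
iconfig = 0
for config in config_subproc_map:
    if set(config) == set([0]):
        continue                                # no subprocess contributes here
    lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" %
                 (iconfig + 1, len(config), ",".join(str(i) for i in config)))
    iconfig += 1
print("\n".join(lines))
# DATA (CONFSUB(i,1),i=1,2)/1,0/
# DATA (CONFSUB(i,2),i=1,2)/2,3/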
4825 4826 #=========================================================================== 4827 # read_write_good_hel 4828 #===========================================================================
4829 - def read_write_good_hel(self, ncomb):
4830 """return the code to read/write the good_hel common_block""" 4831 4832 convert = {'ncomb' : ncomb} 4833 4834 output = """ 4835 subroutine write_good_hel(stream_id) 4836 implicit none 4837 integer stream_id 4838 INTEGER NCOMB 4839 PARAMETER ( NCOMB=%(ncomb)d) 4840 LOGICAL GOODHEL(NCOMB, 2) 4841 INTEGER NTRY(2) 4842 common/BLOCK_GOODHEL/NTRY,GOODHEL 4843 write(stream_id,*) GOODHEL 4844 return 4845 end 4846 4847 4848 subroutine read_good_hel(stream_id) 4849 implicit none 4850 include 'genps.inc' 4851 integer stream_id 4852 INTEGER NCOMB 4853 PARAMETER ( NCOMB=%(ncomb)d) 4854 LOGICAL GOODHEL(NCOMB, 2) 4855 INTEGER NTRY(2) 4856 common/BLOCK_GOODHEL/NTRY,GOODHEL 4857 read(stream_id,*) GOODHEL 4858 NTRY(1) = MAXTRIES + 1 4859 NTRY(2) = MAXTRIES + 1 4860 return 4861 end 4862 4863 subroutine init_good_hel() 4864 implicit none 4865 INTEGER NCOMB 4866 PARAMETER ( NCOMB=%(ncomb)d) 4867 LOGICAL GOODHEL(NCOMB, 2) 4868 INTEGER NTRY(2) 4869 INTEGER I 4870 4871 do i=1,NCOMB 4872 GOODHEL(I,1) = .false. 4873 GOODHEL(I,2) = .false. 4874 enddo 4875 NTRY(1) = 0 4876 NTRY(2) = 0 4877 end 4878 4879 integer function get_maxsproc() 4880 implicit none 4881 include 'maxamps.inc' 4882 4883 get_maxsproc = maxsproc 4884 return 4885 end 4886 4887 """ % convert 4888 4889 return output
4890 4891 4892 4893 #=========================================================================== 4894 # write_configs_file 4895 #===========================================================================
4896 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
4897 """Write the configs.inc file with topology information for a 4898 subprocess group. Use the first subprocess with a diagram for each 4899 configuration.""" 4900 4901 matrix_elements = subproc_group.get('matrix_elements') 4902 model = matrix_elements[0].get('processes')[0].get('model') 4903 4904 diagrams = [] 4905 config_numbers = [] 4906 for iconfig, config in enumerate(diagrams_for_config): 4907 # Check if any diagrams correspond to this config 4908 if set(config) == set([0]): 4909 continue 4910 subproc_diags = [] 4911 for s,d in enumerate(config): 4912 if d: 4913 subproc_diags.append(matrix_elements[s].\ 4914 get('diagrams')[d-1]) 4915 else: 4916 subproc_diags.append(None) 4917 diagrams.append(subproc_diags) 4918 config_numbers.append(iconfig + 1) 4919 4920 # Extract number of external particles 4921 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 4922 4923 return len(diagrams), \ 4924 self.write_configs_file_from_diagrams(writer, diagrams, 4925 config_numbers, 4926 nexternal, ninitial, 4927 model)
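The selection logic (drop configs with no diagrams, keep the original config number, and use None as a placeholder for subprocesses that do not contribute) can be sketched with plain lists; the diagram numbers below are hypothetical:

diagrams_for_config = [[0, 0], [1, 2], [0, 3]]
config_numbers, kept = [], []
for iconfig, config in enumerate(diagrams_for_config):
    if set(config) == set([0]):
        continue                      # no subprocess has a diagram for this config
    kept.append([d if d else None for d in config])
    config_numbers.append(iconfig + 1)
print(config_numbers, kept)   # [2, 3] [[1, 2], [None, 3]]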
4928 4929 #=========================================================================== 4930 # write_run_configs_file 4931 #===========================================================================
4932 - def write_run_config_file(self, writer):
4933 """Write the run_configs.inc file for MadEvent""" 4934 4935 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 4936 if self.proc_characteristic['loop_induced']: 4937 job_per_chan = 1 4938 else: 4939 job_per_chan = 2 4940 text = open(path).read() % {'chanperjob':job_per_chan} 4941 writer.write(text) 4942 return True
4943 4944 4945 #=========================================================================== 4946 # write_leshouche_file 4947 #===========================================================================
4948 - def write_leshouche_file(self, writer, subproc_group):
4949 """Write the leshouche.inc file for MG4""" 4950 4951 all_lines = [] 4952 4953 for iproc, matrix_element in \ 4954 enumerate(subproc_group.get('matrix_elements')): 4955 all_lines.extend(self.get_leshouche_lines(matrix_element, 4956 iproc)) 4957 4958 # Write the file 4959 writer.writelines(all_lines) 4960 4961 return True
4962 4963 4964
4965 - def finalize_v4_directory(self,*args, **opts):
4966 4967 4968 4969 super(ProcessExporterFortranMEGroup, self).finalize_v4_directory(*args, 4970 **opts) 4971 # ensure that the grouping information is set to the correct value 4972 self.proc_characteristic['grouped_matrix'] = True
4973 4974 4975 #=============================================================================== 4976 # UFO_model_to_mg4 4977 #=============================================================================== 4978 4979 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)
4980 4981 -class UFO_model_to_mg4(object):
4982 """ A converter of the UFO-MG5 Model to the MG4 format """ 4983 4984 # The list below shows the only variables the user is allowed to change 4985 # for each PS point. If any other variable is changed, then calling 4986 # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not 4987 # correctly account for the change. 4988 PS_dependent_key = ['aS','MU_R'] 4989 mp_complex_format = 'complex*32' 4990 mp_real_format = 'real*16' 4991 # Warning: it is crucial that none of the couplings/parameters of the model 4992 # starts with this prefix (a check for this should be added). 4993 # It can be changed via the corresponding global variable in check_param_card.ParamCard. 4994 mp_prefix = check_param_card.ParamCard.mp_prefix 4995
4996 - def __init__(self, model, output_path, opt=None):
4997 """ initialization of the objects """ 4998 4999 self.model = model 5000 self.model_name = model['name'] 5001 self.dir_path = output_path 5002 if opt: 5003 self.opt = opt 5004 else: 5005 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5006 'loop_induced': False} 5007 5008 self.coups_dep = [] # (name, expression, type) 5009 self.coups_indep = [] # (name, expression, type) 5010 self.params_dep = [] # (name, expression, type) 5011 self.params_indep = [] # (name, expression, type) 5012 self.params_ext = [] # external parameter 5013 self.p_to_f = parsers.UFOExpressionParserFortran() 5014 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran()
5015 5016 - def pass_parameter_to_case_insensitive(self):
5017 """modify the parameters if some of them are identical up to case""" 5018 5019 lower_dict={} 5020 duplicate = set() 5021 keys = self.model['parameters'].keys() 5022 for key in keys: 5023 for param in self.model['parameters'][key]: 5024 lower_name = param.name.lower() 5025 if not lower_name: 5026 continue 5027 try: 5028 lower_dict[lower_name].append(param) 5029 except KeyError,error: 5030 lower_dict[lower_name] = [param] 5031 else: 5032 duplicate.add(lower_name) 5033 logger.debug('%s is defined both as lower case and upper case.' 5034 % lower_name) 5035 if not duplicate: 5036 return 5037 5038 re_expr = r'''\b(%s)\b''' 5039 to_change = [] 5040 change={} 5041 for value in duplicate: 5042 for i, var in enumerate(lower_dict[value]): 5043 to_change.append(var.name) 5044 new_name = '%s%s' % (var.name.lower(), 5045 ('__%d'%(i+1) if i>0 else '')) 5046 change[var.name] = new_name 5047 var.name = new_name 5048 5049 # Apply the modification to the map_CTcoup_CTparam of the model 5050 # if it has one (giving for each coupling the CT parameters which 5051 # are necessary and which should be exported to the model). 5052 if hasattr(self.model,'map_CTcoup_CTparam'): 5053 for coup, ctparams in self.model.map_CTcoup_CTparam.items(): 5054 for i, ctparam in enumerate(ctparams): 5055 try: 5056 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5057 except KeyError: 5058 pass 5059 5060 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5061 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5062 5063 # change parameters 5064 for key in keys: 5065 if key == ('external',): 5066 continue 5067 for param in self.model['parameters'][key]: 5068 param.expr = rep_pattern.sub(replace, param.expr) 5069 5070 # change couplings 5071 for key in self.model['couplings'].keys(): 5072 for coup in self.model['couplings'][key]: 5073 coup.expr = rep_pattern.sub(replace, coup.expr) 5074 5075 # change mass/width 5076 for part in self.model['particles']: 5077 if str(part.get('mass')) in to_change: 5078 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5079 if str(part.get('width')) in to_change: 5080 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
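The renaming scheme used above (the first occurrence keeps the lower-cased name, later case-variants get a __N suffix, and a single word-boundary regex rewrites every expression) can be sketched in isolation; the parameter names and expression below are invented for illustration:

import re

names = ['Mh', 'MH', 'mh']               # hypothetical parameters identical up to case
change = {}
for i, name in enumerate(names):
    change[name] = name.lower() + ('__%d' % (i + 1) if i > 0 else '')
rep_pattern = re.compile(r'\b(%s)\b' % '|'.join(change))
expr = 'MH**2 + Mh*mh'
print(rep_pattern.sub(lambda m: change[m.group(1)], expr))   # mh__2**2 + mh*mh__3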
5081
5082 - def refactorize(self, wanted_couplings = []):
5083 """modify the couplings to fit with MG4 convention """ 5084 5085 # Keep only separation in alphaS 5086 keys = self.model['parameters'].keys() 5087 keys.sort(key=len) 5088 for key in keys: 5089 to_add = [o for o in self.model['parameters'][key] if o.name] 5090 5091 if key == ('external',): 5092 self.params_ext += to_add 5093 elif any([(k in key) for k in self.PS_dependent_key]): 5094 self.params_dep += to_add 5095 else: 5096 self.params_indep += to_add 5097 # same for couplings 5098 keys = self.model['couplings'].keys() 5099 keys.sort(key=len) 5100 for key, coup_list in self.model['couplings'].items(): 5101 if any([(k in key) for k in self.PS_dependent_key]): 5102 self.coups_dep += [c for c in coup_list if 5103 (not wanted_couplings or c.name in \ 5104 wanted_couplings)] 5105 else: 5106 self.coups_indep += [c for c in coup_list if 5107 (not wanted_couplings or c.name in \ 5108 wanted_couplings)] 5109 5110 # MG4 uses G and not aS as its basic object for alphaS-related computations 5111 # Pass G into the independent list 5112 if 'G' in self.params_dep: 5113 index = self.params_dep.index('G') 5114 G = self.params_dep.pop(index) 5115 # G.expr = '2*cmath.sqrt(as*pi)' 5116 # self.params_indep.insert(0, self.params_dep.pop(index)) 5117 # No need to add it if not defined 5118 5119 if 'aS' not in self.params_ext: 5120 logger.critical('aS not defined as an external parameter, adding it!') 5121 #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,)) 5122 self.params_indep.append( base_objects.ModelVariable('aS', '0.138','real')) 5123 self.params_indep.append( base_objects.ModelVariable('G', '4.1643','real'))
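The classification into external, alphaS-dependent, and independent parameters hinges on whether one of the PS_dependent_key strings appears in the dependency key; a minimal sketch with an invented, much-reduced parameter dictionary:

PS_dependent_key = ['aS', 'MU_R']
# hypothetical model content: dependency key -> parameter names
parameters = {('external',): ['Gf', 'aEWM1'], ('aS',): ['G'], (): ['MDL_MW']}
params_ext, params_dep, params_indep = [], [], []
for key, values in parameters.items():
    if key == ('external',):
        params_ext += values
    elif any(k in key for k in PS_dependent_key):
        params_dep += values
    else:
        params_indep += values
print(params_ext, params_dep, params_indep)   # ['Gf', 'aEWM1'] ['G'] ['MDL_MW']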
5124 - def build(self, wanted_couplings = [], full=True):
5125 """modify the couplings to fit with MG4 convention and creates all the 5126 different files""" 5127 5128 self.pass_parameter_to_case_insensitive() 5129 self.refactorize(wanted_couplings) 5130 5131 # write the files 5132 if full: 5133 if wanted_couplings: 5134 # extract the wanted ct parameters 5135 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5136 self.write_all()
5137 5138
5139 - def open(self, name, comment='c', format='default'):
5140 """ Open the file name in the correct directory and with a valid 5141 header.""" 5142 5143 file_path = pjoin(self.dir_path, name) 5144 5145 if format == 'fortran': 5146 fsock = writers.FortranWriter(file_path, 'w') 5147 else: 5148 fsock = open(file_path, 'w') 5149 5150 file.writelines(fsock, comment * 77 + '\n') 5151 file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \ 5152 {'comment': comment + (6 - len(comment)) * ' '}) 5153 file.writelines(fsock, comment * 77 + '\n\n') 5154 return fsock
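For reference, the three-line banner that open() prepends to every generated model file can be reproduced with the same string operations (assuming the default comment character 'c'):

comment = 'c'
header = (comment * 77 + '\n' +
          '%s written by the UFO converter\n' % (comment + (6 - len(comment)) * ' ') +
          comment * 77 + '\n\n')
print(header)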
5155 5156
5157 - def write_all(self):
5158 """ write all the files """ 5159 #write the part related to the external parameter 5160 self.create_ident_card() 5161 self.create_param_read() 5162 5163 #write the definition of the parameter 5164 self.create_input() 5165 self.create_intparam_def(dp=True,mp=False) 5166 if self.opt['mp']: 5167 self.create_intparam_def(dp=False,mp=True) 5168 5169 # definition of the coupling. 5170 self.create_actualize_mp_ext_param_inc() 5171 self.create_coupl_inc() 5172 self.create_write_couplings() 5173 self.create_couplings() 5174 5175 # the makefile 5176 self.create_makeinc() 5177 self.create_param_write() 5178 5179 # The model functions 5180 self.create_model_functions_inc() 5181 self.create_model_functions_def() 5182 5183 # The param_card.dat 5184 self.create_param_card() 5185 5186 5187 # All the standard files 5188 self.copy_standard_file()
5189 5190 ############################################################################ 5191 ## ROUTINE CREATING THE FILES ############################################ 5192 ############################################################################ 5193
5194 - def copy_standard_file(self):
5195 """Copy the standard files for the fortran model.""" 5196 5197 #copy the library files 5198 file_to_link = ['formats.inc','printout.f', \ 5199 'rw_para.f', 'testprog.f'] 5200 5201 for filename in file_to_link: 5202 cp( MG5DIR + '/models/template_files/fortran/' + filename, \ 5203 self.dir_path) 5204 5205 file = open(os.path.join(MG5DIR,\ 5206 'models/template_files/fortran/rw_para.f')).read() 5207 5208 includes=["include \'coupl.inc\'","include \'input.inc\'", 5209 "include \'model_functions.inc\'"] 5210 if self.opt['mp']: 5211 includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"]) 5212 # In standalone and madloop we do no use the compiled param card but 5213 # still parse the .dat one so we must load it. 5214 if self.opt['loop_induced']: 5215 #loop induced follow MadEvent way to handle the card. 5216 load_card = '' 5217 lha_read_filename='lha_read.f' 5218 elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']: 5219 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5220 lha_read_filename='lha_read_mp.f' 5221 elif self.opt['export_format'].startswith('standalone') or self.opt['export_format'] in ['madweight']\ 5222 or self.opt['export_format'].startswith('matchbox'): 5223 load_card = 'call LHA_loadcard(param_name,npara,param,value)' 5224 lha_read_filename='lha_read.f' 5225 else: 5226 load_card = '' 5227 lha_read_filename='lha_read.f' 5228 cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \ 5229 os.path.join(self.dir_path,'lha_read.f')) 5230 5231 file=file%{'includes':'\n '.join(includes), 5232 'load_card':load_card} 5233 writer=open(os.path.join(self.dir_path,'rw_para.f'),'w') 5234 writer.writelines(file) 5235 writer.close() 5236 5237 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 5238 or self.opt['loop_induced']: 5239 cp( MG5DIR + '/models/template_files/fortran/makefile_madevent', 5240 self.dir_path + '/makefile') 5241 if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']: 5242 path = pjoin(self.dir_path, 'makefile') 5243 text = open(path).read() 5244 text = text.replace('madevent','aMCatNLO') 5245 open(path, 'w').writelines(text) 5246 elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF', 5247 'madloop','madloop_optimized', 'standalone_rw', 'madweight','matchbox','madloop_matchbox']: 5248 cp( MG5DIR + '/models/template_files/fortran/makefile_standalone', 5249 self.dir_path + '/makefile') 5250 #elif self.opt['export_format'] in []: 5251 #pass 5252 else: 5253 raise MadGraph5Error('Unknown format')
5254
5255 - def create_coupl_inc(self):
5256 """ write coupling.inc """ 5257 5258 fsock = self.open('coupl.inc', format='fortran') 5259 if self.opt['mp']: 5260 mp_fsock = self.open('mp_coupl.inc', format='fortran') 5261 mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\ 5262 format='fortran') 5263 5264 # Write header 5265 header = """double precision G 5266 common/strong/ G 5267 5268 double complex gal(2) 5269 common/weak/ gal 5270 5271 double precision MU_R 5272 common/rscale/ MU_R 5273 5274 double precision Nf 5275 parameter(Nf=%d) 5276 """ % self.model.get_nflav() 5277 5278 fsock.writelines(header) 5279 5280 if self.opt['mp']: 5281 header = """%(real_mp_format)s %(mp_prefix)sG 5282 common/MP_strong/ %(mp_prefix)sG 5283 5284 %(complex_mp_format)s %(mp_prefix)sgal(2) 5285 common/MP_weak/ %(mp_prefix)sgal 5286 5287 %(complex_mp_format)s %(mp_prefix)sMU_R 5288 common/MP_rscale/ %(mp_prefix)sMU_R 5289 5290 """ 5291 5292 5293 5294 5295 mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format, 5296 'complex_mp_format':self.mp_complex_format, 5297 'mp_prefix':self.mp_prefix}) 5298 mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format, 5299 'complex_mp_format':self.mp_complex_format, 5300 'mp_prefix':''}) 5301 5302 # Write the Mass definition/ common block 5303 masses = set() 5304 widths = set() 5305 if self.opt['complex_mass']: 5306 complex_mass = set() 5307 5308 for particle in self.model.get('particles'): 5309 #find masses 5310 one_mass = particle.get('mass') 5311 if one_mass.lower() != 'zero': 5312 masses.add(one_mass) 5313 5314 # find width 5315 one_width = particle.get('width') 5316 if one_width.lower() != 'zero': 5317 widths.add(one_width) 5318 if self.opt['complex_mass'] and one_mass.lower() != 'zero': 5319 complex_mass.add('CMASS_%s' % one_mass) 5320 5321 if masses: 5322 fsock.writelines('double precision '+','.join(masses)+'\n') 5323 fsock.writelines('common/masses/ '+','.join(masses)+'\n\n') 5324 if self.opt['mp']: 5325 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5326 ','.join(masses)+'\n') 5327 mp_fsock_same_name.writelines('common/MP_masses/ '+\ 5328 ','.join(masses)+'\n\n') 5329 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5330 self.mp_prefix+m for m in masses])+'\n') 5331 mp_fsock.writelines('common/MP_masses/ '+\ 5332 ','.join([self.mp_prefix+m for m in masses])+'\n\n') 5333 5334 if widths: 5335 fsock.writelines('double precision '+','.join(widths)+'\n') 5336 fsock.writelines('common/widths/ '+','.join(widths)+'\n\n') 5337 if self.opt['mp']: 5338 mp_fsock_same_name.writelines(self.mp_real_format+' '+\ 5339 ','.join(widths)+'\n') 5340 mp_fsock_same_name.writelines('common/MP_widths/ '+\ 5341 ','.join(widths)+'\n\n') 5342 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5343 self.mp_prefix+w for w in widths])+'\n') 5344 mp_fsock.writelines('common/MP_widths/ '+\ 5345 ','.join([self.mp_prefix+w for w in widths])+'\n\n') 5346 5347 # Write the Couplings 5348 coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep] 5349 fsock.writelines('double complex '+', '.join(coupling_list)+'\n') 5350 fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n') 5351 if self.opt['mp']: 5352 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5353 ','.join(coupling_list)+'\n') 5354 mp_fsock_same_name.writelines('common/MP_couplings/ '+\ 5355 ','.join(coupling_list)+'\n\n') 5356 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5357 self.mp_prefix+c for c in coupling_list])+'\n') 5358 mp_fsock.writelines('common/MP_couplings/ '+\ 
5359 ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n') 5360 5361 # Write complex mass for complex mass scheme (if activated) 5362 if self.opt['complex_mass'] and complex_mass: 5363 fsock.writelines('double complex '+', '.join(complex_mass)+'\n') 5364 fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n') 5365 if self.opt['mp']: 5366 mp_fsock_same_name.writelines(self.mp_complex_format+' '+\ 5367 ','.join(complex_mass)+'\n') 5368 mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\ 5369 ','.join(complex_mass)+'\n\n') 5370 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5371 self.mp_prefix+cm for cm in complex_mass])+'\n') 5372 mp_fsock.writelines('common/MP_complex_mass/ '+\ 5373 ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5374
5375 - def create_write_couplings(self):
5376 """ write the file coupl_write.inc """ 5377 5378 fsock = self.open('coupl_write.inc', format='fortran') 5379 5380 fsock.writelines("""write(*,*) ' Couplings of %s' 5381 write(*,*) ' ---------------------------------' 5382 write(*,*) ' '""" % self.model_name) 5383 def format(coupl): 5384 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5385 5386 # Write the Couplings 5387 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5388 fsock.writelines('\n'.join(lines)) 5389 5390
5391 - def create_input(self):
5392 """create input.inc containing the definition of the parameters""" 5393 5394 fsock = self.open('input.inc', format='fortran') 5395 if self.opt['mp']: 5396 mp_fsock = self.open('mp_input.inc', format='fortran') 5397 5398 #find mass/ width since they are already define 5399 already_def = set() 5400 for particle in self.model.get('particles'): 5401 already_def.add(particle.get('mass').lower()) 5402 already_def.add(particle.get('width').lower()) 5403 if self.opt['complex_mass']: 5404 already_def.add('cmass_%s' % particle.get('mass').lower()) 5405 5406 is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \ 5407 name.lower() not in already_def 5408 5409 real_parameters = [param.name for param in self.params_dep + 5410 self.params_indep if param.type == 'real' 5411 and is_valid(param.name)] 5412 5413 real_parameters += [param.name for param in self.params_ext 5414 if param.type == 'real'and 5415 is_valid(param.name)] 5416 5417 # check the parameter is a CT parameter or not 5418 # if yes, just use the needed ones 5419 real_parameters = [param for param in real_parameters \ 5420 if self.check_needed_param(param)] 5421 5422 fsock.writelines('double precision '+','.join(real_parameters)+'\n') 5423 fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n') 5424 if self.opt['mp']: 5425 mp_fsock.writelines(self.mp_real_format+' '+','.join([\ 5426 self.mp_prefix+p for p in real_parameters])+'\n') 5427 mp_fsock.writelines('common/MP_params_R/ '+','.join([\ 5428 self.mp_prefix+p for p in real_parameters])+'\n\n') 5429 5430 complex_parameters = [param.name for param in self.params_dep + 5431 self.params_indep if param.type == 'complex' and 5432 is_valid(param.name)] 5433 5434 # check the parameter is a CT parameter or not 5435 # if yes, just use the needed ones 5436 complex_parameters = [param for param in complex_parameters \ 5437 if self.check_needed_param(param)] 5438 5439 if complex_parameters: 5440 fsock.writelines('double complex '+','.join(complex_parameters)+'\n') 5441 fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n') 5442 if self.opt['mp']: 5443 mp_fsock.writelines(self.mp_complex_format+' '+','.join([\ 5444 self.mp_prefix+p for p in complex_parameters])+'\n') 5445 mp_fsock.writelines('common/MP_params_C/ '+','.join([\ 5446 self.mp_prefix+p for p in complex_parameters])+'\n\n')
5447
5448 - def check_needed_param(self, param):
5449 """ Returns whether the parameter in argument is needed for this 5450 specific computation or not.""" 5451 5452 # If this is a leading order model or if there was no CT parameter 5453 # employed in this NLO model, one can directly return that the 5454 # parameter is needed since only CTParameters are filtered. 5455 if not hasattr(self, 'allCTparameters') or \ 5456 self.allCTparameters is None or self.usedCTparameters is None or \ 5457 len(self.allCTparameters)==0: 5458 return True 5459 5460 # We must allow the conjugate shorthand for the complex parameter as 5461 # well, so we check whether either the parameter name or its name with 5462 # 'conjg__' substituted with '' is present in the list. 5463 # This is acceptable even if some parameter had an original name 5464 # including 'conjg__' in it, because at worst we export a parameter 5465 # that was not needed. 5466 param = param.lower() 5467 cjg_param = param.replace('conjg__','',1) 5468 5469 # First make sure it is a CTparameter 5470 if param not in self.allCTparameters and \ 5471 cjg_param not in self.allCTparameters: 5472 return True 5473 5474 # Now check if it is in the list of CTparameters actually used 5475 return (param in self.usedCTparameters or \ 5476 cjg_param in self.usedCTparameters)
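A self-contained sketch of this filter, with invented CT-parameter lists: ordinary parameters always pass, a used CT parameter passes, and an unused one (even in its conjg__ shorthand) is dropped:

allCTparameters = ['ctp1', 'ctp2']      # hypothetical, already lower-cased
usedCTparameters = ['ctp1']

def check_needed(param):
    param = param.lower()
    cjg = param.replace('conjg__', '', 1)
    if param not in allCTparameters and cjg not in allCTparameters:
        return True                     # ordinary model parameter: always kept
    return param in usedCTparameters or cjg in usedCTparameters

print([p for p in ['MDL_MW', 'CTP1', 'conjg__ctp2'] if check_needed(p)])
# ['MDL_MW', 'CTP1']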
5477
5478 - def extract_needed_CTparam(self,wanted_couplings=[]):
5479 """ Extract the needed CT parameters given the wanted_couplings""" 5480 5481 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5482 # Setting these lists to None will disable the filtering in 5483 # check_needed_param 5484 self.allCTparameters = None 5485 self.usedCTparameters = None 5486 return 5487 5488 # All CT parameters appearing in all CT couplings 5489 allCTparameters=self.model.map_CTcoup_CTparam.values() 5490 # Define in this class the list of all CT parameters 5491 self.allCTparameters=list(\ 5492 set(itertools.chain.from_iterable(allCTparameters))) 5493 5494 # All used CT couplings 5495 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5496 allUsedCTCouplings = [coupl for coupl in 5497 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5498 5499 # Now define the list of all CT parameters that are actually used 5500 self.usedCTparameters=list(\ 5501 set(itertools.chain.from_iterable([ 5502 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5503 ]))) 5504 5505 # Finally, make these lists case insensitive 5506 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5507 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
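The same chain/set reduction, sketched with a hypothetical coupling-to-CT-parameter map and a single wanted coupling:

import itertools

map_CTcoup_CTparam = {'GC_1': ['CTa'], 'GC_2': ['CTb', 'CTc']}   # hypothetical
wanted = ['gc_2']
all_ct = set(itertools.chain.from_iterable(map_CTcoup_CTparam.values()))
used = set(itertools.chain.from_iterable(
    params for coup, params in map_CTcoup_CTparam.items() if coup.lower() in wanted))
print(sorted(p.lower() for p in all_ct), sorted(p.lower() for p in used))
# ['cta', 'ctb', 'ctc'] ['ctb', 'ctc']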
5508
5509 - def create_intparam_def(self, dp=True, mp=False):
5510 """ create intparam_definition.inc setting the internal parameters. 5511 Output the double precision and/or the multiple precision parameters 5512 depending on the parameters dp and mp. If mp only, then the file names 5513 get the 'mp_' prefix. 5514 """ 5515 5516 fsock = self.open('%sintparam_definition.inc'% 5517 ('mp_' if mp and not dp else ''), format='fortran') 5518 5519 fsock.write_comments(\ 5520 "Parameters that should not be recomputed event by event.\n") 5521 fsock.writelines("if(readlha) then\n") 5522 if dp: 5523 fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n") 5524 if mp: 5525 fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n") 5526 5527 for param in self.params_indep: 5528 if param.name == 'ZERO': 5529 continue 5530 # check whether the parameter is a CT parameter 5531 # if yes,just used the needed ones 5532 if not self.check_needed_param(param.name): 5533 continue 5534 if dp: 5535 fsock.writelines("%s = %s\n" % (param.name, 5536 self.p_to_f.parse(param.expr))) 5537 if mp: 5538 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5539 self.mp_p_to_f.parse(param.expr))) 5540 5541 fsock.writelines('endif') 5542 5543 fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n') 5544 if dp: 5545 fsock.writelines("aS = G**2/4/pi\n") 5546 if mp: 5547 fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n") 5548 for param in self.params_dep: 5549 # check whether the parameter is a CT parameter 5550 # if yes,just used the needed ones 5551 if not self.check_needed_param(param.name): 5552 continue 5553 if dp: 5554 fsock.writelines("%s = %s\n" % (param.name, 5555 self.p_to_f.parse(param.expr))) 5556 elif mp: 5557 fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name, 5558 self.mp_p_to_f.parse(param.expr))) 5559 5560 fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n") 5561 if ('aEWM1',) in self.model['parameters']: 5562 if dp: 5563 fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1) 5564 gal(2) = 1d0 5565 """) 5566 elif mp: 5567 fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1) 5568 %(mp_prefix)sgal(2) = 1d0 5569 """ %{'mp_prefix':self.mp_prefix}) 5570 pass 5571 # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable 5572 elif ('Gf',) in self.model['parameters']: 5573 if dp: 5574 fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf) 5575 gal(2) = 1d0 5576 """) 5577 elif mp: 5578 fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf) 5579 %(mp_prefix)sgal(2) = 1d0 5580 """ %{'mp_prefix':self.mp_prefix}) 5581 pass 5582 else: 5583 if dp: 5584 logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE') 5585 fsock.writelines(""" gal(1) = 1d0 5586 gal(2) = 1d0 5587 """) 5588 elif mp: 5589 fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16 5590 %(mp_prefix)sgal(2) = 1e0_16 5591 """%{'mp_prefix':self.mp_prefix})
5592 5593
5594 - def create_couplings(self):
5595 """ create couplings.f and all couplingsX.f """ 5596 5597 nb_def_by_file = 25 5598 5599 self.create_couplings_main(nb_def_by_file) 5600 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5601 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5602 5603 for i in range(nb_coup_indep): 5604 # For the independent couplings, we compute the double and multiple 5605 # precision ones together 5606 data = self.coups_indep[nb_def_by_file * i: 5607 min(len(self.coups_indep), nb_def_by_file * (i+1))] 5608 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 5609 5610 for i in range(nb_coup_dep): 5611 # For the dependent couplings, we compute the double and multiple 5612 # precision ones in separate subroutines. 5613 data = self.coups_dep[nb_def_by_file * i: 5614 min(len(self.coups_dep), nb_def_by_file * (i+1))] 5615 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5616 dp=True,mp=False) 5617 if self.opt['mp']: 5618 self.create_couplings_part( i + 1 + nb_coup_indep , data, 5619 dp=False,mp=True)
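The splitting of the coupling list into files of at most 25 definitions follows directly from the arithmetic above; a sketch with 60 invented coupling names:

nb_def_by_file = 25
coups_indep = ['GC_%d' % i for i in range(1, 61)]   # hypothetical 60 couplings
nb_files = 1 + len(coups_indep) // nb_def_by_file
for i in range(nb_files):
    data = coups_indep[nb_def_by_file * i: nb_def_by_file * (i + 1)]
    print('couplings%d.f gets %d couplings' % (i + 1, len(data)))
# couplings1.f gets 25 couplings
# couplings2.f gets 25 couplings
# couplings3.f gets 10 couplings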
5620 5621
5622 - def create_couplings_main(self, nb_def_by_file=25):
5623 """ create couplings.f """ 5624 5625 fsock = self.open('couplings.f', format='fortran') 5626 5627 fsock.writelines("""subroutine coup() 5628 5629 implicit none 5630 double precision PI, ZERO 5631 logical READLHA 5632 parameter (PI=3.141592653589793d0) 5633 parameter (ZERO=0d0) 5634 include \'model_functions.inc\'""") 5635 if self.opt['mp']: 5636 fsock.writelines("""%s MP__PI, MP__ZERO 5637 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5638 parameter (MP__ZERO=0e0_16) 5639 include \'mp_input.inc\' 5640 include \'mp_coupl.inc\' 5641 """%self.mp_real_format) 5642 fsock.writelines("""include \'input.inc\' 5643 include \'coupl.inc\' 5644 READLHA = .true. 5645 include \'intparam_definition.inc\'""") 5646 if self.opt['mp']: 5647 fsock.writelines("""include \'mp_intparam_definition.inc\'\n""") 5648 5649 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5650 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5651 5652 fsock.writelines('\n'.join(\ 5653 ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)])) 5654 5655 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5656 5657 fsock.writelines('\n'.join(\ 5658 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5659 for i in range(nb_coup_dep)])) 5660 if self.opt['mp']: 5661 fsock.writelines('\n'.join(\ 5662 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5663 for i in range(nb_coup_dep)])) 5664 fsock.writelines('''\n return \n end\n''') 5665 5666 fsock.writelines("""subroutine update_as_param() 5667 5668 implicit none 5669 double precision PI, ZERO 5670 logical READLHA 5671 parameter (PI=3.141592653589793d0) 5672 parameter (ZERO=0d0) 5673 include \'model_functions.inc\'""") 5674 fsock.writelines("""include \'input.inc\' 5675 include \'coupl.inc\' 5676 READLHA = .false.""") 5677 fsock.writelines(""" 5678 include \'intparam_definition.inc\'\n 5679 """) 5680 5681 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5682 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5683 5684 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5685 5686 fsock.writelines('\n'.join(\ 5687 ['call coup%s()' % (nb_coup_indep + i + 1) \ 5688 for i in range(nb_coup_dep)])) 5689 fsock.writelines('''\n return \n end\n''') 5690 5691 fsock.writelines("""subroutine update_as_param2(mu_r2,as2) 5692 5693 implicit none 5694 double precision PI 5695 parameter (PI=3.141592653589793d0) 5696 double precision mu_r2, as2 5697 include \'model_functions.inc\'""") 5698 fsock.writelines("""include \'input.inc\' 5699 include \'coupl.inc\'""") 5700 fsock.writelines(""" 5701 if (mu_r2.gt.0d0) MU_R = mu_r2 5702 G = SQRT(4.0d0*PI*AS2) 5703 AS = as2 5704 5705 CALL UPDATE_AS_PARAM() 5706 """) 5707 fsock.writelines('''\n return \n end\n''') 5708 5709 if self.opt['mp']: 5710 fsock.writelines("""subroutine mp_update_as_param() 5711 5712 implicit none 5713 logical READLHA 5714 include \'model_functions.inc\'""") 5715 fsock.writelines("""%s MP__PI, MP__ZERO 5716 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5717 parameter (MP__ZERO=0e0_16) 5718 include \'mp_input.inc\' 5719 include \'mp_coupl.inc\' 5720 """%self.mp_real_format) 5721 fsock.writelines("""include \'input.inc\' 5722 include \'coupl.inc\' 5723 include \'actualize_mp_ext_params.inc\' 5724 READLHA = .false. 
5725 include \'mp_intparam_definition.inc\'\n 5726 """) 5727 5728 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 5729 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 5730 5731 fsock.write_comments('\ncouplings needed to be evaluated points by points\n') 5732 5733 fsock.writelines('\n'.join(\ 5734 ['call mp_coup%s()' % (nb_coup_indep + i + 1) \ 5735 for i in range(nb_coup_dep)])) 5736 fsock.writelines('''\n return \n end\n''')
5737
5738 - def create_couplings_part(self, nb_file, data, dp=True, mp=False):
5739 """ create couplings[nb_file].f containing information coming from data. 5740 Outputs the computation of the double precision and/or the multiple 5741 precision couplings depending on the parameters dp and mp. 5742 If mp is True and dp is False, then the prefix 'MP_' is appended to the 5743 filename and subroutine name. 5744 """ 5745 5746 fsock = self.open('%scouplings%s.f' %('mp_' if mp and not dp else '', 5747 nb_file), format='fortran') 5748 fsock.writelines("""subroutine %scoup%s() 5749 5750 implicit none 5751 include \'model_functions.inc\'"""%('mp_' if mp and not dp else '',nb_file)) 5752 if dp: 5753 fsock.writelines(""" 5754 double precision PI, ZERO 5755 parameter (PI=3.141592653589793d0) 5756 parameter (ZERO=0d0) 5757 include 'input.inc' 5758 include 'coupl.inc'""") 5759 if mp: 5760 fsock.writelines("""%s MP__PI, MP__ZERO 5761 parameter (MP__PI=3.1415926535897932384626433832795e0_16) 5762 parameter (MP__ZERO=0e0_16) 5763 include \'mp_input.inc\' 5764 include \'mp_coupl.inc\' 5765 """%self.mp_real_format) 5766 5767 for coupling in data: 5768 if dp: 5769 fsock.writelines('%s = %s' % (coupling.name, 5770 self.p_to_f.parse(coupling.expr))) 5771 if mp: 5772 fsock.writelines('%s%s = %s' % (self.mp_prefix,coupling.name, 5773 self.mp_p_to_f.parse(coupling.expr))) 5774 fsock.writelines('end')
5775
5776 - def create_model_functions_inc(self):
5777 """ Create model_functions.inc which contains the various declarations 5778 of auxiliary functions which might be used in the couplings expressions 5779 """ 5780 5781 additional_fct = [] 5782 # check for functions define in the UFO model 5783 ufo_fct = self.model.get('functions') 5784 if ufo_fct: 5785 for fct in ufo_fct: 5786 # already handle by default 5787 if fct.name not in ["complexconjugate", "re", "im", "sec", 5788 "csc", "asec", "acsc", "theta_function", "cond", 5789 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot"]: 5790 additional_fct.append(fct.name) 5791 5792 5793 fsock = self.open('model_functions.inc', format='fortran') 5794 fsock.writelines("""double complex cond 5795 double complex condif 5796 double complex reglog 5797 double complex reglogp 5798 double complex reglogm 5799 double complex recms 5800 double complex arg 5801 %s 5802 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 5803 5804 5805 if self.opt['mp']: 5806 fsock.writelines("""%(complex_mp_format)s mp_cond 5807 %(complex_mp_format)s mp_condif 5808 %(complex_mp_format)s mp_reglog 5809 %(complex_mp_format)s mp_reglogp 5810 %(complex_mp_format)s mp_reglogm 5811 %(complex_mp_format)s mp_recms 5812 %(complex_mp_format)s mp_arg 5813 %(additional)s 5814 """ %\ 5815 {"additional": "\n".join([" %s %s" % (self.mp_complex_format, i) for i in additional_fct]), 5816 'complex_mp_format':self.mp_complex_format 5817 })
5818
5819 - def create_model_functions_def(self):
5820 """ Create model_functions.f which contains the various definitions 5821 of auxiliary functions which might be used in the couplings expressions 5822 Add the functions.f functions for formfactors support 5823 """ 5824 5825 fsock = self.open('model_functions.f', format='fortran') 5826 fsock.writelines("""double complex function cond(condition,truecase,falsecase) 5827 implicit none 5828 double complex condition,truecase,falsecase 5829 if(condition.eq.(0.0d0,0.0d0)) then 5830 cond=truecase 5831 else 5832 cond=falsecase 5833 endif 5834 end 5835 5836 double complex function condif(condition,truecase,falsecase) 5837 implicit none 5838 logical condition 5839 double complex truecase,falsecase 5840 if(condition) then 5841 condif=truecase 5842 else 5843 condif=falsecase 5844 endif 5845 end 5846 5847 double complex function recms(condition,expr) 5848 implicit none 5849 logical condition 5850 double complex expr 5851 if(condition)then 5852 recms=expr 5853 else 5854 recms=dcmplx(dble(expr)) 5855 endif 5856 end 5857 5858 double complex function reglog(arg) 5859 implicit none 5860 double complex TWOPII 5861 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5862 double complex arg 5863 if(arg.eq.(0.0d0,0.0d0)) then 5864 reglog=(0.0d0,0.0d0) 5865 else 5866 reglog=log(arg) 5867 endif 5868 end 5869 5870 double complex function reglogp(arg) 5871 implicit none 5872 double complex TWOPII 5873 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5874 double complex arg 5875 if(arg.eq.(0.0d0,0.0d0))then 5876 reglogp=(0.0d0,0.0d0) 5877 else 5878 if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then 5879 reglogp=log(arg) + TWOPII 5880 else 5881 reglogp=log(arg) 5882 endif 5883 endif 5884 end 5885 5886 double complex function reglogm(arg) 5887 implicit none 5888 double complex TWOPII 5889 parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0)) 5890 double complex arg 5891 if(arg.eq.(0.0d0,0.0d0))then 5892 reglogm=(0.0d0,0.0d0) 5893 else 5894 if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then 5895 reglogm=log(arg) - TWOPII 5896 else 5897 reglogm=log(arg) 5898 endif 5899 endif 5900 end 5901 5902 double complex function arg(comnum) 5903 implicit none 5904 double complex comnum 5905 double complex iim 5906 iim = (0.0d0,1.0d0) 5907 if(comnum.eq.(0.0d0,0.0d0)) then 5908 arg=(0.0d0,0.0d0) 5909 else 5910 arg=log(comnum/abs(comnum))/iim 5911 endif 5912 end""") 5913 if self.opt['mp']: 5914 fsock.writelines(""" 5915 5916 %(complex_mp_format)s function mp_cond(condition,truecase,falsecase) 5917 implicit none 5918 %(complex_mp_format)s condition,truecase,falsecase 5919 if(condition.eq.(0.0e0_16,0.0e0_16)) then 5920 mp_cond=truecase 5921 else 5922 mp_cond=falsecase 5923 endif 5924 end 5925 5926 %(complex_mp_format)s function mp_condif(condition,truecase,falsecase) 5927 implicit none 5928 logical condition 5929 %(complex_mp_format)s truecase,falsecase 5930 if(condition) then 5931 mp_condif=truecase 5932 else 5933 mp_condif=falsecase 5934 endif 5935 end 5936 5937 %(complex_mp_format)s function mp_recms(condition,expr) 5938 implicit none 5939 logical condition 5940 %(complex_mp_format)s expr 5941 if(condition)then 5942 mp_recms=expr 5943 else 5944 mp_recms=cmplx(real(expr),kind=16) 5945 endif 5946 end 5947 5948 %(complex_mp_format)s function mp_reglog(arg) 5949 implicit none 5950 %(complex_mp_format)s TWOPII 5951 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5952 %(complex_mp_format)s arg 5953 if(arg.eq.(0.0e0_16,0.0e0_16)) then 5954 mp_reglog=(0.0e0_16,0.0e0_16) 
5955 else 5956 mp_reglog=log(arg) 5957 endif 5958 end 5959 5960 %(complex_mp_format)s function mp_reglogp(arg) 5961 implicit none 5962 %(complex_mp_format)s TWOPII 5963 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5964 %(complex_mp_format)s arg 5965 if(arg.eq.(0.0e0_16,0.0e0_16))then 5966 mp_reglogp=(0.0e0_16,0.0e0_16) 5967 else 5968 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then 5969 mp_reglogp=log(arg) + TWOPII 5970 else 5971 mp_reglogp=log(arg) 5972 endif 5973 endif 5974 end 5975 5976 %(complex_mp_format)s function mp_reglogm(arg) 5977 implicit none 5978 %(complex_mp_format)s TWOPII 5979 parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16)) 5980 %(complex_mp_format)s arg 5981 if(arg.eq.(0.0e0_16,0.0e0_16))then 5982 mp_reglogm=(0.0e0_16,0.0e0_16) 5983 else 5984 if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then 5985 mp_reglogm=log(arg) - TWOPII 5986 else 5987 mp_reglogm=log(arg) 5988 endif 5989 endif 5990 end 5991 5992 %(complex_mp_format)s function mp_arg(comnum) 5993 implicit none 5994 %(complex_mp_format)s comnum 5995 %(complex_mp_format)s imm 5996 imm = (0.0e0_16,1.0e0_16) 5997 if(comnum.eq.(0.0e0_16,0.0e0_16)) then 5998 mp_arg=(0.0e0_16,0.0e0_16) 5999 else 6000 mp_arg=log(comnum/abs(comnum))/imm 6001 endif 6002 end"""%{'complex_mp_format':self.mp_complex_format}) 6003 6004 6005 #check for the file functions.f 6006 model_path = self.model.get('modelpath') 6007 if os.path.exists(pjoin(model_path,'Fortran','functions.f')): 6008 fsock.write_comment_line(' USER DEFINE FUNCTIONS ') 6009 input = pjoin(model_path,'Fortran','functions.f') 6010 file.writelines(fsock, open(input).read()) 6011 fsock.write_comment_line(' END USER DEFINE FUNCTIONS ') 6012 6013 # check for functions define in the UFO model 6014 ufo_fct = self.model.get('functions') 6015 if ufo_fct: 6016 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ') 6017 for fct in ufo_fct: 6018 # already handle by default 6019 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif", 6020 "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms","arg"]: 6021 ufo_fct_template = """ 6022 double complex function %(name)s(%(args)s) 6023 implicit none 6024 double complex %(args)s 6025 %(name)s = %(fct)s 6026 6027 return 6028 end 6029 """ 6030 text = ufo_fct_template % { 6031 'name': fct.name, 6032 'args': ", ".join(fct.arguments), 6033 'fct': self.p_to_f.parse(fct.expr) 6034 } 6035 fsock.writelines(text) 6036 if self.opt['mp']: 6037 fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP') 6038 for fct in ufo_fct: 6039 # already handle by default 6040 if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc","condif", 6041 "theta_function", "cond", "reglog", "reglogp","reglogm", "recms","arg"]: 6042 ufo_fct_template = """ 6043 %(complex_mp_format)s function mp__%(name)s(mp__%(args)s) 6044 implicit none 6045 %(complex_mp_format)s mp__%(args)s 6046 mp__%(name)s = %(fct)s 6047 6048 return 6049 end 6050 """ 6051 text = ufo_fct_template % { 6052 'name': fct.name, 6053 'args': ", mp__".join(fct.arguments), 6054 'fct': self.mp_p_to_f.parse(fct.expr), 6055 'complex_mp_format': self.mp_complex_format 6056 } 6057 fsock.writelines(text) 6058 6059 6060 6061 fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6062 6063 6064
6065 - def create_makeinc(self):
6066 """create makeinc.inc containing the file to compile """ 6067 6068 fsock = self.open('makeinc.inc', comment='#') 6069 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6070 text += ' model_functions.o ' 6071 6072 nb_coup_indep = 1 + len(self.coups_dep) // 25 6073 nb_coup_dep = 1 + len(self.coups_indep) // 25 6074 couplings_files=['couplings%s.o' % (i+1) \ 6075 for i in range(nb_coup_dep + nb_coup_indep) ] 6076 if self.opt['mp']: 6077 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6078 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6079 text += ' '.join(couplings_files) 6080 fsock.writelines(text)
6081
6082 - def create_param_write(self):
6083 """ create param_write """ 6084 6085 fsock = self.open('param_write.inc', format='fortran') 6086 6087 fsock.writelines("""write(*,*) ' External Params' 6088 write(*,*) ' ---------------------------------' 6089 write(*,*) ' '""") 6090 def format(name): 6091 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6092 6093 # Write the external parameter 6094 lines = [format(param.name) for param in self.params_ext] 6095 fsock.writelines('\n'.join(lines)) 6096 6097 fsock.writelines("""write(*,*) ' Internal Params' 6098 write(*,*) ' ---------------------------------' 6099 write(*,*) ' '""") 6100 lines = [format(data.name) for data in self.params_indep 6101 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6102 fsock.writelines('\n'.join(lines)) 6103 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6104 write(*,*) ' ----------------------------------------' 6105 write(*,*) ' '""") 6106 lines = [format(data.name) for data in self.params_dep \ 6107 if self.check_needed_param(data.name)] 6108 6109 fsock.writelines('\n'.join(lines)) 6110 6111 6112
6113 - def create_ident_card(self):
6114 """ create the ident_card.dat """ 6115 6116 def format(parameter): 6117 """return the line for the ident_card corresponding to this parameter""" 6118 colum = [parameter.lhablock.lower()] + \ 6119 [str(value) for value in parameter.lhacode] + \ 6120 [parameter.name] 6121 if not parameter.name: 6122 return '' 6123 return ' '.join(colum)+'\n'
6124 6125 fsock = self.open('ident_card.dat') 6126 6127 external_param = [format(param) for param in self.params_ext] 6128 fsock.writelines('\n'.join(external_param)) 6129
6130 - def create_actualize_mp_ext_param_inc(self):
6131 """ create the actualize_mp_ext_params.inc code """ 6132 6133 # In principle one should actualize all external parameters, but for now 6134 # it is hardcoded that only AS and MU_R can be dynamically changed by the 6135 # user, so we only update those ones. 6136 # Of course, to be on the safe side, one could decide to update all 6137 # external parameters. 6138 update_params_list=[p for p in self.params_ext if p.name in 6139 self.PS_dependent_key] 6140 6141 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6142 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6143 for param in update_params_list] 6144 # When read_lha is false, it is G which is taken as input and not AS, so 6145 # this is what should be reset here too. 6146 if 'aS' in [param.name for param in update_params_list]: 6147 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6148 6149 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6150 fsock.writelines('\n'.join(res_strings))
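The resulting include file only copies the PS-dependent external parameters (plus G when aS is among them) into their multiple-precision counterparts; a sketch assuming the usual 'MP__' prefix:

mp_prefix = 'MP__'                      # assumed default value of ParamCard.mp_prefix
update_params = ['aS', 'MU_R']
lines = ['%s%s=%s' % (mp_prefix, name, name) for name in update_params]
if 'aS' in update_params:
    lines.append('%sG=G' % mp_prefix)
print('\n'.join(lines))
# MP__aS=aS
# MP__MU_R=MU_R
# MP__G=G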
6151
6152 - def create_param_read(self):
6153 """create param_read""" 6154 6155 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6156 or self.opt['loop_induced']: 6157 fsock = self.open('param_read.inc', format='fortran') 6158 fsock.writelines(' include \'../param_card.inc\'') 6159 return 6160 6161 def format_line(parameter): 6162 """return the line for the ident_card corresponding to this 6163 parameter""" 6164 template = \ 6165 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6166 % {'name': parameter.name, 6167 'value': self.p_to_f.parse(str(parameter.value.real))} 6168 if self.opt['mp']: 6169 template = template+ \ 6170 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6171 "%(mp_prefix)s%(name)s,%(value)s)") \ 6172 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6173 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6174 return template 6175 6176 fsock = self.open('param_read.inc', format='fortran') 6177 res_strings = [format_line(param) \ 6178 for param in self.params_ext] 6179 6180 # Correct width sign for Majorana particles (where the width 6181 # and mass need to have the same sign) 6182 for particle in self.model.get('particles'): 6183 if particle.is_fermion() and particle.get('self_antipart') and \ 6184 particle.get('width').lower() != 'zero': 6185 6186 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6187 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6188 if self.opt['mp']: 6189 res_strings.append(\ 6190 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6191 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6192 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6193 6194 fsock.writelines('\n'.join(res_strings)) 6195 6196 6197 @staticmethod
6198 - def create_param_card_static(model, output_path, rule_card_path=False, 6199 mssm_convert=True):
6200 """ create the param_card.dat for a given model --static method-- """ 6201 #1. Check if a default param_card is present: 6202 done = False 6203 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6204 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6205 model_path = model.get('modelpath') 6206 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6207 done = True 6208 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6209 output_path) 6210 if not done: 6211 param_writer.ParamCardWriter(model, output_path) 6212 6213 if rule_card_path: 6214 if hasattr(model, 'rule_card'): 6215 model.rule_card.write_file(rule_card_path) 6216 6217 if mssm_convert: 6218 model_name = model.get('name') 6219 # If MSSM, convert the card to SLHA1 6220 if model_name == 'mssm' or model_name.startswith('mssm-'): 6221 import models.check_param_card as translator 6222 # Check the format of the param_card for Pythia and make it correct 6223 if rule_card_path: 6224 translator.make_valid_param_card(output_path, rule_card_path) 6225 translator.convert_to_slha1(output_path)
6226
6227 - def create_param_card(self):
6228 """ create the param_card.dat """ 6229 6230 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6231 if not os.path.exists(rule_card): 6232 rule_card=False 6233 self.create_param_card_static(self.model, 6234 output_path=pjoin(self.dir_path, 'param_card.dat'), 6235 rule_card_path=rule_card, 6236 mssm_convert=True)
6237
6238 -def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True):
6239 """ Determine which Export_v4 class is required. cmd is the command 6240 interface containing all potential usefull information. 6241 The output_type argument specifies from which context the output 6242 is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output 6243 and 'default' for tree-level outputs.""" 6244 6245 opt = cmd.options 6246 6247 # ========================================================================== 6248 # First check whether Ninja must be installed. 6249 # Ninja would only be required if: 6250 # a) Loop optimized output is selected 6251 # b) the process gathered from the amplitude generated use loops 6252 6253 if len(cmd._curr_amps)>0: 6254 try: 6255 curr_proc = cmd._curr_amps[0].get('process') 6256 except base_objects.PhysicsObject.PhysicsObjectError: 6257 curr_proc = None 6258 elif hasattr(cmd,'_fks_multi_proc') and \ 6259 len(cmd._fks_multi_proc.get('process_definitions'))>0: 6260 curr_proc = cmd._fks_multi_proc.get('process_definitions')[0] 6261 else: 6262 curr_proc = None 6263 6264 requires_ninja = opt['loop_optimized_output'] and (not curr_proc is None) and \ 6265 (curr_proc.get('perturbation_couplings') != [] and \ 6266 not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly']) 6267 # An installation is required then, but only if the specified path is the 6268 # default local one and that the Ninja library appears missing. 6269 if requires_ninja and (not opt['ninja'] is None) and\ 6270 os.path.abspath(opt['ninja'])==pjoin(MG5DIR,'HEPTools','lib') and\ 6271 not os.path.isfile(pjoin(MG5DIR,'HEPTools','lib','libninja.a')): 6272 # Then install Ninja here from the tarballs in the vendor 6273 # directory so that it would work offline too. 6274 logger.info( 6275 """MG5aMC will now install the loop reduction tool 'Ninja' from the local offline installer. 6276 Use the command 'install ninja' if you want to update to the latest online version. 6277 This installation can take some time but only needs to be performed once.""",'$MG:color:GREEN') 6278 try: 6279 cmd.do_install('ninja',paths={'HEPToolsInstaller': 6280 pjoin(MG5DIR,'vendor','OfflineHEPToolsInstaller.tar.gz')}, 6281 additional_options=[ 6282 '--ninja_tarball=%s'%pjoin(MG5DIR,'vendor','ninja.tar.gz'), 6283 '--oneloop_tarball=%s'%pjoin(MG5DIR,'vendor','oneloop.tar.gz')]) 6284 except InvalidCmd: 6285 logger.warning( 6286 """The offline installation of Ninja was unsuccessful, and MG5aMC disabled it. 
6287 In the future, if you want to reactivate Ninja, you can do so by re-attempting 6288 its online installation with the command 'install ninja' or install it on your 6289 own and set the path to its library in the MG5aMC option 'ninja'.""") 6290 cmd.exec_cmd("set ninja ''") 6291 cmd.exec_cmd('save options') 6292 6293 6294 # ========================================================================== 6295 # First treat the MadLoop5 standalone case 6296 MadLoop_SA_options = {'clean': not noclean, 6297 'complex_mass':cmd.options['complex_mass_scheme'], 6298 'export_format':'madloop', 6299 'mp':True, 6300 'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'), 6301 'cuttools_dir': cmd._cuttools_dir, 6302 'iregi_dir':cmd._iregi_dir, 6303 'pjfry_dir':cmd.options['pjfry'], 6304 'golem_dir':cmd.options['golem'], 6305 'samurai_dir':cmd.options['samurai'], 6306 'ninja_dir':cmd.options['ninja'], 6307 'fortran_compiler':cmd.options['fortran_compiler'], 6308 'f2py_compiler':cmd.options['f2py_compiler'], 6309 'output_dependencies':cmd.options['output_dependencies'], 6310 'SubProc_prefix':'P', 6311 'compute_color_flows':cmd.options['loop_color_flows'], 6312 'mode': 'reweight' if cmd._export_format == "standalone_rw" else '', 6313 'cluster_local_path': cmd.options['cluster_local_path'] 6314 } 6315 6316 if output_type.startswith('madloop'): 6317 import madgraph.loop.loop_exporters as loop_exporters 6318 if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')): 6319 ExporterClass=None 6320 if not cmd.options['loop_optimized_output']: 6321 ExporterClass=loop_exporters.LoopProcessExporterFortranSA 6322 else: 6323 if output_type == "madloop": 6324 ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA 6325 MadLoop_SA_options['export_format'] = 'madloop_optimized' 6326 elif output_type == "madloop_matchbox": 6327 ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox 6328 MadLoop_SA_options['export_format'] = 'madloop_matchbox' 6329 else: 6330 raise Exception, "output_type not recognize %s" % output_type 6331 return ExporterClass(cmd._mgme_dir, cmd._export_dir, MadLoop_SA_options) 6332 else: 6333 raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\ 6334 ' in %s'%str(cmd._mgme_dir)) 6335 6336 # Then treat the aMC@NLO output 6337 elif output_type=='amcatnlo': 6338 import madgraph.iolibs.export_fks as export_fks 6339 ExporterClass=None 6340 amcatnlo_options = dict(opt) 6341 amcatnlo_options.update(MadLoop_SA_options) 6342 amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0 6343 if not cmd.options['loop_optimized_output']: 6344 logger.info("Writing out the aMC@NLO code") 6345 ExporterClass = export_fks.ProcessExporterFortranFKS 6346 amcatnlo_options['export_format']='FKS5_default' 6347 else: 6348 logger.info("Writing out the aMC@NLO code, using optimized Loops") 6349 ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS 6350 amcatnlo_options['export_format']='FKS5_optimized' 6351 return ExporterClass(cmd._mgme_dir, cmd._export_dir, amcatnlo_options) 6352 6353 # Then the default tree-level output 6354 elif output_type=='default': 6355 assert group_subprocesses in [True, False] 6356 6357 opt = dict(opt) 6358 opt.update({'clean': not noclean, 6359 'complex_mass': cmd.options['complex_mass_scheme'], 6360 'export_format':cmd._export_format, 6361 'mp': False, 6362 'sa_symmetry':False, 6363 'model': cmd._curr_model.get('name'), 6364 'v5_model': False if cmd._model_v4_path else True }) 6365 6366 format = 
6367 
6368         if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
6369             opt['sa_symmetry'] = True
6370 
6371         loop_induced_opt = dict(opt)
6372         loop_induced_opt.update(MadLoop_SA_options)
6373         loop_induced_opt['export_format'] = 'madloop_optimized'
6374         loop_induced_opt['SubProc_prefix'] = 'PV'
6375         # For loop_induced output with MadEvent, we must have access to the
6376         # color flows.
6377         loop_induced_opt['compute_color_flows'] = True
6378         for key in opt:
6379             if key not in loop_induced_opt:
6380                 loop_induced_opt[key] = opt[key]
6381 
6382         if format == 'matrix' or format.startswith('standalone'):
6383             return ProcessExporterFortranSA(cmd._mgme_dir, cmd._export_dir, opt,
6384                                             format=format)
6385 
6386         elif format in ['madevent'] and group_subprocesses:
6387             if isinstance(cmd._curr_amps[0],
6388                           loop_diagram_generation.LoopAmplitude):
6389                 import madgraph.loop.loop_exporters as loop_exporters
6390                 return loop_exporters.LoopInducedExporterMEGroup(cmd._mgme_dir,
6391                                              cmd._export_dir,loop_induced_opt)
6392             else:
6393                 return ProcessExporterFortranMEGroup(cmd._mgme_dir,
6394                                                      cmd._export_dir,opt)
6395         elif format in ['madevent']:
6396             if isinstance(cmd._curr_amps[0],
6397                           loop_diagram_generation.LoopAmplitude):
6398                 import madgraph.loop.loop_exporters as loop_exporters
6399                 return loop_exporters.LoopInducedExporterMENoGroup(cmd._mgme_dir,
6400                                              cmd._export_dir,loop_induced_opt)
6401             else:
6402                 return ProcessExporterFortranME(cmd._mgme_dir,
6403                                                 cmd._export_dir,opt)
6404         elif format in ['matchbox']:
6405             return ProcessExporterFortranMatchBox(cmd._mgme_dir, cmd._export_dir,opt)
6406         elif cmd._export_format in ['madweight'] and group_subprocesses:
6407 
6408             return ProcessExporterFortranMWGroup(cmd._mgme_dir, cmd._export_dir,
6409                                                  opt)
6410         elif cmd._export_format in ['madweight']:
6411             return ProcessExporterFortranMW(cmd._mgme_dir, cmd._export_dir, opt)
6412         else:
6413             raise Exception, 'Wrong export_v4 format'
6414     else:
6415         raise MadGraph5Error, 'Output type %s not recognized in ExportV4Factory.' % output_type
6416
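# A minimal usage sketch for the factory above. The call signature
# ExportV4Factory(cmd, noclean, output_type, group_subprocesses) is inferred
# from the variables used in its body and from the error message on its last
# line, so treat it as an assumption rather than a documented API:
#
#     exporter = ExportV4Factory(cmd, noclean=False,
#                                output_type='default',
#                                group_subprocesses=True)
#
# 'default' selects a tree-level exporter (e.g. ProcessExporterFortranME or
# ProcessExporterFortranMEGroup), 'madloop'/'madloop_matchbox' a MadLoop5
# standalone exporter, and 'amcatnlo' an FKS5 exporter, as coded above.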
6417 6418 6419 6420 #=============================================================================== 6421 # ProcessExporterFortranMWGroup 6422 #=============================================================================== 6423 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6424     """Class to take care of exporting a set of matrix elements to
6425     MadWeight subprocess group format."""
6426 
6427     matrix_file = "matrix_madweight_group_v4.inc"
6428 
6429     #===========================================================================
6430     # generate_subprocess_directory_v4
6431     #===========================================================================
6432 - def generate_subprocess_directory_v4(self, subproc_group, 6433 fortran_model, 6434 group_number):
6435 """Generate the Pn directory for a subprocess group in MadEvent, 6436 including the necessary matrix_N.f files, configs.inc and various 6437 other helper files""" 6438 6439 if not isinstance(subproc_group, group_subprocs.SubProcessGroup): 6440 raise base_objects.PhysicsObject.PhysicsObjectError,\ 6441 "subproc_group object not SubProcessGroup" 6442 6443 if not self.model: 6444 self.model = subproc_group.get('matrix_elements')[0].\ 6445 get('processes')[0].get('model') 6446 6447 pathdir = os.path.join(self.dir_path, 'SubProcesses') 6448 6449 # Create the directory PN in the specified path 6450 subprocdir = "P%d_%s" % (subproc_group.get('number'), 6451 subproc_group.get('name')) 6452 try: 6453 os.mkdir(pjoin(pathdir, subprocdir)) 6454 except os.error as error: 6455 logger.warning(error.strerror + " " + subprocdir) 6456 6457 6458 logger.info('Creating files in directory %s' % subprocdir) 6459 Ppath = pjoin(pathdir, subprocdir) 6460 6461 # Create the matrix.f files, auto_dsig.f files and all inc files 6462 # for all subprocesses in the group 6463 6464 maxamps = 0 6465 maxflows = 0 6466 tot_calls = 0 6467 6468 matrix_elements = subproc_group.get('matrix_elements') 6469 6470 for ime, matrix_element in \ 6471 enumerate(matrix_elements): 6472 filename = pjoin(Ppath, 'matrix%d.f' % (ime+1)) 6473 calls, ncolor = \ 6474 self.write_matrix_element_v4(writers.FortranWriter(filename), 6475 matrix_element, 6476 fortran_model, 6477 str(ime+1), 6478 subproc_group.get('diagram_maps')[\ 6479 ime]) 6480 6481 filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1)) 6482 self.write_auto_dsig_file(writers.FortranWriter(filename), 6483 matrix_element, 6484 str(ime+1)) 6485 6486 # Keep track of needed quantities 6487 tot_calls += int(calls) 6488 maxflows = max(maxflows, ncolor) 6489 maxamps = max(maxamps, len(matrix_element.get('diagrams'))) 6490 6491 # Draw diagrams 6492 filename = pjoin(Ppath, "matrix%d.ps" % (ime+1)) 6493 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 6494 get('diagrams'), 6495 filename, 6496 model = \ 6497 matrix_element.get('processes')[0].\ 6498 get('model'), 6499 amplitude=True) 6500 logger.info("Generating Feynman diagrams for " + \ 6501 matrix_element.get('processes')[0].nice_string()) 6502 plot.draw() 6503 6504 # Extract number of external particles 6505 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 6506 6507 # Generate a list of diagrams corresponding to each configuration 6508 # [[d1, d2, ...,dn],...] 
where 1,2,...,n is the subprocess number 6509 # If a subprocess has no diagrams for this config, the number is 0 6510 6511 subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs') 6512 6513 filename = pjoin(Ppath, 'auto_dsig.f') 6514 self.write_super_auto_dsig_file(writers.FortranWriter(filename), 6515 subproc_group) 6516 6517 filename = pjoin(Ppath,'configs.inc') 6518 nconfigs, s_and_t_channels = self.write_configs_file(\ 6519 writers.FortranWriter(filename), 6520 subproc_group, 6521 subproc_diagrams_for_config) 6522 6523 filename = pjoin(Ppath, 'leshouche.inc') 6524 self.write_leshouche_file(writers.FortranWriter(filename), 6525 subproc_group) 6526 6527 filename = pjoin(Ppath, 'phasespace.inc') 6528 self.write_phasespace_file(writers.FortranWriter(filename), 6529 nconfigs) 6530 6531 6532 filename = pjoin(Ppath, 'maxamps.inc') 6533 self.write_maxamps_file(writers.FortranWriter(filename), 6534 maxamps, 6535 maxflows, 6536 max([len(me.get('processes')) for me in \ 6537 matrix_elements]), 6538 len(matrix_elements)) 6539 6540 filename = pjoin(Ppath, 'mirrorprocs.inc') 6541 self.write_mirrorprocs(writers.FortranWriter(filename), 6542 subproc_group) 6543 6544 filename = pjoin(Ppath, 'nexternal.inc') 6545 self.write_nexternal_file(writers.FortranWriter(filename), 6546 nexternal, ninitial) 6547 6548 filename = pjoin(Ppath, 'pmass.inc') 6549 self.write_pmass_file(writers.FortranWriter(filename), 6550 matrix_element) 6551 6552 filename = pjoin(Ppath, 'props.inc') 6553 self.write_props_file(writers.FortranWriter(filename), 6554 matrix_element, 6555 s_and_t_channels) 6556 6557 # filename = pjoin(Ppath, 'processes.dat') 6558 # files.write_to_file(filename, 6559 # self.write_processes_file, 6560 # subproc_group) 6561 6562 # Generate jpgs -> pass in make_html 6563 #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl')) 6564 6565 linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f'] 6566 6567 for file in linkfiles: 6568 ln('../%s' % file, cwd=Ppath) 6569 6570 ln('nexternal.inc', '../../Source', cwd=Ppath, log=False) 6571 ln('leshouche.inc', '../../Source', cwd=Ppath, log=False) 6572 ln('maxamps.inc', '../../Source', cwd=Ppath, log=False) 6573 ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath) 6574 ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath) 6575 ln('phasespace.inc', '../', log=True, cwd=Ppath) 6576 if not tot_calls: 6577 tot_calls = 0 6578 return tot_calls
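    # Based on the calls above, a generated subprocess-group directory
    # P<N>_<name> is expected to contain, for each matrix element i, the files
    # matrix<i>.f, auto_dsig<i>.f and matrix<i>.ps, plus the group-level files
    # auto_dsig.f, configs.inc, leshouche.inc, phasespace.inc, maxamps.inc,
    # mirrorprocs.inc, nexternal.inc, pmass.inc and props.inc, together with
    # symbolic links to driver.f, cuts.f, initialization.f, gen_ps.f, makefile,
    # coupl.inc, madweight_param.inc, run.inc and setscales.f from the parent
    # SubProcesses directory.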
6579 6580 #=========================================================================== 6581 # write_super_auto_dsig_file 6582 #===========================================================================
6583 - def write_super_auto_dsig_file(self, writer, subproc_group):
6584 """Write the auto_dsig.f file selecting between the subprocesses 6585 in subprocess group mode""" 6586 6587 replace_dict = {} 6588 6589 # Extract version number and date from VERSION file 6590 info_lines = self.get_mg5_info_lines() 6591 replace_dict['info_lines'] = info_lines 6592 6593 matrix_elements = subproc_group.get('matrix_elements') 6594 6595 # Extract process info lines 6596 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 6597 matrix_elements]) 6598 replace_dict['process_lines'] = process_lines 6599 6600 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 6601 replace_dict['nexternal'] = nexternal 6602 6603 replace_dict['nsprocs'] = 2*len(matrix_elements) 6604 6605 # Generate dsig definition line 6606 dsig_def_line = "DOUBLE PRECISION " + \ 6607 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 6608 range(len(matrix_elements))]) 6609 replace_dict["dsig_def_line"] = dsig_def_line 6610 6611 # Generate dsig process lines 6612 call_dsig_proc_lines = [] 6613 for iproc in range(len(matrix_elements)): 6614 call_dsig_proc_lines.append(\ 6615 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 6616 {"num": iproc + 1, 6617 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 6618 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 6619 6620 file = open(os.path.join(_file_path, \ 6621 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 6622 file = file % replace_dict 6623 6624 # Write the file 6625 writer.writelines(file)
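    # As an illustration, with two matrix elements in the group the
    # substitutions above would produce lines of the form (process strings
    # replaced by placeholders here):
    #
    #     DOUBLE PRECISION DSIG1,DSIG2
    #     IF(IPROC.EQ.1) DSIGPROC=DSIG1(P1,WGT,IMODE) ! <process 1>
    #     IF(IPROC.EQ.2) DSIGPROC=DSIG2(P1,WGT,IMODE) ! <process 2>
    #
    # which the template super_auto_dsig_mw_group_v4.inc embeds into the final
    # auto_dsig.f.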
6626 6627 #=========================================================================== 6628 # write_mirrorprocs 6629 #===========================================================================
6630 - def write_mirrorprocs(self, writer, subproc_group):
6631         """Write the mirrorprocs.inc file determining which processes have
6632         an initial-state (IS) mirror process in subprocess group mode."""
6633 
6634         lines = []
6635         bool_dict = {True: '.true.', False: '.false.'}
6636         matrix_elements = subproc_group.get('matrix_elements')
6637         lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \
6638                      (len(matrix_elements),
6639                       ",".join([bool_dict[me.get('has_mirror_process')] for \
6640                                 me in matrix_elements])))
6641         # Write the file
6642         writer.writelines(lines)
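    # As an illustration, for a group of three subprocesses in which only the
    # second one has an initial-state mirror process, the line built above
    # would read:
    #
    #     DATA (MIRRORPROCS(I),I=1,3)/.false.,.true.,.false./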
6643 6644 #=========================================================================== 6645 # write_configs_file 6646 #===========================================================================
6647 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
6648 """Write the configs.inc file with topology information for a 6649 subprocess group. Use the first subprocess with a diagram for each 6650 configuration.""" 6651 6652 matrix_elements = subproc_group.get('matrix_elements') 6653 model = matrix_elements[0].get('processes')[0].get('model') 6654 6655 diagrams = [] 6656 config_numbers = [] 6657 for iconfig, config in enumerate(diagrams_for_config): 6658 # Check if any diagrams correspond to this config 6659 if set(config) == set([0]): 6660 continue 6661 subproc_diags = [] 6662 for s,d in enumerate(config): 6663 if d: 6664 subproc_diags.append(matrix_elements[s].\ 6665 get('diagrams')[d-1]) 6666 else: 6667 subproc_diags.append(None) 6668 diagrams.append(subproc_diags) 6669 config_numbers.append(iconfig + 1) 6670 6671 # Extract number of external particles 6672 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 6673 6674 return len(diagrams), \ 6675 self.write_configs_file_from_diagrams(writer, diagrams, 6676 config_numbers, 6677 nexternal, ninitial, 6678 matrix_elements[0],model)
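    # As an illustration (with a hypothetical input), if diagrams_for_config
    # were [[0, 0, 0], [2, 0, 1]], the first, all-zero configuration would be
    # skipped, while the second would map to [diagram 2 of subprocess 1, None,
    # diagram 1 of subprocess 3] and keep its original configuration number
    # (here 2) through config_numbers.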
6679 
6680     #===========================================================================
6681     # write_run_config_file
6682     #===========================================================================
6683 - def write_run_config_file(self, writer):
6684         """Write the run_configs.inc file for MadWeight"""
6685 
6686         path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc')
6687         text = open(path).read() % {'chanperjob':'2'}
6688         writer.write(text)
6689         return True
6690 6691 6692 #=========================================================================== 6693 # write_leshouche_file 6694 #===========================================================================
6695 - def write_leshouche_file(self, writer, subproc_group):
6696 """Write the leshouche.inc file for MG4""" 6697 6698 all_lines = [] 6699 6700 for iproc, matrix_element in \ 6701 enumerate(subproc_group.get('matrix_elements')): 6702 all_lines.extend(self.get_leshouche_lines(matrix_element, 6703 iproc)) 6704 6705 # Write the file 6706 writer.writelines(all_lines) 6707 6708 return True
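    # The exact format of these lines comes from get_leshouche_lines (defined
    # on a parent exporter class, not shown here); leshouche.inc conventionally
    # carries the MG4-style DATA statements for the IDUP, MOTHUP and ICOLUP
    # arrays of each subprocess.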
6709