Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
# Shortcut used throughout this module to build filesystem paths.
pjoin = os.path.join

# Root of the madgraph package (one level above this iolibs directory).
_file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/'
logger = logging.getLogger('madgraph.export_v4')

# Fallback compilers used when the user configuration does not provide one.
default_compiler= {'fortran': 'gfortran',
                       'f2py': 'f2py',
                       'cpp':'g++'}
class VirtualExporter(object):
    """Base class defining the interface madgraph expects from any process
    exporter. Daughter classes override the hooks they need; the defaults
    below are no-ops."""

    # exporter variables which modify the way madgraph interacts with this class

    grouped_mode = 'madevent'
    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions.
    # False to avoid grouping (only identical matrix element are merged)
    # 'madevent' group the massless quark and massless lepton
    # 'madweight' group the gluon with the massless quark

    sa_symmetry = False
    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix-element.

    check = True
    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case

    output = 'Template'
    # [Template, None, dir]
    # - Template, madgraph will call copy_template
    # - dir, madgraph will just create an empty directory for initialisation
    # - None, madgraph do nothing for initialisation

    exporter = 'v4'
    # language of the output 'v4' for Fortran output
    # 'cpp' for C++ output

    def __init__(self, dir_path="", opt=None):
        """opt is a dictionary with all the optional arguments passed at
        output time (stored by daughter classes; nothing to do here)."""
        return

    def copy_template(self, model):
        """Hook called to initialise the output directory from a template."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        # generate_subprocess_directory(self, matrix_element, helicity_model, me_number) [for ungrouped]
        return 0  # return an integer stating the number of calls to helicity routine

    def convert_model(self, model, wanted_lorentz=[], wanted_couplings=[]):
        """Hook called to write out the model files; no-op by default."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Hook called once all subprocess directories have been generated."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Return (modified_flag, matrix_element); no regrouping by default."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Fix: use the Exception(...) call form, valid in both Python 2 and 3.
        # The old "raise Exception, msg" statement was Python-2-only syntax,
        # and the 'return' that followed it was unreachable dead code.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        # Same Python-2-only raise syntax / unreachable return fixed here.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Options used when the caller does not override them at output time.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options':{}
                   }
    # This exporter does not group subprocesses (see VirtualExporter.grouped_mode).
    grouped_mode = False
148 - def __init__(self, dir_path = "", opt=None):
149 """Initiate the ProcessExporterFortran with directory information""" 150 self.mgme_dir = MG5DIR 151 self.dir_path = dir_path 152 self.model = None 153 154 self.opt = dict(self.default_opt) 155 if opt: 156 self.opt.update(opt) 157 158 self.cmd_options = self.opt['output_options'] 159 160 #place holder to pass information to the run_interface 161 self.proc_characteristic = banner_mod.ProcCharacteristic()
162 163 164 #=========================================================================== 165 # process exporter fortran switch between group and not grouped 166 #===========================================================================
167 - def export_processes(self, matrix_elements, fortran_model):
168 """Make the switch between grouped and not grouped output""" 169 170 calls = 0 171 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 172 for (group_number, me_group) in enumerate(matrix_elements): 173 calls = calls + self.generate_subprocess_directory(\ 174 me_group, fortran_model, group_number) 175 else: 176 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 177 calls = calls + self.generate_subprocess_directory(\ 178 me, fortran_model, me_number) 179 180 return calls
181 182 183 #=========================================================================== 184 # create the run_card 185 #===========================================================================
186 - def create_run_card(self, matrix_elements, history):
187 """ """ 188 189 190 # bypass this for the loop-check 191 import madgraph.loop.loop_helas_objects as loop_helas_objects 192 if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement): 193 matrix_elements = None 194 195 run_card = banner_mod.RunCard() 196 197 198 default=True 199 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 200 processes = [me.get('processes') for megroup in matrix_elements 201 for me in megroup['matrix_elements']] 202 elif matrix_elements: 203 processes = [me.get('processes') 204 for me in matrix_elements['matrix_elements']] 205 else: 206 default =False 207 208 if default: 209 run_card.create_default_for_process(self.proc_characteristic, 210 history, 211 processes) 212 213 214 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat')) 215 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'))
216 217 218 #=========================================================================== 219 # copy the Template in a new directory. 220 #===========================================================================
221 - def copy_template(self, model):
222 """create the directory run_name as a copy of the MadEvent 223 Template, and clean the directory 224 """ 225 226 #First copy the full template tree if dir_path doesn't exit 227 if not os.path.isdir(self.dir_path): 228 assert self.mgme_dir, \ 229 "No valid MG_ME path given for MG4 run directory creation." 230 logger.info('initialize a new directory: %s' % \ 231 os.path.basename(self.dir_path)) 232 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 233 self.dir_path, True) 234 # distutils.dir_util.copy_tree since dir_path already exists 235 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 236 self.dir_path) 237 # copy plot_card 238 for card in ['plot_card']: 239 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 240 try: 241 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 242 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 243 except IOError: 244 logger.warning("Failed to copy " + card + ".dat to default") 245 elif os.getcwd() == os.path.realpath(self.dir_path): 246 logger.info('working in local directory: %s' % \ 247 os.path.realpath(self.dir_path)) 248 # distutils.dir_util.copy_tree since dir_path already exists 249 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 250 self.dir_path) 251 # for name in misc.glob('Template/LO/*', self.mgme_dir): 252 # name = os.path.basename(name) 253 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 254 # if os.path.isfile(filename): 255 # files.cp(filename, pjoin(self.dir_path,name)) 256 # elif os.path.isdir(filename): 257 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 258 # distutils.dir_util.copy_tree since dir_path already exists 259 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 260 self.dir_path) 261 # Copy plot_card 262 for card in ['plot_card']: 263 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 264 try: 265 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 266 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 267 
except IOError: 268 logger.warning("Failed to copy " + card + ".dat to default") 269 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 270 assert self.mgme_dir, \ 271 "No valid MG_ME path given for MG4 run directory creation." 272 try: 273 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 274 except IOError: 275 MG5_version = misc.get_pkg_info() 276 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 277 278 #Ensure that the Template is clean 279 if self.opt['clean']: 280 logger.info('remove old information in %s' % \ 281 os.path.basename(self.dir_path)) 282 if os.environ.has_key('MADGRAPH_BASE'): 283 misc.call([pjoin('bin', 'internal', 'clean_template'), 284 '--web'], cwd=self.dir_path) 285 else: 286 try: 287 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 288 cwd=self.dir_path) 289 except Exception, why: 290 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 291 % (os.path.basename(self.dir_path),why)) 292 293 #Write version info 294 MG_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 296 MG_version['version']) 297 298 # add the makefile in Source directory 299 filename = pjoin(self.dir_path,'Source','makefile') 300 self.write_source_makefile(writers.FileWriter(filename)) 301 302 # add the DiscreteSampler information 303 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 304 pjoin(self.dir_path, 'Source')) 305 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 306 pjoin(self.dir_path, 'Source')) 307 308 # We need to create the correct open_data for the pdf 309 self.write_pdf_opendata()
310 311 312 #=========================================================================== 313 # Call MadAnalysis5 to generate the default cards for this process 314 #===========================================================================
315 - def create_default_madanalysis5_cards(self, history, proc_defs, processes, 316 ma5_path, output_dir, levels = ['parton','hadron']):
317 """ Call MA5 so that it writes default cards for both parton and 318 post-shower levels, tailored for this particular process.""" 319 320 if len(levels)==0: 321 return 322 start = time.time() 323 logger.info('Generating MadAnalysis5 default cards tailored to this process') 324 try: 325 MA5_interpreter = common_run_interface.CommonRunCmd.\ 326 get_MadAnalysis5_interpreter(MG5DIR,ma5_path,loglevel=100) 327 except (Exception, SystemExit) as e: 328 logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.') 329 return 330 if MA5_interpreter is None: 331 return 332 333 MA5_main = MA5_interpreter.main 334 for lvl in ['parton','hadron']: 335 if lvl in levels: 336 card_to_generate = pjoin(output_dir,'madanalysis5_%s_card_default.dat'%lvl) 337 try: 338 text = MA5_main.madgraph.generate_card(history, proc_defs, processes,lvl) 339 except (Exception, SystemExit) as e: 340 # keep the default card (skip only) 341 logger.warning('MadAnalysis5 failed to write a %s-level'%lvl+ 342 ' default analysis card for this process.') 343 logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.'%lvl) 344 error=StringIO() 345 traceback.print_exc(file=error) 346 logger.debug('MadAnalysis5 error was:') 347 logger.debug('-'*60) 348 logger.debug(error.getvalue()[:-1]) 349 logger.debug('-'*60) 350 else: 351 open(card_to_generate,'w').write(text) 352 stop = time.time() 353 if stop-start >1: 354 logger.info('Cards created in %.2fs' % (stop-start))
355 356 #=========================================================================== 357 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 358 #===========================================================================
359 - def write_procdef_mg5(self, file_pos, modelname, process_str):
360 """ write an equivalent of the MG4 proc_card in order that all the Madevent 361 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 362 363 proc_card_template = template_files.mg4_proc_card.mg4_template 364 process_template = template_files.mg4_proc_card.process_template 365 process_text = '' 366 coupling = '' 367 new_process_content = [] 368 369 370 # First find the coupling and suppress the coupling from process_str 371 #But first ensure that coupling are define whithout spaces: 372 process_str = process_str.replace(' =', '=') 373 process_str = process_str.replace('= ', '=') 374 process_str = process_str.replace(',',' , ') 375 #now loop on the element and treat all the coupling 376 for info in process_str.split(): 377 if '=' in info: 378 coupling += info + '\n' 379 else: 380 new_process_content.append(info) 381 # Recombine the process_str (which is the input process_str without coupling 382 #info) 383 process_str = ' '.join(new_process_content) 384 385 #format the SubProcess 386 replace_dict = {'process': process_str, 387 'coupling': coupling} 388 process_text += process_template.substitute(replace_dict) 389 390 replace_dict = {'process': process_text, 391 'model': modelname, 392 'multiparticle':''} 393 text = proc_card_template.substitute(replace_dict) 394 395 if file_pos: 396 ff = open(file_pos, 'w') 397 ff.write(text) 398 ff.close() 399 else: 400 return replace_dict
401 402
    def pass_information_from_cmd(self, cmd):
        """Pass information for MA5"""

        # Keep a reference to the current process definitions; they are needed
        # later when generating the default MadAnalysis5 cards.
        self.proc_defs = cmd._curr_proc_defs
407 408 #=========================================================================== 409 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 410 #===========================================================================
    def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
        """Function to finalize v4 directory, for inheritance."""

        # Produce the run_card and the (optional) MadAnalysis5 cards.
        self.create_run_card(matrix_elements, history)
        self.create_MA5_cards(matrix_elements, history)
416
417 - def create_MA5_cards(self,matrix_elements,history):
418 """ A wrapper around the creation of the MA5 cards so that it can be 419 bypassed by daughter classes (i.e. in standalone).""" 420 if 'madanalysis5_path' in self.opt and not \ 421 self.opt['madanalysis5_path'] is None and not self.proc_defs is None: 422 processes = None 423 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 424 processes = [me.get('processes') for megroup in matrix_elements 425 for me in megroup['matrix_elements']] 426 elif matrix_elements: 427 processes = [me.get('processes') 428 for me in matrix_elements['matrix_elements']] 429 430 self.create_default_madanalysis5_cards( 431 history, self.proc_defs, processes, 432 self.opt['madanalysis5_path'], pjoin(self.dir_path,'Cards'), 433 levels = ['hadron','parton']) 434 435 for level in ['hadron','parton']: 436 # Copying these cards turn on the use of MadAnalysis5 by default. 437 if os.path.isfile(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level)): 438 shutil.copy(pjoin(self.dir_path,'Cards','madanalysis5_%s_card_default.dat'%level), 439 pjoin(self.dir_path,'Cards','madanalysis5_%s_card.dat'%level))
440 441 #=========================================================================== 442 # Create the proc_characteristic file passing information to the run_interface 443 #===========================================================================
    def create_proc_charac(self, matrix_elements=None, history="", **opts):
        """Dump the proc_characteristics file that passes process information
        to the run_interface (matrix_elements/history are unused here)."""

        self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
447 448 #=========================================================================== 449 # write_matrix_element_v4 450 #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance.

        Daughter classes override this to produce the actual matrix-element
        Fortran code; the base implementation does nothing.
        """
        pass
455 456 #=========================================================================== 457 # write_pdf_opendata 458 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        if not self.opt["cluster_local_path"]:
            # No cluster-local repository configured: leave the template hook empty.
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet probing each candidate location of the PDF table
            # on the cluster node; the numeric labels chain the open() error
            # handlers from one candidate path to the next.
            to_add = """
            tempname='%(path)s'//Tablefile
            open(IU,file=tempname,status='old',ERR=1)
            return
 1          tempname='%(path)s/Pdfdata/'//Tablefile
            open(IU,file=tempname,status='old',ERR=2)
            return
 2          tempname='%(path)s/lhapdf'//Tablefile
            open(IU,file=tempname,status='old',ERR=3)
            return
 3          tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
            open(IU,file=tempname,status='old',ERR=4)
            return
 4          tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
            open(IU,file=tempname,status='old',ERR=5)
            return
            """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}


        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"),"r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            # Candidate locations for the LHAPDF set directory, first match wins.
            to_add="""
            LHAPath='%(path)s/PDFsets'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='%(path)s/../lhapdf/pdfsets/'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='./PDFsets'
            """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"),"r").read()
        ff.writelines(template % changer)


        return
516 517 518 519 #=========================================================================== 520 # write_maxparticles_file 521 #===========================================================================
522 - def write_maxparticles_file(self, writer, matrix_elements):
523 """Write the maxparticles.inc file for MadEvent""" 524 525 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 526 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 527 matrix_elements.get('matrix_elements')]) 528 else: 529 maxparticles = max([me.get_nexternal_ninitial()[0] \ 530 for me in matrix_elements]) 531 532 lines = "integer max_particles\n" 533 lines += "parameter(max_particles=%d)" % maxparticles 534 535 # Write the file 536 writer.writelines(lines) 537 538 return True
539 540 541 #=========================================================================== 542 # export the model 543 #===========================================================================
544 - def export_model_files(self, model_path):
545 """Configure the files/link of the process according to the model""" 546 547 # Import the model 548 for file in os.listdir(model_path): 549 if os.path.isfile(pjoin(model_path, file)): 550 shutil.copy2(pjoin(model_path, file), \ 551 pjoin(self.dir_path, 'Source', 'MODEL'))
552 553 567 575 576 577 #=========================================================================== 578 # export the helas routine 579 #===========================================================================
580 - def export_helas(self, helas_path):
581 """Configure the files/link of the process according to the model""" 582 583 # Import helas routine 584 for filename in os.listdir(helas_path): 585 filepos = pjoin(helas_path, filename) 586 if os.path.isfile(filepos): 587 if filepos.endswith('Makefile.template'): 588 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 589 elif filepos.endswith('Makefile'): 590 pass 591 else: 592 cp(filepos, self.dir_path + '/Source/DHELAS')
593 # following lines do the same but whithout symbolic link 594 # 595 #def export_helas(mgme_dir, dir_path): 596 # 597 # # Copy the HELAS directory 598 # helas_dir = pjoin(mgme_dir, 'HELAS') 599 # for filename in os.listdir(helas_dir): 600 # if os.path.isfile(pjoin(helas_dir, filename)): 601 # shutil.copy2(pjoin(helas_dir, filename), 602 # pjoin(dir_path, 'Source', 'DHELAS')) 603 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 604 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 605 # 606 607 #=========================================================================== 608 # generate_subprocess_directory 609 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Routine to generate a subprocess directory (for inheritance).

        Daughter classes implement the actual directory creation; the base
        implementation does nothing.
        """

        pass
616 617 #=========================================================================== 618 # get_source_libraries_list 619 #===========================================================================
620 - def get_source_libraries_list(self):
621 """ Returns the list of libraries to be compiling when compiling the 622 SOURCE directory. It is different for loop_induced processes and 623 also depends on the value of the 'output_dependencies' option""" 624 625 return ['$(LIBDIR)libdhelas.$(libext)', 626 '$(LIBDIR)libpdf.$(libext)', 627 '$(LIBDIR)libmodel.$(libext)', 628 '$(LIBDIR)libcernlib.$(libext)', 629 '$(LIBDIR)libbias.$(libext)']
630 631 #=========================================================================== 632 # write_source_makefile 633 #===========================================================================
634 - def write_source_makefile(self, writer):
635 """Write the nexternal.inc file for MG4""" 636 637 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 638 set_of_lib = ' '.join(['$(LIBRARIES)']+self.get_source_libraries_list()) 639 if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'): 640 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 641 MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param 642 param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n''' 643 else: 644 model_line='''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make 645 param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n''' 646 647 replace_dict= {'libraries': set_of_lib, 648 'model':model_line, 649 'additional_dsample': '', 650 'additional_dependencies':''} 651 652 if writer: 653 text = open(path).read() % replace_dict 654 writer.write(text) 655 656 return replace_dict
657 658 #=========================================================================== 659 # write_nexternal_madspin 660 #===========================================================================
661 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
662 """Write the nexternal_prod.inc file for madspin""" 663 664 replace_dict = {} 665 666 replace_dict['nexternal'] = nexternal 667 replace_dict['ninitial'] = ninitial 668 669 file = """ \ 670 integer nexternal_prod 671 parameter (nexternal_prod=%(nexternal)d) 672 integer nincoming_prod 673 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 674 675 # Write the file 676 if writer: 677 writer.writelines(file) 678 return True 679 else: 680 return replace_dict
681 682 #=========================================================================== 683 # write_helamp_madspin 684 #===========================================================================
685 - def write_helamp_madspin(self, writer, ncomb):
686 """Write the helamp.inc file for madspin""" 687 688 replace_dict = {} 689 690 replace_dict['ncomb'] = ncomb 691 692 file = """ \ 693 integer ncomb1 694 parameter (ncomb1=%(ncomb)d) 695 double precision helamp(ncomb1) 696 common /to_helamp/helamp """ % replace_dict 697 698 # Write the file 699 if writer: 700 writer.writelines(file) 701 return True 702 else: 703 return replace_dict
704 705 706 707 #=========================================================================== 708 # write_nexternal_file 709 #===========================================================================
710 - def write_nexternal_file(self, writer, nexternal, ninitial):
711 """Write the nexternal.inc file for MG4""" 712 713 replace_dict = {} 714 715 replace_dict['nexternal'] = nexternal 716 replace_dict['ninitial'] = ninitial 717 718 file = """ \ 719 integer nexternal 720 parameter (nexternal=%(nexternal)d) 721 integer nincoming 722 parameter (nincoming=%(ninitial)d)""" % replace_dict 723 724 # Write the file 725 if writer: 726 writer.writelines(file) 727 return True 728 else: 729 return replace_dict
730 #=========================================================================== 731 # write_pmass_file 732 #===========================================================================
733 - def write_pmass_file(self, writer, matrix_element):
734 """Write the pmass.inc file for MG4""" 735 736 model = matrix_element.get('processes')[0].get('model') 737 738 lines = [] 739 for wf in matrix_element.get_external_wavefunctions(): 740 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 741 if mass.lower() != "zero": 742 mass = "abs(%s)" % mass 743 744 lines.append("pmass(%d)=%s" % \ 745 (wf.get('number_external'), mass)) 746 747 # Write the file 748 writer.writelines(lines) 749 750 return True
751 752 #=========================================================================== 753 # write_ngraphs_file 754 #===========================================================================
755 - def write_ngraphs_file(self, writer, nconfigs):
756 """Write the ngraphs.inc file for MG4. Needs input from 757 write_configs_file.""" 758 759 file = " integer n_max_cg\n" 760 file = file + "parameter (n_max_cg=%d)" % nconfigs 761 762 # Write the file 763 writer.writelines(file) 764 765 return True
766 767 #=========================================================================== 768 # write_leshouche_file 769 #===========================================================================
770 - def write_leshouche_file(self, writer, matrix_element):
771 """Write the leshouche.inc file for MG4""" 772 773 # Write the file 774 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 775 776 return True
777 778 #=========================================================================== 779 # get_leshouche_lines 780 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Write the leshouche.inc file for MG4"""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG identities (IDUP) of all external legs for this process.
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mother information (MOTHUP): 0 for initial-state legs,
                # mother index i for final-state legs.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                             (i, nexternal,
                              ",".join([ "%3r" % 0 ] * ninitial + \
                                       [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                 (i, numproc+1,nexternal,
                                  ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                     (i + 1, cf_i + 1, numproc+1, nexternal,
                                      ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                for l in legs])))

        return lines
830 831 832 833 834 #=========================================================================== 835 # write_maxamps_file 836 #===========================================================================
837 - def write_maxamps_file(self, writer, maxamps, maxflows, 838 maxproc,maxsproc):
839 """Write the maxamps.inc file for MG4.""" 840 841 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 842 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 843 (maxamps, maxflows) 844 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 845 (maxproc, maxsproc) 846 847 # Write the file 848 writer.writelines(file) 849 850 return True
851 852 853 #=========================================================================== 854 # Routines to output UFO models in MG4 format 855 #=========================================================================== 856
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Create a full valid MG4 model from a MG5 model (coming from UFO).

        Writes the MODEL and DHELAS source directories under self.dir_path,
        generating the requested ALOHA helicity-amplitude subroutines.
        NOTE(review): the mutable default arguments are never mutated here,
        but are a latent pitfall if a subclass appends to them.

        Arguments:
            model            -- the MG5/UFO model object to convert
            wanted_lorentz   -- subset of Lorentz structures to generate
                                (empty means generate all, without caching)
            wanted_couplings -- subset of couplings passed to the model builder
        """

        # Make sure aloha is in quadruple precision if needed; the old
        # setting is restored at the end of this method.
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']

        # create the MODEL (Fortran parameter/coupling files)
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines: only the requested subset if one was
        # given, otherwise everything (without caching the result).
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out as Fortran sources in Source/DHELAS
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        # copy Helas Template makefile and the aloha standalone functions;
        # the loop variant is needed as soon as any wanted Lorentz structure
        # carries an 'L' (loop) tag.
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        if any([any(['L' in tag for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
914 915 916 #=========================================================================== 917 # Helper functions 918 #===========================================================================
919 - def modify_grouping(self, matrix_element):
920 """allow to modify the grouping (if grouping is in place) 921 return two value: 922 - True/False if the matrix_element was modified 923 - the new(or old) matrix element""" 924 925 return False, matrix_element
926 927 #=========================================================================== 928 # Helper functions 929 #===========================================================================
930 - def get_mg5_info_lines(self):
931 """Return info lines for MG5, suitable to place at beginning of 932 Fortran files""" 933 934 info = misc.get_pkg_info() 935 info_lines = "" 936 if info and info.has_key('version') and info.has_key('date'): 937 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 938 (info['version'], info['date']) 939 info_lines = info_lines + \ 940 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 941 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 942 else: 943 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 944 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 945 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 946 947 return info_lines
948
949 - def get_process_info_lines(self, matrix_element):
950 """Return info lines describing the processes for this matrix element""" 951 952 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 953 for process in matrix_element.get('processes')])
954 955
956 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
957 """Return the Helicity matrix definition lines for this matrix element""" 958 959 helicity_line_list = [] 960 i = 0 961 for helicities in matrix_element.get_helicity_matrix(): 962 i = i + 1 963 int_list = [i, len(helicities)] 964 int_list.extend(helicities) 965 helicity_line_list.append(\ 966 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 967 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 968 969 return "\n".join(helicity_line_list)
970
971 - def get_ic_line(self, matrix_element):
972 """Return the IC definition line coming after helicities, required by 973 switchmom in madevent""" 974 975 nexternal = matrix_element.get_nexternal_ninitial()[0] 976 int_list = range(1, nexternal + 1) 977 978 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 979 ",".join([str(i) for \ 980 i in int_list]))
981
982 - def set_chosen_SO_index(self, process, squared_orders):
983 """ From the squared order constraints set by the user, this function 984 finds what indices of the squared_orders list the user intends to pick. 985 It returns this as a string of comma-separated successive '.true.' or 986 '.false.' for each index.""" 987 988 user_squared_orders = process.get('squared_orders') 989 split_orders = process.get('split_orders') 990 991 if len(user_squared_orders)==0: 992 return ','.join(['.true.']*len(squared_orders)) 993 994 res = [] 995 for sqsos in squared_orders: 996 is_a_match = True 997 for user_sqso, value in user_squared_orders.items(): 998 if (process.get_squared_order_type(user_sqso) =='==' and \ 999 value!=sqsos[split_orders.index(user_sqso)]) or \ 1000 (process.get_squared_order_type(user_sqso) in ['<=','='] and \ 1001 value<sqsos[split_orders.index(user_sqso)]) or \ 1002 (process.get_squared_order_type(user_sqso) == '>' and \ 1003 value>=sqsos[split_orders.index(user_sqso)]): 1004 is_a_match = False 1005 break 1006 res.append('.true.' if is_a_match else '.false.') 1007 1008 return ','.join(res)
1009
1010 - def get_split_orders_lines(self, orders, array_name, n=5):
1011 """ Return the split orders definition as defined in the list orders and 1012 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1013 1014 ret_list = [] 1015 for index, order in enumerate(orders): 1016 for k in xrange(0, len(order), n): 1017 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1018 (array_name,index + 1, k + 1, min(k + n, len(order)), 1019 ','.join(["%5r" % i for i in order[k:k + n]]))) 1020 return ret_list
1021
1022 - def format_integer_list(self, list, name, n=5):
1023 """ Return an initialization of the python list in argument following 1024 the fortran syntax using the data keyword assignment, filling an array 1025 of name 'name'. It splits rows in chunks of size n.""" 1026 1027 ret_list = [] 1028 for k in xrange(0, len(list), n): 1029 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1030 (name, k + 1, min(k + n, len(list)), 1031 ','.join(["%5r" % i for i in list[k:k + n]]))) 1032 return ret_list
1033
1034 - def get_color_data_lines(self, matrix_element, n=6):
1035 """Return the color matrix definition lines for this matrix element. Split 1036 rows in chunks of size n.""" 1037 1038 if not matrix_element.get('color_matrix'): 1039 return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"] 1040 else: 1041 ret_list = [] 1042 my_cs = color.ColorString() 1043 for index, denominator in \ 1044 enumerate(matrix_element.get('color_matrix').\ 1045 get_line_denominators()): 1046 # First write the common denominator for this color matrix line 1047 ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator)) 1048 # Then write the numerators for the matrix elements 1049 num_list = matrix_element.get('color_matrix').\ 1050 get_line_numerators(index, denominator) 1051 1052 for k in xrange(0, len(num_list), n): 1053 ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \ 1054 (index + 1, k + 1, min(k + n, len(num_list)), 1055 ','.join(["%5r" % i for i in num_list[k:k + n]]))) 1056 my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index]) 1057 ret_list.append("C %s" % repr(my_cs)) 1058 return ret_list
1059 1060
1061 - def get_den_factor_line(self, matrix_element):
1062 """Return the denominator factor line for this matrix element""" 1063 1064 return "DATA IDEN/%2r/" % \ 1065 matrix_element.get_denominator_factor()
1066
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Arguments:
            mapconfigs         -- diagram number for each config (0 = none)
            matrix_element     -- the HelasMatrixElement being exported
            num_matrix_element -- 1-based index of this matrix element in
                                  the subprocess group
        Returns a list of Fortran DATA statements.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc.
        # NOTE(review): each diag_tuple is indexed positionally; v[4] appears
        # to be the Nc power and v[5] the loop-trace Nc power -- confirm
        # against the color_basis structure in color_amp.py.
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Create dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    # diag_tuple[0] is the 0-based diagram index
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # Total number of JAMPs; relies on ijamp surviving the loop above,
        # i.e. on the color basis being non-empty (guaranteed by the guard).
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                # Config without an associated diagram: no line emitted.
                continue

            # List of True or False for each JAMP of this config.
            # NOTE(review): assumes every mapped diagram has at least one
            # leading-color flow (otherwise diag_jamp[num_diag] would raise
            # KeyError) -- confirm with callers.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                            (iconfig+1, num_matrix_element, colamps,
                             ','.join(["%s" % booldict[b] for b in \
                                       bool_list])))

        return ret_list
1125
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the AMP2(i) = sum(amp for diag(i))^2 Fortran lines.

        With a non-empty config_map, amplitudes of diagrams sharing the
        same topology/config are first summed and then squared; otherwise
        each diagram's amplitudes are squared individually.
        NOTE(review): the mutable default config_map is only read, never
        mutated, so the shared-default pitfall does not bite here.
        """

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex (used to drop multi-leg vertices below)
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
                     matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    # Diagram not mapped to any config: skip it.
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to either the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                # Collect all amplitudes of all diagrams mapped to this config
                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    # Several amplitudes: parenthesize the coherent sum.
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
1185 1186 #=========================================================================== 1187 # Returns the data statements initializing the coeffictients for the JAMP 1188 # decomposition. It is used when the JAMP initialization is decided to be 1189 # done through big arrays containing the projection coefficients. 1190 #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """Return the DATA statements setting the coefficients that build
        the JAMPs out of the AMPs, split in rows of at most n entries.

        One can specify the color_basis from which the color amplitudes
        originate so that comments tell what color structure each JAMP
        corresponds to. tag_letter is appended to the Fortran array names
        (NCONTRIBAMPS, DENOMCCOEF, NUMCCOEF, AMPNUMBERS)."""

        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient:
            # fermion factor * fraction * Nc**power for each contribution.
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Multiply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                             for coefficient in coeff_list]
            # Find the common denominator so all numerators become integers.
            # NOTE(review): fractions.gcd was removed in Python 3.9; this
            # relies on the Python 2 stdlib.
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            # Number of amplitudes contributing to this JAMP.
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                            index+1,len(num_list)))
            # The common denominator of the coefficients of this JAMP.
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                            index+1,commondenom))
            if color_basis:
                # Comment naming the color structure of this JAMP.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            for k in xrange(0, len(num_list), n):
                # Integer numerators of the coefficients, chunked.
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join(["%6r" % i for i in num_list[k:k + n]])))
                # Matching amplitude numbers (negated when imaginary).
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                                (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                                 ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
                pass
        return res_list
1235 1236
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                                   split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes
        dictionary.

        split_order_amps specifies the groups of amplitudes sharing the
        same amplitude orders, each to be put together in a given set of
        JAMPs; it follows the format of the second output of
        get_split_orders_mapping in helas_objects.py. split_order_names is
        optional (it should match the process 'split_orders' attribute)
        and only adds comments to the JAMP definitions in the code."""

        # Pre-built message; one '%s' placeholder remains to name the
        # offending argument at raise time.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            col_amps_order = []
            for jamp in color_amplitudes:
                # Keep only the contributions whose amplitude number belongs
                # to this order group. (Python 2 filter: returns a list.)
                col_amps_order.append(filter(lambda col_amp:
                                             col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                # Human-readable comment, e.g. 'C JAMPs contributing to orders QCD=2 QED=0'
                res_list.append('C JAMPs contributing to orders '+' '.join(
                    ['%s=%i'%order for order in zip(split_order_names,
                                                    amp_order[0])]))
            if self.opt['export_format'] in ['madloop_matchbox']:
                # MatchBox also needs the leading-color JAMP variant.
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                                    JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                                    JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                                    JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1311 1312
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes
        dictionary.

        The split argument defines into how many contributions each JAMP
        assignment is broken so that no Fortran line becomes too long
        (split <= 0 means a single assignment per JAMP)."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarily long lines. The list is consumed destructively.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation pieces re-add the JAMP itself on the RHS:
                # JAMP(i)=JAMP(i)+...
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP: append one signed AMP(n) term per contribution
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Only the sign remains per-term; magnitude factored out.
                        res = (res + "%s" + AMP_format) % \
                              (self.coeff(coefficient[0],
                                          coefficient[1] / abs(coefficient[1]),
                                          coefficient[2],
                                          coefficient[3]),
                               str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                                      coefficient[1],
                                                                      coefficient[2],
                                                                      coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    # Close the parenthesis opened for the common factor.
                    res = res + ')'

                res_list.append(res)

        return res_list
1389
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a triple of Fortran snippets (each without a trailing
        newline): variable declarations, DATA initialisations and the
        executable PDF-evaluation/summation lines. For decay processes
        (ninitial == 1) only trivial PD assignments are emitted. When
        subproc_group is True, beam indices go through the IB() mapping."""

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDFs, each subprocess gets weight 1.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names (sanitized for Fortran identifiers)
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                        "\n"

            # Get PDF data lines for all initial states (default value 1D0)
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                  "/%d*1D0/" % len(initial_states[i]) + \
                                  "\n"

            # Get PDF lines for all different initial states
            for i, init_states in enumerate(initial_states):
                # Only evaluate PDFs when the beam is a hadron (|LPP| >= 1).
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in pdf_codes.keys():
                        # One PDG2PDF call per distinct initial-state particle.
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        # Lepton or other non-PDF beam: weight 1.
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1496 1497 #=========================================================================== 1498 # write_props_file 1499 #===========================================================================
1500 - def write_props_file(self, writer, matrix_element, s_and_t_channels):
1501 """Write the props.inc file for MadEvent. Needs input from 1502 write_configs_file.""" 1503 1504 lines = [] 1505 1506 particle_dict = matrix_element.get('processes')[0].get('model').\ 1507 get('particle_dict') 1508 1509 for iconf, configs in enumerate(s_and_t_channels): 1510 for vertex in configs[0] + configs[1][:-1]: 1511 leg = vertex.get('legs')[-1] 1512 if leg.get('id') not in particle_dict: 1513 # Fake propagator used in multiparticle vertices 1514 mass = 'zero' 1515 width = 'zero' 1516 pow_part = 0 1517 else: 1518 particle = particle_dict[leg.get('id')] 1519 # Get mass 1520 if particle.get('mass').lower() == 'zero': 1521 mass = particle.get('mass') 1522 else: 1523 mass = "abs(%s)" % particle.get('mass') 1524 # Get width 1525 if particle.get('width').lower() == 'zero': 1526 width = particle.get('width') 1527 else: 1528 width = "abs(%s)" % particle.get('width') 1529 1530 pow_part = 1 + int(particle.is_boson()) 1531 1532 lines.append("prmass(%d,%d) = %s" % \ 1533 (leg.get('number'), iconf + 1, mass)) 1534 lines.append("prwidth(%d,%d) = %s" % \ 1535 (leg.get('number'), iconf + 1, width)) 1536 lines.append("pow(%d,%d) = %d" % \ 1537 (leg.get('number'), iconf + 1, pow_part)) 1538 1539 # Write the file 1540 writer.writelines(lines) 1541 1542 return True
1543 1544 #=========================================================================== 1545 # write_configs_file 1546 #===========================================================================
1547 - def write_configs_file(self, writer, matrix_element):
1548 """Write the configs.inc file for MadEvent""" 1549 1550 # Extract number of external particles 1551 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1552 1553 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1554 mapconfigs = [c[0] for c in configs] 1555 model = matrix_element.get('processes')[0].get('model') 1556 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1557 [[c[1]] for c in configs], 1558 mapconfigs, 1559 nexternal, ninitial, 1560 model)
1561 1562 #=========================================================================== 1563 # write_configs_file_from_diagrams 1564 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the s- and t-channel vertex lists (one entry per written
        config), which write_props_file consumes afterwards."""

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs; configs containing
        # a larger vertex are skipped below (shrunk-loop exception aside).
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used for multi-particle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    # Placeholder shared by all non-contributing subprocesses;
                    # resized in place below once the real length is known.
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                # NOTE(review): relies on Python 2 zip returning a list
                # (added to tchannels below).
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: one vertex per subprocess, pick the
                    # first non-None one as representative.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # iforest: daughter leg numbers feeding this propagator.
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # sprop: PDG of the s-channel propagator per subprocess
                    # (0 when the subprocess has no such diagram).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    # t-channel propagator: tprid holds |PDG|, sprop is 0.
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1683 1684 #=========================================================================== 1685 # Global helper methods 1686 #=========================================================================== 1687
1688 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1689 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1690 1691 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1692 1693 if total_coeff == 1: 1694 if is_imaginary: 1695 return '+imag1*' 1696 else: 1697 return '+' 1698 elif total_coeff == -1: 1699 if is_imaginary: 1700 return '-imag1*' 1701 else: 1702 return '-' 1703 1704 res_str = '%+iD0' % total_coeff.numerator 1705 1706 if total_coeff.denominator != 1: 1707 # Check if total_coeff is an integer 1708 res_str = res_str + '/%iD0' % total_coeff.denominator 1709 1710 if is_imaginary: 1711 res_str = res_str + '*imag1' 1712 1713 return res_str + '*'
1714 1715
    def set_fortran_compiler(self, default_compiler, force=False):
        """Pick the Fortran and f2py executables to use and record them in
        Source/make_opts (and in the global Template when writable).

        default_compiler: dict with keys 'fortran' and 'f2py' holding the
            preferred executable names (either may be empty/None).
        force: unused in this routine -- kept for interface compatibility.
        Returns the chosen Fortran compiler name.
        """

        # Check for compiler: prefer the requested one if it is on PATH,
        # then fall back to the common free compilers in order.
        if default_compiler['fortran'] and misc.which(default_compiler['fortran']):
            f77_compiler = default_compiler['fortran']
        elif misc.which('gfortran'):
            f77_compiler = 'gfortran'
        elif misc.which('g77'):
            f77_compiler = 'g77'
        elif misc.which('f77'):
            f77_compiler = 'f77'
        elif default_compiler['fortran']:
            logger.warning('No Fortran Compiler detected! Please install one')
            f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it
        else:
            raise MadGraph5Error, 'No Fortran Compiler detected! Please install one'
        logger.info('Use Fortran compiler ' + f77_compiler)


        # Check for compiler. 1. set default.
        # (kept even if not found on PATH, as a best-effort fallback)
        if default_compiler['f2py']:
            f2py_compiler = default_compiler['f2py']
        else:
            f2py_compiler = ''
        # Try to find the correct one, including the versioned names that
        # some distributions install for Python 2.6/2.7.
        if default_compiler['f2py'] and misc.which(default_compiler['f2py']):
            f2py_compiler = default_compiler['f2py']
        elif misc.which('f2py'):
            f2py_compiler = 'f2py'
        elif sys.version_info[1] == 6:
            if misc.which('f2py-2.6'):
                f2py_compiler = 'f2py-2.6'
            elif misc.which('f2py2.6'):
                f2py_compiler = 'f2py2.6'
        elif sys.version_info[1] == 7:
            if misc.which('f2py-2.7'):
                f2py_compiler = 'f2py-2.7'
            elif misc.which('f2py2.7'):
                f2py_compiler = 'f2py2.7'

        to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler}


        self.replace_make_opt_f_compiler(to_replace)
        # Replace also for Template but not for cluster
        # (MADGRAPH_DATA is set on cluster nodes; ReadWrite guards a
        # read-only installation)
        if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite:
            self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO'))

        return f77_compiler

    # an alias for backward compatibility
    set_compiler = set_fortran_compiler

    def set_cpp_compiler(self, default_compiler, force=False):
        """Pick the C++ compiler to use and record it in Source/make_opts
        (and in the global Template when appropriate).

        default_compiler: preferred compiler executable name (may be empty).
        force: unused in this routine -- kept for interface compatibility.
        Returns the chosen compiler name.
        """

        # Check for compiler: prefer the requested one if on PATH.
        if default_compiler and misc.which(default_compiler):
            compiler = default_compiler
        elif misc.which('g++'):
            # check if this 'g++' is actually clang in disguise (MacOS);
            # if so and a real 'clang' exists, prefer calling it directly
            p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
            out, _ = p.communicate()
            if 'clang' in out and misc.which('clang'):
                compiler = 'clang'
            else:
                compiler = 'g++'
        elif misc.which('c++'):
            compiler = 'c++'
        elif misc.which('clang'):
            compiler = 'clang'
        elif default_compiler:
            logger.warning('No c++ Compiler detected! Please install one')
            compiler = default_compiler # maybe misc fail so try with it
        else:
            raise MadGraph5Error, 'No c++ Compiler detected! Please install one'
        logger.info('Use c++ compiler ' + compiler)
        self.replace_make_opt_c_compiler(compiler)
        # Replace also for Template but not for cluster
        # (also skipped in debug mode and on release-build checkouts)
        if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \
           not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')):
            self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO'))

        return compiler
1803 1804
1805 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1806 """Set FC=compiler in Source/make_opts""" 1807 1808 assert isinstance(compilers, dict) 1809 1810 mod = False #avoid to rewrite the file if not needed 1811 if not root_dir: 1812 root_dir = self.dir_path 1813 1814 compiler= compilers['fortran'] 1815 f2py_compiler = compilers['f2py'] 1816 if not f2py_compiler: 1817 f2py_compiler = 'f2py' 1818 for_update= {'DEFAULT_F_COMPILER':compiler, 1819 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1820 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1821 1822 try: 1823 common_run_interface.CommonRunCmd.update_make_opts_full( 1824 make_opts, for_update) 1825 except IOError: 1826 if root_dir == self.dir_path: 1827 logger.info('Fail to set compiler. Trying to continue anyway.')
1828
1829 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1830 """Set CXX=compiler in Source/make_opts. 1831 The version is also checked, in order to set some extra flags 1832 if the compiler is clang (on MACOS)""" 1833 1834 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1835 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1836 1837 # list of the variable to set in the make_opts file 1838 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1839 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1840 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1841 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1842 } 1843 1844 if not root_dir: 1845 root_dir = self.dir_path 1846 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1847 1848 try: 1849 common_run_interface.CommonRunCmd.update_make_opts_full( 1850 make_opts, for_update) 1851 except IOError: 1852 if root_dir == self.dir_path: 1853 logger.info('Fail to set compiler. Trying to continue anyway.') 1854 1855 return
1856
1857 #=============================================================================== 1858 # ProcessExporterFortranSA 1859 #=============================================================================== 1860 -class ProcessExporterFortranSA(ProcessExporterFortran):
1861 """Class to take care of exporting a set of matrix elements to 1862 MadGraph v4 StandAlone format.""" 1863 1864 matrix_template = "matrix_standalone_v4.inc" 1865
1866 - def __init__(self, *args,**opts):
1867 """add the format information compare to standard init""" 1868 1869 if 'format' in opts: 1870 self.format = opts['format'] 1871 del opts['format'] 1872 else: 1873 self.format = 'standalone' 1874 1875 self.prefix_info = {} 1876 ProcessExporterFortran.__init__(self, *args, **opts)
1877
    def copy_template(self, model):
        """Additional actions needed for setup of Template:
        create the standalone directory skeleton, copy version info,
        the subprocess makefile, check_sa.f and make_opts, and write
        the Source makefile. No-op if the directory already exists.
        """

        #First copy the full template tree if dir_path doesn't exit
        if os.path.isdir(self.dir_path):
            return

        logger.info('initialize a new standalone directory: %s' % \
                        os.path.basename(self.dir_path))
        temp_dir = pjoin(self.mgme_dir, 'Template/LO')

        # Create the directory structure
        os.mkdir(self.dir_path)
        os.mkdir(pjoin(self.dir_path, 'Source'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL'))
        os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS'))
        os.mkdir(pjoin(self.dir_path, 'SubProcesses'))
        os.mkdir(pjoin(self.dir_path, 'bin'))
        os.mkdir(pjoin(self.dir_path, 'bin', 'internal'))
        os.mkdir(pjoin(self.dir_path, 'lib'))
        os.mkdir(pjoin(self.dir_path, 'Cards'))

        # Information at top-level
        #Write version info
        shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path)
        try:
            shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path)
        except IOError:
            # No MG4 version file: synthesize one from the MG5 package info.
            MG5_version = misc.get_pkg_info()
            open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \
                "5." + MG5_version['version'])


        # Add file in SubProcesses
        shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'),
                    pjoin(self.dir_path, 'SubProcesses', 'makefileP'))

        if self.format == 'standalone':
            shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'),
                        pjoin(self.dir_path, 'SubProcesses', 'check_sa.f'))

        # Add file in Source
        shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'),
                    pjoin(self.dir_path, 'Source'))
        # add the makefile
        filename = pjoin(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FileWriter(filename))
1926 1927 #=========================================================================== 1928 # export model files 1929 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model:
        run the parent export, append an update_as_param() helper to
        couplings.f, patch check_sa.f to read the param_card eagerly,
        and create the model symbolic links."""

        super(ProcessExporterFortranSA,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Switch the setpara call in check_sa.f to the two-argument form
        # (force a full read of the param_card on first call).
        text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read()
        text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)')
        fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()
1953 1954 #=========================================================================== 1955 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1956 #===========================================================================
1957 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1958 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1959 Perl script of MadEvent4 are still working properly for pure MG5 run. 1960 Not needed for StandAlone so just return 1961 """ 1962 1963 return
1964 1965 1966 #=========================================================================== 1967 # Make the Helas and Model directories for Standalone directory 1968 #===========================================================================
1969 - def make(self):
1970 """Run make in the DHELAS and MODEL directories, to set up 1971 everything for running standalone 1972 """ 1973 1974 source_dir = pjoin(self.dir_path, "Source") 1975 logger.info("Running make for Helas") 1976 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 1977 logger.info("Running make for Model") 1978 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
1979 1980 #=========================================================================== 1981 # Create proc_card_mg5.dat for Standalone directory 1982 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by
           generation proc_card_mg5.dat
           generate a global makefile
           (also selects compilers, builds the libraries, and creates the
           f2py glue when prefixed matrix elements were generated)
        """

        compiler =  {'fortran': mg5options['fortran_compiler'],
                     'cpp': mg5options['cpp_compiler'],
                     'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
        # Touch empty __init__.py files so the output is importable.
        # NOTE(review): the file handles are not closed explicitly.
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        if 'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                           pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # f2py mode: one global all_matrix.f dispatcher plus helpers.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                                     pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n MENUM=2\nendif\n'''
            deppython = ''
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2032
    def write_f2py_splitter(self):
        """write a function to call the correct matrix element:
        generate SubProcesses/all_matrix.f, whose smatrixhel routine
        dispatches on (npdg, pdgs) to the per-process prefixed
        <prefix>smatrixhel routines recorded in self.prefix_info."""

        # Fortran template; the %(...)s slots are filled from 'formatting'.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine get_pdg_order(PDG)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      DATA PDGS/ %(pdgs)s /
      PDG = PDGS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END


      """

        allids = self.prefix_info.keys()
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids) for ids in allids])
        max_nexternal = max([len(ids) for ids in allids])

        # Human-readable inventory of the exported processes (comment lines).
        info = []
        for key, (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s' % (tag, key, prefix))


        # Build the dispatch body: outer branch on npdg (only when several
        # multiplicities coexist), inner if/else-if chain on the pdg list.
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current = [ids for ids in allids if len(ids)==n_ext]
            if not current:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append('       if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append('       else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current):
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                # NOTE(review): '%i' below formats 'i', the loop variable
                # leaked out of the list comprehension above (Python 2
                # scoping), so the trailing Fortran comment always shows the
                # last pdg index; 'ii' was probably intended -- confirm.
                if ii==0:
                    text.append( '         if(%s) then ! %i' % (condition, i))
                else:
                    text.append( '         else if(%s) then ! %i' % (condition,i))
                text.append('             call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0])
            text.append('         endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('       endif')

        formatting = {'python_information':'\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      # pdg table padded with 0 up to max_nexternal, laid out
                      # column-wise for the Fortran DATA statement
                      'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                       for i in range(max_nexternal) for pdg in allids),
                      'prefix':'\',\''.join(allprefix)
                      }
        # NOTE(review): 'lenprefix' is not referenced by the template above;
        # it appears to be unused.
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2138
2139 - def write_f2py_check_sa(self, matrix_element, writer):
2140 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2141 # To be implemented. It is just an example file, i.e. not crucial. 2142 return
2143
2144 - def write_f2py_makefile(self):
2145 """ """ 2146 # Add file in SubProcesses 2147 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2148 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2149
2150 - def create_MA5_cards(self,*args,**opts):
2151 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2152 pass
2153
2154 - def compiler_choice(self, compiler):
2155 """ Different daughter classes might want different compilers. 2156 So this function is meant to be overloaded if desired.""" 2157 2158 self.set_compiler(compiler)
2159 2160 #=========================================================================== 2161 # generate_subprocess_directory 2162 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 standalone,
        including the necessary matrix.f and nexternal.inc files.

        matrix_element: the HelasMatrixElement to export.
        fortran_model: helas call writer used to produce the Fortran calls.
        number: sequential index used for the optional 'int' proc prefix.
        Returns the number of generated helas calls (0 when the directory
        is skipped because a mirror-symmetric one already exists).
        """

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = pjoin(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        if self.opt['sa_symmetry']:
            # avoid symmetric output: if any permutation of the final-state
            # legs (with initial legs possibly swapped) already has a
            # directory, skip this subprocess entirely.
            for i,proc in enumerate(matrix_element.get('processes')):

                # NOTE(review): 'tag' is never used below; the inner
                # 'for i,p in enumerate(perm)' also shadows this loop's 'i'.
                tag = proc.get_tag()
                legs = proc.get('legs')[:]
                leg0 = proc.get('legs')[0]
                leg1 = proc.get('legs')[1]
                if not leg1.get('state'):
                    proc.get('legs')[0] = leg1
                    proc.get('legs')[1] = leg0
                flegs = proc.get('legs')[2:]
                for perm in itertools.permutations(flegs):
                    for i,p in enumerate(perm):
                        proc.get('legs')[i+2] = p
                    dirpath2 = pjoin(self.dir_path, 'SubProcesses', \
                               "P%s" % proc.shell_string())
                    #restore original order
                    proc.get('legs')[2:] = legs[2:]
                    if os.path.exists(dirpath2):
                        proc.get('legs')[:] = legs
                        return 0
                proc.get('legs')[:] = legs

        try:
            os.mkdir(dirpath)
        except os.error as error:
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        if self.opt['export_format']=='standalone_msP':
            filename = pjoin(dirpath, 'matrix_prod.f')
        else:
            filename = pjoin(dirpath, 'matrix.f')

        # Optional routine prefix (for f2py linking of several processes).
        proc_prefix = ''
        if 'prefix' in self.cmd_options:
            if self.cmd_options['prefix'] == 'int':
                proc_prefix = 'M%s_' % number
            elif self.cmd_options['prefix'] == 'proc':
                proc_prefix = matrix_element.get('processes')[0].shell_string().split('_',1)[1]
            else:
                raise Exception, '--prefix options supports only \'int\' and \'proc\''
            # Record pdg list -> (prefix, tag) for the f2py dispatcher.
            for proc in matrix_element.get('processes'):
                ids = [l.get('id') for l in proc.get('legs_with_decays')]
                self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()]

        calls = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model,
            proc_prefix=proc_prefix)

        # MadSpin production variants need extra include files.
        if self.opt['export_format'] == 'standalone_msP':
            filename = pjoin(dirpath,'configs_production.inc')
            mapconfigs, s_and_t_channels = self.write_configs_file(\
                writers.FortranWriter(filename),
                matrix_element)

            filename = pjoin(dirpath,'props_production.inc')
            self.write_props_file(writers.FortranWriter(filename),
                             matrix_element,
                             s_and_t_channels)

            filename = pjoin(dirpath,'nexternal_prod.inc')
            self.write_nexternal_madspin(writers.FortranWriter(filename),
                             nexternal, ninitial)

        if self.opt['export_format']=='standalone_msF':
            filename = pjoin(dirpath, 'helamp.inc')
            ncomb=matrix_element.get_helicity_combinations()
            self.write_helamp_madspin(writers.FortranWriter(filename),
                             ncomb)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                             nexternal, ninitial)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                         matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                            len(matrix_element.get_all_amplitudes()))

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                             get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                             get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                     matrix_element.get('processes')[0].nice_string())
        plot.draw()

        linkfiles = ['check_sa.f', 'coupl.inc']

        # With a prefix, a local check_sa.f is rewritten to call the
        # prefixed smatrix routines instead of symlinking the shared one.
        if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')):
            text = open(pjoin(dirpath, '..', 'check_sa.f')).read()
            pat = re.compile('smatrix', re.I)
            new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text)
            with open(pjoin(dirpath, 'check_sa.f'),'w') as f:
                f.write(new_text)
            linkfiles.pop(0)

        for file in linkfiles:
            ln('../%s' % file, cwd=dirpath)
        ln('../makefileP', name='makefile', cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
2301 2302 2303 #=========================================================================== 2304 # write_source_makefile 2305 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source/makefile for the standalone output from the
        madevent makefile template, restricted to the DHELAS and MODEL
        libraries. Returns the replace_dict used for the substitution."""

        path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source')
        set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)'
        model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n'''

        replace_dict= {'libraries': set_of_lib,
                       'model':model_line,
                       'additional_dsample': '',
                       'additional_dependencies':''}

        text = open(path).read() % replace_dict

        if writer:
            writer.write(text)

        return replace_dict
2324 2325 #=========================================================================== 2326 # write_matrix_element_v4 2327 #===========================================================================
    def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                                write=True, proc_prefix=''):
        """Export a matrix element to a matrix.f file in MG4 standalone format
        if write is on False, just return the replace_dict and not write anything.

        writer: FortranWriter for matrix.f (may be None when write=False).
        matrix_element: HelasMatrixElement to export.
        fortran_model: helas call writer producing the Fortran calls.
        proc_prefix: optional prefix applied to the generated routines.
        Returns the number of non-comment helas calls when writing,
        otherwise the filled replace_dict (with 'return_value' set).
        """


        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        if writer:
            if not isinstance(writer, writers.FortranWriter):
                raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter but %s" % type(writer))
            # Set lowercase/uppercase Fortran code
            writers.FortranWriter.downcase = False


        if not self.opt.has_key('sa_symmetry'):
            self.opt['sa_symmetry']=False


        # The proc_id is for MadEvent grouping which is never used in SA.
        replace_dict = {'global_variable':'', 'amp2_lines':'',
                        'proc_prefix':proc_prefix, 'proc_id':''}

        # Extract helas calls
        helas_calls = fortran_model.get_matrix_element_calls(\
                    matrix_element)

        replace_dict['helas_calls'] = "\n".join(helas_calls)

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal
        replace_dict['nincoming'] = ninitial

        # Extract ncomb
        ncomb = matrix_element.get_helicity_combinations()
        replace_dict['ncomb'] = ncomb

        # Extract helicity lines
        helicity_lines = self.get_helicity_lines(matrix_element)
        replace_dict['helicity_lines'] = helicity_lines

        # Extract overall denominator
        # Averaging initial state color, spin, and identical FS particles
        replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element)

        # Extract ngraphs
        ngraphs = matrix_element.get_number_of_amplitudes()
        replace_dict['ngraphs'] = ngraphs

        # Extract nwavefuncs
        nwavefuncs = matrix_element.get_number_of_wavefunctions()
        replace_dict['nwavefuncs'] = nwavefuncs

        # Extract ncolor
        ncolor = max(1, len(matrix_element.get('color_basis')))
        replace_dict['ncolor'] = ncolor

        replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor()
        replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\
                                       matrix_element.get_beams_hel_avg_factor()

        # Extract color data lines
        color_data_lines = self.get_color_data_lines(matrix_element)
        replace_dict['color_data_lines'] = "\n".join(color_data_lines)

        if self.opt['export_format']=='standalone_msP':
            # For MadSpin need to return the AMP2
            amp2_lines = self.get_amp2_lines(matrix_element, [] )
            replace_dict['amp2_lines'] = '\n'.join(amp2_lines)
            replace_dict['global_variable'] = \
                "       Double Precision amp2(NGRAPHS)\n       common/to_amps/  amp2\n"

        # JAMP definition, depends on the number of independent split orders
        split_orders=matrix_element.get('processes')[0].get('split_orders')

        if len(split_orders)==0:
            replace_dict['nSplitOrders']=''
            # Extract JAMP lines
            jamp_lines = self.get_JAMP_lines(matrix_element)
            # Consider the output of a dummy order 'ALL_ORDERS' for which we
            # set all amplitude order to weight 1 and only one squared order
            # contribution which is of course ALL_ORDERS=2.
            squared_orders = [(2,),]
            amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
            replace_dict['chosen_so_configs'] = '.TRUE.'
            replace_dict['nSqAmpSplitOrders']=1
            replace_dict['split_order_str_list']=''
        else:
            squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
            replace_dict['nAmpSplitOrders']=len(amp_orders)
            replace_dict['nSqAmpSplitOrders']=len(squared_orders)
            replace_dict['nSplitOrders']=len(split_orders)
            replace_dict['split_order_str_list']=str(split_orders)
            amp_so = self.get_split_orders_lines(
                    [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
            sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
            replace_dict['ampsplitorders']='\n'.join(amp_so)
            replace_dict['sqsplitorders']='\n'.join(sqamp_so)
            jamp_lines = self.get_JAMP_lines_split_order(\
                       matrix_element,amp_orders,split_order_names=split_orders)

            # Now setup the array specifying what squared split order is chosen
            replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                              matrix_element.get('processes')[0],squared_orders)

            # For convenience we also write the driver check_sa_splitOrders.f
            # that explicitely writes out the contribution from each squared order.
            # The original driver still works and is compiled with 'make' while
            # the splitOrders one is compiled with 'make check_sa_born_splitOrders'
            # NOTE(review): written to the current working directory, not to
            # the subprocess directory -- confirm this is intended.
            check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f')
            self.write_check_sa_splitOrders(squared_orders,split_orders,
                              nexternal,ninitial,proc_prefix,check_sa_writer)

        if write:
            # NOTE(review): also written relative to the current working
            # directory -- confirm intended.
            writers.FortranWriter('nsqso_born.inc').writelines(
                """INTEGER NSQSO_BORN
                   PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders'])

        replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

        # Select the matrix.f template matching the export format.
        matrix_template = self.matrix_template
        if self.opt['export_format']=='standalone_msP' :
            matrix_template = 'matrix_standalone_msP_v4.inc'
        elif self.opt['export_format']=='standalone_msF':
            matrix_template = 'matrix_standalone_msF_v4.inc'
        elif self.opt['export_format']=='matchbox':
            replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id')
            replace_dict["color_information"] = self.get_color_string_lines(matrix_element)

        if len(split_orders)>0:
            if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']:
                logger.debug("Warning: The export format %s is not "+\
                  " available for individual ME evaluation of given coupl. orders."+\
                  " Only the total ME will be computed.", self.opt['export_format'])
            elif self.opt['export_format'] in ['madloop_matchbox']:
                replace_dict["color_information"] = self.get_color_string_lines(matrix_element)
                matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc"
            else:
                matrix_template = "matrix_standalone_splitOrders_v4.inc"

        replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template)
        replace_dict['template_file2'] = pjoin(_file_path, \
                                   'iolibs/template_files/split_orders_helping_functions.inc')
        if write and writer:
            path = replace_dict['template_file']
            content = open(path).read()
            content = content % replace_dict
            # Write the file
            writer.writelines(content)
            # Add the helper functions.
            if len(split_orders)>0:
                content = '\n' + open(replace_dict['template_file2'])\
                                   .read()%replace_dict
                writer.writelines(content)
            return len(filter(lambda call: call.find('#') != 0, helas_calls))
        else:
            replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls))
            return replace_dict # for subclass update
2498
2499 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2500 nincoming, proc_prefix, writer):
2501 """ Write out a more advanced version of the check_sa drivers that 2502 individually returns the matrix element for each contributing squared 2503 order.""" 2504 2505 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2506 'template_files', 'check_sa_splitOrders.f')).read() 2507 printout_sq_orders=[] 2508 for i, squared_order in enumerate(squared_orders): 2509 sq_orders=[] 2510 for j, sqo in enumerate(squared_order): 2511 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2512 printout_sq_orders.append(\ 2513 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2514 %(i+1,' '.join(sq_orders),i+1)) 2515 printout_sq_orders='\n'.join(printout_sq_orders) 2516 replace_dict = {'printout_sqorders':printout_sq_orders, 2517 'nSplitOrders':len(squared_orders), 2518 'nexternal':nexternal, 2519 'nincoming':nincoming, 2520 'proc_prefix':proc_prefix} 2521 2522 if writer: 2523 writer.writelines(check_sa_content % replace_dict) 2524 else: 2525 return replace_dict
2526
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """class to take care of exporting a set of matrix element for the Matchbox
    code in the case of Born only routine"""

    # Options overriding the standalone exporter's defaults for Matchbox output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born
    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return the color matrix definition lines for this matrix element.

        Emits a Fortran if/elseif chain mapping (in1, in2) index pairs to the
        flattened T/Tr generator arguments of each color-basis entry; returns
        "out = 1" when there is no color matrix, and -1 as the fall-through
        value. Raises MadGraph5Error for color structures Matchbox cannot
        handle (anything other than T and Tr, except the trivial ColorOne).
        """

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str = repr(my_cs)
            t_match = re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg = []
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne']:
                    continue
                if ctype not in ['T', 'Tr']:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between successive color factors.
                tmparg += ['0']
                arg += tmparg
            for j, v in enumerate(arg):
                output[(i_color, j)] = v

            # NOTE(review): plain dict iteration here — on Python 2 the order of
            # the emitted if/elseif branches is the (arbitrary) hash order, but
            # each (in1,in2) pair is matched explicitly so the lookup result is
            # order-independent.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
          out = %s
          """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self, *args, **opts):
        # Matchbox output is not compiled by MG5; nothing to build here.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Adding leading color part of the colorflow.

        Emits the usual JAMP definitions (via the parent class) followed by a
        second set, using JAMP_formatLC (default "LN"+JAMP_format), restricted
        to the color coefficients without any 1/NC suppression.
        """

        if not JAMP_formatLC:
            JAMP_formatLC = "LN%s" % JAMP_format

        error_msg = "Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps, helas_objects.HelasMatrixElement)):
            col_amps = col_amps.get_color_amplitudes()
        elif(isinstance(col_amps, list)):
            if(col_amps and isinstance(col_amps[0], list)):
                col_amps = col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                                      JAMP_format=JAMP_format,
                                                      AMP_format=AMP_format,
                                                      split=-1)

        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] is the power of 1/NC carried by the coefficient).
        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3] == 0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                                      JAMP_format=JAMP_formatLC,
                                                      AMP_format=AMP_format,
                                                      split=-1)

        return text

#===============================================================================
# ProcessExporterFortranMW
#===============================================================================
class ProcessExporterFortranMW(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 - MadWeight format."""

    # Fortran template used for each subprocess matrix.f file.
    matrix_file="matrix_standalone_v4.inc"

    def copy_template(self, model):
        """Additional actions needed for setup of Template
        """

        super(ProcessExporterFortranMW, self).copy_template(model)

        # Add the MW specific file
        # (True -> copy symlinks as symlinks rather than following them)
        shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'),
                        pjoin(self.dir_path, 'Source','MadWeight'), True)
        shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'),
                        pjoin(self.dir_path, 'bin','internal','madweight'), True)
        # MadWeight ships its own setrun.f / run.inc; promote them to Source.
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'),
                 pjoin(self.dir_path, 'Source','setrun.f'))
        files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'),
                 pjoin(self.dir_path, 'Source','run.inc'))
        # File created from Template (Different in some child class)
        filename = os.path.join(self.dir_path,'Source','run_config.inc')
        self.write_run_config_file(writers.FortranWriter(filename))

        try:
            # Post-copy hook shipped with the MadWeight template; output is
            # deliberately discarded.
            subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')],
                            stdout = os.open(os.devnull, os.O_RDWR),
                            stderr = os.open(os.devnull, os.O_RDWR),
                            cwd=self.dir_path)
        except OSError:
            # Probably madweight already called
            pass

        # Copy the different python file in the Template
        self.copy_python_file()
        # create the appropriate cuts.f
        self.get_mw_cuts_version()

        # add the makefile in Source directory
        filename = os.path.join(self.dir_path,'Source','makefile')
        self.write_source_makefile(writers.FortranWriter(filename))

    #===========================================================================
    # convert_model
    #===========================================================================
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """Convert the model and, in addition to the parent behaviour, snapshot
        the UFO model directory into bin/internal/ufomodel (ignoring caches and
        data files) together with the restriction card, so the exported process
        directory is self-contained."""

        super(ProcessExporterFortranMW,self).convert_model(model,
                                            wanted_lorentz, wanted_couplings)

        IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~')
        try:
            # Drop any stale copy from a previous export.
            shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel'))
        except OSError as error:
            pass
        model_path = model.get('modelpath')
        # This is not safe if there is a '##' or '-' in the path.
        shutil.copytree(model_path,
                        pjoin(self.dir_path,'bin','internal','ufomodel'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        if hasattr(model, 'restrict_card'):
            out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel',
                             'restrict_default.dat')
            if isinstance(model.restrict_card, check_param_card.ParamCard):
                model.restrict_card.write(out_path)
            else:
                files.cp(model.restrict_card, out_path)

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
2720 - def copy_python_file(self):
2721 """copy the python file require for the Template""" 2722 2723 # madevent interface 2724 cp(_file_path+'/interface/madweight_interface.py', 2725 self.dir_path+'/bin/internal/madweight_interface.py') 2726 cp(_file_path+'/interface/extended_cmd.py', 2727 self.dir_path+'/bin/internal/extended_cmd.py') 2728 cp(_file_path+'/interface/common_run_interface.py', 2729 self.dir_path+'/bin/internal/common_run_interface.py') 2730 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2731 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2732 cp(_file_path+'/iolibs/save_load_object.py', 2733 self.dir_path+'/bin/internal/save_load_object.py') 2734 cp(_file_path+'/madevent/gen_crossxhtml.py', 2735 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2736 cp(_file_path+'/madevent/sum_html.py', 2737 self.dir_path+'/bin/internal/sum_html.py') 2738 cp(_file_path+'/various/FO_analyse_card.py', 2739 self.dir_path+'/bin/internal/FO_analyse_card.py') 2740 cp(_file_path+'/iolibs/file_writers.py', 2741 self.dir_path+'/bin/internal/file_writers.py') 2742 #model file 2743 cp(_file_path+'../models/check_param_card.py', 2744 self.dir_path+'/bin/internal/check_param_card.py') 2745 2746 #madevent file 2747 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2748 cp(_file_path+'/various/lhe_parser.py', 2749 self.dir_path+'/bin/internal/lhe_parser.py') 2750 2751 cp(_file_path+'/various/banner.py', 2752 self.dir_path+'/bin/internal/banner.py') 2753 cp(_file_path+'/various/shower_card.py', 2754 self.dir_path+'/bin/internal/shower_card.py') 2755 cp(_file_path+'/various/cluster.py', 2756 self.dir_path+'/bin/internal/cluster.py') 2757 2758 # logging configuration 2759 cp(_file_path+'/interface/.mg5_logging.conf', 2760 self.dir_path+'/bin/internal/me5_logging.conf') 2761 cp(_file_path+'/interface/coloring_logging.py', 2762 self.dir_path+'/bin/internal/coloring_logging.py')
2763 2764 2765 #=========================================================================== 2766 # Change the version of cuts.f to the one compatible with MW 2767 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Skip the whole 'if(xqcut.gt.0d0...' block by counting nested
        # if/endif pairs; lines are only copied while nb_if == 0.
        # NOTE(review): the trigger line itself also matches the 'if(...)then'
        # pattern below and is counted again — this nesting bookkeeping is
        # deliberate-looking but fragile; confirm against the LO cuts.f layout
        # before touching it.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
         cut_bw = .true.
      else
         stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight has no genps.inc; maxparticles.inc provides what is needed.
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)

    #===========================================================================
    # Make the Helas and Model directories for Standalone directory
    #===========================================================================
2823 - def make(self):
2824 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2825 everything for running madweight 2826 """ 2827 2828 source_dir = os.path.join(self.dir_path, "Source") 2829 logger.info("Running make for Helas") 2830 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2831 logger.info("Running make for Model") 2832 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2833 logger.info("Running make for PDF") 2834 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2835 logger.info("Running make for CERNLIB") 2836 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2837 logger.info("Running make for GENERIC") 2838 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2839 logger.info("Running make for blocks") 2840 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2841 logger.info("Running make for tools") 2842 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2843 2844 #=========================================================================== 2845 # Create proc_card_mg5.dat for MadWeight directory 2846 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize Standalone MG4 directory by generation proc_card_mg5.dat"""

        # Compiler choices forwarded from the mg5 options.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks/ and tools/ sub-builds need the same include.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        # Configure compilers, then build all the static libraries.
        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)

    #===========================================================================
    # create the run_card for MW
    #===========================================================================
2884 - def create_run_card(self, matrix_elements, history):
2885 """ """ 2886 2887 run_card = banner_mod.RunCard() 2888 2889 # pass to default for MW 2890 run_card["run_tag"] = "\'not_use\'" 2891 run_card["fixed_ren_scale"] = "T" 2892 run_card["fixed_fac_scale"] = "T" 2893 run_card.remove_all_cut() 2894 2895 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2896 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2897 python_template=True) 2898 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2899 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2900 python_template=True)
2901 2902 #=========================================================================== 2903 # export model files 2904 #===========================================================================
    def export_model_files(self, model_path):
        """export the model dependent files for V4 model"""

        super(ProcessExporterFortranMW,self).export_model_files(model_path)
        # Add the routine update_as_param in v4 model
        # This is a function created in the UFO
        text="""
        subroutine update_as_param()
          call setpara('param_card.dat',.false.)
          return
        end
        """
        # Append (mode 'a') to the generated couplings.f.
        ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a')
        ff.write(text)
        ff.close()

        # Modify setrun.f: force the full (re)read of the param card.
        text = open(os.path.join(self.dir_path,'Source','setrun.f')).read()
        text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w')
        fsock.write(text)
        fsock.close()

        # Modify initialization.f in the same way.
        text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read()
        text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)')
        fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w')
        fsock.write(text)
        fsock.close()

        self.make_model_symbolic_link()

    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of generated helas calls (0 if none)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory may already exist from a previous run; just warn.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls, ncolor = self.write_matrix_element_v4(
                                    writers.FortranWriter(filename),
                                    matrix_element,
                                    fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc must be written before props.inc: it also computes the
        # s- and t-channel decomposition that props.inc consumes.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams
        filename = pjoin(dirpath, "matrix.ps")
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model=matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude='')
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        # Shared SubProcesses-level files linked into this P* directory.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls

    #===========================================================================
    # write_matrix_element_v4
    #===========================================================================
3051 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3052 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3053 3054 if not matrix_element.get('processes') or \ 3055 not matrix_element.get('diagrams'): 3056 return 0 3057 3058 if writer: 3059 if not isinstance(writer, writers.FortranWriter): 3060 raise writers.FortranWriter.FortranWriterError(\ 3061 "writer not FortranWriter") 3062 3063 # Set lowercase/uppercase Fortran code 3064 writers.FortranWriter.downcase = False 3065 3066 replace_dict = {} 3067 3068 # Extract version number and date from VERSION file 3069 info_lines = self.get_mg5_info_lines() 3070 replace_dict['info_lines'] = info_lines 3071 3072 # Extract process info lines 3073 process_lines = self.get_process_info_lines(matrix_element) 3074 replace_dict['process_lines'] = process_lines 3075 3076 # Set proc_id 3077 replace_dict['proc_id'] = proc_id 3078 3079 # Extract number of external particles 3080 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3081 replace_dict['nexternal'] = nexternal 3082 3083 # Extract ncomb 3084 ncomb = matrix_element.get_helicity_combinations() 3085 replace_dict['ncomb'] = ncomb 3086 3087 # Extract helicity lines 3088 helicity_lines = self.get_helicity_lines(matrix_element) 3089 replace_dict['helicity_lines'] = helicity_lines 3090 3091 # Extract overall denominator 3092 # Averaging initial state color, spin, and identical FS particles 3093 den_factor_line = self.get_den_factor_line(matrix_element) 3094 replace_dict['den_factor_line'] = den_factor_line 3095 3096 # Extract ngraphs 3097 ngraphs = matrix_element.get_number_of_amplitudes() 3098 replace_dict['ngraphs'] = ngraphs 3099 3100 # Extract nwavefuncs 3101 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3102 replace_dict['nwavefuncs'] = nwavefuncs 3103 3104 # Extract ncolor 3105 ncolor = max(1, len(matrix_element.get('color_basis'))) 3106 replace_dict['ncolor'] = ncolor 3107 3108 # Extract color data lines 3109 color_data_lines = self.get_color_data_lines(matrix_element) 3110 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3111 3112 # Extract helas calls 3113 helas_calls = fortran_model.get_matrix_element_calls(\ 3114 matrix_element) 3115 3116 replace_dict['helas_calls'] = "\n".join(helas_calls) 3117 3118 # Extract JAMP lines 3119 jamp_lines = self.get_JAMP_lines(matrix_element) 3120 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3121 3122 replace_dict['template_file'] = os.path.join(_file_path, \ 3123 'iolibs/template_files/%s' % self.matrix_file) 3124 replace_dict['template_file2'] = '' 3125 3126 if writer: 3127 file = open(replace_dict['template_file']).read() 3128 file = file % replace_dict 3129 # Write the file 3130 writer.writelines(file) 3131 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3132 else: 3133 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3134 3135 #=========================================================================== 3136 # write_source_makefile 3137 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source/makefile for madweight from its template, filling
        in the list of libraries the build must produce and link.

        (The previous docstring wrongly said this writes nexternal.inc.)
        Always returns True."""

        path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source')
        set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
        text = open(path).read() % {'libraries': set_of_lib}
        writer.write(text)

        return True

3149 - def write_phasespace_file(self, writer, nb_diag):
3150 """ """ 3151 3152 template = """ include 'maxparticles.inc' 3153 integer max_branches 3154 parameter (max_branches=max_particles-1) 3155 integer max_configs 3156 parameter (max_configs=%(nb_diag)s) 3157 3158 c channel position 3159 integer config_pos,perm_pos 3160 common /to_config/config_pos,perm_pos 3161 3162 """ 3163 3164 writer.write(template % {'nb_diag': nb_diag})
3165 3166 3167 #=========================================================================== 3168 # write_auto_dsig_file 3169 #===========================================================================
    def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
        """Write the auto_dsig.f file for the differential cross section
        calculation, includes pdf call information (MadWeight format).

        proc_id: non-empty string when writing a subprocess-group member; in
        that case the group-specific common block is emitted and the PASSCUTS
        guard is dropped. Returns 0 for an empty matrix element, the
        replace_dict when no writer is given."""

        if not matrix_element.get('processes') or \
               not matrix_element.get('diagrams'):
            return 0

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()

        if ninitial < 1 or ninitial > 2:
            raise writers.FortranWriter.FortranWriterError, \
                  """Need ninitial = 1 or 2 to write auto_dsig file"""

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        # Extract process info lines
        process_lines = self.get_process_info_lines(matrix_element)
        replace_dict['process_lines'] = process_lines

        # Set proc_id
        replace_dict['proc_id'] = proc_id
        replace_dict['numproc'] = 1

        # Set dsig_line
        if ninitial == 1:
            # No conversion, since result of decay should be given in GeV
            dsig_line = "pd(0)*dsiguu"
        else:
            # Convert result (in GeV) to pb
            dsig_line = "pd(0)*conv*dsiguu"

        replace_dict['dsig_line'] = dsig_line

        # Extract pdf lines
        pdf_vars, pdf_data, pdf_lines = \
                  self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
        replace_dict['pdf_vars'] = pdf_vars
        replace_dict['pdf_data'] = pdf_data
        replace_dict['pdf_lines'] = pdf_lines

        # Lines that differ between subprocess group and regular
        if proc_id:
            replace_dict['numproc'] = int(proc_id)
            replace_dict['passcuts_begin'] = ""
            replace_dict['passcuts_end'] = ""
            # Set lines for subprocess group version
            # Set define_iconfigs_lines
            replace_dict['define_subdiag_lines'] = \
                 """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
                 COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        else:
            replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
            replace_dict['passcuts_end'] = "ENDIF"
            replace_dict['define_subdiag_lines'] = ""

        if writer:
            file = open(os.path.join(_file_path, \
                          'iolibs/template_files/auto_dsig_mw.inc')).read()

            file = file % replace_dict
            # Write the file
            writer.writelines(file)
        else:
            return replace_dict

    #===========================================================================
    # write_configs_file
    #===========================================================================
3242 - def write_configs_file(self, writer, matrix_element):
3243 """Write the configs.inc file for MadEvent""" 3244 3245 # Extract number of external particles 3246 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3247 3248 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3249 mapconfigs = [c[0] for c in configs] 3250 model = matrix_element.get('processes')[0].get('model') 3251 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3252 [[c[1]] for c in configs], 3253 mapconfigs, 3254 nexternal, ninitial,matrix_element, model)
3255 3256 #=========================================================================== 3257 # write_run_configs_file 3258 #===========================================================================
3259 - def write_run_config_file(self, writer):
3260 """Write the run_configs.inc file for MadWeight""" 3261 3262 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3263 text = open(path).read() % {'chanperjob':'5'} 3264 writer.write(text) 3265 return True
3266 3267 #=========================================================================== 3268 # write_configs_file_from_diagrams 3269 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s_channels, t_channels] used per written config.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Smallest maximum vertex size over all configs; configs containing a
        # larger (multi-leg) vertex are skipped below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # PDG code guaranteed not to clash with any real particle; used to
        # label fake intermediate propagators.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    # s-channel entry: one vertex per subprocess, pick the
                    # first non-None representative.
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #         (last_leg.get('number'), nconfigs, len(daughters),
                #          ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0  # NOTE(review): unused below
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #         (last_leg.get('number'), nconfigs, nsubprocs,
                    #          ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #         (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #         (last_leg.get('number'), nconfigs,
                    #          abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #         (last_leg.get('number'), nconfigs, nsubprocs,
                    #          ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels

3413 3414 #=============================================================================== 3415 # ProcessExporterFortranME 3416 #=============================================================================== 3417 -class ProcessExporterFortranME(ProcessExporterFortran):
3418 """Class to take care of exporting a set of matrix elements to 3419 MadEvent format.""" 3420 3421 matrix_file = "matrix_madevent_v4.inc" 3422
3423 - def copy_template(self, model):
3424 """Additional actions needed for setup of Template 3425 """ 3426 3427 super(ProcessExporterFortranME, self).copy_template(model) 3428 3429 # File created from Template (Different in some child class) 3430 filename = pjoin(self.dir_path,'Source','run_config.inc') 3431 self.write_run_config_file(writers.FortranWriter(filename)) 3432 3433 # The next file are model dependant (due to SLAH convention) 3434 self.model_name = model.get('name') 3435 # Add the symmetry.f 3436 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3437 self.write_symmetry(writers.FortranWriter(filename)) 3438 # 3439 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3440 self.write_addmothers(writers.FortranWriter(filename)) 3441 # Copy the different python file in the Template 3442 self.copy_python_file()
3443 3444 3445 3446 3447 3448 #=========================================================================== 3449 # generate_subprocess_directory 3450 #===========================================================================
3451 - def copy_python_file(self):
3452 """copy the python file require for the Template""" 3453 3454 # madevent interface 3455 cp(_file_path+'/interface/madevent_interface.py', 3456 self.dir_path+'/bin/internal/madevent_interface.py') 3457 cp(_file_path+'/interface/extended_cmd.py', 3458 self.dir_path+'/bin/internal/extended_cmd.py') 3459 cp(_file_path+'/interface/common_run_interface.py', 3460 self.dir_path+'/bin/internal/common_run_interface.py') 3461 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3462 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3463 cp(_file_path+'/iolibs/save_load_object.py', 3464 self.dir_path+'/bin/internal/save_load_object.py') 3465 cp(_file_path+'/iolibs/file_writers.py', 3466 self.dir_path+'/bin/internal/file_writers.py') 3467 #model file 3468 cp(_file_path+'../models/check_param_card.py', 3469 self.dir_path+'/bin/internal/check_param_card.py') 3470 3471 #copy all the file present in madevent directory 3472 for name in os.listdir(pjoin(_file_path, 'madevent')): 3473 if name not in ['__init__.py'] and name.endswith('.py'): 3474 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3475 3476 #madevent file 3477 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3478 cp(_file_path+'/various/lhe_parser.py', 3479 self.dir_path+'/bin/internal/lhe_parser.py') 3480 cp(_file_path+'/various/banner.py', 3481 self.dir_path+'/bin/internal/banner.py') 3482 cp(_file_path+'/various/histograms.py', 3483 self.dir_path+'/bin/internal/histograms.py') 3484 cp(_file_path+'/various/plot_djrs.py', 3485 self.dir_path+'/bin/internal/plot_djrs.py') 3486 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3487 3488 cp(_file_path+'/various/cluster.py', 3489 self.dir_path+'/bin/internal/cluster.py') 3490 cp(_file_path+'/madevent/combine_runs.py', 3491 self.dir_path+'/bin/internal/combine_runs.py') 3492 # logging configuration 3493 cp(_file_path+'/interface/.mg5_logging.conf', 3494 
self.dir_path+'/bin/internal/me5_logging.conf') 3495 cp(_file_path+'/interface/coloring_logging.py', 3496 self.dir_path+'/bin/internal/coloring_logging.py') 3497 # shower card and FO_analyse_card. 3498 # Although not needed, it is imported by banner.py 3499 cp(_file_path+'/various/shower_card.py', 3500 self.dir_path+'/bin/internal/shower_card.py') 3501 cp(_file_path+'/various/FO_analyse_card.py', 3502 self.dir_path+'/bin/internal/FO_analyse_card.py')
3503 3504
3505 - def convert_model(self, model, wanted_lorentz = [], 3506 wanted_couplings = []):
3507 3508 super(ProcessExporterFortranME,self).convert_model(model, 3509 wanted_lorentz, wanted_couplings) 3510 3511 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3512 try: 3513 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3514 except OSError as error: 3515 pass 3516 model_path = model.get('modelpath') 3517 # This is not safe if there is a '##' or '-' in the path. 3518 shutil.copytree(model_path, 3519 pjoin(self.dir_path,'bin','internal','ufomodel'), 3520 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3521 if hasattr(model, 'restrict_card'): 3522 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3523 'restrict_default.dat') 3524 if isinstance(model.restrict_card, check_param_card.ParamCard): 3525 model.restrict_card.write(out_path) 3526 else: 3527 files.cp(model.restrict_card, out_path)
3528 3529 #=========================================================================== 3530 # export model files 3531 #===========================================================================
3532 - def export_model_files(self, model_path):
3533 """export the model dependent files""" 3534 3535 super(ProcessExporterFortranME,self).export_model_files(model_path) 3536 3537 # Add the routine update_as_param in v4 model 3538 # This is a function created in the UFO 3539 text=""" 3540 subroutine update_as_param() 3541 call setpara('param_card.dat',.false.) 3542 return 3543 end 3544 """ 3545 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3546 ff.write(text) 3547 ff.close() 3548 3549 # Add the symmetry.f 3550 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3551 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3552 3553 # Modify setrun.f 3554 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3555 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3556 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3557 fsock.write(text) 3558 fsock.close() 3559 3560 self.make_model_symbolic_link()
3561 3562 #=========================================================================== 3563 # generate_subprocess_directory 3564 #===========================================================================
3565 - def generate_subprocess_directory(self, matrix_element, 3566 fortran_model, 3567 me_number):
3568 """Generate the Pxxxxx directory for a subprocess in MG4 madevent, 3569 including the necessary matrix.f and various helper files""" 3570 3571 cwd = os.getcwd() 3572 path = pjoin(self.dir_path, 'SubProcesses') 3573 3574 3575 if not self.model: 3576 self.model = matrix_element.get('processes')[0].get('model') 3577 3578 3579 3580 #os.chdir(path) 3581 # Create the directory PN_xx_xxxxx in the specified path 3582 subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string() 3583 try: 3584 os.mkdir(pjoin(path,subprocdir)) 3585 except os.error as error: 3586 logger.warning(error.strerror + " " + subprocdir) 3587 3588 #try: 3589 # os.chdir(subprocdir) 3590 #except os.error: 3591 # logger.error('Could not cd to directory %s' % subprocdir) 3592 # return 0 3593 3594 logger.info('Creating files in directory %s' % subprocdir) 3595 Ppath = pjoin(path, subprocdir) 3596 3597 # Extract number of external particles 3598 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3599 3600 # Add the driver.f 3601 ncomb = matrix_element.get_helicity_combinations() 3602 filename = pjoin(Ppath,'driver.f') 3603 self.write_driver(writers.FortranWriter(filename),ncomb,n_grouped_proc=1, 3604 v5=self.opt['v5_model']) 3605 3606 # Create the matrix.f file, auto_dsig.f file and all inc files 3607 filename = pjoin(Ppath, 'matrix.f') 3608 calls, ncolor = \ 3609 self.write_matrix_element_v4(writers.FortranWriter(filename), 3610 matrix_element, fortran_model, subproc_number = me_number) 3611 3612 filename = pjoin(Ppath, 'auto_dsig.f') 3613 self.write_auto_dsig_file(writers.FortranWriter(filename), 3614 matrix_element) 3615 3616 filename = pjoin(Ppath, 'configs.inc') 3617 mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\ 3618 writers.FortranWriter(filename), 3619 matrix_element) 3620 3621 filename = pjoin(Ppath, 'config_nqcd.inc') 3622 self.write_config_nqcd_file(writers.FortranWriter(filename), 3623 nqcd_list) 3624 3625 filename = pjoin(Ppath, 
'config_subproc_map.inc') 3626 self.write_config_subproc_map_file(writers.FortranWriter(filename), 3627 s_and_t_channels) 3628 3629 filename = pjoin(Ppath, 'coloramps.inc') 3630 self.write_coloramps_file(writers.FortranWriter(filename), 3631 mapconfigs, 3632 matrix_element) 3633 3634 filename = pjoin(Ppath, 'get_color.f') 3635 self.write_colors_file(writers.FortranWriter(filename), 3636 matrix_element) 3637 3638 filename = pjoin(Ppath, 'decayBW.inc') 3639 self.write_decayBW_file(writers.FortranWriter(filename), 3640 s_and_t_channels) 3641 3642 filename = pjoin(Ppath, 'dname.mg') 3643 self.write_dname_file(writers.FileWriter(filename), 3644 "P"+matrix_element.get('processes')[0].shell_string()) 3645 3646 filename = pjoin(Ppath, 'iproc.dat') 3647 self.write_iproc_file(writers.FortranWriter(filename), 3648 me_number) 3649 3650 filename = pjoin(Ppath, 'leshouche.inc') 3651 self.write_leshouche_file(writers.FortranWriter(filename), 3652 matrix_element) 3653 3654 filename = pjoin(Ppath, 'maxamps.inc') 3655 self.write_maxamps_file(writers.FortranWriter(filename), 3656 len(matrix_element.get('diagrams')), 3657 ncolor, 3658 len(matrix_element.get('processes')), 3659 1) 3660 3661 filename = pjoin(Ppath, 'mg.sym') 3662 self.write_mg_sym_file(writers.FortranWriter(filename), 3663 matrix_element) 3664 3665 filename = pjoin(Ppath, 'ncombs.inc') 3666 self.write_ncombs_file(writers.FortranWriter(filename), 3667 nexternal) 3668 3669 filename = pjoin(Ppath, 'nexternal.inc') 3670 self.write_nexternal_file(writers.FortranWriter(filename), 3671 nexternal, ninitial) 3672 3673 filename = pjoin(Ppath, 'ngraphs.inc') 3674 self.write_ngraphs_file(writers.FortranWriter(filename), 3675 len(mapconfigs)) 3676 3677 3678 filename = pjoin(Ppath, 'pmass.inc') 3679 self.write_pmass_file(writers.FortranWriter(filename), 3680 matrix_element) 3681 3682 filename = pjoin(Ppath, 'props.inc') 3683 self.write_props_file(writers.FortranWriter(filename), 3684 matrix_element, 3685 s_and_t_channels) 3686 3687 # 
Find config symmetries and permutations 3688 symmetry, perms, ident_perms = \ 3689 diagram_symmetry.find_symmetry(matrix_element) 3690 3691 filename = pjoin(Ppath, 'symswap.inc') 3692 self.write_symswap_file(writers.FortranWriter(filename), 3693 ident_perms) 3694 3695 filename = pjoin(Ppath, 'symfact_orig.dat') 3696 self.write_symfact_file(open(filename, 'w'), symmetry) 3697 3698 # Generate diagrams 3699 filename = pjoin(Ppath, "matrix.ps") 3700 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 3701 get('diagrams'), 3702 filename, 3703 model=matrix_element.get('processes')[0].\ 3704 get('model'), 3705 amplitude=True) 3706 logger.info("Generating Feynman diagrams for " + \ 3707 matrix_element.get('processes')[0].nice_string()) 3708 plot.draw() 3709 3710 self.link_files_in_SubProcess(Ppath) 3711 3712 #import nexternal/leshouche in Source 3713 ln(pjoin(Ppath,'nexternal.inc'), pjoin(self.dir_path,'Source'), log=False) 3714 ln(pjoin(Ppath,'leshouche.inc'), pjoin(self.dir_path,'Source'), log=False) 3715 ln(pjoin(Ppath,'maxamps.inc'), pjoin(self.dir_path,'Source'), log=False) 3716 # Return to SubProcesses dir 3717 #os.chdir(os.path.pardir) 3718 3719 # Add subprocess to subproc.mg 3720 filename = pjoin(path, 'subproc.mg') 3721 files.append_to_file(filename, 3722 self.write_subproc, 3723 subprocdir) 3724 3725 # Return to original dir 3726 #os.chdir(cwd) 3727 3728 # Generate info page 3729 gen_infohtml.make_info_html(self.dir_path) 3730 3731 3732 if not calls: 3733 calls = 0 3734 return calls
3735 3736 link_Sub_files = ['addmothers.f', 3737 'cluster.f', 3738 'cluster.inc', 3739 'coupl.inc', 3740 'cuts.f', 3741 'cuts.inc', 3742 'genps.f', 3743 'genps.inc', 3744 'idenparts.f', 3745 'initcluster.f', 3746 'makefile', 3747 'message.inc', 3748 'myamp.f', 3749 'reweight.f', 3750 'run.inc', 3751 'maxconfigs.inc', 3752 'maxparticles.inc', 3753 'run_config.inc', 3754 'lhe_event_infos.inc', 3755 'setcuts.f', 3756 'setscales.f', 3757 'sudakov.inc', 3758 'symmetry.f', 3759 'unwgt.f', 3760 'dummy_fct.f' 3761 ] 3762 3776 3777
3778 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3779 """Finalize ME v4 directory by creating jpeg diagrams, html 3780 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3781 3782 if 'nojpeg' in flaglist: 3783 makejpg = False 3784 else: 3785 makejpg = True 3786 if 'online' in flaglist: 3787 online = True 3788 else: 3789 online = False 3790 3791 compiler = {'fortran': mg5options['fortran_compiler'], 3792 'cpp': mg5options['cpp_compiler'], 3793 'f2py': mg5options['f2py_compiler']} 3794 3795 # indicate that the output type is not grouped 3796 if not isinstance(self, ProcessExporterFortranMEGroup): 3797 self.proc_characteristic['grouped_matrix'] = False 3798 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3799 # indicate the PDG of all initial particle 3800 try: 3801 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3802 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3803 except AttributeError: 3804 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3805 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3806 self.proc_characteristic['pdg_initial1'] = pdgs1 3807 self.proc_characteristic['pdg_initial2'] = pdgs2 3808 3809 3810 modelname = self.opt['model'] 3811 if modelname == 'mssm' or modelname.startswith('mssm-'): 3812 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3813 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3814 check_param_card.convert_to_mg5card(param_card, mg5_param) 3815 check_param_card.check_valid_param_card(mg5_param) 3816 3817 # Add the combine_events.f modify param_card path/number of @X 3818 filename = pjoin(self.dir_path,'Source','combine_events.f') 3819 try: 3820 nb_proc =[p.get('id') 
for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3821 except AttributeError: 3822 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3823 nb_proc = len(set(nb_proc)) 3824 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3825 # Write maxconfigs.inc based on max of ME's/subprocess groups 3826 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3827 self.write_maxconfigs_file(writers.FortranWriter(filename), 3828 matrix_elements) 3829 3830 # Write maxparticles.inc based on max of ME's/subprocess groups 3831 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3832 self.write_maxparticles_file(writers.FortranWriter(filename), 3833 matrix_elements) 3834 3835 # Touch "done" file 3836 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3837 3838 # Check for compiler 3839 self.set_compiler(compiler) 3840 self.set_cpp_compiler(compiler['cpp']) 3841 3842 3843 old_pos = os.getcwd() 3844 subpath = pjoin(self.dir_path, 'SubProcesses') 3845 3846 P_dir_list = [proc for proc in os.listdir(subpath) 3847 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3848 3849 devnull = os.open(os.devnull, os.O_RDWR) 3850 # Convert the poscript in jpg files (if authorize) 3851 if makejpg: 3852 try: 3853 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3854 except Exception, error: 3855 pass 3856 3857 if misc.which('gs'): 3858 logger.info("Generate jpeg diagrams") 3859 for Pdir in P_dir_list: 3860 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3861 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3862 3863 logger.info("Generate web pages") 3864 # Create the WebPage using perl script 3865 3866 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3867 stdout = devnull,cwd=pjoin(self.dir_path)) 3868 3869 #os.chdir(os.path.pardir) 3870 3871 obj = gen_infohtml.make_info_html(self.dir_path) 3872 3873 if 
online: 3874 nb_channel = obj.rep_rule['nb_gen_diag'] 3875 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3876 #add the information to proc_charac 3877 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3878 3879 # Write command history as proc_card_mg5 3880 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3881 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3882 history.write(output_file) 3883 3884 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3885 stdout = devnull) 3886 3887 #crate the proc_characteristic file 3888 self.create_proc_charac(matrix_elements, history) 3889 3890 # create the run_card 3891 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3892 3893 # Run "make" to generate madevent.tar.gz file 3894 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3895 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 3896 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 3897 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 3898 stdout = devnull, cwd=self.dir_path) 3899 3900 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3901 stdout = devnull, cwd=self.dir_path)
3902 3903 3904 3905 3906 3907 3908 #return to the initial dir 3909 #os.chdir(old_pos) 3910 3911 #=========================================================================== 3912 # write_matrix_element_v4 3913 #===========================================================================
3914 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 3915 proc_id = "", config_map = [], subproc_number = ""):
3916 """Export a matrix element to a matrix.f file in MG4 madevent format""" 3917 3918 if not matrix_element.get('processes') or \ 3919 not matrix_element.get('diagrams'): 3920 return 0 3921 3922 if writer: 3923 if not isinstance(writer, writers.FortranWriter): 3924 raise writers.FortranWriter.FortranWriterError(\ 3925 "writer not FortranWriter") 3926 # Set lowercase/uppercase Fortran code 3927 writers.FortranWriter.downcase = False 3928 3929 # The proc prefix is not used for MadEvent output so it can safely be set 3930 # to an empty string. 3931 replace_dict = {'proc_prefix':''} 3932 3933 # Extract helas calls 3934 helas_calls = fortran_model.get_matrix_element_calls(\ 3935 matrix_element) 3936 3937 replace_dict['helas_calls'] = "\n".join(helas_calls) 3938 3939 3940 # Extract version number and date from VERSION file 3941 info_lines = self.get_mg5_info_lines() 3942 replace_dict['info_lines'] = info_lines 3943 3944 # Extract process info lines 3945 process_lines = self.get_process_info_lines(matrix_element) 3946 replace_dict['process_lines'] = process_lines 3947 3948 # Set proc_id 3949 replace_dict['proc_id'] = proc_id 3950 3951 # Extract ncomb 3952 ncomb = matrix_element.get_helicity_combinations() 3953 replace_dict['ncomb'] = ncomb 3954 3955 # Extract helicity lines 3956 helicity_lines = self.get_helicity_lines(matrix_element) 3957 replace_dict['helicity_lines'] = helicity_lines 3958 3959 # Extract IC line 3960 ic_line = self.get_ic_line(matrix_element) 3961 replace_dict['ic_line'] = ic_line 3962 3963 # Extract overall denominator 3964 # Averaging initial state color, spin, and identical FS particles 3965 den_factor_line = self.get_den_factor_line(matrix_element) 3966 replace_dict['den_factor_line'] = den_factor_line 3967 3968 # Extract ngraphs 3969 ngraphs = matrix_element.get_number_of_amplitudes() 3970 replace_dict['ngraphs'] = ngraphs 3971 3972 # Extract ndiags 3973 ndiags = len(matrix_element.get('diagrams')) 3974 replace_dict['ndiags'] = ndiags 3975 3976 # 
Set define_iconfigs_lines 3977 replace_dict['define_iconfigs_lines'] = \ 3978 """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG 3979 COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG""" 3980 3981 if proc_id: 3982 # Set lines for subprocess group version 3983 # Set define_iconfigs_lines 3984 replace_dict['define_iconfigs_lines'] += \ 3985 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3986 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3987 # Set set_amp2_line 3988 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \ 3989 proc_id 3990 else: 3991 # Standard running 3992 # Set set_amp2_line 3993 replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT" 3994 3995 # Extract nwavefuncs 3996 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3997 replace_dict['nwavefuncs'] = nwavefuncs 3998 3999 # Extract ncolor 4000 ncolor = max(1, len(matrix_element.get('color_basis'))) 4001 replace_dict['ncolor'] = ncolor 4002 4003 # Extract color data lines 4004 color_data_lines = self.get_color_data_lines(matrix_element) 4005 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 4006 4007 4008 # Set the size of Wavefunction 4009 if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]): 4010 replace_dict['wavefunctionsize'] = 18 4011 else: 4012 replace_dict['wavefunctionsize'] = 6 4013 4014 # Extract amp2 lines 4015 amp2_lines = self.get_amp2_lines(matrix_element, config_map) 4016 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 4017 4018 # The JAMP definition depends on the splitting order 4019 split_orders=matrix_element.get('processes')[0].get('split_orders') 4020 if len(split_orders)>0: 4021 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 4022 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 4023 matrix_element.get('processes')[0],squared_orders) 4024 else: 4025 # Consider the output of a dummy order 'ALL_ORDERS' for which we 4026 # set all amplitude order to weight 1 and only one squared order 4027 # 
contribution which is of course ALL_ORDERS=2. 4028 squared_orders = [(2,),] 4029 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 4030 replace_dict['chosen_so_configs'] = '.TRUE.' 4031 4032 replace_dict['nAmpSplitOrders']=len(amp_orders) 4033 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 4034 replace_dict['split_order_str_list']=str(split_orders) 4035 replace_dict['nSplitOrders']=max(len(split_orders),1) 4036 amp_so = self.get_split_orders_lines( 4037 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 4038 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 4039 replace_dict['ampsplitorders']='\n'.join(amp_so) 4040 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 4041 4042 4043 # Extract JAMP lines 4044 # If no split_orders then artificiall add one entry called 'ALL_ORDERS' 4045 jamp_lines = self.get_JAMP_lines_split_order(\ 4046 matrix_element,amp_orders,split_order_names= 4047 split_orders if len(split_orders)>0 else ['ALL_ORDERS']) 4048 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 4049 4050 replace_dict['template_file'] = pjoin(_file_path, \ 4051 'iolibs/template_files/%s' % self.matrix_file) 4052 replace_dict['template_file2'] = pjoin(_file_path, \ 4053 'iolibs/template_files/split_orders_helping_functions.inc') 4054 if writer: 4055 file = open(replace_dict['template_file']).read() 4056 file = file % replace_dict 4057 # Add the split orders helper functions. 4058 file = file + '\n' + open(replace_dict['template_file2'])\ 4059 .read()%replace_dict 4060 # Write the file 4061 writer.writelines(file) 4062 return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor 4063 else: 4064 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor) 4065 return replace_dict
4066 4067 #=========================================================================== 4068 # write_auto_dsig_file 4069 #===========================================================================
4070 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
4071 """Write the auto_dsig.f file for the differential cross section 4072 calculation, includes pdf call information""" 4073 4074 if not matrix_element.get('processes') or \ 4075 not matrix_element.get('diagrams'): 4076 return 0 4077 4078 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 4079 self.proc_characteristic['ninitial'] = ninitial 4080 self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal) 4081 4082 # Add information relevant for MLM matching: 4083 # Maximum QCD power in all the contributions 4084 max_qcd_order = 0 4085 for diag in matrix_element.get('diagrams'): 4086 orders = diag.calculate_orders() 4087 if 'QCD' in orders: 4088 max_qcd_order = max(max_qcd_order,orders['QCD']) 4089 max_n_light_final_partons = max(len([1 for id in proc.get_final_ids() 4090 if proc.get('model').get_particle(id).get('mass')=='ZERO' and 4091 proc.get('model').get_particle(id).get('color')>1]) 4092 for proc in matrix_element.get('processes')) 4093 # Maximum number of final state light jets to be matched 4094 self.proc_characteristic['max_n_matched_jets'] = max( 4095 self.proc_characteristic['max_n_matched_jets'], 4096 min(max_qcd_order,max_n_light_final_partons)) 4097 4098 # List of default pdgs to be considered for the CKKWl merging cut 4099 self.proc_characteristic['colored_pdgs'] = \ 4100 sorted(list(set([abs(p.get('pdg_code')) for p in 4101 matrix_element.get('processes')[0].get('model').get('particles') if 4102 p.get('color')>1]))) 4103 4104 if ninitial < 1 or ninitial > 2: 4105 raise writers.FortranWriter.FortranWriterError, \ 4106 """Need ninitial = 1 or 2 to write auto_dsig file""" 4107 4108 replace_dict = {} 4109 4110 # Extract version number and date from VERSION file 4111 info_lines = self.get_mg5_info_lines() 4112 replace_dict['info_lines'] = info_lines 4113 4114 # Extract process info lines 4115 process_lines = self.get_process_info_lines(matrix_element) 4116 replace_dict['process_lines'] = process_lines 4117 
4118 # Set proc_id 4119 replace_dict['proc_id'] = proc_id 4120 replace_dict['numproc'] = 1 4121 4122 # Set dsig_line 4123 if ninitial == 1: 4124 # No conversion, since result of decay should be given in GeV 4125 dsig_line = "pd(0)*dsiguu" 4126 else: 4127 # Convert result (in GeV) to pb 4128 dsig_line = "pd(0)*conv*dsiguu" 4129 4130 replace_dict['dsig_line'] = dsig_line 4131 4132 # Extract pdf lines 4133 pdf_vars, pdf_data, pdf_lines = \ 4134 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 4135 replace_dict['pdf_vars'] = pdf_vars 4136 replace_dict['pdf_data'] = pdf_data 4137 replace_dict['pdf_lines'] = pdf_lines 4138 4139 # Lines that differ between subprocess group and regular 4140 if proc_id: 4141 replace_dict['numproc'] = int(proc_id) 4142 replace_dict['passcuts_begin'] = "" 4143 replace_dict['passcuts_end'] = "" 4144 # Set lines for subprocess group version 4145 # Set define_iconfigs_lines 4146 replace_dict['define_subdiag_lines'] = \ 4147 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 4148 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 4149 replace_dict['cutsdone'] = "" 4150 else: 4151 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 4152 replace_dict['passcuts_end'] = "ENDIF" 4153 replace_dict['define_subdiag_lines'] = "" 4154 replace_dict['cutsdone'] = " cutsdone=.false.\n cutspassed=.false." 4155 4156 if not isinstance(self, ProcessExporterFortranMEGroup): 4157 ncomb=matrix_element.get_helicity_combinations() 4158 replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb) 4159 else: 4160 replace_dict['read_write_good_hel'] = "" 4161 4162 context = {'read_write_good_hel':True} 4163 4164 if writer: 4165 file = open(pjoin(_file_path, \ 4166 'iolibs/template_files/auto_dsig_v4.inc')).read() 4167 file = file % replace_dict 4168 4169 # Write the file 4170 writer.writelines(file, context=context) 4171 else: 4172 return replace_dict, context
4173 #=========================================================================== 4174 # write_coloramps_file 4175 #===========================================================================
4176 - def write_coloramps_file(self, writer, mapconfigs, matrix_element):
4177 """Write the coloramps.inc file for MadEvent""" 4178 4179 lines = self.get_icolamp_lines(mapconfigs, matrix_element, 1) 4180 lines.insert(0, "logical icolamp(%d,%d,1)" % \ 4181 (max(len(matrix_element.get('color_basis').keys()), 1), 4182 len(mapconfigs))) 4183 4184 4185 # Write the file 4186 writer.writelines(lines) 4187 4188 return True
4189 4190 #=========================================================================== 4191 # write_colors_file 4192 #===========================================================================
4193 - def write_colors_file(self, writer, matrix_elements):
4194 """Write the get_color.f file for MadEvent, which returns color 4195 for all particles used in the matrix element.""" 4196 4197 if isinstance(matrix_elements, helas_objects.HelasMatrixElement): 4198 matrix_elements = [matrix_elements] 4199 4200 model = matrix_elements[0].get('processes')[0].get('model') 4201 4202 # We need the both particle and antiparticle wf_ids, since the identity 4203 # depends on the direction of the wf. 4204 wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \ 4205 for wf in d.get('wavefunctions')],[]) \ 4206 for d in me.get('diagrams')], []) \ 4207 for me in matrix_elements], [])) 4208 4209 leg_ids = set(sum([sum([sum([[l.get('id'), 4210 model.get_particle(l.get('id')).get_anti_pdg_code()] \ 4211 for l in p.get_legs_with_decays()], []) \ 4212 for p in me.get('processes')], []) \ 4213 for me in matrix_elements], [])) 4214 particle_ids = sorted(list(wf_ids.union(leg_ids))) 4215 4216 lines = """function get_color(ipdg) 4217 implicit none 4218 integer get_color, ipdg 4219 4220 if(ipdg.eq.%d)then 4221 get_color=%d 4222 return 4223 """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color()) 4224 4225 for part_id in particle_ids[1:]: 4226 lines += """else if(ipdg.eq.%d)then 4227 get_color=%d 4228 return 4229 """ % (part_id, model.get_particle(part_id).get_color()) 4230 # Dummy particle for multiparticle vertices with pdg given by 4231 # first code not in the model 4232 lines += """else if(ipdg.eq.%d)then 4233 c This is dummy particle used in multiparticle vertices 4234 get_color=2 4235 return 4236 """ % model.get_first_non_pdg() 4237 lines += """else 4238 write(*,*)'Error: No color given for pdg ',ipdg 4239 get_color=0 4240 return 4241 endif 4242 end 4243 """ 4244 4245 # Write the file 4246 writer.writelines(lines) 4247 4248 return True
4249 4250 #=========================================================================== 4251 # write_config_nqcd_file 4252 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write the config_nqcd.inc file, recording the number of QCD
    couplings for each configuration.

    nqcd_list: number of QCD couplings per config, in config order.
    """

    # One Fortran DATA statement per config; arrays are 1-based.
    lines = ["data nqcd(%d)/%d/" % (iconf + 1, n)
             for iconf, n in enumerate(nqcd_list)]

    # Write the file
    writer.writelines(lines)

    return True
4265 4266 #=========================================================================== 4267 # write_maxconfigs_file 4268 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write the maxconfigs.inc file for MadEvent, declaring
    lmaxconfigs as the largest number of configs over all matrix
    elements."""

    # A HelasMultiProcess wraps its matrix elements; a plain list
    # of matrix elements is iterated directly.
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxconfigs = max(me.get_num_configs() for me in me_list)

    content = "integer lmaxconfigs\n"
    content += "parameter(lmaxconfigs=%d)" % maxconfigs

    # Write the file
    writer.writelines(content)

    return True
4285 4286 #=========================================================================== 4287 # read_write_good_hel 4288 #===========================================================================
def read_write_good_hel(self, ncomb):
    """return the code to read/write the good_hel common_block"""

    # ncomb: number of helicity combinations; fixes NCOMB in the
    # generated Fortran below.
    convert = {'ncomb' : ncomb}
    # Fortran routines dumping/restoring the GOODHEL helicity filter
    # and the NTRY counter shared via common/BLOCK_GOODHEL/.  After a
    # restore, NTRY is set past MAXTRIES so the filter is not retrained.
    # NOTE(review): internal whitespace of this template cannot be
    # verified from this rendering; presumably FortranWriter normalizes
    # the layout on output -- confirm.
    output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB)
        INTEGER NTRY
        INTEGER I

        do i=1,NCOMB
        GOODHEL(I) = .false.
        enddo
        NTRY = 0
        end

        integer function get_maxsproc()
        implicit none
        get_maxsproc = 1
        return
        end

        """ % convert

    return output
4344 4345 #=========================================================================== 4346 # write_config_subproc_map_file 4347 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc file for MadEvent: in
    single-subprocess mode every config maps to subprocess 1."""

    lines = ["DATA CONFSUB(1,%d)/1/" % (iconfig + 1)
             for iconfig in range(len(s_and_t_channels))]

    # Write the file
    writer.writelines(lines)

    return True
4361 4362 #=========================================================================== 4363 # write_configs_file 4364 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Returns (mapconfigs, result-of-write_configs_file_from_diagrams).
    """

    # Number of external particles and the model of the first process
    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    model = matrix_element.get('processes')[0].get('model')

    # Each diagram defines one config; configs are numbered from 1.
    diagrams = matrix_element.get('diagrams')
    mapconfigs = list(range(1, len(diagrams) + 1))

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer,
        [[d] for d in diagrams],
        mapconfigs,
        nexternal, ninitial,
        model)
4380 4381 #=========================================================================== 4382 # write_run_configs_file 4383 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent.

    Returns True after writing, or the replacement dictionary when
    no writer is supplied."""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_run_config.inc')

    # Loop-induced processes get a single channel per job.
    job_per_chan = 1 if self.proc_characteristic['loop_induced'] else 5

    if writer:
        writer.write(open(path).read() % {'chanperjob': job_per_chan})
        return True
    return {'chanperjob': job_per_chan}
4400 4401 #=========================================================================== 4402 # write_configs_file_from_diagrams 4403 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses."""

    lines = []

    # Per config: [s-channel vertices of first contributing diagram,
    # shared t-channel vertices] -- returned to the caller.
    s_and_t_channels = []

    # Number of QCD couplings per written config -- also returned.
    nqcd_list = []

    # Largest vertex size appearing in each config (taken from the
    # first non-None diagram); minvert is the smallest of these.
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    # Running count of configs actually written (some are skipped).
    nconfigs = 0

    # Fictitious PDG code used for multiparticle vertices.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config.
        # empty_verts is shared (by reference) between all subprocesses
        # with no diagram for this config, and padded in-place below.
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram, taken from the
        # first non-None diagram (KeyError means no QCD order).
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            # s-channel entries are per-subprocess tuples; t-channel
            # entries are single vertices.
            if verts in schannels:
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # One propagator PDG per subprocess (0 if absent).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4537 4538 #=========================================================================== 4539 # write_decayBW_file 4540 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent, flagging for each
    s-channel propagator whether it must be forced on-shell."""

    # Map the onshell flag of a leg to MadEvent's gForceBW code.
    booldict = {None: "0", True: "1", False: "2"}

    lines = []
    for iconf, config in enumerate(s_and_t_channels):
        # config[0] holds the s-channel vertices of this config.
        for vertex in config[0]:
            # The resulting (last) leg carries the onshell flag.
            prop = vertex.get('legs')[-1]
            lines.append("data gForceBW(%d,%d)/%s/" %
                         (prop.get('number'), iconf + 1,
                          booldict[prop.get('onshell')]))

    # Write the file
    writer.writelines(lines)

    return True
4562 4563 #=========================================================================== 4564 # write_dname_file 4565 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write the dname.mg file used by MG4 to record the subprocess
    directory name."""

    # Write the single DIRNAME line
    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4575 4576 #=========================================================================== 4577 # write_driver 4578 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4.

    Returns True after writing, or the replacement dictionary when
    no writer is supplied."""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_driver.f')

    # MSSM models use a translated parameter card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Each helicity configuration is probed by 10 matrix-element points
    # before the grid is used for MC over helicity sampling; the factor
    # 2 is because each grouped subprocess is called at most twice per
    # IMIRROR.
    replace_dict = {'param_card_name': card,
                    'ncomb': ncomb,
                    'hel_init_points': n_grouped_proc * 10 * 2,
                    'secondparam': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict
    writer.write(open(path).read() % replace_dict)
    return True
4607 4608 #=========================================================================== 4609 # write_addmothers 4610 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f file, with the config index
    variable set to 'diag_number'."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'diag_number'})

    return True
4620 4621 4622 #=========================================================================== 4623 # write_combine_events 4624 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the SubProcess combine_events file for MG4.

    nb_proc: number of @X process classes in the process card; sets
    maxpup in the template."""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_combine_events.f')

    # MSSM models use a translated parameter card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # The +1 on maxpup is only a safety margin (per original author).
    writer.write(open(path).read() % {'param_card_name': card,
                                      'maxpup': nb_proc + 1})

    return True
4642 4643 4644 #=========================================================================== 4645 # write_symmetry 4646 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the madevent_symmetry.f file for ME.

    Returns True after writing, or the replacement dictionary when
    no writer is supplied."""

    path = pjoin(_file_path, 'iolibs', 'template_files',
                 'madevent_symmetry.f')

    # MSSM models use a translated parameter card.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card = 'Source/MODEL/MG5_param.dat'
    else:
        card = 'param_card.dat'

    # Non-v5 models pass an extra argument to setpara.
    replace_dict = {'param_card_name': card,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if writer:
        writer.write(open(path).read() % replace_dict)
        return True
    return replace_dict
4669 4670 4671 4672 #=========================================================================== 4673 # write_iproc_file 4674 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4, containing the 1-based
    process number."""

    content = "%d" % (me_number + 1)

    # write_line may split the content into several physical lines.
    for piece in writer.write_line(content):
        writer.write(piece)
    return True
4683 4684 #=========================================================================== 4685 # write_mg_sym_file 4686 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    Records, for each set of identical final-state particles, the
    1-based leg positions (counting initial-state legs first), so
    MadEvent can apply the proper symmetry factors.
    """

    lines = []

    # Process with all decays included
    proc = matrix_element.get('processes')[0]

    # Use list comprehensions instead of bare filter(): len(filter(...))
    # and deleting from dict.keys() while iterating only work on
    # Python 2; these forms behave identically on both 2 and 3.
    final_legs = [leg for leg in proc.get_legs_with_decays()
                  if leg.get('state') == True]

    ninitial = len([leg for leg in proc.get('legs')
                    if leg.get('state') == False])

    # Group leg positions of identical final-state particles by PDG id
    identical_indices = {}
    for i, leg in enumerate(final_legs):
        identical_indices.setdefault(leg.get('id'), []).append(
            i + ninitial + 1)

    # Remove ids that occur only once; iterate a copy of the keys
    # since we delete entries while looping.
    for key in list(identical_indices.keys()):
        if len(identical_indices[key]) < 2:
            del identical_indices[key]

    # File format: number of groups, then for each group its size
    # followed by the leg positions.
    lines.append(str(len(identical_indices.keys())))
    for key in identical_indices.keys():
        lines.append(str(len(identical_indices[key])))
        for number in identical_indices[key]:
            lines.append(str(number))

    # Write the file
    writer.writelines(lines)

    return True
4725 4726 #=========================================================================== 4727 # write_mg_sym_file 4728 #===========================================================================
def write_default_mg_sym_file(self, writer):
    """Write a trivial mg.sym file for MadEvent (zero groups of
    identical particles)."""

    # Write the file: just the group count
    writer.writelines("0")

    return True
4738 4739 #=========================================================================== 4740 # write_ncombs_file 4741 #===========================================================================
def write_ncombs_file(self, writer, nexternal):
    """Write the ncombs.inc file for MadEvent.

    n_max_cl (used for clustering) is 2**nexternal."""

    content = " integer n_max_cl\n"
    content += "parameter (n_max_cl=%d)" % (2 ** nexternal)

    # Write the file
    writer.writelines(content)

    return True
4753 4754 #=========================================================================== 4755 # write_processes_file 4756 #===========================================================================
def write_processes_file(self, writer, subproc_group):
    """Write the processes.dat file with info about the subprocesses
    in this group."""

    lines = []

    for ime, me in enumerate(subproc_group.get('matrix_elements')):
        # Left-justify the 1-based process number in a 7-char field.
        index = str(ime + 1)
        lines.append("%s %s" % (index + " " * (7 - len(index)),
                                ",".join(p.base_string()
                                         for p in me.get('processes'))))
        if me.get('has_mirror_process'):
            # Mirror processes have the two initial-state legs swapped.
            mirror_procs = [copy.copy(p) for p in me.get('processes')]
            for proc in mirror_procs:
                legs = copy.copy(proc.get('legs_with_decays'))
                legs.insert(0, legs.pop(1))
                proc.set("legs_with_decays", legs)
            lines.append("mirror %s" % ",".join(p.base_string()
                                                for p in mirror_procs))
        else:
            lines.append("mirror none")

    # Write the file
    writer.write("\n".join(lines))

    return True
4783 4784 #=========================================================================== 4785 # write_symswap_file 4786 #===========================================================================
def write_symswap_file(self, writer, ident_perms):
    """Write the symswap.inc file for MG4, listing the external-momentum
    permutations under which the matrix element is identical."""

    lines = []
    # Permutation entries are converted to 1-based Fortran indices.
    for iperm, perm in enumerate(ident_perms):
        lines.append("data (isym(i,%d),i=1,nexternal)/%s/" %
                     (iperm + 1, ",".join(str(i + 1) for i in perm)))
    lines.append("data nsym/%d/" % len(ident_perms))

    # Write the file
    writer.writelines(lines)

    return True
4804 4805 #=========================================================================== 4806 # write_symfact_file 4807 #===========================================================================
def write_symfact_file(self, writer, symmetry):
    """Write the symfact.dat file for MG4: one line per config with a
    nonzero symmetry entry, formatted as '<config> <factor>'."""

    # Field width grows with the number of configs (at least 2).
    width = max(2, int(math.ceil(math.log10(len(symmetry)))))
    fmt = "%" + str(width) + "r %" + str(width + 1) + "r"
    # Skip configs whose symmetry entry is 0 (merged away).
    lines = [fmt % (i + 1, s) for i, s in enumerate(symmetry) if s != 0]

    # Write the file
    writer.write('\n'.join(lines))
    writer.write('\n')

    return True
4822 4823 #=========================================================================== 4824 # write_symperms_file 4825 #===========================================================================
def write_symperms_file(self, writer, perms):
    """Write the symperms.inc file for a subprocess group, used for
    symmetric configurations."""

    # Permutation entries are converted to 1-based Fortran indices.
    lines = ["data (perms(i,%d),i=1,nexternal)/%s/" %
             (iperm + 1, ",".join(str(i + 1) for i in perm))
             for iperm, perm in enumerate(perms)]

    # Write the file
    writer.writelines(lines)

    return True
4839 4840 #=========================================================================== 4841 # write_subproc 4842 #===========================================================================
def write_subproc(self, writer, subprocdir):
    """Append this subprocess directory name to the subproc.mg file
    for MG4."""

    # One directory name per line
    writer.write("%s\n" % subprocdir)

    return True
4850
4851 #=============================================================================== 4852 # ProcessExporterFortranMEGroup 4853 #=============================================================================== 4854 -class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
4855 """Class to take care of exporting a set of matrix elements to 4856 MadEvent subprocess group format.""" 4857 4858 matrix_file = "matrix_madevent_group_v4.inc" 4859 grouped_mode = 'madevent' 4860 #=========================================================================== 4861 # generate_subprocess_directory 4862 #===========================================================================
def generate_subprocess_directory(self, subproc_group,
                                  fortran_model,
                                  group_number):
    """Generate the Pn directory for a subprocess group in MadEvent,
    including the necessary matrix_N.f files, configs.inc and various
    other helper files.

    Returns the total number of helas calls, or 0 if the directory
    could not be entered.  Changes the working directory while running
    and restores it before returning."""

    assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
           "subproc_group object not SubProcessGroup"

    # Lazily pick up the model from the first process if not yet set
    if not self.model:
        self.model = subproc_group.get('matrix_elements')[0].\
                     get('processes')[0].get('model')

    cwd = os.getcwd()
    path = pjoin(self.dir_path, 'SubProcesses')

    os.chdir(path)
    pathdir = os.getcwd()

    # Create the directory PN in the specified path
    subprocdir = "P%d_%s" % (subproc_group.get('number'),
                             subproc_group.get('name'))
    try:
        os.mkdir(subprocdir)
    except os.error as error:
        # Directory may already exist; warn and continue
        logger.warning(error.strerror + " " + subprocdir)

    try:
        os.chdir(subprocdir)
    except os.error:
        logger.error('Could not cd to directory %s' % subprocdir)
        return 0

    logger.info('Creating files in directory %s' % subprocdir)

    # Create the matrix.f files, auto_dsig.f files and all inc files
    # for all subprocesses in the group

    maxamps = 0
    maxflows = 0
    tot_calls = 0

    matrix_elements = subproc_group.get('matrix_elements')

    # Add the driver.f, all grouped ME's must share the same number of
    # helicity configuration
    ncomb = matrix_elements[0].get_helicity_combinations()
    for me in matrix_elements[1:]:
        if ncomb!=me.get_helicity_combinations():
            raise MadGraph5Error, "All grouped processes must share the "+\
                "same number of helicity configurations."

    filename = 'driver.f'
    self.write_driver(writers.FortranWriter(filename),ncomb,
                      n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

    # Per matrix element: matrix_N.f, auto_dsigN.f and the diagram
    # postscript file
    for ime, matrix_element in \
            enumerate(matrix_elements):
        filename = 'matrix%d.f' % (ime+1)
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                         matrix_element,
                                         fortran_model,
                                         proc_id=str(ime+1),
                                         config_map=subproc_group.get('diagram_maps')[ime],
                                         subproc_number=group_number)

        filename = 'auto_dsig%d.f' % (ime+1)
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element,
                                  str(ime+1))

        # Keep track of needed quantities
        tot_calls += int(calls)
        maxflows = max(maxflows, ncolor)
        maxamps = max(maxamps, len(matrix_element.get('diagrams')))

        # Draw diagrams
        filename = "matrix%d.ps" % (ime+1)
        plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                          get('diagrams'),
                                          filename,
                                          model = \
                                          matrix_element.get('processes')[0].\
                                          get('model'),
                                          amplitude=True)
        logger.info("Generating Feynman diagrams for " + \
                    matrix_element.get('processes')[0].nice_string())
        plot.draw()

    # Extract number of external particles
    # NOTE(review): 'matrix_element' here is the last element of the
    # loop above; presumably nexternal/pmass/props are identical across
    # the group -- confirm.
    (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

    # Generate a list of diagrams corresponding to each configuration
    # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
    # If a subprocess has no diagrams for this config, the number is 0
    subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

    filename = 'auto_dsig.f'
    self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                    subproc_group)

    filename = 'coloramps.inc'
    self.write_coloramps_file(writers.FortranWriter(filename),
                              subproc_diagrams_for_config,
                              maxflows,
                              matrix_elements)

    filename = 'get_color.f'
    self.write_colors_file(writers.FortranWriter(filename),
                           matrix_elements)

    filename = 'config_subproc_map.inc'
    self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                       subproc_diagrams_for_config)

    filename = 'configs.inc'
    nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
        writers.FortranWriter(filename),
        subproc_group,
        subproc_diagrams_for_config)

    filename = 'config_nqcd.inc'
    self.write_config_nqcd_file(writers.FortranWriter(filename),
                                nqcd_list)

    filename = 'decayBW.inc'
    self.write_decayBW_file(writers.FortranWriter(filename),
                            s_and_t_channels)

    filename = 'dname.mg'
    self.write_dname_file(writers.FortranWriter(filename),
                          subprocdir)

    filename = 'iproc.dat'
    self.write_iproc_file(writers.FortranWriter(filename),
                          group_number)

    filename = 'leshouche.inc'
    self.write_leshouche_file(writers.FortranWriter(filename),
                              subproc_group)

    filename = 'maxamps.inc'
    self.write_maxamps_file(writers.FortranWriter(filename),
                            maxamps,
                            maxflows,
                            max([len(me.get('processes')) for me in \
                                 matrix_elements]),
                            len(matrix_elements))

    # Note that mg.sym is not relevant for this case
    filename = 'mg.sym'
    self.write_default_mg_sym_file(writers.FortranWriter(filename))

    filename = 'mirrorprocs.inc'
    self.write_mirrorprocs(writers.FortranWriter(filename),
                           subproc_group)

    filename = 'ncombs.inc'
    self.write_ncombs_file(writers.FortranWriter(filename),
                           nexternal)

    filename = 'nexternal.inc'
    self.write_nexternal_file(writers.FortranWriter(filename),
                              nexternal, ninitial)

    filename = 'ngraphs.inc'
    self.write_ngraphs_file(writers.FortranWriter(filename),
                            nconfigs)

    filename = 'pmass.inc'
    self.write_pmass_file(writers.FortranWriter(filename),
                          matrix_element)

    filename = 'props.inc'
    self.write_props_file(writers.FortranWriter(filename),
                          matrix_element,
                          s_and_t_channels)

    filename = 'processes.dat'
    files.write_to_file(filename,
                        self.write_processes_file,
                        subproc_group)

    # Find config symmetries and permutations
    symmetry, perms, ident_perms = \
              diagram_symmetry.find_symmetry(subproc_group)

    filename = 'symswap.inc'
    self.write_symswap_file(writers.FortranWriter(filename),
                            ident_perms)

    filename = 'symfact_orig.dat'
    self.write_symfact_file(open(filename, 'w'), symmetry)

    filename = 'symperms.inc'
    self.write_symperms_file(writers.FortranWriter(filename),
                             perms)

    # Generate jpgs -> pass in make_html
    #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

    self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

    #import nexternal/leshouch in Source
    ln('nexternal.inc', '../../Source', log=False)
    ln('leshouche.inc', '../../Source', log=False)
    ln('maxamps.inc', '../../Source', log=False)

    # Return to SubProcesses dir)
    os.chdir(pathdir)

    # Add subprocess to subproc.mg
    filename = 'subproc.mg'
    files.append_to_file(filename,
                         self.write_subproc,
                         subprocdir)

    # Return to original dir
    os.chdir(cwd)

    if not tot_calls:
        tot_calls = 0
    return tot_calls
5088 5089 #=========================================================================== 5090 # write_super_auto_dsig_file 5091 #===========================================================================
def write_super_auto_dsig_file(self, writer, subproc_group):
    """Write the auto_dsig.f file selecting between the subprocesses
    in subprocess group mode.

    Returns the replacement dictionary when no writer is given."""

    replace_dict = {}

    # Version number and date banner from the VERSION file
    replace_dict['info_lines'] = self.get_mg5_info_lines()

    matrix_elements = subproc_group.get('matrix_elements')
    nprocs = len(matrix_elements)

    # One process-info banner per matrix element
    replace_dict['process_lines'] = '\n'.join(
        self.get_process_info_lines(me) for me in matrix_elements)

    nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
    replace_dict['nexternal'] = nexternal

    replace_dict['nsprocs'] = 2 * nprocs

    # Declaration of the per-process DSIG functions
    replace_dict["dsig_def_line"] = "DOUBLE PRECISION " + ",".join(
        "DSIG%d" % (iproc + 1) for iproc in range(nprocs))

    # Dispatch lines: call the DSIG of the selected subprocess
    call_dsig_proc_lines = []
    for iproc in range(nprocs):
        call_dsig_proc_lines.append(
            "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s"
            % {"num": iproc + 1,
               "proc": matrix_elements[iproc].get('processes')[0].base_string()})
    replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

    # All grouped MEs share the same number of helicity combinations
    ncomb = matrix_elements[0].get_helicity_combinations()
    replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

    if writer:
        template = open(pjoin(_file_path,
            'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()

        # Write the file
        writer.writelines(template % replace_dict)
    else:
        return replace_dict
5141 5142 #=========================================================================== 5143 # write_mirrorprocs 5144 #===========================================================================
def write_mirrorprocs(self, writer, subproc_group):
    """Write the mirrorprocs.inc file, flagging which processes in
    the group have an initial-state mirror process."""

    bool_dict = {True: '.true.', False: '.false.'}
    matrix_elements = subproc_group.get('matrix_elements')
    flags = ",".join(bool_dict[me.get('has_mirror_process')]
                     for me in matrix_elements)

    # Write the file
    writer.writelines(["DATA (MIRRORPROCS(I),I=1,%d)/%s/" %
                       (len(matrix_elements), flags)])
5158 5159 #=========================================================================== 5160 # write_addmothers 5161 #===========================================================================
def write_addmothers(self, writer):
    """Write the SubProcess/addmothers.f file, with the config index
    variable set to 'lconfig' (subprocess group mode)."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'lconfig'})

    return True
5171 5172 5173 #=========================================================================== 5174 # write_coloramps_file 5175 #===========================================================================
def write_coloramps_file(self, writer, diagrams_for_config, maxflows,
                         matrix_elements):
    """Write the coloramps.inc file for MadEvent in subprocess group
    mode."""

    # Transpose diagrams_for_config: for each subprocess, collect the
    # diagram number it contributes to each config.
    subproc_to_confdiag = {}
    for config in diagrams_for_config:
        for subproc, diag in enumerate(config):
            subproc_to_confdiag.setdefault(subproc, []).append(diag)

    lines = []
    for subproc in sorted(subproc_to_confdiag.keys()):
        lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc],
                                            matrix_elements[subproc],
                                            subproc + 1))

    # The declaration goes first
    lines.insert(0, "logical icolamp(%d,%d,%d)" %
                 (maxflows,
                  len(diagrams_for_config),
                  len(matrix_elements)))

    # Write the file
    writer.writelines(lines)

    return True
5207 5208 #=========================================================================== 5209 # write_config_subproc_map_file 5210 #===========================================================================
5211 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5212 """Write the config_subproc_map.inc file for subprocess groups""" 5213 5214 lines = [] 5215 # Output only configs that have some corresponding diagrams 5216 iconfig = 0 5217 for config in config_subproc_map: 5218 if set(config) == set([0]): 5219 continue 5220 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5221 (iconfig + 1, len(config), 5222 ",".join([str(i) for i in config]))) 5223 iconfig += 1 5224 # Write the file 5225 writer.writelines(lines) 5226 5227 return True
5228 5229 #=========================================================================== 5230 # read_write_good_hel 5231 #===========================================================================
    def read_write_good_hel(self, ncomb):
        """Return Fortran source handling the GOODHEL common block.

        ncomb is the number of helicity combinations.  The generated code
        provides write_good_hel/read_good_hel (dump and restore the helicity
        filter to/from a stream), init_good_hel (reset the filter) and a
        small get_maxsproc accessor reading maxsproc from maxamps.inc.
        """

        convert = {'ncomb' : ncomb}

        # NOTE: read_good_hel sets NTRY past MAXTRIES (from genps.inc) so a
        # restored filter is treated as final and is not re-trained.
        output = """
        subroutine write_good_hel(stream_id)
        implicit none
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        write(stream_id,*) GOODHEL
        return
        end


        subroutine read_good_hel(stream_id)
        implicit none
        include 'genps.inc'
        integer stream_id
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        common/BLOCK_GOODHEL/NTRY,GOODHEL
        read(stream_id,*) GOODHEL
        NTRY(1) = MAXTRIES + 1
        NTRY(2) = MAXTRIES + 1
        return
        end

        subroutine init_good_hel()
        implicit none
        INTEGER NCOMB
        PARAMETER ( NCOMB=%(ncomb)d)
        LOGICAL GOODHEL(NCOMB, 2)
        INTEGER NTRY(2)
        INTEGER I

        do i=1,NCOMB
            GOODHEL(I,1) = .false.
            GOODHEL(I,2) = .false.
        enddo
        NTRY(1) = 0
        NTRY(2) = 0
        end

        integer function get_maxsproc()
        implicit none
        include 'maxamps.inc'

        get_maxsproc = maxsproc
        return
        end

        """ % convert

        return output
5293 5294 5295 5296 #=========================================================================== 5297 # write_configs_file 5298 #===========================================================================
5299 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5300 """Write the configs.inc file with topology information for a 5301 subprocess group. Use the first subprocess with a diagram for each 5302 configuration.""" 5303 5304 matrix_elements = subproc_group.get('matrix_elements') 5305 model = matrix_elements[0].get('processes')[0].get('model') 5306 5307 diagrams = [] 5308 config_numbers = [] 5309 for iconfig, config in enumerate(diagrams_for_config): 5310 # Check if any diagrams correspond to this config 5311 if set(config) == set([0]): 5312 continue 5313 subproc_diags = [] 5314 for s,d in enumerate(config): 5315 if d: 5316 subproc_diags.append(matrix_elements[s].\ 5317 get('diagrams')[d-1]) 5318 else: 5319 subproc_diags.append(None) 5320 diagrams.append(subproc_diags) 5321 config_numbers.append(iconfig + 1) 5322 5323 # Extract number of external particles 5324 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5325 5326 return len(diagrams), \ 5327 self.write_configs_file_from_diagrams(writer, diagrams, 5328 config_numbers, 5329 nexternal, ninitial, 5330 model)
5331 5332 #=========================================================================== 5333 # write_run_configs_file 5334 #===========================================================================
5335 - def write_run_config_file(self, writer):
5336 """Write the run_configs.inc file for MadEvent""" 5337 5338 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5339 if self.proc_characteristic['loop_induced']: 5340 job_per_chan = 1 5341 else: 5342 job_per_chan = 2 5343 text = open(path).read() % {'chanperjob':job_per_chan} 5344 writer.write(text) 5345 return True
5346 5347 5348 #=========================================================================== 5349 # write_leshouche_file 5350 #===========================================================================
5351 - def write_leshouche_file(self, writer, subproc_group):
5352 """Write the leshouche.inc file for MG4""" 5353 5354 all_lines = [] 5355 5356 for iproc, matrix_element in \ 5357 enumerate(subproc_group.get('matrix_elements')): 5358 all_lines.extend(self.get_leshouche_lines(matrix_element, 5359 iproc)) 5360 # Write the file 5361 writer.writelines(all_lines) 5362 return True
5363 5364
    def finalize(self,*args, **opts):
        """Finalize the grouped-subprocess output.

        Delegates to the parent exporter's finalize, then forces the
        'grouped_matrix' flag since this exporter always produces grouped
        matrix elements.
        """

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        #ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
#===============================================================================
# UFO_model_to_mg4
#===============================================================================

def python_to_fortran(expression):
    """Translate a python (UFO) expression string into Fortran syntax."""
    return parsers.UFOExpressionParserFortran().parse(expression)
class UFO_model_to_mg4(object):
    """A converter of the UFO-MG5 Model to the MG4 format."""

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran type names used for the multiple (quadruple) precision output.
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5393 - def __init__(self, model, output_path, opt=None):
5394 """ initialization of the objects """ 5395 5396 self.model = model 5397 self.model_name = model['name'] 5398 self.dir_path = output_path 5399 5400 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5401 'loop_induced': False} 5402 if opt: 5403 self.opt.update(opt) 5404 5405 self.coups_dep = [] # (name, expression, type) 5406 self.coups_indep = [] # (name, expression, type) 5407 self.params_dep = [] # (name, expression, type) 5408 self.params_indep = [] # (name, expression, type) 5409 self.params_ext = [] # external parameter 5410 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5411 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5412
5414 """modify the parameter if some of them are identical up to the case""" 5415 5416 lower_dict={} 5417 duplicate = set() 5418 keys = self.model['parameters'].keys() 5419 for key in keys: 5420 for param in self.model['parameters'][key]: 5421 lower_name = param.name.lower() 5422 if not lower_name: 5423 continue 5424 try: 5425 lower_dict[lower_name].append(param) 5426 except KeyError,error: 5427 lower_dict[lower_name] = [param] 5428 else: 5429 duplicate.add(lower_name) 5430 logger.debug('%s is define both as lower case and upper case.' 5431 % lower_name) 5432 if not duplicate: 5433 return 5434 5435 re_expr = r'''\b(%s)\b''' 5436 to_change = [] 5437 change={} 5438 for value in duplicate: 5439 for i, var in enumerate(lower_dict[value]): 5440 to_change.append(var.name) 5441 new_name = '%s%s' % (var.name.lower(), 5442 ('__%d'%(i+1) if i>0 else '')) 5443 change[var.name] = new_name 5444 var.name = new_name 5445 5446 # Apply the modification to the map_CTcoup_CTparam of the model 5447 # if it has one (giving for each coupling the CT parameters whcih 5448 # are necessary and which should be exported to the model. 
5449 if hasattr(self.model,'map_CTcoup_CTparam'): 5450 for coup, ctparams in self.model.map_CTcoup_CTparam: 5451 for i, ctparam in enumerate(ctparams): 5452 try: 5453 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5454 except KeyError: 5455 pass 5456 5457 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5458 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5459 5460 # change parameters 5461 for key in keys: 5462 if key == ('external',): 5463 continue 5464 for param in self.model['parameters'][key]: 5465 param.expr = rep_pattern.sub(replace, param.expr) 5466 5467 # change couplings 5468 for key in self.model['couplings'].keys(): 5469 for coup in self.model['couplings'][key]: 5470 coup.expr = rep_pattern.sub(replace, coup.expr) 5471 5472 # change mass/width 5473 for part in self.model['particles']: 5474 if str(part.get('mass')) in to_change: 5475 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5476 if str(part.get('width')) in to_change: 5477 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5478
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits the model parameters into params_ext / params_dep /
        params_indep and the couplings into coups_dep / coups_indep
        according to their dependence on the PS_dependent_key entries
        (aS, MU_R).  If wanted_couplings is given, only those couplings
        are kept.
        """

        # Keep only separation in alphaS
        keys = self.model['parameters'].keys()
        keys.sort(key=len)
        for key in keys:
            # Drop unnamed entries.
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): this sorted 'keys' list is not used below -- the loop
        # iterates over .items() directly, so coupling order is dict order.
        keys = self.model['couplings'].keys()
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects.ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects.ModelVariable('G', '4.1643','real'))
5521 - def build(self, wanted_couplings = [], full=True):
5522 """modify the couplings to fit with MG4 convention and creates all the 5523 different files""" 5524 5525 self.pass_parameter_to_case_insensitive() 5526 self.refactorize(wanted_couplings) 5527 5528 # write the files 5529 if full: 5530 if wanted_couplings: 5531 # extract the wanted ct parameters 5532 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5533 self.write_all()
5534 5535
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        name: file name, created inside self.dir_path.
        comment: comment character used to draw the banner (padded to a
            6-character prefix on the "written by" line).
        format: 'fortran' returns a writers.FortranWriter, anything else a
            plain file object.
        Returns the open file-like object (caller is responsible for it).
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            fsock = open(file_path, 'w')

        # NOTE: calling the unbound (Python 2) file.writelines on fsock
        # writes the banner raw -- presumably to bypass FortranWriter's own
        # writelines formatting; py2-only construct, confirm before porting.
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                               {'comment': comment + (6 - len(comment)) *  ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5552 5553
    def write_all(self):
        """ write all the files

        Creates, in order: the external-parameter cards and readers, the
        internal parameter definitions (double precision and, if the 'mp'
        option is on, multiple precision), the coupling includes and
        routines, the makefile, parameter write-out, the auxiliary model
        functions, the default param_card and the static support files.
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # Quadruple-precision variant of the internal parameters.
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
5586 5587 ############################################################################ 5588 ## ROUTINE CREATING THE FILES ############################################ 5589 ############################################################################ 5590
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Links the static library files, specialises rw_para.f (includes and
        param_card loading call) for the chosen export format, and installs
        the appropriate makefile.  Raises MadGraph5Error for an unknown
        export format.
        """

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                self.dir_path)

        # NOTE: 'file' here shadows the py2 builtin; it holds the rw_para.f
        # template text to be specialised below.
        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
            os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        # Pick the makefile matching the export mode; FKS (aMC@NLO) output
        # reuses the madevent makefile with a renamed target.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #    pass
        else:
            raise MadGraph5Error('Unknown format')
5653
    def create_coupl_inc(self):
        """ write coupling.inc

        Declares G, gal, MU_R, Nf, all non-zero masses/widths, every
        selected coupling and (in complex-mass scheme) the CMASS_* values.
        With the 'mp' option on, also writes mp_coupl.inc (mp_-prefixed
        names) and mp_coupl_same_name.inc (same names, MP common blocks).
        """

        fsock = self.open('coupl.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_coupl.inc', format='fortran')
            mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                            format='fortran')

        # Write header
        header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

        fsock.writelines(header)

        if self.opt['mp']:
            header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

            mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':self.mp_prefix})
            mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                        'complex_mp_format':self.mp_complex_format,
                                        'mp_prefix':''})

        # Write the Mass definition/ common block
        masses = set()
        widths = set()
        if self.opt['complex_mass']:
            complex_mass = set()

        for particle in self.model.get('particles'):
            #find masses
            one_mass = particle.get('mass')
            if one_mass.lower() != 'zero':
                masses.add(one_mass)

            # find width
            one_width = particle.get('width')
            if one_width.lower() != 'zero':
                widths.add(one_width)
                # Complex masses only exist for particles with a width.
                if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                    complex_mass.add('CMASS_%s' % one_mass)

        if masses:
            fsock.writelines('double precision '+','.join(masses)+'\n')
            fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
                mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+m for m in masses])+'\n')
                mp_fsock.writelines('common/MP_masses/ '+\
                            ','.join([self.mp_prefix+m for m in masses])+'\n\n')

        if widths:
            fsock.writelines('double precision '+','.join(widths)+'\n')
            fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
                mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
                mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                    self.mp_prefix+w for w in widths])+'\n')
                mp_fsock.writelines('common/MP_widths/ '+\
                            ','.join([self.mp_prefix+w for w in widths])+'\n\n')

        # Write the Couplings
        coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
        fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
        fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
            mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                self.mp_prefix+c for c in coupling_list])+'\n')
            mp_fsock.writelines('common/MP_couplings/ '+\
                        ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

        # Write complex mass for complex mass scheme (if activated)
        if self.opt['complex_mass'] and complex_mass:
            fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
            fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
            if self.opt['mp']:
                mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
                mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+cm for cm in complex_mass])+'\n')
                mp_fsock.writelines('common/MP_complex_mass/ '+\
                        ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
5773
5774 - def create_write_couplings(self):
5775 """ write the file coupl_write.inc """ 5776 5777 fsock = self.open('coupl_write.inc', format='fortran') 5778 5779 fsock.writelines("""write(*,*) ' Couplings of %s' 5780 write(*,*) ' ---------------------------------' 5781 write(*,*) ' '""" % self.model_name) 5782 def format(coupl): 5783 return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}
5784 5785 # Write the Couplings 5786 lines = [format(coupl) for coupl in self.coups_dep + self.coups_indep] 5787 fsock.writelines('\n'.join(lines)) 5788 5789
    def create_input(self):
        """create input.inc containing the definition of the parameters

        Real and complex internal/external parameters go into params_R and
        params_C common blocks; masses, widths, G, MU_R and ZERO are skipped
        since they are declared elsewhere (coupl.inc).  With 'mp' on, the
        mp_-prefixed twins are written to mp_input.inc.
        """

        fsock = self.open('input.inc', format='fortran')
        if self.opt['mp']:
            mp_fsock = self.open('mp_input.inc', format='fortran')

        #find mass/ width since they are already define
        already_def = set()
        for particle in self.model.get('particles'):
            already_def.add(particle.get('mass').lower())
            already_def.add(particle.get('width').lower())
            if self.opt['complex_mass']:
                already_def.add('cmass_%s' % particle.get('mass').lower())

        # A parameter is written here only if it is not already declared
        # elsewhere (mass/width/G/MU_R/ZERO).
        is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                                name.lower() not in already_def

        real_parameters = [param.name for param in self.params_dep +
                           self.params_indep if param.type == 'real'
                           and is_valid(param.name)]

        real_parameters += [param.name for param in self.params_ext
                            if param.type == 'real'and
                            is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

        fsock.writelines('double precision '+','.join(real_parameters)+'\n')
        fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                                self.mp_prefix+p for p in real_parameters])+'\n\n')

        complex_parameters = [param.name for param in self.params_dep +
                              self.params_indep if param.type == 'complex' and
                              is_valid(param.name)]

        # check the parameter is a CT parameter or not
        # if yes, just use the needed ones
        complex_parameters = [param for param in complex_parameters \
                              if self.check_needed_param(param)]

        if complex_parameters:
            fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
            fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
            if self.opt['mp']:
                mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n')
                mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                                    self.mp_prefix+p for p in complex_parameters])+'\n\n')
5846
5847 - def check_needed_param(self, param):
5848 """ Returns whether the parameter in argument is needed for this 5849 specific computation or not.""" 5850 5851 # If this is a leading order model or if there was no CT parameter 5852 # employed in this NLO model, one can directly return that the 5853 # parameter is needed since only CTParameters are filtered. 5854 if not hasattr(self, 'allCTparameters') or \ 5855 self.allCTparameters is None or self.usedCTparameters is None or \ 5856 len(self.allCTparameters)==0: 5857 return True 5858 5859 # We must allow the conjugate shorthand for the complex parameter as 5860 # well so we check wether either the parameter name or its name with 5861 # 'conjg__' substituted with '' is present in the list. 5862 # This is acceptable even if some parameter had an original name 5863 # including 'conjg__' in it, because at worst we export a parameter 5864 # was not needed. 5865 param = param.lower() 5866 cjg_param = param.replace('conjg__','',1) 5867 5868 # First make sure it is a CTparameter 5869 if param not in self.allCTparameters and \ 5870 cjg_param not in self.allCTparameters: 5871 return True 5872 5873 # Now check if it is in the list of CTparameters actually used 5874 return (param in self.usedCTparameters or \ 5875 cjg_param in self.usedCTparameters)
5876
5877 - def extract_needed_CTparam(self,wanted_couplings=[]):
5878 """ Extract what are the needed CT parameters given the wanted_couplings""" 5879 5880 if not hasattr(self.model,'map_CTcoup_CTparam') or not wanted_couplings: 5881 # Setting these lists to none wil disable the filtering in 5882 # check_needed_param 5883 self.allCTparameters = None 5884 self.usedCTparameters = None 5885 return 5886 5887 # All CTparameters appearin in all CT couplings 5888 allCTparameters=self.model.map_CTcoup_CTparam.values() 5889 # Define in this class the list of all CT parameters 5890 self.allCTparameters=list(\ 5891 set(itertools.chain.from_iterable(allCTparameters))) 5892 5893 # All used CT couplings 5894 w_coupls = [coupl.lower() for coupl in wanted_couplings] 5895 allUsedCTCouplings = [coupl for coupl in 5896 self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls] 5897 5898 # Now define the list of all CT parameters that are actually used 5899 self.usedCTparameters=list(\ 5900 set(itertools.chain.from_iterable([ 5901 self.model.map_CTcoup_CTparam[coupl] for coupl in allUsedCTCouplings 5902 ]))) 5903 5904 # Now at last, make these list case insensitive 5905 self.allCTparameters = [ct.lower() for ct in self.allCTparameters] 5906 self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
5907
    def create_intparam_def(self, dp=True, mp=False):
        """ create intparam_definition.inc setting the internal parameters.
        Output the double precision and/or the multiple precision parameters
        depending on the parameters dp and mp. If mp only, then the file names
        get the 'mp_' prefix.
        """

        fsock = self.open('%sintparam_definition.inc'%
                          ('mp_' if mp and not dp else ''), format='fortran')

        # Independent parameters: computed only once, when the card is read.
        fsock.write_comments(\
                "Parameters that should not be recomputed event by event.\n")
        fsock.writelines("if(readlha) then\n")
        if dp:
            fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
        if mp:
            fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

        for param in self.params_indep:
            if param.name == 'ZERO':
                continue
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            if mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        fsock.writelines('endif')

        # Dependent parameters: recomputed for every phase-space point.
        fsock.write_comments('\nParameters that should be recomputed at an event by even basis.\n')
        if dp:
            fsock.writelines("aS = G**2/4/pi\n")
        if mp:
            fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
        for param in self.params_dep:
            # check whether the parameter is a CT parameter
            # if yes,just used the needed ones
            if not self.check_needed_param(param.name):
                continue
            if dp:
                fsock.writelines("%s = %s\n" % (param.name,
                                            self.p_to_f.parse(param.expr)))
            # NOTE(review): 'elif' (not 'if') -- mp output of dependent
            # parameters happens only when dp is off.  The callers in
            # write_all always pass (dp=True,mp=False) or (dp=False,mp=True),
            # so the two outputs never mix.
            elif mp:
                fsock.writelines("%s%s = %s\n" % (self.mp_prefix,param.name,
                                            self.mp_p_to_f.parse(param.expr)))

        # EW coupling for the aqed write-out: alpha scheme, Gmu scheme, or a
        # dummy value when neither aEWM1 nor Gf is an external parameter.
        fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
        if ('aEWM1',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
          gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
          %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        # in Gmu scheme, aEWM1 is not external but Gf is an exteranl variable
        elif ('Gf',) in self.model['parameters']:
            if dp:
                fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
          gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
          %(mp_prefix)sgal(2) = 1d0
                """ %{'mp_prefix':self.mp_prefix})
            pass
        else:
            if dp:
                logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
                fsock.writelines(""" gal(1) = 1d0
          gal(2) = 1d0
                """)
            elif mp:
                fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
          %(mp_prefix)sgal(2) = 1e0_16
                """%{'mp_prefix':self.mp_prefix})
5991 5992
5993 - def create_couplings(self):
5994 """ create couplings.f and all couplingsX.f """ 5995 5996 nb_def_by_file = 25 5997 5998 self.create_couplings_main(nb_def_by_file) 5999 nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file 6000 nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file 6001 6002 for i in range(nb_coup_indep): 6003 # For the independent couplings, we compute the double and multiple 6004 # precision ones together 6005 data = self.coups_indep[nb_def_by_file * i: 6006 min(len(self.coups_indep), nb_def_by_file * (i+1))] 6007 self.create_couplings_part(i + 1, data, dp=True, mp=self.opt['mp']) 6008 6009 for i in range(nb_coup_dep): 6010 # For the dependent couplings, we compute the double and multiple 6011 # precision ones in separate subroutines. 6012 data = self.coups_dep[nb_def_by_file * i: 6013 min(len(self.coups_dep), nb_def_by_file * (i+1))] 6014 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6015 dp=True,mp=False) 6016 if self.opt['mp']: 6017 self.create_couplings_part( i + 1 + nb_coup_indep , data, 6018 dp=False,mp=True)
6019 6020
    def create_couplings_main(self, nb_def_by_file=25):
        """ create couplings.f

        Writes four driver subroutines: coup() (full initialisation after
        reading the card), update_as_param() (recompute only the
        alphaS-dependent couplings), update_as_param2(mu_r2,as2) (set the
        scale and alphaS then update) and, with the 'mp' option on,
        mp_update_as_param().  nb_def_by_file must match the chunk size
        used by create_couplings.
        """

        fsock = self.open('couplings.f', format='fortran')

        # coup(): full initialisation, called when (re)reading the card.
        fsock.writelines("""subroutine coup()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .true.
          include \'intparam_definition.inc\'""")
        if self.opt['mp']:
            fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        if self.opt['mp']:
            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param(): recompute only the alphaS-dependent part.
        fsock.writelines("""subroutine update_as_param()

          implicit none
          double precision PI, ZERO
          logical READLHA
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          READLHA = .false.""")
        fsock.writelines("""
          include \'intparam_definition.inc\'\n
        """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')

        # update_as_param2(): set MU_R/G/AS from the arguments, then update.
        fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

          implicit none
          double precision PI
          parameter (PI=3.141592653589793d0)
          double precision mu_r2, as2
          include \'model_functions.inc\'""")
        fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'""")
        fsock.writelines("""
          if (mu_r2.gt.0d0) MU_R = mu_r2
          G = SQRT(4.0d0*PI*AS2)
          AS = as2

          CALL UPDATE_AS_PARAM()
        """)
        fsock.writelines('''\n return \n end\n''')

        # mp_update_as_param(): multiple-precision twin of update_as_param.
        if self.opt['mp']:
            fsock.writelines("""subroutine mp_update_as_param()

          implicit none
          logical READLHA
          include \'model_functions.inc\'""")
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
        """%self.mp_real_format)
            fsock.writelines("""include \'input.inc\'
          include \'coupl.inc\'
          include \'actualize_mp_ext_params.inc\'
          READLHA = .false.
          include \'mp_intparam_definition.inc\'\n
        """)

            nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
            nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

            fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

            fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
            fsock.writelines('''\n return \n end\n''')
6136
    def create_couplings_part(self, nb_file, data, dp=True, mp=False):
        """ create couplings[nb_file].f containing information coming from data.
        Outputs the computation of the double precision and/or the multiple
        precision couplings depending on the parameters dp and mp.
        If mp is True and dp is False, then the prefix 'MP_' is appended to the
        filename and subroutine name.

        nb_file: integer suffix of the generated file/subroutine.
        data: iterable of coupling objects exposing .name and .expr.
        """

        # 'mp_' prefix only for a pure multiple-precision file (mp without dp).
        fsock = self.open('%scouplings%s.f' % ('mp_' if mp and not dp else '',
                                               nb_file), format='fortran')
        fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'""" % ('mp_' if mp and not dp else '', nb_file))
        if dp:
            # Double-precision constants and parameter/coupling commons.
            fsock.writelines("""
          double precision PI, ZERO
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
        if mp:
            # Quadruple-precision constants and commons; the declaration type
            # comes from self.mp_real_format (e.g. 'real*16').
            fsock.writelines("""%s MP__PI, MP__ZERO
          parameter (MP__PI=3.1415926535897932384626433832795e0_16)
          parameter (MP__ZERO=0e0_16)
          include \'mp_input.inc\'
          include \'mp_coupl.inc\'
        """ % self.mp_real_format)

        # One assignment per coupling; mp couplings get the self.mp_prefix
        # name prefix and are parsed with the multiple-precision parser.
        for coupling in data:
            if dp:
                fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
            if mp:
                fsock.writelines('%s%s = %s' % (self.mp_prefix, coupling.name,
                                          self.mp_p_to_f.parse(coupling.expr)))
        fsock.writelines('end')
6175 - def create_model_functions_inc(self):
6176 """ Create model_functions.inc which contains the various declarations 6177 of auxiliary functions which might be used in the couplings expressions 6178 """ 6179 6180 additional_fct = [] 6181 # check for functions define in the UFO model 6182 ufo_fct = self.model.get('functions') 6183 if ufo_fct: 6184 for fct in ufo_fct: 6185 # already handle by default 6186 if fct.name not in ["complexconjugate", "re", "im", "sec", 6187 "csc", "asec", "acsc", "theta_function", "cond", 6188 "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot", 6189 "grreglog","regsqrt"]: 6190 additional_fct.append(fct.name) 6191 6192 6193 fsock = self.open('model_functions.inc', format='fortran') 6194 fsock.writelines("""double complex cond 6195 double complex condif 6196 double complex reglog 6197 double complex reglogp 6198 double complex reglogm 6199 double complex recms 6200 double complex arg 6201 double complex grreglog 6202 double complex regsqrt 6203 %s 6204 """ % "\n".join([" double complex %s" % i for i in additional_fct])) 6205 6206 6207 if self.opt['mp']: 6208 fsock.writelines("""%(complex_mp_format)s mp_cond 6209 %(complex_mp_format)s mp_condif 6210 %(complex_mp_format)s mp_reglog 6211 %(complex_mp_format)s mp_reglogp 6212 %(complex_mp_format)s mp_reglogm 6213 %(complex_mp_format)s mp_recms 6214 %(complex_mp_format)s mp_arg 6215 %(complex_mp_format)s mp_grreglog 6216 %(complex_mp_format)s mp_regsqrt 6217 %(additional)s 6218 """ %\ 6219 {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]), 6220 'complex_mp_format':self.mp_complex_format 6221 })
6222
    def create_model_functions_def(self):
        """ Create model_functions.f which contains the various definitions
        of auxiliary functions which might be used in the couplings expressions
        Add the functions.f functions for formfactors support

        Writes, in order: the double-precision helpers (cond, condif, recms,
        reglog/reglogp/reglogm, regsqrt, grreglog, arg), their quadruple
        precision twins when self.opt['mp'] is set, the model's optional
        Fortran/functions.f file, and a Fortran translation of every
        user-defined UFO function.
        """

        fsock = self.open('model_functions.f', format='fortran')
        # Double-precision helpers.  The reglog* family returns 0 for a
        # vanishing argument instead of raising, and reglogp/reglogm shift the
        # branch by +/-2*pi*i in the relevant quadrant.
        fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
            cond=truecase
          else
            cond=falsecase
          endif
          end

          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
            condif=truecase
          else
            condif=falsecase
          endif
          end

          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
            recms=expr
          else
            recms=dcmplx(dble(expr))
          endif
          end

          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
            reglog=(0.0d0,0.0d0)
          else
            reglog=log(arg)
          endif
          end

          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
            reglogp=(0.0d0,0.0d0)
          else
            if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
              reglogp=log(arg) + TWOPII
            else
              reglogp=log(arg)
            endif
          endif
          end

          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
            reglogm=(0.0d0,0.0d0)
          else
            if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
              reglogm=log(arg) - TWOPII
            else
              reglogm=log(arg)
            endif
          endif
          end

          double complex function regsqrt(arg_in)
          implicit none
          double complex arg_in
          double complex arg
          arg=arg_in
          if(dabs(dimag(arg)).eq.0.0d0)then
            arg=dcmplx(dble(arg),0.0d0)
          endif
          if(dabs(dble(arg)).eq.0.0d0)then
            arg=dcmplx(0.0d0,dimag(arg))
          endif
          regsqrt=sqrt(arg)
          end

          double complex function grreglog(logsw,expr1_in,expr2_in)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex expr1_in,expr2_in
          double complex expr1,expr2
          double precision logsw
          double precision imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(dabs(dimag(expr1)).eq.0.0d0)then
            expr1=dcmplx(dble(expr1),0.0d0)
          endif
          if(dabs(dble(expr1)).eq.0.0d0)then
            expr1=dcmplx(0.0d0,dimag(expr1))
          endif
          if(dabs(dimag(expr2)).eq.0.0d0)then
            expr2=dcmplx(dble(expr2),0.0d0)
          endif
          if(dabs(dble(expr2)).eq.0.0d0)then
            expr2=dcmplx(0.0d0,dimag(expr2))
          endif
          if(expr1.eq.(0.0d0,0.0d0))then
            grreglog=(0.0d0,0.0d0)
          else
            imagexpr=dimag(expr1)*dimag(expr2)
            firstsheet=imagexpr.ge.0.0d0
            firstsheet=firstsheet.or.dble(expr1).ge.0.0d0
            firstsheet=firstsheet.or.dble(expr2).ge.0.0d0
            if(firstsheet)then
              grreglog=log(expr1)
            else
              if(dimag(expr1).gt.0.0d0)then
                grreglog=log(expr1) - logsw*TWOPII
              else
                grreglog=log(expr1) + logsw*TWOPII
              endif
            endif
          endif
          end

          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
            arg=(0.0d0,0.0d0)
          else
            arg=log(comnum/abs(comnum))/iim
          endif
          end""")
        if self.opt['mp']:
            # Quadruple-precision counterparts, typed via the format strings
            # configured on the exporter.
            # NOTE(review): the quad-precision TWOPII literal below spells
            # 3.14169258..., which differs from pi=3.14159265... — looks like a
            # digit transposition inherited from upstream; confirm before use.
            fsock.writelines("""

          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
            mp_cond=truecase
          else
            mp_cond=falsecase
          endif
          end

          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
            mp_condif=truecase
          else
            mp_condif=falsecase
          endif
          end

          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
            mp_recms=expr
          else
            mp_recms=cmplx(real(expr),kind=16)
          endif
          end

          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
            mp_reglog=(0.0e0_16,0.0e0_16)
          else
            mp_reglog=log(arg)
          endif
          end

          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
            mp_reglogp=(0.0e0_16,0.0e0_16)
          else
            if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
              mp_reglogp=log(arg) + TWOPII
            else
              mp_reglogp=log(arg)
            endif
          endif
          end

          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
            mp_reglogm=(0.0e0_16,0.0e0_16)
          else
            if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
              mp_reglogm=log(arg) - TWOPII
            else
              mp_reglogm=log(arg)
            endif
          endif
          end

          %(complex_mp_format)s function mp_regsqrt(arg_in)
          implicit none
          %(complex_mp_format)s arg_in
          %(complex_mp_format)s arg
          arg=arg_in
          if(abs(imagpart(arg)).eq.0.0e0_16)then
            arg=cmplx(real(arg,kind=16),0.0e0_16)
          endif
          if(abs(real(arg,kind=16)).eq.0.0e0_16)then
            arg=cmplx(0.0e0_16,imagpart(arg))
          endif
          mp_regsqrt=sqrt(arg)
          end


          %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.14169258478796109557151794433593750e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s expr1_in,expr2_in
          %(complex_mp_format)s expr1,expr2
          %(real_mp_format)s logsw
          %(real_mp_format)s imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(abs(imagpart(expr1)).eq.0.0e0_16)then
            expr1=cmplx(real(expr1,kind=16),0.0e0_16)
          endif
          if(abs(real(expr1,kind=16)).eq.0.0e0_16)then
            expr1=cmplx(0.0e0_16,imagpart(expr1))
          endif
          if(abs(imagpart(expr2)).eq.0.0e0_16)then
            expr2=cmplx(real(expr2,kind=16),0.0e0_16)
          endif
          if(abs(real(expr2,kind=16)).eq.0.0e0_16)then
            expr2=cmplx(0.0e0_16,imagpart(expr2))
          endif
          if(expr1.eq.(0.0e0_16,0.0e0_16))then
            mp_grreglog=(0.0e0_16,0.0e0_16)
          else
            imagexpr=imagpart(expr1)*imagpart(expr2)
            firstsheet=imagexpr.ge.0.0e0_16
            firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16
            firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16
            if(firstsheet)then
              mp_grreglog=log(expr1)
            else
              if(imagpart(expr1).gt.0.0e0_16)then
                mp_grreglog=log(expr1) - logsw*TWOPII
              else
                mp_grreglog=log(expr1) + logsw*TWOPII
              endif
            endif
          endif
          end

          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
            mp_arg=(0.0e0_16,0.0e0_16)
          else
            mp_arg=log(comnum/abs(comnum))/imm
          endif
          end""" % {'complex_mp_format': self.mp_complex_format, 'real_mp_format': self.mp_real_format})

        #check for the file functions.f
        model_path = self.model.get('modelpath')
        if os.path.exists(pjoin(model_path, 'Fortran', 'functions.f')):
            fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
            input = pjoin(model_path, 'Fortran', 'functions.f')
            # NOTE(review): 'file.writelines(fsock, ...)' calls the Python 2
            # builtin file type's writelines unbound with fsock as self (so the
            # text bypasses FortranWriter reformatting); the handle returned by
            # open() is never explicitly closed.  Python-2-only construct.
            file.writelines(fsock, open(input).read())
            fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

        # check for functions define in the UFO model
        ufo_fct = self.model.get('functions')
        if ufo_fct:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
            for fct in ufo_fct:
                # already handle by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                                    "grreglog", "regsqrt"]:
                    ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.p_to_f.parse(fct.expr)
                    # Declare any auxiliary symbols the parser flagged; 'pi'
                    # gets a real declaration plus a data statement.
                    if not self.p_to_f.to_define:
                        definitions = []
                    else:
                        definitions = []
                        for d in self.p_to_f.to_define:
                            if d == 'pi':
                                definitions.append(' double precision pi')
                                definitions.append(' data pi /3.1415926535897932d0/')
                            else:
                                definitions.append(' double complex %s' % d)

                    text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                    }

                    fsock.writelines(text)
            if self.opt['mp']:
                fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
                for fct in ufo_fct:
                    # already handle by default
                    if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                        "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                                        "grreglog", "regsqrt"]:
                        ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                        str_fct = self.mp_p_to_f.parse(fct.expr)
                        if not self.mp_p_to_f.to_define:
                            definitions = []
                        else:
                            definitions = []
                            for d in self.mp_p_to_f.to_define:
                                if d == 'pi':
                                    definitions.append(' %s mp__pi' % self.mp_real_format)
                                    definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                                else:
                                    definitions.append(' %s mp_%s' % (self.mp_complex_format, d))
                        text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                        }
                        fsock.writelines(text)

            fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
6612 - def create_makeinc(self):
6613 """create makeinc.inc containing the file to compile """ 6614 6615 fsock = self.open('makeinc.inc', comment='#') 6616 text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o' 6617 text += ' model_functions.o ' 6618 6619 nb_coup_indep = 1 + len(self.coups_dep) // 25 6620 nb_coup_dep = 1 + len(self.coups_indep) // 25 6621 couplings_files=['couplings%s.o' % (i+1) \ 6622 for i in range(nb_coup_dep + nb_coup_indep) ] 6623 if self.opt['mp']: 6624 couplings_files+=['mp_couplings%s.o' % (i+1) for i in \ 6625 range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ] 6626 text += ' '.join(couplings_files) 6627 fsock.writelines(text)
6628
6629 - def create_param_write(self):
6630 """ create param_write """ 6631 6632 fsock = self.open('param_write.inc', format='fortran') 6633 6634 fsock.writelines("""write(*,*) ' External Params' 6635 write(*,*) ' ---------------------------------' 6636 write(*,*) ' '""") 6637 def format(name): 6638 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6639 6640 # Write the external parameter 6641 lines = [format(param.name) for param in self.params_ext] 6642 fsock.writelines('\n'.join(lines)) 6643 6644 fsock.writelines("""write(*,*) ' Internal Params' 6645 write(*,*) ' ---------------------------------' 6646 write(*,*) ' '""") 6647 lines = [format(data.name) for data in self.params_indep 6648 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6649 fsock.writelines('\n'.join(lines)) 6650 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6651 write(*,*) ' ----------------------------------------' 6652 write(*,*) ' '""") 6653 lines = [format(data.name) for data in self.params_dep \ 6654 if self.check_needed_param(data.name)] 6655 6656 fsock.writelines('\n'.join(lines)) 6657 6658 6659
6660 - def create_ident_card(self):
6661 """ create the ident_card.dat """ 6662 6663 def format(parameter): 6664 """return the line for the ident_card corresponding to this parameter""" 6665 colum = [parameter.lhablock.lower()] + \ 6666 [str(value) for value in parameter.lhacode] + \ 6667 [parameter.name] 6668 if not parameter.name: 6669 return '' 6670 return ' '.join(colum)+'\n'
6671 6672 fsock = self.open('ident_card.dat') 6673 6674 external_param = [format(param) for param in self.params_ext] 6675 fsock.writelines('\n'.join(external_param)) 6676
6677 - def create_actualize_mp_ext_param_inc(self):
6678 """ create the actualize_mp_ext_params.inc code """ 6679 6680 # In principle one should actualize all external, but for now, it is 6681 # hardcoded that only AS and MU_R can by dynamically changed by the user 6682 # so that we only update those ones. 6683 # Of course, to be on the safe side, one could decide to update all 6684 # external parameters. 6685 update_params_list=[p for p in self.params_ext if p.name in 6686 self.PS_dependent_key] 6687 6688 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6689 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6690 for param in update_params_list] 6691 # When read_lha is false, it is G which is taken in input and not AS, so 6692 # this is what should be reset here too. 6693 if 'aS' in [param.name for param in update_params_list]: 6694 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6695 6696 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6697 fsock.writelines('\n'.join(res_strings))
6698
6699 - def create_param_read(self):
6700 """create param_read""" 6701 6702 if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \ 6703 or self.opt['loop_induced']: 6704 fsock = self.open('param_read.inc', format='fortran') 6705 fsock.writelines(' include \'../param_card.inc\'') 6706 return 6707 6708 def format_line(parameter): 6709 """return the line for the ident_card corresponding to this 6710 parameter""" 6711 template = \ 6712 """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \ 6713 % {'name': parameter.name, 6714 'value': self.p_to_f.parse(str(parameter.value.real))} 6715 if self.opt['mp']: 6716 template = template+ \ 6717 ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+ 6718 "%(mp_prefix)s%(name)s,%(value)s)") \ 6719 % {'name': parameter.name,'mp_prefix': self.mp_prefix, 6720 'value': self.mp_p_to_f.parse(str(parameter.value.real))} 6721 return template 6722 6723 fsock = self.open('param_read.inc', format='fortran') 6724 res_strings = [format_line(param) \ 6725 for param in self.params_ext] 6726 6727 # Correct width sign for Majorana particles (where the width 6728 # and mass need to have the same sign) 6729 for particle in self.model.get('particles'): 6730 if particle.is_fermion() and particle.get('self_antipart') and \ 6731 particle.get('width').lower() != 'zero': 6732 6733 res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \ 6734 {'width': particle.get('width'), 'mass': particle.get('mass')}) 6735 if self.opt['mp']: 6736 res_strings.append(\ 6737 ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\ 6738 '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\ 6739 'mass': particle.get('mass'),'mp_pref':self.mp_prefix}) 6740 6741 fsock.writelines('\n'.join(res_strings)) 6742 6743 6744 @staticmethod
6745 - def create_param_card_static(model, output_path, rule_card_path=False, 6746 mssm_convert=True):
6747 """ create the param_card.dat for a givent model --static method-- """ 6748 #1. Check if a default param_card is present: 6749 done = False 6750 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6751 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6752 model_path = model.get('modelpath') 6753 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6754 done = True 6755 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6756 output_path) 6757 if not done: 6758 param_writer.ParamCardWriter(model, output_path) 6759 6760 if rule_card_path: 6761 if hasattr(model, 'rule_card'): 6762 model.rule_card.write_file(rule_card_path) 6763 6764 if mssm_convert: 6765 model_name = model.get('name') 6766 # IF MSSM convert the card to SLAH1 6767 if model_name == 'mssm' or model_name.startswith('mssm-'): 6768 import models.check_param_card as translator 6769 # Check the format of the param_card for Pythia and make it correct 6770 if rule_card_path: 6771 translator.make_valid_param_card(output_path, rule_card_path) 6772 translator.convert_to_slha1(output_path)
6773
6774 - def create_param_card(self):
6775 """ create the param_card.dat """ 6776 6777 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6778 if not hasattr(self.model, 'rule_card'): 6779 rule_card=False 6780 self.create_param_card_static(self.model, 6781 output_path=pjoin(self.dir_path, 'param_card.dat'), 6782 rule_card_path=rule_card, 6783 mssm_convert=True)
6784
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
    interface containing all potential usefull information.
    The output_type argument specifies from which context the output
    is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
    and 'default' for tree-level outputs.

    Returns an instantiated exporter object.
    NOTE(review): cmd_options={} is a mutable default argument; it is only
    copied into option dicts here, but sharing across calls is a latent risk.
    """

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Pick a representative process: first amplitude if any, else the first
    # FKS process definition, else None.
    if len(cmd._curr_amps) > 0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd, '_fks_multi_proc') and \
            len(cmd._fks_multi_proc.get('process_definitions')) > 0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A loop-reduction tool is needed only for optimized loop output of a
    # genuinely virtual-corrected process.
    requires_reduction_tool = opt['loop_optimized_output'] and \
        (not curr_proc is None) and \
        (curr_proc.get('perturbation_couplings') != [] and \
         not curr_proc.get('NLO_mode') in [None, 'real', 'tree', 'LO', 'LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    MadLoop_SA_options = {'clean': not noclean,
        'complex_mass': cmd.options['complex_mass_scheme'],
        'export_format': 'madloop',
        'mp': True,
        'loop_dir': os.path.join(cmd._mgme_dir, 'Template', 'loop_material'),
        'cuttools_dir': cmd._cuttools_dir,
        'iregi_dir': cmd._iregi_dir,
        'pjfry_dir': cmd.options['pjfry'],
        'golem_dir': cmd.options['golem'],
        'samurai_dir': cmd.options['samurai'],
        'ninja_dir': cmd.options['ninja'],
        'collier_dir': cmd.options['collier'],
        'fortran_compiler': cmd.options['fortran_compiler'],
        'f2py_compiler': cmd.options['f2py_compiler'],
        'output_dependencies': cmd.options['output_dependencies'],
        'SubProc_prefix': 'P',
        'compute_color_flows': cmd.options['loop_color_flows'],
        'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
        'cluster_local_path': cmd.options['cluster_local_path'],
        'output_options': cmd_options
        }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass = None
            if not cmd.options['loop_optimized_output']:
                ExporterClass = loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass = loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass = loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    # Python-2 raise syntax, kept as-is.
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s' % str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type == 'amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass = None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp only needed when there are virtual amplitudes to evaluate.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format'] = 'FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)

    # Then the default tree-level output
    elif output_type == 'default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format': cmd._export_format,
                    'mp': False,
                    'sa_symmetry': False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True})

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Option set used when the amplitudes turn out to be loop-induced.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir, opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir, loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir, opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
6956
6957 6958 6959 6960 #=============================================================================== 6961 # ProcessExporterFortranMWGroup 6962 #=============================================================================== 6963 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
6964 """Class to take care of exporting a set of matrix elements to 6965 MadEvent subprocess group format.""" 6966 6967 matrix_file = "matrix_madweight_group_v4.inc" 6968 grouped_mode = 'madweight' 6969 #=========================================================================== 6970 # generate_subprocess_directory 6971 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        subproc_group: group_subprocs.SubProcessGroup to export.
        fortran_model: helas call writer passed through to the matrix writers.
        group_number: unused here; kept for interface compatibility.
        Returns the total number of helas calls generated.
        """

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # Directory may already exist; log and keep going.
            logger.warning(error.strerror + " " + subprocdir)

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
                   self.write_matrix_element_v4(writers.FortranWriter(filename),
                                                matrix_element,
                                                fortran_model,
                                                str(ime+1),
                                                subproc_group.get('diagram_maps')[\
                                                                              ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE(review): 'matrix_element' here is the last element of the loop
        # above; all group members presumably share nexternal/ninitial.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath, 'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared MadWeight sources and include files into Ppath.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7119 7120 7121 #=========================================================================== 7122 # Helper functions 7123 #===========================================================================
7124 - def modify_grouping(self, matrix_element):
7125 """allow to modify the grouping (if grouping is in place) 7126 return two value: 7127 - True/False if the matrix_element was modified 7128 - the new(or old) matrix element""" 7129 7130 return True, matrix_element.split_lepton_grouping()
7131 7132 #=========================================================================== 7133 # write_super_auto_dsig_file 7134 #===========================================================================
7135 - def write_super_auto_dsig_file(self, writer, subproc_group):
7136 """Write the auto_dsig.f file selecting between the subprocesses 7137 in subprocess group mode""" 7138 7139 replace_dict = {} 7140 7141 # Extract version number and date from VERSION file 7142 info_lines = self.get_mg5_info_lines() 7143 replace_dict['info_lines'] = info_lines 7144 7145 matrix_elements = subproc_group.get('matrix_elements') 7146 7147 # Extract process info lines 7148 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7149 matrix_elements]) 7150 replace_dict['process_lines'] = process_lines 7151 7152 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7153 replace_dict['nexternal'] = nexternal 7154 7155 replace_dict['nsprocs'] = 2*len(matrix_elements) 7156 7157 # Generate dsig definition line 7158 dsig_def_line = "DOUBLE PRECISION " + \ 7159 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7160 range(len(matrix_elements))]) 7161 replace_dict["dsig_def_line"] = dsig_def_line 7162 7163 # Generate dsig process lines 7164 call_dsig_proc_lines = [] 7165 for iproc in range(len(matrix_elements)): 7166 call_dsig_proc_lines.append(\ 7167 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7168 {"num": iproc + 1, 7169 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7170 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7171 7172 if writer: 7173 file = open(os.path.join(_file_path, \ 7174 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7175 file = file % replace_dict 7176 # Write the file 7177 writer.writelines(file) 7178 else: 7179 return replace_dict
7180 7181 #=========================================================================== 7182 # write_mirrorprocs 7183 #===========================================================================
7184 - def write_mirrorprocs(self, writer, subproc_group):
7185 """Write the mirrorprocs.inc file determining which processes have 7186 IS mirror process in subprocess group mode.""" 7187 7188 lines = [] 7189 bool_dict = {True: '.true.', False: '.false.'} 7190 matrix_elements = subproc_group.get('matrix_elements') 7191 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7192 (len(matrix_elements), 7193 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7194 me in matrix_elements]))) 7195 # Write the file 7196 writer.writelines(lines)
7197 7198 #=========================================================================== 7199 # write_configs_file 7200 #===========================================================================
7201 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7202 """Write the configs.inc file with topology information for a 7203 subprocess group. Use the first subprocess with a diagram for each 7204 configuration.""" 7205 7206 matrix_elements = subproc_group.get('matrix_elements') 7207 model = matrix_elements[0].get('processes')[0].get('model') 7208 7209 diagrams = [] 7210 config_numbers = [] 7211 for iconfig, config in enumerate(diagrams_for_config): 7212 # Check if any diagrams correspond to this config 7213 if set(config) == set([0]): 7214 continue 7215 subproc_diags = [] 7216 for s,d in enumerate(config): 7217 if d: 7218 subproc_diags.append(matrix_elements[s].\ 7219 get('diagrams')[d-1]) 7220 else: 7221 subproc_diags.append(None) 7222 diagrams.append(subproc_diags) 7223 config_numbers.append(iconfig + 1) 7224 7225 # Extract number of external particles 7226 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7227 7228 return len(diagrams), \ 7229 self.write_configs_file_from_diagrams(writer, diagrams, 7230 config_numbers, 7231 nexternal, ninitial, 7232 matrix_elements[0],model)
7233 7234 #=========================================================================== 7235 # write_run_configs_file 7236 #===========================================================================
7237 - def write_run_config_file(self, writer):
7238 """Write the run_configs.inc file for MadEvent""" 7239 7240 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7241 text = open(path).read() % {'chanperjob':'2'} 7242 writer.write(text) 7243 return True
7244 7245 7246 #=========================================================================== 7247 # write_leshouche_file 7248 #===========================================================================
7249 - def write_leshouche_file(self, writer, subproc_group):
7250 """Write the leshouche.inc file for MG4""" 7251 7252 all_lines = [] 7253 7254 for iproc, matrix_element in \ 7255 enumerate(subproc_group.get('matrix_elements')): 7256 all_lines.extend(self.get_leshouche_lines(matrix_element, 7257 iproc)) 7258 7259 # Write the file 7260 writer.writelines(all_lines) 7261 7262 return True
7263