Package madgraph :: Package iolibs :: Module export_v4
[hide private]
[frames] | [no frames]

Source Code for Module madgraph.iolibs.export_v4

   1  ################################################################################ 
   2  # 
   3  # Copyright (c) 2009 The MadGraph5_aMC@NLO Development team and Contributors 
   4  # 
   5  # This file is a part of the MadGraph5_aMC@NLO project, an application which  
   6  # automatically generates Feynman diagrams and matrix elements for arbitrary 
   7  # high-energy processes in the Standard Model and beyond. 
   8  # 
   9  # It is subject to the MadGraph5_aMC@NLO license which should accompany this  
  10  # distribution. 
  11  # 
  12  # For more information, visit madgraph.phys.ucl.ac.be and amcatnlo.web.cern.ch 
  13  # 
  14  ################################################################################ 
  15  """Methods and classes to export matrix elements to v4 format.""" 
  16   
  17  import copy 
  18  from cStringIO import StringIO 
  19  from distutils import dir_util 
  20  import itertools 
  21  import fractions 
  22  import glob 
  23  import logging 
  24  import math 
  25  import os 
  26  import re 
  27  import shutil 
  28  import subprocess 
  29  import sys 
  30  import time 
  31  import traceback 
  32   
  33  import aloha 
  34   
  35  import madgraph.core.base_objects as base_objects 
  36  import madgraph.core.color_algebra as color 
  37  import madgraph.core.helas_objects as helas_objects 
  38  import madgraph.iolibs.drawing_eps as draw 
  39  import madgraph.iolibs.files as files 
  40  import madgraph.iolibs.group_subprocs as group_subprocs 
  41  import madgraph.iolibs.file_writers as writers 
  42  import madgraph.iolibs.gen_infohtml as gen_infohtml 
  43  import madgraph.iolibs.template_files as template_files 
  44  import madgraph.iolibs.ufo_expression_parsers as parsers 
  45  import madgraph.iolibs.helas_call_writers as helas_call_writers 
  46  import madgraph.interface.common_run_interface as common_run_interface 
  47  import madgraph.various.diagram_symmetry as diagram_symmetry 
  48  import madgraph.various.misc as misc 
  49  import madgraph.various.banner as banner_mod 
  50  import madgraph.various.process_checks as process_checks 
  51  import madgraph.loop.loop_diagram_generation as loop_diagram_generation 
  52  import aloha.create_aloha as create_aloha 
  53  import models.import_ufo as import_ufo 
  54  import models.write_param_card as param_writer 
  55  import models.check_param_card as check_param_card 
  56   
  57   
  58  from madgraph import MadGraph5Error, MG5DIR, ReadWrite 
  59  from madgraph.iolibs.files import cp, ln, mv 
  60   
  61  from madgraph import InvalidCmd 
  62   
  63  pjoin = os.path.join 
  64   
  65  _file_path = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0] + '/' 
  66  logger = logging.getLogger('madgraph.export_v4') 
  67   
  68  default_compiler= {'fortran': 'gfortran', 
  69                         'f2py': 'f2py', 
  70                         'cpp':'g++'} 
class VirtualExporter(object):
    """Base class defining the interface madgraph uses to drive an exporter.

    The class attributes below modify the way madgraph interacts with the
    exporter; daughter classes override them (and the methods) as needed.
    """

    # This variable changes the type of object called within
    # 'generate_subprocess_directory' functions:
    #   False       - avoid grouping (only identical matrix elements are merged)
    #   'madevent'  - group the massless quarks and massless leptons
    #   'madweight' - group the gluon with the massless quarks
    grouped_mode = 'madevent'

    # If grouped_mode=False, uu~ and u~u will be called independently.
    # Putting sa_symmetry generates only one of the two matrix elements.
    sa_symmetry = False

    # Ask madgraph to check if the directory already exists and propose to the
    # user to remove it first if this is the case.
    check = True

    # One of [ 'Template', None, 'dir' ]:
    #   'Template' - madgraph will call copy_template
    #   'dir'      - madgraph will just create an empty directory for initialisation
    #   None       - madgraph does nothing for initialisation
    output = 'Template'

    # Language of the output: 'v4' for Fortran output, 'cpp' for C++ output.
    exporter = 'v4'

    def __init__(self, dir_path="", opt=None):
        """opt is a dictionary with all the optional arguments passed at
        output time (cmd_options)."""
        # Activate some monkey patching for the helas call writer.
        helas_call_writers.HelasCallWriter.customize_argument_for_all_other_helas_object = \
            self.helas_call_writer_custom

    # helper function for customising the helas writer
    @staticmethod
    def custom_helas_call(call, arg):
        """static method to customise the way aloha function call are written
        call is the default template for the call
        arg are the dictionary used for the call
        """
        return call, arg

    def helas_call_writer_custom(self, call, arg):
        """Dispatch hook so daughter classes can override custom_helas_call.
        (Was a class-level lambda; a plain method is equivalent and clearer.)"""
        return self.custom_helas_call(call, arg)

    def copy_template(self, model):
        """Initialise the output directory (no-op for the virtual exporter)."""
        return

    def generate_subprocess_directory(self, subproc_group, helicity_model, me=None):
        """Create one subprocess directory.
        For ungrouped output the signature is
        (matrix_element, helicity_model, me_number).
        Returns an integer: the number of calls to helicity routines."""
        return 0

    def convert_model(self, model, wanted_lorentz=None, wanted_couplings=None):
        """Output the model files (no-op here).  'None' defaults replace the
        former mutable-list defaults; behaviour is unchanged since the
        arguments are not used by this base implementation."""
        return

    def finalize(self, matrix_element, cmdhistory, MG5options, outputflag):
        """Finalize the output directory (no-op here)."""
        return

    def pass_information_from_cmd(self, cmd):
        """pass information from the command interface to the exporter.
        Please do not modify any object of the interface from the exporter.
        """
        return

    def modify_grouping(self, matrix_element):
        """Hook allowing daughter classes to regroup matrix elements.
        Returns (modified_flag, matrix_element)."""
        return False, matrix_element

    def export_model_files(self, model_v4_path):
        # Python3-compatible raise (was the Python2-only 'raise Exc, msg' form);
        # the old 'return' after the raise was unreachable and has been removed.
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")

    def export_helas(self, HELAS_PATH):
        raise Exception("V4 model not supported by this type of exporter. Please use UFO model")
#===============================================================================
# ProcessExporterFortran
#===============================================================================
class ProcessExporterFortran(VirtualExporter):
    """Class to take care of exporting a set of matrix elements to
    Fortran (v4) format."""

    # Default exporter options; __init__ copies this dict and overlays the
    # user-supplied 'opt'.
    #   clean:          remove old information from the output directory
    #                   (see copy_template)
    #   complex_mass:   presumably enables the complex-mass scheme — TODO confirm
    #   export_format:  output format, 'madevent' by default
    #   mp:             quadruple precision for the aloha routines
    #                   (see convert_model: aloha.mp_precision = self.opt['mp'])
    #   v5_model:       assumed flag for v5 model conventions — TODO confirm
    #   output_options: dictionary of options forwarded from the command line
    default_opt = {'clean': False, 'complex_mass': False,
                   'export_format': 'madevent', 'mp': False,
                   'v5_model': True,
                   'output_options': {}
                   }
    # Plain Fortran (v4) export does not group subprocesses.
    grouped_mode = False
164 - def __init__(self, dir_path = "", opt=None):
165 """Initiate the ProcessExporterFortran with directory information""" 166 self.mgme_dir = MG5DIR 167 self.dir_path = dir_path 168 self.model = None 169 170 self.opt = dict(self.default_opt) 171 if opt: 172 self.opt.update(opt) 173 174 self.cmd_options = self.opt['output_options'] 175 176 #place holder to pass information to the run_interface 177 self.proc_characteristic = banner_mod.ProcCharacteristic() 178 # call mother class 179 super(ProcessExporterFortran,self).__init__(dir_path, opt)
180 181 182 #=========================================================================== 183 # process exporter fortran switch between group and not grouped 184 #===========================================================================
185 - def export_processes(self, matrix_elements, fortran_model):
186 """Make the switch between grouped and not grouped output""" 187 188 calls = 0 189 if isinstance(matrix_elements, group_subprocs.SubProcessGroupList): 190 for (group_number, me_group) in enumerate(matrix_elements): 191 calls = calls + self.generate_subprocess_directory(\ 192 me_group, fortran_model, group_number) 193 else: 194 for me_number, me in enumerate(matrix_elements.get_matrix_elements()): 195 calls = calls + self.generate_subprocess_directory(\ 196 me, fortran_model, me_number) 197 198 return calls
199 200 201 #=========================================================================== 202 # create the run_card 203 #===========================================================================
    def create_run_card(self, matrix_elements, history):
        """Write Cards/run_card_default.dat, tailored to the processes when
        possible, and copy it to Cards/run_card.dat."""

        # bypass this for the loop-check
        import madgraph.loop.loop_helas_objects as loop_helas_objects
        if isinstance(matrix_elements, loop_helas_objects.LoopHelasMatrixElement):
            matrix_elements = None

        run_card = banner_mod.RunCard()

        default = True
        if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
            # grouped output: flatten the process lists of all the groups
            processes = [me.get('processes') for megroup in matrix_elements
                         for me in megroup['matrix_elements']]
        elif matrix_elements:
            processes = [me.get('processes')
                         for me in matrix_elements['matrix_elements']]
        else:
            # no matrix element available: keep the generic default card
            default = False

        if default:
            run_card.create_default_for_process(self.proc_characteristic,
                                                history,
                                                processes)

        run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'))
        shutil.copyfile(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'),
                        pjoin(self.dir_path, 'Cards', 'run_card.dat'))
234 235 236 237 #=========================================================================== 238 # copy the Template in a new directory. 239 #===========================================================================
240 - def copy_template(self, model):
241 """create the directory run_name as a copy of the MadEvent 242 Template, and clean the directory 243 """ 244 245 #First copy the full template tree if dir_path doesn't exit 246 if not os.path.isdir(self.dir_path): 247 assert self.mgme_dir, \ 248 "No valid MG_ME path given for MG4 run directory creation." 249 logger.info('initialize a new directory: %s' % \ 250 os.path.basename(self.dir_path)) 251 shutil.copytree(pjoin(self.mgme_dir, 'Template/LO'), 252 self.dir_path, True) 253 # distutils.dir_util.copy_tree since dir_path already exists 254 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 255 self.dir_path) 256 # copy plot_card 257 for card in ['plot_card']: 258 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 259 try: 260 shutil.copy(pjoin(self.dir_path, 'Cards',card + '.dat'), 261 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 262 except IOError: 263 logger.warning("Failed to copy " + card + ".dat to default") 264 elif os.getcwd() == os.path.realpath(self.dir_path): 265 logger.info('working in local directory: %s' % \ 266 os.path.realpath(self.dir_path)) 267 # distutils.dir_util.copy_tree since dir_path already exists 268 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/LO'), 269 self.dir_path) 270 # for name in misc.glob('Template/LO/*', self.mgme_dir): 271 # name = os.path.basename(name) 272 # filname = pjoin(self.mgme_dir, 'Template','LO',name) 273 # if os.path.isfile(filename): 274 # files.cp(filename, pjoin(self.dir_path,name)) 275 # elif os.path.isdir(filename): 276 # shutil.copytree(filename, pjoin(self.dir_path,name), True) 277 # distutils.dir_util.copy_tree since dir_path already exists 278 dir_util.copy_tree(pjoin(self.mgme_dir, 'Template/Common'), 279 self.dir_path) 280 # Copy plot_card 281 for card in ['plot_card']: 282 if os.path.isfile(pjoin(self.dir_path, 'Cards',card + '.dat')): 283 try: 284 shutil.copy(pjoin(self.dir_path, 'Cards', card + '.dat'), 285 pjoin(self.dir_path, 'Cards', card + '_default.dat')) 286 
except IOError: 287 logger.warning("Failed to copy " + card + ".dat to default") 288 elif not os.path.isfile(pjoin(self.dir_path, 'TemplateVersion.txt')): 289 assert self.mgme_dir, \ 290 "No valid MG_ME path given for MG4 run directory creation." 291 try: 292 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 293 except IOError: 294 MG5_version = misc.get_pkg_info() 295 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write(MG5_version['version']) 296 297 #Ensure that the Template is clean 298 if self.opt['clean']: 299 logger.info('remove old information in %s' % \ 300 os.path.basename(self.dir_path)) 301 if os.environ.has_key('MADGRAPH_BASE'): 302 misc.call([pjoin('bin', 'internal', 'clean_template'), 303 '--web'], cwd=self.dir_path) 304 else: 305 try: 306 misc.call([pjoin('bin', 'internal', 'clean_template')], \ 307 cwd=self.dir_path) 308 except Exception, why: 309 raise MadGraph5Error('Failed to clean correctly %s: \n %s' \ 310 % (os.path.basename(self.dir_path),why)) 311 312 #Write version info 313 MG_version = misc.get_pkg_info() 314 open(pjoin(self.dir_path, 'SubProcesses', 'MGVersion.txt'), 'w').write( 315 MG_version['version']) 316 317 # add the makefile in Source directory 318 filename = pjoin(self.dir_path,'Source','makefile') 319 self.write_source_makefile(writers.FileWriter(filename)) 320 321 # add the DiscreteSampler information 322 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'DiscreteSampler.f'), 323 pjoin(self.dir_path, 'Source')) 324 files.cp(pjoin(MG5DIR,'vendor', 'DiscreteSampler', 'StringCast.f'), 325 pjoin(self.dir_path, 'Source')) 326 327 # We need to create the correct open_data for the pdf 328 self.write_pdf_opendata()
329 330 331 #=========================================================================== 332 # Call MadAnalysis5 to generate the default cards for this process 333 #===========================================================================
    def create_default_madanalysis5_cards(self, history, proc_defs, processes,
                                          ma5_path, output_dir, levels=['parton', 'hadron']):
        """ Call MA5 so that it writes default cards for both parton and
        post-shower levels, tailored for this particular process."""

        if len(levels) == 0:
            return
        start = time.time()
        logger.info('Generating MadAnalysis5 default cards tailored to this process')
        # MA5 may raise or call sys.exit(); catch both and degrade gracefully
        # (the default analysis is then simply left empty).
        try:
            MA5_interpreter = common_run_interface.CommonRunCmd.\
                              get_MadAnalysis5_interpreter(MG5DIR, ma5_path, loglevel=100)
        except (Exception, SystemExit) as e:
            logger.warning('Fail to create a MadAnalysis5 instance. Therefore the default analysis with MadAnalysis5 will be empty.')
            return
        if MA5_interpreter is None:
            return

        MA5_main = MA5_interpreter.main
        for lvl in ['parton', 'hadron']:
            if lvl in levels:
                card_to_generate = pjoin(output_dir, 'madanalysis5_%s_card_default.dat' % lvl)
                try:
                    text = MA5_main.madgraph.generate_card(history, proc_defs, processes, lvl)
                except (Exception, SystemExit) as e:
                    # keep the default card (skip only)
                    logger.warning('MadAnalysis5 failed to write a %s-level' % lvl +
                                   ' default analysis card for this process.')
                    logger.warning('Therefore, %s-level default analysis with MadAnalysis5 will be empty.' % lvl)
                    # log the full traceback at debug level for post-mortem
                    error = StringIO()
                    traceback.print_exc(file=error)
                    logger.debug('MadAnalysis5 error was:')
                    logger.debug('-' * 60)
                    logger.debug(error.getvalue()[:-1])
                    logger.debug('-' * 60)
                else:
                    open(card_to_generate, 'w').write(text)
        stop = time.time()
        # only report timing when the generation was noticeably slow
        if stop - start > 1:
            logger.info('Cards created in %.2fs' % (stop - start))
374 375 #=========================================================================== 376 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 377 #===========================================================================
378 - def write_procdef_mg5(self, file_pos, modelname, process_str):
379 """ write an equivalent of the MG4 proc_card in order that all the Madevent 380 Perl script of MadEvent4 are still working properly for pure MG5 run.""" 381 382 proc_card_template = template_files.mg4_proc_card.mg4_template 383 process_template = template_files.mg4_proc_card.process_template 384 process_text = '' 385 coupling = '' 386 new_process_content = [] 387 388 389 # First find the coupling and suppress the coupling from process_str 390 #But first ensure that coupling are define whithout spaces: 391 process_str = process_str.replace(' =', '=') 392 process_str = process_str.replace('= ', '=') 393 process_str = process_str.replace(',',' , ') 394 #now loop on the element and treat all the coupling 395 for info in process_str.split(): 396 if '=' in info: 397 coupling += info + '\n' 398 else: 399 new_process_content.append(info) 400 # Recombine the process_str (which is the input process_str without coupling 401 #info) 402 process_str = ' '.join(new_process_content) 403 404 #format the SubProcess 405 replace_dict = {'process': process_str, 406 'coupling': coupling} 407 process_text += process_template.substitute(replace_dict) 408 409 replace_dict = {'process': process_text, 410 'model': modelname, 411 'multiparticle':''} 412 text = proc_card_template.substitute(replace_dict) 413 414 if file_pos: 415 ff = open(file_pos, 'w') 416 ff.write(text) 417 ff.close() 418 else: 419 return replace_dict
420 421
    def pass_information_from_cmd(self, cmd):
        """Pass information for MA5"""

        # Remember the current process definitions; used later by
        # create_MA5_cards when generating default MadAnalysis5 cards.
        self.proc_defs = cmd._curr_proc_defs
426 427 #=========================================================================== 428 # Create jpeg diagrams, html pages,proc_card_mg5.dat and madevent.tar.gz 429 #===========================================================================
430 - def finalize(self, matrix_elements, history='', mg5options={}, flaglist=[]):
431 """Function to finalize v4 directory, for inheritance.""" 432 433 self.create_run_card(matrix_elements, history) 434 self.create_MA5_cards(matrix_elements, history)
435
    def create_MA5_cards(self, matrix_elements, history):
        """ A wrapper around the creation of the MA5 cards so that it can be
        bypassed by daughter classes (i.e. in standalone)."""
        # NOTE(review): relies on self.proc_defs having been set beforehand by
        # pass_information_from_cmd() — confirm callers always do so.
        if 'madanalysis5_path' in self.opt and not \
                self.opt['madanalysis5_path'] is None and not self.proc_defs is None:
            processes = None
            if isinstance(matrix_elements, group_subprocs.SubProcessGroupList):
                # grouped output: flatten the process lists of all the groups
                processes = [me.get('processes') for megroup in matrix_elements
                             for me in megroup['matrix_elements']]
            elif matrix_elements:
                processes = [me.get('processes')
                             for me in matrix_elements['matrix_elements']]

            self.create_default_madanalysis5_cards(
                history, self.proc_defs, processes,
                self.opt['madanalysis5_path'], pjoin(self.dir_path, 'Cards'),
                levels=['hadron', 'parton'])

            for level in ['hadron', 'parton']:
                # Copying these cards turn on the use of MadAnalysis5 by default.
                if os.path.isfile(pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card_default.dat' % level)):
                    shutil.copy(pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card_default.dat' % level),
                                pjoin(self.dir_path, 'Cards', 'madanalysis5_%s_card.dat' % level))
459 460 #=========================================================================== 461 # Create the proc_characteristic file passing information to the run_interface 462 #===========================================================================
463 - def create_proc_charac(self, matrix_elements=None, history="", **opts):
464 465 self.proc_characteristic.write(pjoin(self.dir_path, 'SubProcesses', 'proc_characteristics'))
466 467 #=========================================================================== 468 # write_matrix_element_v4 469 #===========================================================================
    def write_matrix_element_v4(self):
        """Function to write a matrix.f file, for inheritance.
        The base implementation does nothing; daughter classes provide the
        real writer."""
        pass
474 475 #=========================================================================== 476 # write_pdf_opendata 477 #===========================================================================
    def write_pdf_opendata(self):
        """ modify the pdf opendata file, to allow direct access to cluster node
        repository if configure"""

        # Without a cluster-local path, keep the system-wide lookup only.
        if not self.opt["cluster_local_path"]:
            changer = {"pdf_systemwide": ""}
        else:
            # Fortran snippet trying the cluster-local locations first;
            # each failed OPEN jumps (ERR=label) to the next candidate path.
            to_add = """
            tempname='%(path)s'//Tablefile
            open(IU,file=tempname,status='old',ERR=1)
            return
 1          tempname='%(path)s/Pdfdata/'//Tablefile
            open(IU,file=tempname,status='old',ERR=2)
            return
 2          tempname='%(path)s/lhapdf'//Tablefile
            open(IU,file=tempname,status='old',ERR=3)
            return
 3          tempname='%(path)s/../lhapdf/pdfsets/'//Tablefile
            open(IU,file=tempname,status='old',ERR=4)
            return
 4          tempname='%(path)s/../lhapdf/pdfsets/6.1/'//Tablefile
            open(IU,file=tempname,status='old',ERR=5)
            return
            """ % {"path" : self.opt["cluster_local_path"]}

            changer = {"pdf_systemwide": to_add}

        # FortranWriter reformats the template, so the snippet's own
        # indentation above is not significant.
        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "opendata.f"))
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_opendata.f"), "r").read()
        ff.writelines(template % changer)

        # Do the same for lhapdf set
        if not self.opt["cluster_local_path"]:
            changer = {"cluster_specific_path": ""}
        else:
            to_add = """
            LHAPath='%(path)s/PDFsets'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='%(path)s/../lhapdf/pdfsets/6.1/'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='%(path)s/../lhapdf/pdfsets/'
            Inquire(File=LHAPath, exist=exists)
            if(exists)return
            LHAPath='./PDFsets'
            """ % {"path" : self.opt["cluster_local_path"]}
            changer = {"cluster_specific_path": to_add}

        ff = writers.FortranWriter(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"))
        #ff = open(pjoin(self.dir_path, "Source", "PDF", "pdfwrap_lhapdf.f"),"w")
        template = open(pjoin(MG5DIR, "madgraph", "iolibs", "template_files", "pdf_wrap_lhapdf.f"), "r").read()
        ff.writelines(template % changer)

        return
535 536 537 538 #=========================================================================== 539 # write_maxparticles_file 540 #===========================================================================
541 - def write_maxparticles_file(self, writer, matrix_elements):
542 """Write the maxparticles.inc file for MadEvent""" 543 544 if isinstance(matrix_elements, helas_objects.HelasMultiProcess): 545 maxparticles = max([me.get_nexternal_ninitial()[0] for me in \ 546 matrix_elements.get('matrix_elements')]) 547 else: 548 maxparticles = max([me.get_nexternal_ninitial()[0] \ 549 for me in matrix_elements]) 550 551 lines = "integer max_particles\n" 552 lines += "parameter(max_particles=%d)" % maxparticles 553 554 # Write the file 555 writer.writelines(lines) 556 557 return True
558 559 560 #=========================================================================== 561 # export the model 562 #===========================================================================
    def export_model_files(self, model_path):
        """Configure the files/link of the process according to the model"""

        # Import the model: copy every regular file of the model directory
        # into Source/MODEL.  (The method continues with further setup in
        # lines collapsed out of this view.)
        for file in os.listdir(model_path):
            if os.path.isfile(pjoin(model_path, file)):
                shutil.copy2(pjoin(model_path, file), \
                             pjoin(self.dir_path, 'Source', 'MODEL'))
571 572 586 594 595 596 #=========================================================================== 597 # export the helas routine 598 #===========================================================================
599 - def export_helas(self, helas_path):
600 """Configure the files/link of the process according to the model""" 601 602 # Import helas routine 603 for filename in os.listdir(helas_path): 604 filepos = pjoin(helas_path, filename) 605 if os.path.isfile(filepos): 606 if filepos.endswith('Makefile.template'): 607 cp(filepos, self.dir_path + '/Source/DHELAS/Makefile') 608 elif filepos.endswith('Makefile'): 609 pass 610 else: 611 cp(filepos, self.dir_path + '/Source/DHELAS')
612 # following lines do the same but whithout symbolic link 613 # 614 #def export_helas(mgme_dir, dir_path): 615 # 616 # # Copy the HELAS directory 617 # helas_dir = pjoin(mgme_dir, 'HELAS') 618 # for filename in os.listdir(helas_dir): 619 # if os.path.isfile(pjoin(helas_dir, filename)): 620 # shutil.copy2(pjoin(helas_dir, filename), 621 # pjoin(dir_path, 'Source', 'DHELAS')) 622 # shutil.move(pjoin(dir_path, 'Source', 'DHELAS', 'Makefile.template'), 623 # pjoin(dir_path, 'Source', 'DHELAS', 'Makefile')) 624 # 625 626 #=========================================================================== 627 # generate_subprocess_directory 628 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Routine to generate a subprocess directory (for inheritance).
        The base implementation does nothing (and therefore returns None
        rather than a helicity-call count); daughter classes override it."""

        pass
635 636 #=========================================================================== 637 # get_source_libraries_list 638 #===========================================================================
639 - def get_source_libraries_list(self):
640 """ Returns the list of libraries to be compiling when compiling the 641 SOURCE directory. It is different for loop_induced processes and 642 also depends on the value of the 'output_dependencies' option""" 643 644 return ['$(LIBDIR)libdhelas.$(libext)', 645 '$(LIBDIR)libpdf.$(libext)', 646 '$(LIBDIR)libmodel.$(libext)', 647 '$(LIBDIR)libcernlib.$(libext)', 648 '$(LIBDIR)libbias.$(libext)']
649 650 #=========================================================================== 651 # write_source_makefile 652 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source directory makefile for MG4 by filling the
        madevent_makefile_source template.  (The original docstring said
        'nexternal.inc', a copy-paste error.)  Returns the substitution
        dictionary; writes the file only when a writer is given."""

        path = pjoin(_file_path, 'iolibs', 'template_files', 'madevent_makefile_source')
        set_of_lib = ' '.join(['$(LIBRARIES)'] + self.get_source_libraries_list())
        # mssm models need an extra param_card conversion step (MG5_param.dat).
        if self.opt['model'] == 'mssm' or self.opt['model'].startswith('mssm-'):
            model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
MODEL/MG5_param.dat: ../Cards/param_card.dat\n\t../bin/madevent treatcards param
param_card.inc: MODEL/MG5_param.dat\n\t../bin/madevent treatcards param\n'''
        else:
            model_line = '''$(LIBDIR)libmodel.$(libext): MODEL param_card.inc\n\tcd MODEL; make
param_card.inc: ../Cards/param_card.dat\n\t../bin/madevent treatcards param\n'''

        replace_dict = {'libraries': set_of_lib,
                        'model': model_line,
                        'additional_dsample': '',
                        'additional_dependencies': ''}

        if writer:
            text = open(path).read() % replace_dict
            writer.write(text)

        return replace_dict
676 677 #=========================================================================== 678 # write_nexternal_madspin 679 #===========================================================================
680 - def write_nexternal_madspin(self, writer, nexternal, ninitial):
681 """Write the nexternal_prod.inc file for madspin""" 682 683 replace_dict = {} 684 685 replace_dict['nexternal'] = nexternal 686 replace_dict['ninitial'] = ninitial 687 688 file = """ \ 689 integer nexternal_prod 690 parameter (nexternal_prod=%(nexternal)d) 691 integer nincoming_prod 692 parameter (nincoming_prod=%(ninitial)d)""" % replace_dict 693 694 # Write the file 695 if writer: 696 writer.writelines(file) 697 return True 698 else: 699 return replace_dict
700 701 #=========================================================================== 702 # write_helamp_madspin 703 #===========================================================================
704 - def write_helamp_madspin(self, writer, ncomb):
705 """Write the helamp.inc file for madspin""" 706 707 replace_dict = {} 708 709 replace_dict['ncomb'] = ncomb 710 711 file = """ \ 712 integer ncomb1 713 parameter (ncomb1=%(ncomb)d) 714 double precision helamp(ncomb1) 715 common /to_helamp/helamp """ % replace_dict 716 717 # Write the file 718 if writer: 719 writer.writelines(file) 720 return True 721 else: 722 return replace_dict
723 724 725 726 #=========================================================================== 727 # write_nexternal_file 728 #===========================================================================
729 - def write_nexternal_file(self, writer, nexternal, ninitial):
730 """Write the nexternal.inc file for MG4""" 731 732 replace_dict = {} 733 734 replace_dict['nexternal'] = nexternal 735 replace_dict['ninitial'] = ninitial 736 737 file = """ \ 738 integer nexternal 739 parameter (nexternal=%(nexternal)d) 740 integer nincoming 741 parameter (nincoming=%(ninitial)d)""" % replace_dict 742 743 # Write the file 744 if writer: 745 writer.writelines(file) 746 return True 747 else: 748 return replace_dict
749 #=========================================================================== 750 # write_pmass_file 751 #===========================================================================
752 - def write_pmass_file(self, writer, matrix_element):
753 """Write the pmass.inc file for MG4""" 754 755 model = matrix_element.get('processes')[0].get('model') 756 757 lines = [] 758 for wf in matrix_element.get_external_wavefunctions(): 759 mass = model.get('particle_dict')[wf.get('pdg_code')].get('mass') 760 if mass.lower() != "zero": 761 mass = "abs(%s)" % mass 762 763 lines.append("pmass(%d)=%s" % \ 764 (wf.get('number_external'), mass)) 765 766 # Write the file 767 writer.writelines(lines) 768 769 return True
770 771 #=========================================================================== 772 # write_ngraphs_file 773 #===========================================================================
774 - def write_ngraphs_file(self, writer, nconfigs):
775 """Write the ngraphs.inc file for MG4. Needs input from 776 write_configs_file.""" 777 778 file = " integer n_max_cg\n" 779 file = file + "parameter (n_max_cg=%d)" % nconfigs 780 781 # Write the file 782 writer.writelines(file) 783 784 return True
785 786 #=========================================================================== 787 # write_leshouche_file 788 #===========================================================================
789 - def write_leshouche_file(self, writer, matrix_element):
790 """Write the leshouche.inc file for MG4""" 791 792 # Write the file 793 writer.writelines(self.get_leshouche_lines(matrix_element, 0)) 794 795 return True
796 797 #=========================================================================== 798 # get_leshouche_lines 799 #===========================================================================
    def get_leshouche_lines(self, matrix_element, numproc):
        """Return the Fortran DATA statements (IDUP, MOTHUP, ICOLUP) that make
        up the leshouche.inc file for MG4."""

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        lines = []
        for iproc, proc in enumerate(matrix_element.get('processes')):
            legs = proc.get_legs_with_decays()
            # PDG codes of all external legs for this (sub)process.
            # "%2r"/"%3r" right-justify integers via their repr().
            lines.append("DATA (IDUP(i,%d,%d),i=1,%d)/%s/" % \
                         (iproc + 1, numproc+1, nexternal,
                          ",".join([str(l.get('id')) for l in legs])))
            if iproc == 0 and numproc == 0:
                # Mothers: 0 for the initial state, beam 1/2 for final state.
                for i in [1, 2]:
                    lines.append("DATA (MOTHUP(%d,i),i=1,%2r)/%s/" % \
                                 (i, nexternal,
                                  ",".join([ "%3r" % 0 ] * ninitial + \
                                           [ "%3r" % i ] * (nexternal - ninitial))))

            # Here goes the color connections corresponding to the JAMPs
            # Only one output, for the first subproc!
            if iproc == 0:
                # If no color basis, just output trivial color flow
                if not matrix_element.get('color_basis'):
                    for i in [1, 2]:
                        lines.append("DATA (ICOLUP(%d,i,1,%d),i=1,%2r)/%s/" % \
                                     (i, numproc+1, nexternal,
                                      ",".join([ "%3r" % 0 ] * nexternal)))

                else:
                    # First build a color representation dictionnary
                    repr_dict = {}
                    for l in legs:
                        # (-1)**(1+state) flips the color rep for initial legs
                        repr_dict[l.get('number')] = \
                            proc.get('model').get_particle(l.get('id')).get_color()\
                            * (-1)**(1+l.get('state'))
                    # Get the list of color flows
                    color_flow_list = \
                        matrix_element.get('color_basis').color_flow_decomposition(repr_dict,
                                                                                   ninitial)
                    # And output them properly
                    for cf_i, color_flow_dict in enumerate(color_flow_list):
                        for i in [0, 1]:
                            lines.append("DATA (ICOLUP(%d,i,%d,%d),i=1,%2r)/%s/" % \
                                         (i + 1, cf_i + 1, numproc+1, nexternal,
                                          ",".join(["%3r" % color_flow_dict[l.get('number')][i] \
                                                    for l in legs])))

        return lines
849 850 851 852 853 #=========================================================================== 854 # write_maxamps_file 855 #===========================================================================
856 - def write_maxamps_file(self, writer, maxamps, maxflows, 857 maxproc,maxsproc):
858 """Write the maxamps.inc file for MG4.""" 859 860 file = " integer maxamps, maxflow, maxproc, maxsproc\n" 861 file = file + "parameter (maxamps=%d, maxflow=%d)\n" % \ 862 (maxamps, maxflows) 863 file = file + "parameter (maxproc=%d, maxsproc=%d)" % \ 864 (maxproc, maxsproc) 865 866 # Write the file 867 writer.writelines(file) 868 869 return True
870 871 872 #=========================================================================== 873 # Routines to output UFO models in MG4 format 874 #=========================================================================== 875
    def convert_model(self, model, wanted_lorentz = [],
                      wanted_couplings = []):
        """ Create a full valid MG4 model from a MG5 model (coming from UFO).

        Builds Source/MODEL from the UFO model (restricted to
        wanted_couplings if given), generates the ALOHA helicity routines
        (restricted to wanted_lorentz if given) into Source/DHELAS, and
        symlinks the result into the process directory.
        NOTE(review): the default arguments are mutable lists; they are
        not mutated here, but callers should not rely on sharing them.
        """

        # Make sure aloha is in quadruple precision if needed
        old_aloha_mp=aloha.mp_precision
        aloha.mp_precision=self.opt['mp']
        self.model = model
        # create the MODEL
        write_dir=pjoin(self.dir_path, 'Source', 'MODEL')
        # NOTE(review): relies on self.opt supporting '+' with
        # self.proc_characteristic (plain dicts would raise TypeError);
        # confirm against the option container classes.
        model_builder = UFO_model_to_mg4(model, write_dir, self.opt + self.proc_characteristic)
        model_builder.build(wanted_couplings)

        # Backup the loop mode, because it can be changed in what follows.
        old_loop_mode = aloha.loop_mode

        # Create the aloha model or use the existing one (for loop exporters
        # this is useful as the aloha model will be used again in the
        # LoopHelasMatrixElements generated). We do not save the model generated
        # here if it didn't exist already because it would be a waste of
        # memory for tree level applications since aloha is only needed at the
        # time of creating the aloha fortran subroutines.
        if hasattr(self, 'aloha_model'):
            aloha_model = self.aloha_model
        else:
            aloha_model = create_aloha.AbstractALOHAModel(os.path.basename(model.get('modelpath')))
        aloha_model.add_Lorentz_object(model.get('lorentz'))

        # Compute the subroutines
        if wanted_lorentz:
            aloha_model.compute_subset(wanted_lorentz)
        else:
            aloha_model.compute_all(save=False)

        # Write them out
        write_dir=pjoin(self.dir_path, 'Source', 'DHELAS')
        aloha_model.write(write_dir, 'Fortran')

        # Revert the original aloha loop mode
        aloha.loop_mode = old_loop_mode

        # copy Helas Template; pick the loop-capable aloha_functions.f when
        # any wanted Lorentz structure carries an 'L' (loop) tag.
        cp(MG5DIR + '/aloha/template_files/Makefile_F', write_dir+'/makefile')
        if any([any([tag.startswith('L') for tag in d[1]]) for d in wanted_lorentz]):
            cp(MG5DIR + '/aloha/template_files/aloha_functions_loop.f',
               write_dir+'/aloha_functions.f')
            aloha_model.loop_mode = False
        else:
            cp(MG5DIR + '/aloha/template_files/aloha_functions.f',
               write_dir+'/aloha_functions.f')
        create_aloha.write_aloha_file_inc(write_dir, '.f', '.o')

        # Make final link in the Process
        self.make_model_symbolic_link()

        # Re-establish original aloha mode
        aloha.mp_precision=old_aloha_mp
933 934 935 #=========================================================================== 936 # Helper functions 937 #===========================================================================
938 - def modify_grouping(self, matrix_element):
939 """allow to modify the grouping (if grouping is in place) 940 return two value: 941 - True/False if the matrix_element was modified 942 - the new(or old) matrix element""" 943 944 return False, matrix_element
945 946 #=========================================================================== 947 # Helper functions 948 #===========================================================================
949 - def get_mg5_info_lines(self):
950 """Return info lines for MG5, suitable to place at beginning of 951 Fortran files""" 952 953 info = misc.get_pkg_info() 954 info_lines = "" 955 if info and info.has_key('version') and info.has_key('date'): 956 info_lines = "# Generated by MadGraph5_aMC@NLO v. %s, %s\n" % \ 957 (info['version'], info['date']) 958 info_lines = info_lines + \ 959 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 960 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 961 else: 962 info_lines = "# Generated by MadGraph5_aMC@NLO\n" + \ 963 "# By the MadGraph5_aMC@NLO Development Team\n" + \ 964 "# Visit launchpad.net/madgraph5 and amcatnlo.web.cern.ch" 965 966 return info_lines
967
968 - def get_process_info_lines(self, matrix_element):
969 """Return info lines describing the processes for this matrix element""" 970 971 return"\n".join([ "C " + process.nice_string().replace('\n', '\nC * ') \ 972 for process in matrix_element.get('processes')])
973 974
975 - def get_helicity_lines(self, matrix_element,array_name='NHEL'):
976 """Return the Helicity matrix definition lines for this matrix element""" 977 978 helicity_line_list = [] 979 i = 0 980 for helicities in matrix_element.get_helicity_matrix(): 981 i = i + 1 982 int_list = [i, len(helicities)] 983 int_list.extend(helicities) 984 helicity_line_list.append(\ 985 ("DATA ("+array_name+"(I,%4r),I=1,%d) /" + \ 986 ",".join(['%2r'] * len(helicities)) + "/") % tuple(int_list)) 987 988 return "\n".join(helicity_line_list)
989
990 - def get_ic_line(self, matrix_element):
991 """Return the IC definition line coming after helicities, required by 992 switchmom in madevent""" 993 994 nexternal = matrix_element.get_nexternal_ninitial()[0] 995 int_list = range(1, nexternal + 1) 996 997 return "DATA (IC(I,1),I=1,%i) /%s/" % (nexternal, 998 ",".join([str(i) for \ 999 i in int_list]))
1000
    def set_chosen_SO_index(self, process, squared_orders):
        """ From the squared order constraints set by the user, this function
        finds what indices of the squared_orders list the user intends to pick.
        It returns this as a string of comma-separated successive '.true.' or
        '.false.' for each index."""

        user_squared_orders = process.get('squared_orders')
        split_orders = process.get('split_orders')

        # No user constraint: every squared-order combination is selected.
        if len(user_squared_orders)==0:
            return ','.join(['.true.']*len(squared_orders))

        res = []
        for sqsos in squared_orders:
            is_a_match = True
            for user_sqso, value in user_squared_orders.items():
                # A combination is rejected as soon as it violates one
                # constraint, depending on its comparison type:
                #   '=='        : order must equal the requested value;
                #   '<=' or '=' : order must not exceed the requested value;
                #   '>'         : order must be strictly above it.
                if (process.get_squared_order_type(user_sqso) =='==' and \
                    value!=sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) in ['<=','='] and \
                    value<sqsos[split_orders.index(user_sqso)]) or \
                   (process.get_squared_order_type(user_sqso) == '>' and \
                    value>=sqsos[split_orders.index(user_sqso)]):
                    is_a_match = False
                    break
            res.append('.true.' if is_a_match else '.false.')

        return ','.join(res)
1028
1029 - def get_split_orders_lines(self, orders, array_name, n=5):
1030 """ Return the split orders definition as defined in the list orders and 1031 for the name of the array 'array_name'. Split rows in chunks of size n.""" 1032 1033 ret_list = [] 1034 for index, order in enumerate(orders): 1035 for k in xrange(0, len(order), n): 1036 ret_list.append("DATA (%s(%3r,i),i=%3r,%3r) /%s/" % \ 1037 (array_name,index + 1, k + 1, min(k + n, len(order)), 1038 ','.join(["%5r" % i for i in order[k:k + n]]))) 1039 return ret_list
1040
1041 - def format_integer_list(self, list, name, n=5):
1042 """ Return an initialization of the python list in argument following 1043 the fortran syntax using the data keyword assignment, filling an array 1044 of name 'name'. It splits rows in chunks of size n.""" 1045 1046 ret_list = [] 1047 for k in xrange(0, len(list), n): 1048 ret_list.append("DATA (%s(i),i=%3r,%3r) /%s/" % \ 1049 (name, k + 1, min(k + n, len(list)), 1050 ','.join(["%5r" % i for i in list[k:k + n]]))) 1051 return ret_list
1052
    def get_color_data_lines(self, matrix_element, n=6):
        """Return the color matrix definition lines for this matrix element. Split
        rows in chunks of size n.

        Each color-matrix row is emitted as one Denom DATA statement plus
        CF DATA statements of at most n numerators, followed by a Fortran
        comment naming the corresponding color-basis structure.
        """

        if not matrix_element.get('color_matrix'):
            # Colorless process: trivial 1x1 color matrix.
            return ["DATA Denom(1)/1/", "DATA (CF(i,1),i=1,1) /1/"]
        else:
            ret_list = []
            my_cs = color.ColorString()
            for index, denominator in \
                enumerate(matrix_element.get('color_matrix').\
                                             get_line_denominators()):
                # First write the common denominator for this color matrix line
                ret_list.append("DATA Denom(%i)/%i/" % (index + 1, denominator))
                # Then write the numerators for the matrix elements
                num_list = matrix_element.get('color_matrix').\
                                             get_line_numerators(index, denominator)

                # Split the row into DATA statements of at most n entries.
                for k in xrange(0, len(num_list), n):
                    ret_list.append("DATA (CF(i,%3r),i=%3r,%3r) /%s/" % \
                                    (index + 1, k + 1, min(k + n, len(num_list)),
                                     ','.join(["%5r" % i for i in num_list[k:k + n]])))
                # Comment documenting which color structure this row represents.
                my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[index])
                ret_list.append("C %s" % repr(my_cs))
            return ret_list
1078 1079
1080 - def get_den_factor_line(self, matrix_element):
1081 """Return the denominator factor line for this matrix element""" 1082 1083 return "DATA IDEN/%2r/" % \ 1084 matrix_element.get_denominator_factor()
1085
    def get_icolamp_lines(self, mapconfigs, matrix_element, num_matrix_element):
        """Return the ICOLAMP matrix, showing which JAMPs contribute to
        which configs (diagrams).

        Only leading-color JAMPs (those reaching the maximal Nc power,
        loop-trace power excluded) are flagged .true. for a config.
        """

        ret_list = []

        booldict = {False: ".false.", True: ".true."}

        if not matrix_element.get('color_basis'):
            # No color, so only one color factor. Simply write a ".true."
            # for each config (i.e., each diagram with only 3 particle
            # vertices
            configs = len(mapconfigs)
            ret_list.append("DATA(icolamp(1,i,%d),i=1,%d)/%s/" % \
                            (num_matrix_element, configs,
                             ','.join([".true." for i in range(configs)])))
            return ret_list

        # There is a color basis - create a list showing which JAMPs have
        # contributions to which configs

        # Only want to include leading color flows, so find max_Nc
        color_basis = matrix_element.get('color_basis')

        # We don't want to include the power of Nc's which come from the potential
        # loop color trace (i.e. in the case of a closed fermion loop for example)
        # so we subtract it here when computing max_Nc
        max_Nc = max(sum([[(v[4]-v[5]) for v in val] for val in
                          color_basis.values()],[]))

        # Create dictionary between diagram number and JAMP number
        diag_jamp = {}
        for ijamp, col_basis_elem in \
                enumerate(sorted(matrix_element.get('color_basis').keys())):
            for diag_tuple in matrix_element.get('color_basis')[col_basis_elem]:
                # Only use color flows with Nc == max_Nc. However, notice that
                # we don't want to include the Nc power coming from the loop
                # in this counting.
                if (diag_tuple[4]-diag_tuple[5]) == max_Nc:
                    diag_num = diag_tuple[0] + 1
                    # Add this JAMP number to this diag_num
                    diag_jamp[diag_num] = diag_jamp.setdefault(diag_num, []) + \
                                          [ijamp+1]

        # NOTE(review): ijamp is the index left over from the loop above,
        # so colamps is the total number of color-basis elements.
        colamps = ijamp + 1
        for iconfig, num_diag in enumerate(mapconfigs):
            if num_diag == 0:
                continue

            # List of True or False
            # NOTE(review): raises KeyError if a mapped diagram has no
            # leading-color flow at all — presumably never happens; confirm.
            bool_list = [(i + 1 in diag_jamp[num_diag]) for i in range(colamps)]
            # Add line
            ret_list.append("DATA(icolamp(i,%d,%d),i=1,%d)/%s/" % \
                                (iconfig+1, num_matrix_element, colamps,
                                 ','.join(["%s" % booldict[b] for b in \
                                           bool_list])))

        return ret_list
1144
    def get_amp2_lines(self, matrix_element, config_map = []):
        """Return the amp2(i) = sum(amp for diag(i))^2 lines.

        With a non-empty config_map, amplitudes of diagrams sharing the
        same topology are summed coherently into one AMP2 entry; otherwise
        one AMP2 line is written per diagram (skipping multi-particle
        vertex diagrams).
        NOTE(review): config_map has a mutable default list; it is only
        read here, never mutated.
        """

        nexternal, ninitial = matrix_element.get_nexternal_ninitial()
        # Get minimum legs in a vertex
        vert_list = [max(diag.get_vertex_leg_numbers()) for diag in \
       matrix_element.get('diagrams') if diag.get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        ret_lines = []
        if config_map:
            # In this case, we need to sum up all amplitudes that have
            # identical topologies, as given by the config_map (which
            # gives the topology/config for each of the diagrams
            diagrams = matrix_element.get('diagrams')
            # Combine the diagrams with identical topologies
            config_to_diag_dict = {}
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                if config_map[idiag] == 0:
                    continue
                try:
                    config_to_diag_dict[config_map[idiag]].append(idiag)
                except KeyError:
                    config_to_diag_dict[config_map[idiag]] = [idiag]
            # Write out the AMP2s summing squares of amplitudes belonging
            # to either the same diagram or different diagrams with
            # identical propagator properties.  Note that we need to use
            # AMP2 number corresponding to the first diagram number used
            # for that AMP2.
            for config in sorted(config_to_diag_dict.keys()):

                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % \
                       {"num": (config_to_diag_dict[config][0] + 1)}

                amp = "+".join(["AMP(%(num)d)" % {"num": a.get('number')} for a in \
                                sum([diagrams[idiag].get('amplitudes') for \
                                     idiag in config_to_diag_dict[config]], [])])

                # Not using \sum |M|^2 anymore since this creates troubles
                # when ckm is not diagonal due to the JIM mechanism.
                if '+' in amp:
                    line += "(%s)*dconjg(%s)" % (amp, amp)
                else:
                    line += "%s*dconjg(%s)" % (amp, amp)
                ret_lines.append(line)
        else:
            for idiag, diag in enumerate(matrix_element.get('diagrams')):
                # Ignore any diagrams with 4-particle vertices.
                if diag.get_vertex_leg_numbers()!=[] and max(diag.get_vertex_leg_numbers()) > minvert:
                    continue
                # Now write out the expression for AMP2, meaning the sum of
                # squared amplitudes belonging to the same diagram
                line = "AMP2(%(num)d)=AMP2(%(num)d)+" % {"num": (idiag + 1)}
                line += "+".join(["AMP(%(num)d)*dconjg(AMP(%(num)d))" % \
                                  {"num": a.get('number')} for a in \
                                  diag.get('amplitudes')])
                ret_lines.append(line)

        return ret_lines
 1204  1205  #=========================================================================== 1206  # Returns the data statements initializing the coefficients for the JAMP 1207  # decomposition. It is used when the JAMP initialization is decided to be 1208  # done through big arrays containing the projection coefficients. 1209  #===========================================================================
    def get_JAMP_coefs(self, color_amplitudes, color_basis=None, tag_letter="",\
                       n=50, Nc_value=3):
        """This functions return the lines defining the DATA statement setting
        the coefficients building the JAMPS out of the AMPS. Split rows in
        bunches of size n.
        One can specify the color_basis from which the color amplitudes originates
        so that there are commentaries telling what color structure each JAMP
        corresponds to."""

        # Sanity check: must be a non-empty list of lists.
        if(not isinstance(color_amplitudes,list) or
           not (color_amplitudes and isinstance(color_amplitudes[0],list))):
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_coefs"

        res_list = []
        my_cs = color.ColorString()
        for index, coeff_list in enumerate(color_amplitudes):
            # Create the list of the complete numerical coefficient.
            # coefficient[0] = (fermion factor, fraction, is_imaginary, Nc power)
            coefs_list=[coefficient[0][0]*coefficient[0][1]*\
                        (fractions.Fraction(Nc_value)**coefficient[0][3]) for \
                        coefficient in coeff_list]
            # Create the list of the numbers of the contributing amplitudes.
            # Multiply by -1 for those which have an imaginary coefficient.
            ampnumbers_list=[coefficient[1]*(-1 if coefficient[0][2] else 1) \
                             for coefficient in coeff_list]
            # Find the common denominator so the coefficients can be stored
            # as integers (numerator) plus one shared denominator.
            commondenom=abs(reduce(fractions.gcd, coefs_list).denominator)
            num_list=[(coefficient*commondenom).numerator \
                      for coefficient in coefs_list]
            res_list.append("DATA NCONTRIBAMPS%s(%i)/%i/"%(tag_letter,\
                                                         index+1,len(num_list)))
            res_list.append("DATA DENOMCCOEF%s(%i)/%i/"%(tag_letter,\
                                                         index+1,commondenom))
            if color_basis:
                # Comment naming the color structure this JAMP corresponds to.
                my_cs.from_immutable(sorted(color_basis.keys())[index])
                res_list.append("C %s" % repr(my_cs))
            # Emit numerators and amplitude numbers in bunches of size n.
            for k in xrange(0, len(num_list), n):
                res_list.append("DATA (NUMCCOEF%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in num_list[k:k + n]])))
                res_list.append("DATA (AMPNUMBERS%s(%3r,i),i=%6r,%6r) /%s/" % \
                    (tag_letter,index + 1, k + 1, min(k + n, len(num_list)),
                     ','.join(["%6r" % i for i in ampnumbers_list[k:k + n]])))
                pass
        return res_list
1254 1255
    def get_JAMP_lines_split_order(self, col_amps, split_order_amps,
                          split_order_names=None, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)"):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split_order_amps specifies the group of amplitudes sharing the same
        amplitude orders which should be put in together in a given set of JAMPS.
        The split_order_amps is supposed to have the format of the second output
        of the function get_split_orders_mapping function in helas_objects.py.
        The split_order_names is optional (it should correspond to the process
        'split_orders' attribute) and only present to provide comments in the
        JAMP definitions in the code."""

        # Let the user call get_JAMP_lines_split_order directly from a
        # MatrixElement or from the color amplitude lists.
        # NOTE: the '%' below binds only to the second string literal, so
        # error_msg keeps one unfilled '%s' that is substituted at raise time.
        error_msg="Malformed '%s' argument passed to the "+\
                  "get_JAMP_lines_split_order function: %s"%str(split_order_amps)
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, error_msg%'col_amps'
        else:
            raise MadGraph5Error, error_msg%'col_amps'

        # Verify the sanity of the split_order_amps and split_order_names args
        if isinstance(split_order_amps,list):
            for elem in split_order_amps:
                if len(elem)!=2:
                    raise MadGraph5Error, error_msg%'split_order_amps'
                # Check the first element of the two lists to make sure they are
                # integers, although in principle they should all be integers.
                if not isinstance(elem[0],tuple) or \
                   not isinstance(elem[1],tuple) or \
                   not isinstance(elem[0][0],int) or \
                   not isinstance(elem[1][0],int):
                    raise MadGraph5Error, error_msg%'split_order_amps'
        else:
            raise MadGraph5Error, error_msg%'split_order_amps'

        if not split_order_names is None:
            if isinstance(split_order_names,list):
                # Should specify the same number of names as there are elements
                # in the key of the split_order_amps.
                if len(split_order_names)!=len(split_order_amps[0][0]):
                    raise MadGraph5Error, error_msg%'split_order_names'
                # Check the first element of the list to be a string
                if not isinstance(split_order_names[0],str):
                    raise MadGraph5Error, error_msg%'split_order_names'
            else:
                raise MadGraph5Error, error_msg%'split_order_names'

        # Now scan all contributing orders to be individually computed and
        # construct the list of color_amplitudes for JAMP to be constructed
        # accordingly.
        res_list=[]
        for i, amp_order in enumerate(split_order_amps):
            # Keep, for each JAMP, only the amplitudes belonging to this
            # order combination.
            col_amps_order = []
            for jamp in color_amplitudes:
                col_amps_order.append(filter(lambda col_amp:
                                               col_amp[1] in amp_order[1],jamp))
            if split_order_names:
                res_list.append('C JAMPs contributing to orders '+' '.join(
                              ['%s=%i'%order for order in zip(split_order_names,
                                                               amp_order[0])]))
            if self.opt['export_format'] in ['madloop_matchbox']:
                # MatchBox also needs the leading-color JAMP variant.
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1)),
                                   JAMP_formatLC="LNJAMP(%s,{0})".format(str(i+1))))
            else:
                res_list.extend(self.get_JAMP_lines(col_amps_order,
                                   JAMP_format="JAMP(%s,{0})".format(str(i+1))))

        return res_list
1330 1331
    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)",
                       split=-1):
        """Return the JAMP = sum(fermionfactor * AMP(i)) lines from col_amps
        defined as a matrix element or directly as a color_amplitudes dictionary.
        The split argument defines how many terms go into each assignment so
        that the generated Fortran lines do not become arbitrarily long
        (split <= 0 emits each JAMP as a single assignment)."""

        # Let the user call get_JAMP_lines directly from a MatrixElement or from
        # the color amplitudes lists.
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            color_amplitudes=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                color_amplitudes=col_amps
            else:
                raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"
        else:
            raise MadGraph5Error, "Incorrect col_amps argument passed to get_JAMP_lines"


        res_list = []
        for i, coeff_list in enumerate(color_amplitudes):
            # It might happen that coeff_list is empty if this function was
            # called from get_JAMP_lines_split_order (i.e. if some color flow
            # does not contribute at all for a given order).
            # In this case we simply set it to 0.
            if coeff_list==[]:
                res_list.append(((JAMP_format+"=0D0") % str(i + 1)))
                continue
            # Break the JAMP definition into 'n=split' pieces to avoid having
            # arbitrarily long lines; coeff_list is consumed chunk by chunk.
            first=True
            n = (len(coeff_list)+1 if split<=0 else split)
            while coeff_list!=[]:
                coefs=coeff_list[:n]
                coeff_list=coeff_list[n:]
                # Continuation chunks accumulate onto the JAMP itself.
                res = ((JAMP_format+"=") % str(i + 1)) + \
                      ((JAMP_format % str(i + 1)) if not first and split>0 else '')

                first=False
                # Optimization: if all contributions to that color basis element have
                # the same coefficient (up to a sign), put it in front
                list_fracs = [abs(coefficient[0][1]) for coefficient in coefs]
                common_factor = False
                diff_fracs = list(set(list_fracs))
                if len(diff_fracs) == 1 and abs(diff_fracs[0]) != 1:
                    common_factor = True
                    global_factor = diff_fracs[0]
                    res = res + '%s(' % self.coeff(1, global_factor, False, 0)

                # loop for JAMP: append one signed AMP term per contribution
                for (coefficient, amp_number) in coefs:
                    if not coefficient:
                        continue
                    if common_factor:
                        # Only the sign of the fraction remains per term.
                        res = (res + "%s" + AMP_format) % \
                                   (self.coeff(coefficient[0],
                                    coefficient[1] / abs(coefficient[1]),
                                    coefficient[2],
                                    coefficient[3]),
                                    str(amp_number))
                    else:
                        res = (res + "%s" + AMP_format) % (self.coeff(coefficient[0],
                                                           coefficient[1],
                                                           coefficient[2],
                                                           coefficient[3]),
                                                           str(amp_number))

                if common_factor:
                    res = res + ')'

                res_list.append(res)

        return res_list
1408
    def get_pdf_lines(self, matrix_element, ninitial, subproc_group = False):
        """Generate the PDF lines for the auto_dsig.f file.

        Returns a 3-tuple of Fortran snippets (declaration lines, DATA
        lines, executable PDF-call lines).  For decays (ninitial == 1)
        every subprocess weight is simply 1; otherwise PDG2PDF calls are
        generated per beam and combined into the PD array.
        """

        processes = matrix_element.get('processes')
        model = processes[0].get('model')

        pdf_definition_lines = ""
        pdf_data_lines = ""
        pdf_lines = ""

        if ninitial == 1:
            # Decay process: no PDF convolution needed.
            pdf_lines = "PD(0) = 0d0\nIPROC = 0\n"
            for i, proc in enumerate(processes):
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)=1d0\n"
                pdf_lines = pdf_lines + "\nPD(0)=PD(0)+PD(IPROC)\n"
        else:
            # Pick out all initial state particles for the two beams
            initial_states = [sorted(list(set([p.get_initial_pdg(1) for \
                                               p in processes]))),
                              sorted(list(set([p.get_initial_pdg(2) for \
                                               p in processes])))]

            # Prepare all variable names; sanitize characters that are not
            # valid in Fortran identifiers (~ + -).
            pdf_codes = dict([(p, model.get_particle(p).get_name()) for p in \
                              sum(initial_states,[])])
            for key,val in pdf_codes.items():
                pdf_codes[key] = val.replace('~','x').replace('+','p').replace('-','m')

            # Set conversion from PDG code to number used in PDF calls
            pdgtopdf = {21: 0, 22: 7}

            # Fill in missing entries of pdgtopdf
            for pdg in sum(initial_states,[]):
                if not pdg in pdgtopdf and not pdg in pdgtopdf.values():
                    pdgtopdf[pdg] = pdg
                elif pdg not in pdgtopdf and pdg in pdgtopdf.values():
                    # If any particle has pdg code 7, we need to use something else
                    pdgtopdf[pdg] = 6000000 + pdg

            # Get PDF variable declarations for all initial states
            for i in [0,1]:
                pdf_definition_lines += "DOUBLE PRECISION " + \
                                        ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                                  for pdg in \
                                                  initial_states[i]]) + \
                                                  "\n"

            # Get PDF data lines for all initial states
            for i in [0,1]:
                pdf_data_lines += "DATA " + \
                                  ",".join(["%s%d" % (pdf_codes[pdg],i+1) \
                                            for pdg in initial_states[i]]) + \
                                            "/%d*1D0/" % len(initial_states[i]) + \
                                            "\n"

            # Get PDF lines for all different initial states; the PDF call
            # is only made when the corresponding beam is a hadron (LPP).
            for i, init_states in enumerate(initial_states):
                if subproc_group:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(IB(%d))).GE.1) THEN\nLP=SIGN(1,LPP(IB(%d)))\n" \
                                % (i + 1, i + 1)
                else:
                    pdf_lines = pdf_lines + \
                                "IF (ABS(LPP(%d)) .GE. 1) THEN\nLP=SIGN(1,LPP(%d))\n" \
                                % (i + 1, i + 1)

                for nbi,initial_state in enumerate(init_states):
                    if initial_state in pdf_codes.keys():
                        if subproc_group:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(IB(%d))),%d*LP, 1," + \
                                         "XBK(IB(%d)),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1, i + 1)
                        else:
                            pdf_lines = pdf_lines + \
                                        ("%s%d=PDG2PDF(ABS(LPP(%d)),%d*LP, %d," + \
                                         "XBK(%d),DSQRT(Q2FACT(%d)))\n") % \
                                        (pdf_codes[initial_state],
                                         i + 1, i + 1, pdgtopdf[initial_state],
                                         i + 1,
                                         i + 1, i + 1)
                pdf_lines = pdf_lines + "ENDIF\n"

            # Add up PDFs for the different initial state particles
            pdf_lines = pdf_lines + "PD(0) = 0d0\nIPROC = 0\n"
            for proc in processes:
                process_line = proc.base_string()
                pdf_lines = pdf_lines + "IPROC=IPROC+1 ! " + process_line
                pdf_lines = pdf_lines + "\nPD(IPROC)="
                for ibeam in [1, 2]:
                    initial_state = proc.get_initial_pdg(ibeam)
                    if initial_state in pdf_codes.keys():
                        pdf_lines = pdf_lines + "%s%d*" % \
                                    (pdf_codes[initial_state], ibeam)
                    else:
                        pdf_lines = pdf_lines + "1d0*"
                # Remove last "*" from pdf_lines
                pdf_lines = pdf_lines[:-1] + "\n"
                pdf_lines = pdf_lines + "PD(0)=PD(0)+DABS(PD(IPROC))\n"

        # Remove last line break from the return variables
        return pdf_definition_lines[:-1], pdf_data_lines[:-1], pdf_lines[:-1]
1515 1516 #=========================================================================== 1517 # write_props_file 1518 #===========================================================================
    def write_props_file(self, writer, matrix_element, s_and_t_channels):
        """Write the props.inc file for MadEvent. Needs input from
        write_configs_file.

        For every internal propagator of every config, emits prmass,
        prwidth and pow lines (pow = 1 for fermions, 2 for bosons).
        Always returns True.
        """

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        for iconf, configs in enumerate(s_and_t_channels):
            # configs[0] = s-channels, configs[1] = t-channels; the last
            # t-channel vertex carries no propagator and is skipped.
            for vertex in configs[0] + configs[1][:-1]:
                leg = vertex.get('legs')[-1]
                if leg.get('id') not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    particle = particle_dict[leg.get('id')]
                    # Get mass
                    if particle.get('mass').lower() == 'zero':
                        mass = particle.get('mass')
                    else:
                        mass = "abs(%s)" % particle.get('mass')
                    # Get width
                    if particle.get('width').lower() == 'zero':
                        width = particle.get('width')
                    else:
                        width = "abs(%s)" % particle.get('width')

                    pow_part = 1 + int(particle.is_boson())

                lines.append("prmass(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, mass))
                lines.append("prwidth(%d,%d) = %s" % \
                             (leg.get('number'), iconf + 1, width))
                lines.append("pow(%d,%d) = %d" % \
                             (leg.get('number'), iconf + 1, pow_part))

        # Write the file
        writer.writelines(lines)

        return True
1562 1563 #=========================================================================== 1564 # write_configs_file 1565 #===========================================================================
1566 - def write_configs_file(self, writer, matrix_element):
1567 """Write the configs.inc file for MadEvent""" 1568 1569 # Extract number of external particles 1570 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 1571 1572 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 1573 mapconfigs = [c[0] for c in configs] 1574 model = matrix_element.get('processes')[0].get('model') 1575 return mapconfigs, self.write_configs_file_from_diagrams(writer, 1576 [[c[1]] for c in configs], 1577 mapconfigs, 1578 nexternal, ninitial, 1579 model)
1580 1581 #=========================================================================== 1582 # write_configs_file_from_diagrams 1583 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of [s-channels, t-channels] per written config,
        to be consumed by write_props_file.
        """

        lines = []

        s_and_t_channels = []

        # Smallest maximal vertex size over all configs: configs with
        # larger vertices (multi-particle) are excluded below.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]
        minvert = min(vert_list) if vert_list!=[] else 0

        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fictitious PDG code used for propagators of shrunk loops etc.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any(vert > minvert for vert in [d for d in helas_diags if d]\
                   [0].get_vertex_leg_numbers()) :
                # Only 3-vertices allowed in configs.inc except for vertices
                # which originate from a shrunk loop.
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                     mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # iforest: daughter leg numbers feeding this propagator.
                lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, len(daughters),
                              ",".join([str(d) for d in daughters])))
                if verts in schannels:
                    # s-channel: one PDG per subprocess (0 when the
                    # subprocess has no corresponding diagram).
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join([str(d) for d in pdgs])))
                    lines.append("data tprid(%d,%d)/0/" % \
                                 (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    # t-channel: store the propagator id in tprid instead.
                    lines.append("data tprid(%d,%d)/%d/" % \
                                 (last_leg.get('number'), nconfigs,
                                  abs(last_leg.get('id'))))
                    lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                                 (last_leg.get('number'), nconfigs, nsubprocs,
                                  ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        lines.append("# Number of configs")
        lines.append("data mapconfig(0)/%d/" % nconfigs)

        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
1702 1703 #=========================================================================== 1704 # Global helper methods 1705 #=========================================================================== 1706
1707 - def coeff(self, ff_number, frac, is_imaginary, Nc_power, Nc_value=3):
1708 """Returns a nicely formatted string for the coefficients in JAMP lines""" 1709 1710 total_coeff = ff_number * frac * fractions.Fraction(Nc_value) ** Nc_power 1711 1712 if total_coeff == 1: 1713 if is_imaginary: 1714 return '+imag1*' 1715 else: 1716 return '+' 1717 elif total_coeff == -1: 1718 if is_imaginary: 1719 return '-imag1*' 1720 else: 1721 return '-' 1722 1723 res_str = '%+iD0' % total_coeff.numerator 1724 1725 if total_coeff.denominator != 1: 1726 # Check if total_coeff is an integer 1727 res_str = res_str + '/%iD0' % total_coeff.denominator 1728 1729 if is_imaginary: 1730 res_str = res_str + '*imag1' 1731 1732 return res_str + '*'
1733 1734
1735 - def set_fortran_compiler(self, default_compiler, force=False):
1736 """Set compiler based on what's available on the system""" 1737 1738 # Check for compiler 1739 if default_compiler['fortran'] and misc.which(default_compiler['fortran']): 1740 f77_compiler = default_compiler['fortran'] 1741 elif misc.which('gfortran'): 1742 f77_compiler = 'gfortran' 1743 elif misc.which('g77'): 1744 f77_compiler = 'g77' 1745 elif misc.which('f77'): 1746 f77_compiler = 'f77' 1747 elif default_compiler['fortran']: 1748 logger.warning('No Fortran Compiler detected! Please install one') 1749 f77_compiler = default_compiler['fortran'] # maybe misc fail so try with it 1750 else: 1751 raise MadGraph5Error, 'No Fortran Compiler detected! Please install one' 1752 logger.info('Use Fortran compiler ' + f77_compiler) 1753 1754 1755 # Check for compiler. 1. set default. 1756 if default_compiler['f2py']: 1757 f2py_compiler = default_compiler['f2py'] 1758 else: 1759 f2py_compiler = '' 1760 # Try to find the correct one. 1761 if default_compiler['f2py'] and misc.which(default_compiler['f2py']): 1762 f2py_compiler = default_compiler['f2py'] 1763 elif misc.which('f2py'): 1764 f2py_compiler = 'f2py' 1765 elif sys.version_info[1] == 6: 1766 if misc.which('f2py-2.6'): 1767 f2py_compiler = 'f2py-2.6' 1768 elif misc.which('f2py2.6'): 1769 f2py_compiler = 'f2py2.6' 1770 elif sys.version_info[1] == 7: 1771 if misc.which('f2py-2.7'): 1772 f2py_compiler = 'f2py-2.7' 1773 elif misc.which('f2py2.7'): 1774 f2py_compiler = 'f2py2.7' 1775 1776 to_replace = {'fortran': f77_compiler, 'f2py': f2py_compiler} 1777 1778 1779 self.replace_make_opt_f_compiler(to_replace) 1780 # Replace also for Template but not for cluster 1781 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite: 1782 self.replace_make_opt_f_compiler(to_replace, pjoin(MG5DIR, 'Template', 'LO')) 1783 1784 return f77_compiler
    # Backward-compatible alias: older callers use set_compiler(); keep it
    # pointing at set_fortran_compiler.
    set_compiler = set_fortran_compiler
1790 - def set_cpp_compiler(self, default_compiler, force=False):
1791 """Set compiler based on what's available on the system""" 1792 1793 # Check for compiler 1794 if default_compiler and misc.which(default_compiler): 1795 compiler = default_compiler 1796 elif misc.which('g++'): 1797 #check if clang version 1798 p = misc.Popen(['g++', '--version'], stdout=subprocess.PIPE, 1799 stderr=subprocess.PIPE) 1800 out, _ = p.communicate() 1801 if 'clang' in out and misc.which('clang'): 1802 compiler = 'clang' 1803 else: 1804 compiler = 'g++' 1805 elif misc.which('c++'): 1806 compiler = 'c++' 1807 elif misc.which('clang'): 1808 compiler = 'clang' 1809 elif default_compiler: 1810 logger.warning('No c++ Compiler detected! Please install one') 1811 compiler = default_compiler # maybe misc fail so try with it 1812 else: 1813 raise MadGraph5Error, 'No c++ Compiler detected! Please install one' 1814 logger.info('Use c++ compiler ' + compiler) 1815 self.replace_make_opt_c_compiler(compiler) 1816 # Replace also for Template but not for cluster 1817 if not os.environ.has_key('MADGRAPH_DATA') and ReadWrite and \ 1818 not __debug__ and not os.path.exists(pjoin(MG5DIR,'bin','create_release.py')): 1819 self.replace_make_opt_c_compiler(compiler, pjoin(MG5DIR, 'Template', 'LO')) 1820 1821 return compiler
1822 1823
1824 - def replace_make_opt_f_compiler(self, compilers, root_dir = ""):
1825 """Set FC=compiler in Source/make_opts""" 1826 1827 assert isinstance(compilers, dict) 1828 1829 mod = False #avoid to rewrite the file if not needed 1830 if not root_dir: 1831 root_dir = self.dir_path 1832 1833 compiler= compilers['fortran'] 1834 f2py_compiler = compilers['f2py'] 1835 if not f2py_compiler: 1836 f2py_compiler = 'f2py' 1837 for_update= {'DEFAULT_F_COMPILER':compiler, 1838 'DEFAULT_F2PY_COMPILER':f2py_compiler} 1839 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1840 1841 try: 1842 common_run_interface.CommonRunCmd.update_make_opts_full( 1843 make_opts, for_update) 1844 except IOError: 1845 if root_dir == self.dir_path: 1846 logger.info('Fail to set compiler. Trying to continue anyway.')
1847
1848 - def replace_make_opt_c_compiler(self, compiler, root_dir = ""):
1849 """Set CXX=compiler in Source/make_opts. 1850 The version is also checked, in order to set some extra flags 1851 if the compiler is clang (on MACOS)""" 1852 1853 is_clang = misc.detect_if_cpp_compiler_is_clang(compiler) 1854 is_lc = misc.detect_cpp_std_lib_dependence(compiler) == '-lc++' 1855 1856 1857 # list of the variable to set in the make_opts file 1858 for_update= {'DEFAULT_CPP_COMPILER':compiler, 1859 'MACFLAG':'-mmacosx-version-min=10.7' if is_clang and is_lc else '', 1860 'STDLIB': '-lc++' if is_lc else '-lstdc++', 1861 'STDLIB_FLAG': '-stdlib=libc++' if is_lc and is_clang else '' 1862 } 1863 1864 # for MOJAVE remove the MACFLAG: 1865 if is_clang: 1866 import platform 1867 version, _, _ = platform.mac_ver() 1868 if not version:# not linux 1869 version = 14 # set version to remove MACFLAG 1870 else: 1871 version = int(version.split('.')[1]) 1872 if version >= 14: 1873 for_update['MACFLAG'] = '-mmacosx-version-min=10.8' if is_lc else '' 1874 1875 if not root_dir: 1876 root_dir = self.dir_path 1877 make_opts = pjoin(root_dir, 'Source', 'make_opts') 1878 1879 try: 1880 common_run_interface.CommonRunCmd.update_make_opts_full( 1881 make_opts, for_update) 1882 except IOError: 1883 if root_dir == self.dir_path: 1884 logger.info('Fail to set compiler. Trying to continue anyway.') 1885 1886 return
1887
#===============================================================================
# ProcessExporterFortranSA
#===============================================================================
class ProcessExporterFortranSA(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadGraph v4 StandAlone format."""

    # Fortran template used by write_matrix_element_v4 for matrix.f; the
    # msP/msF/matchbox export formats substitute their own template there.
    matrix_template = "matrix_standalone_v4.inc"
1897 - def __init__(self, *args,**opts):
1898 """add the format information compare to standard init""" 1899 1900 if 'format' in opts: 1901 self.format = opts['format'] 1902 del opts['format'] 1903 else: 1904 self.format = 'standalone' 1905 1906 self.prefix_info = {} 1907 ProcessExporterFortran.__init__(self, *args, **opts)
1908
1909 - def copy_template(self, model):
1910 """Additional actions needed for setup of Template 1911 """ 1912 1913 #First copy the full template tree if dir_path doesn't exit 1914 if os.path.isdir(self.dir_path): 1915 return 1916 1917 logger.info('initialize a new standalone directory: %s' % \ 1918 os.path.basename(self.dir_path)) 1919 temp_dir = pjoin(self.mgme_dir, 'Template/LO') 1920 1921 # Create the directory structure 1922 os.mkdir(self.dir_path) 1923 os.mkdir(pjoin(self.dir_path, 'Source')) 1924 os.mkdir(pjoin(self.dir_path, 'Source', 'MODEL')) 1925 os.mkdir(pjoin(self.dir_path, 'Source', 'DHELAS')) 1926 os.mkdir(pjoin(self.dir_path, 'SubProcesses')) 1927 os.mkdir(pjoin(self.dir_path, 'bin')) 1928 os.mkdir(pjoin(self.dir_path, 'bin', 'internal')) 1929 os.mkdir(pjoin(self.dir_path, 'lib')) 1930 os.mkdir(pjoin(self.dir_path, 'Cards')) 1931 1932 # Information at top-level 1933 #Write version info 1934 shutil.copy(pjoin(temp_dir, 'TemplateVersion.txt'), self.dir_path) 1935 try: 1936 shutil.copy(pjoin(self.mgme_dir, 'MGMEVersion.txt'), self.dir_path) 1937 except IOError: 1938 MG5_version = misc.get_pkg_info() 1939 open(pjoin(self.dir_path, 'MGMEVersion.txt'), 'w').write( \ 1940 "5." + MG5_version['version']) 1941 1942 1943 # Add file in SubProcesses 1944 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f_sp'), 1945 pjoin(self.dir_path, 'SubProcesses', 'makefileP')) 1946 1947 if self.format == 'standalone': 1948 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'check_sa.f'), 1949 pjoin(self.dir_path, 'SubProcesses', 'check_sa.f')) 1950 1951 # Add file in Source 1952 shutil.copy(pjoin(temp_dir, 'Source', 'make_opts'), 1953 pjoin(self.dir_path, 'Source')) 1954 # add the makefile 1955 filename = pjoin(self.dir_path,'Source','makefile') 1956 self.write_source_makefile(writers.FileWriter(filename))
1957 1958 #=========================================================================== 1959 # export model files 1960 #===========================================================================
1961 - def export_model_files(self, model_path):
1962 """export the model dependent files for V4 model""" 1963 1964 super(ProcessExporterFortranSA,self).export_model_files(model_path) 1965 # Add the routine update_as_param in v4 model 1966 # This is a function created in the UFO 1967 text=""" 1968 subroutine update_as_param() 1969 call setpara('param_card.dat',.false.) 1970 return 1971 end 1972 """ 1973 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 1974 ff.write(text) 1975 ff.close() 1976 1977 text = open(pjoin(self.dir_path,'SubProcesses','check_sa.f')).read() 1978 text = text.replace('call setpara(\'param_card.dat\')', 'call setpara(\'param_card.dat\', .true.)') 1979 fsock = open(pjoin(self.dir_path,'SubProcesses','check_sa.f'), 'w') 1980 fsock.write(text) 1981 fsock.close() 1982 1983 self.make_model_symbolic_link()
1984 1985 #=========================================================================== 1986 # write a procdef_mg5 (an equivalent of the MG4 proc_card.dat) 1987 #===========================================================================
1988 - def write_procdef_mg5(self, file_pos, modelname, process_str):
1989 """ write an equivalent of the MG4 proc_card in order that all the Madevent 1990 Perl script of MadEvent4 are still working properly for pure MG5 run. 1991 Not needed for StandAlone so just return 1992 """ 1993 1994 return
1995 1996 1997 #=========================================================================== 1998 # Make the Helas and Model directories for Standalone directory 1999 #===========================================================================
2000 - def make(self):
2001 """Run make in the DHELAS and MODEL directories, to set up 2002 everything for running standalone 2003 """ 2004 2005 source_dir = pjoin(self.dir_path, "Source") 2006 logger.info("Running make for Helas") 2007 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2008 logger.info("Running make for Model") 2009 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran')
2010 2011 #=========================================================================== 2012 # Create proc_card_mg5.dat for Standalone directory 2013 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize the Standalone MG4 directory.

        Chooses the compilers, pre-compiles the helper libraries, writes the
        command history as proc_card_mg5.dat and sets up either the f2py
        dispatcher files (when prefixed routines were generated) or a global
        SubProcesses makefile.
        """

        # Compiler set requested through the mg5 options.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}

        self.compiler_choice(compiler)
        # Pre-compile the DHELAS and MODEL static libraries.
        self.make()

        # Write command history as proc_card_mg5
        if history and os.path.isdir(pjoin(self.dir_path, 'Cards')):
            output_file = pjoin(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                             history, mg5options, flaglist)
        # Make the output directory importable as a python package.
        open(pjoin(self.dir_path,'__init__.py'),'w')
        open(pjoin(self.dir_path,'SubProcesses','__init__.py'),'w')

        # NOTE(review): deliberately disabled branch (reweight mode support).
        if False:#'mode' in self.opt and self.opt['mode'] == "reweight":
            #add the module to hande the NLO weight
            files.copytree(pjoin(MG5DIR, 'Template', 'RWGTNLO'),
                          pjoin(self.dir_path, 'Source'))
            files.copytree(pjoin(MG5DIR, 'Template', 'NLO', 'Source', 'PDF'),
                           pjoin(self.dir_path, 'Source', 'PDF'))
            self.write_pdf_opendata()

        if self.prefix_info:
            # Prefixed routines were generated: emit the f2py dispatcher,
            # its makefile and the python check_sa driver.
            self.write_f2py_splitter()
            self.write_f2py_makefile()
            self.write_f2py_check_sa(matrix_elements,
                            pjoin(self.dir_path,'SubProcesses','check_sa.py'))
        else:
            # create a single makefile to compile all the subprocesses
            text = '''\n# For python linking (require f2py part of numpy)\nifeq ($(origin MENUM),undefined)\n  MENUM=2\nendif\n'''
            deppython = ''
            # One rule per P* subprocess directory; 'all' depends on each.
            for Pdir in os.listdir(pjoin(self.dir_path,'SubProcesses')):
                if os.path.isdir(pjoin(self.dir_path, 'SubProcesses', Pdir)):
                    text += '%(0)s/matrix$(MENUM)py.so:\n\tcd %(0)s;make matrix$(MENUM)py.so\n'% {'0': Pdir}
                    deppython += ' %(0)s/matrix$(MENUM)py.so ' % {'0': Pdir}
            text+='all: %s\n\techo \'done\'' % deppython

            # Appended ('a') to the makefile installed by copy_template.
            ff = open(pjoin(self.dir_path, 'SubProcesses', 'makefile'),'a')
            ff.write(text)
            ff.close()
2063
    def write_f2py_splitter(self):
        """Write SubProcesses/all_matrix.f: a Fortran dispatcher, exposed via
        f2py, that routes smatrixhel(pdgs,...) to the prefixed smatrix routine
        of the matching subprocess, plus helper routines to initialise the
        model, change parameters and query the pdg orders/prefixes."""

        # Fortran template; %(...)s slots are filled from 'formatting' below.
        # The result is passed through FortranWriter, which normalises layout.
        template = """
%(python_information)s
      subroutine smatrixhel(pdgs, npdg, p, ALPHAS, SCALE2, nhel, ANS)
      IMPLICIT NONE

CF2PY double precision, intent(in), dimension(0:3,npdg) :: p
CF2PY integer, intent(in), dimension(npdg) :: pdgs
CF2PY integer, intent(in) :: npdg
CF2PY double precision, intent(out) :: ANS
CF2PY double precision, intent(in) :: ALPHAS
CF2PY double precision, intent(in) :: SCALE2
      integer pdgs(*)
      integer npdg, nhel
      double precision p(*)
      double precision ANS, ALPHAS, PI,SCALE2
      include 'coupl.inc'

      PI = 3.141592653589793D0
      G = 2* DSQRT(ALPHAS*PI)
      CALL UPDATE_AS_PARAM()
      if (scale2.ne.0d0) stop 1

%(smatrixhel)s

      return
      end

      SUBROUTINE INITIALISE(PATH)
C     ROUTINE FOR F2PY to read the benchmark point.
      IMPLICIT NONE
      CHARACTER*512 PATH
CF2PY INTENT(IN) :: PATH
      CALL SETPARA(PATH)  !first call to setup the paramaters
      RETURN
      END

      subroutine CHANGE_PARA(name, value)
      implicit none
CF2PY intent(in) :: name
CF2PY intent(in) :: value

      character*512 name
      double precision value

      include '../Source/MODEL/input.inc'
      include '../Source/MODEL/coupl.inc'

      SELECT CASE (name)
%(parameter_setup)s
         CASE DEFAULT
            write(*,*) 'no parameter matching', name, value
      END SELECT

      return
      end

      subroutine update_all_coup()
      implicit none
      call coup()
      return
      end

      subroutine get_pdg_order(PDG)
      IMPLICIT NONE
CF2PY INTEGER, intent(out) :: PDG(%(nb_me)i,%(maxpart)i)
      INTEGER PDG(%(nb_me)i,%(maxpart)i), PDGS(%(nb_me)i,%(maxpart)i)
      DATA PDGS/ %(pdgs)s /
      PDG = PDGS
      RETURN
      END

      subroutine get_prefix(PREFIX)
      IMPLICIT NONE
CF2PY CHARACTER*20, intent(out) :: PREFIX(%(nb_me)i)
      character*20 PREFIX(%(nb_me)i),PREF(%(nb_me)i)
      DATA PREF / '%(prefix)s'/
      PREFIX = PREF
      RETURN
      END

"""

        # prefix_info maps tuple-of-pdg-ids -> [routine prefix, process tag].
        allids = self.prefix_info.keys()
        allprefix = [self.prefix_info[key][0] for key in allids]
        min_nexternal = min([len(ids) for ids in allids])
        max_nexternal = max([len(ids) for ids in allids])

        # Human/scripting-readable mapping written at the top of the file.
        info = []
        for key, (prefix, tag) in self.prefix_info.items():
            info.append('#PY %s : %s # %s' % (tag, key, prefix))

        # Build the Fortran if/else-if cascade selecting the right routine,
        # first on the number of external legs, then on the pdg codes.
        text = []
        for n_ext in range(min_nexternal, max_nexternal+1):
            current = [ids for ids in allids if len(ids)==n_ext]
            if not current:
                continue
            if min_nexternal != max_nexternal:
                if n_ext == min_nexternal:
                    text.append(' if (npdg.eq.%i)then' % n_ext)
                else:
                    text.append(' else if (npdg.eq.%i)then' % n_ext)
            for ii,pdgs in enumerate(current):
                condition = '.and.'.join(['%i.eq.pdgs(%i)' %(pdg, i+1) for i, pdg in enumerate(pdgs)])
                # NOTE(review): '%i' below uses 'i' leaked from the list
                # comprehension above (Python 2 scoping); 'ii' was probably
                # intended. Harmless: it only lands in a Fortran comment.
                if ii==0:
                    text.append( ' if(%s) then ! %i' % (condition, i))
                else:
                    text.append( ' else if(%s) then ! %i' % (condition,i))
                text.append(' call %ssmatrixhel(p, nhel, ans)' % self.prefix_info[pdgs][0])
            text.append(' endif')
        #close the function
        if min_nexternal != max_nexternal:
            text.append('endif')

        # One CASE per user-visible parameter name (see get_model_parameter).
        params = self.get_model_parameter(self.model)
        parameter_setup =[]
        for key, var in params.items():
            parameter_setup.append(' CASE ("%s")\n %s = value'
                                   % (key, var))

        formatting = {'python_information':'\n'.join(info),
                      'smatrixhel': '\n'.join(text),
                      'maxpart': max_nexternal,
                      'nb_me': len(allids),
                      # Column-major DATA: pad shorter pdg lists with 0.
                      'pdgs': ','.join(str(pdg[i]) if i<len(pdg) else '0'
                                         for i in range(max_nexternal) for pdg in allids),
                      'prefix':'\',\''.join(allprefix),
                      'parameter_setup': '\n'.join(parameter_setup),
                      }
        # NOTE(review): 'lenprefix' does not appear in the template above —
        # presumably kept for subclass templates; confirm before removing.
        formatting['lenprefix'] = len(formatting['prefix'])
        text = template % formatting
        fsock = writers.FortranWriter(pjoin(self.dir_path, 'SubProcesses', 'all_matrix.f'),'w')
        fsock.writelines(text)
        fsock.close()
2204
2205 - def get_model_parameter(self, model):
2206 """ returns all the model parameter 2207 """ 2208 params = {} 2209 for p in model.get('parameters')[('external',)]: 2210 name = p.name 2211 nopref = name[4:] if name.startswith('mdl_') else name 2212 params[nopref] = name 2213 2214 block = p.lhablock 2215 lha = '_'.join([str(i) for i in p.lhacode]) 2216 params['%s_%s' % (block.upper(), lha)] = name 2217 2218 return params
2219 2220 2221 2222 2223
2224 - def write_f2py_check_sa(self, matrix_element, writer):
2225 """ Write the general check_sa.py in SubProcesses that calls all processes successively.""" 2226 # To be implemented. It is just an example file, i.e. not crucial. 2227 return
2228
2229 - def write_f2py_makefile(self):
2230 """ """ 2231 # Add file in SubProcesses 2232 shutil.copy(pjoin(self.mgme_dir, 'madgraph', 'iolibs', 'template_files', 'makefile_sa_f2py'), 2233 pjoin(self.dir_path, 'SubProcesses', 'makefile'))
2234
2235 - def create_MA5_cards(self,*args,**opts):
2236 """ Overload the function of the mother so as to bypass this in StandAlone.""" 2237 pass
2238
2239 - def compiler_choice(self, compiler):
2240 """ Different daughter classes might want different compilers. 2241 So this function is meant to be overloaded if desired.""" 2242 2243 self.set_compiler(compiler)
2244 2245 #=========================================================================== 2246 # generate_subprocess_directory 2247 #===========================================================================
2248 - def generate_subprocess_directory(self, matrix_element, 2249 fortran_model, number):
2250 """Generate the Pxxxxx directory for a subprocess in MG4 standalone, 2251 including the necessary matrix.f and nexternal.inc files""" 2252 2253 cwd = os.getcwd() 2254 # Create the directory PN_xx_xxxxx in the specified path 2255 dirpath = pjoin(self.dir_path, 'SubProcesses', \ 2256 "P%s" % matrix_element.get('processes')[0].shell_string()) 2257 2258 if self.opt['sa_symmetry']: 2259 # avoid symmetric output 2260 for i,proc in enumerate(matrix_element.get('processes')): 2261 2262 tag = proc.get_tag() 2263 legs = proc.get('legs')[:] 2264 leg0 = proc.get('legs')[0] 2265 leg1 = proc.get('legs')[1] 2266 if not leg1.get('state'): 2267 proc.get('legs')[0] = leg1 2268 proc.get('legs')[1] = leg0 2269 flegs = proc.get('legs')[2:] 2270 for perm in itertools.permutations(flegs): 2271 for i,p in enumerate(perm): 2272 proc.get('legs')[i+2] = p 2273 dirpath2 = pjoin(self.dir_path, 'SubProcesses', \ 2274 "P%s" % proc.shell_string()) 2275 #restore original order 2276 proc.get('legs')[2:] = legs[2:] 2277 if os.path.exists(dirpath2): 2278 proc.get('legs')[:] = legs 2279 return 0 2280 proc.get('legs')[:] = legs 2281 2282 try: 2283 os.mkdir(dirpath) 2284 except os.error as error: 2285 logger.warning(error.strerror + " " + dirpath) 2286 2287 #try: 2288 # os.chdir(dirpath) 2289 #except os.error: 2290 # logger.error('Could not cd to directory %s' % dirpath) 2291 # return 0 2292 2293 logger.info('Creating files in directory %s' % dirpath) 2294 2295 # Extract number of external particles 2296 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2297 2298 # Create the matrix.f file and the nexternal.inc file 2299 if self.opt['export_format']=='standalone_msP': 2300 filename = pjoin(dirpath, 'matrix_prod.f') 2301 else: 2302 filename = pjoin(dirpath, 'matrix.f') 2303 2304 proc_prefix = '' 2305 if 'prefix' in self.cmd_options: 2306 if self.cmd_options['prefix'] == 'int': 2307 proc_prefix = 'M%s_' % number 2308 elif self.cmd_options['prefix'] == 'proc': 2309 proc_prefix = 
matrix_element.get('processes')[0].shell_string().split('_',1)[1] 2310 else: 2311 raise Exception, '--prefix options supports only \'int\' and \'proc\'' 2312 for proc in matrix_element.get('processes'): 2313 ids = [l.get('id') for l in proc.get('legs_with_decays')] 2314 self.prefix_info[tuple(ids)] = [proc_prefix, proc.get_tag()] 2315 2316 calls = self.write_matrix_element_v4( 2317 writers.FortranWriter(filename), 2318 matrix_element, 2319 fortran_model, 2320 proc_prefix=proc_prefix) 2321 2322 if self.opt['export_format'] == 'standalone_msP': 2323 filename = pjoin(dirpath,'configs_production.inc') 2324 mapconfigs, s_and_t_channels = self.write_configs_file(\ 2325 writers.FortranWriter(filename), 2326 matrix_element) 2327 2328 filename = pjoin(dirpath,'props_production.inc') 2329 self.write_props_file(writers.FortranWriter(filename), 2330 matrix_element, 2331 s_and_t_channels) 2332 2333 filename = pjoin(dirpath,'nexternal_prod.inc') 2334 self.write_nexternal_madspin(writers.FortranWriter(filename), 2335 nexternal, ninitial) 2336 2337 if self.opt['export_format']=='standalone_msF': 2338 filename = pjoin(dirpath, 'helamp.inc') 2339 ncomb=matrix_element.get_helicity_combinations() 2340 self.write_helamp_madspin(writers.FortranWriter(filename), 2341 ncomb) 2342 2343 filename = pjoin(dirpath, 'nexternal.inc') 2344 self.write_nexternal_file(writers.FortranWriter(filename), 2345 nexternal, ninitial) 2346 2347 filename = pjoin(dirpath, 'pmass.inc') 2348 self.write_pmass_file(writers.FortranWriter(filename), 2349 matrix_element) 2350 2351 filename = pjoin(dirpath, 'ngraphs.inc') 2352 self.write_ngraphs_file(writers.FortranWriter(filename), 2353 len(matrix_element.get_all_amplitudes())) 2354 2355 # Generate diagrams 2356 if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True': 2357 filename = pjoin(dirpath, "matrix.ps") 2358 plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\ 2359 get('diagrams'), 2360 filename, 2361 
model=matrix_element.get('processes')[0].\ 2362 get('model'), 2363 amplitude=True) 2364 logger.info("Generating Feynman diagrams for " + \ 2365 matrix_element.get('processes')[0].nice_string()) 2366 plot.draw() 2367 2368 linkfiles = ['check_sa.f', 'coupl.inc'] 2369 2370 if proc_prefix and os.path.exists(pjoin(dirpath, '..', 'check_sa.f')): 2371 text = open(pjoin(dirpath, '..', 'check_sa.f')).read() 2372 pat = re.compile('smatrix', re.I) 2373 new_text, n = re.subn(pat, '%ssmatrix' % proc_prefix, text) 2374 with open(pjoin(dirpath, 'check_sa.f'),'w') as f: 2375 f.write(new_text) 2376 linkfiles.pop(0) 2377 2378 for file in linkfiles: 2379 ln('../%s' % file, cwd=dirpath) 2380 ln('../makefileP', name='makefile', cwd=dirpath) 2381 # Return to original PWD 2382 #os.chdir(cwd) 2383 2384 if not calls: 2385 calls = 0 2386 return calls
2387 2388 2389 #=========================================================================== 2390 # write_source_makefile 2391 #===========================================================================
2392 - def write_source_makefile(self, writer):
2393 """Write the nexternal.inc file for MG4""" 2394 2395 path = pjoin(_file_path,'iolibs','template_files','madevent_makefile_source') 2396 set_of_lib = '$(LIBDIR)libdhelas.$(libext) $(LIBDIR)libmodel.$(libext)' 2397 model_line='''$(LIBDIR)libmodel.$(libext): MODEL\n\t cd MODEL; make\n''' 2398 2399 replace_dict= {'libraries': set_of_lib, 2400 'model':model_line, 2401 'additional_dsample': '', 2402 'additional_dependencies':''} 2403 2404 text = open(path).read() % replace_dict 2405 2406 if writer: 2407 writer.write(text) 2408 2409 return replace_dict
2410 2411 #=========================================================================== 2412 # write_matrix_element_v4 2413 #===========================================================================
2414 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model, 2415 write=True, proc_prefix=''):
2416 """Export a matrix element to a matrix.f file in MG4 standalone format 2417 if write is on False, just return the replace_dict and not write anything.""" 2418 2419 2420 if not matrix_element.get('processes') or \ 2421 not matrix_element.get('diagrams'): 2422 return 0 2423 2424 if writer: 2425 if not isinstance(writer, writers.FortranWriter): 2426 raise writers.FortranWriter.FortranWriterError(\ 2427 "writer not FortranWriter but %s" % type(writer)) 2428 # Set lowercase/uppercase Fortran code 2429 writers.FortranWriter.downcase = False 2430 2431 2432 if not self.opt.has_key('sa_symmetry'): 2433 self.opt['sa_symmetry']=False 2434 2435 2436 # The proc_id is for MadEvent grouping which is never used in SA. 2437 replace_dict = {'global_variable':'', 'amp2_lines':'', 2438 'proc_prefix':proc_prefix, 'proc_id':''} 2439 2440 # Extract helas calls 2441 helas_calls = fortran_model.get_matrix_element_calls(\ 2442 matrix_element) 2443 2444 replace_dict['helas_calls'] = "\n".join(helas_calls) 2445 2446 # Extract version number and date from VERSION file 2447 info_lines = self.get_mg5_info_lines() 2448 replace_dict['info_lines'] = info_lines 2449 2450 # Extract process info lines 2451 process_lines = self.get_process_info_lines(matrix_element) 2452 replace_dict['process_lines'] = process_lines 2453 2454 # Extract number of external particles 2455 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 2456 replace_dict['nexternal'] = nexternal 2457 replace_dict['nincoming'] = ninitial 2458 2459 # Extract ncomb 2460 ncomb = matrix_element.get_helicity_combinations() 2461 replace_dict['ncomb'] = ncomb 2462 2463 # Extract helicity lines 2464 helicity_lines = self.get_helicity_lines(matrix_element) 2465 replace_dict['helicity_lines'] = helicity_lines 2466 2467 # Extract overall denominator 2468 # Averaging initial state color, spin, and identical FS particles 2469 replace_dict['den_factor_line'] = self.get_den_factor_line(matrix_element) 2470 2471 # Extract ngraphs 2472 
ngraphs = matrix_element.get_number_of_amplitudes() 2473 replace_dict['ngraphs'] = ngraphs 2474 2475 # Extract nwavefuncs 2476 nwavefuncs = matrix_element.get_number_of_wavefunctions() 2477 replace_dict['nwavefuncs'] = nwavefuncs 2478 2479 # Extract ncolor 2480 ncolor = max(1, len(matrix_element.get('color_basis'))) 2481 replace_dict['ncolor'] = ncolor 2482 2483 replace_dict['hel_avg_factor'] = matrix_element.get_hel_avg_factor() 2484 replace_dict['beamone_helavgfactor'], replace_dict['beamtwo_helavgfactor'] =\ 2485 matrix_element.get_beams_hel_avg_factor() 2486 2487 # Extract color data lines 2488 color_data_lines = self.get_color_data_lines(matrix_element) 2489 replace_dict['color_data_lines'] = "\n".join(color_data_lines) 2490 2491 if self.opt['export_format']=='standalone_msP': 2492 # For MadSpin need to return the AMP2 2493 amp2_lines = self.get_amp2_lines(matrix_element, [] ) 2494 replace_dict['amp2_lines'] = '\n'.join(amp2_lines) 2495 replace_dict['global_variable'] = \ 2496 " Double Precision amp2(NGRAPHS)\n common/to_amps/ amp2\n" 2497 2498 # JAMP definition, depends on the number of independent split orders 2499 split_orders=matrix_element.get('processes')[0].get('split_orders') 2500 2501 if len(split_orders)==0: 2502 replace_dict['nSplitOrders']='' 2503 # Extract JAMP lines 2504 jamp_lines = self.get_JAMP_lines(matrix_element) 2505 # Consider the output of a dummy order 'ALL_ORDERS' for which we 2506 # set all amplitude order to weight 1 and only one squared order 2507 # contribution which is of course ALL_ORDERS=2. 2508 squared_orders = [(2,),] 2509 amp_orders = [((1,),tuple(range(1,ngraphs+1)))] 2510 replace_dict['chosen_so_configs'] = '.TRUE.' 
2511 replace_dict['nSqAmpSplitOrders']=1 2512 replace_dict['split_order_str_list']='' 2513 else: 2514 squared_orders, amp_orders = matrix_element.get_split_orders_mapping() 2515 replace_dict['nAmpSplitOrders']=len(amp_orders) 2516 replace_dict['nSqAmpSplitOrders']=len(squared_orders) 2517 replace_dict['nSplitOrders']=len(split_orders) 2518 replace_dict['split_order_str_list']=str(split_orders) 2519 amp_so = self.get_split_orders_lines( 2520 [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS') 2521 sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS') 2522 replace_dict['ampsplitorders']='\n'.join(amp_so) 2523 replace_dict['sqsplitorders']='\n'.join(sqamp_so) 2524 jamp_lines = self.get_JAMP_lines_split_order(\ 2525 matrix_element,amp_orders,split_order_names=split_orders) 2526 2527 # Now setup the array specifying what squared split order is chosen 2528 replace_dict['chosen_so_configs']=self.set_chosen_SO_index( 2529 matrix_element.get('processes')[0],squared_orders) 2530 2531 # For convenience we also write the driver check_sa_splitOrders.f 2532 # that explicitely writes out the contribution from each squared order. 
2533 # The original driver still works and is compiled with 'make' while 2534 # the splitOrders one is compiled with 'make check_sa_born_splitOrders' 2535 check_sa_writer=writers.FortranWriter('check_sa_born_splitOrders.f') 2536 self.write_check_sa_splitOrders(squared_orders,split_orders, 2537 nexternal,ninitial,proc_prefix,check_sa_writer) 2538 2539 if write: 2540 writers.FortranWriter('nsqso_born.inc').writelines( 2541 """INTEGER NSQSO_BORN 2542 PARAMETER (NSQSO_BORN=%d)"""%replace_dict['nSqAmpSplitOrders']) 2543 2544 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 2545 2546 matrix_template = self.matrix_template 2547 if self.opt['export_format']=='standalone_msP' : 2548 matrix_template = 'matrix_standalone_msP_v4.inc' 2549 elif self.opt['export_format']=='standalone_msF': 2550 matrix_template = 'matrix_standalone_msF_v4.inc' 2551 elif self.opt['export_format']=='matchbox': 2552 replace_dict["proc_prefix"] = 'MG5_%i_' % matrix_element.get('processes')[0].get('id') 2553 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2554 2555 if len(split_orders)>0: 2556 if self.opt['export_format'] in ['standalone_msP', 'standalone_msF']: 2557 logger.debug("Warning: The export format %s is not "+\ 2558 " available for individual ME evaluation of given coupl. 
orders."+\ 2559 " Only the total ME will be computed.", self.opt['export_format']) 2560 elif self.opt['export_format'] in ['madloop_matchbox']: 2561 replace_dict["color_information"] = self.get_color_string_lines(matrix_element) 2562 matrix_template = "matrix_standalone_matchbox_splitOrders_v4.inc" 2563 else: 2564 matrix_template = "matrix_standalone_splitOrders_v4.inc" 2565 2566 replace_dict['template_file'] = pjoin(_file_path, 'iolibs', 'template_files', matrix_template) 2567 replace_dict['template_file2'] = pjoin(_file_path, \ 2568 'iolibs/template_files/split_orders_helping_functions.inc') 2569 if write and writer: 2570 path = replace_dict['template_file'] 2571 content = open(path).read() 2572 content = content % replace_dict 2573 # Write the file 2574 writer.writelines(content) 2575 # Add the helper functions. 2576 if len(split_orders)>0: 2577 content = '\n' + open(replace_dict['template_file2'])\ 2578 .read()%replace_dict 2579 writer.writelines(content) 2580 return len(filter(lambda call: call.find('#') != 0, helas_calls)) 2581 else: 2582 replace_dict['return_value'] = len(filter(lambda call: call.find('#') != 0, helas_calls)) 2583 return replace_dict # for subclass update
2584
2585 - def write_check_sa_splitOrders(self,squared_orders, split_orders, nexternal, 2586 nincoming, proc_prefix, writer):
2587 """ Write out a more advanced version of the check_sa drivers that 2588 individually returns the matrix element for each contributing squared 2589 order.""" 2590 2591 check_sa_content = open(pjoin(self.mgme_dir, 'madgraph', 'iolibs', \ 2592 'template_files', 'check_sa_splitOrders.f')).read() 2593 printout_sq_orders=[] 2594 for i, squared_order in enumerate(squared_orders): 2595 sq_orders=[] 2596 for j, sqo in enumerate(squared_order): 2597 sq_orders.append('%s=%d'%(split_orders[j],sqo)) 2598 printout_sq_orders.append(\ 2599 "write(*,*) '%d) Matrix element for (%s) = ',MATELEMS(%d)"\ 2600 %(i+1,' '.join(sq_orders),i+1)) 2601 printout_sq_orders='\n'.join(printout_sq_orders) 2602 replace_dict = {'printout_sqorders':printout_sq_orders, 2603 'nSplitOrders':len(squared_orders), 2604 'nexternal':nexternal, 2605 'nincoming':nincoming, 2606 'proc_prefix':proc_prefix} 2607 2608 if writer: 2609 writer.writelines(check_sa_content % replace_dict) 2610 else: 2611 return replace_dict
2612
class ProcessExporterFortranMatchBox(ProcessExporterFortranSA):
    """Exporter producing standalone Born matrix elements for the Matchbox
    code (Herwig interface). Specializes the standalone exporter with
    Matchbox-specific templates, color-matrix output and leading-color JAMPs."""

    # Matchbox-specific defaults: no directory cleaning, matchbox format,
    # symmetric standalone output.
    default_opt = {'clean': False, 'complex_mass':False,
                   'export_format':'matchbox', 'mp': False,
                   'sa_symmetry': True}

    #specific template of the born


    matrix_template = "matrix_standalone_matchbox.inc"

    @staticmethod
    def get_color_string_lines(matrix_element):
        """Return Fortran lines encoding the color-basis strings of this
        matrix element as an (in1, in2)-indexed lookup (if/elseif chain).
        Only T and Tr color structures are supported by Matchbox."""

        if not matrix_element.get('color_matrix'):
            return "\n".join(["out = 1"])

        #start the real work
        color_denominators = matrix_element.get('color_matrix').\
                                                         get_line_denominators()
        matrix_strings = []
        my_cs = color.ColorString()
        for i_color in xrange(len(color_denominators)):
            # Then write the numerators for the matrix elements
            my_cs.from_immutable(sorted(matrix_element.get('color_basis').keys())[i_color])
            t_str=repr(my_cs)
            t_match=re.compile(r"(\w+)\(([\s\d+\,]*)\)")
            # from '1 T(2,4,1) Tr(4,5,6) Epsilon(5,3,2,1) T(1,2)' returns with findall:
            # [('T', '2,4,1'), ('Tr', '4,5,6'), ('Epsilon', '5,3,2,1'), ('T', '1,2')]
            all_matches = t_match.findall(t_str)
            output = {}
            arg=[]
            for match in all_matches:
                ctype, tmparg = match[0], [m.strip() for m in match[1].split(',')]
                if ctype in ['ColorOne' ]:
                    continue
                if ctype not in ['T', 'Tr' ]:
                    raise MadGraph5Error, 'Color Structure not handled by Matchbox: %s' % ctype
                # '0' acts as a separator between successive color structures
                tmparg += ['0']
                arg +=tmparg
            for j, v in enumerate(arg):
                output[(i_color,j)] = v

            # NOTE(review): iteration order of this dict is arbitrary under
            # Python 2, but the generated conditions are mutually exclusive
            # so the emitted lookup is semantically order-independent.
            for key in output:
                if matrix_strings == []:
                    #first entry
                    matrix_strings.append("""
          if (in1.eq.%s.and.in2.eq.%s)then
              out = %s
           """ % (key[0], key[1], output[key]))
                else:
                    #not first entry
                    matrix_strings.append("""
          elseif (in1.eq.%s.and.in2.eq.%s)then
              out = %s
           """ % (key[0], key[1], output[key]))
        if len(matrix_strings):
            matrix_strings.append(" else \n out = - 1 \n endif")
        else:
            return "\n out = - 1 \n "
        return "\n".join(matrix_strings)

    def make(self,*args,**opts):
        # Matchbox output is compiled externally (by Herwig); nothing to build.
        pass

    def get_JAMP_lines(self, col_amps, JAMP_format="JAMP(%s)", AMP_format="AMP(%s)", split=-1,
                       JAMP_formatLC=None):
        """Return the standard JAMP lines followed by the leading-color-only
        JAMPs (coefficients with no 1/NC suppression), the latter written with
        JAMP_formatLC (default 'LN' + JAMP_format)."""

        if not JAMP_formatLC:
            JAMP_formatLC= "LN%s" % JAMP_format

        error_msg="Malformed '%s' argument passed to the get_JAMP_lines"
        if(isinstance(col_amps,helas_objects.HelasMatrixElement)):
            col_amps=col_amps.get_color_amplitudes()
        elif(isinstance(col_amps,list)):
            if(col_amps and isinstance(col_amps[0],list)):
                col_amps=col_amps
            else:
                raise MadGraph5Error, error_msg % 'col_amps'
        else:
            raise MadGraph5Error, error_msg % 'col_amps'

        # Full-color JAMPs (note: 'split' argument is deliberately fixed to -1)
        text = super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(col_amps,
                                      JAMP_format=JAMP_format,
                                      AMP_format=AMP_format,
                                      split=-1)


        # Filter the col_ampls to generate only those without any 1/NC terms
        # (coefficient[3] holds the power of 1/NC).

        LC_col_amps = []
        for coeff_list in col_amps:
            to_add = []
            for (coefficient, amp_number) in coeff_list:
                if coefficient[3]==0:
                    to_add.append( (coefficient, amp_number) )
            LC_col_amps.append(to_add)

        text += super(ProcessExporterFortranMatchBox, self).get_JAMP_lines(LC_col_amps,
                                      JAMP_format=JAMP_formatLC,
                                      AMP_format=AMP_format,
                                      split=-1)

        return text
2723
2724 2725 2726 2727 #=============================================================================== 2728 # ProcessExporterFortranMW 2729 #=============================================================================== 2730 -class ProcessExporterFortranMW(ProcessExporterFortran):
2731 """Class to take care of exporting a set of matrix elements to 2732 MadGraph v4 - MadWeight format.""" 2733 2734 matrix_file="matrix_standalone_v4.inc" 2735
2736 - def copy_template(self, model):
2737 """Additional actions needed for setup of Template 2738 """ 2739 2740 super(ProcessExporterFortranMW, self).copy_template(model) 2741 2742 # Add the MW specific file 2743 shutil.copytree(pjoin(MG5DIR,'Template','MadWeight'), 2744 pjoin(self.dir_path, 'Source','MadWeight'), True) 2745 shutil.copytree(pjoin(MG5DIR,'madgraph','madweight'), 2746 pjoin(self.dir_path, 'bin','internal','madweight'), True) 2747 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','setrun.f'), 2748 pjoin(self.dir_path, 'Source','setrun.f')) 2749 files.mv(pjoin(self.dir_path, 'Source','MadWeight','src','run.inc'), 2750 pjoin(self.dir_path, 'Source','run.inc')) 2751 # File created from Template (Different in some child class) 2752 filename = os.path.join(self.dir_path,'Source','run_config.inc') 2753 self.write_run_config_file(writers.FortranWriter(filename)) 2754 2755 try: 2756 subprocess.call([os.path.join(self.dir_path, 'Source','MadWeight','bin','internal','pass_to_madweight')], 2757 stdout = os.open(os.devnull, os.O_RDWR), 2758 stderr = os.open(os.devnull, os.O_RDWR), 2759 cwd=self.dir_path) 2760 except OSError: 2761 # Probably madweight already called 2762 pass 2763 2764 # Copy the different python file in the Template 2765 self.copy_python_file() 2766 # create the appropriate cuts.f 2767 self.get_mw_cuts_version() 2768 2769 # add the makefile in Source directory 2770 filename = os.path.join(self.dir_path,'Source','makefile') 2771 self.write_source_makefile(writers.FortranWriter(filename))
2772 2773 2774 2775 2776 #=========================================================================== 2777 # convert_model 2778 #===========================================================================
2779 - def convert_model(self, model, wanted_lorentz = [], 2780 wanted_couplings = []):
2781 2782 super(ProcessExporterFortranMW,self).convert_model(model, 2783 wanted_lorentz, wanted_couplings) 2784 2785 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 2786 try: 2787 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 2788 except OSError as error: 2789 pass 2790 model_path = model.get('modelpath') 2791 # This is not safe if there is a '##' or '-' in the path. 2792 shutil.copytree(model_path, 2793 pjoin(self.dir_path,'bin','internal','ufomodel'), 2794 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 2795 if hasattr(model, 'restrict_card'): 2796 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 2797 'restrict_default.dat') 2798 if isinstance(model.restrict_card, check_param_card.ParamCard): 2799 model.restrict_card.write(out_path) 2800 else: 2801 files.cp(model.restrict_card, out_path)
2802 2803 #=========================================================================== 2804 # generate_subprocess_directory 2805 #===========================================================================
2806 - def copy_python_file(self):
2807 """copy the python file require for the Template""" 2808 2809 # madevent interface 2810 cp(_file_path+'/interface/madweight_interface.py', 2811 self.dir_path+'/bin/internal/madweight_interface.py') 2812 cp(_file_path+'/interface/extended_cmd.py', 2813 self.dir_path+'/bin/internal/extended_cmd.py') 2814 cp(_file_path+'/interface/common_run_interface.py', 2815 self.dir_path+'/bin/internal/common_run_interface.py') 2816 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 2817 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 2818 cp(_file_path+'/iolibs/save_load_object.py', 2819 self.dir_path+'/bin/internal/save_load_object.py') 2820 cp(_file_path+'/madevent/gen_crossxhtml.py', 2821 self.dir_path+'/bin/internal/gen_crossxhtml.py') 2822 cp(_file_path+'/madevent/sum_html.py', 2823 self.dir_path+'/bin/internal/sum_html.py') 2824 cp(_file_path+'/various/FO_analyse_card.py', 2825 self.dir_path+'/bin/internal/FO_analyse_card.py') 2826 cp(_file_path+'/iolibs/file_writers.py', 2827 self.dir_path+'/bin/internal/file_writers.py') 2828 #model file 2829 cp(_file_path+'../models/check_param_card.py', 2830 self.dir_path+'/bin/internal/check_param_card.py') 2831 2832 #madevent file 2833 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 2834 cp(_file_path+'/various/lhe_parser.py', 2835 self.dir_path+'/bin/internal/lhe_parser.py') 2836 2837 cp(_file_path+'/various/banner.py', 2838 self.dir_path+'/bin/internal/banner.py') 2839 cp(_file_path+'/various/shower_card.py', 2840 self.dir_path+'/bin/internal/shower_card.py') 2841 cp(_file_path+'/various/cluster.py', 2842 self.dir_path+'/bin/internal/cluster.py') 2843 2844 # logging configuration 2845 cp(_file_path+'/interface/.mg5_logging.conf', 2846 self.dir_path+'/bin/internal/me5_logging.conf') 2847 cp(_file_path+'/interface/coloring_logging.py', 2848 self.dir_path+'/bin/internal/coloring_logging.py')
2849 2850 2851 #=========================================================================== 2852 # Change the version of cuts.f to the one compatible with MW 2853 #===========================================================================
    def get_mw_cuts_version(self, outpath=None):
        """create the appropriate cuts.f
        This is based on the one associated to ME output but:
        1) No clustering (=> remove initcluster/setclscales)
        2) Adding the definition of cut_bw at the file.

        outpath: None (write to SubProcesses/cuts.f), a path string, or an
        already-open file-like object.
        """

        template = open(pjoin(MG5DIR,'Template','LO','SubProcesses','cuts.f'))

        text = StringIO()
        #1) remove all dependencies in ickkw >1:
        # Skip the whole Fortran if-block opened by the 'if(xqcut.gt.0d0'
        # line, tracking nesting depth with nb_if; lines are only copied
        # while nb_if == 0.
        nb_if = 0
        for line in template:
            if 'if(xqcut.gt.0d0' in line:
                nb_if = 1
            if nb_if == 0:
                text.write(line)
                continue
            # NOTE(review): if the opening 'if(xqcut...' line itself also
            # matches 'if(...)then', nb_if is bumped to 2 on the same line —
            # presumably the template splits the condition over several
            # lines; confirm against Template/LO/SubProcesses/cuts.f.
            if re.search(r'if\(.*\)\s*then', line):
                nb_if += 1
            elif 'endif' in line:
                nb_if -= 1

        #2) add fake cut_bw (have to put the true one later)
        text.write("""
      logical function cut_bw(p)
      include 'madweight_param.inc'
      double precision p(*)
      if (bw_cut) then
         cut_bw = .true.
      else
         stop 1
      endif
      return
      end
        """)

        final = text.getvalue()
        #3) remove the call to initcluster:
        template = final.replace('call initcluster', '! Remove for MW!call initcluster')
        # MadWeight ships maxparticles.inc instead of genps.inc
        template = template.replace('genps.inc', 'maxparticles.inc')
        #Now we can write it
        if not outpath:
            fsock = open(pjoin(self.dir_path, 'SubProcesses', 'cuts.f'), 'w')
        elif isinstance(outpath, str):
            fsock = open(outpath, 'w')
        else:
            fsock = outpath
        fsock.write(template)
2903 2904 2905 2906 #=========================================================================== 2907 # Make the Helas and Model directories for Standalone directory 2908 #===========================================================================
2909 - def make(self):
2910 """Run make in the DHELAS, MODEL, PDF and CERNLIB directories, to set up 2911 everything for running madweight 2912 """ 2913 2914 source_dir = os.path.join(self.dir_path, "Source") 2915 logger.info("Running make for Helas") 2916 misc.compile(arg=['../lib/libdhelas.a'], cwd=source_dir, mode='fortran') 2917 logger.info("Running make for Model") 2918 misc.compile(arg=['../lib/libmodel.a'], cwd=source_dir, mode='fortran') 2919 logger.info("Running make for PDF") 2920 misc.compile(arg=['../lib/libpdf.a'], cwd=source_dir, mode='fortran') 2921 logger.info("Running make for CERNLIB") 2922 misc.compile(arg=['../lib/libcernlib.a'], cwd=source_dir, mode='fortran') 2923 logger.info("Running make for GENERIC") 2924 misc.compile(arg=['../lib/libgeneric.a'], cwd=source_dir, mode='fortran') 2925 logger.info("Running make for blocks") 2926 misc.compile(arg=['../lib/libblocks.a'], cwd=source_dir, mode='fortran') 2927 logger.info("Running make for tools") 2928 misc.compile(arg=['../lib/libtools.a'], cwd=source_dir, mode='fortran')
2929 2930 #=========================================================================== 2931 # Create proc_card_mg5.dat for MadWeight directory 2932 #===========================================================================
    def finalize(self, matrix_elements, history, mg5options, flaglist):
        """Finalize the MadWeight MG4 directory: write proc_characteristics
        and maxparticles.inc, link the latter into the MadWeight blocks/tools
        dirs, compile the libraries, dump the command history as
        proc_card_mg5.dat and delegate to the generic finalize."""

        # Compiler choices taken from the MG5 options.
        compiler = {'fortran': mg5options['fortran_compiler'],
                    'cpp': mg5options['cpp_compiler'],
                    'f2py': mg5options['f2py_compiler']}



        #proc_charac
        self.create_proc_charac()

        # Write maxparticles.inc based on max of ME's/subprocess groups
        filename = pjoin(self.dir_path,'Source','maxparticles.inc')
        self.write_maxparticles_file(writers.FortranWriter(filename),
                                     matrix_elements)
        # MadWeight's blocks and tools libraries include the same file.
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','blocks'))
        ln(pjoin(self.dir_path, 'Source', 'maxparticles.inc'),
           pjoin(self.dir_path, 'Source','MadWeight','tools'))

        # Compiler must be set before the libraries are built.
        self.set_compiler(compiler)
        self.make()

        # Write command history as proc_card_mg5
        if os.path.isdir(os.path.join(self.dir_path, 'Cards')):
            output_file = os.path.join(self.dir_path, 'Cards', 'proc_card_mg5.dat')
            history.write(output_file)

        ProcessExporterFortran.finalize(self, matrix_elements,
                                        history, mg5options, flaglist)
2964 2965 2966 2967 #=========================================================================== 2968 # create the run_card for MW 2969 #===========================================================================
2970 - def create_run_card(self, matrix_elements, history):
2971 """ """ 2972 2973 run_card = banner_mod.RunCard() 2974 2975 # pass to default for MW 2976 run_card["run_tag"] = "\'not_use\'" 2977 run_card["fixed_ren_scale"] = "T" 2978 run_card["fixed_fac_scale"] = "T" 2979 run_card.remove_all_cut() 2980 2981 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card_default.dat'), 2982 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2983 python_template=True) 2984 run_card.write(pjoin(self.dir_path, 'Cards', 'run_card.dat'), 2985 template=pjoin(MG5DIR, 'Template', 'MadWeight', 'Cards', 'run_card.dat'), 2986 python_template=True)
2987 2988 #=========================================================================== 2989 # export model files 2990 #===========================================================================
2991 - def export_model_files(self, model_path):
2992 """export the model dependent files for V4 model""" 2993 2994 super(ProcessExporterFortranMW,self).export_model_files(model_path) 2995 # Add the routine update_as_param in v4 model 2996 # This is a function created in the UFO 2997 text=""" 2998 subroutine update_as_param() 2999 call setpara('param_card.dat',.false.) 3000 return 3001 end 3002 """ 3003 ff = open(os.path.join(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3004 ff.write(text) 3005 ff.close() 3006 3007 # Modify setrun.f 3008 text = open(os.path.join(self.dir_path,'Source','setrun.f')).read() 3009 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3010 fsock = open(os.path.join(self.dir_path,'Source','setrun.f'), 'w') 3011 fsock.write(text) 3012 fsock.close() 3013 3014 # Modify initialization.f 3015 text = open(os.path.join(self.dir_path,'SubProcesses','initialization.f')).read() 3016 text = text.replace('call setpara(param_name)', 'call setpara(param_name, .true.)') 3017 fsock = open(os.path.join(self.dir_path,'SubProcesses','initialization.f'), 'w') 3018 fsock.write(text) 3019 fsock.close() 3020 3021 3022 self.make_model_symbolic_link()
3023 3024 #=========================================================================== 3025 # generate_subprocess_directory 3026 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model, number):
        """Generate the Pxxxxx directory for a subprocess in MG4 MadWeight format,
        including the necessary matrix.f and nexternal.inc files.

        Returns the number of non-comment helas calls written to matrix.f
        (0 if none)."""

        cwd = os.getcwd()
        # Create the directory PN_xx_xxxxx in the specified path
        dirpath = os.path.join(self.dir_path, 'SubProcesses', \
                       "P%s" % matrix_element.get('processes')[0].shell_string())

        try:
            os.mkdir(dirpath)
        except os.error as error:
            # Directory probably exists already; warn and keep going.
            logger.warning(error.strerror + " " + dirpath)

        #try:
        #    os.chdir(dirpath)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % dirpath)
        #    return 0

        logger.info('Creating files in directory %s' % dirpath)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Create the matrix.f file and the nexternal.inc file
        filename = pjoin(dirpath, 'matrix.f')
        calls, ncolor = self.write_matrix_element_v4(
            writers.FortranWriter(filename),
            matrix_element,
            fortran_model)

        filename = pjoin(dirpath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the s/t-channel info reused by props.inc.
        filename = pjoin(dirpath, 'configs.inc')
        mapconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(dirpath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(dirpath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(dirpath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = pjoin(dirpath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(dirpath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(matrix_element.get_all_amplitudes()))

        filename = pjoin(dirpath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(dirpath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   len(matrix_element.get('diagrams')),
                                   )

        # Generate diagrams (EPS output) unless disabled by the 'noeps' option
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(dirpath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                                 get('model'),
                                              amplitude='')
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        #import genps.inc and maxconfigs.inc into Subprocesses
        ln(self.dir_path + '/Source/genps.inc', self.dir_path + '/SubProcesses', log=False)
        #ln(self.dir_path + '/Source/maxconfigs.inc', self.dir_path + '/SubProcesses', log=False)

        linkfiles = ['driver.f', 'cuts.f', 'initialization.f', 'gen_ps.f', 'makefile', 'coupl.inc', 'madweight_param.inc', 'run.inc', 'setscales.f', 'genps.inc']

        # NOTE(review): these links use starting_dir=cwd, i.e. the process
        # working directory, not dirpath — presumably ln resolves the '../'
        # relative to the SubProcesses dir; confirm against iolibs.files.ln.
        for file in linkfiles:
            ln('../%s' % file, starting_dir=cwd)

        ln('nexternal.inc', '../../Source', log=False, cwd=dirpath)
        ln('leshouche.inc', '../../Source', log=False, cwd=dirpath)
        ln('maxamps.inc', '../../Source', log=False, cwd=dirpath)
        ln('phasespace.inc', '../', log=True, cwd=dirpath)
        # Return to original PWD
        #os.chdir(cwd)

        if not calls:
            calls = 0
        return calls
3134 3135 #=========================================================================== 3136 # write_matrix_element_v4 3137 #===========================================================================
3138 - def write_matrix_element_v4(self, writer, matrix_element, fortran_model,proc_id = "", config_map = []):
3139 """Export a matrix element to a matrix.f file in MG4 MadWeight format""" 3140 3141 if not matrix_element.get('processes') or \ 3142 not matrix_element.get('diagrams'): 3143 return 0 3144 3145 if writer: 3146 if not isinstance(writer, writers.FortranWriter): 3147 raise writers.FortranWriter.FortranWriterError(\ 3148 "writer not FortranWriter") 3149 3150 # Set lowercase/uppercase Fortran code 3151 writers.FortranWriter.downcase = False 3152 3153 replace_dict = {} 3154 3155 # Extract version number and date from VERSION file 3156 info_lines = self.get_mg5_info_lines() 3157 replace_dict['info_lines'] = info_lines 3158 3159 # Extract process info lines 3160 process_lines = self.get_process_info_lines(matrix_element) 3161 replace_dict['process_lines'] = process_lines 3162 3163 # Set proc_id 3164 replace_dict['proc_id'] = proc_id 3165 3166 # Extract number of external particles 3167 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3168 replace_dict['nexternal'] = nexternal 3169 3170 # Extract ncomb 3171 ncomb = matrix_element.get_helicity_combinations() 3172 replace_dict['ncomb'] = ncomb 3173 3174 # Extract helicity lines 3175 helicity_lines = self.get_helicity_lines(matrix_element) 3176 replace_dict['helicity_lines'] = helicity_lines 3177 3178 # Extract overall denominator 3179 # Averaging initial state color, spin, and identical FS particles 3180 den_factor_line = self.get_den_factor_line(matrix_element) 3181 replace_dict['den_factor_line'] = den_factor_line 3182 3183 # Extract ngraphs 3184 ngraphs = matrix_element.get_number_of_amplitudes() 3185 replace_dict['ngraphs'] = ngraphs 3186 3187 # Extract nwavefuncs 3188 nwavefuncs = matrix_element.get_number_of_wavefunctions() 3189 replace_dict['nwavefuncs'] = nwavefuncs 3190 3191 # Extract ncolor 3192 ncolor = max(1, len(matrix_element.get('color_basis'))) 3193 replace_dict['ncolor'] = ncolor 3194 3195 # Extract color data lines 3196 color_data_lines = self.get_color_data_lines(matrix_element) 3197 
replace_dict['color_data_lines'] = "\n".join(color_data_lines) 3198 3199 # Extract helas calls 3200 helas_calls = fortran_model.get_matrix_element_calls(\ 3201 matrix_element) 3202 3203 replace_dict['helas_calls'] = "\n".join(helas_calls) 3204 3205 # Extract JAMP lines 3206 jamp_lines = self.get_JAMP_lines(matrix_element) 3207 replace_dict['jamp_lines'] = '\n'.join(jamp_lines) 3208 3209 replace_dict['template_file'] = os.path.join(_file_path, \ 3210 'iolibs/template_files/%s' % self.matrix_file) 3211 replace_dict['template_file2'] = '' 3212 3213 if writer: 3214 file = open(replace_dict['template_file']).read() 3215 file = file % replace_dict 3216 # Write the file 3217 writer.writelines(file) 3218 return len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor 3219 else: 3220 replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)),ncolor)
3221 3222 #=========================================================================== 3223 # write_source_makefile 3224 #===========================================================================
    def write_source_makefile(self, writer):
        """Write the Source-directory makefile for MadWeight, filling the
        library list into the madweight_makefile_source template.
        (The previous docstring wrongly said 'nexternal.inc'.)

        Always returns True."""


        path = os.path.join(_file_path,'iolibs','template_files','madweight_makefile_source')
        # Libraries linked by the MadWeight Source makefile.
        set_of_lib = '$(LIBRARIES) $(LIBDIR)libdhelas.$(libext) $(LIBDIR)libpdf.$(libext) $(LIBDIR)libmodel.$(libext) $(LIBDIR)libcernlib.$(libext) $(LIBDIR)libtf.$(libext)'
        text = open(path).read() % {'libraries': set_of_lib}
        writer.write(text)

        return True
3235
3236 - def write_phasespace_file(self, writer, nb_diag):
3237 """ """ 3238 3239 template = """ include 'maxparticles.inc' 3240 integer max_branches 3241 parameter (max_branches=max_particles-1) 3242 integer max_configs 3243 parameter (max_configs=%(nb_diag)s) 3244 3245 c channel position 3246 integer config_pos,perm_pos 3247 common /to_config/config_pos,perm_pos 3248 3249 """ 3250 3251 writer.write(template % {'nb_diag': nb_diag})
3252 3253 3254 #=========================================================================== 3255 # write_auto_dsig_file 3256 #===========================================================================
3257 - def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
3258 """Write the auto_dsig.f file for the differential cross section 3259 calculation, includes pdf call information (MadWeight format)""" 3260 3261 if not matrix_element.get('processes') or \ 3262 not matrix_element.get('diagrams'): 3263 return 0 3264 3265 nexternal, ninitial = matrix_element.get_nexternal_ninitial() 3266 3267 if ninitial < 1 or ninitial > 2: 3268 raise writers.FortranWriter.FortranWriterError, \ 3269 """Need ninitial = 1 or 2 to write auto_dsig file""" 3270 3271 replace_dict = {} 3272 3273 # Extract version number and date from VERSION file 3274 info_lines = self.get_mg5_info_lines() 3275 replace_dict['info_lines'] = info_lines 3276 3277 # Extract process info lines 3278 process_lines = self.get_process_info_lines(matrix_element) 3279 replace_dict['process_lines'] = process_lines 3280 3281 # Set proc_id 3282 replace_dict['proc_id'] = proc_id 3283 replace_dict['numproc'] = 1 3284 3285 # Set dsig_line 3286 if ninitial == 1: 3287 # No conversion, since result of decay should be given in GeV 3288 dsig_line = "pd(0)*dsiguu" 3289 else: 3290 # Convert result (in GeV) to pb 3291 dsig_line = "pd(0)*conv*dsiguu" 3292 3293 replace_dict['dsig_line'] = dsig_line 3294 3295 # Extract pdf lines 3296 pdf_vars, pdf_data, pdf_lines = \ 3297 self.get_pdf_lines(matrix_element, ninitial, proc_id != "") 3298 replace_dict['pdf_vars'] = pdf_vars 3299 replace_dict['pdf_data'] = pdf_data 3300 replace_dict['pdf_lines'] = pdf_lines 3301 3302 # Lines that differ between subprocess group and regular 3303 if proc_id: 3304 replace_dict['numproc'] = int(proc_id) 3305 replace_dict['passcuts_begin'] = "" 3306 replace_dict['passcuts_end'] = "" 3307 # Set lines for subprocess group version 3308 # Set define_iconfigs_lines 3309 replace_dict['define_subdiag_lines'] = \ 3310 """\nINTEGER SUBDIAG(MAXSPROC),IB(2) 3311 COMMON/TO_SUB_DIAG/SUBDIAG,IB""" 3312 else: 3313 replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN" 3314 replace_dict['passcuts_end'] = "ENDIF" 3315 
replace_dict['define_subdiag_lines'] = "" 3316 3317 if writer: 3318 file = open(os.path.join(_file_path, \ 3319 'iolibs/template_files/auto_dsig_mw.inc')).read() 3320 3321 file = file % replace_dict 3322 # Write the file 3323 writer.writelines(file) 3324 else: 3325 return replace_dict
3326 #=========================================================================== 3327 # write_configs_file 3328 #===========================================================================
3329 - def write_configs_file(self, writer, matrix_element):
3330 """Write the configs.inc file for MadEvent""" 3331 3332 # Extract number of external particles 3333 (nexternal, ninitial) = matrix_element.get_nexternal_ninitial() 3334 3335 configs = [(i+1, d) for i,d in enumerate(matrix_element.get('diagrams'))] 3336 mapconfigs = [c[0] for c in configs] 3337 model = matrix_element.get('processes')[0].get('model') 3338 return mapconfigs, self.write_configs_file_from_diagrams(writer, 3339 [[c[1]] for c in configs], 3340 mapconfigs, 3341 nexternal, ninitial,matrix_element, model)
3342 3343 #=========================================================================== 3344 # write_run_configs_file 3345 #===========================================================================
3346 - def write_run_config_file(self, writer):
3347 """Write the run_configs.inc file for MadWeight""" 3348 3349 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 3350 text = open(path).read() % {'chanperjob':'5'} 3351 writer.write(text) 3352 return True
3353 3354 #=========================================================================== 3355 # write_configs_file_from_diagrams 3356 #===========================================================================
    def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                         nexternal, ninitial, matrix_element, model):
        """Write the actual configs.inc file.

        configs is the diagrams corresponding to configs (each
        diagrams is a list of corresponding diagrams for all
        subprocesses, with None if there is no corresponding diagrams
        for a given process).
        mapconfigs gives the diagram number for each config.

        For s-channels, we need to output one PDG for each subprocess in
        the subprocess group, in order to be able to pick the right
        one for multiprocesses.

        Returns the list of (s_channels, t_channels) used per config, so the
        caller can write props.inc consistently."""

        lines = []

        particle_dict = matrix_element.get('processes')[0].get('model').\
                        get('particle_dict')

        s_and_t_channels = []

        # Maximal vertex size seen in each config (first non-None diagram);
        # configs with larger vertices than the overall minimum are skipped
        # below, since only 3-point vertices are allowed in configs.inc.
        vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                     for config in configs if [d for d in config if d][0].\
                     get_vertex_leg_numbers()!=[]]

        minvert = min(vert_list) if vert_list!=[] else 0
        # Number of subprocesses
        nsubprocs = len(configs[0])

        nconfigs = 0

        # Fake PDG code used for multiparticle vertices.
        new_pdg = model.get_first_non_pdg()

        for iconfig, helas_diags in enumerate(configs):
            if any([vert > minvert for vert in
                    [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
                # Only 3-vertices allowed in configs.inc
                continue
            nconfigs += 1

            # Need s- and t-channels for all subprocesses, including
            # those that don't contribute to this config
            empty_verts = []
            stchannels = []
            for h in helas_diags:
                if h:
                    # get_s_and_t_channels gives vertices starting from
                    # final state external particles and working inwards
                    stchannels.append(h.get('amplitudes')[0].\
                                      get_s_and_t_channels(ninitial, model, new_pdg))
                else:
                    stchannels.append((empty_verts, None))

            # For t-channels, just need the first non-empty one
            tchannels = [t for s,t in stchannels if t != None][0]

            # For s_and_t_channels (to be used later) use only first config
            s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                     tchannels])

            # Make sure empty_verts is same length as real vertices
            if any([s for s,t in stchannels]):
                empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

                # Reorganize s-channel vertices to get a list of all
                # subprocesses for each vertex
                schannels = zip(*[s for s,t in stchannels])
            else:
                schannels = []

            allchannels = schannels
            if len(tchannels) > 1:
                # Write out tchannels only if there are any non-trivial ones
                allchannels = schannels + tchannels

            # Write out propagators for s-channel and t-channel vertices

            #lines.append("# Diagram %d" % (mapconfigs[iconfig]))
            # Correspondance between the config and the diagram = amp2
            lines.append("* %d %d " % (nconfigs,
                                       mapconfigs[iconfig]))

            for verts in allchannels:
                if verts in schannels:
                    vert = [v for v in verts if v][0]
                else:
                    vert = verts
                daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
                last_leg = vert.get('legs')[-1]
                # Propagator line: <prop number> <daughter1> <daughter2>
                line=str(last_leg.get('number'))+" "+str(daughters[0])+" "+str(daughters[1])
                # lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                #              (last_leg.get('number'), nconfigs, len(daughters),
                #               ",".join([str(d) for d in daughters])))

                if last_leg.get('id') == 21 and 21 not in particle_dict:
                    # Fake propagator used in multiparticle vertices
                    mass = 'zero'
                    width = 'zero'
                    pow_part = 0
                else:
                    if (last_leg.get('id')!=7):
                        particle = particle_dict[last_leg.get('id')]
                        # Get mass
                        mass = particle.get('mass')
                        # Get width
                        width = particle.get('width')
                    else : # fake propagator used in multiparticle vertices
                        mass= 'zero'
                        width= 'zero'

                line=line+" "+mass+" "+width+" "

                if verts in schannels:
                    # NOTE(review): pdgs is collected but only used by the
                    # commented-out sprop data lines below.
                    pdgs = []
                    for v in verts:
                        if v:
                            pdgs.append(v.get('legs')[-1].get('id'))
                        else:
                            pdgs.append(0)
                    lines.append(line+" S "+str(last_leg.get('id')))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join([str(d) for d in pdgs])))
                    # lines.append("data tprid(%d,%d)/0/" % \
                    #              (last_leg.get('number'), nconfigs))
                elif verts in tchannels[:-1]:
                    lines.append(line+" T "+str(last_leg.get('id')))
                    # lines.append("data tprid(%d,%d)/%d/" % \
                    #              (last_leg.get('number'), nconfigs,
                    #               abs(last_leg.get('id'))))
                    # lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                    #              (last_leg.get('number'), nconfigs, nsubprocs,
                    #               ",".join(['0'] * nsubprocs)))

        # Write out number of configs
        # lines.append("# Number of configs")
        # lines.append("data mapconfig(0)/%d/" % nconfigs)
        lines.append(" * ") # a line with just a star indicates this is the end of file
        # Write the file
        writer.writelines(lines)

        return s_and_t_channels
3499
3500 3501 3502 #=============================================================================== 3503 # ProcessExporterFortranME 3504 #=============================================================================== 3505 -class ProcessExporterFortranME(ProcessExporterFortran):
    """Class to take care of exporting a set of matrix elements to
    MadEvent format."""

    # Template used for the per-subprocess matrix.f file
    # (presumably consumed by write_matrix_element_v4 -- confirm).
    matrix_file = "matrix_madevent_v4.inc"

    # helper function for customise helas writter
    @staticmethod
3513 - def custom_helas_call(call, arg):
3514 if arg['mass'] == '%(M)s,%(W)s,': 3515 arg['mass'] = '%(M)s, fk_%(W)s,' 3516 elif '%(W)s' in arg['mass']: 3517 raise Exception 3518 return call, arg
3519
3520 - def copy_template(self, model):
3521 """Additional actions needed for setup of Template 3522 """ 3523 3524 super(ProcessExporterFortranME, self).copy_template(model) 3525 3526 # File created from Template (Different in some child class) 3527 filename = pjoin(self.dir_path,'Source','run_config.inc') 3528 self.write_run_config_file(writers.FortranWriter(filename)) 3529 3530 # The next file are model dependant (due to SLAH convention) 3531 self.model_name = model.get('name') 3532 # Add the symmetry.f 3533 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3534 self.write_symmetry(writers.FortranWriter(filename)) 3535 # 3536 filename = pjoin(self.dir_path,'SubProcesses','addmothers.f') 3537 self.write_addmothers(writers.FortranWriter(filename)) 3538 # Copy the different python file in the Template 3539 self.copy_python_file()
3540 3541 3542 3543 3544 3545 3546 #=========================================================================== 3547 # generate_subprocess_directory 3548 #===========================================================================
3549 - def copy_python_file(self):
3550 """copy the python file require for the Template""" 3551 3552 # madevent interface 3553 cp(_file_path+'/interface/madevent_interface.py', 3554 self.dir_path+'/bin/internal/madevent_interface.py') 3555 cp(_file_path+'/interface/extended_cmd.py', 3556 self.dir_path+'/bin/internal/extended_cmd.py') 3557 cp(_file_path+'/interface/common_run_interface.py', 3558 self.dir_path+'/bin/internal/common_run_interface.py') 3559 cp(_file_path+'/various/misc.py', self.dir_path+'/bin/internal/misc.py') 3560 cp(_file_path+'/iolibs/files.py', self.dir_path+'/bin/internal/files.py') 3561 cp(_file_path+'/iolibs/save_load_object.py', 3562 self.dir_path+'/bin/internal/save_load_object.py') 3563 cp(_file_path+'/iolibs/file_writers.py', 3564 self.dir_path+'/bin/internal/file_writers.py') 3565 #model file 3566 cp(_file_path+'../models/check_param_card.py', 3567 self.dir_path+'/bin/internal/check_param_card.py') 3568 3569 #copy all the file present in madevent directory 3570 for name in os.listdir(pjoin(_file_path, 'madevent')): 3571 if name not in ['__init__.py'] and name.endswith('.py'): 3572 cp(_file_path+'/madevent/'+name, self.dir_path+'/bin/internal/') 3573 3574 #madevent file 3575 cp(_file_path+'/__init__.py', self.dir_path+'/bin/internal/__init__.py') 3576 cp(_file_path+'/various/lhe_parser.py', 3577 self.dir_path+'/bin/internal/lhe_parser.py') 3578 cp(_file_path+'/various/banner.py', 3579 self.dir_path+'/bin/internal/banner.py') 3580 cp(_file_path+'/various/histograms.py', 3581 self.dir_path+'/bin/internal/histograms.py') 3582 cp(_file_path+'/various/plot_djrs.py', 3583 self.dir_path+'/bin/internal/plot_djrs.py') 3584 cp(_file_path+'/various/systematics.py', self.dir_path+'/bin/internal/systematics.py') 3585 3586 cp(_file_path+'/various/cluster.py', 3587 self.dir_path+'/bin/internal/cluster.py') 3588 cp(_file_path+'/madevent/combine_runs.py', 3589 self.dir_path+'/bin/internal/combine_runs.py') 3590 # logging configuration 3591 cp(_file_path+'/interface/.mg5_logging.conf', 3592 
self.dir_path+'/bin/internal/me5_logging.conf') 3593 cp(_file_path+'/interface/coloring_logging.py', 3594 self.dir_path+'/bin/internal/coloring_logging.py') 3595 # shower card and FO_analyse_card. 3596 # Although not needed, it is imported by banner.py 3597 cp(_file_path+'/various/shower_card.py', 3598 self.dir_path+'/bin/internal/shower_card.py') 3599 cp(_file_path+'/various/FO_analyse_card.py', 3600 self.dir_path+'/bin/internal/FO_analyse_card.py')
3601 3602
3603 - def convert_model(self, model, wanted_lorentz = [], 3604 wanted_couplings = []):
3605 3606 super(ProcessExporterFortranME,self).convert_model(model, 3607 wanted_lorentz, wanted_couplings) 3608 3609 IGNORE_PATTERNS = ('*.pyc','*.dat','*.py~') 3610 try: 3611 shutil.rmtree(pjoin(self.dir_path,'bin','internal','ufomodel')) 3612 except OSError as error: 3613 pass 3614 model_path = model.get('modelpath') 3615 # This is not safe if there is a '##' or '-' in the path. 3616 shutil.copytree(model_path, 3617 pjoin(self.dir_path,'bin','internal','ufomodel'), 3618 ignore=shutil.ignore_patterns(*IGNORE_PATTERNS)) 3619 if hasattr(model, 'restrict_card'): 3620 out_path = pjoin(self.dir_path, 'bin', 'internal','ufomodel', 3621 'restrict_default.dat') 3622 if isinstance(model.restrict_card, check_param_card.ParamCard): 3623 model.restrict_card.write(out_path) 3624 else: 3625 files.cp(model.restrict_card, out_path)
3626 3627 #=========================================================================== 3628 # export model files 3629 #===========================================================================
3630 - def export_model_files(self, model_path):
3631 """export the model dependent files""" 3632 3633 super(ProcessExporterFortranME,self).export_model_files(model_path) 3634 3635 # Add the routine update_as_param in v4 model 3636 # This is a function created in the UFO 3637 text=""" 3638 subroutine update_as_param() 3639 call setpara('param_card.dat',.false.) 3640 return 3641 end 3642 """ 3643 ff = open(pjoin(self.dir_path, 'Source', 'MODEL', 'couplings.f'),'a') 3644 ff.write(text) 3645 ff.close() 3646 3647 # Add the symmetry.f 3648 filename = pjoin(self.dir_path,'SubProcesses','symmetry.f') 3649 self.write_symmetry(writers.FortranWriter(filename), v5=False) 3650 3651 # Modify setrun.f 3652 text = open(pjoin(self.dir_path,'Source','setrun.f')).read() 3653 text = text.replace('call setpara(param_card_name)', 'call setpara(param_card_name, .true.)') 3654 fsock = open(pjoin(self.dir_path,'Source','setrun.f'), 'w') 3655 fsock.write(text) 3656 fsock.close() 3657 3658 self.make_model_symbolic_link()
3659 3660 #=========================================================================== 3661 # generate_subprocess_directory 3662 #===========================================================================
    def generate_subprocess_directory(self, matrix_element,
                                      fortran_model,
                                      me_number):
        """Generate the Pxxxxx directory for a subprocess in MG4 madevent,
        including the necessary matrix.f and various helper files

        matrix_element: the HelasMatrixElement to export.
        fortran_model: the helas call writer used for matrix.f.
        me_number: running index of this matrix element (used for
            iproc.dat and the matrix.f subprocess number).

        Returns the number of helas calls written for matrix.f (0 when
        nothing was written).
        """

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        # Remember the model of the first process if none was set yet
        if not self.model:
            self.model = matrix_element.get('processes')[0].get('model')

        #os.chdir(path)
        # Create the directory PN_xx_xxxxx in the specified path
        subprocdir = "P%s" % matrix_element.get('processes')[0].shell_string()
        try:
            os.mkdir(pjoin(path, subprocdir))
        except os.error as error:
            # Directory probably exists already; warn and keep going
            logger.warning(error.strerror + " " + subprocdir)

        #try:
        #    os.chdir(subprocdir)
        #except os.error:
        #    logger.error('Could not cd to directory %s' % subprocdir)
        #    return 0

        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(path, subprocdir)

        # Extract number of external particles
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Add the driver.f
        ncomb = matrix_element.get_helicity_combinations()
        filename = pjoin(Ppath, 'driver.f')
        self.write_driver(writers.FortranWriter(filename), ncomb,
                          n_grouped_proc=1, v5=self.opt['v5_model'])

        # Create the matrix.f file, auto_dsig.f file and all inc files
        filename = pjoin(Ppath, 'matrix.f')
        calls, ncolor = \
            self.write_matrix_element_v4(writers.FortranWriter(filename),
                                         matrix_element, fortran_model,
                                         subproc_number = me_number)

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_auto_dsig_file(writers.FortranWriter(filename),
                                  matrix_element)

        # configs.inc also yields the channel information reused by the
        # config_subproc_map/decayBW/props writers below
        filename = pjoin(Ppath, 'configs.inc')
        mapconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            matrix_element)

        filename = pjoin(Ppath, 'config_nqcd.inc')
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = pjoin(Ppath, 'config_subproc_map.inc')
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           s_and_t_channels)

        filename = pjoin(Ppath, 'coloramps.inc')
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  mapconfigs,
                                  matrix_element)

        filename = pjoin(Ppath, 'get_color.f')
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'decayBW.inc')
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        # dname.mg is plain text, hence FileWriter instead of FortranWriter
        filename = pjoin(Ppath, 'dname.mg')
        self.write_dname_file(writers.FileWriter(filename),
                              "P" + matrix_element.get('processes')[0].shell_string())

        filename = pjoin(Ppath, 'iproc.dat')
        self.write_iproc_file(writers.FortranWriter(filename),
                              me_number)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  matrix_element)

        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                len(matrix_element.get('diagrams')),
                                ncolor,
                                len(matrix_element.get('processes')),
                                1)

        filename = pjoin(Ppath, 'mg.sym')
        self.write_mg_sym_file(writers.FortranWriter(filename),
                               matrix_element)

        filename = pjoin(Ppath, 'ncombs.inc')
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'ngraphs.inc')
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                len(mapconfigs))

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
            diagram_symmetry.find_symmetry(matrix_element)

        filename = pjoin(Ppath, 'symswap.inc')
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = pjoin(Ppath, 'symfact_orig.dat')
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # Generate diagrams (skipped when the 'noeps' output option is set)
        if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
            filename = pjoin(Ppath, "matrix.ps")
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                              get('diagrams'),
                                              filename,
                                              model=matrix_element.get('processes')[0].\
                                              get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        self.link_files_in_SubProcess(Ppath)

        #import nexternal/leshouche in Source
        ln(pjoin(Ppath, 'nexternal.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'leshouche.inc'), pjoin(self.dir_path, 'Source'), log=False)
        ln(pjoin(Ppath, 'maxamps.inc'), pjoin(self.dir_path, 'Source'), log=False)
        # Return to SubProcesses dir
        #os.chdir(os.path.pardir)

        # Add subprocess to subproc.mg
        filename = pjoin(path, 'subproc.mg')
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        #os.chdir(cwd)

        # Generate info page
        gen_infohtml.make_info_html(self.dir_path)

        if not calls:
            calls = 0
        return calls
3834 3835 link_Sub_files = ['addmothers.f', 3836 'cluster.f', 3837 'cluster.inc', 3838 'coupl.inc', 3839 'cuts.f', 3840 'cuts.inc', 3841 'genps.f', 3842 'genps.inc', 3843 'idenparts.f', 3844 'initcluster.f', 3845 'makefile', 3846 'message.inc', 3847 'myamp.f', 3848 'reweight.f', 3849 'run.inc', 3850 'maxconfigs.inc', 3851 'maxparticles.inc', 3852 'run_config.inc', 3853 'lhe_event_infos.inc', 3854 'setcuts.f', 3855 'setscales.f', 3856 'sudakov.inc', 3857 'symmetry.f', 3858 'unwgt.f', 3859 'dummy_fct.f' 3860 ] 3861 3875 3876
3877 - def finalize(self, matrix_elements, history, mg5options, flaglist):
3878 """Finalize ME v4 directory by creating jpeg diagrams, html 3879 pages,proc_card_mg5.dat and madevent.tar.gz.""" 3880 3881 if 'nojpeg' in flaglist: 3882 makejpg = False 3883 else: 3884 makejpg = True 3885 if 'online' in flaglist: 3886 online = True 3887 else: 3888 online = False 3889 3890 compiler = {'fortran': mg5options['fortran_compiler'], 3891 'cpp': mg5options['cpp_compiler'], 3892 'f2py': mg5options['f2py_compiler']} 3893 3894 # indicate that the output type is not grouped 3895 if not isinstance(self, ProcessExporterFortranMEGroup): 3896 self.proc_characteristic['grouped_matrix'] = False 3897 3898 self.proc_characteristic['complex_mass_scheme'] = mg5options['complex_mass_scheme'] 3899 3900 # set limitation linked to the model 3901 3902 3903 # indicate the PDG of all initial particle 3904 try: 3905 pdgs1 = [p.get_initial_pdg(1) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3906 pdgs2 = [p.get_initial_pdg(2) for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3907 except AttributeError: 3908 pdgs1 = [p.get_initial_pdg(1) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(1)] 3909 pdgs2 = [p.get_initial_pdg(2) for m in matrix_elements.get('matrix_elements') for p in m.get('processes') if p.get_initial_pdg(2)] 3910 self.proc_characteristic['pdg_initial1'] = pdgs1 3911 self.proc_characteristic['pdg_initial2'] = pdgs2 3912 3913 3914 modelname = self.opt['model'] 3915 if modelname == 'mssm' or modelname.startswith('mssm-'): 3916 param_card = pjoin(self.dir_path, 'Cards','param_card.dat') 3917 mg5_param = pjoin(self.dir_path, 'Source', 'MODEL', 'MG5_param.dat') 3918 check_param_card.convert_to_mg5card(param_card, mg5_param) 3919 check_param_card.check_valid_param_card(mg5_param) 3920 3921 # Add the combine_events.f modify param_card path/number of @X 3922 filename = 
pjoin(self.dir_path,'Source','combine_events.f') 3923 try: 3924 nb_proc =[p.get('id') for me in matrix_elements for m in me.get('matrix_elements') for p in m.get('processes')] 3925 except AttributeError: 3926 nb_proc =[p.get('id') for m in matrix_elements.get('matrix_elements') for p in m.get('processes')] 3927 nb_proc = len(set(nb_proc)) 3928 self.write_combine_events(writers.FortranWriter(filename), nb_proc) # already formatted 3929 # Write maxconfigs.inc based on max of ME's/subprocess groups 3930 filename = pjoin(self.dir_path,'Source','maxconfigs.inc') 3931 self.write_maxconfigs_file(writers.FortranWriter(filename), 3932 matrix_elements) 3933 3934 # Write maxparticles.inc based on max of ME's/subprocess groups 3935 filename = pjoin(self.dir_path,'Source','maxparticles.inc') 3936 self.write_maxparticles_file(writers.FortranWriter(filename), 3937 matrix_elements) 3938 3939 # Touch "done" file 3940 os.system('touch %s/done' % pjoin(self.dir_path,'SubProcesses')) 3941 3942 # Check for compiler 3943 self.set_compiler(compiler) 3944 self.set_cpp_compiler(compiler['cpp']) 3945 3946 3947 old_pos = os.getcwd() 3948 subpath = pjoin(self.dir_path, 'SubProcesses') 3949 3950 P_dir_list = [proc for proc in os.listdir(subpath) 3951 if os.path.isdir(pjoin(subpath,proc)) and proc[0] == 'P'] 3952 3953 devnull = os.open(os.devnull, os.O_RDWR) 3954 # Convert the poscript in jpg files (if authorize) 3955 if makejpg: 3956 try: 3957 os.remove(pjoin(self.dir_path,'HTML','card.jpg')) 3958 except Exception, error: 3959 pass 3960 3961 if misc.which('gs'): 3962 logger.info("Generate jpeg diagrams") 3963 for Pdir in P_dir_list: 3964 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_jpeg-pl')], 3965 stdout = devnull, cwd=pjoin(subpath, Pdir)) 3966 3967 logger.info("Generate web pages") 3968 # Create the WebPage using perl script 3969 3970 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], \ 3971 stdout = devnull,cwd=pjoin(self.dir_path)) 3972 3973 
#os.chdir(os.path.pardir) 3974 3975 obj = gen_infohtml.make_info_html(self.dir_path) 3976 3977 if online: 3978 nb_channel = obj.rep_rule['nb_gen_diag'] 3979 open(pjoin(self.dir_path, 'Online'),'w').write(str(nb_channel)) 3980 #add the information to proc_charac 3981 self.proc_characteristic['nb_channel'] = obj.rep_rule['nb_gen_diag'] 3982 3983 # Write command history as proc_card_mg5 3984 if os.path.isdir(pjoin(self.dir_path,'Cards')): 3985 output_file = pjoin(self.dir_path,'Cards', 'proc_card_mg5.dat') 3986 history.write(output_file) 3987 3988 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 3989 stdout = devnull) 3990 3991 #crate the proc_characteristic file 3992 self.create_proc_charac(matrix_elements, history) 3993 3994 # create the run_card 3995 ProcessExporterFortran.finalize(self, matrix_elements, history, mg5options, flaglist) 3996 3997 # Run "make" to generate madevent.tar.gz file 3998 if os.path.exists(pjoin(self.dir_path,'SubProcesses', 'subproc.mg')): 3999 if os.path.exists(pjoin(self.dir_path,'madevent.tar.gz')): 4000 os.remove(pjoin(self.dir_path,'madevent.tar.gz')) 4001 misc.call([os.path.join(self.dir_path, 'bin', 'internal', 'make_madevent_tar')], 4002 stdout = devnull, cwd=self.dir_path) 4003 4004 misc.call([pjoin(self.dir_path, 'bin', 'internal', 'gen_cardhtml-pl')], 4005 stdout = devnull, cwd=self.dir_path)
4006 4007 4008 4009 4010 4011 4012 #return to the initial dir 4013 #os.chdir(old_pos) 4014 4015 #=========================================================================== 4016 # write_matrix_element_v4 4017 #===========================================================================
def write_matrix_element_v4(self, writer, matrix_element, fortran_model,
                            proc_id = "", config_map = [], subproc_number = ""):
    """Export a matrix element to a matrix.f file in MG4 madevent format.

    Builds a substitution dictionary (helas calls, helicity/color data,
    split-order bookkeeping, fake-width guards, ...) and applies it to the
    matrix-file template.  When `writer` is given the result is written out
    and (n_helas_calls, ncolor) is returned; otherwise the raw replacement
    dictionary is returned for the caller to use.
    NOTE(review): `config_map` defaults to a shared mutable list; it appears
    to be only passed through to get_amp2_lines here — confirm it is never
    mutated by that callee.
    """

    # Nothing to write for an empty matrix element.
    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    if writer:
        if not isinstance(writer, writers.FortranWriter):
            raise writers.FortranWriter.FortranWriterError(\
                "writer not FortranWriter")
        # Set lowercase/uppercase Fortran code
        writers.FortranWriter.downcase = False

    # check if MLM/.../ is supported for this matrix-element and update associate flag
    if self.model and 'MLM' in self.model["limitations"]:
        if 'MLM' not in self.proc_characteristic["limitations"]:
            used_couplings = matrix_element.get_used_couplings(output="set")
            for vertex in self.model.get('interactions'):
                particles = [p for p in vertex.get('particles')]
                # Only vertices involving a gluon (pdg 21) are inspected.
                if 21 in [p.get('pdg_code') for p in particles]:
                    colors = [par.get('color') for par in particles]
                    if 1 in colors:
                        continue
                    elif 'QCD' not in vertex.get('orders'):
                        # A colored gluon vertex without a QCD order whose
                        # coupling is actually used breaks MLM matching.
                        for bad_coup in vertex.get('couplings').values():
                            if bad_coup in used_couplings:
                                self.proc_characteristic["limitations"].append('MLM')
                                break

    # The proc prefix is not used for MadEvent output so it can safely be set
    # to an empty string.
    replace_dict = {'proc_prefix':''}

    # Extract helas calls
    helas_calls = fortran_model.get_matrix_element_calls(\
                matrix_element)

    replace_dict['helas_calls'] = "\n".join(helas_calls)

    #adding the support for the fake width (forbidding too small width)
    mass_width = matrix_element.get_all_mass_widths()
    width_list = set([e[1] for e in mass_width])

    # One declaration + one SAVE per distinct width symbol; tuple(width_list)
    # is evaluated twice on the same set, so both lines use the same order.
    replace_dict['fake_width_declaration'] = \
        (' double precision fk_%s \n' * len(width_list)) % tuple(width_list)
    replace_dict['fake_width_declaration'] += \
        (' save fk_%s \n' * len(width_list)) % tuple(width_list)
    fk_w_defs = []
    one_def = ' fk_%(w)s = SIGN(MAX(ABS(%(w)s), ABS(%(m)s*small_width_treatment)), %(w)s)'
    for m, w in mass_width:
        if w == 'zero':
            # 'zero' needs a single trivial definition, added only once.
            if ' fk_zero = 0d0' not in fk_w_defs:
                fk_w_defs.append(' fk_zero = 0d0')
            continue
        fk_w_defs.append(one_def %{'m':m, 'w':w})
    replace_dict['fake_width_definitions'] = '\n'.join(fk_w_defs)

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id

    # Extract ncomb (number of helicity combinations)
    ncomb = matrix_element.get_helicity_combinations()
    replace_dict['ncomb'] = ncomb

    # Extract helicity lines
    helicity_lines = self.get_helicity_lines(matrix_element)
    replace_dict['helicity_lines'] = helicity_lines

    # Extract IC line
    ic_line = self.get_ic_line(matrix_element)
    replace_dict['ic_line'] = ic_line

    # Extract overall denominator
    # Averaging initial state color, spin, and identical FS particles
    den_factor_line = self.get_den_factor_line(matrix_element)
    replace_dict['den_factor_line'] = den_factor_line

    # Extract ngraphs
    ngraphs = matrix_element.get_number_of_amplitudes()
    replace_dict['ngraphs'] = ngraphs

    # Extract ndiags
    ndiags = len(matrix_element.get('diagrams'))
    replace_dict['ndiags'] = ndiags

    # Set define_iconfigs_lines
    replace_dict['define_iconfigs_lines'] = \
         """INTEGER MAPCONFIG(0:LMAXCONFIGS), ICONFIG
         COMMON/TO_MCONFIGS/MAPCONFIG, ICONFIG"""

    if proc_id:
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_iconfigs_lines'] += \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(SUBDIAG(%s))/XTOT" % \
                                        proc_id
    else:
        # Standard running
        # Set set_amp2_line
        replace_dict['set_amp2_line'] = "ANS=ANS*AMP2(MAPCONFIG(ICONFIG))/XTOT"

    # Extract nwavefuncs
    nwavefuncs = matrix_element.get_number_of_wavefunctions()
    replace_dict['nwavefuncs'] = nwavefuncs

    # Extract ncolor (at least 1 even for an empty color basis)
    ncolor = max(1, len(matrix_element.get('color_basis')))
    replace_dict['ncolor'] = ncolor

    # Extract color data lines
    color_data_lines = self.get_color_data_lines(matrix_element)
    replace_dict['color_data_lines'] = "\n".join(color_data_lines)

    # Set the size of Wavefunction: spin 4/5 particles (or an unknown
    # model) need the larger, 18-component representation.
    if not self.model or any([p.get('spin') in [4,5] for p in self.model.get('particles') if p]):
        replace_dict['wavefunctionsize'] = 18
    else:
        replace_dict['wavefunctionsize'] = 6

    # Extract amp2 lines
    amp2_lines = self.get_amp2_lines(matrix_element, config_map)
    replace_dict['amp2_lines'] = '\n'.join(amp2_lines)

    # The JAMP definition depends on the splitting order
    split_orders=matrix_element.get('processes')[0].get('split_orders')
    if len(split_orders)>0:
        squared_orders, amp_orders = matrix_element.get_split_orders_mapping()
        replace_dict['chosen_so_configs']=self.set_chosen_SO_index(
                          matrix_element.get('processes')[0],squared_orders)
    else:
        # Consider the output of a dummy order 'ALL_ORDERS' for which we
        # set all amplitude order to weight 1 and only one squared order
        # contribution which is of course ALL_ORDERS=2.
        squared_orders = [(2,),]
        amp_orders = [((1,),tuple(range(1,ngraphs+1)))]
        replace_dict['chosen_so_configs'] = '.TRUE.'

    replace_dict['nAmpSplitOrders']=len(amp_orders)
    replace_dict['nSqAmpSplitOrders']=len(squared_orders)
    replace_dict['split_order_str_list']=str(split_orders)
    replace_dict['nSplitOrders']=max(len(split_orders),1)
    amp_so = self.get_split_orders_lines(
        [amp_order[0] for amp_order in amp_orders],'AMPSPLITORDERS')
    sqamp_so = self.get_split_orders_lines(squared_orders,'SQSPLITORDERS')
    replace_dict['ampsplitorders']='\n'.join(amp_so)
    replace_dict['sqsplitorders']='\n'.join(sqamp_so)

    # Extract JAMP lines
    # If no split_orders then artificiall add one entry called 'ALL_ORDERS'
    jamp_lines = self.get_JAMP_lines_split_order(\
        matrix_element,amp_orders,split_order_names=
        split_orders if len(split_orders)>0 else ['ALL_ORDERS'])
    replace_dict['jamp_lines'] = '\n'.join(jamp_lines)

    replace_dict['template_file'] = pjoin(_file_path, \
                     'iolibs/template_files/%s' % self.matrix_file)
    replace_dict['template_file2'] = pjoin(_file_path, \
                     'iolibs/template_files/split_orders_helping_functions.inc')

    # Spin multiplicities of the two initial states.
    s1,s2 = matrix_element.get_spin_state_initial()
    replace_dict['nb_spin_state1'] = s1
    replace_dict['nb_spin_state2'] = s2

    if writer:
        file = open(replace_dict['template_file']).read()
        file = file % replace_dict
        # Add the split orders helper functions.
        file = file + '\n' + open(replace_dict['template_file2'])\
                               .read()%replace_dict
        # Write the file
        writer.writelines(file)
        # Return the count of real (non-comment) helas calls and ncolor.
        return len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor
    else:
        replace_dict['return_value'] = (len(filter(lambda call: call.find('#') != 0, helas_calls)), ncolor)
        return replace_dict
4210 4211 #=========================================================================== 4212 # write_auto_dsig_file 4213 #===========================================================================
def write_auto_dsig_file(self, writer, matrix_element, proc_id = ""):
    """Write the auto_dsig.f file for the differential cross section
    calculation, includes pdf call information.

    Also updates self.proc_characteristic (ninitial, nexternal, MLM
    matching data and CKKWl colored pdgs) as a side effect.  Returns
    (replace_dict, context) when no writer is given.
    """

    # Nothing to write for an empty matrix element.
    if not matrix_element.get('processes') or \
           not matrix_element.get('diagrams'):
        return 0

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    self.proc_characteristic['ninitial'] = ninitial
    # Keep the largest nexternal seen across all exported subprocesses.
    self.proc_characteristic['nexternal'] = max(self.proc_characteristic['nexternal'], nexternal)

    # Add information relevant for MLM matching:
    # Maximum QCD power in all the contributions
    max_qcd_order = 0
    for diag in matrix_element.get('diagrams'):
        orders = diag.calculate_orders()
        if 'QCD' in orders:
            max_qcd_order = max(max_qcd_order,orders['QCD'])
    # Count massless colored final-state particles per process; keep the max.
    max_n_light_final_partons = max(len([1 for id in proc.get_final_ids()
        if proc.get('model').get_particle(id).get('mass')=='ZERO' and
           proc.get('model').get_particle(id).get('color')>1])
                                    for proc in matrix_element.get('processes'))
    # Maximum number of final state light jets to be matched
    self.proc_characteristic['max_n_matched_jets'] = max(
        self.proc_characteristic['max_n_matched_jets'],
        min(max_qcd_order,max_n_light_final_partons))

    # List of default pdgs to be considered for the CKKWl merging cut
    self.proc_characteristic['colored_pdgs'] = \
        sorted(list(set([abs(p.get('pdg_code')) for p in
          matrix_element.get('processes')[0].get('model').get('particles') if
          p.get('color')>1])))

    # Python 2 raise syntax, consistent with the rest of this module.
    if ninitial < 1 or ninitial > 2:
        raise writers.FortranWriter.FortranWriterError, \
              """Need ninitial = 1 or 2 to write auto_dsig file"""

    replace_dict = {}

    # Extract version number and date from VERSION file
    info_lines = self.get_mg5_info_lines()
    replace_dict['info_lines'] = info_lines

    # Extract process info lines
    process_lines = self.get_process_info_lines(matrix_element)
    replace_dict['process_lines'] = process_lines

    # Set proc_id
    replace_dict['proc_id'] = proc_id
    replace_dict['numproc'] = 1

    # Set dsig_line
    if ninitial == 1:
        # No conversion, since result of decay should be given in GeV
        dsig_line = "pd(0)*dsiguu"
    else:
        # Convert result (in GeV) to pb
        dsig_line = "pd(0)*conv*dsiguu"

    replace_dict['dsig_line'] = dsig_line

    # Extract pdf lines
    pdf_vars, pdf_data, pdf_lines = \
        self.get_pdf_lines(matrix_element, ninitial, proc_id != "")
    replace_dict['pdf_vars'] = pdf_vars
    replace_dict['pdf_data'] = pdf_data
    replace_dict['pdf_lines'] = pdf_lines

    # Lines that differ between subprocess group and regular
    if proc_id:
        replace_dict['numproc'] = int(proc_id)
        replace_dict['passcuts_begin'] = ""
        replace_dict['passcuts_end'] = ""
        # Set lines for subprocess group version
        # Set define_iconfigs_lines
        replace_dict['define_subdiag_lines'] = \
             """\nINTEGER SUBDIAG(MAXSPROC),IB(2)
             COMMON/TO_SUB_DIAG/SUBDIAG,IB"""
        replace_dict['cutsdone'] = ""
    else:
        replace_dict['passcuts_begin'] = "IF (PASSCUTS(PP)) THEN"
        replace_dict['passcuts_end'] = "ENDIF"
        replace_dict['define_subdiag_lines'] = ""
        replace_dict['cutsdone'] = "      cutsdone=.false.\n       cutspassed=.false."

    # Grouped output provides its own good-helicity I/O routines elsewhere.
    if not isinstance(self, ProcessExporterFortranMEGroup):
        ncomb=matrix_element.get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)
    else:
        replace_dict['read_write_good_hel'] = ""

    context = {'read_write_good_hel':True}

    if writer:
        file = open(pjoin(_file_path, \
                     'iolibs/template_files/auto_dsig_v4.inc')).read()
        file = file % replace_dict

        # Write the file
        writer.writelines(file, context=context)
    else:
        return replace_dict, context
4317 #=========================================================================== 4318 # write_coloramps_file 4319 #===========================================================================
def write_coloramps_file(self, writer, mapconfigs, matrix_element):
    """Create the coloramps.inc file for MadEvent: a logical icolamp array
    declaration sized (color flows, configs, 1) followed by its data."""

    # At least one color flow even when the color basis is empty.
    n_color_flows = max(len(matrix_element.get('color_basis').keys()), 1)
    declaration = "logical icolamp(%d,%d,1)" % (n_color_flows,
                                                len(mapconfigs))
    body = self.get_icolamp_lines(mapconfigs, matrix_element, 1)

    writer.writelines([declaration] + body)

    return True
4333 4334 #=========================================================================== 4335 # write_colors_file 4336 #===========================================================================
def write_colors_file(self, writer, matrix_elements):
    """Write the get_color.f file for MadEvent, which returns color
    for all particles used in the matrix element.

    Generates a Fortran function mapping a pdg code to its color
    representation, covering every pdg appearing either as an internal
    wavefunction or as an external leg (with decays), plus the dummy
    multiparticle-vertex code.
    """

    # Accept a single matrix element as well as a list of them.
    if isinstance(matrix_elements, helas_objects.HelasMatrixElement):
        matrix_elements = [matrix_elements]

    model = matrix_elements[0].get('processes')[0].get('model')

    # We need the both particle and antiparticle wf_ids, since the identity
    # depends on the direction of the wf.
    wf_ids = set(sum([sum([sum([[wf.get_pdg_code(),wf.get_anti_pdg_code()] \
                                for wf in d.get('wavefunctions')],[]) \
                           for d in me.get('diagrams')], []) \
                      for me in matrix_elements], []))

    leg_ids = set(sum([sum([sum([[l.get('id'),
                                  model.get_particle(l.get('id')).get_anti_pdg_code()] \
                                 for l in p.get_legs_with_decays()], []) \
                            for p in me.get('processes')], []) \
                       for me in matrix_elements], []))
    particle_ids = sorted(list(wf_ids.union(leg_ids)))

    # First branch of the if/else-if chain, for the smallest pdg code.
    lines = """function get_color(ipdg)
    implicit none
    integer get_color, ipdg

    if(ipdg.eq.%d)then
    get_color=%d
    return
    """ % (particle_ids[0], model.get_particle(particle_ids[0]).get_color())

    # One else-if branch per remaining pdg code.
    for part_id in particle_ids[1:]:
        lines += """else if(ipdg.eq.%d)then
    get_color=%d
    return
    """ % (part_id, model.get_particle(part_id).get_color())
    # Dummy particle for multiparticle vertices with pdg given by
    # first code not in the model
    lines += """else if(ipdg.eq.%d)then
c   This is dummy particle used in multiparticle vertices
    get_color=2
    return
    """ % model.get_first_non_pdg()
    # Fallback branch: unknown pdg prints an error and returns color 0.
    lines += """else
    write(*,*)'Error: No color given for pdg ',ipdg
    get_color=0
    return
    endif
    end
    """

    # Write the file
    writer.writelines(lines)

    return True
4393 4394 #=========================================================================== 4395 # write_config_nqcd_file 4396 #===========================================================================
def write_config_nqcd_file(self, writer, nqcd_list):
    """Write config_nqcd.inc, recording the number of QCD couplings
    for every configuration (1-based Fortran indexing)."""

    entries = ["data nqcd(%d)/%d/" % (position + 1, n_qcd)
               for position, n_qcd in enumerate(nqcd_list)]

    writer.writelines(entries)

    return True
4409 4410 #=========================================================================== 4411 # write_maxconfigs_file 4412 #===========================================================================
def write_maxconfigs_file(self, writer, matrix_elements):
    """Write maxconfigs.inc, declaring lmaxconfigs as the largest number
    of configurations over all matrix elements / subprocess groups."""

    # A HelasMultiProcess carries its matrix elements internally;
    # otherwise we already have the list.
    if isinstance(matrix_elements, helas_objects.HelasMultiProcess):
        me_list = matrix_elements.get('matrix_elements')
    else:
        me_list = matrix_elements
    maxconfigs = max(me.get_num_configs() for me in me_list)

    writer.writelines("integer lmaxconfigs\n"
                      "parameter(lmaxconfigs=%d)" % maxconfigs)

    return True
4429 4430 #=========================================================================== 4431 # read_write_good_hel 4432 #===========================================================================
def read_write_good_hel(self, ncomb):
    """Return Fortran source implementing read/write/init access to the
    GOODHEL common block, sized for ncomb helicity combinations."""

    # The same NCOMB value is substituted into all three routines.
    return """
    subroutine write_good_hel(stream_id)
    implicit none
    integer stream_id
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    common/BLOCK_GOODHEL/NTRY,GOODHEL
    write(stream_id,*) GOODHEL
    return
    end


    subroutine read_good_hel(stream_id)
    implicit none
    include 'genps.inc'
    integer stream_id
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    common/BLOCK_GOODHEL/NTRY,GOODHEL
    read(stream_id,*) GOODHEL
    NTRY = MAXTRIES + 1
    return
    end

    subroutine init_good_hel()
    implicit none
    INTEGER NCOMB
    PARAMETER ( NCOMB=%(ncomb)d)
    LOGICAL GOODHEL(NCOMB)
    INTEGER NTRY
    INTEGER I

    do i=1,NCOMB
    GOODHEL(I) = .false.
    enddo
    NTRY = 0
    end

    integer function get_maxsproc()
    implicit none
    get_maxsproc = 1
    return
    end

    """ % {'ncomb': ncomb}
4488 4489 #=========================================================================== 4490 # write_config_subproc_map_file 4491 #===========================================================================
def write_config_subproc_map_file(self, writer, s_and_t_channels):
    """Write a dummy config_subproc.inc for MadEvent: every config is
    mapped to subprocess 1."""

    entries = ["DATA CONFSUB(1,%d)/1/" % (conf_index + 1)
               for conf_index in range(len(s_and_t_channels))]

    writer.writelines(entries)

    return True
4505 4506 #=========================================================================== 4507 # write_configs_file 4508 #===========================================================================
def write_configs_file(self, writer, matrix_element):
    """Write the configs.inc file for MadEvent.

    Every diagram becomes its own config (1-based numbering); the actual
    file content is produced by write_configs_file_from_diagrams.
    Returns (mapconfigs, (s_and_t_channels, nqcd_list))."""

    nexternal, ninitial = matrix_element.get_nexternal_ninitial()
    model = matrix_element.get('processes')[0].get('model')

    diagrams = matrix_element.get('diagrams')
    # Config i corresponds to diagram i (1-based).
    mapconfigs = list(range(1, len(diagrams) + 1))
    # One-element diagram list per config (single-subprocess case).
    wrapped_diagrams = [[diagram] for diagram in diagrams]

    return mapconfigs, self.write_configs_file_from_diagrams(
        writer, wrapped_diagrams, mapconfigs, nexternal, ninitial, model)
4524 4525 #=========================================================================== 4526 # write_run_configs_file 4527 #===========================================================================
def write_run_config_file(self, writer):
    """Write the run_configs.inc file for MadEvent from its template.

    Returns True after writing, or the substitution dictionary when no
    writer is supplied."""

    template_path = pjoin(_file_path, 'iolibs', 'template_files',
                          'madevent_run_config.inc')

    # Loop-induced processes get one channel per job, others five.
    jobs_per_channel = 1 if self.proc_characteristic['loop_induced'] else 5
    replace_dict = {'chanperjob': jobs_per_channel}

    if not writer:
        return replace_dict
    writer.write(open(template_path).read() % replace_dict)
    return True
4544 4545 #=========================================================================== 4546 # write_configs_file_from_diagrams 4547 #===========================================================================
def write_configs_file_from_diagrams(self, writer, configs, mapconfigs,
                                     nexternal, ninitial, model):
    """Write the actual configs.inc file.

    configs is the diagrams corresponding to configs (each
    diagrams is a list of corresponding diagrams for all
    subprocesses, with None if there is no corresponding diagrams
    for a given process).
    mapconfigs gives the diagram number for each config.

    For s-channels, we need to output one PDG for each subprocess in
    the subprocess group, in order to be able to pick the right
    one for multiprocesses.

    Returns (s_and_t_channels, nqcd_list) for the configs actually
    written (configs with vertices above the minimal multiplicity are
    skipped)."""

    lines = []

    s_and_t_channels = []

    # Number of QCD couplings per written config, parallel to the output.
    nqcd_list = []

    # Smallest maximal vertex multiplicity over all configs; configs with
    # larger vertices are dropped below (only 3-vertices in configs.inc).
    vert_list = [max([d for d in config if d][0].get_vertex_leg_numbers()) \
                 for config in configs if [d for d in config if d][0].\
                 get_vertex_leg_numbers()!=[]]
    minvert = min(vert_list) if vert_list!=[] else 0

    # Number of subprocesses
    nsubprocs = len(configs[0])

    nconfigs = 0

    # pdg code used for dummy propagators not present in the model.
    new_pdg = model.get_first_non_pdg()

    for iconfig, helas_diags in enumerate(configs):
        if any([vert > minvert for vert in
                [d for d in helas_diags if d][0].get_vertex_leg_numbers()]):
            # Only 3-vertices allowed in configs.inc
            continue
        nconfigs += 1

        # Need s- and t-channels for all subprocesses, including
        # those that don't contribute to this config
        empty_verts = []
        stchannels = []
        for h in helas_diags:
            if h:
                # get_s_and_t_channels gives vertices starting from
                # final state external particles and working inwards
                stchannels.append(h.get('amplitudes')[0].\
                                  get_s_and_t_channels(ninitial, model,
                                                       new_pdg))
            else:
                # Placeholder shared list, resized once real sizes are known.
                stchannels.append((empty_verts, None))

        # For t-channels, just need the first non-empty one
        tchannels = [t for s,t in stchannels if t != None][0]

        # For s_and_t_channels (to be used later) use only first config
        s_and_t_channels.append([[s for s,t in stchannels if t != None][0],
                                 tchannels])

        # Make sure empty_verts is same length as real vertices
        if any([s for s,t in stchannels]):
            empty_verts[:] = [None]*max([len(s) for s,t in stchannels])

            # Reorganize s-channel vertices to get a list of all
            # subprocesses for each vertex
            schannels = zip(*[s for s,t in stchannels])
        else:
            schannels = []

        allchannels = schannels
        if len(tchannels) > 1:
            # Write out tchannels only if there are any non-trivial ones
            allchannels = schannels + tchannels

        # Write out propagators for s-channel and t-channel vertices

        lines.append("# Diagram %d" % (mapconfigs[iconfig]))
        # Correspondance between the config and the diagram = amp2
        lines.append("data mapconfig(%d)/%d/" % (nconfigs,
                                                 mapconfigs[iconfig]))
        # Number of QCD couplings in this diagram, taken from the first
        # contributing subprocess diagram (0 if no QCD order present).
        nqcd = 0
        for h in helas_diags:
            if h:
                try:
                    nqcd = h.calculate_orders()['QCD']
                except KeyError:
                    pass
                break
            else:
                continue

        nqcd_list.append(nqcd)

        for verts in allchannels:
            if verts in schannels:
                # s-channel entry: one vertex per subprocess, pick the
                # first non-None as representative.
                vert = [v for v in verts if v][0]
            else:
                vert = verts
            daughters = [leg.get('number') for leg in vert.get('legs')[:-1]]
            last_leg = vert.get('legs')[-1]
            lines.append("data (iforest(i,%d,%d),i=1,%d)/%s/" % \
                         (last_leg.get('number'), nconfigs, len(daughters),
                          ",".join([str(d) for d in daughters])))
            if verts in schannels:
                # Output the propagator pdg for every subprocess (0 where
                # the subprocess has no corresponding diagram).
                pdgs = []
                for v in verts:
                    if v:
                        pdgs.append(v.get('legs')[-1].get('id'))
                    else:
                        pdgs.append(0)
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join([str(d) for d in pdgs])))
                lines.append("data tprid(%d,%d)/0/" % \
                             (last_leg.get('number'), nconfigs))
            elif verts in tchannels[:-1]:
                lines.append("data tprid(%d,%d)/%d/" % \
                             (last_leg.get('number'), nconfigs,
                              abs(last_leg.get('id'))))
                lines.append("data (sprop(i,%d,%d),i=1,%d)/%s/" % \
                             (last_leg.get('number'), nconfigs, nsubprocs,
                              ",".join(['0'] * nsubprocs)))

    # Write out number of configs
    lines.append("# Number of configs")
    lines.append("data mapconfig(0)/%d/" % nconfigs)

    # Write the file
    writer.writelines(lines)

    return s_and_t_channels, nqcd_list
4681 4682 #=========================================================================== 4683 # write_decayBW_file 4684 #===========================================================================
def write_decayBW_file(self, writer, s_and_t_channels):
    """Write the decayBW.inc file for MadEvent: one gForceBW entry per
    s-channel propagator, encoding its onshell flag."""

    # Encoding used by the Fortran side: unset / forced-on / forced-off.
    onshell_code = {None: "0", True: "1", False: "2"}

    lines = []
    for conf_number, config in enumerate(s_and_t_channels, start=1):
        # config[0] holds the s-channel vertices of this configuration.
        for vertex in config[0]:
            # The produced (last) leg carries the onshell information.
            produced_leg = vertex.get('legs')[-1]
            lines.append("data gForceBW(%d,%d)/%s/" %
                         (produced_leg.get('number'), conf_number,
                          onshell_code[produced_leg.get('onshell')]))

    writer.writelines(lines)

    return True
4706 4707 #=========================================================================== 4708 # write_dname_file 4709 #===========================================================================
def write_dname_file(self, writer, dir_name):
    """Write the dname.mg file, which records the subprocess directory
    name for the MG4 machinery."""

    writer.write("DIRNAME=%s\n" % dir_name)

    return True
4719 4720 #=========================================================================== 4721 # write_driver 4722 #===========================================================================
def write_driver(self, writer, ncomb, n_grouped_proc, v5=True):
    """Write the SubProcess/driver.f file for MG4 from its template.

    Returns True after writing, or the substitution dictionary when no
    writer is supplied."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_driver.f')

    # MSSM runs read the converted MG5_param.dat instead of param_card.dat.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card_name = 'Source/MODEL/MG5_param.dat'
    else:
        card_name = 'param_card.dat'

    # Requiring each helicity configuration to be probed by 10 points for
    # matrix element before using the resulting grid for MC over helicity
    # sampling.  We multiply this by 2 because each grouped subprocess is
    # called at most twice for each IMIRROR.
    replace_dict = {
        'param_card_name': card_name,
        'ncomb': ncomb,
        'hel_init_points': n_grouped_proc * 10 * 2,
        'secondparam': '' if v5 else ',.true.',
    }

    if not writer:
        return replace_dict
    writer.write(open(template).read() % replace_dict)
    return True
4751 4752 #=========================================================================== 4753 # write_addmothers 4754 #===========================================================================
def write_addmothers(self, writer):
    """Write SubProcess/addmothers.f from its template, wiring the
    iconfig placeholder to the diag_number variable."""

    template = pjoin(_file_path, 'iolibs', 'template_files', 'addmothers.f')
    writer.write(open(template).read() % {'iconfig': 'diag_number'})

    return True
4764 4765 4766 #=========================================================================== 4767 # write_combine_events 4768 #===========================================================================
def write_combine_events(self, writer, nb_proc=100):
    """Write the Source/combine_events.f file for MG4, sizing maxpup to
    the number of @X process classes."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_combine_events.f')

    # MSSM runs read the converted MG5_param.dat instead of param_card.dat.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card_name = 'Source/MODEL/MG5_param.dat'
    else:
        card_name = 'param_card.dat'

    # maxpup = number of @X in the process card, +1 purely as a safety
    # margin (per the original author's note).
    writer.write(open(template).read() % {'param_card_name': card_name,
                                          'maxpup': nb_proc + 1})

    return True
4786 4787 4788 #=========================================================================== 4789 # write_symmetry 4790 #===========================================================================
def write_symmetry(self, writer, v5=True):
    """Write the SubProcess symmetry code for ME from the
    madevent_symmetry.f template.

    Returns True after writing, or the substitution dictionary when no
    writer is supplied."""

    template = pjoin(_file_path, 'iolibs', 'template_files',
                     'madevent_symmetry.f')

    # MSSM runs read the converted MG5_param.dat instead of param_card.dat.
    if self.model_name == 'mssm' or self.model_name.startswith('mssm-'):
        card_name = 'Source/MODEL/MG5_param.dat'
    else:
        card_name = 'param_card.dat'

    replace_dict = {'param_card_name': card_name,
                    'setparasecondarg': '' if v5 else ',.true.'}

    if not writer:
        return replace_dict
    writer.write(open(template).read() % replace_dict)
    return True
4813 4814 4815 4816 #=========================================================================== 4817 # write_iproc_file 4818 #===========================================================================
def write_iproc_file(self, writer, me_number):
    """Write the iproc.dat file for MG4, containing the 1-based
    matrix-element number."""

    content = "%d" % (me_number + 1)

    # writer.write_line may reformat/split the line; emit every piece.
    for formatted_line in writer.write_line(content):
        writer.write(formatted_line)
    return True
4827 4828 #=========================================================================== 4829 # write_mg_sym_file 4830 #===========================================================================
def write_mg_sym_file(self, writer, matrix_element):
    """Write the mg.sym file for MadEvent.

    mg.sym lists groups of identical final-state particles: first the
    number of groups, then for each group its size followed by the
    (1-based, decay-included) leg positions of its members.
    """

    lines = []

    # Extract process with all decays included.
    process = matrix_element.get('processes')[0]
    final_legs = [leg for leg in process.get_legs_with_decays()
                  if leg.get('state') == True]

    ninitial = len([leg for leg in process.get('legs')
                    if leg.get('state') == False])

    identical_indices = {}

    # Extract identical particle info: group the 1-based positions of the
    # final-state legs (offset past the initial legs) by particle id.
    for i, leg in enumerate(final_legs):
        identical_indices.setdefault(leg.get('id'), []).append(
            i + ninitial + 1)

    # Remove keys which have only one particle.
    # Snapshot the keys first: deleting from a dict while iterating its
    # live key view raises RuntimeError on Python 3 (on Python 2 .keys()
    # was already a list, so this is behavior-identical there).
    for key in list(identical_indices.keys()):
        if len(identical_indices[key]) < 2:
            del identical_indices[key]

    # Write mg.sym file: group count, then per group its size and members.
    lines.append(str(len(identical_indices.keys())))
    for key in identical_indices.keys():
        lines.append(str(len(identical_indices[key])))
        for number in identical_indices[key]:
            lines.append(str(number))

    # Write the file
    writer.writelines(lines)

    return True
4869 4870 #=========================================================================== 4871 # write_mg_sym_file 4872 #===========================================================================
4873 - def write_default_mg_sym_file(self, writer):
4874 """Write the mg.sym file for MadEvent.""" 4875 4876 lines = "0" 4877 4878 # Write the file 4879 writer.writelines(lines) 4880 4881 return True
4882 4883 #=========================================================================== 4884 # write_ncombs_file 4885 #===========================================================================
4886 - def write_ncombs_file(self, writer, nexternal):
4887 """Write the ncombs.inc file for MadEvent.""" 4888 4889 # ncomb (used for clustering) is 2^nexternal 4890 file = " integer n_max_cl\n" 4891 file = file + "parameter (n_max_cl=%d)" % (2 ** nexternal) 4892 4893 # Write the file 4894 writer.writelines(file) 4895 4896 return True
4897 4898 #=========================================================================== 4899 # write_processes_file 4900 #===========================================================================
4901 - def write_processes_file(self, writer, subproc_group):
4902 """Write the processes.dat file with info about the subprocesses 4903 in this group.""" 4904 4905 lines = [] 4906 4907 for ime, me in \ 4908 enumerate(subproc_group.get('matrix_elements')): 4909 lines.append("%s %s" % (str(ime+1) + " " * (7-len(str(ime+1))), 4910 ",".join(p.base_string() for p in \ 4911 me.get('processes')))) 4912 if me.get('has_mirror_process'): 4913 mirror_procs = [copy.copy(p) for p in me.get('processes')] 4914 for proc in mirror_procs: 4915 legs = copy.copy(proc.get('legs_with_decays')) 4916 legs.insert(0, legs.pop(1)) 4917 proc.set("legs_with_decays", legs) 4918 lines.append("mirror %s" % ",".join(p.base_string() for p in \ 4919 mirror_procs)) 4920 else: 4921 lines.append("mirror none") 4922 4923 # Write the file 4924 writer.write("\n".join(lines)) 4925 4926 return True
4927 4928 #=========================================================================== 4929 # write_symswap_file 4930 #===========================================================================
4931 - def write_symswap_file(self, writer, ident_perms):
4932 """Write the file symswap.inc for MG4 by comparing diagrams using 4933 the internal matrix element value functionality.""" 4934 4935 lines = [] 4936 4937 # Write out lines for symswap.inc file (used to permute the 4938 # external leg momenta 4939 for iperm, perm in enumerate(ident_perms): 4940 lines.append("data (isym(i,%d),i=1,nexternal)/%s/" % \ 4941 (iperm+1, ",".join([str(i+1) for i in perm]))) 4942 lines.append("data nsym/%d/" % len(ident_perms)) 4943 4944 # Write the file 4945 writer.writelines(lines) 4946 4947 return True
4948 4949 #=========================================================================== 4950 # write_symfact_file 4951 #===========================================================================
4952 - def write_symfact_file(self, writer, symmetry):
4953 """Write the files symfact.dat for MG4 by comparing diagrams using 4954 the internal matrix element value functionality.""" 4955 4956 pos = max(2, int(math.ceil(math.log10(len(symmetry))))) 4957 form = "%"+str(pos)+"r %"+str(pos+1)+"r" 4958 # Write out lines for symswap.inc file (used to permute the 4959 # external leg momenta 4960 lines = [ form %(i+1, s) for i,s in enumerate(symmetry) if s != 0] 4961 # Write the file 4962 writer.write('\n'.join(lines)) 4963 writer.write('\n') 4964 4965 return True
4966 4967 #=========================================================================== 4968 # write_symperms_file 4969 #===========================================================================
4970 - def write_symperms_file(self, writer, perms):
4971 """Write the symperms.inc file for subprocess group, used for 4972 symmetric configurations""" 4973 4974 lines = [] 4975 for iperm, perm in enumerate(perms): 4976 lines.append("data (perms(i,%d),i=1,nexternal)/%s/" % \ 4977 (iperm+1, ",".join([str(i+1) for i in perm]))) 4978 4979 # Write the file 4980 writer.writelines(lines) 4981 4982 return True
4983 4984 #=========================================================================== 4985 # write_subproc 4986 #===========================================================================
4987 - def write_subproc(self, writer, subprocdir):
4988 """Append this subprocess to the subproc.mg file for MG4""" 4989 4990 # Write line to file 4991 writer.write(subprocdir + "\n") 4992 4993 return True
4994

#===============================================================================
# ProcessExporterFortranMEGroup
#===============================================================================
class ProcessExporterFortranMEGroup(ProcessExporterFortranME):
    """Class to take care of exporting a set of matrix elements to
    MadEvent subprocess group format."""

    # .inc template used when writing the per-subprocess matrix<N>.f files
    matrix_file = "matrix_madevent_group_v4.inc"
    # Tag identifying the grouped output mode to the exporter machinery
    grouped_mode = 'madevent'
    #===========================================================================
    # generate_subprocess_directory
    #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                         fortran_model,
                                         group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files.

        Returns the total number of helas calls, or 0 if the directory
        could not be entered.  NOTE: this method changes the process
        working directory while it runs and restores it at the end.
        """

        assert isinstance(subproc_group, group_subprocs.SubProcessGroup), \
                                      "subproc_group object not SubProcessGroup"

        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        cwd = os.getcwd()
        path = pjoin(self.dir_path, 'SubProcesses')

        os.chdir(path)
        pathdir = os.getcwd()

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(subprocdir)
        except os.error as error:
            # Directory probably exists already: warn and reuse it
            logger.warning(error.strerror + " " + subprocdir)

        try:
            os.chdir(subprocdir)
        except os.error:
            logger.error('Could not cd to directory %s' % subprocdir)
            return 0

        logger.info('Creating files in directory %s' % subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        # Add the driver.f; all grouped MEs must share the same number of
        # helicity configurations
        ncomb = matrix_elements[0].get_helicity_combinations()
        for me in matrix_elements[1:]:
            if ncomb!=me.get_helicity_combinations():
                raise MadGraph5Error, "All grouped processes must share the "+\
                                       "same number of helicity configurations."

        filename = 'driver.f'
        self.write_driver(writers.FortranWriter(filename),ncomb,
                          n_grouped_proc=len(matrix_elements), v5=self.opt['v5_model'])

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = 'matrix%d.f' % (ime+1)
            calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                                matrix_element,
                                fortran_model,
                                proc_id=str(ime+1),
                                config_map=subproc_group.get('diagram_maps')[ime],
                                subproc_number=group_number)

            filename = 'auto_dsig%d.f' % (ime+1)
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams unless the 'noeps' output option is 'True'
            if not 'noeps' in self.opt['output_options'] or self.opt['output_options']['noeps'] != 'True':
                filename = "matrix%d.ps" % (ime+1)
                plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                  get('diagrams'),
                                                  filename,
                                                  model = \
                                                  matrix_element.get('processes')[0].\
                                                  get('model'),
                                                  amplitude=True)
                logger.info("Generating Feynman diagrams for " + \
                            matrix_element.get('processes')[0].nice_string())
                plot.draw()

        # Extract number of external particles
        # NOTE(review): matrix_element here is the loop variable left over
        # from the loop above, i.e. the LAST matrix element of the group
        # (all MEs of a group share nexternal) -- same below for pmass.inc
        # and props.inc.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0
        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = 'auto_dsig.f'
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = 'coloramps.inc'
        self.write_coloramps_file(writers.FortranWriter(filename),
                                  subproc_diagrams_for_config,
                                  maxflows,
                                  matrix_elements)

        filename = 'get_color.f'
        self.write_colors_file(writers.FortranWriter(filename),
                               matrix_elements)

        filename = 'config_subproc_map.inc'
        self.write_config_subproc_map_file(writers.FortranWriter(filename),
                                           subproc_diagrams_for_config)

        filename = 'configs.inc'
        nconfigs, (s_and_t_channels, nqcd_list) = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = 'config_nqcd.inc'
        self.write_config_nqcd_file(writers.FortranWriter(filename),
                                    nqcd_list)

        filename = 'decayBW.inc'
        self.write_decayBW_file(writers.FortranWriter(filename),
                                s_and_t_channels)

        filename = 'dname.mg'
        self.write_dname_file(writers.FortranWriter(filename),
                              subprocdir)

        filename = 'iproc.dat'
        self.write_iproc_file(writers.FortranWriter(filename),
                              group_number)

        filename = 'leshouche.inc'
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = 'maxamps.inc'
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        # Note that mg.sym is not relevant for this case
        filename = 'mg.sym'
        self.write_default_mg_sym_file(writers.FortranWriter(filename))

        filename = 'mirrorprocs.inc'
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = 'ncombs.inc'
        self.write_ncombs_file(writers.FortranWriter(filename),
                               nexternal)

        filename = 'nexternal.inc'
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = 'ngraphs.inc'
        self.write_ngraphs_file(writers.FortranWriter(filename),
                                nconfigs)

        filename = 'pmass.inc'
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = 'props.inc'
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        filename = 'processes.dat'
        files.write_to_file(filename,
                            self.write_processes_file,
                            subproc_group)

        # Find config symmetries and permutations
        symmetry, perms, ident_perms = \
                  diagram_symmetry.find_symmetry(subproc_group)

        filename = 'symswap.inc'
        self.write_symswap_file(writers.FortranWriter(filename),
                                ident_perms)

        filename = 'symfact_orig.dat'
        self.write_symfact_file(open(filename, 'w'), symmetry)

        # check consistency: diagrams identified as identical must carry
        # the same QCD power (negative sym_fact points to the partner)
        for i, sym_fact in enumerate(symmetry):

            if sym_fact >= 0:
                continue
            if nqcd_list[i] != nqcd_list[abs(sym_fact)-1]:
                # NOTE(review): "powwer" typo kept as-is (runtime string)
                raise Exception, "identical diagram with different QCD powwer"



        filename = 'symperms.inc'
        self.write_symperms_file(writers.FortranWriter(filename),
                                 perms)

        # Generate jpgs -> pass in make_html
        #os.system(pjoin('..', '..', 'bin', 'gen_jpeg-pl'))

        self.link_files_in_SubProcess(pjoin(pathdir,subprocdir))

        #import nexternal/leshouche in Source
        ln('nexternal.inc', '../../Source', log=False)
        ln('leshouche.inc', '../../Source', log=False)
        ln('maxamps.inc', '../../Source', log=False)

        # Return to SubProcesses dir
        os.chdir(pathdir)

        # Add subprocess to subproc.mg
        filename = 'subproc.mg'
        files.append_to_file(filename,
                             self.write_subproc,
                             subprocdir)

        # Return to original dir
        os.chdir(cwd)

        if not tot_calls:
            tot_calls = 0
        return tot_calls
5243 5244 #=========================================================================== 5245 # write_super_auto_dsig_file 5246 #===========================================================================
    def write_super_auto_dsig_file(self, writer, subproc_group):
        """Write the auto_dsig.f file selecting between the subprocesses
        in subprocess group mode.

        Fills a substitution dictionary and applies it to the
        super_auto_dsig_group_v4.inc template.  If writer is None the
        dictionary is returned instead of being written out.
        """

        replace_dict = {}

        # Extract version number and date from VERSION file
        info_lines = self.get_mg5_info_lines()
        replace_dict['info_lines'] = info_lines

        matrix_elements = subproc_group.get('matrix_elements')

        # Extract process info lines
        process_lines = '\n'.join([self.get_process_info_lines(me) for me in \
                                   matrix_elements])
        replace_dict['process_lines'] = process_lines

        # All MEs of a group share the same number of external particles
        nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial()
        replace_dict['nexternal'] = nexternal

        # Factor 2 accounts for the mirror processes
        replace_dict['nsprocs'] = 2*len(matrix_elements)

        # Generate dsig definition line
        dsig_def_line = "DOUBLE PRECISION " + \
                        ",".join(["DSIG%d" % (iproc + 1) for iproc in \
                                  range(len(matrix_elements))])
        replace_dict["dsig_def_line"] = dsig_def_line

        # Generate dsig process lines: dispatch on IPROC to the matching
        # per-subprocess DSIG<N> function
        call_dsig_proc_lines = []
        for iproc in range(len(matrix_elements)):
            call_dsig_proc_lines.append(\
                "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \
                {"num": iproc + 1,
                 "proc": matrix_elements[iproc].get('processes')[0].base_string()})
        replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines)

        # Fortran helpers to save/restore the good-helicity common block
        ncomb=matrix_elements[0].get_helicity_combinations()
        replace_dict['read_write_good_hel'] = self.read_write_good_hel(ncomb)

        # Number of spin states of the two initial-state particles
        s1,s2 = matrix_elements[0].get_spin_state_initial()
        replace_dict['nb_spin_state1'] = s1
        replace_dict['nb_spin_state2'] = s2

        if writer:
            # NOTE(review): file handle not explicitly closed (relies on
            # CPython refcounting)
            file = open(pjoin(_file_path, \
                        'iolibs/template_files/super_auto_dsig_group_v4.inc')).read()
            file = file % replace_dict

            # Write the file
            writer.writelines(file)
        else:
            return replace_dict
5300 5301 #=========================================================================== 5302 # write_mirrorprocs 5303 #===========================================================================
5304 - def write_mirrorprocs(self, writer, subproc_group):
5305 """Write the mirrorprocs.inc file determining which processes have 5306 IS mirror process in subprocess group mode.""" 5307 5308 lines = [] 5309 bool_dict = {True: '.true.', False: '.false.'} 5310 matrix_elements = subproc_group.get('matrix_elements') 5311 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 5312 (len(matrix_elements), 5313 ",".join([bool_dict[me.get('has_mirror_process')] for \ 5314 me in matrix_elements]))) 5315 # Write the file 5316 writer.writelines(lines)
5317 5318 #=========================================================================== 5319 # write_addmothers 5320 #===========================================================================
5321 - def write_addmothers(self, writer):
5322 """Write the SubProcess/addmothers.f""" 5323 5324 path = pjoin(_file_path,'iolibs','template_files','addmothers.f') 5325 5326 text = open(path).read() % {'iconfig': 'lconfig'} 5327 writer.write(text) 5328 5329 return True
5330 5331 5332 #=========================================================================== 5333 # write_coloramps_file 5334 #===========================================================================
5335 - def write_coloramps_file(self, writer, diagrams_for_config, maxflows, 5336 matrix_elements):
5337 """Write the coloramps.inc file for MadEvent in Subprocess group mode""" 5338 5339 # Create a map from subprocess (matrix element) to a list of 5340 # the diagrams corresponding to each config 5341 5342 lines = [] 5343 5344 subproc_to_confdiag = {} 5345 for config in diagrams_for_config: 5346 for subproc, diag in enumerate(config): 5347 try: 5348 subproc_to_confdiag[subproc].append(diag) 5349 except KeyError: 5350 subproc_to_confdiag[subproc] = [diag] 5351 5352 for subproc in sorted(subproc_to_confdiag.keys()): 5353 lines.extend(self.get_icolamp_lines(subproc_to_confdiag[subproc], 5354 matrix_elements[subproc], 5355 subproc + 1)) 5356 5357 lines.insert(0, "logical icolamp(%d,%d,%d)" % \ 5358 (maxflows, 5359 len(diagrams_for_config), 5360 len(matrix_elements))) 5361 5362 # Write the file 5363 writer.writelines(lines) 5364 5365 return True
5366 5367 #=========================================================================== 5368 # write_config_subproc_map_file 5369 #===========================================================================
5370 - def write_config_subproc_map_file(self, writer, config_subproc_map):
5371 """Write the config_subproc_map.inc file for subprocess groups""" 5372 5373 lines = [] 5374 # Output only configs that have some corresponding diagrams 5375 iconfig = 0 5376 for config in config_subproc_map: 5377 if set(config) == set([0]): 5378 continue 5379 lines.append("DATA (CONFSUB(i,%d),i=1,%d)/%s/" % \ 5380 (iconfig + 1, len(config), 5381 ",".join([str(i) for i in config]))) 5382 iconfig += 1 5383 # Write the file 5384 writer.writelines(lines) 5385 5386 return True
5387 5388 #=========================================================================== 5389 # read_write_good_hel 5390 #===========================================================================
5391 - def read_write_good_hel(self, ncomb):
5392 """return the code to read/write the good_hel common_block""" 5393 5394 convert = {'ncomb' : ncomb} 5395 5396 output = """ 5397 subroutine write_good_hel(stream_id) 5398 implicit none 5399 integer stream_id 5400 INTEGER NCOMB 5401 PARAMETER ( NCOMB=%(ncomb)d) 5402 LOGICAL GOODHEL(NCOMB, 2) 5403 INTEGER NTRY(2) 5404 common/BLOCK_GOODHEL/NTRY,GOODHEL 5405 write(stream_id,*) GOODHEL 5406 return 5407 end 5408 5409 5410 subroutine read_good_hel(stream_id) 5411 implicit none 5412 include 'genps.inc' 5413 integer stream_id 5414 INTEGER NCOMB 5415 PARAMETER ( NCOMB=%(ncomb)d) 5416 LOGICAL GOODHEL(NCOMB, 2) 5417 INTEGER NTRY(2) 5418 common/BLOCK_GOODHEL/NTRY,GOODHEL 5419 read(stream_id,*) GOODHEL 5420 NTRY(1) = MAXTRIES + 1 5421 NTRY(2) = MAXTRIES + 1 5422 return 5423 end 5424 5425 subroutine init_good_hel() 5426 implicit none 5427 INTEGER NCOMB 5428 PARAMETER ( NCOMB=%(ncomb)d) 5429 LOGICAL GOODHEL(NCOMB, 2) 5430 INTEGER NTRY(2) 5431 INTEGER I 5432 5433 do i=1,NCOMB 5434 GOODHEL(I,1) = .false. 5435 GOODHEL(I,2) = .false. 5436 enddo 5437 NTRY(1) = 0 5438 NTRY(2) = 0 5439 end 5440 5441 integer function get_maxsproc() 5442 implicit none 5443 include 'maxamps.inc' 5444 5445 get_maxsproc = maxsproc 5446 return 5447 end 5448 5449 """ % convert 5450 5451 return output
5452 5453 5454 5455 #=========================================================================== 5456 # write_configs_file 5457 #===========================================================================
5458 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
5459 """Write the configs.inc file with topology information for a 5460 subprocess group. Use the first subprocess with a diagram for each 5461 configuration.""" 5462 5463 matrix_elements = subproc_group.get('matrix_elements') 5464 model = matrix_elements[0].get('processes')[0].get('model') 5465 5466 diagrams = [] 5467 config_numbers = [] 5468 for iconfig, config in enumerate(diagrams_for_config): 5469 # Check if any diagrams correspond to this config 5470 if set(config) == set([0]): 5471 continue 5472 subproc_diags = [] 5473 for s,d in enumerate(config): 5474 if d: 5475 subproc_diags.append(matrix_elements[s].\ 5476 get('diagrams')[d-1]) 5477 else: 5478 subproc_diags.append(None) 5479 diagrams.append(subproc_diags) 5480 config_numbers.append(iconfig + 1) 5481 5482 # Extract number of external particles 5483 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 5484 5485 return len(diagrams), \ 5486 self.write_configs_file_from_diagrams(writer, diagrams, 5487 config_numbers, 5488 nexternal, ninitial, 5489 model)

    #===========================================================================
    # write_run_config_file
    #===========================================================================
5494 - def write_run_config_file(self, writer):
5495 """Write the run_configs.inc file for MadEvent""" 5496 5497 path = pjoin(_file_path,'iolibs','template_files','madevent_run_config.inc') 5498 if self.proc_characteristic['loop_induced']: 5499 job_per_chan = 1 5500 else: 5501 job_per_chan = 2 5502 text = open(path).read() % {'chanperjob':job_per_chan} 5503 writer.write(text) 5504 return True
5505 5506 5507 #=========================================================================== 5508 # write_leshouche_file 5509 #===========================================================================
5510 - def write_leshouche_file(self, writer, subproc_group):
5511 """Write the leshouche.inc file for MG4""" 5512 5513 all_lines = [] 5514 5515 for iproc, matrix_element in \ 5516 enumerate(subproc_group.get('matrix_elements')): 5517 all_lines.extend(self.get_leshouche_lines(matrix_element, 5518 iproc)) 5519 # Write the file 5520 writer.writelines(all_lines) 5521 return True
5522 5523
    def finalize(self,*args, **opts):
        """Run the parent finalization, then record that this output was
        produced with grouped matrix elements."""

        super(ProcessExporterFortranMEGroup, self).finalize(*args, **opts)
        # ensure that the grouping information is on the correct value
        self.proc_characteristic['grouped_matrix'] = True
5529 5530 5531 #=============================================================================== 5532 # UFO_model_to_mg4 5533 #=============================================================================== 5534 5535 python_to_fortran = lambda x: parsers.UFOExpressionParserFortran().parse(x)

class UFO_model_to_mg4(object):
    """ A converter of the UFO-MG5 Model to the MG4 format """

    # The list below shows the only variables the user is allowed to change by
    # himself for each PS point. If he changes any other, then calling
    # UPDATE_AS_PARAM() (or equivalently MP_UPDATE_AS_PARAM()) will not
    # correctly account for the change.
    PS_dependent_key = ['aS','MU_R']
    # Fortran types used for the multiple-precision (quadruple) output
    mp_complex_format = 'complex*32'
    mp_real_format = 'real*16'
    # Warning, it is crucial none of the couplings/parameters of the model
    # starts with this prefix. I should add a check for this.
    # You can change it as the global variable to check_param_card.ParamCard
    mp_prefix = check_param_card.ParamCard.mp_prefix
5552 - def __init__(self, model, output_path, opt=None):
5553 """ initialization of the objects """ 5554 5555 self.model = model 5556 self.model_name = model['name'] 5557 self.dir_path = output_path 5558 5559 self.opt = {'complex_mass': False, 'export_format': 'madevent', 'mp':True, 5560 'loop_induced': False} 5561 if opt: 5562 self.opt.update(opt) 5563 5564 self.coups_dep = [] # (name, expression, type) 5565 self.coups_indep = [] # (name, expression, type) 5566 self.params_dep = [] # (name, expression, type) 5567 self.params_indep = [] # (name, expression, type) 5568 self.params_ext = [] # external parameter 5569 self.p_to_f = parsers.UFOExpressionParserFortran(self.model) 5570 self.mp_p_to_f = parsers.UFOExpressionParserMPFortran(self.model)
5571
5573 """modify the parameter if some of them are identical up to the case""" 5574 5575 lower_dict={} 5576 duplicate = set() 5577 keys = self.model['parameters'].keys() 5578 for key in keys: 5579 for param in self.model['parameters'][key]: 5580 lower_name = param.name.lower() 5581 if not lower_name: 5582 continue 5583 try: 5584 lower_dict[lower_name].append(param) 5585 except KeyError,error: 5586 lower_dict[lower_name] = [param] 5587 else: 5588 duplicate.add(lower_name) 5589 logger.debug('%s is define both as lower case and upper case.' 5590 % lower_name) 5591 if not duplicate: 5592 return 5593 5594 re_expr = r'''\b(%s)\b''' 5595 to_change = [] 5596 change={} 5597 for value in duplicate: 5598 for i, var in enumerate(lower_dict[value]): 5599 to_change.append(var.name) 5600 new_name = '%s%s' % (var.name.lower(), 5601 ('__%d'%(i+1) if i>0 else '')) 5602 change[var.name] = new_name 5603 var.name = new_name 5604 5605 # Apply the modification to the map_CTcoup_CTparam of the model 5606 # if it has one (giving for each coupling the CT parameters whcih 5607 # are necessary and which should be exported to the model. 
5608 if hasattr(self.model,'map_CTcoup_CTparam'): 5609 for coup, ctparams in self.model.map_CTcoup_CTparam: 5610 for i, ctparam in enumerate(ctparams): 5611 try: 5612 self.model.map_CTcoup_CTparam[coup][i] = change[ctparam] 5613 except KeyError: 5614 pass 5615 5616 replace = lambda match_pattern: change[match_pattern.groups()[0]] 5617 rep_pattern = re.compile(re_expr % '|'.join(to_change)) 5618 5619 # change parameters 5620 for key in keys: 5621 if key == ('external',): 5622 continue 5623 for param in self.model['parameters'][key]: 5624 param.expr = rep_pattern.sub(replace, param.expr) 5625 5626 # change couplings 5627 for key in self.model['couplings'].keys(): 5628 for coup in self.model['couplings'][key]: 5629 coup.expr = rep_pattern.sub(replace, coup.expr) 5630 5631 # change mass/width 5632 for part in self.model['particles']: 5633 if str(part.get('mass')) in to_change: 5634 part.set('mass', rep_pattern.sub(replace, str(part.get('mass')))) 5635 if str(part.get('width')) in to_change: 5636 part.set('width', rep_pattern.sub(replace, str(part.get('width'))))
5637
    def refactorize(self, wanted_couplings = []):
        """modify the couplings to fit with MG4 convention

        Splits parameters into external / alphaS-dependent / independent
        lists and couplings into alphaS-dependent / independent lists,
        optionally keeping only the wanted couplings.  Append order
        matters: it fixes the order in the generated Fortran files.
        """

        # Keep only separation in alphaS; sorting keys by length keeps
        # short (less derived) parameter groups first
        keys = self.model['parameters'].keys()
        keys.sort(key=len)
        for key in keys:
            to_add = [o for o in self.model['parameters'][key] if o.name]

            if key == ('external',):
                self.params_ext += to_add
            elif any([(k in key) for k in self.PS_dependent_key]):
                self.params_dep += to_add
            else:
                self.params_indep += to_add
        # same for couplings
        # NOTE(review): the sorted keys list below is not actually used --
        # the loop iterates .items() in dict order instead; confirm whether
        # the couplings were meant to be processed in sorted order as well
        keys = self.model['couplings'].keys()
        keys.sort(key=len)
        for key, coup_list in self.model['couplings'].items():
            if any([(k in key) for k in self.PS_dependent_key]):
                self.coups_dep += [c for c in coup_list if
                                   (not wanted_couplings or c.name in \
                                    wanted_couplings)]
            else:
                self.coups_indep += [c for c in coup_list if
                                     (not wanted_couplings or c.name in \
                                      wanted_couplings)]

        # MG4 use G and not aS as it basic object for alphas related computation
        #Pass G in the independant list
        # NOTE(review): the 'G' membership test compares model variables
        # against a plain string -- presumably relies on ModelVariable
        # equality with its name; confirm
        if 'G' in self.params_dep:
            index = self.params_dep.index('G')
            G = self.params_dep.pop(index)
        #    G.expr = '2*cmath.sqrt(as*pi)'
        #    self.params_indep.insert(0, self.params_dep.pop(index))
        # No need to add it if not defined

        if 'aS' not in self.params_ext:
            logger.critical('aS not define as external parameter adding it!')
            #self.model['parameters']['aS'] = base_objects.ParamCardVariable('aS', 0.138,'DUMMY',(1,))
            self.params_indep.append( base_objects. ModelVariable('aS', '0.138','real'))
            self.params_indep.append( base_objects. ModelVariable('G', '4.1643','real'))
5680 - def build(self, wanted_couplings = [], full=True):
5681 """modify the couplings to fit with MG4 convention and creates all the 5682 different files""" 5683 5684 self.pass_parameter_to_case_insensitive() 5685 self.refactorize(wanted_couplings) 5686 5687 # write the files 5688 if full: 5689 if wanted_couplings: 5690 # extract the wanted ct parameters 5691 self.extract_needed_CTparam(wanted_couplings=wanted_couplings) 5692 self.write_all()
5693 5694
    def open(self, name, comment='c', format='default'):
        """ Open the file name in the correct directory and with a valid
        header.

        format='fortran' returns a FortranWriter, anything else a plain
        file object.  A three-line comment banner is written first.
        """

        file_path = pjoin(self.dir_path, name)

        if format == 'fortran':
            fsock = writers.FortranWriter(file_path, 'w')
        else:
            # 'open' here resolves to the builtin, not this method
            fsock = open(file_path, 'w')

        # Calling the raw ``file`` type's writelines explicitly writes the
        # banner verbatim, bypassing any writelines override of
        # FortranWriter (Python 2 only: ``file`` is a builtin type;
        # presumably FortranWriter subclasses it -- confirm)
        file.writelines(fsock, comment * 77 + '\n')
        file.writelines(fsock,'%(comment)s written by the UFO converter\n' % \
                        {'comment': comment + (6 - len(comment)) * ' '})
        file.writelines(fsock, comment * 77 + '\n\n')
        return fsock
5711 5712
    def write_all(self):
        """ write all the files

        Orchestrates the creation of every MG4 model file; the order of
        the calls below reflects their dependencies.
        """
        #write the part related to the external parameter
        self.create_ident_card()
        self.create_param_read()

        #write the definition of the parameter
        self.create_input()
        self.create_intparam_def(dp=True,mp=False)
        if self.opt['mp']:
            # quadruple-precision variant of the internal parameters
            self.create_intparam_def(dp=False,mp=True)

        # definition of the coupling.
        self.create_actualize_mp_ext_param_inc()
        self.create_coupl_inc()
        self.create_write_couplings()
        self.create_couplings()

        # the makefile
        self.create_makeinc()
        self.create_param_write()

        # The model functions
        self.create_model_functions_inc()
        self.create_model_functions_def()

        # The param_card.dat
        self.create_param_card()


        # All the standard files
        self.copy_standard_file()
5745 5746 ############################################################################ 5747 ## ROUTINE CREATING THE FILES ############################################ 5748 ############################################################################ 5749
    def copy_standard_file(self):
        """Copy the standard files for the fortran model.

        Copies the fixed library files, instantiates the rw_para.f
        template (includes and param-card loading depend on the export
        format), picks the right lha_read implementation and makefile.
        """

        #copy the library files
        file_to_link = ['formats.inc','printout.f', \
                        'rw_para.f', 'testprog.f']

        for filename in file_to_link:
            cp( MG5DIR + '/models/template_files/fortran/' + filename, \
                self.dir_path)

        # rw_para.f is a template: read it, fill it in below
        file = open(os.path.join(MG5DIR,\
                              'models/template_files/fortran/rw_para.f')).read()

        includes=["include \'coupl.inc\'","include \'input.inc\'",
                  "include \'model_functions.inc\'"]
        if self.opt['mp']:
            includes.extend(["include \'mp_coupl.inc\'","include \'mp_input.inc\'"])
        # In standalone and madloop we do no use the compiled param card but
        # still parse the .dat one so we must load it.
        if self.opt['loop_induced']:
            #loop induced follow MadEvent way to handle the card.
            load_card = ''
            lha_read_filename='lha_read.f'
        elif self.opt['export_format'] in ['madloop','madloop_optimized', 'madloop_matchbox']:
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read_mp.f'
        elif self.opt['export_format'].startswith('standalone') \
            or self.opt['export_format'] in ['madweight', 'plugin']\
            or self.opt['export_format'].startswith('matchbox'):
            load_card = 'call LHA_loadcard(param_name,npara,param,value)'
            lha_read_filename='lha_read.f'
        else:
            load_card = ''
            lha_read_filename='lha_read.f'
        # The chosen implementation is always installed as lha_read.f
        cp( MG5DIR + '/models/template_files/fortran/' + lha_read_filename, \
            os.path.join(self.dir_path,'lha_read.f'))

        file=file%{'includes':'\n '.join(includes),
                   'load_card':load_card}
        writer=open(os.path.join(self.dir_path,'rw_para.f'),'w')
        writer.writelines(file)
        writer.close()

        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_madevent',
                self.dir_path + '/makefile')
            if self.opt['export_format'] in ['FKS5_default', 'FKS5_optimized']:
                # aMC@NLO reuses the madevent makefile with a renamed target
                path = pjoin(self.dir_path, 'makefile')
                text = open(path).read()
                text = text.replace('madevent','aMCatNLO')
                open(path, 'w').writelines(text)
        elif self.opt['export_format'] in ['standalone', 'standalone_msP','standalone_msF',
                                  'madloop','madloop_optimized', 'standalone_rw',
                                  'madweight','matchbox','madloop_matchbox', 'plugin']:
            cp( MG5DIR + '/models/template_files/fortran/makefile_standalone',
                self.dir_path + '/makefile')
        #elif self.opt['export_format'] in []:
        #pass
        else:
            raise MadGraph5Error('Unknown format')
5812
def create_coupl_inc(self):
    """Write coupl.inc (and, when quad precision is on, mp_coupl.inc and
    mp_coupl_same_name.inc): Fortran common blocks declaring the strong/weak
    couplings, renormalisation scale, Nf, all non-zero masses and widths,
    every model coupling and - in complex-mass-scheme mode - the complex
    masses.
    """

    fsock = self.open('coupl.inc', format='fortran')
    if self.opt['mp']:
        # Two multiple-precision variants are produced: one with the
        # MP-prefixed names and one reusing the double-precision names.
        mp_fsock = self.open('mp_coupl.inc', format='fortran')
        mp_fsock_same_name = self.open('mp_coupl_same_name.inc',\
                                          format='fortran')

    # Write header: aS-derived G, the EW normalisation gal(2), the
    # renormalisation scale MU_R and the number of light flavours Nf.
    header = """double precision G
                common/strong/ G

                double complex gal(2)
                common/weak/ gal

                double precision MU_R
                common/rscale/ MU_R

                double precision Nf
                parameter(Nf=%d)
                """ % self.model.get_nflav()

    fsock.writelines(header)

    if self.opt['mp']:
        # Same header in multiple precision; formatted twice below, once
        # with the MP prefix and once without (same-name variant).
        header = """%(real_mp_format)s %(mp_prefix)sG
                common/MP_strong/ %(mp_prefix)sG

                %(complex_mp_format)s %(mp_prefix)sgal(2)
                common/MP_weak/ %(mp_prefix)sgal

                %(complex_mp_format)s %(mp_prefix)sMU_R
                common/MP_rscale/ %(mp_prefix)sMU_R

                """

        mp_fsock.writelines(header%{'real_mp_format':self.mp_real_format,
                                'complex_mp_format':self.mp_complex_format,
                                'mp_prefix':self.mp_prefix})
        mp_fsock_same_name.writelines(header%{'real_mp_format':self.mp_real_format,
                                'complex_mp_format':self.mp_complex_format,
                                'mp_prefix':''})

    # Collect the distinct mass/width symbols; 'zero' entries are dropped
    # since ZERO is declared as a parameter elsewhere.
    masses = set()
    widths = set()
    if self.opt['complex_mass']:
        complex_mass = set()

    for particle in self.model.get('particles'):
        #find masses
        one_mass = particle.get('mass')
        if one_mass.lower() != 'zero':
            masses.add(one_mass)

        # find width; a complex mass CMASS_<m> is only introduced for
        # particles that have both a non-zero mass and a non-zero width.
        one_width = particle.get('width')
        if one_width.lower() != 'zero':
            widths.add(one_width)
            if self.opt['complex_mass'] and one_mass.lower() != 'zero':
                complex_mass.add('CMASS_%s' % one_mass)

    if masses:
        fsock.writelines('double precision '+','.join(masses)+'\n')
        fsock.writelines('common/masses/ '+','.join(masses)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(masses)+'\n')
            mp_fsock_same_name.writelines('common/MP_masses/ '+\
                                              ','.join(masses)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+m for m in masses])+'\n')
            mp_fsock.writelines('common/MP_masses/ '+\
                      ','.join([self.mp_prefix+m for m in masses])+'\n\n')

    if widths:
        fsock.writelines('double precision '+','.join(widths)+'\n')
        fsock.writelines('common/widths/ '+','.join(widths)+'\n\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_real_format+' '+\
                                              ','.join(widths)+'\n')
            mp_fsock_same_name.writelines('common/MP_widths/ '+\
                                              ','.join(widths)+'\n\n')
            mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                                self.mp_prefix+w for w in widths])+'\n')
            mp_fsock.writelines('common/MP_widths/ '+\
                      ','.join([self.mp_prefix+w for w in widths])+'\n\n')

    # Write the Couplings (scale-dependent first, then independent).
    coupling_list = [coupl.name for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('double complex '+', '.join(coupling_list)+'\n')
    fsock.writelines('common/couplings/ '+', '.join(coupling_list)+'\n')
    if self.opt['mp']:
        mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                          ','.join(coupling_list)+'\n')
        mp_fsock_same_name.writelines('common/MP_couplings/ '+\
                                          ','.join(coupling_list)+'\n\n')
        mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+c for c in coupling_list])+'\n')
        mp_fsock.writelines('common/MP_couplings/ '+\
                  ','.join([self.mp_prefix+c for c in coupling_list])+'\n\n')

    # Write complex mass for complex mass scheme (if activated).
    # NOTE: complex_mass is only defined when opt['complex_mass'] is true;
    # the short-circuit below guarantees it is not read otherwise.
    if self.opt['complex_mass'] and complex_mass:
        fsock.writelines('double complex '+', '.join(complex_mass)+'\n')
        fsock.writelines('common/complex_mass/ '+', '.join(complex_mass)+'\n')
        if self.opt['mp']:
            mp_fsock_same_name.writelines(self.mp_complex_format+' '+\
                                              ','.join(complex_mass)+'\n')
            mp_fsock_same_name.writelines('common/MP_complex_mass/ '+\
                                              ','.join(complex_mass)+'\n\n')
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+cm for cm in complex_mass])+'\n')
            mp_fsock.writelines('common/MP_complex_mass/ '+\
                  ','.join([self.mp_prefix+cm for cm in complex_mass])+'\n\n')
def create_write_couplings(self):
    """Write coupl_write.inc: Fortran `write` statements printing every
    model coupling (scale-dependent couplings first, then the independent
    ones), preceded by a banner naming the model.
    """

    fsock = self.open('coupl_write.inc', format='fortran')

    fsock.writelines("""write(*,*) ' Couplings of %s'
                        write(*,*) ' ---------------------------------'
                        write(*,*) ' '""" % self.model_name)

    # Renamed from `format` so the builtin is not shadowed.
    def _fmt(coupl):
        """Return the Fortran write statement printing this coupling."""
        return 'write(*,2) \'%(name)s = \', %(name)s' % {'name': coupl.name}

    # Write the Couplings
    lines = [_fmt(coupl) for coupl in self.coups_dep + self.coups_indep]
    fsock.writelines('\n'.join(lines))
def create_input(self):
    """Create input.inc (and mp_input.inc in quad-precision mode) declaring
    the real and complex model parameters in common blocks params_R and
    params_C.  Masses, widths, G, MU_R and ZERO are excluded because they
    are declared elsewhere (coupl.inc); unneeded CT parameters are filtered
    out via check_needed_param.
    """

    fsock = self.open('input.inc', format='fortran')
    if self.opt['mp']:
        mp_fsock = self.open('mp_input.inc', format='fortran')

    # Masses/widths (and CMASS_* in complex-mass-scheme mode) are already
    # declared in coupl.inc, so they must not be re-declared here.
    already_def = set()
    for particle in self.model.get('particles'):
        already_def.add(particle.get('mass').lower())
        already_def.add(particle.get('width').lower())
        if self.opt['complex_mass']:
            already_def.add('cmass_%s' % particle.get('mass').lower())

    is_valid = lambda name: name.lower() not in ['g', 'mu_r', 'zero'] and \
                            name.lower() not in already_def

    # Internal real parameters first, then the external (card) ones.
    real_parameters = [param.name for param in self.params_dep +
                        self.params_indep if param.type == 'real'
                        and is_valid(param.name)]

    real_parameters += [param.name for param in self.params_ext
                        if param.type == 'real' and
                           is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    real_parameters = [param for param in real_parameters \
                           if self.check_needed_param(param)]

    fsock.writelines('double precision '+','.join(real_parameters)+'\n')
    fsock.writelines('common/params_R/ '+','.join(real_parameters)+'\n\n')
    if self.opt['mp']:
        mp_fsock.writelines(self.mp_real_format+' '+','.join([\
                          self.mp_prefix+p for p in real_parameters])+'\n')
        mp_fsock.writelines('common/MP_params_R/ '+','.join([\
                          self.mp_prefix+p for p in real_parameters])+'\n\n')

    complex_parameters = [param.name for param in self.params_dep +
                        self.params_indep if param.type == 'complex' and
                        is_valid(param.name)]

    # check the parameter is a CT parameter or not
    # if yes, just use the needed ones
    complex_parameters = [param for param in complex_parameters \
                           if self.check_needed_param(param)]

    # Unlike the real block, the complex block is omitted entirely when
    # the model has no complex parameters.
    if complex_parameters:
        fsock.writelines('double complex '+','.join(complex_parameters)+'\n')
        fsock.writelines('common/params_C/ '+','.join(complex_parameters)+'\n\n')
        if self.opt['mp']:
            mp_fsock.writelines(self.mp_complex_format+' '+','.join([\
                            self.mp_prefix+p for p in complex_parameters])+'\n')
            mp_fsock.writelines('common/MP_params_C/ '+','.join([\
                            self.mp_prefix+p for p in complex_parameters])+'\n\n')
def check_needed_param(self, param):
    """Tell whether the parameter *param* is needed for this computation.

    Every non-CT parameter is needed.  A CT parameter is needed only when
    it belongs to the list of CT parameters actually used; the 'conjg__'
    shorthand naming the conjugate of a complex parameter is recognised
    as well.
    """

    # Leading-order model, or NLO model that employs no CT parameter:
    # only CT parameters are ever filtered, so everything is needed.
    if not hasattr(self, 'allCTparameters') or \
       self.allCTparameters is None or self.usedCTparameters is None or \
       len(self.allCTparameters) == 0:
        return True

    # Accept the conjugate shorthand: test both the raw (lowercased) name
    # and the name with its first 'conjg__' stripped.  Even if a genuine
    # parameter name contained 'conjg__', the worst case is exporting a
    # parameter that was not strictly needed.
    lowered = param.lower()
    candidates = (lowered, lowered.replace('conjg__', '', 1))

    # A name matching no CT parameter at all is always needed.
    if all(name not in self.allCTparameters for name in candidates):
        return True

    # It is a CT parameter: keep it only if it is actually used.
    return any(name in self.usedCTparameters for name in candidates)
def extract_needed_CTparam(self, wanted_couplings=()):
    """Record which CT parameters are needed given *wanted_couplings*.

    Sets ``self.allCTparameters`` (every CT parameter appearing in any CT
    coupling) and ``self.usedCTparameters`` (those reachable from the
    wanted couplings), both lowercased.  When the model carries no
    CT-coupling map or no coupling is wanted, both attributes are set to
    None, which disables the filtering performed by check_needed_param.

    The default for wanted_couplings is an empty tuple rather than the
    former mutable list (same falsy behaviour, no shared-state pitfall).
    """

    if not hasattr(self.model, 'map_CTcoup_CTparam') or not wanted_couplings:
        # Setting these lists to None will disable the filtering in
        # check_needed_param.
        self.allCTparameters = None
        self.usedCTparameters = None
        return

    # All CT parameters appearing in all CT couplings.
    allCTparameters = self.model.map_CTcoup_CTparam.values()
    self.allCTparameters = list(
        set(itertools.chain.from_iterable(allCTparameters)))

    # All used CT couplings (case-insensitive match, O(1) lookups).
    w_coupls = set(coupl.lower() for coupl in wanted_couplings)
    allUsedCTCouplings = [coupl for coupl in
        self.model.map_CTcoup_CTparam.keys() if coupl.lower() in w_coupls]

    # CT parameters actually reachable from the used couplings.
    self.usedCTparameters = list(
        set(itertools.chain.from_iterable(
            self.model.map_CTcoup_CTparam[coupl]
            for coupl in allUsedCTCouplings)))

    # Now at last, make these lists case insensitive.
    self.allCTparameters = [ct.lower() for ct in self.allCTparameters]
    self.usedCTparameters = [ct.lower() for ct in self.usedCTparameters]
def create_intparam_def(self, dp=True, mp=False):
    """ create intparam_definition.inc setting the internal parameters.
    Output the double precision and/or the multiple precision parameters
    depending on the parameters dp and mp. If mp only, then the file names
    get the 'mp_' prefix.
    """

    fsock = self.open('%sintparam_definition.inc'%
                         ('mp_' if mp and not dp else ''), format='fortran')

    # Scale-independent parameters: recomputed only when the param card
    # is (re)read, guarded by the READLHA flag.
    fsock.write_comments(\
            "Parameters that should not be recomputed event by event.\n")
    fsock.writelines("if(readlha) then\n")
    if dp:
        fsock.writelines("G = 2 * DSQRT(AS*PI) ! for the first init\n")
    if mp:
        fsock.writelines("MP__G = 2 * SQRT(MP__AS*MP__PI) ! for the first init\n")

    for param in self.params_indep:
        # ZERO is declared as a Fortran parameter, never assigned.
        if param.name == 'ZERO':
            continue
        # Skip CT parameters that are not needed for this computation.
        if not self.check_needed_param(param.name):
            continue
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                        self.p_to_f.parse(param.expr)))
        if mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    fsock.writelines('endif')

    # Scale-dependent parameters: recomputed for every event.
    fsock.write_comments('\nParameters that should be recomputed at an event by event basis.\n')
    if dp:
        fsock.writelines("aS = G**2/4/pi\n")
    if mp:
        fsock.writelines("MP__aS = MP__G**2/4/MP__PI\n")
    for param in self.params_dep:
        # Skip CT parameters that are not needed for this computation.
        if not self.check_needed_param(param.name):
            continue
        # NOTE(review): unlike the independent-parameter loop above, this
        # uses `elif mp`, so the mp assignment is emitted only when
        # dp=False (i.e. in the separate mp_ file) - presumably deliberate
        # since mp dependent parameters live in mp_intparam_definition.inc;
        # confirm before changing.
        if dp:
            fsock.writelines("%s = %s\n" % (param.name,
                                        self.p_to_f.parse(param.expr)))
        elif mp:
            fsock.writelines("%s%s = %s\n" % (self.mp_prefix, param.name,
                                        self.mp_p_to_f.parse(param.expr)))

    # EW coupling used when writing out aqed: alpha scheme if aEWM1 is
    # external, Gmu scheme if Gf is external, otherwise a dummy value.
    fsock.write_comments("\nDefinition of the EW coupling used in the write out of aqed\n")
    if ('aEWM1',) in self.model['parameters']:
        if dp:
            # 3.5449... = 2*sqrt(pi); gal(1) = sqrt(4*pi*aEW).
            fsock.writelines(""" gal(1) = 3.5449077018110318d0 / DSQRT(aEWM1)
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2 * SQRT(MP__PI/MP__aEWM1)
                                 %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
        pass
    # in Gmu scheme, aEWM1 is not external but Gf is an external variable
    elif ('Gf',) in self.model['parameters']:
        if dp:
            # 2.3784... = 2*2**0.25; standard Gmu-scheme relation.
            fsock.writelines(""" gal(1) = 2.378414230005442133435d0*MDL_MW*DSQRT(1D0-MDL_MW**2/MDL_MZ**2)*DSQRT(MDL_Gf)
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 2*MP__MDL_MW*SQRT(1e0_16-MP__MDL_MW**2/MP__MDL_MZ**2)*SQRT(SQRT(2e0_16)*MP__MDL_Gf)
                                 %(mp_prefix)sgal(2) = 1d0
                             """ %{'mp_prefix':self.mp_prefix})
        pass
    else:
        if dp:
            # Message text kept verbatim (including '$RED' colour marker
            # and typos) - it is runtime output.
            logger.warning('$RED aEWM1 and Gf not define in MODEL. AQED will not be written correcty in LHE FILE')
            fsock.writelines(""" gal(1) = 1d0
                                 gal(2) = 1d0
                             """)
        elif mp:
            fsock.writelines(""" %(mp_prefix)sgal(1) = 1e0_16
                                 %(mp_prefix)sgal(2) = 1e0_16
                             """%{'mp_prefix':self.mp_prefix})
def create_couplings(self):
    """Generate couplings.f plus every couplingsX.f chunk file (and the
    mp_couplingsX.f variants when quad precision is enabled)."""

    chunk = 25  # couplings written per generated file

    self.create_couplings_main(chunk)
    n_indep_files = 1 + len(self.coups_indep) // chunk
    n_dep_files = 1 + len(self.coups_dep) // chunk

    # Scale-independent couplings: double and multiple precision are
    # computed together in the same subroutine.
    for idx in range(n_indep_files):
        chunk_data = self.coups_indep[chunk * idx:chunk * (idx + 1)]
        self.create_couplings_part(idx + 1, chunk_data, dp=True,
                                   mp=self.opt['mp'])

    # Scale-dependent couplings: double and multiple precision are
    # computed in separate subroutines (numbered after the independent
    # ones).
    for idx in range(n_dep_files):
        chunk_data = self.coups_dep[chunk * idx:chunk * (idx + 1)]
        self.create_couplings_part(idx + 1 + n_indep_files, chunk_data,
                                   dp=True, mp=False)
        if self.opt['mp']:
            self.create_couplings_part(idx + 1 + n_indep_files, chunk_data,
                                       dp=False, mp=True)
def create_couplings_main(self, nb_def_by_file=25):
    """Create couplings.f: the driver subroutines coup() (full init after
    reading the card), update_as_param() (refresh only the scale-dependent
    couplings), update_as_param2(mu_r2, as2) (set MU_R/G/AS then refresh)
    and, in quad-precision mode, mp_update_as_param().

    nb_def_by_file must match the chunk size used by create_couplings so
    the generated `call coupN()` statements line up with the emitted
    couplingsN.f files.
    """

    fsock = self.open('couplings.f', format='fortran')

    # --- coup(): full initialisation, READLHA = .true. ---
    fsock.writelines("""subroutine coup()

                        implicit none
                        double precision PI, ZERO
                        logical READLHA
                        parameter (PI=3.141592653589793d0)
                        parameter (ZERO=0d0)
                        include \'model_functions.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'
                        READLHA = .true.
                        include \'intparam_definition.inc\'""")
    if self.opt['mp']:
        fsock.writelines("""include \'mp_intparam_definition.inc\'\n""")

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    # Independent coupling chunks are numbered 1..nb_coup_indep.
    fsock.writelines('\n'.join(\
                ['call coup%s()' % (i + 1) for i in range(nb_coup_indep)]))

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    # Dependent chunks follow, numbered after the independent ones.
    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    if self.opt['mp']:
        fsock.writelines('\n'.join(\
                ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # --- update_as_param(): READLHA = .false., only dependent couplings ---
    fsock.writelines("""subroutine update_as_param()

                        implicit none
                        double precision PI, ZERO
                        logical READLHA
                        parameter (PI=3.141592653589793d0)
                        parameter (ZERO=0d0)
                        include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'
                        READLHA = .false.""")
    fsock.writelines("""
                        include \'intparam_definition.inc\'\n
                     """)

    nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
    nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

    fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

    fsock.writelines('\n'.join(\
                ['call coup%s()' % (nb_coup_indep + i + 1) \
                  for i in range(nb_coup_dep)]))
    fsock.writelines('''\n return \n end\n''')

    # --- update_as_param2(mu_r2, as2): set scale/aS then delegate ---
    fsock.writelines("""subroutine update_as_param2(mu_r2,as2)

                        implicit none
                        double precision PI
                        parameter (PI=3.141592653589793d0)
                        double precision mu_r2, as2
                        include \'model_functions.inc\'""")
    fsock.writelines("""include \'input.inc\'
                        include \'coupl.inc\'""")
    fsock.writelines("""
                        if (mu_r2.gt.0d0) MU_R = mu_r2
                        G = SQRT(4.0d0*PI*AS2)
                        AS = as2

                        CALL UPDATE_AS_PARAM()
                     """)
    fsock.writelines('''\n return \n end\n''')

    # --- mp_update_as_param(): quad-precision refresh ---
    if self.opt['mp']:
        fsock.writelines("""subroutine mp_update_as_param()

                            implicit none
                            logical READLHA
                            include \'model_functions.inc\'""")
        fsock.writelines("""%s MP__PI, MP__ZERO
                            parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                            parameter (MP__ZERO=0e0_16)
                            include \'mp_input.inc\'
                            include \'mp_coupl.inc\'
                        """%self.mp_real_format)
        fsock.writelines("""include \'input.inc\'
                            include \'coupl.inc\'
                            include \'actualize_mp_ext_params.inc\'
                            READLHA = .false.
                            include \'mp_intparam_definition.inc\'\n
                         """)

        nb_coup_indep = 1 + len(self.coups_indep) // nb_def_by_file
        nb_coup_dep = 1 + len(self.coups_dep) // nb_def_by_file

        fsock.write_comments('\ncouplings needed to be evaluated points by points\n')

        fsock.writelines('\n'.join(\
                    ['call mp_coup%s()' % (nb_coup_indep + i + 1) \
                      for i in range(nb_coup_dep)]))
        fsock.writelines('''\n return \n end\n''')
def create_couplings_part(self, nb_file, data, dp=True, mp=False):
    """ create couplings[nb_file].f containing information coming from data.
    Outputs the computation of the double precision and/or the multiple
    precision couplings depending on the parameters dp and mp.
    If mp is True and dp is False, then the prefix 'MP_' is appended to the
    filename and subroutine name.
    """

    # 'mp_' prefix only for the mp-only variant (dp=False, mp=True).
    fsock = self.open('%scouplings%s.f' % ('mp_' if mp and not dp else '',
                                           nb_file), format='fortran')
    fsock.writelines("""subroutine %scoup%s()

          implicit none
          include \'model_functions.inc\'""" % ('mp_' if mp and not dp else '', nb_file))
    if dp:
        fsock.writelines("""
          double precision PI, ZERO
          parameter (PI=3.141592653589793d0)
          parameter (ZERO=0d0)
          include 'input.inc'
          include 'coupl.inc'""")
    if mp:
        fsock.writelines("""%s MP__PI, MP__ZERO
                        parameter (MP__PI=3.1415926535897932384626433832795e0_16)
                        parameter (MP__ZERO=0e0_16)
                        include \'mp_input.inc\'
                        include \'mp_coupl.inc\'
                    """ % self.mp_real_format)

    # One assignment per coupling; the mp variant carries the MP prefix
    # and uses the multiple-precision expression parser.
    for coupling in data:
        if dp:
            fsock.writelines('%s = %s' % (coupling.name,
                                          self.p_to_f.parse(coupling.expr)))
        if mp:
            fsock.writelines('%s%s = %s' % (self.mp_prefix, coupling.name,
                                            self.mp_p_to_f.parse(coupling.expr)))
    fsock.writelines('end')
def create_model_functions_inc(self):
    """ Create model_functions.inc which contains the various declarations
    of auxiliary functions which might be used in the couplings expressions
    """

    additional_fct = []
    # Functions defined in the UFO model that are not provided by the
    # built-in set get an extra declaration.
    ufo_fct = self.model.get('functions')
    if ufo_fct:
        for fct in ufo_fct:
            # already handled by default
            # NOTE(review): this exclusion list contains "cot" while the
            # corresponding list in create_model_functions_def does not -
            # a UFO "cot" would be defined in model_functions.f but never
            # declared here; confirm whether that is intentional.
            if fct.name not in ["complexconjugate", "re", "im", "sec",
                   "csc", "asec", "acsc", "theta_function", "cond",
                   "condif", "reglogp", "reglogm", "reglog", "recms", "arg", "cot",
                   "grreglog", "regsqrt"]:
                additional_fct.append(fct.name)

    fsock = self.open('model_functions.inc', format='fortran')
    # Declarations for the built-in double-precision helpers defined in
    # model_functions.f, plus any additional UFO functions.
    fsock.writelines("""double complex cond
          double complex condif
          double complex reglog
          double complex reglogp
          double complex reglogm
          double complex recms
          double complex arg
          double complex grreglog
          double complex regsqrt
          %s
          """ % "\n".join([" double complex %s" % i for i in additional_fct]))

    if self.opt['mp']:
        # Same declarations for the quad-precision (mp_) variants.
        fsock.writelines("""%(complex_mp_format)s mp_cond
          %(complex_mp_format)s mp_condif
          %(complex_mp_format)s mp_reglog
          %(complex_mp_format)s mp_reglogp
          %(complex_mp_format)s mp_reglogm
          %(complex_mp_format)s mp_recms
          %(complex_mp_format)s mp_arg
          %(complex_mp_format)s mp_grreglog
          %(complex_mp_format)s mp_regsqrt
          %(additional)s
          """ %\
          {"additional": "\n".join([" %s mp_%s" % (self.mp_complex_format, i) for i in additional_fct]),
           'complex_mp_format': self.mp_complex_format
          })
def create_model_functions_def(self):
    """Create model_functions.f with the definitions of the auxiliary
    functions used in coupling expressions (cond, condif, recms, reglog
    variants, regsqrt, grreglog, arg and their quad-precision mp_
    counterparts), append the model's Fortran/functions.f (form-factor
    support) when present, and finally emit any extra functions declared
    in the UFO model itself.

    Fix: the quad-precision TWOPII parameters previously used a mistyped
    value of pi (3.14169258478796...); they now use the same pi digits as
    the MP__PI parameter written by create_couplings_main
    (3.1415926535897932384626433832795e0_16).
    """

    fsock = self.open('model_functions.f', format='fortran')
    # Built-in double-precision helpers.
    fsock.writelines("""double complex function cond(condition,truecase,falsecase)
          implicit none
          double complex condition,truecase,falsecase
          if(condition.eq.(0.0d0,0.0d0)) then
             cond=truecase
          else
             cond=falsecase
          endif
          end

          double complex function condif(condition,truecase,falsecase)
          implicit none
          logical condition
          double complex truecase,falsecase
          if(condition) then
             condif=truecase
          else
             condif=falsecase
          endif
          end

          double complex function recms(condition,expr)
          implicit none
          logical condition
          double complex expr
          if(condition)then
             recms=expr
          else
             recms=dcmplx(dble(expr))
          endif
          end

          double complex function reglog(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0)) then
             reglog=(0.0d0,0.0d0)
          else
             reglog=log(arg)
          endif
          end

          double complex function reglogp(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogp=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).lt.0.0d0)then
                reglogp=log(arg) + TWOPII
             else
                reglogp=log(arg)
             endif
          endif
          end

          double complex function reglogm(arg)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex arg
          if(arg.eq.(0.0d0,0.0d0))then
             reglogm=(0.0d0,0.0d0)
          else
             if(dble(arg).lt.0.0d0.and.dimag(arg).gt.0.0d0)then
                reglogm=log(arg) - TWOPII
             else
                reglogm=log(arg)
             endif
          endif
          end

          double complex function regsqrt(arg_in)
          implicit none
          double complex arg_in
          double complex arg
          arg=arg_in
          if(dabs(dimag(arg)).eq.0.0d0)then
             arg=dcmplx(dble(arg),0.0d0)
          endif
          if(dabs(dble(arg)).eq.0.0d0)then
             arg=dcmplx(0.0d0,dimag(arg))
          endif
          regsqrt=sqrt(arg)
          end

          double complex function grreglog(logsw,expr1_in,expr2_in)
          implicit none
          double complex TWOPII
          parameter (TWOPII=2.0d0*3.1415926535897932d0*(0.0d0,1.0d0))
          double complex expr1_in,expr2_in
          double complex expr1,expr2
          double precision logsw
          double precision imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(dabs(dimag(expr1)).eq.0.0d0)then
             expr1=dcmplx(dble(expr1),0.0d0)
          endif
          if(dabs(dble(expr1)).eq.0.0d0)then
             expr1=dcmplx(0.0d0,dimag(expr1))
          endif
          if(dabs(dimag(expr2)).eq.0.0d0)then
             expr2=dcmplx(dble(expr2),0.0d0)
          endif
          if(dabs(dble(expr2)).eq.0.0d0)then
             expr2=dcmplx(0.0d0,dimag(expr2))
          endif
          if(expr1.eq.(0.0d0,0.0d0))then
             grreglog=(0.0d0,0.0d0)
          else
             imagexpr=dimag(expr1)*dimag(expr2)
             firstsheet=imagexpr.ge.0.0d0
             firstsheet=firstsheet.or.dble(expr1).ge.0.0d0
             firstsheet=firstsheet.or.dble(expr2).ge.0.0d0
             if(firstsheet)then
                grreglog=log(expr1)
             else
                if(dimag(expr1).gt.0.0d0)then
                   grreglog=log(expr1) - logsw*TWOPII
                else
                   grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end

          double complex function arg(comnum)
          implicit none
          double complex comnum
          double complex iim
          iim = (0.0d0,1.0d0)
          if(comnum.eq.(0.0d0,0.0d0)) then
             arg=(0.0d0,0.0d0)
          else
             arg=log(comnum/abs(comnum))/iim
          endif
          end""")
    if self.opt['mp']:
        # Quad-precision counterparts.  TWOPII now uses the correct pi
        # digits (consistent with MP__PI in couplings.f).
        fsock.writelines("""

          %(complex_mp_format)s function mp_cond(condition,truecase,falsecase)
          implicit none
          %(complex_mp_format)s condition,truecase,falsecase
          if(condition.eq.(0.0e0_16,0.0e0_16)) then
             mp_cond=truecase
          else
             mp_cond=falsecase
          endif
          end

          %(complex_mp_format)s function mp_condif(condition,truecase,falsecase)
          implicit none
          logical condition
          %(complex_mp_format)s truecase,falsecase
          if(condition) then
             mp_condif=truecase
          else
             mp_condif=falsecase
          endif
          end

          %(complex_mp_format)s function mp_recms(condition,expr)
          implicit none
          logical condition
          %(complex_mp_format)s expr
          if(condition)then
             mp_recms=expr
          else
             mp_recms=cmplx(real(expr),kind=16)
          endif
          end

          %(complex_mp_format)s function mp_reglog(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.1415926535897932384626433832795e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16)) then
             mp_reglog=(0.0e0_16,0.0e0_16)
          else
             mp_reglog=log(arg)
          endif
          end

          %(complex_mp_format)s function mp_reglogp(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.1415926535897932384626433832795e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogp=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).lt.0.0e0_16)then
                mp_reglogp=log(arg) + TWOPII
             else
                mp_reglogp=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_reglogm(arg)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.1415926535897932384626433832795e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s arg
          if(arg.eq.(0.0e0_16,0.0e0_16))then
             mp_reglogm=(0.0e0_16,0.0e0_16)
          else
             if(real(arg,kind=16).lt.0.0e0_16.and.imagpart(arg).gt.0.0e0_16)then
                mp_reglogm=log(arg) - TWOPII
             else
                mp_reglogm=log(arg)
             endif
          endif
          end

          %(complex_mp_format)s function mp_regsqrt(arg_in)
          implicit none
          %(complex_mp_format)s arg_in
          %(complex_mp_format)s arg
          arg=arg_in
          if(abs(imagpart(arg)).eq.0.0e0_16)then
             arg=cmplx(real(arg,kind=16),0.0e0_16)
          endif
          if(abs(real(arg,kind=16)).eq.0.0e0_16)then
             arg=cmplx(0.0e0_16,imagpart(arg))
          endif
          mp_regsqrt=sqrt(arg)
          end


          %(complex_mp_format)s function mp_grreglog(logsw,expr1_in,expr2_in)
          implicit none
          %(complex_mp_format)s TWOPII
          parameter (TWOPII=2.0e0_16*3.1415926535897932384626433832795e0_16*(0.0e0_16,1.0e0_16))
          %(complex_mp_format)s expr1_in,expr2_in
          %(complex_mp_format)s expr1,expr2
          %(real_mp_format)s logsw
          %(real_mp_format)s imagexpr
          logical firstsheet
          expr1=expr1_in
          expr2=expr2_in
          if(abs(imagpart(expr1)).eq.0.0e0_16)then
             expr1=cmplx(real(expr1,kind=16),0.0e0_16)
          endif
          if(abs(real(expr1,kind=16)).eq.0.0e0_16)then
             expr1=cmplx(0.0e0_16,imagpart(expr1))
          endif
          if(abs(imagpart(expr2)).eq.0.0e0_16)then
             expr2=cmplx(real(expr2,kind=16),0.0e0_16)
          endif
          if(abs(real(expr2,kind=16)).eq.0.0e0_16)then
             expr2=cmplx(0.0e0_16,imagpart(expr2))
          endif
          if(expr1.eq.(0.0e0_16,0.0e0_16))then
             mp_grreglog=(0.0e0_16,0.0e0_16)
          else
             imagexpr=imagpart(expr1)*imagpart(expr2)
             firstsheet=imagexpr.ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr1,kind=16).ge.0.0e0_16
             firstsheet=firstsheet.or.real(expr2,kind=16).ge.0.0e0_16
             if(firstsheet)then
                mp_grreglog=log(expr1)
             else
                if(imagpart(expr1).gt.0.0e0_16)then
                   mp_grreglog=log(expr1) - logsw*TWOPII
                else
                   mp_grreglog=log(expr1) + logsw*TWOPII
                endif
             endif
          endif
          end


          %(complex_mp_format)s function mp_arg(comnum)
          implicit none
          %(complex_mp_format)s comnum
          %(complex_mp_format)s imm
          imm = (0.0e0_16,1.0e0_16)
          if(comnum.eq.(0.0e0_16,0.0e0_16)) then
             mp_arg=(0.0e0_16,0.0e0_16)
          else
             mp_arg=log(comnum/abs(comnum))/imm
          endif
          end"""%{'complex_mp_format':self.mp_complex_format,'real_mp_format':self.mp_real_format})

    # Append the model's own Fortran/functions.f (form-factor support).
    model_path = self.model.get('modelpath')
    if os.path.exists(pjoin(model_path, 'Fortran', 'functions.f')):
        fsock.write_comment_line(' USER DEFINE FUNCTIONS ')
        input_path = pjoin(model_path, 'Fortran', 'functions.f')
        # NOTE(review): calling the Py2 `file` type's writelines unbound
        # presumably bypasses the FortranWriter reformatting to copy the
        # text verbatim - confirm before porting to Python 3, where the
        # `file` builtin no longer exists.
        file.writelines(fsock, open(input_path).read())
        fsock.write_comment_line(' END USER DEFINE FUNCTIONS ')

    # Emit any function defined directly in the UFO model.
    ufo_fct = self.model.get('functions')
    if ufo_fct:
        fsock.write_comment_line(' START UFO DEFINE FUNCTIONS ')
        for fct in ufo_fct:
            # already handled by default
            if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                                "grreglog", "regsqrt"]:
                ufo_fct_template = """
          double complex function %(name)s(%(args)s)
          implicit none
          double complex %(args)s
          %(definitions)s
          %(name)s = %(fct)s

          return
          end
          """
                str_fct = self.p_to_f.parse(fct.expr)
                # Declare any auxiliary symbols the parser flagged.
                definitions = []
                for d in self.p_to_f.to_define:
                    if d == 'pi':
                        definitions.append(' double precision pi')
                        definitions.append(' data pi /3.1415926535897932d0/')
                    else:
                        definitions.append(' double complex %s' % d)

                text = ufo_fct_template % {
                        'name': fct.name,
                        'args': ", ".join(fct.arguments),
                        'fct': str_fct,
                        'definitions': '\n'.join(definitions)
                        }

                fsock.writelines(text)
        if self.opt['mp']:
            fsock.write_comment_line(' START UFO DEFINE FUNCTIONS FOR MP')
            for fct in ufo_fct:
                # already handled by default
                if fct.name not in ["complexconjugate", "re", "im", "sec", "csc", "asec", "acsc", "condif",
                                    "theta_function", "cond", "reglog", "reglogp", "reglogm", "recms", "arg",
                                    "grreglog", "regsqrt"]:
                    ufo_fct_template = """
          %(complex_mp_format)s function mp_%(name)s(mp__%(args)s)
          implicit none
          %(complex_mp_format)s mp__%(args)s
          %(definitions)s
          mp_%(name)s = %(fct)s

          return
          end
          """
                    str_fct = self.mp_p_to_f.parse(fct.expr)
                    definitions = []
                    for d in self.mp_p_to_f.to_define:
                        if d == 'pi':
                            definitions.append(' %s mp__pi' % self.mp_real_format)
                            definitions.append(' data mp__pi /3.141592653589793238462643383279502884197e+00_16/')
                        else:
                            definitions.append(' %s mp_%s' % (self.mp_complex_format, d))
                    text = ufo_fct_template % {
                            'name': fct.name,
                            'args': ", mp__".join(fct.arguments),
                            'fct': str_fct,
                            'definitions': '\n'.join(definitions),
                            'complex_mp_format': self.mp_complex_format
                            }
                    fsock.writelines(text)

        fsock.write_comment_line(' STOP UFO DEFINE FUNCTIONS ')
    def create_makeinc(self):
        """create makeinc.inc containing the file to compile """

        fsock = self.open('makeinc.inc', comment='#')
        # Objects that are always part of the MODEL library.
        text = 'MODEL = couplings.o lha_read.o printout.o rw_para.o'
        text += ' model_functions.o '

        # The couplings are written out in chunks of at most 25 couplings per
        # couplings<i>.f file, hence the "1 + len(...) // 25" counts below.
        # NOTE(review): the two names look swapped (nb_coup_indep is computed
        # from coups_dep and vice versa).  The total used for the plain
        # couplings files is symmetric and unaffected, but the range of the
        # mp_couplings files depends on the split -- confirm against the
        # couplings-file generation before changing anything here.
        nb_coup_indep = 1 + len(self.coups_dep) // 25
        nb_coup_dep = 1 + len(self.coups_indep) // 25
        couplings_files=['couplings%s.o' % (i+1) \
                          for i in range(nb_coup_dep + nb_coup_indep) ]
        if self.opt['mp']:
            # Multiple-precision counterparts exist only for the second part
            # of the couplings files (the scale-dependent ones).
            couplings_files+=['mp_couplings%s.o' % (i+1) for i in \
                               range(nb_coup_dep,nb_coup_dep + nb_coup_indep) ]
        text += ' '.join(couplings_files)
        fsock.writelines(text)
6787
6788 - def create_param_write(self):
6789 """ create param_write """ 6790 6791 fsock = self.open('param_write.inc', format='fortran') 6792 6793 fsock.writelines("""write(*,*) ' External Params' 6794 write(*,*) ' ---------------------------------' 6795 write(*,*) ' '""") 6796 def format(name): 6797 return 'write(*,*) \'%(name)s = \', %(name)s' % {'name': name}
6798 6799 # Write the external parameter 6800 lines = [format(param.name) for param in self.params_ext] 6801 fsock.writelines('\n'.join(lines)) 6802 6803 fsock.writelines("""write(*,*) ' Internal Params' 6804 write(*,*) ' ---------------------------------' 6805 write(*,*) ' '""") 6806 lines = [format(data.name) for data in self.params_indep 6807 if data.name != 'ZERO' and self.check_needed_param(data.name)] 6808 fsock.writelines('\n'.join(lines)) 6809 fsock.writelines("""write(*,*) ' Internal Params evaluated point by point' 6810 write(*,*) ' ----------------------------------------' 6811 write(*,*) ' '""") 6812 lines = [format(data.name) for data in self.params_dep \ 6813 if self.check_needed_param(data.name)] 6814 6815 fsock.writelines('\n'.join(lines)) 6816 6817 6818
6819 - def create_ident_card(self):
6820 """ create the ident_card.dat """ 6821 6822 def format(parameter): 6823 """return the line for the ident_card corresponding to this parameter""" 6824 colum = [parameter.lhablock.lower()] + \ 6825 [str(value) for value in parameter.lhacode] + \ 6826 [parameter.name] 6827 if not parameter.name: 6828 return '' 6829 return ' '.join(colum)+'\n'
6830 6831 fsock = self.open('ident_card.dat') 6832 6833 external_param = [format(param) for param in self.params_ext] 6834 fsock.writelines('\n'.join(external_param)) 6835
6836 - def create_actualize_mp_ext_param_inc(self):
6837 """ create the actualize_mp_ext_params.inc code """ 6838 6839 # In principle one should actualize all external, but for now, it is 6840 # hardcoded that only AS and MU_R can by dynamically changed by the user 6841 # so that we only update those ones. 6842 # Of course, to be on the safe side, one could decide to update all 6843 # external parameters. 6844 update_params_list=[p for p in self.params_ext if p.name in 6845 self.PS_dependent_key] 6846 6847 res_strings = ["%(mp_prefix)s%(name)s=%(name)s"\ 6848 %{'mp_prefix':self.mp_prefix,'name':param.name}\ 6849 for param in update_params_list] 6850 # When read_lha is false, it is G which is taken in input and not AS, so 6851 # this is what should be reset here too. 6852 if 'aS' in [param.name for param in update_params_list]: 6853 res_strings.append("%(mp_prefix)sG=G"%{'mp_prefix':self.mp_prefix}) 6854 6855 fsock = self.open('actualize_mp_ext_params.inc', format='fortran') 6856 fsock.writelines('\n'.join(res_strings))
6857
    def create_param_read(self):
        """create param_read"""

        # For MadEvent-style and loop-induced outputs the parameter reading
        # is delegated to the generated param_card.inc include.
        if self.opt['export_format'] in ['madevent', 'FKS5_default', 'FKS5_optimized'] \
            or self.opt['loop_induced']:
            fsock = self.open('param_read.inc', format='fortran')
            fsock.writelines(' include \'../param_card.inc\'')
            return

        def format_line(parameter):
            """return the line for the ident_card corresponding to this
            parameter"""
            # LHA_get_real(npara, param, value, name, target, default):
            # the default is the parameter value parsed into Fortran syntax.
            template = \
            """ call LHA_get_real(npara,param,value,'%(name)s',%(name)s,%(value)s)""" \
                % {'name': parameter.name,
                       'value': self.p_to_f.parse(str(parameter.value.real))}
            # When multiple precision is active, also fill the mp_ variable.
            if self.opt['mp']:
                template = template+ \
                ("\n call MP_LHA_get_real(npara,param,value,'%(name)s',"+
                 "%(mp_prefix)s%(name)s,%(value)s)") \
                 % {'name': parameter.name,'mp_prefix': self.mp_prefix,
                       'value': self.mp_p_to_f.parse(str(parameter.value.real))}
            return template

        fsock = self.open('param_read.inc', format='fortran')
        res_strings = [format_line(param) \
                          for param in self.params_ext]

        # Correct width sign for Majorana particles (where the width
        # and mass need to have the same sign)
        for particle in self.model.get('particles'):
            if particle.is_fermion() and particle.get('self_antipart') and \
                   particle.get('width').lower() != 'zero':

                res_strings.append('%(width)s = sign(%(width)s,%(mass)s)' % \
                 {'width': particle.get('width'), 'mass': particle.get('mass')})
                # Same sign fix for the multiple-precision variables.
                if self.opt['mp']:
                    res_strings.append(\
                      ('%(mp_pref)s%(width)s = sign(%(mp_pref)s%(width)s,'+\
                       '%(mp_pref)s%(mass)s)')%{'width': particle.get('width'),\
                       'mass': particle.get('mass'),'mp_pref':self.mp_prefix})

        fsock.writelines('\n'.join(res_strings))


    @staticmethod
6904 - def create_param_card_static(model, output_path, rule_card_path=False, 6905 mssm_convert=True):
6906 """ create the param_card.dat for a givent model --static method-- """ 6907 #1. Check if a default param_card is present: 6908 done = False 6909 if hasattr(model, 'restrict_card') and isinstance(model.restrict_card, str): 6910 restrict_name = os.path.basename(model.restrict_card)[9:-4] 6911 model_path = model.get('modelpath') 6912 if os.path.exists(pjoin(model_path,'paramcard_%s.dat' % restrict_name)): 6913 done = True 6914 files.cp(pjoin(model_path,'paramcard_%s.dat' % restrict_name), 6915 output_path) 6916 if not done: 6917 param_writer.ParamCardWriter(model, output_path) 6918 6919 if rule_card_path: 6920 if hasattr(model, 'rule_card'): 6921 model.rule_card.write_file(rule_card_path) 6922 6923 if mssm_convert: 6924 model_name = model.get('name') 6925 # IF MSSM convert the card to SLAH1 6926 if model_name == 'mssm' or model_name.startswith('mssm-'): 6927 import models.check_param_card as translator 6928 # Check the format of the param_card for Pythia and make it correct 6929 if rule_card_path: 6930 translator.make_valid_param_card(output_path, rule_card_path) 6931 translator.convert_to_slha1(output_path)
6932
6933 - def create_param_card(self):
6934 """ create the param_card.dat """ 6935 6936 rule_card = pjoin(self.dir_path, 'param_card_rule.dat') 6937 if not hasattr(self.model, 'rule_card'): 6938 rule_card=False 6939 self.create_param_card_static(self.model, 6940 output_path=pjoin(self.dir_path, 'param_card.dat'), 6941 rule_card_path=rule_card, 6942 mssm_convert=True)
6943
def ExportV4Factory(cmd, noclean, output_type='default', group_subprocesses=True, cmd_options={}):
    """ Determine which Export_v4 class is required. cmd is the command
        interface containing all potential usefull information.
        The output_type argument specifies from which context the output
        is called. It is 'madloop' for MadLoop5, 'amcatnlo' for FKS5 output
        and 'default' for tree-level outputs."""

    opt = dict(cmd.options)
    opt['output_options'] = cmd_options

    # ==========================================================================
    # First check whether Ninja must be installed.
    # Ninja would only be required if:
    #  a) Loop optimized output is selected
    #  b) the process gathered from the amplitude generated use loops

    # Extract the first process (tree-level amplitudes first, then FKS
    # process definitions) to decide whether a reduction tool is needed.
    if len(cmd._curr_amps)>0:
        try:
            curr_proc = cmd._curr_amps[0].get('process')
        except base_objects.PhysicsObject.PhysicsObjectError:
            curr_proc = None
    elif hasattr(cmd,'_fks_multi_proc') and \
                 len(cmd._fks_multi_proc.get('process_definitions'))>0:
        curr_proc = cmd._fks_multi_proc.get('process_definitions')[0]
    else:
        curr_proc = None

    # A reduction tool is required for loop-optimized output of a process
    # with virtual (loop) corrections.
    requires_reduction_tool = opt['loop_optimized_output'] and \
                              (not curr_proc is None) and \
                              (curr_proc.get('perturbation_couplings') != [] and \
                               not curr_proc.get('NLO_mode') in [None,'real','tree','LO','LOonly'])

    # An installation is required then, but only if the specified path is the
    # default local one and that the Ninja library appears missing.
    if requires_reduction_tool:
        cmd.install_reduction_library()

    # ==========================================================================
    # First treat the MadLoop5 standalone case
    # Options shared by all MadLoop-based exporters (also reused below for
    # loop-induced MadEvent output).
    MadLoop_SA_options = {'clean': not noclean,
      'complex_mass':cmd.options['complex_mass_scheme'],
      'export_format':'madloop',
      'mp':True,
      'loop_dir': os.path.join(cmd._mgme_dir,'Template','loop_material'),
      'cuttools_dir': cmd._cuttools_dir,
      'iregi_dir':cmd._iregi_dir,
      'golem_dir':cmd.options['golem'],
      'samurai_dir':cmd.options['samurai'],
      'ninja_dir':cmd.options['ninja'],
      'collier_dir':cmd.options['collier'],
      'fortran_compiler':cmd.options['fortran_compiler'],
      'f2py_compiler':cmd.options['f2py_compiler'],
      'output_dependencies':cmd.options['output_dependencies'],
      'SubProc_prefix':'P',
      'compute_color_flows':cmd.options['loop_color_flows'],
      'mode': 'reweight' if cmd._export_format == "standalone_rw" else '',
      'cluster_local_path': cmd.options['cluster_local_path'],
      'output_options': cmd_options
      }

    if output_type.startswith('madloop'):
        import madgraph.loop.loop_exporters as loop_exporters
        if os.path.isdir(os.path.join(cmd._mgme_dir, 'Template/loop_material')):
            ExporterClass=None
            if not cmd.options['loop_optimized_output']:
                ExporterClass=loop_exporters.LoopProcessExporterFortranSA
            else:
                if output_type == "madloop":
                    ExporterClass=loop_exporters.LoopProcessOptimizedExporterFortranSA
                    MadLoop_SA_options['export_format'] = 'madloop_optimized'
                elif output_type == "madloop_matchbox":
                    ExporterClass=loop_exporters.LoopProcessExporterFortranMatchBox
                    MadLoop_SA_options['export_format'] = 'madloop_matchbox'
                else:
                    raise Exception, "output_type not recognize %s" % output_type
            return ExporterClass(cmd._export_dir, MadLoop_SA_options)
        else:
            raise MadGraph5Error('MG5_aMC cannot find the \'loop_material\' directory'+\
                                 ' in %s'%str(cmd._mgme_dir))

    # Then treat the aMC@NLO output
    elif output_type=='amcatnlo':
        import madgraph.iolibs.export_fks as export_fks
        ExporterClass=None
        amcatnlo_options = dict(opt)
        amcatnlo_options.update(MadLoop_SA_options)
        # mp is only needed when there are virtual amplitudes.
        amcatnlo_options['mp'] = len(cmd._fks_multi_proc.get_virt_amplitudes()) > 0
        if not cmd.options['loop_optimized_output']:
            logger.info("Writing out the aMC@NLO code")
            ExporterClass = export_fks.ProcessExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_default'
        else:
            logger.info("Writing out the aMC@NLO code, using optimized Loops")
            ExporterClass = export_fks.ProcessOptimizedExporterFortranFKS
            amcatnlo_options['export_format']='FKS5_optimized'
        return ExporterClass(cmd._export_dir, amcatnlo_options)


    # Then the default tree-level output
    elif output_type=='default':
        assert group_subprocesses in [True, False]

        opt = dict(opt)
        opt.update({'clean': not noclean,
                    'complex_mass': cmd.options['complex_mass_scheme'],
                    'export_format':cmd._export_format,
                    'mp': False,
                    'sa_symmetry':False,
                    'model': cmd._curr_model.get('name'),
                    'v5_model': False if cmd._model_v4_path else True })

        format = cmd._export_format #shortcut

        if format in ['standalone_msP', 'standalone_msF', 'standalone_rw']:
            opt['sa_symmetry'] = True
        elif format == 'plugin':
            opt['sa_symmetry'] = cmd._export_plugin.sa_symmetry

        # Options for the loop-induced MadEvent output case.
        loop_induced_opt = dict(opt)
        loop_induced_opt.update(MadLoop_SA_options)
        loop_induced_opt['export_format'] = 'madloop_optimized'
        loop_induced_opt['SubProc_prefix'] = 'PV'
        # For loop_induced output with MadEvent, we must have access to the
        # color flows.
        loop_induced_opt['compute_color_flows'] = True
        for key in opt:
            if key not in loop_induced_opt:
                loop_induced_opt[key] = opt[key]

        # Madevent output supports MadAnalysis5
        if format in ['madevent']:
            opt['madanalysis5'] = cmd.options['madanalysis5_path']

        if format == 'matrix' or format.startswith('standalone'):
            return ProcessExporterFortranSA(cmd._export_dir, opt, format=format)

        elif format in ['madevent'] and group_subprocesses:
            # Loop-induced amplitudes need the MadLoop-based exporter.
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMEGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranMEGroup(cmd._export_dir,opt)
        elif format in ['madevent']:
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                import madgraph.loop.loop_exporters as loop_exporters
                return loop_exporters.LoopInducedExporterMENoGroup(
                    cmd._export_dir,loop_induced_opt)
            else:
                return ProcessExporterFortranME(cmd._export_dir,opt)
        elif format in ['matchbox']:
            return ProcessExporterFortranMatchBox(cmd._export_dir,opt)
        elif cmd._export_format in ['madweight'] and group_subprocesses:

            return ProcessExporterFortranMWGroup(cmd._export_dir, opt)
        elif cmd._export_format in ['madweight']:
            return ProcessExporterFortranMW(cmd._export_dir, opt)
        elif format == 'plugin':
            if isinstance(cmd._curr_amps[0],
                          loop_diagram_generation.LoopAmplitude):
                return cmd._export_plugin(cmd._export_dir, loop_induced_opt)
            else:
                return cmd._export_plugin(cmd._export_dir, opt)

        else:
            raise Exception, 'Wrong export_v4 format'
    else:
        raise MadGraph5Error, 'Output type %s not reckognized in ExportV4Factory.'
7114
7115 7116 7117 7118 #=============================================================================== 7119 # ProcessExporterFortranMWGroup 7120 #=============================================================================== 7121 -class ProcessExporterFortranMWGroup(ProcessExporterFortranMW):
7122 """Class to take care of exporting a set of matrix elements to 7123 MadEvent subprocess group format.""" 7124 7125 matrix_file = "matrix_madweight_group_v4.inc" 7126 grouped_mode = 'madweight' 7127 #=========================================================================== 7128 # generate_subprocess_directory 7129 #===========================================================================
    def generate_subprocess_directory(self, subproc_group,
                                      fortran_model,
                                      group_number):
        """Generate the Pn directory for a subprocess group in MadEvent,
        including the necessary matrix_N.f files, configs.inc and various
        other helper files."""

        if not isinstance(subproc_group, group_subprocs.SubProcessGroup):
            raise base_objects.PhysicsObject.PhysicsObjectError,\
                  "subproc_group object not SubProcessGroup"

        # Lazily bind the model from the first process of the group.
        if not self.model:
            self.model = subproc_group.get('matrix_elements')[0].\
                         get('processes')[0].get('model')

        pathdir = os.path.join(self.dir_path, 'SubProcesses')

        # Create the directory PN in the specified path
        subprocdir = "P%d_%s" % (subproc_group.get('number'),
                                 subproc_group.get('name'))
        try:
            os.mkdir(pjoin(pathdir, subprocdir))
        except os.error as error:
            # An already-existing directory is only worth a warning.
            logger.warning(error.strerror + " " + subprocdir)


        logger.info('Creating files in directory %s' % subprocdir)
        Ppath = pjoin(pathdir, subprocdir)

        # Create the matrix.f files, auto_dsig.f files and all inc files
        # for all subprocesses in the group

        maxamps = 0
        maxflows = 0
        tot_calls = 0

        matrix_elements = subproc_group.get('matrix_elements')

        for ime, matrix_element in \
                enumerate(matrix_elements):
            filename = pjoin(Ppath, 'matrix%d.f' % (ime+1))
            calls, ncolor = \
               self.write_matrix_element_v4(writers.FortranWriter(filename),
                                            matrix_element,
                                            fortran_model,
                                            str(ime+1),
                                            subproc_group.get('diagram_maps')[\
                                                                          ime])

            filename = pjoin(Ppath, 'auto_dsig%d.f' % (ime+1))
            self.write_auto_dsig_file(writers.FortranWriter(filename),
                                      matrix_element,
                                      str(ime+1))

            # Keep track of needed quantities
            tot_calls += int(calls)
            maxflows = max(maxflows, ncolor)
            maxamps = max(maxamps, len(matrix_element.get('diagrams')))

            # Draw diagrams
            filename = pjoin(Ppath, "matrix%d.ps" % (ime+1))
            plot = draw.MultiEpsDiagramDrawer(matrix_element.get('base_amplitude').\
                                                 get('diagrams'),
                                              filename,
                                              model = \
                                                matrix_element.get('processes')[0].\
                                                                   get('model'),
                                              amplitude=True)
            logger.info("Generating Feynman diagrams for " + \
                        matrix_element.get('processes')[0].nice_string())
            plot.draw()

        # Extract number of external particles
        # NOTE: uses the last matrix_element of the loop above; all members
        # of a subprocess group share the same external multiplicity.
        (nexternal, ninitial) = matrix_element.get_nexternal_ninitial()

        # Generate a list of diagrams corresponding to each configuration
        # [[d1, d2, ...,dn],...] where 1,2,...,n is the subprocess number
        # If a subprocess has no diagrams for this config, the number is 0

        subproc_diagrams_for_config = subproc_group.get('diagrams_for_configs')

        filename = pjoin(Ppath, 'auto_dsig.f')
        self.write_super_auto_dsig_file(writers.FortranWriter(filename),
                                        subproc_group)

        filename = pjoin(Ppath,'configs.inc')
        nconfigs, s_and_t_channels = self.write_configs_file(\
            writers.FortranWriter(filename),
            subproc_group,
            subproc_diagrams_for_config)

        filename = pjoin(Ppath, 'leshouche.inc')
        self.write_leshouche_file(writers.FortranWriter(filename),
                                  subproc_group)

        filename = pjoin(Ppath, 'phasespace.inc')
        self.write_phasespace_file(writers.FortranWriter(filename),
                                   nconfigs)


        filename = pjoin(Ppath, 'maxamps.inc')
        self.write_maxamps_file(writers.FortranWriter(filename),
                                maxamps,
                                maxflows,
                                max([len(me.get('processes')) for me in \
                                     matrix_elements]),
                                len(matrix_elements))

        filename = pjoin(Ppath, 'mirrorprocs.inc')
        self.write_mirrorprocs(writers.FortranWriter(filename),
                               subproc_group)

        filename = pjoin(Ppath, 'nexternal.inc')
        self.write_nexternal_file(writers.FortranWriter(filename),
                                  nexternal, ninitial)

        filename = pjoin(Ppath, 'pmass.inc')
        self.write_pmass_file(writers.FortranWriter(filename),
                              matrix_element)

        filename = pjoin(Ppath, 'props.inc')
        self.write_props_file(writers.FortranWriter(filename),
                              matrix_element,
                              s_and_t_channels)

        # filename = pjoin(Ppath, 'processes.dat')
        # files.write_to_file(filename,
        #                     self.write_processes_file,
        #                     subproc_group)

        # Generate jpgs -> pass in make_html
        #os.system(os.path.join('..', '..', 'bin', 'gen_jpeg-pl'))

        # Symlink the shared per-process sources and includes into Ppath.
        linkfiles = ['driver.f', 'cuts.f', 'initialization.f','gen_ps.f', 'makefile', 'coupl.inc','madweight_param.inc', 'run.inc', 'setscales.f']

        for file in linkfiles:
            ln('../%s' % file, cwd=Ppath)

        ln('nexternal.inc', '../../Source', cwd=Ppath, log=False)
        ln('leshouche.inc', '../../Source', cwd=Ppath, log=False)
        ln('maxamps.inc', '../../Source', cwd=Ppath, log=False)
        ln('../../Source/maxparticles.inc', '.', log=True, cwd=Ppath)
        ln('../../Source/maxparticles.inc', '.', name='genps.inc', log=True, cwd=Ppath)
        ln('phasespace.inc', '../', log=True, cwd=Ppath)
        # Normalize a falsy call count to an explicit 0 for the caller.
        if not tot_calls:
            tot_calls = 0
        return tot_calls
7277 7278 7279 #=========================================================================== 7280 # Helper functions 7281 #===========================================================================
7282 - def modify_grouping(self, matrix_element):
7283 """allow to modify the grouping (if grouping is in place) 7284 return two value: 7285 - True/False if the matrix_element was modified 7286 - the new(or old) matrix element""" 7287 7288 return True, matrix_element.split_lepton_grouping()
7289 7290 #=========================================================================== 7291 # write_super_auto_dsig_file 7292 #===========================================================================
7293 - def write_super_auto_dsig_file(self, writer, subproc_group):
7294 """Write the auto_dsig.f file selecting between the subprocesses 7295 in subprocess group mode""" 7296 7297 replace_dict = {} 7298 7299 # Extract version number and date from VERSION file 7300 info_lines = self.get_mg5_info_lines() 7301 replace_dict['info_lines'] = info_lines 7302 7303 matrix_elements = subproc_group.get('matrix_elements') 7304 7305 # Extract process info lines 7306 process_lines = '\n'.join([self.get_process_info_lines(me) for me in \ 7307 matrix_elements]) 7308 replace_dict['process_lines'] = process_lines 7309 7310 nexternal, ninitial = matrix_elements[0].get_nexternal_ninitial() 7311 replace_dict['nexternal'] = nexternal 7312 7313 replace_dict['nsprocs'] = 2*len(matrix_elements) 7314 7315 # Generate dsig definition line 7316 dsig_def_line = "DOUBLE PRECISION " + \ 7317 ",".join(["DSIG%d" % (iproc + 1) for iproc in \ 7318 range(len(matrix_elements))]) 7319 replace_dict["dsig_def_line"] = dsig_def_line 7320 7321 # Generate dsig process lines 7322 call_dsig_proc_lines = [] 7323 for iproc in range(len(matrix_elements)): 7324 call_dsig_proc_lines.append(\ 7325 "IF(IPROC.EQ.%(num)d) DSIGPROC=DSIG%(num)d(P1,WGT,IMODE) ! %(proc)s" % \ 7326 {"num": iproc + 1, 7327 "proc": matrix_elements[iproc].get('processes')[0].base_string()}) 7328 replace_dict['call_dsig_proc_lines'] = "\n".join(call_dsig_proc_lines) 7329 7330 if writer: 7331 file = open(os.path.join(_file_path, \ 7332 'iolibs/template_files/super_auto_dsig_mw_group_v4.inc')).read() 7333 file = file % replace_dict 7334 # Write the file 7335 writer.writelines(file) 7336 else: 7337 return replace_dict
7338 7339 #=========================================================================== 7340 # write_mirrorprocs 7341 #===========================================================================
7342 - def write_mirrorprocs(self, writer, subproc_group):
7343 """Write the mirrorprocs.inc file determining which processes have 7344 IS mirror process in subprocess group mode.""" 7345 7346 lines = [] 7347 bool_dict = {True: '.true.', False: '.false.'} 7348 matrix_elements = subproc_group.get('matrix_elements') 7349 lines.append("DATA (MIRRORPROCS(I),I=1,%d)/%s/" % \ 7350 (len(matrix_elements), 7351 ",".join([bool_dict[me.get('has_mirror_process')] for \ 7352 me in matrix_elements]))) 7353 # Write the file 7354 writer.writelines(lines)
7355 7356 #=========================================================================== 7357 # write_configs_file 7358 #===========================================================================
7359 - def write_configs_file(self, writer, subproc_group, diagrams_for_config):
7360 """Write the configs.inc file with topology information for a 7361 subprocess group. Use the first subprocess with a diagram for each 7362 configuration.""" 7363 7364 matrix_elements = subproc_group.get('matrix_elements') 7365 model = matrix_elements[0].get('processes')[0].get('model') 7366 7367 diagrams = [] 7368 config_numbers = [] 7369 for iconfig, config in enumerate(diagrams_for_config): 7370 # Check if any diagrams correspond to this config 7371 if set(config) == set([0]): 7372 continue 7373 subproc_diags = [] 7374 for s,d in enumerate(config): 7375 if d: 7376 subproc_diags.append(matrix_elements[s].\ 7377 get('diagrams')[d-1]) 7378 else: 7379 subproc_diags.append(None) 7380 diagrams.append(subproc_diags) 7381 config_numbers.append(iconfig + 1) 7382 7383 # Extract number of external particles 7384 (nexternal, ninitial) = subproc_group.get_nexternal_ninitial() 7385 7386 return len(diagrams), \ 7387 self.write_configs_file_from_diagrams(writer, diagrams, 7388 config_numbers, 7389 nexternal, ninitial, 7390 matrix_elements[0],model)
7391 7392 #=========================================================================== 7393 # write_run_configs_file 7394 #===========================================================================
7395 - def write_run_config_file(self, writer):
7396 """Write the run_configs.inc file for MadEvent""" 7397 7398 path = os.path.join(_file_path,'iolibs','template_files','madweight_run_config.inc') 7399 text = open(path).read() % {'chanperjob':'2'} 7400 writer.write(text) 7401 return True
7402 7403 7404 #=========================================================================== 7405 # write_leshouche_file 7406 #===========================================================================
7407 - def write_leshouche_file(self, writer, subproc_group):
7408 """Write the leshouche.inc file for MG4""" 7409 7410 all_lines = [] 7411 7412 for iproc, matrix_element in \ 7413 enumerate(subproc_group.get('matrix_elements')): 7414 all_lines.extend(self.get_leshouche_lines(matrix_element, 7415 iproc)) 7416 7417 # Write the file 7418 writer.writelines(all_lines) 7419 7420 return True
7421